diff --git a/.gitattributes b/.gitattributes index ea0925ced1944afd2727da58ce1898ff87a898d4..d4261ea3b42d77e7a24a53fece742d98f02ff257 100644 --- a/.gitattributes +++ b/.gitattributes @@ -455,3 +455,21 @@ HVU_QA/t5-viet-qg-finetuned/checkpoint-74500/spiece.model filter=lfs diff=lfs me HVU_QA/t5-viet-qg-finetuned/checkpoint-75000/model.safetensors filter=lfs diff=lfs merge=lfs -text HVU_QA/t5-viet-qg-finetuned/checkpoint-75000/optimizer.pt filter=lfs diff=lfs merge=lfs -text HVU_QA/t5-viet-qg-finetuned/checkpoint-75000/spiece.model filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/model.safetensors filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/optimizer.pt filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/spiece.model filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/model.safetensors filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/optimizer.pt filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/spiece.model filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/model.safetensors filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/optimizer.pt filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/spiece.model filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/model.safetensors filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/optimizer.pt filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/spiece.model filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/model.safetensors filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/optimizer.pt filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/spiece.model filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/model.safetensors filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/optimizer.pt filter=lfs diff=lfs merge=lfs -text +HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/spiece.model filter=lfs diff=lfs merge=lfs -text diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/added_tokens.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/added_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..ea36ca9a30d42cfe00f964ed2b450595386671dc --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/added_tokens.json @@ -0,0 +1,98 @@ +{ + "": 36095, + "": 36085, + "": 36084, + "": 36083, + "": 36082, + "": 36081, + "": 36080, + "": 36079, + "": 36078, + "": 36077, + "": 36076, + "": 36094, + "": 36075, + "": 36074, + "": 36073, + "": 36072, + "": 36071, + "": 36070, + "": 36069, + "": 36068, + "": 36067, + "": 36066, + "": 36093, + "": 36065, + "": 36064, + "": 36063, + "": 36062, + "": 36061, + "": 36060, + "": 36059, + "": 36058, + "": 36057, + "": 36056, + "": 36092, + "": 36055, + "": 36054, + "": 36053, + "": 36052, + "": 36051, + "": 36050, + "": 36049, + "": 36048, + "": 36047, + "": 36046, + "": 36091, + "": 36045, + "": 36044, + "": 36043, + "": 36042, + "": 36041, + "": 36040, + "": 36039, + "": 36038, + "": 36037, + "": 36036, + "": 36090, + "": 36035, + "": 36034, + "": 36033, + "": 36032, + "": 36031, + "": 36030, + "": 36029, + "": 36028, + "": 36027, + "": 36026, + "": 36089, + "": 36025, + 
"": 36024, + "": 36023, + "": 36022, + "": 36021, + "": 36020, + "": 36019, + "": 36018, + "": 36017, + "": 36016, + "": 36088, + "": 36015, + "": 36014, + "": 36013, + "": 36012, + "": 36011, + "": 36010, + "": 36009, + "": 36008, + "": 36007, + "": 36006, + "": 36087, + "": 36005, + "": 36004, + "": 36003, + "": 36002, + "": 36001, + "": 36000, + "": 36086 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..402574816dbd44d7c00b58882614b0cba909b7da --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/config.json @@ -0,0 +1,31 @@ +{ + "architectures": [ + "T5ForConditionalGeneration" + ], + "classifier_dropout": 0.0, + "d_ff": 3072, + "d_kv": 64, + "d_model": 768, + "decoder_start_token_id": 0, + "dense_act_fn": "relu", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "relu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": false, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "n_positions": 512, + "num_decoder_layers": 12, + "num_heads": 12, + "num_layers": 12, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "torch_dtype": "float32", + "transformers_version": "4.53.2", + "use_cache": true, + "vocab_size": 36096 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/generation_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..88a411aa38dffcace543088cff3153003de6e01a --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/generation_config.json @@ -0,0 +1,7 @@ +{ + "_from_model_config": true, + "decoder_start_token_id": 0, + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.53.2" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/model.safetensors b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..441490fe56cffd3759021c87640bd31cffc6f0c4 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ffa0e981ee0d0ac307abb9f4ef1c36188c44cc2372480fd7994956e6d17b329a +size 903834408 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/optimizer.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..bb8f70a4b0a92a83e3d167e6b42f1eb3bb823bc6 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ffef2d25f6448810cfbbd850c8fbf94a363ba127e6a2c804da940aec6e01c43c +size 1807824651 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/rng_state.pth b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..305c3724e177f9d3c751969e7a65a73a03ede7c8 Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/rng_state.pth differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/scheduler.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..f6f7fabf0d3a71006a2c47a955bde146f21d21db Binary files /dev/null and 
b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/scheduler.pt differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/special_tokens_map.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..843b3344f47b1783c48b5ac91bb6015ae9d3c4be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/special_tokens_map.json @@ -0,0 +1,121 @@ +{ + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/spiece.model b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/spiece.model new file mode 100644 index 0000000000000000000000000000000000000000..f8bddaf892bdf23d2148f3a3b358f16c5c45c7be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/spiece.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59986b62f9f0b90edafb9b073ea7b93d21114a5841219a1ea2399ade73f729c6 +size 820370 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/tokenizer_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7da9aea82b39809d9fbe6214e6f4fa2340b695f8 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/tokenizer_config.json @@ -0,0 +1,905 @@ +{ + "add_prefix_space": true, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "36000": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36001": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36002": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36003": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36004": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36005": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36006": { + 
"content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36007": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36008": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36009": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36010": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36011": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36012": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36013": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36014": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36015": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36016": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36017": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36018": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36019": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36020": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36021": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36022": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36023": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36024": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36025": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36026": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36027": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36028": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36029": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36030": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36031": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36032": { + "content": "", + 
"lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36033": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36034": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36035": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36036": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36037": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36038": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36039": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36040": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36041": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36042": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36043": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36044": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36045": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36046": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36047": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36048": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36049": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36050": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36051": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36052": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36053": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36054": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36055": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36056": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36057": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36058": { + "content": "", + "lstrip": true, + 
"normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36059": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36060": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36061": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36062": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36063": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36064": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36065": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36066": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36067": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36068": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36069": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36070": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36071": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36072": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36073": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36074": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36075": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36076": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36077": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36078": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36079": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36080": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36081": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36082": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36083": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36084": { + "content": "", + "lstrip": true, + "normalized": 
false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36085": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36086": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36087": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36088": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36089": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36090": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36091": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36092": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36093": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36094": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36095": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_ids": 96, + "extra_special_tokens": {}, + "legacy": true, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "tokenizer_class": "T5Tokenizer", + "unk_token": "" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/trainer_state.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..4cf38de772e4d2392dd1b4d85ca9a6904e14d2b3 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/trainer_state.json @@ -0,0 +1,52884 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.915170469902313, + "eval_steps": 500, + "global_step": 75500, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0003861152940267964, + "grad_norm": 8.003422737121582, + "learning_rate": 0.0001999768330823584, + "loss": 5.0107, + "step": 10 + }, + { + "epoch": 0.0007722305880535929, + "grad_norm": 11.965606689453125, + "learning_rate": 0.0001999510920627566, + "loss": 0.9013, + "step": 20 + }, + { + "epoch": 0.0011583458820803893, + "grad_norm": 9.783374786376953, + "learning_rate": 0.00019992535104315483, + "loss": 0.5506, + "step": 30 + 
}, + { + "epoch": 0.0015444611761071857, + "grad_norm": 5.720436096191406, + "learning_rate": 0.00019989961002355304, + "loss": 0.501, + "step": 40 + }, + { + "epoch": 0.001930576470133982, + "grad_norm": 11.18126392364502, + "learning_rate": 0.00019987386900395125, + "loss": 0.8176, + "step": 50 + }, + { + "epoch": 0.0023166917641607786, + "grad_norm": 8.88875675201416, + "learning_rate": 0.00019984812798434947, + "loss": 0.5404, + "step": 60 + }, + { + "epoch": 0.0027028070581875748, + "grad_norm": 2.9886066913604736, + "learning_rate": 0.00019982238696474768, + "loss": 0.5295, + "step": 70 + }, + { + "epoch": 0.0030889223522143714, + "grad_norm": 8.936307907104492, + "learning_rate": 0.0001997966459451459, + "loss": 0.7398, + "step": 80 + }, + { + "epoch": 0.0034750376462411676, + "grad_norm": 11.393534660339355, + "learning_rate": 0.0001997709049255441, + "loss": 0.6333, + "step": 90 + }, + { + "epoch": 0.003861152940267964, + "grad_norm": 8.725994110107422, + "learning_rate": 0.00019974516390594235, + "loss": 0.4689, + "step": 100 + }, + { + "epoch": 0.00424726823429476, + "grad_norm": 20.316652297973633, + "learning_rate": 0.00019971942288634053, + "loss": 0.8522, + "step": 110 + }, + { + "epoch": 0.004633383528321557, + "grad_norm": 4.232663631439209, + "learning_rate": 0.00019969368186673875, + "loss": 0.5041, + "step": 120 + }, + { + "epoch": 0.005019498822348353, + "grad_norm": 2.609255313873291, + "learning_rate": 0.00019966794084713696, + "loss": 0.7439, + "step": 130 + }, + { + "epoch": 0.0054056141163751495, + "grad_norm": 10.063919067382812, + "learning_rate": 0.00019964219982753517, + "loss": 0.6702, + "step": 140 + }, + { + "epoch": 0.005791729410401946, + "grad_norm": 5.799802303314209, + "learning_rate": 0.00019961645880793339, + "loss": 0.7356, + "step": 150 + }, + { + "epoch": 0.006177844704428743, + "grad_norm": 12.664258003234863, + "learning_rate": 0.0001995907177883316, + "loss": 0.4067, + "step": 160 + }, + { + "epoch": 0.006563959998455539, + "grad_norm": 9.42366886138916, + "learning_rate": 0.00019956497676872984, + "loss": 0.4767, + "step": 170 + }, + { + "epoch": 0.006950075292482335, + "grad_norm": 5.382272243499756, + "learning_rate": 0.00019953923574912803, + "loss": 0.536, + "step": 180 + }, + { + "epoch": 0.0073361905865091314, + "grad_norm": 9.797371864318848, + "learning_rate": 0.00019951349472952624, + "loss": 0.4735, + "step": 190 + }, + { + "epoch": 0.007722305880535928, + "grad_norm": 7.965329647064209, + "learning_rate": 0.00019948775370992445, + "loss": 0.3881, + "step": 200 + }, + { + "epoch": 0.008108421174562725, + "grad_norm": 4.075791835784912, + "learning_rate": 0.00019946201269032267, + "loss": 0.5564, + "step": 210 + }, + { + "epoch": 0.00849453646858952, + "grad_norm": 24.367305755615234, + "learning_rate": 0.0001994362716707209, + "loss": 0.9795, + "step": 220 + }, + { + "epoch": 0.008880651762616317, + "grad_norm": 9.627866744995117, + "learning_rate": 0.0001994105306511191, + "loss": 0.4528, + "step": 230 + }, + { + "epoch": 0.009266767056643114, + "grad_norm": 7.469555854797363, + "learning_rate": 0.00019938478963151733, + "loss": 0.447, + "step": 240 + }, + { + "epoch": 0.00965288235066991, + "grad_norm": 7.426730155944824, + "learning_rate": 0.00019935904861191552, + "loss": 0.6026, + "step": 250 + }, + { + "epoch": 0.010038997644696707, + "grad_norm": 6.999317169189453, + "learning_rate": 0.00019933330759231373, + "loss": 0.4962, + "step": 260 + }, + { + "epoch": 0.010425112938723502, + "grad_norm": 10.492286682128906, + 
"learning_rate": 0.00019930756657271194, + "loss": 0.7987, + "step": 270 + }, + { + "epoch": 0.010811228232750299, + "grad_norm": 7.079407215118408, + "learning_rate": 0.00019928182555311016, + "loss": 0.4395, + "step": 280 + }, + { + "epoch": 0.011197343526777096, + "grad_norm": 9.610014915466309, + "learning_rate": 0.0001992560845335084, + "loss": 0.748, + "step": 290 + }, + { + "epoch": 0.011583458820803891, + "grad_norm": 5.993048667907715, + "learning_rate": 0.00019923034351390658, + "loss": 0.4328, + "step": 300 + }, + { + "epoch": 0.011969574114830689, + "grad_norm": 7.336791515350342, + "learning_rate": 0.00019920460249430483, + "loss": 0.4104, + "step": 310 + }, + { + "epoch": 0.012355689408857486, + "grad_norm": 7.967221736907959, + "learning_rate": 0.000199178861474703, + "loss": 0.4662, + "step": 320 + }, + { + "epoch": 0.012741804702884281, + "grad_norm": 4.464987754821777, + "learning_rate": 0.00019915312045510125, + "loss": 0.725, + "step": 330 + }, + { + "epoch": 0.013127919996911078, + "grad_norm": 8.669449806213379, + "learning_rate": 0.00019912737943549944, + "loss": 0.4256, + "step": 340 + }, + { + "epoch": 0.013514035290937873, + "grad_norm": 4.114014148712158, + "learning_rate": 0.00019910163841589765, + "loss": 0.4477, + "step": 350 + }, + { + "epoch": 0.01390015058496467, + "grad_norm": 9.254106521606445, + "learning_rate": 0.0001990758973962959, + "loss": 0.514, + "step": 360 + }, + { + "epoch": 0.014286265878991468, + "grad_norm": 0.8039970993995667, + "learning_rate": 0.00019905015637669408, + "loss": 0.5802, + "step": 370 + }, + { + "epoch": 0.014672381173018263, + "grad_norm": 3.9931838512420654, + "learning_rate": 0.00019902441535709232, + "loss": 0.8973, + "step": 380 + }, + { + "epoch": 0.01505849646704506, + "grad_norm": 1.7645355463027954, + "learning_rate": 0.0001989986743374905, + "loss": 0.7108, + "step": 390 + }, + { + "epoch": 0.015444611761071855, + "grad_norm": 6.8542866706848145, + "learning_rate": 0.00019897293331788875, + "loss": 0.5796, + "step": 400 + }, + { + "epoch": 0.015830727055098654, + "grad_norm": 5.278103828430176, + "learning_rate": 0.00019894719229828696, + "loss": 0.3841, + "step": 410 + }, + { + "epoch": 0.01621684234912545, + "grad_norm": 9.00206184387207, + "learning_rate": 0.00019892145127868514, + "loss": 0.5891, + "step": 420 + }, + { + "epoch": 0.016602957643152245, + "grad_norm": 7.684702396392822, + "learning_rate": 0.00019889571025908339, + "loss": 0.4868, + "step": 430 + }, + { + "epoch": 0.01698907293717904, + "grad_norm": 4.198502540588379, + "learning_rate": 0.00019886996923948157, + "loss": 0.571, + "step": 440 + }, + { + "epoch": 0.01737518823120584, + "grad_norm": 7.454501628875732, + "learning_rate": 0.0001988442282198798, + "loss": 0.5133, + "step": 450 + }, + { + "epoch": 0.017761303525232634, + "grad_norm": 13.236722946166992, + "learning_rate": 0.000198818487200278, + "loss": 0.4139, + "step": 460 + }, + { + "epoch": 0.01814741881925943, + "grad_norm": 6.4592390060424805, + "learning_rate": 0.00019879274618067624, + "loss": 0.6078, + "step": 470 + }, + { + "epoch": 0.01853353411328623, + "grad_norm": 11.73417854309082, + "learning_rate": 0.00019876700516107445, + "loss": 0.5472, + "step": 480 + }, + { + "epoch": 0.018919649407313024, + "grad_norm": 2.5162808895111084, + "learning_rate": 0.00019874126414147264, + "loss": 0.6611, + "step": 490 + }, + { + "epoch": 0.01930576470133982, + "grad_norm": 4.9637837409973145, + "learning_rate": 0.00019871552312187088, + "loss": 0.6472, + "step": 500 + }, + { + "epoch": 
0.019691879995366618, + "grad_norm": 11.545489311218262, + "learning_rate": 0.00019868978210226906, + "loss": 0.5304, + "step": 510 + }, + { + "epoch": 0.020077995289393413, + "grad_norm": 5.197858810424805, + "learning_rate": 0.0001986640410826673, + "loss": 0.605, + "step": 520 + }, + { + "epoch": 0.02046411058342021, + "grad_norm": 4.935055255889893, + "learning_rate": 0.0001986383000630655, + "loss": 0.6524, + "step": 530 + }, + { + "epoch": 0.020850225877447004, + "grad_norm": 5.838052749633789, + "learning_rate": 0.00019861255904346373, + "loss": 0.4957, + "step": 540 + }, + { + "epoch": 0.021236341171473803, + "grad_norm": 4.682408809661865, + "learning_rate": 0.00019858681802386194, + "loss": 0.8523, + "step": 550 + }, + { + "epoch": 0.021622456465500598, + "grad_norm": 10.720857620239258, + "learning_rate": 0.00019856107700426013, + "loss": 0.516, + "step": 560 + }, + { + "epoch": 0.022008571759527393, + "grad_norm": 6.515562534332275, + "learning_rate": 0.00019853533598465837, + "loss": 0.6095, + "step": 570 + }, + { + "epoch": 0.022394687053554192, + "grad_norm": 3.204960584640503, + "learning_rate": 0.00019850959496505656, + "loss": 0.6624, + "step": 580 + }, + { + "epoch": 0.022780802347580988, + "grad_norm": 2.305497884750366, + "learning_rate": 0.0001984838539454548, + "loss": 0.5986, + "step": 590 + }, + { + "epoch": 0.023166917641607783, + "grad_norm": 13.07105541229248, + "learning_rate": 0.000198458112925853, + "loss": 0.337, + "step": 600 + }, + { + "epoch": 0.023553032935634582, + "grad_norm": 3.1491329669952393, + "learning_rate": 0.00019843237190625122, + "loss": 0.5466, + "step": 610 + }, + { + "epoch": 0.023939148229661377, + "grad_norm": 1.92014479637146, + "learning_rate": 0.00019840663088664944, + "loss": 0.5525, + "step": 620 + }, + { + "epoch": 0.024325263523688172, + "grad_norm": 2.206550121307373, + "learning_rate": 0.00019838088986704762, + "loss": 0.5069, + "step": 630 + }, + { + "epoch": 0.02471137881771497, + "grad_norm": 2.386288642883301, + "learning_rate": 0.00019835514884744586, + "loss": 0.3749, + "step": 640 + }, + { + "epoch": 0.025097494111741767, + "grad_norm": 7.393959045410156, + "learning_rate": 0.00019832940782784405, + "loss": 0.7144, + "step": 650 + }, + { + "epoch": 0.025483609405768562, + "grad_norm": 5.7293171882629395, + "learning_rate": 0.0001983036668082423, + "loss": 0.5052, + "step": 660 + }, + { + "epoch": 0.025869724699795357, + "grad_norm": 6.440220832824707, + "learning_rate": 0.0001982779257886405, + "loss": 0.4343, + "step": 670 + }, + { + "epoch": 0.026255839993822156, + "grad_norm": 0.8553487658500671, + "learning_rate": 0.00019825218476903872, + "loss": 0.7562, + "step": 680 + }, + { + "epoch": 0.02664195528784895, + "grad_norm": 3.762784719467163, + "learning_rate": 0.00019822644374943693, + "loss": 0.5593, + "step": 690 + }, + { + "epoch": 0.027028070581875747, + "grad_norm": 11.674392700195312, + "learning_rate": 0.00019820070272983512, + "loss": 0.6069, + "step": 700 + }, + { + "epoch": 0.027414185875902546, + "grad_norm": 8.631232261657715, + "learning_rate": 0.00019817496171023336, + "loss": 0.3584, + "step": 710 + }, + { + "epoch": 0.02780030116992934, + "grad_norm": 5.7163920402526855, + "learning_rate": 0.00019814922069063157, + "loss": 0.5563, + "step": 720 + }, + { + "epoch": 0.028186416463956136, + "grad_norm": 8.186172485351562, + "learning_rate": 0.00019812347967102978, + "loss": 0.5289, + "step": 730 + }, + { + "epoch": 0.028572531757982935, + "grad_norm": 7.287814140319824, + "learning_rate": 
0.000198097738651428, + "loss": 0.4543, + "step": 740 + }, + { + "epoch": 0.02895864705200973, + "grad_norm": 6.621245384216309, + "learning_rate": 0.0001980719976318262, + "loss": 0.3244, + "step": 750 + }, + { + "epoch": 0.029344762346036526, + "grad_norm": 3.5209403038024902, + "learning_rate": 0.00019804625661222442, + "loss": 0.5385, + "step": 760 + }, + { + "epoch": 0.029730877640063325, + "grad_norm": 2.562343120574951, + "learning_rate": 0.00019802051559262264, + "loss": 0.4868, + "step": 770 + }, + { + "epoch": 0.03011699293409012, + "grad_norm": 7.782780647277832, + "learning_rate": 0.00019799477457302085, + "loss": 0.5682, + "step": 780 + }, + { + "epoch": 0.030503108228116915, + "grad_norm": 8.173531532287598, + "learning_rate": 0.00019796903355341906, + "loss": 0.3557, + "step": 790 + }, + { + "epoch": 0.03088922352214371, + "grad_norm": 4.502675533294678, + "learning_rate": 0.00019794329253381728, + "loss": 0.517, + "step": 800 + }, + { + "epoch": 0.031275338816170506, + "grad_norm": 6.314894199371338, + "learning_rate": 0.0001979175515142155, + "loss": 0.6392, + "step": 810 + }, + { + "epoch": 0.03166145411019731, + "grad_norm": 7.054763317108154, + "learning_rate": 0.0001978918104946137, + "loss": 0.5823, + "step": 820 + }, + { + "epoch": 0.032047569404224104, + "grad_norm": 1.7847551107406616, + "learning_rate": 0.00019786606947501192, + "loss": 0.4495, + "step": 830 + }, + { + "epoch": 0.0324336846982509, + "grad_norm": 5.268612861633301, + "learning_rate": 0.00019784032845541013, + "loss": 0.4379, + "step": 840 + }, + { + "epoch": 0.032819799992277694, + "grad_norm": 4.834717273712158, + "learning_rate": 0.00019781458743580834, + "loss": 0.5379, + "step": 850 + }, + { + "epoch": 0.03320591528630449, + "grad_norm": 6.077203273773193, + "learning_rate": 0.00019778884641620656, + "loss": 0.5666, + "step": 860 + }, + { + "epoch": 0.033592030580331285, + "grad_norm": 0.9583851099014282, + "learning_rate": 0.00019776310539660477, + "loss": 0.8146, + "step": 870 + }, + { + "epoch": 0.03397814587435808, + "grad_norm": 0.8457469940185547, + "learning_rate": 0.00019773736437700298, + "loss": 0.3497, + "step": 880 + }, + { + "epoch": 0.03436426116838488, + "grad_norm": 5.251153945922852, + "learning_rate": 0.0001977116233574012, + "loss": 0.6299, + "step": 890 + }, + { + "epoch": 0.03475037646241168, + "grad_norm": 4.057605266571045, + "learning_rate": 0.0001976858823377994, + "loss": 0.5829, + "step": 900 + }, + { + "epoch": 0.03513649175643847, + "grad_norm": 7.625199794769287, + "learning_rate": 0.00019766014131819762, + "loss": 0.452, + "step": 910 + }, + { + "epoch": 0.03552260705046527, + "grad_norm": 4.618866443634033, + "learning_rate": 0.00019763440029859584, + "loss": 0.5693, + "step": 920 + }, + { + "epoch": 0.035908722344492064, + "grad_norm": 8.480955123901367, + "learning_rate": 0.00019760865927899405, + "loss": 0.4894, + "step": 930 + }, + { + "epoch": 0.03629483763851886, + "grad_norm": 6.537581920623779, + "learning_rate": 0.00019758291825939226, + "loss": 0.7396, + "step": 940 + }, + { + "epoch": 0.03668095293254566, + "grad_norm": 8.093205451965332, + "learning_rate": 0.00019755717723979048, + "loss": 0.3702, + "step": 950 + }, + { + "epoch": 0.03706706822657246, + "grad_norm": 2.523141860961914, + "learning_rate": 0.0001975314362201887, + "loss": 0.3746, + "step": 960 + }, + { + "epoch": 0.03745318352059925, + "grad_norm": 1.2707194089889526, + "learning_rate": 0.0001975056952005869, + "loss": 0.3211, + "step": 970 + }, + { + "epoch": 0.03783929881462605, + 
"grad_norm": 2.818798780441284, + "learning_rate": 0.00019747995418098512, + "loss": 0.6594, + "step": 980 + }, + { + "epoch": 0.03822541410865284, + "grad_norm": 7.373154640197754, + "learning_rate": 0.00019745421316138333, + "loss": 0.7825, + "step": 990 + }, + { + "epoch": 0.03861152940267964, + "grad_norm": 2.8925669193267822, + "learning_rate": 0.00019742847214178154, + "loss": 0.4847, + "step": 1000 + }, + { + "epoch": 0.038997644696706434, + "grad_norm": 10.87833023071289, + "learning_rate": 0.00019740273112217976, + "loss": 0.5098, + "step": 1010 + }, + { + "epoch": 0.039383759990733236, + "grad_norm": 3.8262317180633545, + "learning_rate": 0.00019737699010257797, + "loss": 0.5168, + "step": 1020 + }, + { + "epoch": 0.03976987528476003, + "grad_norm": 2.5567004680633545, + "learning_rate": 0.00019735124908297618, + "loss": 0.5597, + "step": 1030 + }, + { + "epoch": 0.04015599057878683, + "grad_norm": 4.385695457458496, + "learning_rate": 0.0001973255080633744, + "loss": 0.3187, + "step": 1040 + }, + { + "epoch": 0.04054210587281362, + "grad_norm": 1.8186907768249512, + "learning_rate": 0.0001972997670437726, + "loss": 0.6274, + "step": 1050 + }, + { + "epoch": 0.04092822116684042, + "grad_norm": 7.446481704711914, + "learning_rate": 0.00019727402602417082, + "loss": 0.4365, + "step": 1060 + }, + { + "epoch": 0.04131433646086721, + "grad_norm": 8.973576545715332, + "learning_rate": 0.00019724828500456904, + "loss": 0.6518, + "step": 1070 + }, + { + "epoch": 0.04170045175489401, + "grad_norm": 4.984101295471191, + "learning_rate": 0.00019722254398496725, + "loss": 0.4694, + "step": 1080 + }, + { + "epoch": 0.04208656704892081, + "grad_norm": 8.2625093460083, + "learning_rate": 0.00019719680296536546, + "loss": 0.5532, + "step": 1090 + }, + { + "epoch": 0.042472682342947606, + "grad_norm": 2.2425265312194824, + "learning_rate": 0.0001971710619457637, + "loss": 0.4011, + "step": 1100 + }, + { + "epoch": 0.0428587976369744, + "grad_norm": 1.4552969932556152, + "learning_rate": 0.0001971453209261619, + "loss": 0.396, + "step": 1110 + }, + { + "epoch": 0.043244912931001196, + "grad_norm": 2.0675857067108154, + "learning_rate": 0.0001971195799065601, + "loss": 0.3727, + "step": 1120 + }, + { + "epoch": 0.04363102822502799, + "grad_norm": 5.24460506439209, + "learning_rate": 0.00019709383888695832, + "loss": 0.5016, + "step": 1130 + }, + { + "epoch": 0.04401714351905479, + "grad_norm": 5.524955749511719, + "learning_rate": 0.00019706809786735653, + "loss": 0.5866, + "step": 1140 + }, + { + "epoch": 0.04440325881308159, + "grad_norm": 4.2749199867248535, + "learning_rate": 0.00019704235684775474, + "loss": 0.4591, + "step": 1150 + }, + { + "epoch": 0.044789374107108385, + "grad_norm": 3.460395336151123, + "learning_rate": 0.00019701661582815296, + "loss": 0.5275, + "step": 1160 + }, + { + "epoch": 0.04517548940113518, + "grad_norm": 1.3979772329330444, + "learning_rate": 0.0001969908748085512, + "loss": 0.3375, + "step": 1170 + }, + { + "epoch": 0.045561604695161975, + "grad_norm": 2.29923939704895, + "learning_rate": 0.00019696513378894938, + "loss": 0.5683, + "step": 1180 + }, + { + "epoch": 0.04594771998918877, + "grad_norm": 3.211496353149414, + "learning_rate": 0.0001969393927693476, + "loss": 0.7122, + "step": 1190 + }, + { + "epoch": 0.046333835283215566, + "grad_norm": 4.18447208404541, + "learning_rate": 0.0001969136517497458, + "loss": 0.5149, + "step": 1200 + }, + { + "epoch": 0.04671995057724236, + "grad_norm": 14.650918960571289, + "learning_rate": 0.00019688791073014402, + 
"loss": 0.6384, + "step": 1210 + }, + { + "epoch": 0.047106065871269164, + "grad_norm": 4.956239700317383, + "learning_rate": 0.00019686216971054226, + "loss": 0.3602, + "step": 1220 + }, + { + "epoch": 0.04749218116529596, + "grad_norm": 4.0486860275268555, + "learning_rate": 0.00019683642869094045, + "loss": 0.7719, + "step": 1230 + }, + { + "epoch": 0.047878296459322754, + "grad_norm": 4.587133407592773, + "learning_rate": 0.0001968106876713387, + "loss": 0.4079, + "step": 1240 + }, + { + "epoch": 0.04826441175334955, + "grad_norm": 0.7830008268356323, + "learning_rate": 0.00019678494665173688, + "loss": 0.5841, + "step": 1250 + }, + { + "epoch": 0.048650527047376345, + "grad_norm": 5.378068447113037, + "learning_rate": 0.0001967592056321351, + "loss": 0.3226, + "step": 1260 + }, + { + "epoch": 0.04903664234140314, + "grad_norm": 4.002605438232422, + "learning_rate": 0.0001967334646125333, + "loss": 0.4411, + "step": 1270 + }, + { + "epoch": 0.04942275763542994, + "grad_norm": 4.695134162902832, + "learning_rate": 0.00019670772359293152, + "loss": 0.415, + "step": 1280 + }, + { + "epoch": 0.04980887292945674, + "grad_norm": 6.046143054962158, + "learning_rate": 0.00019668198257332976, + "loss": 0.463, + "step": 1290 + }, + { + "epoch": 0.05019498822348353, + "grad_norm": 1.8300361633300781, + "learning_rate": 0.00019665624155372794, + "loss": 0.408, + "step": 1300 + }, + { + "epoch": 0.05058110351751033, + "grad_norm": 5.80141544342041, + "learning_rate": 0.00019663050053412618, + "loss": 0.4481, + "step": 1310 + }, + { + "epoch": 0.050967218811537124, + "grad_norm": 4.103593349456787, + "learning_rate": 0.00019660475951452437, + "loss": 0.3054, + "step": 1320 + }, + { + "epoch": 0.05135333410556392, + "grad_norm": 9.129929542541504, + "learning_rate": 0.00019657901849492258, + "loss": 0.5554, + "step": 1330 + }, + { + "epoch": 0.051739449399590715, + "grad_norm": 6.979663372039795, + "learning_rate": 0.0001965532774753208, + "loss": 0.388, + "step": 1340 + }, + { + "epoch": 0.05212556469361752, + "grad_norm": 6.329915523529053, + "learning_rate": 0.000196527536455719, + "loss": 0.8894, + "step": 1350 + }, + { + "epoch": 0.05251167998764431, + "grad_norm": 0.2686227262020111, + "learning_rate": 0.00019650179543611725, + "loss": 0.5032, + "step": 1360 + }, + { + "epoch": 0.05289779528167111, + "grad_norm": 4.818896770477295, + "learning_rate": 0.00019647605441651544, + "loss": 0.3417, + "step": 1370 + }, + { + "epoch": 0.0532839105756979, + "grad_norm": 6.252008438110352, + "learning_rate": 0.00019645031339691368, + "loss": 0.398, + "step": 1380 + }, + { + "epoch": 0.0536700258697247, + "grad_norm": 1.2734620571136475, + "learning_rate": 0.00019642457237731186, + "loss": 0.5369, + "step": 1390 + }, + { + "epoch": 0.054056141163751494, + "grad_norm": 6.511690616607666, + "learning_rate": 0.00019639883135771008, + "loss": 0.4646, + "step": 1400 + }, + { + "epoch": 0.054442256457778296, + "grad_norm": 3.2352371215820312, + "learning_rate": 0.00019637309033810832, + "loss": 0.3805, + "step": 1410 + }, + { + "epoch": 0.05482837175180509, + "grad_norm": 1.0574132204055786, + "learning_rate": 0.0001963473493185065, + "loss": 0.4572, + "step": 1420 + }, + { + "epoch": 0.055214487045831886, + "grad_norm": 2.8525452613830566, + "learning_rate": 0.00019632160829890474, + "loss": 0.4417, + "step": 1430 + }, + { + "epoch": 0.05560060233985868, + "grad_norm": 3.588179111480713, + "learning_rate": 0.00019629586727930293, + "loss": 0.6214, + "step": 1440 + }, + { + "epoch": 0.05598671763388548, + 
"grad_norm": 3.969320058822632, + "learning_rate": 0.00019627012625970117, + "loss": 0.6114, + "step": 1450 + }, + { + "epoch": 0.05637283292791227, + "grad_norm": 3.465053081512451, + "learning_rate": 0.00019624438524009936, + "loss": 0.6066, + "step": 1460 + }, + { + "epoch": 0.05675894822193907, + "grad_norm": 3.5419201850891113, + "learning_rate": 0.00019621864422049757, + "loss": 0.3906, + "step": 1470 + }, + { + "epoch": 0.05714506351596587, + "grad_norm": 3.4580233097076416, + "learning_rate": 0.0001961929032008958, + "loss": 0.9283, + "step": 1480 + }, + { + "epoch": 0.057531178809992665, + "grad_norm": 4.222144603729248, + "learning_rate": 0.000196167162181294, + "loss": 0.4225, + "step": 1490 + }, + { + "epoch": 0.05791729410401946, + "grad_norm": 0.8072681427001953, + "learning_rate": 0.00019614142116169224, + "loss": 0.5012, + "step": 1500 + }, + { + "epoch": 0.058303409398046256, + "grad_norm": 2.827258348464966, + "learning_rate": 0.00019611568014209042, + "loss": 0.4333, + "step": 1510 + }, + { + "epoch": 0.05868952469207305, + "grad_norm": 1.3494776487350464, + "learning_rate": 0.00019608993912248866, + "loss": 0.3019, + "step": 1520 + }, + { + "epoch": 0.05907563998609985, + "grad_norm": 4.3279900550842285, + "learning_rate": 0.00019606419810288688, + "loss": 0.4807, + "step": 1530 + }, + { + "epoch": 0.05946175528012665, + "grad_norm": 3.8996474742889404, + "learning_rate": 0.0001960384570832851, + "loss": 0.4876, + "step": 1540 + }, + { + "epoch": 0.059847870574153444, + "grad_norm": 5.255978584289551, + "learning_rate": 0.0001960127160636833, + "loss": 0.4661, + "step": 1550 + }, + { + "epoch": 0.06023398586818024, + "grad_norm": 5.172120094299316, + "learning_rate": 0.0001959869750440815, + "loss": 0.4885, + "step": 1560 + }, + { + "epoch": 0.060620101162207035, + "grad_norm": 5.385959625244141, + "learning_rate": 0.00019596123402447973, + "loss": 0.2995, + "step": 1570 + }, + { + "epoch": 0.06100621645623383, + "grad_norm": 3.9922871589660645, + "learning_rate": 0.00019593549300487792, + "loss": 0.4568, + "step": 1580 + }, + { + "epoch": 0.061392331750260626, + "grad_norm": 6.048642158508301, + "learning_rate": 0.00019590975198527616, + "loss": 0.4649, + "step": 1590 + }, + { + "epoch": 0.06177844704428742, + "grad_norm": 1.0315563678741455, + "learning_rate": 0.00019588401096567437, + "loss": 0.5175, + "step": 1600 + }, + { + "epoch": 0.06216456233831422, + "grad_norm": 12.403678894042969, + "learning_rate": 0.00019585826994607258, + "loss": 0.523, + "step": 1610 + }, + { + "epoch": 0.06255067763234101, + "grad_norm": 6.127188205718994, + "learning_rate": 0.0001958325289264708, + "loss": 0.5861, + "step": 1620 + }, + { + "epoch": 0.06293679292636781, + "grad_norm": 6.398592948913574, + "learning_rate": 0.00019580678790686898, + "loss": 0.7471, + "step": 1630 + }, + { + "epoch": 0.06332290822039462, + "grad_norm": 4.127200603485107, + "learning_rate": 0.00019578104688726722, + "loss": 0.4921, + "step": 1640 + }, + { + "epoch": 0.06370902351442141, + "grad_norm": 4.601541042327881, + "learning_rate": 0.0001957553058676654, + "loss": 0.3682, + "step": 1650 + }, + { + "epoch": 0.06409513880844821, + "grad_norm": 6.32781457901001, + "learning_rate": 0.00019572956484806365, + "loss": 0.3748, + "step": 1660 + }, + { + "epoch": 0.064481254102475, + "grad_norm": 3.7280173301696777, + "learning_rate": 0.00019570382382846186, + "loss": 0.5912, + "step": 1670 + }, + { + "epoch": 0.0648673693965018, + "grad_norm": 6.7821946144104, + "learning_rate": 0.00019567808280886008, + 
"loss": 0.4073, + "step": 1680 + }, + { + "epoch": 0.0652534846905286, + "grad_norm": 1.4645791053771973, + "learning_rate": 0.0001956523417892583, + "loss": 0.7164, + "step": 1690 + }, + { + "epoch": 0.06563959998455539, + "grad_norm": 2.367361545562744, + "learning_rate": 0.00019562660076965648, + "loss": 0.3859, + "step": 1700 + }, + { + "epoch": 0.06602571527858218, + "grad_norm": 2.198493480682373, + "learning_rate": 0.00019560085975005472, + "loss": 0.4928, + "step": 1710 + }, + { + "epoch": 0.06641183057260898, + "grad_norm": 1.882567048072815, + "learning_rate": 0.00019557511873045293, + "loss": 0.5861, + "step": 1720 + }, + { + "epoch": 0.06679794586663577, + "grad_norm": 6.324089527130127, + "learning_rate": 0.00019554937771085114, + "loss": 0.6249, + "step": 1730 + }, + { + "epoch": 0.06718406116066257, + "grad_norm": 4.283392906188965, + "learning_rate": 0.00019552363669124936, + "loss": 0.5403, + "step": 1740 + }, + { + "epoch": 0.06757017645468937, + "grad_norm": 4.464428424835205, + "learning_rate": 0.00019549789567164757, + "loss": 0.5815, + "step": 1750 + }, + { + "epoch": 0.06795629174871616, + "grad_norm": 0.32923218607902527, + "learning_rate": 0.00019547215465204578, + "loss": 0.3791, + "step": 1760 + }, + { + "epoch": 0.06834240704274297, + "grad_norm": 5.255763053894043, + "learning_rate": 0.00019544641363244397, + "loss": 0.4252, + "step": 1770 + }, + { + "epoch": 0.06872852233676977, + "grad_norm": 2.1615116596221924, + "learning_rate": 0.0001954206726128422, + "loss": 0.4457, + "step": 1780 + }, + { + "epoch": 0.06911463763079656, + "grad_norm": 2.0209217071533203, + "learning_rate": 0.00019539493159324042, + "loss": 0.4377, + "step": 1790 + }, + { + "epoch": 0.06950075292482336, + "grad_norm": 8.12317943572998, + "learning_rate": 0.00019536919057363864, + "loss": 0.4025, + "step": 1800 + }, + { + "epoch": 0.06988686821885015, + "grad_norm": 1.7034660577774048, + "learning_rate": 0.00019534344955403685, + "loss": 0.2915, + "step": 1810 + }, + { + "epoch": 0.07027298351287695, + "grad_norm": 4.640942096710205, + "learning_rate": 0.00019531770853443506, + "loss": 0.6588, + "step": 1820 + }, + { + "epoch": 0.07065909880690374, + "grad_norm": 4.202883243560791, + "learning_rate": 0.00019529196751483328, + "loss": 0.4442, + "step": 1830 + }, + { + "epoch": 0.07104521410093054, + "grad_norm": 3.26898193359375, + "learning_rate": 0.00019526622649523146, + "loss": 0.5065, + "step": 1840 + }, + { + "epoch": 0.07143132939495733, + "grad_norm": 8.189995765686035, + "learning_rate": 0.0001952404854756297, + "loss": 0.5258, + "step": 1850 + }, + { + "epoch": 0.07181744468898413, + "grad_norm": 3.2618284225463867, + "learning_rate": 0.00019521474445602792, + "loss": 0.5037, + "step": 1860 + }, + { + "epoch": 0.07220355998301092, + "grad_norm": 2.168548583984375, + "learning_rate": 0.00019518900343642613, + "loss": 0.4887, + "step": 1870 + }, + { + "epoch": 0.07258967527703772, + "grad_norm": 2.2029404640197754, + "learning_rate": 0.00019516326241682434, + "loss": 0.4646, + "step": 1880 + }, + { + "epoch": 0.07297579057106451, + "grad_norm": 1.561713695526123, + "learning_rate": 0.00019513752139722256, + "loss": 0.432, + "step": 1890 + }, + { + "epoch": 0.07336190586509132, + "grad_norm": 3.428372621536255, + "learning_rate": 0.00019511178037762077, + "loss": 0.4124, + "step": 1900 + }, + { + "epoch": 0.07374802115911812, + "grad_norm": 5.6706671714782715, + "learning_rate": 0.00019508603935801898, + "loss": 0.4431, + "step": 1910 + }, + { + "epoch": 0.07413413645314491, + 
"grad_norm": 8.872734069824219, + "learning_rate": 0.0001950602983384172, + "loss": 0.7001, + "step": 1920 + }, + { + "epoch": 0.07452025174717171, + "grad_norm": 1.6821974515914917, + "learning_rate": 0.0001950345573188154, + "loss": 0.4204, + "step": 1930 + }, + { + "epoch": 0.0749063670411985, + "grad_norm": 3.117480993270874, + "learning_rate": 0.00019500881629921362, + "loss": 0.3748, + "step": 1940 + }, + { + "epoch": 0.0752924823352253, + "grad_norm": 0.8384984731674194, + "learning_rate": 0.00019498307527961184, + "loss": 0.2636, + "step": 1950 + }, + { + "epoch": 0.0756785976292521, + "grad_norm": 2.8956708908081055, + "learning_rate": 0.00019495733426001005, + "loss": 0.4514, + "step": 1960 + }, + { + "epoch": 0.07606471292327889, + "grad_norm": 11.233087539672852, + "learning_rate": 0.00019493159324040826, + "loss": 0.4002, + "step": 1970 + }, + { + "epoch": 0.07645082821730569, + "grad_norm": 4.066893100738525, + "learning_rate": 0.00019490585222080648, + "loss": 0.4449, + "step": 1980 + }, + { + "epoch": 0.07683694351133248, + "grad_norm": 4.854077339172363, + "learning_rate": 0.0001948801112012047, + "loss": 0.4961, + "step": 1990 + }, + { + "epoch": 0.07722305880535928, + "grad_norm": 2.5722827911376953, + "learning_rate": 0.0001948543701816029, + "loss": 0.3743, + "step": 2000 + }, + { + "epoch": 0.07760917409938607, + "grad_norm": 5.842077255249023, + "learning_rate": 0.00019482862916200112, + "loss": 0.2906, + "step": 2010 + }, + { + "epoch": 0.07799528939341287, + "grad_norm": 6.163092136383057, + "learning_rate": 0.00019480288814239933, + "loss": 0.4374, + "step": 2020 + }, + { + "epoch": 0.07838140468743968, + "grad_norm": 4.589334487915039, + "learning_rate": 0.00019477714712279754, + "loss": 0.484, + "step": 2030 + }, + { + "epoch": 0.07876751998146647, + "grad_norm": 6.951212406158447, + "learning_rate": 0.00019475140610319576, + "loss": 0.5767, + "step": 2040 + }, + { + "epoch": 0.07915363527549327, + "grad_norm": 3.3662521839141846, + "learning_rate": 0.00019472566508359397, + "loss": 0.5566, + "step": 2050 + }, + { + "epoch": 0.07953975056952006, + "grad_norm": 1.6602391004562378, + "learning_rate": 0.00019469992406399218, + "loss": 0.1436, + "step": 2060 + }, + { + "epoch": 0.07992586586354686, + "grad_norm": 6.451857089996338, + "learning_rate": 0.0001946741830443904, + "loss": 0.3778, + "step": 2070 + }, + { + "epoch": 0.08031198115757365, + "grad_norm": 3.7249560356140137, + "learning_rate": 0.0001946484420247886, + "loss": 0.5391, + "step": 2080 + }, + { + "epoch": 0.08069809645160045, + "grad_norm": 4.138098239898682, + "learning_rate": 0.00019462270100518682, + "loss": 0.3598, + "step": 2090 + }, + { + "epoch": 0.08108421174562724, + "grad_norm": 3.224778175354004, + "learning_rate": 0.00019459695998558504, + "loss": 0.2967, + "step": 2100 + }, + { + "epoch": 0.08147032703965404, + "grad_norm": 1.3951358795166016, + "learning_rate": 0.00019457121896598325, + "loss": 0.2698, + "step": 2110 + }, + { + "epoch": 0.08185644233368083, + "grad_norm": 5.956802845001221, + "learning_rate": 0.00019454547794638146, + "loss": 0.451, + "step": 2120 + }, + { + "epoch": 0.08224255762770763, + "grad_norm": 3.456360101699829, + "learning_rate": 0.00019451973692677968, + "loss": 0.3365, + "step": 2130 + }, + { + "epoch": 0.08262867292173443, + "grad_norm": 2.9433653354644775, + "learning_rate": 0.0001944939959071779, + "loss": 0.4424, + "step": 2140 + }, + { + "epoch": 0.08301478821576122, + "grad_norm": 3.136000871658325, + "learning_rate": 0.0001944682548875761, + "loss": 
0.6224, + "step": 2150 + }, + { + "epoch": 0.08340090350978802, + "grad_norm": 2.669084310531616, + "learning_rate": 0.00019444251386797432, + "loss": 0.4435, + "step": 2160 + }, + { + "epoch": 0.08378701880381483, + "grad_norm": 4.573731899261475, + "learning_rate": 0.00019441677284837253, + "loss": 0.5555, + "step": 2170 + }, + { + "epoch": 0.08417313409784162, + "grad_norm": 6.354156017303467, + "learning_rate": 0.00019439103182877074, + "loss": 0.4232, + "step": 2180 + }, + { + "epoch": 0.08455924939186842, + "grad_norm": 2.993691921234131, + "learning_rate": 0.00019436529080916895, + "loss": 0.51, + "step": 2190 + }, + { + "epoch": 0.08494536468589521, + "grad_norm": 3.6496782302856445, + "learning_rate": 0.00019433954978956717, + "loss": 0.4031, + "step": 2200 + }, + { + "epoch": 0.085331479979922, + "grad_norm": 1.9039051532745361, + "learning_rate": 0.00019431380876996538, + "loss": 0.4407, + "step": 2210 + }, + { + "epoch": 0.0857175952739488, + "grad_norm": 2.3211915493011475, + "learning_rate": 0.00019428806775036362, + "loss": 0.4057, + "step": 2220 + }, + { + "epoch": 0.0861037105679756, + "grad_norm": 4.883905410766602, + "learning_rate": 0.0001942623267307618, + "loss": 0.6223, + "step": 2230 + }, + { + "epoch": 0.08648982586200239, + "grad_norm": 2.164484977722168, + "learning_rate": 0.00019423658571116005, + "loss": 0.3141, + "step": 2240 + }, + { + "epoch": 0.08687594115602919, + "grad_norm": 2.2078909873962402, + "learning_rate": 0.00019421084469155823, + "loss": 0.3523, + "step": 2250 + }, + { + "epoch": 0.08726205645005598, + "grad_norm": 1.0987967252731323, + "learning_rate": 0.00019418510367195645, + "loss": 0.4013, + "step": 2260 + }, + { + "epoch": 0.08764817174408278, + "grad_norm": 2.418327569961548, + "learning_rate": 0.00019415936265235466, + "loss": 0.581, + "step": 2270 + }, + { + "epoch": 0.08803428703810957, + "grad_norm": 4.029652118682861, + "learning_rate": 0.00019413362163275287, + "loss": 0.5244, + "step": 2280 + }, + { + "epoch": 0.08842040233213637, + "grad_norm": 3.4661777019500732, + "learning_rate": 0.00019410788061315112, + "loss": 0.4531, + "step": 2290 + }, + { + "epoch": 0.08880651762616318, + "grad_norm": 1.0475856065750122, + "learning_rate": 0.0001940821395935493, + "loss": 0.4362, + "step": 2300 + }, + { + "epoch": 0.08919263292018997, + "grad_norm": 4.021854400634766, + "learning_rate": 0.00019405639857394754, + "loss": 0.4532, + "step": 2310 + }, + { + "epoch": 0.08957874821421677, + "grad_norm": 1.836438536643982, + "learning_rate": 0.00019403065755434573, + "loss": 0.6858, + "step": 2320 + }, + { + "epoch": 0.08996486350824356, + "grad_norm": 2.5251567363739014, + "learning_rate": 0.00019400491653474394, + "loss": 0.3619, + "step": 2330 + }, + { + "epoch": 0.09035097880227036, + "grad_norm": 3.067208766937256, + "learning_rate": 0.00019397917551514215, + "loss": 0.7376, + "step": 2340 + }, + { + "epoch": 0.09073709409629716, + "grad_norm": 0.9124518036842346, + "learning_rate": 0.00019395343449554037, + "loss": 0.4193, + "step": 2350 + }, + { + "epoch": 0.09112320939032395, + "grad_norm": 3.8170812129974365, + "learning_rate": 0.0001939276934759386, + "loss": 0.5393, + "step": 2360 + }, + { + "epoch": 0.09150932468435075, + "grad_norm": 8.19250202178955, + "learning_rate": 0.0001939019524563368, + "loss": 0.424, + "step": 2370 + }, + { + "epoch": 0.09189543997837754, + "grad_norm": 4.459112167358398, + "learning_rate": 0.00019387621143673503, + "loss": 0.3278, + "step": 2380 + }, + { + "epoch": 0.09228155527240434, + "grad_norm": 
5.578339576721191, + "learning_rate": 0.00019385047041713322, + "loss": 0.5223, + "step": 2390 + }, + { + "epoch": 0.09266767056643113, + "grad_norm": 1.3707878589630127, + "learning_rate": 0.00019382472939753143, + "loss": 0.3004, + "step": 2400 + }, + { + "epoch": 0.09305378586045793, + "grad_norm": 5.0041184425354, + "learning_rate": 0.00019379898837792967, + "loss": 0.4378, + "step": 2410 + }, + { + "epoch": 0.09343990115448472, + "grad_norm": 5.668384552001953, + "learning_rate": 0.00019377324735832786, + "loss": 0.499, + "step": 2420 + }, + { + "epoch": 0.09382601644851153, + "grad_norm": 5.605838775634766, + "learning_rate": 0.0001937475063387261, + "loss": 0.5642, + "step": 2430 + }, + { + "epoch": 0.09421213174253833, + "grad_norm": 5.055904865264893, + "learning_rate": 0.0001937217653191243, + "loss": 0.6225, + "step": 2440 + }, + { + "epoch": 0.09459824703656512, + "grad_norm": 3.1779348850250244, + "learning_rate": 0.00019369602429952253, + "loss": 0.3673, + "step": 2450 + }, + { + "epoch": 0.09498436233059192, + "grad_norm": 2.540269136428833, + "learning_rate": 0.00019367028327992071, + "loss": 0.3499, + "step": 2460 + }, + { + "epoch": 0.09537047762461871, + "grad_norm": 2.284114122390747, + "learning_rate": 0.00019364454226031893, + "loss": 0.5405, + "step": 2470 + }, + { + "epoch": 0.09575659291864551, + "grad_norm": 6.752682685852051, + "learning_rate": 0.00019361880124071717, + "loss": 0.5594, + "step": 2480 + }, + { + "epoch": 0.0961427082126723, + "grad_norm": 2.5659310817718506, + "learning_rate": 0.00019359306022111535, + "loss": 0.5872, + "step": 2490 + }, + { + "epoch": 0.0965288235066991, + "grad_norm": 4.503110885620117, + "learning_rate": 0.0001935673192015136, + "loss": 0.5171, + "step": 2500 + }, + { + "epoch": 0.0969149388007259, + "grad_norm": 1.7715080976486206, + "learning_rate": 0.00019354157818191178, + "loss": 0.6131, + "step": 2510 + }, + { + "epoch": 0.09730105409475269, + "grad_norm": 4.479000568389893, + "learning_rate": 0.00019351583716231002, + "loss": 0.4396, + "step": 2520 + }, + { + "epoch": 0.09768716938877949, + "grad_norm": 0.9424387812614441, + "learning_rate": 0.00019349009614270823, + "loss": 0.3166, + "step": 2530 + }, + { + "epoch": 0.09807328468280628, + "grad_norm": 3.792689800262451, + "learning_rate": 0.00019346435512310642, + "loss": 0.2591, + "step": 2540 + }, + { + "epoch": 0.09845939997683308, + "grad_norm": 2.9132003784179688, + "learning_rate": 0.00019343861410350466, + "loss": 0.3523, + "step": 2550 + }, + { + "epoch": 0.09884551527085988, + "grad_norm": 1.6421749591827393, + "learning_rate": 0.00019341287308390285, + "loss": 0.2805, + "step": 2560 + }, + { + "epoch": 0.09923163056488668, + "grad_norm": 1.9469813108444214, + "learning_rate": 0.0001933871320643011, + "loss": 0.3929, + "step": 2570 + }, + { + "epoch": 0.09961774585891348, + "grad_norm": 3.081062078475952, + "learning_rate": 0.00019336139104469927, + "loss": 0.569, + "step": 2580 + }, + { + "epoch": 0.10000386115294027, + "grad_norm": 4.728143215179443, + "learning_rate": 0.00019333565002509751, + "loss": 0.3443, + "step": 2590 + }, + { + "epoch": 0.10038997644696707, + "grad_norm": 2.4117422103881836, + "learning_rate": 0.00019330990900549573, + "loss": 0.4492, + "step": 2600 + }, + { + "epoch": 0.10077609174099386, + "grad_norm": 5.794168472290039, + "learning_rate": 0.00019328416798589391, + "loss": 0.5088, + "step": 2610 + }, + { + "epoch": 0.10116220703502066, + "grad_norm": 1.0662094354629517, + "learning_rate": 0.00019325842696629215, + "loss": 0.4683, + 
"step": 2620 + }, + { + "epoch": 0.10154832232904745, + "grad_norm": 2.13590931892395, + "learning_rate": 0.00019323268594669034, + "loss": 0.5833, + "step": 2630 + }, + { + "epoch": 0.10193443762307425, + "grad_norm": 7.52834415435791, + "learning_rate": 0.00019320694492708858, + "loss": 0.54, + "step": 2640 + }, + { + "epoch": 0.10232055291710104, + "grad_norm": 6.155847072601318, + "learning_rate": 0.00019318120390748677, + "loss": 0.6809, + "step": 2650 + }, + { + "epoch": 0.10270666821112784, + "grad_norm": 6.527890205383301, + "learning_rate": 0.000193155462887885, + "loss": 0.5239, + "step": 2660 + }, + { + "epoch": 0.10309278350515463, + "grad_norm": 3.3918986320495605, + "learning_rate": 0.00019312972186828322, + "loss": 0.4636, + "step": 2670 + }, + { + "epoch": 0.10347889879918143, + "grad_norm": 2.0933191776275635, + "learning_rate": 0.0001931039808486814, + "loss": 0.5288, + "step": 2680 + }, + { + "epoch": 0.10386501409320824, + "grad_norm": 2.4386465549468994, + "learning_rate": 0.00019307823982907965, + "loss": 0.5496, + "step": 2690 + }, + { + "epoch": 0.10425112938723503, + "grad_norm": 2.885315418243408, + "learning_rate": 0.00019305249880947783, + "loss": 0.5928, + "step": 2700 + }, + { + "epoch": 0.10463724468126183, + "grad_norm": 4.986598968505859, + "learning_rate": 0.00019302675778987607, + "loss": 0.3513, + "step": 2710 + }, + { + "epoch": 0.10502335997528862, + "grad_norm": 2.7999277114868164, + "learning_rate": 0.0001930010167702743, + "loss": 0.4484, + "step": 2720 + }, + { + "epoch": 0.10540947526931542, + "grad_norm": 1.6467676162719727, + "learning_rate": 0.0001929752757506725, + "loss": 0.4729, + "step": 2730 + }, + { + "epoch": 0.10579559056334222, + "grad_norm": 2.168477773666382, + "learning_rate": 0.00019294953473107071, + "loss": 0.3579, + "step": 2740 + }, + { + "epoch": 0.10618170585736901, + "grad_norm": 1.5439807176589966, + "learning_rate": 0.00019292379371146893, + "loss": 0.5715, + "step": 2750 + }, + { + "epoch": 0.1065678211513958, + "grad_norm": 1.2175947427749634, + "learning_rate": 0.00019289805269186714, + "loss": 0.4059, + "step": 2760 + }, + { + "epoch": 0.1069539364454226, + "grad_norm": 2.649782419204712, + "learning_rate": 0.00019287231167226533, + "loss": 0.7864, + "step": 2770 + }, + { + "epoch": 0.1073400517394494, + "grad_norm": 3.698002815246582, + "learning_rate": 0.00019284657065266357, + "loss": 0.5107, + "step": 2780 + }, + { + "epoch": 0.10772616703347619, + "grad_norm": 1.7418729066848755, + "learning_rate": 0.00019282082963306178, + "loss": 0.332, + "step": 2790 + }, + { + "epoch": 0.10811228232750299, + "grad_norm": 2.2264151573181152, + "learning_rate": 0.00019279508861346, + "loss": 0.3944, + "step": 2800 + }, + { + "epoch": 0.10849839762152978, + "grad_norm": 1.3407092094421387, + "learning_rate": 0.0001927693475938582, + "loss": 0.4603, + "step": 2810 + }, + { + "epoch": 0.10888451291555659, + "grad_norm": 0.3719189763069153, + "learning_rate": 0.00019274360657425642, + "loss": 0.496, + "step": 2820 + }, + { + "epoch": 0.10927062820958339, + "grad_norm": 4.728814125061035, + "learning_rate": 0.00019271786555465463, + "loss": 0.4079, + "step": 2830 + }, + { + "epoch": 0.10965674350361018, + "grad_norm": 5.007620334625244, + "learning_rate": 0.00019269212453505285, + "loss": 0.5221, + "step": 2840 + }, + { + "epoch": 0.11004285879763698, + "grad_norm": 2.7476320266723633, + "learning_rate": 0.00019266638351545106, + "loss": 0.3705, + "step": 2850 + }, + { + "epoch": 0.11042897409166377, + "grad_norm": 
2.2960126399993896, + "learning_rate": 0.00019264064249584927, + "loss": 0.6656, + "step": 2860 + }, + { + "epoch": 0.11081508938569057, + "grad_norm": 0.9589812755584717, + "learning_rate": 0.0001926149014762475, + "loss": 0.6977, + "step": 2870 + }, + { + "epoch": 0.11120120467971736, + "grad_norm": 2.274984121322632, + "learning_rate": 0.0001925891604566457, + "loss": 0.4237, + "step": 2880 + }, + { + "epoch": 0.11158731997374416, + "grad_norm": 1.8849111795425415, + "learning_rate": 0.00019256341943704391, + "loss": 0.2357, + "step": 2890 + }, + { + "epoch": 0.11197343526777095, + "grad_norm": 2.7264740467071533, + "learning_rate": 0.00019253767841744213, + "loss": 0.3422, + "step": 2900 + }, + { + "epoch": 0.11235955056179775, + "grad_norm": 3.832827568054199, + "learning_rate": 0.00019251193739784034, + "loss": 0.3861, + "step": 2910 + }, + { + "epoch": 0.11274566585582455, + "grad_norm": 2.3612313270568848, + "learning_rate": 0.00019248619637823855, + "loss": 0.3299, + "step": 2920 + }, + { + "epoch": 0.11313178114985134, + "grad_norm": 2.2509396076202393, + "learning_rate": 0.00019246045535863677, + "loss": 0.6027, + "step": 2930 + }, + { + "epoch": 0.11351789644387814, + "grad_norm": 2.7600464820861816, + "learning_rate": 0.00019243471433903498, + "loss": 0.3864, + "step": 2940 + }, + { + "epoch": 0.11390401173790494, + "grad_norm": 5.956289768218994, + "learning_rate": 0.0001924089733194332, + "loss": 0.4669, + "step": 2950 + }, + { + "epoch": 0.11429012703193174, + "grad_norm": 4.651761531829834, + "learning_rate": 0.0001923832322998314, + "loss": 0.5774, + "step": 2960 + }, + { + "epoch": 0.11467624232595854, + "grad_norm": 1.1770590543746948, + "learning_rate": 0.00019235749128022962, + "loss": 0.3951, + "step": 2970 + }, + { + "epoch": 0.11506235761998533, + "grad_norm": 0.8117956519126892, + "learning_rate": 0.00019233175026062783, + "loss": 0.3919, + "step": 2980 + }, + { + "epoch": 0.11544847291401213, + "grad_norm": 1.318812608718872, + "learning_rate": 0.00019230600924102605, + "loss": 0.2905, + "step": 2990 + }, + { + "epoch": 0.11583458820803892, + "grad_norm": 1.981382966041565, + "learning_rate": 0.00019228026822142426, + "loss": 0.5312, + "step": 3000 + }, + { + "epoch": 0.11622070350206572, + "grad_norm": 1.9083003997802734, + "learning_rate": 0.00019225452720182247, + "loss": 0.3129, + "step": 3010 + }, + { + "epoch": 0.11660681879609251, + "grad_norm": 2.7581653594970703, + "learning_rate": 0.0001922287861822207, + "loss": 0.3954, + "step": 3020 + }, + { + "epoch": 0.11699293409011931, + "grad_norm": 1.065090537071228, + "learning_rate": 0.0001922030451626189, + "loss": 0.2722, + "step": 3030 + }, + { + "epoch": 0.1173790493841461, + "grad_norm": 0.600864827632904, + "learning_rate": 0.0001921773041430171, + "loss": 0.493, + "step": 3040 + }, + { + "epoch": 0.1177651646781729, + "grad_norm": 4.4449052810668945, + "learning_rate": 0.00019215156312341533, + "loss": 0.4982, + "step": 3050 + }, + { + "epoch": 0.1181512799721997, + "grad_norm": 3.34476637840271, + "learning_rate": 0.00019212582210381354, + "loss": 0.3204, + "step": 3060 + }, + { + "epoch": 0.11853739526622649, + "grad_norm": 1.7432445287704468, + "learning_rate": 0.00019210008108421175, + "loss": 0.6601, + "step": 3070 + }, + { + "epoch": 0.1189235105602533, + "grad_norm": 1.908324956893921, + "learning_rate": 0.00019207434006460997, + "loss": 0.5947, + "step": 3080 + }, + { + "epoch": 0.1193096258542801, + "grad_norm": 5.373056888580322, + "learning_rate": 0.00019204859904500818, + "loss": 0.5169, + 
"step": 3090 + }, + { + "epoch": 0.11969574114830689, + "grad_norm": 0.861535370349884, + "learning_rate": 0.0001920228580254064, + "loss": 0.3829, + "step": 3100 + }, + { + "epoch": 0.12008185644233368, + "grad_norm": 1.2700462341308594, + "learning_rate": 0.0001919971170058046, + "loss": 0.4475, + "step": 3110 + }, + { + "epoch": 0.12046797173636048, + "grad_norm": 2.9959444999694824, + "learning_rate": 0.00019197137598620282, + "loss": 0.4704, + "step": 3120 + }, + { + "epoch": 0.12085408703038727, + "grad_norm": 0.280109167098999, + "learning_rate": 0.00019194563496660103, + "loss": 0.3732, + "step": 3130 + }, + { + "epoch": 0.12124020232441407, + "grad_norm": 0.9746024012565613, + "learning_rate": 0.00019191989394699925, + "loss": 0.4693, + "step": 3140 + }, + { + "epoch": 0.12162631761844087, + "grad_norm": 1.7267721891403198, + "learning_rate": 0.00019189415292739746, + "loss": 0.4509, + "step": 3150 + }, + { + "epoch": 0.12201243291246766, + "grad_norm": 2.1759033203125, + "learning_rate": 0.00019186841190779567, + "loss": 0.428, + "step": 3160 + }, + { + "epoch": 0.12239854820649446, + "grad_norm": 1.270711064338684, + "learning_rate": 0.0001918426708881939, + "loss": 0.4262, + "step": 3170 + }, + { + "epoch": 0.12278466350052125, + "grad_norm": 3.7549123764038086, + "learning_rate": 0.0001918169298685921, + "loss": 0.4758, + "step": 3180 + }, + { + "epoch": 0.12317077879454805, + "grad_norm": 1.6550017595291138, + "learning_rate": 0.0001917911888489903, + "loss": 0.452, + "step": 3190 + }, + { + "epoch": 0.12355689408857484, + "grad_norm": 3.7151713371276855, + "learning_rate": 0.00019176544782938853, + "loss": 0.4844, + "step": 3200 + }, + { + "epoch": 0.12394300938260165, + "grad_norm": 0.5354440808296204, + "learning_rate": 0.00019173970680978674, + "loss": 0.4432, + "step": 3210 + }, + { + "epoch": 0.12432912467662845, + "grad_norm": 3.2494261264801025, + "learning_rate": 0.00019171396579018498, + "loss": 0.587, + "step": 3220 + }, + { + "epoch": 0.12471523997065524, + "grad_norm": 1.2129877805709839, + "learning_rate": 0.00019168822477058317, + "loss": 0.4662, + "step": 3230 + }, + { + "epoch": 0.12510135526468202, + "grad_norm": 3.723402500152588, + "learning_rate": 0.00019166248375098138, + "loss": 0.5261, + "step": 3240 + }, + { + "epoch": 0.12548747055870882, + "grad_norm": 1.596259593963623, + "learning_rate": 0.0001916367427313796, + "loss": 0.2802, + "step": 3250 + }, + { + "epoch": 0.12587358585273561, + "grad_norm": 5.5710320472717285, + "learning_rate": 0.0001916110017117778, + "loss": 0.5246, + "step": 3260 + }, + { + "epoch": 0.1262597011467624, + "grad_norm": 4.490183353424072, + "learning_rate": 0.00019158526069217602, + "loss": 0.4929, + "step": 3270 + }, + { + "epoch": 0.12664581644078923, + "grad_norm": 2.482572555541992, + "learning_rate": 0.00019155951967257423, + "loss": 0.3677, + "step": 3280 + }, + { + "epoch": 0.12703193173481603, + "grad_norm": 3.348520517349243, + "learning_rate": 0.00019153377865297247, + "loss": 0.6471, + "step": 3290 + }, + { + "epoch": 0.12741804702884282, + "grad_norm": 7.735306262969971, + "learning_rate": 0.00019150803763337066, + "loss": 0.6057, + "step": 3300 + }, + { + "epoch": 0.12780416232286962, + "grad_norm": 2.120649576187134, + "learning_rate": 0.00019148229661376887, + "loss": 0.5408, + "step": 3310 + }, + { + "epoch": 0.12819027761689641, + "grad_norm": 10.259540557861328, + "learning_rate": 0.00019145655559416709, + "loss": 0.4753, + "step": 3320 + }, + { + "epoch": 0.1285763929109232, + "grad_norm": 
4.094576358795166, + "learning_rate": 0.0001914308145745653, + "loss": 0.3832, + "step": 3330 + }, + { + "epoch": 0.12896250820495, + "grad_norm": 3.1248559951782227, + "learning_rate": 0.00019140507355496354, + "loss": 0.5631, + "step": 3340 + }, + { + "epoch": 0.1293486234989768, + "grad_norm": 1.2975168228149414, + "learning_rate": 0.00019137933253536173, + "loss": 0.5158, + "step": 3350 + }, + { + "epoch": 0.1297347387930036, + "grad_norm": 3.2515244483947754, + "learning_rate": 0.00019135359151575997, + "loss": 0.4176, + "step": 3360 + }, + { + "epoch": 0.1301208540870304, + "grad_norm": 2.287757396697998, + "learning_rate": 0.00019132785049615815, + "loss": 0.5316, + "step": 3370 + }, + { + "epoch": 0.1305069693810572, + "grad_norm": 8.668967247009277, + "learning_rate": 0.00019130210947655637, + "loss": 0.6653, + "step": 3380 + }, + { + "epoch": 0.13089308467508398, + "grad_norm": 4.751536846160889, + "learning_rate": 0.00019127636845695458, + "loss": 0.4508, + "step": 3390 + }, + { + "epoch": 0.13127919996911078, + "grad_norm": 3.240792751312256, + "learning_rate": 0.0001912506274373528, + "loss": 0.441, + "step": 3400 + }, + { + "epoch": 0.13166531526313757, + "grad_norm": 2.146261215209961, + "learning_rate": 0.00019122488641775103, + "loss": 0.3394, + "step": 3410 + }, + { + "epoch": 0.13205143055716437, + "grad_norm": 2.259693145751953, + "learning_rate": 0.00019119914539814922, + "loss": 0.4348, + "step": 3420 + }, + { + "epoch": 0.13243754585119116, + "grad_norm": 1.8136098384857178, + "learning_rate": 0.00019117340437854746, + "loss": 0.4441, + "step": 3430 + }, + { + "epoch": 0.13282366114521796, + "grad_norm": 1.7324503660202026, + "learning_rate": 0.00019114766335894565, + "loss": 0.4725, + "step": 3440 + }, + { + "epoch": 0.13320977643924475, + "grad_norm": 4.709383487701416, + "learning_rate": 0.00019112192233934389, + "loss": 0.5383, + "step": 3450 + }, + { + "epoch": 0.13359589173327155, + "grad_norm": 0.3468118906021118, + "learning_rate": 0.00019109618131974207, + "loss": 0.3228, + "step": 3460 + }, + { + "epoch": 0.13398200702729834, + "grad_norm": 6.66448974609375, + "learning_rate": 0.00019107044030014029, + "loss": 0.4128, + "step": 3470 + }, + { + "epoch": 0.13436812232132514, + "grad_norm": 1.6971935033798218, + "learning_rate": 0.00019104469928053853, + "loss": 0.5061, + "step": 3480 + }, + { + "epoch": 0.13475423761535194, + "grad_norm": 0.9180198311805725, + "learning_rate": 0.0001910189582609367, + "loss": 0.4445, + "step": 3490 + }, + { + "epoch": 0.13514035290937873, + "grad_norm": 3.1244235038757324, + "learning_rate": 0.00019099321724133495, + "loss": 0.4099, + "step": 3500 + }, + { + "epoch": 0.13552646820340553, + "grad_norm": 2.7192864418029785, + "learning_rate": 0.00019096747622173314, + "loss": 0.5004, + "step": 3510 + }, + { + "epoch": 0.13591258349743232, + "grad_norm": 2.0905699729919434, + "learning_rate": 0.00019094173520213138, + "loss": 0.4277, + "step": 3520 + }, + { + "epoch": 0.13629869879145912, + "grad_norm": 3.3753092288970947, + "learning_rate": 0.0001909159941825296, + "loss": 0.4129, + "step": 3530 + }, + { + "epoch": 0.13668481408548594, + "grad_norm": 4.199211120605469, + "learning_rate": 0.00019089025316292778, + "loss": 0.5734, + "step": 3540 + }, + { + "epoch": 0.13707092937951273, + "grad_norm": 2.0411245822906494, + "learning_rate": 0.00019086451214332602, + "loss": 0.3018, + "step": 3550 + }, + { + "epoch": 0.13745704467353953, + "grad_norm": 17.236717224121094, + "learning_rate": 0.0001908387711237242, + "loss": 0.4527, + 
"step": 3560 + }, + { + "epoch": 0.13784315996756633, + "grad_norm": 1.4575644731521606, + "learning_rate": 0.00019081303010412245, + "loss": 0.3773, + "step": 3570 + }, + { + "epoch": 0.13822927526159312, + "grad_norm": 3.926090717315674, + "learning_rate": 0.00019078728908452063, + "loss": 0.5316, + "step": 3580 + }, + { + "epoch": 0.13861539055561992, + "grad_norm": 3.1841864585876465, + "learning_rate": 0.00019076154806491887, + "loss": 0.3705, + "step": 3590 + }, + { + "epoch": 0.1390015058496467, + "grad_norm": 4.08506441116333, + "learning_rate": 0.00019073580704531709, + "loss": 0.4941, + "step": 3600 + }, + { + "epoch": 0.1393876211436735, + "grad_norm": 3.063154458999634, + "learning_rate": 0.00019071006602571527, + "loss": 0.4435, + "step": 3610 + }, + { + "epoch": 0.1397737364377003, + "grad_norm": 6.122230529785156, + "learning_rate": 0.0001906843250061135, + "loss": 0.5067, + "step": 3620 + }, + { + "epoch": 0.1401598517317271, + "grad_norm": 3.3089540004730225, + "learning_rate": 0.0001906585839865117, + "loss": 0.4329, + "step": 3630 + }, + { + "epoch": 0.1405459670257539, + "grad_norm": 1.7245008945465088, + "learning_rate": 0.00019063284296690994, + "loss": 0.4502, + "step": 3640 + }, + { + "epoch": 0.1409320823197807, + "grad_norm": 1.7759568691253662, + "learning_rate": 0.00019060710194730813, + "loss": 0.2379, + "step": 3650 + }, + { + "epoch": 0.14131819761380748, + "grad_norm": 0.432452529668808, + "learning_rate": 0.00019058136092770637, + "loss": 0.4277, + "step": 3660 + }, + { + "epoch": 0.14170431290783428, + "grad_norm": 3.311952829360962, + "learning_rate": 0.00019055561990810458, + "loss": 0.4558, + "step": 3670 + }, + { + "epoch": 0.14209042820186107, + "grad_norm": 1.9942964315414429, + "learning_rate": 0.00019052987888850277, + "loss": 0.3349, + "step": 3680 + }, + { + "epoch": 0.14247654349588787, + "grad_norm": 6.226424217224121, + "learning_rate": 0.000190504137868901, + "loss": 0.5809, + "step": 3690 + }, + { + "epoch": 0.14286265878991466, + "grad_norm": 6.223634719848633, + "learning_rate": 0.0001904783968492992, + "loss": 0.5788, + "step": 3700 + }, + { + "epoch": 0.14324877408394146, + "grad_norm": 0.7370914220809937, + "learning_rate": 0.00019045265582969743, + "loss": 0.4834, + "step": 3710 + }, + { + "epoch": 0.14363488937796826, + "grad_norm": 1.745880365371704, + "learning_rate": 0.00019042691481009565, + "loss": 0.6995, + "step": 3720 + }, + { + "epoch": 0.14402100467199505, + "grad_norm": 0.8839595913887024, + "learning_rate": 0.00019040117379049386, + "loss": 0.3526, + "step": 3730 + }, + { + "epoch": 0.14440711996602185, + "grad_norm": 1.1224008798599243, + "learning_rate": 0.00019037543277089207, + "loss": 0.3558, + "step": 3740 + }, + { + "epoch": 0.14479323526004864, + "grad_norm": 1.0473041534423828, + "learning_rate": 0.00019034969175129026, + "loss": 0.2465, + "step": 3750 + }, + { + "epoch": 0.14517935055407544, + "grad_norm": 3.83192777633667, + "learning_rate": 0.0001903239507316885, + "loss": 0.4832, + "step": 3760 + }, + { + "epoch": 0.14556546584810223, + "grad_norm": 3.323885440826416, + "learning_rate": 0.00019029820971208669, + "loss": 0.4924, + "step": 3770 + }, + { + "epoch": 0.14595158114212903, + "grad_norm": 3.2334187030792236, + "learning_rate": 0.00019027246869248493, + "loss": 0.5053, + "step": 3780 + }, + { + "epoch": 0.14633769643615582, + "grad_norm": 2.280498743057251, + "learning_rate": 0.00019024672767288314, + "loss": 0.554, + "step": 3790 + }, + { + "epoch": 0.14672381173018265, + "grad_norm": 
4.546648979187012, + "learning_rate": 0.00019022098665328135, + "loss": 0.3999, + "step": 3800 + }, + { + "epoch": 0.14710992702420944, + "grad_norm": 0.6303244829177856, + "learning_rate": 0.00019019524563367957, + "loss": 0.4481, + "step": 3810 + }, + { + "epoch": 0.14749604231823624, + "grad_norm": 2.605196475982666, + "learning_rate": 0.00019016950461407775, + "loss": 0.3561, + "step": 3820 + }, + { + "epoch": 0.14788215761226303, + "grad_norm": 3.0562639236450195, + "learning_rate": 0.000190143763594476, + "loss": 0.5903, + "step": 3830 + }, + { + "epoch": 0.14826827290628983, + "grad_norm": 11.164155006408691, + "learning_rate": 0.0001901180225748742, + "loss": 0.4299, + "step": 3840 + }, + { + "epoch": 0.14865438820031662, + "grad_norm": 4.996811866760254, + "learning_rate": 0.00019009228155527242, + "loss": 0.4423, + "step": 3850 + }, + { + "epoch": 0.14904050349434342, + "grad_norm": 2.627272844314575, + "learning_rate": 0.00019006654053567063, + "loss": 0.4875, + "step": 3860 + }, + { + "epoch": 0.1494266187883702, + "grad_norm": 2.6532809734344482, + "learning_rate": 0.00019004079951606885, + "loss": 0.5221, + "step": 3870 + }, + { + "epoch": 0.149812734082397, + "grad_norm": 5.821976661682129, + "learning_rate": 0.00019001505849646706, + "loss": 0.4793, + "step": 3880 + }, + { + "epoch": 0.1501988493764238, + "grad_norm": 2.888029098510742, + "learning_rate": 0.00018998931747686524, + "loss": 0.5784, + "step": 3890 + }, + { + "epoch": 0.1505849646704506, + "grad_norm": 0.9147624969482422, + "learning_rate": 0.00018996357645726349, + "loss": 0.5533, + "step": 3900 + }, + { + "epoch": 0.1509710799644774, + "grad_norm": 2.6088199615478516, + "learning_rate": 0.0001899378354376617, + "loss": 0.5028, + "step": 3910 + }, + { + "epoch": 0.1513571952585042, + "grad_norm": 3.8208296298980713, + "learning_rate": 0.0001899120944180599, + "loss": 0.4934, + "step": 3920 + }, + { + "epoch": 0.15174331055253099, + "grad_norm": 2.8711328506469727, + "learning_rate": 0.00018988635339845813, + "loss": 0.4417, + "step": 3930 + }, + { + "epoch": 0.15212942584655778, + "grad_norm": 2.922855854034424, + "learning_rate": 0.00018986061237885634, + "loss": 0.5303, + "step": 3940 + }, + { + "epoch": 0.15251554114058458, + "grad_norm": 2.52575945854187, + "learning_rate": 0.00018983487135925455, + "loss": 0.397, + "step": 3950 + }, + { + "epoch": 0.15290165643461137, + "grad_norm": 3.3369996547698975, + "learning_rate": 0.00018980913033965277, + "loss": 0.4172, + "step": 3960 + }, + { + "epoch": 0.15328777172863817, + "grad_norm": 1.7678214311599731, + "learning_rate": 0.00018978338932005098, + "loss": 0.3122, + "step": 3970 + }, + { + "epoch": 0.15367388702266496, + "grad_norm": 3.3293211460113525, + "learning_rate": 0.0001897576483004492, + "loss": 0.6864, + "step": 3980 + }, + { + "epoch": 0.15406000231669176, + "grad_norm": 1.4911530017852783, + "learning_rate": 0.0001897319072808474, + "loss": 0.3888, + "step": 3990 + }, + { + "epoch": 0.15444611761071855, + "grad_norm": 1.4884055852890015, + "learning_rate": 0.00018970616626124562, + "loss": 0.3952, + "step": 4000 + }, + { + "epoch": 0.15483223290474535, + "grad_norm": 1.2745383977890015, + "learning_rate": 0.00018968042524164383, + "loss": 0.3647, + "step": 4010 + }, + { + "epoch": 0.15521834819877214, + "grad_norm": 7.799386024475098, + "learning_rate": 0.00018965468422204205, + "loss": 0.5554, + "step": 4020 + }, + { + "epoch": 0.15560446349279894, + "grad_norm": 2.4778294563293457, + "learning_rate": 0.00018962894320244026, + "loss": 0.662, + 
"step": 4030 + }, + { + "epoch": 0.15599057878682573, + "grad_norm": 0.8415629267692566, + "learning_rate": 0.00018960320218283847, + "loss": 0.4317, + "step": 4040 + }, + { + "epoch": 0.15637669408085253, + "grad_norm": 4.507715702056885, + "learning_rate": 0.00018957746116323669, + "loss": 0.4512, + "step": 4050 + }, + { + "epoch": 0.15676280937487935, + "grad_norm": 3.5790421962738037, + "learning_rate": 0.0001895517201436349, + "loss": 0.4022, + "step": 4060 + }, + { + "epoch": 0.15714892466890615, + "grad_norm": 3.7266156673431396, + "learning_rate": 0.0001895259791240331, + "loss": 0.3945, + "step": 4070 + }, + { + "epoch": 0.15753503996293294, + "grad_norm": 7.909580230712891, + "learning_rate": 0.00018950023810443133, + "loss": 0.3726, + "step": 4080 + }, + { + "epoch": 0.15792115525695974, + "grad_norm": 2.2439534664154053, + "learning_rate": 0.00018947449708482954, + "loss": 0.4157, + "step": 4090 + }, + { + "epoch": 0.15830727055098653, + "grad_norm": 1.6076972484588623, + "learning_rate": 0.00018944875606522775, + "loss": 0.2363, + "step": 4100 + }, + { + "epoch": 0.15869338584501333, + "grad_norm": 3.7495157718658447, + "learning_rate": 0.00018942301504562596, + "loss": 0.4908, + "step": 4110 + }, + { + "epoch": 0.15907950113904012, + "grad_norm": 0.2942291796207428, + "learning_rate": 0.00018939727402602418, + "loss": 0.4915, + "step": 4120 + }, + { + "epoch": 0.15946561643306692, + "grad_norm": 1.3951829671859741, + "learning_rate": 0.0001893715330064224, + "loss": 0.4585, + "step": 4130 + }, + { + "epoch": 0.15985173172709372, + "grad_norm": 0.4405671954154968, + "learning_rate": 0.0001893457919868206, + "loss": 0.2839, + "step": 4140 + }, + { + "epoch": 0.1602378470211205, + "grad_norm": 1.0917588472366333, + "learning_rate": 0.00018932005096721882, + "loss": 0.401, + "step": 4150 + }, + { + "epoch": 0.1606239623151473, + "grad_norm": 1.6183397769927979, + "learning_rate": 0.00018929430994761703, + "loss": 0.5555, + "step": 4160 + }, + { + "epoch": 0.1610100776091741, + "grad_norm": 2.0909583568573, + "learning_rate": 0.00018926856892801524, + "loss": 0.52, + "step": 4170 + }, + { + "epoch": 0.1613961929032009, + "grad_norm": 2.901456356048584, + "learning_rate": 0.00018924282790841346, + "loss": 0.603, + "step": 4180 + }, + { + "epoch": 0.1617823081972277, + "grad_norm": 7.230431079864502, + "learning_rate": 0.00018921708688881167, + "loss": 0.6189, + "step": 4190 + }, + { + "epoch": 0.1621684234912545, + "grad_norm": 6.773900508880615, + "learning_rate": 0.00018919134586920988, + "loss": 0.2494, + "step": 4200 + }, + { + "epoch": 0.16255453878528128, + "grad_norm": 0.8557988405227661, + "learning_rate": 0.0001891656048496081, + "loss": 0.2617, + "step": 4210 + }, + { + "epoch": 0.16294065407930808, + "grad_norm": 1.3747268915176392, + "learning_rate": 0.00018913986383000634, + "loss": 0.4189, + "step": 4220 + }, + { + "epoch": 0.16332676937333487, + "grad_norm": 4.072261810302734, + "learning_rate": 0.00018911412281040452, + "loss": 0.5473, + "step": 4230 + }, + { + "epoch": 0.16371288466736167, + "grad_norm": 2.7210185527801514, + "learning_rate": 0.00018908838179080274, + "loss": 0.3501, + "step": 4240 + }, + { + "epoch": 0.16409899996138846, + "grad_norm": 2.276454448699951, + "learning_rate": 0.00018906264077120095, + "loss": 0.3078, + "step": 4250 + }, + { + "epoch": 0.16448511525541526, + "grad_norm": 3.586536169052124, + "learning_rate": 0.00018903689975159916, + "loss": 0.3856, + "step": 4260 + }, + { + "epoch": 0.16487123054944205, + "grad_norm": 
2.199673891067505, + "learning_rate": 0.00018901115873199738, + "loss": 0.3677, + "step": 4270 + }, + { + "epoch": 0.16525734584346885, + "grad_norm": 2.8410561084747314, + "learning_rate": 0.0001889854177123956, + "loss": 0.6101, + "step": 4280 + }, + { + "epoch": 0.16564346113749565, + "grad_norm": 3.9638853073120117, + "learning_rate": 0.00018895967669279383, + "loss": 0.5066, + "step": 4290 + }, + { + "epoch": 0.16602957643152244, + "grad_norm": 1.2070738077163696, + "learning_rate": 0.00018893393567319202, + "loss": 0.385, + "step": 4300 + }, + { + "epoch": 0.16641569172554924, + "grad_norm": 1.0531187057495117, + "learning_rate": 0.00018890819465359023, + "loss": 0.3608, + "step": 4310 + }, + { + "epoch": 0.16680180701957603, + "grad_norm": 1.1998246908187866, + "learning_rate": 0.00018888245363398844, + "loss": 0.4624, + "step": 4320 + }, + { + "epoch": 0.16718792231360285, + "grad_norm": 2.126063346862793, + "learning_rate": 0.00018885671261438666, + "loss": 0.6076, + "step": 4330 + }, + { + "epoch": 0.16757403760762965, + "grad_norm": 1.5854765176773071, + "learning_rate": 0.0001888309715947849, + "loss": 0.4817, + "step": 4340 + }, + { + "epoch": 0.16796015290165645, + "grad_norm": 6.630712509155273, + "learning_rate": 0.00018880523057518308, + "loss": 0.4098, + "step": 4350 + }, + { + "epoch": 0.16834626819568324, + "grad_norm": 2.060789108276367, + "learning_rate": 0.00018877948955558132, + "loss": 0.3523, + "step": 4360 + }, + { + "epoch": 0.16873238348971004, + "grad_norm": 2.2551252841949463, + "learning_rate": 0.0001887537485359795, + "loss": 0.3095, + "step": 4370 + }, + { + "epoch": 0.16911849878373683, + "grad_norm": 3.736640453338623, + "learning_rate": 0.00018872800751637772, + "loss": 0.3812, + "step": 4380 + }, + { + "epoch": 0.16950461407776363, + "grad_norm": 1.9971100091934204, + "learning_rate": 0.00018870226649677594, + "loss": 0.3422, + "step": 4390 + }, + { + "epoch": 0.16989072937179042, + "grad_norm": 3.6577255725860596, + "learning_rate": 0.00018867652547717415, + "loss": 0.7857, + "step": 4400 + }, + { + "epoch": 0.17027684466581722, + "grad_norm": 2.166538715362549, + "learning_rate": 0.0001886507844575724, + "loss": 0.5596, + "step": 4410 + }, + { + "epoch": 0.170662959959844, + "grad_norm": 2.0177736282348633, + "learning_rate": 0.00018862504343797058, + "loss": 0.3197, + "step": 4420 + }, + { + "epoch": 0.1710490752538708, + "grad_norm": 0.29447808861732483, + "learning_rate": 0.00018859930241836882, + "loss": 0.5284, + "step": 4430 + }, + { + "epoch": 0.1714351905478976, + "grad_norm": 2.17985200881958, + "learning_rate": 0.000188573561398767, + "loss": 0.5188, + "step": 4440 + }, + { + "epoch": 0.1718213058419244, + "grad_norm": 2.87449049949646, + "learning_rate": 0.00018854782037916522, + "loss": 0.554, + "step": 4450 + }, + { + "epoch": 0.1722074211359512, + "grad_norm": 1.8865265846252441, + "learning_rate": 0.00018852207935956343, + "loss": 0.4338, + "step": 4460 + }, + { + "epoch": 0.172593536429978, + "grad_norm": 2.042337417602539, + "learning_rate": 0.00018849633833996164, + "loss": 0.3924, + "step": 4470 + }, + { + "epoch": 0.17297965172400478, + "grad_norm": 1.4254354238510132, + "learning_rate": 0.00018847059732035988, + "loss": 0.2607, + "step": 4480 + }, + { + "epoch": 0.17336576701803158, + "grad_norm": 2.611560344696045, + "learning_rate": 0.00018844485630075807, + "loss": 0.4967, + "step": 4490 + }, + { + "epoch": 0.17375188231205838, + "grad_norm": 1.1008936166763306, + "learning_rate": 0.0001884191152811563, + "loss": 0.4109, + 
"step": 4500 + }, + { + "epoch": 0.17413799760608517, + "grad_norm": 0.8280178308486938, + "learning_rate": 0.0001883933742615545, + "loss": 0.6632, + "step": 4510 + }, + { + "epoch": 0.17452411290011197, + "grad_norm": 2.226020336151123, + "learning_rate": 0.0001883676332419527, + "loss": 0.4777, + "step": 4520 + }, + { + "epoch": 0.17491022819413876, + "grad_norm": 1.6062042713165283, + "learning_rate": 0.00018834189222235095, + "loss": 0.4671, + "step": 4530 + }, + { + "epoch": 0.17529634348816556, + "grad_norm": 3.9853012561798096, + "learning_rate": 0.00018831615120274914, + "loss": 0.4843, + "step": 4540 + }, + { + "epoch": 0.17568245878219235, + "grad_norm": 0.30268657207489014, + "learning_rate": 0.00018829041018314738, + "loss": 0.3922, + "step": 4550 + }, + { + "epoch": 0.17606857407621915, + "grad_norm": 6.283960342407227, + "learning_rate": 0.00018826466916354556, + "loss": 0.6106, + "step": 4560 + }, + { + "epoch": 0.17645468937024594, + "grad_norm": 1.4164658784866333, + "learning_rate": 0.0001882389281439438, + "loss": 0.3014, + "step": 4570 + }, + { + "epoch": 0.17684080466427274, + "grad_norm": 4.847668170928955, + "learning_rate": 0.000188213187124342, + "loss": 0.5216, + "step": 4580 + }, + { + "epoch": 0.17722691995829956, + "grad_norm": 3.683180332183838, + "learning_rate": 0.0001881874461047402, + "loss": 0.3268, + "step": 4590 + }, + { + "epoch": 0.17761303525232636, + "grad_norm": 1.053144097328186, + "learning_rate": 0.00018816170508513844, + "loss": 0.5229, + "step": 4600 + }, + { + "epoch": 0.17799915054635315, + "grad_norm": 0.29438719153404236, + "learning_rate": 0.00018813596406553663, + "loss": 0.4523, + "step": 4610 + }, + { + "epoch": 0.17838526584037995, + "grad_norm": 1.5682024955749512, + "learning_rate": 0.00018811022304593487, + "loss": 0.4367, + "step": 4620 + }, + { + "epoch": 0.17877138113440674, + "grad_norm": 1.462189793586731, + "learning_rate": 0.00018808448202633306, + "loss": 0.5086, + "step": 4630 + }, + { + "epoch": 0.17915749642843354, + "grad_norm": 0.7927210927009583, + "learning_rate": 0.0001880587410067313, + "loss": 0.4654, + "step": 4640 + }, + { + "epoch": 0.17954361172246033, + "grad_norm": 1.4543548822402954, + "learning_rate": 0.0001880329999871295, + "loss": 0.5005, + "step": 4650 + }, + { + "epoch": 0.17992972701648713, + "grad_norm": 1.5814868211746216, + "learning_rate": 0.00018800725896752772, + "loss": 0.4127, + "step": 4660 + }, + { + "epoch": 0.18031584231051392, + "grad_norm": 1.9244798421859741, + "learning_rate": 0.00018798151794792594, + "loss": 0.3796, + "step": 4670 + }, + { + "epoch": 0.18070195760454072, + "grad_norm": 1.8725996017456055, + "learning_rate": 0.00018795577692832412, + "loss": 0.4112, + "step": 4680 + }, + { + "epoch": 0.18108807289856751, + "grad_norm": 2.8138442039489746, + "learning_rate": 0.00018793003590872236, + "loss": 0.6117, + "step": 4690 + }, + { + "epoch": 0.1814741881925943, + "grad_norm": 3.4465060234069824, + "learning_rate": 0.00018790429488912055, + "loss": 0.4223, + "step": 4700 + }, + { + "epoch": 0.1818603034866211, + "grad_norm": 4.431785583496094, + "learning_rate": 0.0001878785538695188, + "loss": 0.54, + "step": 4710 + }, + { + "epoch": 0.1822464187806479, + "grad_norm": 6.951846599578857, + "learning_rate": 0.000187852812849917, + "loss": 0.3702, + "step": 4720 + }, + { + "epoch": 0.1826325340746747, + "grad_norm": 1.0188024044036865, + "learning_rate": 0.00018782707183031522, + "loss": 0.2715, + "step": 4730 + }, + { + "epoch": 0.1830186493687015, + "grad_norm": 
0.3875834047794342, + "learning_rate": 0.00018780133081071343, + "loss": 0.4208, + "step": 4740 + }, + { + "epoch": 0.1834047646627283, + "grad_norm": 2.7475740909576416, + "learning_rate": 0.00018777558979111162, + "loss": 0.3613, + "step": 4750 + }, + { + "epoch": 0.18379087995675508, + "grad_norm": 2.553227186203003, + "learning_rate": 0.00018774984877150986, + "loss": 0.4781, + "step": 4760 + }, + { + "epoch": 0.18417699525078188, + "grad_norm": 2.005154609680176, + "learning_rate": 0.00018772410775190804, + "loss": 0.3805, + "step": 4770 + }, + { + "epoch": 0.18456311054480867, + "grad_norm": 0.7380127310752869, + "learning_rate": 0.00018769836673230628, + "loss": 0.3679, + "step": 4780 + }, + { + "epoch": 0.18494922583883547, + "grad_norm": 3.6547505855560303, + "learning_rate": 0.0001876726257127045, + "loss": 0.4502, + "step": 4790 + }, + { + "epoch": 0.18533534113286226, + "grad_norm": 2.232980728149414, + "learning_rate": 0.0001876468846931027, + "loss": 0.4628, + "step": 4800 + }, + { + "epoch": 0.18572145642688906, + "grad_norm": 6.521275043487549, + "learning_rate": 0.00018762114367350092, + "loss": 0.4765, + "step": 4810 + }, + { + "epoch": 0.18610757172091585, + "grad_norm": 1.6310979127883911, + "learning_rate": 0.0001875954026538991, + "loss": 0.4039, + "step": 4820 + }, + { + "epoch": 0.18649368701494265, + "grad_norm": 1.1469775438308716, + "learning_rate": 0.00018756966163429735, + "loss": 0.4195, + "step": 4830 + }, + { + "epoch": 0.18687980230896944, + "grad_norm": 0.7688332200050354, + "learning_rate": 0.00018754392061469556, + "loss": 0.264, + "step": 4840 + }, + { + "epoch": 0.18726591760299627, + "grad_norm": 3.3422155380249023, + "learning_rate": 0.00018751817959509378, + "loss": 0.5275, + "step": 4850 + }, + { + "epoch": 0.18765203289702306, + "grad_norm": 1.517876386642456, + "learning_rate": 0.000187492438575492, + "loss": 0.4567, + "step": 4860 + }, + { + "epoch": 0.18803814819104986, + "grad_norm": 1.2196050882339478, + "learning_rate": 0.0001874666975558902, + "loss": 0.4231, + "step": 4870 + }, + { + "epoch": 0.18842426348507665, + "grad_norm": 1.3325402736663818, + "learning_rate": 0.00018744095653628842, + "loss": 0.6325, + "step": 4880 + }, + { + "epoch": 0.18881037877910345, + "grad_norm": 6.098769664764404, + "learning_rate": 0.0001874152155166866, + "loss": 0.576, + "step": 4890 + }, + { + "epoch": 0.18919649407313024, + "grad_norm": 2.602363348007202, + "learning_rate": 0.00018738947449708484, + "loss": 0.3237, + "step": 4900 + }, + { + "epoch": 0.18958260936715704, + "grad_norm": 0.970106303691864, + "learning_rate": 0.00018736373347748306, + "loss": 0.409, + "step": 4910 + }, + { + "epoch": 0.18996872466118384, + "grad_norm": 3.2592012882232666, + "learning_rate": 0.00018733799245788127, + "loss": 0.408, + "step": 4920 + }, + { + "epoch": 0.19035483995521063, + "grad_norm": 0.31132128834724426, + "learning_rate": 0.00018731225143827948, + "loss": 0.2446, + "step": 4930 + }, + { + "epoch": 0.19074095524923743, + "grad_norm": 5.321741104125977, + "learning_rate": 0.0001872865104186777, + "loss": 0.4604, + "step": 4940 + }, + { + "epoch": 0.19112707054326422, + "grad_norm": 1.1165122985839844, + "learning_rate": 0.0001872607693990759, + "loss": 0.3605, + "step": 4950 + }, + { + "epoch": 0.19151318583729102, + "grad_norm": 0.8274110555648804, + "learning_rate": 0.0001872350283794741, + "loss": 0.2669, + "step": 4960 + }, + { + "epoch": 0.1918993011313178, + "grad_norm": 2.8668346405029297, + "learning_rate": 0.00018720928735987234, + "loss": 0.4055, + 
"step": 4970 + }, + { + "epoch": 0.1922854164253446, + "grad_norm": 3.411841630935669, + "learning_rate": 0.00018718354634027055, + "loss": 0.5989, + "step": 4980 + }, + { + "epoch": 0.1926715317193714, + "grad_norm": 0.18740829825401306, + "learning_rate": 0.00018715780532066876, + "loss": 0.3805, + "step": 4990 + }, + { + "epoch": 0.1930576470133982, + "grad_norm": 1.0823473930358887, + "learning_rate": 0.00018713206430106698, + "loss": 0.2854, + "step": 5000 + }, + { + "epoch": 0.193443762307425, + "grad_norm": 1.9816405773162842, + "learning_rate": 0.0001871063232814652, + "loss": 0.3771, + "step": 5010 + }, + { + "epoch": 0.1938298776014518, + "grad_norm": 5.267081260681152, + "learning_rate": 0.0001870805822618634, + "loss": 0.3085, + "step": 5020 + }, + { + "epoch": 0.19421599289547858, + "grad_norm": 5.706038475036621, + "learning_rate": 0.00018705484124226162, + "loss": 0.484, + "step": 5030 + }, + { + "epoch": 0.19460210818950538, + "grad_norm": 1.3357723951339722, + "learning_rate": 0.00018702910022265983, + "loss": 0.2161, + "step": 5040 + }, + { + "epoch": 0.19498822348353217, + "grad_norm": 1.0626447200775146, + "learning_rate": 0.00018700335920305804, + "loss": 0.3491, + "step": 5050 + }, + { + "epoch": 0.19537433877755897, + "grad_norm": 2.441228151321411, + "learning_rate": 0.00018697761818345626, + "loss": 0.3975, + "step": 5060 + }, + { + "epoch": 0.19576045407158577, + "grad_norm": 2.6739327907562256, + "learning_rate": 0.00018695187716385447, + "loss": 0.3418, + "step": 5070 + }, + { + "epoch": 0.19614656936561256, + "grad_norm": 2.3216919898986816, + "learning_rate": 0.00018692613614425268, + "loss": 0.6265, + "step": 5080 + }, + { + "epoch": 0.19653268465963936, + "grad_norm": 3.9119021892547607, + "learning_rate": 0.0001869003951246509, + "loss": 0.2982, + "step": 5090 + }, + { + "epoch": 0.19691879995366615, + "grad_norm": 5.744061470031738, + "learning_rate": 0.0001868746541050491, + "loss": 0.4048, + "step": 5100 + }, + { + "epoch": 0.19730491524769297, + "grad_norm": 8.512910842895508, + "learning_rate": 0.00018684891308544732, + "loss": 0.3598, + "step": 5110 + }, + { + "epoch": 0.19769103054171977, + "grad_norm": 1.6382296085357666, + "learning_rate": 0.00018682317206584554, + "loss": 0.2121, + "step": 5120 + }, + { + "epoch": 0.19807714583574657, + "grad_norm": 2.1593070030212402, + "learning_rate": 0.00018679743104624375, + "loss": 0.4914, + "step": 5130 + }, + { + "epoch": 0.19846326112977336, + "grad_norm": 3.067112445831299, + "learning_rate": 0.00018677169002664196, + "loss": 0.4171, + "step": 5140 + }, + { + "epoch": 0.19884937642380016, + "grad_norm": 1.9954415559768677, + "learning_rate": 0.00018674594900704018, + "loss": 0.5161, + "step": 5150 + }, + { + "epoch": 0.19923549171782695, + "grad_norm": 2.793346643447876, + "learning_rate": 0.0001867202079874384, + "loss": 0.4159, + "step": 5160 + }, + { + "epoch": 0.19962160701185375, + "grad_norm": 4.087403774261475, + "learning_rate": 0.0001866944669678366, + "loss": 0.3339, + "step": 5170 + }, + { + "epoch": 0.20000772230588054, + "grad_norm": 2.10153865814209, + "learning_rate": 0.00018666872594823482, + "loss": 0.4352, + "step": 5180 + }, + { + "epoch": 0.20039383759990734, + "grad_norm": 2.947117805480957, + "learning_rate": 0.00018664298492863303, + "loss": 0.305, + "step": 5190 + }, + { + "epoch": 0.20077995289393413, + "grad_norm": 1.2496302127838135, + "learning_rate": 0.00018661724390903124, + "loss": 0.4578, + "step": 5200 + }, + { + "epoch": 0.20116606818796093, + "grad_norm": 
0.5246118903160095, + "learning_rate": 0.00018659150288942946, + "loss": 0.7531, + "step": 5210 + }, + { + "epoch": 0.20155218348198772, + "grad_norm": 4.099668502807617, + "learning_rate": 0.00018656576186982767, + "loss": 0.3809, + "step": 5220 + }, + { + "epoch": 0.20193829877601452, + "grad_norm": 4.237419128417969, + "learning_rate": 0.00018654002085022588, + "loss": 0.3169, + "step": 5230 + }, + { + "epoch": 0.20232441407004131, + "grad_norm": 1.6228466033935547, + "learning_rate": 0.0001865142798306241, + "loss": 0.5832, + "step": 5240 + }, + { + "epoch": 0.2027105293640681, + "grad_norm": 4.567386627197266, + "learning_rate": 0.0001864885388110223, + "loss": 0.2177, + "step": 5250 + }, + { + "epoch": 0.2030966446580949, + "grad_norm": 1.4991040229797363, + "learning_rate": 0.00018646279779142052, + "loss": 0.3851, + "step": 5260 + }, + { + "epoch": 0.2034827599521217, + "grad_norm": 2.127082586288452, + "learning_rate": 0.00018643705677181874, + "loss": 0.4721, + "step": 5270 + }, + { + "epoch": 0.2038688752461485, + "grad_norm": 2.9149303436279297, + "learning_rate": 0.00018641131575221695, + "loss": 0.2556, + "step": 5280 + }, + { + "epoch": 0.2042549905401753, + "grad_norm": 0.06375914812088013, + "learning_rate": 0.00018638557473261516, + "loss": 0.3599, + "step": 5290 + }, + { + "epoch": 0.20464110583420209, + "grad_norm": 3.338331699371338, + "learning_rate": 0.00018635983371301338, + "loss": 0.4062, + "step": 5300 + }, + { + "epoch": 0.20502722112822888, + "grad_norm": 4.006681442260742, + "learning_rate": 0.0001863340926934116, + "loss": 0.4538, + "step": 5310 + }, + { + "epoch": 0.20541333642225568, + "grad_norm": 1.1406009197235107, + "learning_rate": 0.0001863083516738098, + "loss": 0.8432, + "step": 5320 + }, + { + "epoch": 0.20579945171628247, + "grad_norm": 9.281437873840332, + "learning_rate": 0.00018628261065420802, + "loss": 0.4538, + "step": 5330 + }, + { + "epoch": 0.20618556701030927, + "grad_norm": 3.1884214878082275, + "learning_rate": 0.00018625686963460626, + "loss": 0.3361, + "step": 5340 + }, + { + "epoch": 0.20657168230433606, + "grad_norm": 1.4311977624893188, + "learning_rate": 0.00018623112861500444, + "loss": 0.5519, + "step": 5350 + }, + { + "epoch": 0.20695779759836286, + "grad_norm": 3.574361801147461, + "learning_rate": 0.00018620538759540266, + "loss": 0.518, + "step": 5360 + }, + { + "epoch": 0.20734391289238968, + "grad_norm": 3.0186073780059814, + "learning_rate": 0.00018617964657580087, + "loss": 0.4204, + "step": 5370 + }, + { + "epoch": 0.20773002818641648, + "grad_norm": 2.832859754562378, + "learning_rate": 0.00018615390555619908, + "loss": 0.5736, + "step": 5380 + }, + { + "epoch": 0.20811614348044327, + "grad_norm": 2.2258200645446777, + "learning_rate": 0.0001861281645365973, + "loss": 0.8194, + "step": 5390 + }, + { + "epoch": 0.20850225877447007, + "grad_norm": 1.0975148677825928, + "learning_rate": 0.0001861024235169955, + "loss": 0.5235, + "step": 5400 + }, + { + "epoch": 0.20888837406849686, + "grad_norm": 2.597329616546631, + "learning_rate": 0.00018607668249739375, + "loss": 0.2798, + "step": 5410 + }, + { + "epoch": 0.20927448936252366, + "grad_norm": 1.3780876398086548, + "learning_rate": 0.00018605094147779194, + "loss": 0.4046, + "step": 5420 + }, + { + "epoch": 0.20966060465655045, + "grad_norm": 2.409886598587036, + "learning_rate": 0.00018602520045819018, + "loss": 0.3243, + "step": 5430 + }, + { + "epoch": 0.21004671995057725, + "grad_norm": 1.0368077754974365, + "learning_rate": 0.00018599945943858836, + "loss": 0.4469, 
+ "step": 5440 + }, + { + "epoch": 0.21043283524460404, + "grad_norm": 2.961658000946045, + "learning_rate": 0.00018597371841898658, + "loss": 0.5104, + "step": 5450 + }, + { + "epoch": 0.21081895053863084, + "grad_norm": 1.1599836349487305, + "learning_rate": 0.00018594797739938482, + "loss": 0.3422, + "step": 5460 + }, + { + "epoch": 0.21120506583265763, + "grad_norm": 3.293682336807251, + "learning_rate": 0.000185922236379783, + "loss": 0.3556, + "step": 5470 + }, + { + "epoch": 0.21159118112668443, + "grad_norm": 1.6923863887786865, + "learning_rate": 0.00018589649536018124, + "loss": 0.3084, + "step": 5480 + }, + { + "epoch": 0.21197729642071123, + "grad_norm": 3.7289531230926514, + "learning_rate": 0.00018587075434057943, + "loss": 0.4668, + "step": 5490 + }, + { + "epoch": 0.21236341171473802, + "grad_norm": 1.3744993209838867, + "learning_rate": 0.00018584501332097767, + "loss": 0.2984, + "step": 5500 + }, + { + "epoch": 0.21274952700876482, + "grad_norm": 1.4377775192260742, + "learning_rate": 0.00018581927230137586, + "loss": 0.2622, + "step": 5510 + }, + { + "epoch": 0.2131356423027916, + "grad_norm": 4.957859992980957, + "learning_rate": 0.00018579353128177407, + "loss": 0.5561, + "step": 5520 + }, + { + "epoch": 0.2135217575968184, + "grad_norm": 3.2645647525787354, + "learning_rate": 0.0001857677902621723, + "loss": 0.59, + "step": 5530 + }, + { + "epoch": 0.2139078728908452, + "grad_norm": 1.1365091800689697, + "learning_rate": 0.0001857420492425705, + "loss": 0.443, + "step": 5540 + }, + { + "epoch": 0.214293988184872, + "grad_norm": 3.187476396560669, + "learning_rate": 0.00018571630822296874, + "loss": 0.2612, + "step": 5550 + }, + { + "epoch": 0.2146801034788988, + "grad_norm": 2.6851940155029297, + "learning_rate": 0.00018569056720336692, + "loss": 0.4543, + "step": 5560 + }, + { + "epoch": 0.2150662187729256, + "grad_norm": 2.2613587379455566, + "learning_rate": 0.00018566482618376516, + "loss": 0.3185, + "step": 5570 + }, + { + "epoch": 0.21545233406695238, + "grad_norm": 1.292475700378418, + "learning_rate": 0.00018563908516416335, + "loss": 0.2794, + "step": 5580 + }, + { + "epoch": 0.21583844936097918, + "grad_norm": 2.0878446102142334, + "learning_rate": 0.00018561334414456156, + "loss": 0.3908, + "step": 5590 + }, + { + "epoch": 0.21622456465500597, + "grad_norm": 8.058819770812988, + "learning_rate": 0.0001855876031249598, + "loss": 0.6282, + "step": 5600 + }, + { + "epoch": 0.21661067994903277, + "grad_norm": 1.8231629133224487, + "learning_rate": 0.000185561862105358, + "loss": 0.4973, + "step": 5610 + }, + { + "epoch": 0.21699679524305956, + "grad_norm": 3.947242259979248, + "learning_rate": 0.00018553612108575623, + "loss": 0.4598, + "step": 5620 + }, + { + "epoch": 0.21738291053708636, + "grad_norm": 3.3258073329925537, + "learning_rate": 0.00018551038006615442, + "loss": 0.5266, + "step": 5630 + }, + { + "epoch": 0.21776902583111318, + "grad_norm": 2.301485300064087, + "learning_rate": 0.00018548463904655266, + "loss": 0.4339, + "step": 5640 + }, + { + "epoch": 0.21815514112513998, + "grad_norm": 4.4706878662109375, + "learning_rate": 0.00018545889802695087, + "loss": 0.5233, + "step": 5650 + }, + { + "epoch": 0.21854125641916677, + "grad_norm": 1.1203399896621704, + "learning_rate": 0.00018543315700734906, + "loss": 0.4547, + "step": 5660 + }, + { + "epoch": 0.21892737171319357, + "grad_norm": 0.3744584918022156, + "learning_rate": 0.0001854074159877473, + "loss": 0.2524, + "step": 5670 + }, + { + "epoch": 0.21931348700722036, + "grad_norm": 
2.7888870239257812, + "learning_rate": 0.00018538167496814548, + "loss": 0.411, + "step": 5680 + }, + { + "epoch": 0.21969960230124716, + "grad_norm": 4.9972429275512695, + "learning_rate": 0.00018535593394854372, + "loss": 0.6359, + "step": 5690 + }, + { + "epoch": 0.22008571759527396, + "grad_norm": 1.1321420669555664, + "learning_rate": 0.0001853301929289419, + "loss": 0.4068, + "step": 5700 + }, + { + "epoch": 0.22047183288930075, + "grad_norm": 1.9291785955429077, + "learning_rate": 0.00018530445190934015, + "loss": 0.5428, + "step": 5710 + }, + { + "epoch": 0.22085794818332755, + "grad_norm": 0.8663263916969299, + "learning_rate": 0.00018527871088973836, + "loss": 0.4662, + "step": 5720 + }, + { + "epoch": 0.22124406347735434, + "grad_norm": 3.039782762527466, + "learning_rate": 0.00018525296987013655, + "loss": 0.3045, + "step": 5730 + }, + { + "epoch": 0.22163017877138114, + "grad_norm": 1.3552179336547852, + "learning_rate": 0.0001852272288505348, + "loss": 0.3411, + "step": 5740 + }, + { + "epoch": 0.22201629406540793, + "grad_norm": 1.4136948585510254, + "learning_rate": 0.00018520148783093298, + "loss": 0.5517, + "step": 5750 + }, + { + "epoch": 0.22240240935943473, + "grad_norm": 2.463942766189575, + "learning_rate": 0.00018517574681133122, + "loss": 0.4681, + "step": 5760 + }, + { + "epoch": 0.22278852465346152, + "grad_norm": 0.9063917994499207, + "learning_rate": 0.0001851500057917294, + "loss": 0.4537, + "step": 5770 + }, + { + "epoch": 0.22317463994748832, + "grad_norm": 2.352678060531616, + "learning_rate": 0.00018512426477212764, + "loss": 0.4245, + "step": 5780 + }, + { + "epoch": 0.2235607552415151, + "grad_norm": 2.0424869060516357, + "learning_rate": 0.00018509852375252586, + "loss": 0.2892, + "step": 5790 + }, + { + "epoch": 0.2239468705355419, + "grad_norm": 2.7604904174804688, + "learning_rate": 0.00018507278273292404, + "loss": 0.3606, + "step": 5800 + }, + { + "epoch": 0.2243329858295687, + "grad_norm": 2.827798366546631, + "learning_rate": 0.00018504704171332228, + "loss": 0.3212, + "step": 5810 + }, + { + "epoch": 0.2247191011235955, + "grad_norm": 3.1988680362701416, + "learning_rate": 0.00018502130069372047, + "loss": 0.5649, + "step": 5820 + }, + { + "epoch": 0.2251052164176223, + "grad_norm": 1.8216092586517334, + "learning_rate": 0.0001849955596741187, + "loss": 0.2871, + "step": 5830 + }, + { + "epoch": 0.2254913317116491, + "grad_norm": 2.7595627307891846, + "learning_rate": 0.00018496981865451692, + "loss": 0.665, + "step": 5840 + }, + { + "epoch": 0.22587744700567589, + "grad_norm": 1.2395098209381104, + "learning_rate": 0.00018494407763491514, + "loss": 0.2504, + "step": 5850 + }, + { + "epoch": 0.22626356229970268, + "grad_norm": 0.6991098523139954, + "learning_rate": 0.00018491833661531335, + "loss": 0.2263, + "step": 5860 + }, + { + "epoch": 0.22664967759372948, + "grad_norm": 11.053647994995117, + "learning_rate": 0.00018489259559571156, + "loss": 0.5919, + "step": 5870 + }, + { + "epoch": 0.22703579288775627, + "grad_norm": 2.8663880825042725, + "learning_rate": 0.00018486685457610978, + "loss": 0.3399, + "step": 5880 + }, + { + "epoch": 0.22742190818178307, + "grad_norm": 1.4995262622833252, + "learning_rate": 0.00018484111355650796, + "loss": 0.4474, + "step": 5890 + }, + { + "epoch": 0.2278080234758099, + "grad_norm": 3.275681972503662, + "learning_rate": 0.0001848153725369062, + "loss": 0.4347, + "step": 5900 + }, + { + "epoch": 0.22819413876983669, + "grad_norm": 14.772253036499023, + "learning_rate": 0.00018478963151730442, + "loss": 
0.3705, + "step": 5910 + }, + { + "epoch": 0.22858025406386348, + "grad_norm": 3.184976816177368, + "learning_rate": 0.00018476389049770263, + "loss": 0.3866, + "step": 5920 + }, + { + "epoch": 0.22896636935789028, + "grad_norm": 2.310765504837036, + "learning_rate": 0.00018473814947810084, + "loss": 0.2717, + "step": 5930 + }, + { + "epoch": 0.22935248465191707, + "grad_norm": 2.061189889907837, + "learning_rate": 0.00018471240845849906, + "loss": 0.2054, + "step": 5940 + }, + { + "epoch": 0.22973859994594387, + "grad_norm": 10.815469741821289, + "learning_rate": 0.00018468666743889727, + "loss": 0.5868, + "step": 5950 + }, + { + "epoch": 0.23012471523997066, + "grad_norm": 1.7080497741699219, + "learning_rate": 0.00018466092641929548, + "loss": 0.236, + "step": 5960 + }, + { + "epoch": 0.23051083053399746, + "grad_norm": 7.389080047607422, + "learning_rate": 0.0001846351853996937, + "loss": 0.2752, + "step": 5970 + }, + { + "epoch": 0.23089694582802425, + "grad_norm": 2.9860422611236572, + "learning_rate": 0.0001846094443800919, + "loss": 0.3436, + "step": 5980 + }, + { + "epoch": 0.23128306112205105, + "grad_norm": 13.12328815460205, + "learning_rate": 0.00018458370336049012, + "loss": 0.3952, + "step": 5990 + }, + { + "epoch": 0.23166917641607784, + "grad_norm": 3.7130823135375977, + "learning_rate": 0.00018455796234088834, + "loss": 0.3658, + "step": 6000 + }, + { + "epoch": 0.23205529171010464, + "grad_norm": 1.8329843282699585, + "learning_rate": 0.00018453222132128655, + "loss": 0.4172, + "step": 6010 + }, + { + "epoch": 0.23244140700413143, + "grad_norm": 1.3583799600601196, + "learning_rate": 0.00018450648030168476, + "loss": 0.4005, + "step": 6020 + }, + { + "epoch": 0.23282752229815823, + "grad_norm": 3.1711816787719727, + "learning_rate": 0.00018448073928208297, + "loss": 0.3674, + "step": 6030 + }, + { + "epoch": 0.23321363759218502, + "grad_norm": 1.576937198638916, + "learning_rate": 0.0001844549982624812, + "loss": 0.3444, + "step": 6040 + }, + { + "epoch": 0.23359975288621182, + "grad_norm": 3.922267436981201, + "learning_rate": 0.0001844292572428794, + "loss": 0.5939, + "step": 6050 + }, + { + "epoch": 0.23398586818023862, + "grad_norm": 2.9851067066192627, + "learning_rate": 0.00018440351622327761, + "loss": 0.2387, + "step": 6060 + }, + { + "epoch": 0.2343719834742654, + "grad_norm": 2.1216888427734375, + "learning_rate": 0.00018437777520367583, + "loss": 0.3836, + "step": 6070 + }, + { + "epoch": 0.2347580987682922, + "grad_norm": 2.9788095951080322, + "learning_rate": 0.00018435203418407404, + "loss": 0.474, + "step": 6080 + }, + { + "epoch": 0.235144214062319, + "grad_norm": 1.0204919576644897, + "learning_rate": 0.00018432629316447225, + "loss": 0.2837, + "step": 6090 + }, + { + "epoch": 0.2355303293563458, + "grad_norm": 0.9091696739196777, + "learning_rate": 0.00018430055214487047, + "loss": 0.6203, + "step": 6100 + }, + { + "epoch": 0.2359164446503726, + "grad_norm": 0.25899162888526917, + "learning_rate": 0.00018427481112526868, + "loss": 0.4759, + "step": 6110 + }, + { + "epoch": 0.2363025599443994, + "grad_norm": 1.8625538349151611, + "learning_rate": 0.0001842490701056669, + "loss": 0.2992, + "step": 6120 + }, + { + "epoch": 0.23668867523842618, + "grad_norm": 1.586521863937378, + "learning_rate": 0.0001842233290860651, + "loss": 0.6122, + "step": 6130 + }, + { + "epoch": 0.23707479053245298, + "grad_norm": 2.387650966644287, + "learning_rate": 0.00018419758806646332, + "loss": 0.3276, + "step": 6140 + }, + { + "epoch": 0.23746090582647977, + "grad_norm": 
4.840515613555908, + "learning_rate": 0.00018417184704686153, + "loss": 0.6295, + "step": 6150 + }, + { + "epoch": 0.2378470211205066, + "grad_norm": 1.70024836063385, + "learning_rate": 0.00018414610602725975, + "loss": 0.2047, + "step": 6160 + }, + { + "epoch": 0.2382331364145334, + "grad_norm": 2.791619062423706, + "learning_rate": 0.00018412036500765796, + "loss": 0.4364, + "step": 6170 + }, + { + "epoch": 0.2386192517085602, + "grad_norm": 3.710066318511963, + "learning_rate": 0.00018409462398805617, + "loss": 0.4564, + "step": 6180 + }, + { + "epoch": 0.23900536700258698, + "grad_norm": 2.564347982406616, + "learning_rate": 0.0001840688829684544, + "loss": 0.3156, + "step": 6190 + }, + { + "epoch": 0.23939148229661378, + "grad_norm": 2.3921267986297607, + "learning_rate": 0.0001840431419488526, + "loss": 0.3483, + "step": 6200 + }, + { + "epoch": 0.23977759759064057, + "grad_norm": 1.4785810708999634, + "learning_rate": 0.00018401740092925081, + "loss": 0.4338, + "step": 6210 + }, + { + "epoch": 0.24016371288466737, + "grad_norm": 3.624790906906128, + "learning_rate": 0.00018399165990964903, + "loss": 0.7156, + "step": 6220 + }, + { + "epoch": 0.24054982817869416, + "grad_norm": 3.942161798477173, + "learning_rate": 0.00018396591889004724, + "loss": 0.3932, + "step": 6230 + }, + { + "epoch": 0.24093594347272096, + "grad_norm": 3.2236740589141846, + "learning_rate": 0.00018394017787044545, + "loss": 0.3933, + "step": 6240 + }, + { + "epoch": 0.24132205876674775, + "grad_norm": 2.5040500164031982, + "learning_rate": 0.00018391443685084367, + "loss": 0.5711, + "step": 6250 + }, + { + "epoch": 0.24170817406077455, + "grad_norm": 1.9934203624725342, + "learning_rate": 0.00018388869583124188, + "loss": 0.3074, + "step": 6260 + }, + { + "epoch": 0.24209428935480135, + "grad_norm": 3.702509641647339, + "learning_rate": 0.0001838629548116401, + "loss": 0.3454, + "step": 6270 + }, + { + "epoch": 0.24248040464882814, + "grad_norm": 2.076802968978882, + "learning_rate": 0.0001838372137920383, + "loss": 0.3044, + "step": 6280 + }, + { + "epoch": 0.24286651994285494, + "grad_norm": 5.798679351806641, + "learning_rate": 0.00018381147277243652, + "loss": 0.3396, + "step": 6290 + }, + { + "epoch": 0.24325263523688173, + "grad_norm": 4.698869705200195, + "learning_rate": 0.00018378573175283473, + "loss": 0.3735, + "step": 6300 + }, + { + "epoch": 0.24363875053090853, + "grad_norm": 3.029979705810547, + "learning_rate": 0.00018375999073323295, + "loss": 0.3891, + "step": 6310 + }, + { + "epoch": 0.24402486582493532, + "grad_norm": 2.5507185459136963, + "learning_rate": 0.00018373424971363116, + "loss": 0.4854, + "step": 6320 + }, + { + "epoch": 0.24441098111896212, + "grad_norm": 3.2052571773529053, + "learning_rate": 0.00018370850869402937, + "loss": 0.6789, + "step": 6330 + }, + { + "epoch": 0.2447970964129889, + "grad_norm": 1.9265435934066772, + "learning_rate": 0.00018368276767442761, + "loss": 0.4505, + "step": 6340 + }, + { + "epoch": 0.2451832117070157, + "grad_norm": 0.8391959071159363, + "learning_rate": 0.0001836570266548258, + "loss": 0.3432, + "step": 6350 + }, + { + "epoch": 0.2455693270010425, + "grad_norm": 3.4653851985931396, + "learning_rate": 0.00018363128563522401, + "loss": 0.3571, + "step": 6360 + }, + { + "epoch": 0.2459554422950693, + "grad_norm": 2.3033368587493896, + "learning_rate": 0.00018360554461562223, + "loss": 0.3625, + "step": 6370 + }, + { + "epoch": 0.2463415575890961, + "grad_norm": 1.659408450126648, + "learning_rate": 0.00018357980359602044, + "loss": 0.5311, + 
"step": 6380 + }, + { + "epoch": 0.2467276728831229, + "grad_norm": 1.1839714050292969, + "learning_rate": 0.00018355406257641865, + "loss": 0.3905, + "step": 6390 + }, + { + "epoch": 0.24711378817714968, + "grad_norm": 0.49230822920799255, + "learning_rate": 0.00018352832155681687, + "loss": 0.4021, + "step": 6400 + }, + { + "epoch": 0.24749990347117648, + "grad_norm": 4.451594829559326, + "learning_rate": 0.0001835025805372151, + "loss": 0.4504, + "step": 6410 + }, + { + "epoch": 0.2478860187652033, + "grad_norm": 1.0058324337005615, + "learning_rate": 0.0001834768395176133, + "loss": 0.2636, + "step": 6420 + }, + { + "epoch": 0.2482721340592301, + "grad_norm": 2.7853894233703613, + "learning_rate": 0.0001834510984980115, + "loss": 0.47, + "step": 6430 + }, + { + "epoch": 0.2486582493532569, + "grad_norm": 2.730095148086548, + "learning_rate": 0.00018342535747840972, + "loss": 0.3941, + "step": 6440 + }, + { + "epoch": 0.2490443646472837, + "grad_norm": 2.4993178844451904, + "learning_rate": 0.00018339961645880793, + "loss": 0.5777, + "step": 6450 + }, + { + "epoch": 0.24943047994131048, + "grad_norm": 2.361525297164917, + "learning_rate": 0.00018337387543920617, + "loss": 0.3798, + "step": 6460 + }, + { + "epoch": 0.24981659523533728, + "grad_norm": 2.5558526515960693, + "learning_rate": 0.00018334813441960436, + "loss": 0.3113, + "step": 6470 + }, + { + "epoch": 0.25020271052936405, + "grad_norm": 0.8033503890037537, + "learning_rate": 0.0001833223934000026, + "loss": 0.5254, + "step": 6480 + }, + { + "epoch": 0.25058882582339087, + "grad_norm": 2.721090078353882, + "learning_rate": 0.0001832966523804008, + "loss": 0.393, + "step": 6490 + }, + { + "epoch": 0.25097494111741764, + "grad_norm": 1.7147916555404663, + "learning_rate": 0.000183270911360799, + "loss": 0.3225, + "step": 6500 + }, + { + "epoch": 0.25136105641144446, + "grad_norm": 2.388347864151001, + "learning_rate": 0.00018324517034119721, + "loss": 0.3519, + "step": 6510 + }, + { + "epoch": 0.25174717170547123, + "grad_norm": 2.470891237258911, + "learning_rate": 0.00018321942932159543, + "loss": 0.4384, + "step": 6520 + }, + { + "epoch": 0.25213328699949805, + "grad_norm": 1.4743351936340332, + "learning_rate": 0.00018319368830199367, + "loss": 0.2464, + "step": 6530 + }, + { + "epoch": 0.2525194022935248, + "grad_norm": 1.5889122486114502, + "learning_rate": 0.00018316794728239185, + "loss": 0.3149, + "step": 6540 + }, + { + "epoch": 0.25290551758755164, + "grad_norm": 4.900819778442383, + "learning_rate": 0.0001831422062627901, + "loss": 0.3978, + "step": 6550 + }, + { + "epoch": 0.25329163288157847, + "grad_norm": 5.22566556930542, + "learning_rate": 0.00018311646524318828, + "loss": 0.4473, + "step": 6560 + }, + { + "epoch": 0.25367774817560523, + "grad_norm": 4.7480363845825195, + "learning_rate": 0.0001830907242235865, + "loss": 0.3976, + "step": 6570 + }, + { + "epoch": 0.25406386346963206, + "grad_norm": 1.4711374044418335, + "learning_rate": 0.0001830649832039847, + "loss": 0.5183, + "step": 6580 + }, + { + "epoch": 0.2544499787636588, + "grad_norm": 2.237309217453003, + "learning_rate": 0.00018303924218438292, + "loss": 0.2171, + "step": 6590 + }, + { + "epoch": 0.25483609405768565, + "grad_norm": 4.107303619384766, + "learning_rate": 0.00018301350116478116, + "loss": 0.3918, + "step": 6600 + }, + { + "epoch": 0.2552222093517124, + "grad_norm": 4.7285003662109375, + "learning_rate": 0.00018298776014517935, + "loss": 0.2042, + "step": 6610 + }, + { + "epoch": 0.25560832464573924, + "grad_norm": 2.1333792209625244, 
+ "learning_rate": 0.0001829620191255776, + "loss": 0.3502, + "step": 6620 + }, + { + "epoch": 0.255994439939766, + "grad_norm": 3.062173843383789, + "learning_rate": 0.00018293627810597577, + "loss": 0.3949, + "step": 6630 + }, + { + "epoch": 0.25638055523379283, + "grad_norm": 1.538854956626892, + "learning_rate": 0.00018291053708637401, + "loss": 0.4613, + "step": 6640 + }, + { + "epoch": 0.2567666705278196, + "grad_norm": 2.546586751937866, + "learning_rate": 0.00018288479606677223, + "loss": 0.5868, + "step": 6650 + }, + { + "epoch": 0.2571527858218464, + "grad_norm": 2.7282049655914307, + "learning_rate": 0.00018285905504717041, + "loss": 0.4186, + "step": 6660 + }, + { + "epoch": 0.2575389011158732, + "grad_norm": 3.204634189605713, + "learning_rate": 0.00018283331402756865, + "loss": 0.4072, + "step": 6670 + }, + { + "epoch": 0.2579250164099, + "grad_norm": 2.421846866607666, + "learning_rate": 0.00018280757300796684, + "loss": 0.306, + "step": 6680 + }, + { + "epoch": 0.2583111317039268, + "grad_norm": 4.243416786193848, + "learning_rate": 0.00018278183198836508, + "loss": 0.2631, + "step": 6690 + }, + { + "epoch": 0.2586972469979536, + "grad_norm": 1.0495362281799316, + "learning_rate": 0.00018275609096876327, + "loss": 0.3488, + "step": 6700 + }, + { + "epoch": 0.25908336229198037, + "grad_norm": 1.915279746055603, + "learning_rate": 0.0001827303499491615, + "loss": 0.2589, + "step": 6710 + }, + { + "epoch": 0.2594694775860072, + "grad_norm": 3.724299192428589, + "learning_rate": 0.00018270460892955972, + "loss": 0.5118, + "step": 6720 + }, + { + "epoch": 0.25985559288003396, + "grad_norm": 2.832204580307007, + "learning_rate": 0.0001826788679099579, + "loss": 0.2508, + "step": 6730 + }, + { + "epoch": 0.2602417081740608, + "grad_norm": 1.1942508220672607, + "learning_rate": 0.00018265312689035615, + "loss": 0.4328, + "step": 6740 + }, + { + "epoch": 0.26062782346808755, + "grad_norm": 1.0741711854934692, + "learning_rate": 0.00018262738587075433, + "loss": 0.3514, + "step": 6750 + }, + { + "epoch": 0.2610139387621144, + "grad_norm": 2.9918277263641357, + "learning_rate": 0.00018260164485115257, + "loss": 0.3528, + "step": 6760 + }, + { + "epoch": 0.26140005405614114, + "grad_norm": 1.3773655891418457, + "learning_rate": 0.0001825759038315508, + "loss": 0.365, + "step": 6770 + }, + { + "epoch": 0.26178616935016796, + "grad_norm": 3.5288615226745605, + "learning_rate": 0.000182550162811949, + "loss": 0.3645, + "step": 6780 + }, + { + "epoch": 0.26217228464419473, + "grad_norm": 1.2178785800933838, + "learning_rate": 0.00018252442179234721, + "loss": 0.3742, + "step": 6790 + }, + { + "epoch": 0.26255839993822155, + "grad_norm": 2.7981081008911133, + "learning_rate": 0.0001824986807727454, + "loss": 0.6174, + "step": 6800 + }, + { + "epoch": 0.2629445152322484, + "grad_norm": 1.6766215562820435, + "learning_rate": 0.00018247293975314364, + "loss": 0.3028, + "step": 6810 + }, + { + "epoch": 0.26333063052627514, + "grad_norm": 3.7797629833221436, + "learning_rate": 0.00018244719873354183, + "loss": 0.2633, + "step": 6820 + }, + { + "epoch": 0.26371674582030197, + "grad_norm": 7.794743537902832, + "learning_rate": 0.00018242145771394007, + "loss": 0.3586, + "step": 6830 + }, + { + "epoch": 0.26410286111432874, + "grad_norm": 0.5704814195632935, + "learning_rate": 0.00018239571669433828, + "loss": 0.3506, + "step": 6840 + }, + { + "epoch": 0.26448897640835556, + "grad_norm": 5.771059513092041, + "learning_rate": 0.0001823699756747365, + "loss": 0.3881, + "step": 6850 + }, + { + 
"epoch": 0.2648750917023823, + "grad_norm": 2.723592519760132, + "learning_rate": 0.0001823442346551347, + "loss": 0.3955, + "step": 6860 + }, + { + "epoch": 0.26526120699640915, + "grad_norm": 1.5448215007781982, + "learning_rate": 0.0001823184936355329, + "loss": 0.495, + "step": 6870 + }, + { + "epoch": 0.2656473222904359, + "grad_norm": 2.2980363368988037, + "learning_rate": 0.00018229275261593113, + "loss": 0.2695, + "step": 6880 + }, + { + "epoch": 0.26603343758446274, + "grad_norm": 1.959811806678772, + "learning_rate": 0.00018226701159632932, + "loss": 0.383, + "step": 6890 + }, + { + "epoch": 0.2664195528784895, + "grad_norm": 2.1491482257843018, + "learning_rate": 0.00018224127057672756, + "loss": 0.5655, + "step": 6900 + }, + { + "epoch": 0.26680566817251633, + "grad_norm": 6.472841262817383, + "learning_rate": 0.00018221552955712577, + "loss": 0.4757, + "step": 6910 + }, + { + "epoch": 0.2671917834665431, + "grad_norm": 7.878561496734619, + "learning_rate": 0.000182189788537524, + "loss": 0.3944, + "step": 6920 + }, + { + "epoch": 0.2675778987605699, + "grad_norm": 0.052701435983181, + "learning_rate": 0.0001821640475179222, + "loss": 0.382, + "step": 6930 + }, + { + "epoch": 0.2679640140545967, + "grad_norm": 2.294677972793579, + "learning_rate": 0.00018213830649832039, + "loss": 0.2932, + "step": 6940 + }, + { + "epoch": 0.2683501293486235, + "grad_norm": 1.6058757305145264, + "learning_rate": 0.00018211256547871863, + "loss": 0.4438, + "step": 6950 + }, + { + "epoch": 0.2687362446426503, + "grad_norm": 4.003495693206787, + "learning_rate": 0.00018208682445911684, + "loss": 0.5945, + "step": 6960 + }, + { + "epoch": 0.2691223599366771, + "grad_norm": 1.423017144203186, + "learning_rate": 0.00018206108343951505, + "loss": 0.4356, + "step": 6970 + }, + { + "epoch": 0.26950847523070387, + "grad_norm": 2.206341028213501, + "learning_rate": 0.00018203534241991327, + "loss": 0.344, + "step": 6980 + }, + { + "epoch": 0.2698945905247307, + "grad_norm": 0.6644784212112427, + "learning_rate": 0.00018200960140031148, + "loss": 0.4988, + "step": 6990 + }, + { + "epoch": 0.27028070581875746, + "grad_norm": 2.4569833278656006, + "learning_rate": 0.0001819838603807097, + "loss": 0.3689, + "step": 7000 + }, + { + "epoch": 0.2706668211127843, + "grad_norm": 1.554567575454712, + "learning_rate": 0.00018195811936110788, + "loss": 0.4684, + "step": 7010 + }, + { + "epoch": 0.27105293640681105, + "grad_norm": 3.2556328773498535, + "learning_rate": 0.00018193237834150612, + "loss": 0.611, + "step": 7020 + }, + { + "epoch": 0.2714390517008379, + "grad_norm": 2.9123427867889404, + "learning_rate": 0.00018190663732190433, + "loss": 0.4278, + "step": 7030 + }, + { + "epoch": 0.27182516699486464, + "grad_norm": 2.159273862838745, + "learning_rate": 0.00018188089630230255, + "loss": 0.2384, + "step": 7040 + }, + { + "epoch": 0.27221128228889147, + "grad_norm": 3.4977822303771973, + "learning_rate": 0.00018185515528270076, + "loss": 0.5459, + "step": 7050 + }, + { + "epoch": 0.27259739758291823, + "grad_norm": 1.1822031736373901, + "learning_rate": 0.00018182941426309897, + "loss": 0.4364, + "step": 7060 + }, + { + "epoch": 0.27298351287694506, + "grad_norm": 2.4467339515686035, + "learning_rate": 0.00018180367324349719, + "loss": 0.5198, + "step": 7070 + }, + { + "epoch": 0.2733696281709719, + "grad_norm": 1.0406467914581299, + "learning_rate": 0.0001817779322238954, + "loss": 0.2797, + "step": 7080 + }, + { + "epoch": 0.27375574346499865, + "grad_norm": 1.925830602645874, + "learning_rate": 
0.0001817521912042936, + "loss": 0.4898, + "step": 7090 + }, + { + "epoch": 0.27414185875902547, + "grad_norm": 3.0385682582855225, + "learning_rate": 0.00018172645018469183, + "loss": 0.3867, + "step": 7100 + }, + { + "epoch": 0.27452797405305224, + "grad_norm": 1.5285695791244507, + "learning_rate": 0.00018170070916509004, + "loss": 0.4233, + "step": 7110 + }, + { + "epoch": 0.27491408934707906, + "grad_norm": 1.266693115234375, + "learning_rate": 0.00018167496814548825, + "loss": 0.4724, + "step": 7120 + }, + { + "epoch": 0.27530020464110583, + "grad_norm": 3.371323585510254, + "learning_rate": 0.00018164922712588647, + "loss": 0.533, + "step": 7130 + }, + { + "epoch": 0.27568631993513265, + "grad_norm": 2.662691116333008, + "learning_rate": 0.00018162348610628468, + "loss": 0.3134, + "step": 7140 + }, + { + "epoch": 0.2760724352291594, + "grad_norm": 1.8977057933807373, + "learning_rate": 0.0001815977450866829, + "loss": 0.3038, + "step": 7150 + }, + { + "epoch": 0.27645855052318624, + "grad_norm": 3.1027894020080566, + "learning_rate": 0.0001815720040670811, + "loss": 0.5074, + "step": 7160 + }, + { + "epoch": 0.276844665817213, + "grad_norm": 1.2112785577774048, + "learning_rate": 0.00018154626304747932, + "loss": 0.324, + "step": 7170 + }, + { + "epoch": 0.27723078111123983, + "grad_norm": 1.6500996351242065, + "learning_rate": 0.00018152052202787753, + "loss": 0.2856, + "step": 7180 + }, + { + "epoch": 0.2776168964052666, + "grad_norm": 3.215747833251953, + "learning_rate": 0.00018149478100827575, + "loss": 0.4522, + "step": 7190 + }, + { + "epoch": 0.2780030116992934, + "grad_norm": 4.8541059494018555, + "learning_rate": 0.00018146903998867396, + "loss": 0.4106, + "step": 7200 + }, + { + "epoch": 0.2783891269933202, + "grad_norm": 2.3697152137756348, + "learning_rate": 0.00018144329896907217, + "loss": 0.2673, + "step": 7210 + }, + { + "epoch": 0.278775242287347, + "grad_norm": 2.9693639278411865, + "learning_rate": 0.00018141755794947039, + "loss": 0.3949, + "step": 7220 + }, + { + "epoch": 0.2791613575813738, + "grad_norm": 2.691817283630371, + "learning_rate": 0.0001813918169298686, + "loss": 0.3427, + "step": 7230 + }, + { + "epoch": 0.2795474728754006, + "grad_norm": 5.197331428527832, + "learning_rate": 0.0001813660759102668, + "loss": 0.4331, + "step": 7240 + }, + { + "epoch": 0.27993358816942737, + "grad_norm": 1.5799933671951294, + "learning_rate": 0.00018134033489066503, + "loss": 0.3543, + "step": 7250 + }, + { + "epoch": 0.2803197034634542, + "grad_norm": 1.3614271879196167, + "learning_rate": 0.00018131459387106324, + "loss": 0.5289, + "step": 7260 + }, + { + "epoch": 0.28070581875748096, + "grad_norm": 2.2942802906036377, + "learning_rate": 0.00018128885285146145, + "loss": 0.4318, + "step": 7270 + }, + { + "epoch": 0.2810919340515078, + "grad_norm": 1.1805604696273804, + "learning_rate": 0.00018126311183185967, + "loss": 0.4754, + "step": 7280 + }, + { + "epoch": 0.28147804934553455, + "grad_norm": 0.5108867883682251, + "learning_rate": 0.00018123737081225788, + "loss": 0.4517, + "step": 7290 + }, + { + "epoch": 0.2818641646395614, + "grad_norm": 1.1736596822738647, + "learning_rate": 0.0001812116297926561, + "loss": 0.4538, + "step": 7300 + }, + { + "epoch": 0.28225027993358814, + "grad_norm": 5.497414588928223, + "learning_rate": 0.0001811858887730543, + "loss": 0.5116, + "step": 7310 + }, + { + "epoch": 0.28263639522761497, + "grad_norm": 1.1347368955612183, + "learning_rate": 0.00018116014775345252, + "loss": 0.3848, + "step": 7320 + }, + { + "epoch": 
0.28302251052164173, + "grad_norm": 2.740715742111206, + "learning_rate": 0.00018113440673385073, + "loss": 0.3456, + "step": 7330 + }, + { + "epoch": 0.28340862581566856, + "grad_norm": 1.3853389024734497, + "learning_rate": 0.00018110866571424897, + "loss": 0.3398, + "step": 7340 + }, + { + "epoch": 0.2837947411096954, + "grad_norm": 7.493706703186035, + "learning_rate": 0.00018108292469464716, + "loss": 0.2726, + "step": 7350 + }, + { + "epoch": 0.28418085640372215, + "grad_norm": 1.81704843044281, + "learning_rate": 0.00018105718367504537, + "loss": 0.3818, + "step": 7360 + }, + { + "epoch": 0.28456697169774897, + "grad_norm": 2.4877755641937256, + "learning_rate": 0.00018103144265544359, + "loss": 0.3499, + "step": 7370 + }, + { + "epoch": 0.28495308699177574, + "grad_norm": 1.3704471588134766, + "learning_rate": 0.0001810057016358418, + "loss": 0.2346, + "step": 7380 + }, + { + "epoch": 0.28533920228580256, + "grad_norm": 2.664745569229126, + "learning_rate": 0.00018097996061624, + "loss": 0.4041, + "step": 7390 + }, + { + "epoch": 0.28572531757982933, + "grad_norm": 3.6539089679718018, + "learning_rate": 0.00018095421959663823, + "loss": 0.2885, + "step": 7400 + }, + { + "epoch": 0.28611143287385615, + "grad_norm": 0.8653857707977295, + "learning_rate": 0.00018092847857703647, + "loss": 0.3849, + "step": 7410 + }, + { + "epoch": 0.2864975481678829, + "grad_norm": 2.6319446563720703, + "learning_rate": 0.00018090273755743465, + "loss": 0.2728, + "step": 7420 + }, + { + "epoch": 0.28688366346190974, + "grad_norm": 2.3457818031311035, + "learning_rate": 0.00018087699653783287, + "loss": 0.446, + "step": 7430 + }, + { + "epoch": 0.2872697787559365, + "grad_norm": 0.8546158671379089, + "learning_rate": 0.00018085125551823108, + "loss": 0.2898, + "step": 7440 + }, + { + "epoch": 0.28765589404996333, + "grad_norm": 0.45937278866767883, + "learning_rate": 0.0001808255144986293, + "loss": 0.583, + "step": 7450 + }, + { + "epoch": 0.2880420093439901, + "grad_norm": 1.7129520177841187, + "learning_rate": 0.00018079977347902753, + "loss": 0.4908, + "step": 7460 + }, + { + "epoch": 0.2884281246380169, + "grad_norm": 4.106715679168701, + "learning_rate": 0.00018077403245942572, + "loss": 0.3373, + "step": 7470 + }, + { + "epoch": 0.2888142399320437, + "grad_norm": 3.8112800121307373, + "learning_rate": 0.00018074829143982396, + "loss": 0.392, + "step": 7480 + }, + { + "epoch": 0.2892003552260705, + "grad_norm": 0.5382593274116516, + "learning_rate": 0.00018072255042022215, + "loss": 0.2929, + "step": 7490 + }, + { + "epoch": 0.2895864705200973, + "grad_norm": 2.50888991355896, + "learning_rate": 0.00018069680940062036, + "loss": 0.3361, + "step": 7500 + }, + { + "epoch": 0.2899725858141241, + "grad_norm": 3.3544275760650635, + "learning_rate": 0.00018067106838101857, + "loss": 0.388, + "step": 7510 + }, + { + "epoch": 0.2903587011081509, + "grad_norm": 1.192386507987976, + "learning_rate": 0.00018064532736141679, + "loss": 0.4427, + "step": 7520 + }, + { + "epoch": 0.2907448164021777, + "grad_norm": 1.5527079105377197, + "learning_rate": 0.00018061958634181503, + "loss": 0.4023, + "step": 7530 + }, + { + "epoch": 0.29113093169620446, + "grad_norm": 0.67446368932724, + "learning_rate": 0.0001805938453222132, + "loss": 0.4949, + "step": 7540 + }, + { + "epoch": 0.2915170469902313, + "grad_norm": 1.6349838972091675, + "learning_rate": 0.00018056810430261145, + "loss": 0.3811, + "step": 7550 + }, + { + "epoch": 0.29190316228425806, + "grad_norm": 1.4848904609680176, + "learning_rate": 
0.00018054236328300964, + "loss": 0.3851, + "step": 7560 + }, + { + "epoch": 0.2922892775782849, + "grad_norm": 0.9933151006698608, + "learning_rate": 0.00018051662226340785, + "loss": 0.4699, + "step": 7570 + }, + { + "epoch": 0.29267539287231165, + "grad_norm": 1.1026233434677124, + "learning_rate": 0.00018049088124380607, + "loss": 0.3287, + "step": 7580 + }, + { + "epoch": 0.29306150816633847, + "grad_norm": 1.232954740524292, + "learning_rate": 0.00018046514022420428, + "loss": 0.3722, + "step": 7590 + }, + { + "epoch": 0.2934476234603653, + "grad_norm": 3.8303146362304688, + "learning_rate": 0.00018043939920460252, + "loss": 0.2985, + "step": 7600 + }, + { + "epoch": 0.29383373875439206, + "grad_norm": 1.9358845949172974, + "learning_rate": 0.0001804136581850007, + "loss": 0.4361, + "step": 7610 + }, + { + "epoch": 0.2942198540484189, + "grad_norm": 1.8905962705612183, + "learning_rate": 0.00018038791716539895, + "loss": 0.2835, + "step": 7620 + }, + { + "epoch": 0.29460596934244565, + "grad_norm": 1.9965651035308838, + "learning_rate": 0.00018036217614579713, + "loss": 0.5387, + "step": 7630 + }, + { + "epoch": 0.2949920846364725, + "grad_norm": 4.204270839691162, + "learning_rate": 0.00018033643512619535, + "loss": 0.3498, + "step": 7640 + }, + { + "epoch": 0.29537819993049924, + "grad_norm": 1.4732340574264526, + "learning_rate": 0.00018031069410659359, + "loss": 0.315, + "step": 7650 + }, + { + "epoch": 0.29576431522452606, + "grad_norm": 1.0233594179153442, + "learning_rate": 0.00018028495308699177, + "loss": 0.1536, + "step": 7660 + }, + { + "epoch": 0.29615043051855283, + "grad_norm": 3.1531457901000977, + "learning_rate": 0.00018025921206739, + "loss": 0.3793, + "step": 7670 + }, + { + "epoch": 0.29653654581257965, + "grad_norm": 0.8080945014953613, + "learning_rate": 0.0001802334710477882, + "loss": 0.5589, + "step": 7680 + }, + { + "epoch": 0.2969226611066064, + "grad_norm": 3.1202728748321533, + "learning_rate": 0.00018020773002818644, + "loss": 0.4652, + "step": 7690 + }, + { + "epoch": 0.29730877640063325, + "grad_norm": 2.5934784412384033, + "learning_rate": 0.00018018198900858463, + "loss": 0.4921, + "step": 7700 + }, + { + "epoch": 0.29769489169466, + "grad_norm": 2.858642101287842, + "learning_rate": 0.00018015624798898284, + "loss": 0.2732, + "step": 7710 + }, + { + "epoch": 0.29808100698868684, + "grad_norm": 3.621229887008667, + "learning_rate": 0.00018013050696938108, + "loss": 0.5639, + "step": 7720 + }, + { + "epoch": 0.2984671222827136, + "grad_norm": 3.7943220138549805, + "learning_rate": 0.00018010476594977926, + "loss": 0.3177, + "step": 7730 + }, + { + "epoch": 0.2988532375767404, + "grad_norm": 1.6371623277664185, + "learning_rate": 0.0001800790249301775, + "loss": 0.4211, + "step": 7740 + }, + { + "epoch": 0.2992393528707672, + "grad_norm": 1.9557713270187378, + "learning_rate": 0.0001800532839105757, + "loss": 0.4351, + "step": 7750 + }, + { + "epoch": 0.299625468164794, + "grad_norm": 2.684964895248413, + "learning_rate": 0.00018002754289097393, + "loss": 0.39, + "step": 7760 + }, + { + "epoch": 0.3000115834588208, + "grad_norm": 1.7401316165924072, + "learning_rate": 0.00018000180187137215, + "loss": 0.2844, + "step": 7770 + }, + { + "epoch": 0.3003976987528476, + "grad_norm": 0.6305844187736511, + "learning_rate": 0.00017997606085177033, + "loss": 0.2472, + "step": 7780 + }, + { + "epoch": 0.3007838140468744, + "grad_norm": 2.2880289554595947, + "learning_rate": 0.00017995031983216857, + "loss": 0.3952, + "step": 7790 + }, + { + "epoch": 
0.3011699293409012, + "grad_norm": 3.423980951309204, + "learning_rate": 0.00017992457881256676, + "loss": 0.4459, + "step": 7800 + }, + { + "epoch": 0.30155604463492797, + "grad_norm": 0.6920475363731384, + "learning_rate": 0.000179898837792965, + "loss": 0.2909, + "step": 7810 + }, + { + "epoch": 0.3019421599289548, + "grad_norm": 0.8905349373817444, + "learning_rate": 0.00017987309677336318, + "loss": 0.346, + "step": 7820 + }, + { + "epoch": 0.30232827522298156, + "grad_norm": 1.8836702108383179, + "learning_rate": 0.00017984735575376143, + "loss": 0.4038, + "step": 7830 + }, + { + "epoch": 0.3027143905170084, + "grad_norm": 2.6712753772735596, + "learning_rate": 0.00017982161473415964, + "loss": 0.3452, + "step": 7840 + }, + { + "epoch": 0.30310050581103515, + "grad_norm": 2.344122886657715, + "learning_rate": 0.00017979587371455785, + "loss": 0.5091, + "step": 7850 + }, + { + "epoch": 0.30348662110506197, + "grad_norm": 3.734415054321289, + "learning_rate": 0.00017977013269495607, + "loss": 0.3893, + "step": 7860 + }, + { + "epoch": 0.3038727363990888, + "grad_norm": 1.70572829246521, + "learning_rate": 0.00017974439167535425, + "loss": 0.4829, + "step": 7870 + }, + { + "epoch": 0.30425885169311556, + "grad_norm": 1.779189109802246, + "learning_rate": 0.0001797186506557525, + "loss": 0.5361, + "step": 7880 + }, + { + "epoch": 0.3046449669871424, + "grad_norm": 2.888803482055664, + "learning_rate": 0.00017969290963615068, + "loss": 0.4305, + "step": 7890 + }, + { + "epoch": 0.30503108228116915, + "grad_norm": 1.2247655391693115, + "learning_rate": 0.00017966716861654892, + "loss": 0.3817, + "step": 7900 + }, + { + "epoch": 0.305417197575196, + "grad_norm": 2.995152473449707, + "learning_rate": 0.00017964142759694713, + "loss": 0.4669, + "step": 7910 + }, + { + "epoch": 0.30580331286922274, + "grad_norm": 8.049060821533203, + "learning_rate": 0.00017961568657734535, + "loss": 0.6706, + "step": 7920 + }, + { + "epoch": 0.30618942816324957, + "grad_norm": 2.1181435585021973, + "learning_rate": 0.00017958994555774356, + "loss": 0.4353, + "step": 7930 + }, + { + "epoch": 0.30657554345727633, + "grad_norm": 8.394509315490723, + "learning_rate": 0.00017956420453814174, + "loss": 0.3497, + "step": 7940 + }, + { + "epoch": 0.30696165875130316, + "grad_norm": 2.5140750408172607, + "learning_rate": 0.00017953846351853998, + "loss": 0.5774, + "step": 7950 + }, + { + "epoch": 0.3073477740453299, + "grad_norm": 2.720942974090576, + "learning_rate": 0.0001795127224989382, + "loss": 0.4457, + "step": 7960 + }, + { + "epoch": 0.30773388933935675, + "grad_norm": 1.8155667781829834, + "learning_rate": 0.0001794869814793364, + "loss": 0.4155, + "step": 7970 + }, + { + "epoch": 0.3081200046333835, + "grad_norm": 1.9989752769470215, + "learning_rate": 0.00017946124045973462, + "loss": 0.3233, + "step": 7980 + }, + { + "epoch": 0.30850611992741034, + "grad_norm": 0.7483557462692261, + "learning_rate": 0.00017943549944013284, + "loss": 0.2932, + "step": 7990 + }, + { + "epoch": 0.3088922352214371, + "grad_norm": 0.5750642418861389, + "learning_rate": 0.00017940975842053105, + "loss": 0.401, + "step": 8000 + }, + { + "epoch": 0.30927835051546393, + "grad_norm": 1.2084500789642334, + "learning_rate": 0.00017938401740092924, + "loss": 0.3705, + "step": 8010 + }, + { + "epoch": 0.3096644658094907, + "grad_norm": 1.833434820175171, + "learning_rate": 0.00017935827638132748, + "loss": 0.3507, + "step": 8020 + }, + { + "epoch": 0.3100505811035175, + "grad_norm": 3.147508382797241, + "learning_rate": 
0.0001793325353617257, + "loss": 0.3255, + "step": 8030 + }, + { + "epoch": 0.3104366963975443, + "grad_norm": 2.150932788848877, + "learning_rate": 0.0001793067943421239, + "loss": 0.3401, + "step": 8040 + }, + { + "epoch": 0.3108228116915711, + "grad_norm": 3.3340635299682617, + "learning_rate": 0.00017928105332252212, + "loss": 0.3606, + "step": 8050 + }, + { + "epoch": 0.3112089269855979, + "grad_norm": 5.173205375671387, + "learning_rate": 0.00017925531230292033, + "loss": 0.1695, + "step": 8060 + }, + { + "epoch": 0.3115950422796247, + "grad_norm": 1.0863877534866333, + "learning_rate": 0.00017922957128331854, + "loss": 0.3038, + "step": 8070 + }, + { + "epoch": 0.31198115757365147, + "grad_norm": 1.5977118015289307, + "learning_rate": 0.00017920383026371676, + "loss": 0.2291, + "step": 8080 + }, + { + "epoch": 0.3123672728676783, + "grad_norm": 4.040243625640869, + "learning_rate": 0.00017917808924411497, + "loss": 0.8538, + "step": 8090 + }, + { + "epoch": 0.31275338816170506, + "grad_norm": 1.5926854610443115, + "learning_rate": 0.00017915234822451318, + "loss": 0.4733, + "step": 8100 + }, + { + "epoch": 0.3131395034557319, + "grad_norm": 1.0959421396255493, + "learning_rate": 0.0001791266072049114, + "loss": 0.6076, + "step": 8110 + }, + { + "epoch": 0.3135256187497587, + "grad_norm": 2.786085367202759, + "learning_rate": 0.0001791008661853096, + "loss": 0.3229, + "step": 8120 + }, + { + "epoch": 0.3139117340437855, + "grad_norm": 2.2573914527893066, + "learning_rate": 0.00017907512516570782, + "loss": 0.3676, + "step": 8130 + }, + { + "epoch": 0.3142978493378123, + "grad_norm": 2.271852493286133, + "learning_rate": 0.00017904938414610604, + "loss": 0.6275, + "step": 8140 + }, + { + "epoch": 0.31468396463183906, + "grad_norm": 1.9762821197509766, + "learning_rate": 0.00017902364312650425, + "loss": 0.2232, + "step": 8150 + }, + { + "epoch": 0.3150700799258659, + "grad_norm": 2.9960873126983643, + "learning_rate": 0.00017899790210690246, + "loss": 0.4739, + "step": 8160 + }, + { + "epoch": 0.31545619521989265, + "grad_norm": 1.142216682434082, + "learning_rate": 0.00017897216108730068, + "loss": 0.5983, + "step": 8170 + }, + { + "epoch": 0.3158423105139195, + "grad_norm": 1.7127768993377686, + "learning_rate": 0.0001789464200676989, + "loss": 0.4131, + "step": 8180 + }, + { + "epoch": 0.31622842580794625, + "grad_norm": 1.579793095588684, + "learning_rate": 0.0001789206790480971, + "loss": 0.3119, + "step": 8190 + }, + { + "epoch": 0.31661454110197307, + "grad_norm": 0.9647886157035828, + "learning_rate": 0.00017889493802849532, + "loss": 0.398, + "step": 8200 + }, + { + "epoch": 0.31700065639599984, + "grad_norm": 3.435312032699585, + "learning_rate": 0.00017886919700889353, + "loss": 0.405, + "step": 8210 + }, + { + "epoch": 0.31738677169002666, + "grad_norm": 2.1500205993652344, + "learning_rate": 0.00017884345598929174, + "loss": 0.2519, + "step": 8220 + }, + { + "epoch": 0.3177728869840534, + "grad_norm": 1.3107216358184814, + "learning_rate": 0.00017881771496968996, + "loss": 0.2846, + "step": 8230 + }, + { + "epoch": 0.31815900227808025, + "grad_norm": 0.1899029165506363, + "learning_rate": 0.00017879197395008817, + "loss": 0.4597, + "step": 8240 + }, + { + "epoch": 0.318545117572107, + "grad_norm": 2.329299211502075, + "learning_rate": 0.00017876623293048638, + "loss": 0.6523, + "step": 8250 + }, + { + "epoch": 0.31893123286613384, + "grad_norm": 0.41523978114128113, + "learning_rate": 0.0001787404919108846, + "loss": 0.3266, + "step": 8260 + }, + { + "epoch": 
0.3193173481601606, + "grad_norm": 0.7914639711380005, + "learning_rate": 0.0001787147508912828, + "loss": 0.4029, + "step": 8270 + }, + { + "epoch": 0.31970346345418743, + "grad_norm": 0.6159287691116333, + "learning_rate": 0.00017868900987168102, + "loss": 0.4426, + "step": 8280 + }, + { + "epoch": 0.3200895787482142, + "grad_norm": 1.3690640926361084, + "learning_rate": 0.00017866326885207924, + "loss": 0.2974, + "step": 8290 + }, + { + "epoch": 0.320475694042241, + "grad_norm": 0.8592869639396667, + "learning_rate": 0.00017863752783247745, + "loss": 0.232, + "step": 8300 + }, + { + "epoch": 0.3208618093362678, + "grad_norm": 0.43169018626213074, + "learning_rate": 0.00017861178681287566, + "loss": 0.4033, + "step": 8310 + }, + { + "epoch": 0.3212479246302946, + "grad_norm": 0.8405828475952148, + "learning_rate": 0.00017858604579327388, + "loss": 0.3339, + "step": 8320 + }, + { + "epoch": 0.3216340399243214, + "grad_norm": 2.3412604331970215, + "learning_rate": 0.0001785603047736721, + "loss": 0.2781, + "step": 8330 + }, + { + "epoch": 0.3220201552183482, + "grad_norm": 2.412045478820801, + "learning_rate": 0.0001785345637540703, + "loss": 0.4346, + "step": 8340 + }, + { + "epoch": 0.32240627051237497, + "grad_norm": 3.626305341720581, + "learning_rate": 0.00017850882273446852, + "loss": 0.327, + "step": 8350 + }, + { + "epoch": 0.3227923858064018, + "grad_norm": 0.5645825266838074, + "learning_rate": 0.00017848308171486673, + "loss": 0.234, + "step": 8360 + }, + { + "epoch": 0.32317850110042856, + "grad_norm": 4.27307653427124, + "learning_rate": 0.00017845734069526494, + "loss": 0.5493, + "step": 8370 + }, + { + "epoch": 0.3235646163944554, + "grad_norm": 0.4511154890060425, + "learning_rate": 0.00017843159967566316, + "loss": 0.3501, + "step": 8380 + }, + { + "epoch": 0.3239507316884822, + "grad_norm": 0.314996600151062, + "learning_rate": 0.00017840585865606137, + "loss": 0.3544, + "step": 8390 + }, + { + "epoch": 0.324336846982509, + "grad_norm": 1.6546530723571777, + "learning_rate": 0.00017838011763645958, + "loss": 0.2455, + "step": 8400 + }, + { + "epoch": 0.3247229622765358, + "grad_norm": 3.2812252044677734, + "learning_rate": 0.0001783543766168578, + "loss": 0.3333, + "step": 8410 + }, + { + "epoch": 0.32510907757056257, + "grad_norm": 3.5717616081237793, + "learning_rate": 0.000178328635597256, + "loss": 0.4679, + "step": 8420 + }, + { + "epoch": 0.3254951928645894, + "grad_norm": 1.12017023563385, + "learning_rate": 0.00017830289457765422, + "loss": 0.3481, + "step": 8430 + }, + { + "epoch": 0.32588130815861616, + "grad_norm": 1.869462490081787, + "learning_rate": 0.00017827715355805244, + "loss": 0.4566, + "step": 8440 + }, + { + "epoch": 0.326267423452643, + "grad_norm": 1.4613149166107178, + "learning_rate": 0.00017825141253845065, + "loss": 0.5456, + "step": 8450 + }, + { + "epoch": 0.32665353874666975, + "grad_norm": 0.6842670440673828, + "learning_rate": 0.0001782256715188489, + "loss": 0.2776, + "step": 8460 + }, + { + "epoch": 0.32703965404069657, + "grad_norm": 2.9485504627227783, + "learning_rate": 0.00017819993049924708, + "loss": 0.3204, + "step": 8470 + }, + { + "epoch": 0.32742576933472334, + "grad_norm": 3.2084853649139404, + "learning_rate": 0.0001781741894796453, + "loss": 0.3391, + "step": 8480 + }, + { + "epoch": 0.32781188462875016, + "grad_norm": 1.9732774496078491, + "learning_rate": 0.0001781484484600435, + "loss": 0.3283, + "step": 8490 + }, + { + "epoch": 0.32819799992277693, + "grad_norm": 0.6378610134124756, + "learning_rate": 
0.00017812270744044172, + "loss": 0.4519, + "step": 8500 + }, + { + "epoch": 0.32858411521680375, + "grad_norm": 4.108947277069092, + "learning_rate": 0.00017809696642083993, + "loss": 0.4933, + "step": 8510 + }, + { + "epoch": 0.3289702305108305, + "grad_norm": 2.7623212337493896, + "learning_rate": 0.00017807122540123814, + "loss": 0.4197, + "step": 8520 + }, + { + "epoch": 0.32935634580485734, + "grad_norm": 1.8904645442962646, + "learning_rate": 0.00017804548438163638, + "loss": 0.325, + "step": 8530 + }, + { + "epoch": 0.3297424610988841, + "grad_norm": 0.5131659507751465, + "learning_rate": 0.00017801974336203457, + "loss": 0.2403, + "step": 8540 + }, + { + "epoch": 0.33012857639291093, + "grad_norm": 2.965916633605957, + "learning_rate": 0.0001779940023424328, + "loss": 0.4597, + "step": 8550 + }, + { + "epoch": 0.3305146916869377, + "grad_norm": 1.5409698486328125, + "learning_rate": 0.000177968261322831, + "loss": 0.4594, + "step": 8560 + }, + { + "epoch": 0.3309008069809645, + "grad_norm": 1.1746805906295776, + "learning_rate": 0.0001779425203032292, + "loss": 0.4581, + "step": 8570 + }, + { + "epoch": 0.3312869222749913, + "grad_norm": 4.493356227874756, + "learning_rate": 0.00017791677928362745, + "loss": 0.4699, + "step": 8580 + }, + { + "epoch": 0.3316730375690181, + "grad_norm": 3.506526470184326, + "learning_rate": 0.00017789103826402564, + "loss": 0.3974, + "step": 8590 + }, + { + "epoch": 0.3320591528630449, + "grad_norm": 2.3893234729766846, + "learning_rate": 0.00017786529724442388, + "loss": 0.2823, + "step": 8600 + }, + { + "epoch": 0.3324452681570717, + "grad_norm": 1.6228163242340088, + "learning_rate": 0.00017783955622482206, + "loss": 0.4199, + "step": 8610 + }, + { + "epoch": 0.3328313834510985, + "grad_norm": 3.2869131565093994, + "learning_rate": 0.0001778138152052203, + "loss": 0.3173, + "step": 8620 + }, + { + "epoch": 0.3332174987451253, + "grad_norm": 5.547116279602051, + "learning_rate": 0.0001777880741856185, + "loss": 0.4584, + "step": 8630 + }, + { + "epoch": 0.33360361403915206, + "grad_norm": 1.3338594436645508, + "learning_rate": 0.0001777623331660167, + "loss": 0.4235, + "step": 8640 + }, + { + "epoch": 0.3339897293331789, + "grad_norm": 1.9165093898773193, + "learning_rate": 0.00017773659214641494, + "loss": 0.2989, + "step": 8650 + }, + { + "epoch": 0.3343758446272057, + "grad_norm": 1.968935251235962, + "learning_rate": 0.00017771085112681313, + "loss": 0.4194, + "step": 8660 + }, + { + "epoch": 0.3347619599212325, + "grad_norm": 9.66997241973877, + "learning_rate": 0.00017768511010721137, + "loss": 0.5818, + "step": 8670 + }, + { + "epoch": 0.3351480752152593, + "grad_norm": 2.3636281490325928, + "learning_rate": 0.00017765936908760956, + "loss": 0.3317, + "step": 8680 + }, + { + "epoch": 0.33553419050928607, + "grad_norm": 3.3569977283477783, + "learning_rate": 0.0001776336280680078, + "loss": 0.4388, + "step": 8690 + }, + { + "epoch": 0.3359203058033129, + "grad_norm": 1.2452306747436523, + "learning_rate": 0.00017760788704840598, + "loss": 0.1368, + "step": 8700 + }, + { + "epoch": 0.33630642109733966, + "grad_norm": 0.0380173958837986, + "learning_rate": 0.0001775821460288042, + "loss": 0.3264, + "step": 8710 + }, + { + "epoch": 0.3366925363913665, + "grad_norm": 1.5271002054214478, + "learning_rate": 0.00017755640500920244, + "loss": 0.2943, + "step": 8720 + }, + { + "epoch": 0.33707865168539325, + "grad_norm": 0.9701687693595886, + "learning_rate": 0.00017753066398960062, + "loss": 0.353, + "step": 8730 + }, + { + "epoch": 
0.33746476697942007, + "grad_norm": 1.9296154975891113, + "learning_rate": 0.00017750492296999886, + "loss": 0.3776, + "step": 8740 + }, + { + "epoch": 0.33785088227344684, + "grad_norm": 1.2136276960372925, + "learning_rate": 0.00017747918195039705, + "loss": 0.5126, + "step": 8750 + }, + { + "epoch": 0.33823699756747366, + "grad_norm": 1.7323212623596191, + "learning_rate": 0.0001774534409307953, + "loss": 0.3477, + "step": 8760 + }, + { + "epoch": 0.33862311286150043, + "grad_norm": 1.164534091949463, + "learning_rate": 0.0001774276999111935, + "loss": 0.4053, + "step": 8770 + }, + { + "epoch": 0.33900922815552725, + "grad_norm": 0.42989471554756165, + "learning_rate": 0.0001774019588915917, + "loss": 0.3026, + "step": 8780 + }, + { + "epoch": 0.339395343449554, + "grad_norm": 2.357590436935425, + "learning_rate": 0.00017737621787198993, + "loss": 0.3869, + "step": 8790 + }, + { + "epoch": 0.33978145874358084, + "grad_norm": 1.9374550580978394, + "learning_rate": 0.00017735047685238812, + "loss": 0.2975, + "step": 8800 + }, + { + "epoch": 0.3401675740376076, + "grad_norm": 4.8107428550720215, + "learning_rate": 0.00017732473583278636, + "loss": 0.3959, + "step": 8810 + }, + { + "epoch": 0.34055368933163443, + "grad_norm": 1.938700556755066, + "learning_rate": 0.00017729899481318454, + "loss": 0.3726, + "step": 8820 + }, + { + "epoch": 0.3409398046256612, + "grad_norm": 3.147167682647705, + "learning_rate": 0.00017727325379358278, + "loss": 0.1828, + "step": 8830 + }, + { + "epoch": 0.341325919919688, + "grad_norm": 1.8921313285827637, + "learning_rate": 0.000177247512773981, + "loss": 0.2038, + "step": 8840 + }, + { + "epoch": 0.3417120352137148, + "grad_norm": 0.9098349213600159, + "learning_rate": 0.00017722177175437918, + "loss": 0.2853, + "step": 8850 + }, + { + "epoch": 0.3420981505077416, + "grad_norm": 2.4006853103637695, + "learning_rate": 0.00017719603073477742, + "loss": 0.6054, + "step": 8860 + }, + { + "epoch": 0.3424842658017684, + "grad_norm": 1.9303867816925049, + "learning_rate": 0.0001771702897151756, + "loss": 0.3507, + "step": 8870 + }, + { + "epoch": 0.3428703810957952, + "grad_norm": 0.49361029267311096, + "learning_rate": 0.00017714454869557385, + "loss": 0.4661, + "step": 8880 + }, + { + "epoch": 0.343256496389822, + "grad_norm": 2.542618751525879, + "learning_rate": 0.00017711880767597204, + "loss": 0.6924, + "step": 8890 + }, + { + "epoch": 0.3436426116838488, + "grad_norm": 0.5868918895721436, + "learning_rate": 0.00017709306665637028, + "loss": 0.4507, + "step": 8900 + }, + { + "epoch": 0.3440287269778756, + "grad_norm": 2.4685137271881104, + "learning_rate": 0.0001770673256367685, + "loss": 0.4538, + "step": 8910 + }, + { + "epoch": 0.3444148422719024, + "grad_norm": 2.6662702560424805, + "learning_rate": 0.00017704158461716668, + "loss": 0.6181, + "step": 8920 + }, + { + "epoch": 0.3448009575659292, + "grad_norm": 1.705103874206543, + "learning_rate": 0.00017701584359756492, + "loss": 0.481, + "step": 8930 + }, + { + "epoch": 0.345187072859956, + "grad_norm": 2.0710952281951904, + "learning_rate": 0.0001769901025779631, + "loss": 0.4357, + "step": 8940 + }, + { + "epoch": 0.3455731881539828, + "grad_norm": 3.487117290496826, + "learning_rate": 0.00017696436155836134, + "loss": 0.4572, + "step": 8950 + }, + { + "epoch": 0.34595930344800957, + "grad_norm": 3.03472900390625, + "learning_rate": 0.00017693862053875956, + "loss": 0.4437, + "step": 8960 + }, + { + "epoch": 0.3463454187420364, + "grad_norm": 1.310692548751831, + "learning_rate": 
0.00017691287951915777, + "loss": 0.4218, + "step": 8970 + }, + { + "epoch": 0.34673153403606316, + "grad_norm": 4.131219387054443, + "learning_rate": 0.00017688713849955598, + "loss": 0.445, + "step": 8980 + }, + { + "epoch": 0.34711764933009, + "grad_norm": 1.4199285507202148, + "learning_rate": 0.00017686139747995417, + "loss": 0.3093, + "step": 8990 + }, + { + "epoch": 0.34750376462411675, + "grad_norm": 1.9338914155960083, + "learning_rate": 0.0001768356564603524, + "loss": 0.275, + "step": 9000 + }, + { + "epoch": 0.3478898799181436, + "grad_norm": 3.425877332687378, + "learning_rate": 0.0001768099154407506, + "loss": 0.4621, + "step": 9010 + }, + { + "epoch": 0.34827599521217034, + "grad_norm": 3.6350486278533936, + "learning_rate": 0.00017678417442114884, + "loss": 0.3313, + "step": 9020 + }, + { + "epoch": 0.34866211050619716, + "grad_norm": 3.339202880859375, + "learning_rate": 0.00017675843340154705, + "loss": 0.5958, + "step": 9030 + }, + { + "epoch": 0.34904822580022393, + "grad_norm": 4.148682117462158, + "learning_rate": 0.00017673269238194526, + "loss": 0.2384, + "step": 9040 + }, + { + "epoch": 0.34943434109425076, + "grad_norm": 0.9697182178497314, + "learning_rate": 0.00017670695136234348, + "loss": 0.3119, + "step": 9050 + }, + { + "epoch": 0.3498204563882775, + "grad_norm": 0.53201824426651, + "learning_rate": 0.0001766812103427417, + "loss": 0.4339, + "step": 9060 + }, + { + "epoch": 0.35020657168230435, + "grad_norm": 0.9727185368537903, + "learning_rate": 0.0001766554693231399, + "loss": 0.3289, + "step": 9070 + }, + { + "epoch": 0.3505926869763311, + "grad_norm": 4.32904052734375, + "learning_rate": 0.00017662972830353812, + "loss": 0.4673, + "step": 9080 + }, + { + "epoch": 0.35097880227035794, + "grad_norm": 2.511558771133423, + "learning_rate": 0.00017660398728393633, + "loss": 0.2257, + "step": 9090 + }, + { + "epoch": 0.3513649175643847, + "grad_norm": 1.8378714323043823, + "learning_rate": 0.00017657824626433454, + "loss": 0.3977, + "step": 9100 + }, + { + "epoch": 0.3517510328584115, + "grad_norm": 1.3297137022018433, + "learning_rate": 0.00017655250524473276, + "loss": 0.3541, + "step": 9110 + }, + { + "epoch": 0.3521371481524383, + "grad_norm": 3.253089666366577, + "learning_rate": 0.00017652676422513097, + "loss": 0.6326, + "step": 9120 + }, + { + "epoch": 0.3525232634464651, + "grad_norm": 0.9691923260688782, + "learning_rate": 0.00017650102320552918, + "loss": 0.2206, + "step": 9130 + }, + { + "epoch": 0.3529093787404919, + "grad_norm": 1.570204496383667, + "learning_rate": 0.0001764752821859274, + "loss": 0.2769, + "step": 9140 + }, + { + "epoch": 0.3532954940345187, + "grad_norm": 1.9307161569595337, + "learning_rate": 0.0001764495411663256, + "loss": 0.3149, + "step": 9150 + }, + { + "epoch": 0.3536816093285455, + "grad_norm": 2.783297300338745, + "learning_rate": 0.00017642380014672382, + "loss": 0.3912, + "step": 9160 + }, + { + "epoch": 0.3540677246225723, + "grad_norm": 2.193371057510376, + "learning_rate": 0.00017639805912712204, + "loss": 0.3782, + "step": 9170 + }, + { + "epoch": 0.3544538399165991, + "grad_norm": 2.3460335731506348, + "learning_rate": 0.00017637231810752025, + "loss": 0.5051, + "step": 9180 + }, + { + "epoch": 0.3548399552106259, + "grad_norm": 2.4668326377868652, + "learning_rate": 0.00017634657708791846, + "loss": 0.2899, + "step": 9190 + }, + { + "epoch": 0.3552260705046527, + "grad_norm": 2.004683017730713, + "learning_rate": 0.00017632083606831668, + "loss": 0.3137, + "step": 9200 + }, + { + "epoch": 0.3556121857986795, 
+ "grad_norm": 6.333971977233887, + "learning_rate": 0.0001762950950487149, + "loss": 0.5027, + "step": 9210 + }, + { + "epoch": 0.3559983010927063, + "grad_norm": 1.7840352058410645, + "learning_rate": 0.0001762693540291131, + "loss": 0.3988, + "step": 9220 + }, + { + "epoch": 0.35638441638673307, + "grad_norm": 0.9257024526596069, + "learning_rate": 0.00017624361300951132, + "loss": 0.3662, + "step": 9230 + }, + { + "epoch": 0.3567705316807599, + "grad_norm": 2.582887887954712, + "learning_rate": 0.00017621787198990953, + "loss": 0.2863, + "step": 9240 + }, + { + "epoch": 0.35715664697478666, + "grad_norm": 3.119943380355835, + "learning_rate": 0.00017619213097030774, + "loss": 0.4041, + "step": 9250 + }, + { + "epoch": 0.3575427622688135, + "grad_norm": 2.2561371326446533, + "learning_rate": 0.00017616638995070596, + "loss": 0.3969, + "step": 9260 + }, + { + "epoch": 0.35792887756284025, + "grad_norm": 2.104891538619995, + "learning_rate": 0.00017614064893110417, + "loss": 0.3216, + "step": 9270 + }, + { + "epoch": 0.3583149928568671, + "grad_norm": 1.6922805309295654, + "learning_rate": 0.00017611490791150238, + "loss": 0.3828, + "step": 9280 + }, + { + "epoch": 0.35870110815089384, + "grad_norm": 1.0928469896316528, + "learning_rate": 0.0001760891668919006, + "loss": 0.3225, + "step": 9290 + }, + { + "epoch": 0.35908722344492067, + "grad_norm": 2.4089863300323486, + "learning_rate": 0.0001760634258722988, + "loss": 0.4143, + "step": 9300 + }, + { + "epoch": 0.35947333873894743, + "grad_norm": 0.5562119483947754, + "learning_rate": 0.00017603768485269702, + "loss": 0.4597, + "step": 9310 + }, + { + "epoch": 0.35985945403297426, + "grad_norm": 1.3904486894607544, + "learning_rate": 0.00017601194383309524, + "loss": 0.4462, + "step": 9320 + }, + { + "epoch": 0.360245569327001, + "grad_norm": 2.1393306255340576, + "learning_rate": 0.00017598620281349345, + "loss": 0.2613, + "step": 9330 + }, + { + "epoch": 0.36063168462102785, + "grad_norm": 1.3657029867172241, + "learning_rate": 0.00017596046179389166, + "loss": 0.4968, + "step": 9340 + }, + { + "epoch": 0.3610177999150546, + "grad_norm": 2.424880027770996, + "learning_rate": 0.00017593472077428988, + "loss": 0.5982, + "step": 9350 + }, + { + "epoch": 0.36140391520908144, + "grad_norm": 6.178807735443115, + "learning_rate": 0.0001759089797546881, + "loss": 0.5355, + "step": 9360 + }, + { + "epoch": 0.3617900305031082, + "grad_norm": 1.5572419166564941, + "learning_rate": 0.0001758832387350863, + "loss": 0.4435, + "step": 9370 + }, + { + "epoch": 0.36217614579713503, + "grad_norm": 0.46649104356765747, + "learning_rate": 0.00017585749771548452, + "loss": 0.352, + "step": 9380 + }, + { + "epoch": 0.3625622610911618, + "grad_norm": 1.9611142873764038, + "learning_rate": 0.00017583175669588276, + "loss": 0.2684, + "step": 9390 + }, + { + "epoch": 0.3629483763851886, + "grad_norm": 1.7648595571517944, + "learning_rate": 0.00017580601567628094, + "loss": 0.3186, + "step": 9400 + }, + { + "epoch": 0.3633344916792154, + "grad_norm": 1.7970843315124512, + "learning_rate": 0.00017578027465667916, + "loss": 0.5339, + "step": 9410 + }, + { + "epoch": 0.3637206069732422, + "grad_norm": 3.084897994995117, + "learning_rate": 0.00017575453363707737, + "loss": 0.5143, + "step": 9420 + }, + { + "epoch": 0.36410672226726903, + "grad_norm": 1.440626621246338, + "learning_rate": 0.00017572879261747558, + "loss": 0.4067, + "step": 9430 + }, + { + "epoch": 0.3644928375612958, + "grad_norm": 0.44918450713157654, + "learning_rate": 0.0001757030515978738, + 
"loss": 0.2306, + "step": 9440 + }, + { + "epoch": 0.3648789528553226, + "grad_norm": 2.617272138595581, + "learning_rate": 0.000175677310578272, + "loss": 0.3166, + "step": 9450 + }, + { + "epoch": 0.3652650681493494, + "grad_norm": 2.575073719024658, + "learning_rate": 0.00017565156955867025, + "loss": 0.6645, + "step": 9460 + }, + { + "epoch": 0.3656511834433762, + "grad_norm": 0.9430664777755737, + "learning_rate": 0.00017562582853906844, + "loss": 0.2753, + "step": 9470 + }, + { + "epoch": 0.366037298737403, + "grad_norm": 1.9400445222854614, + "learning_rate": 0.00017560008751946665, + "loss": 0.4689, + "step": 9480 + }, + { + "epoch": 0.3664234140314298, + "grad_norm": 4.0443220138549805, + "learning_rate": 0.00017557434649986486, + "loss": 0.5373, + "step": 9490 + }, + { + "epoch": 0.3668095293254566, + "grad_norm": 3.4999184608459473, + "learning_rate": 0.00017554860548026308, + "loss": 0.3412, + "step": 9500 + }, + { + "epoch": 0.3671956446194834, + "grad_norm": 2.2023515701293945, + "learning_rate": 0.0001755228644606613, + "loss": 0.3385, + "step": 9510 + }, + { + "epoch": 0.36758175991351016, + "grad_norm": 1.213641881942749, + "learning_rate": 0.0001754971234410595, + "loss": 0.4785, + "step": 9520 + }, + { + "epoch": 0.367967875207537, + "grad_norm": 0.4178420603275299, + "learning_rate": 0.00017547138242145774, + "loss": 0.2605, + "step": 9530 + }, + { + "epoch": 0.36835399050156376, + "grad_norm": 2.676564931869507, + "learning_rate": 0.00017544564140185593, + "loss": 0.5297, + "step": 9540 + }, + { + "epoch": 0.3687401057955906, + "grad_norm": 0.8604353070259094, + "learning_rate": 0.00017541990038225414, + "loss": 0.3983, + "step": 9550 + }, + { + "epoch": 0.36912622108961735, + "grad_norm": 1.298893690109253, + "learning_rate": 0.00017539415936265236, + "loss": 0.3229, + "step": 9560 + }, + { + "epoch": 0.36951233638364417, + "grad_norm": 4.109025478363037, + "learning_rate": 0.00017536841834305057, + "loss": 0.519, + "step": 9570 + }, + { + "epoch": 0.36989845167767094, + "grad_norm": 3.440915584564209, + "learning_rate": 0.0001753426773234488, + "loss": 0.4061, + "step": 9580 + }, + { + "epoch": 0.37028456697169776, + "grad_norm": 0.1484186202287674, + "learning_rate": 0.000175316936303847, + "loss": 0.3443, + "step": 9590 + }, + { + "epoch": 0.3706706822657245, + "grad_norm": 3.114328145980835, + "learning_rate": 0.00017529119528424524, + "loss": 0.2451, + "step": 9600 + }, + { + "epoch": 0.37105679755975135, + "grad_norm": 1.8218796253204346, + "learning_rate": 0.00017526545426464342, + "loss": 0.2511, + "step": 9610 + }, + { + "epoch": 0.3714429128537781, + "grad_norm": 1.0732795000076294, + "learning_rate": 0.00017523971324504164, + "loss": 0.1581, + "step": 9620 + }, + { + "epoch": 0.37182902814780494, + "grad_norm": 1.0567959547042847, + "learning_rate": 0.00017521397222543985, + "loss": 0.1924, + "step": 9630 + }, + { + "epoch": 0.3722151434418317, + "grad_norm": 0.3467637896537781, + "learning_rate": 0.00017518823120583806, + "loss": 0.3571, + "step": 9640 + }, + { + "epoch": 0.37260125873585853, + "grad_norm": 2.6293838024139404, + "learning_rate": 0.0001751624901862363, + "loss": 0.3282, + "step": 9650 + }, + { + "epoch": 0.3729873740298853, + "grad_norm": 1.159696102142334, + "learning_rate": 0.0001751367491666345, + "loss": 0.2636, + "step": 9660 + }, + { + "epoch": 0.3733734893239121, + "grad_norm": 0.6884826421737671, + "learning_rate": 0.00017511100814703273, + "loss": 0.2842, + "step": 9670 + }, + { + "epoch": 0.3737596046179389, + "grad_norm": 
3.789825201034546, + "learning_rate": 0.00017508526712743091, + "loss": 0.599, + "step": 9680 + }, + { + "epoch": 0.3741457199119657, + "grad_norm": 1.0705493688583374, + "learning_rate": 0.00017505952610782913, + "loss": 0.1746, + "step": 9690 + }, + { + "epoch": 0.37453183520599254, + "grad_norm": 1.8735803365707397, + "learning_rate": 0.00017503378508822734, + "loss": 0.3259, + "step": 9700 + }, + { + "epoch": 0.3749179505000193, + "grad_norm": 1.2987112998962402, + "learning_rate": 0.00017500804406862555, + "loss": 0.5738, + "step": 9710 + }, + { + "epoch": 0.3753040657940461, + "grad_norm": 1.5362507104873657, + "learning_rate": 0.0001749823030490238, + "loss": 0.3815, + "step": 9720 + }, + { + "epoch": 0.3756901810880729, + "grad_norm": 0.1640123724937439, + "learning_rate": 0.00017495656202942198, + "loss": 0.3672, + "step": 9730 + }, + { + "epoch": 0.3760762963820997, + "grad_norm": 0.6714594960212708, + "learning_rate": 0.00017493082100982022, + "loss": 0.2849, + "step": 9740 + }, + { + "epoch": 0.3764624116761265, + "grad_norm": 4.330246448516846, + "learning_rate": 0.0001749050799902184, + "loss": 0.4023, + "step": 9750 + }, + { + "epoch": 0.3768485269701533, + "grad_norm": 0.8616659641265869, + "learning_rate": 0.00017487933897061665, + "loss": 0.4434, + "step": 9760 + }, + { + "epoch": 0.3772346422641801, + "grad_norm": 2.6581578254699707, + "learning_rate": 0.00017485359795101486, + "loss": 0.4854, + "step": 9770 + }, + { + "epoch": 0.3776207575582069, + "grad_norm": 1.8269850015640259, + "learning_rate": 0.00017482785693141305, + "loss": 0.6033, + "step": 9780 + }, + { + "epoch": 0.37800687285223367, + "grad_norm": 2.256073236465454, + "learning_rate": 0.0001748021159118113, + "loss": 0.5317, + "step": 9790 + }, + { + "epoch": 0.3783929881462605, + "grad_norm": 0.8793076872825623, + "learning_rate": 0.00017477637489220947, + "loss": 0.3883, + "step": 9800 + }, + { + "epoch": 0.37877910344028726, + "grad_norm": 1.71831214427948, + "learning_rate": 0.00017475063387260772, + "loss": 0.2473, + "step": 9810 + }, + { + "epoch": 0.3791652187343141, + "grad_norm": 3.4802069664001465, + "learning_rate": 0.0001747248928530059, + "loss": 0.4847, + "step": 9820 + }, + { + "epoch": 0.37955133402834085, + "grad_norm": 5.419053077697754, + "learning_rate": 0.00017469915183340414, + "loss": 0.3668, + "step": 9830 + }, + { + "epoch": 0.37993744932236767, + "grad_norm": 1.567060112953186, + "learning_rate": 0.00017467341081380236, + "loss": 0.3342, + "step": 9840 + }, + { + "epoch": 0.38032356461639444, + "grad_norm": 3.0100274085998535, + "learning_rate": 0.00017464766979420054, + "loss": 0.476, + "step": 9850 + }, + { + "epoch": 0.38070967991042126, + "grad_norm": 0.7659344673156738, + "learning_rate": 0.00017462192877459878, + "loss": 0.2608, + "step": 9860 + }, + { + "epoch": 0.38109579520444803, + "grad_norm": 3.9540984630584717, + "learning_rate": 0.00017459618775499697, + "loss": 0.763, + "step": 9870 + }, + { + "epoch": 0.38148191049847485, + "grad_norm": 0.8768689036369324, + "learning_rate": 0.0001745704467353952, + "loss": 0.3365, + "step": 9880 + }, + { + "epoch": 0.3818680257925016, + "grad_norm": 0.9985928535461426, + "learning_rate": 0.00017454470571579342, + "loss": 0.3116, + "step": 9890 + }, + { + "epoch": 0.38225414108652844, + "grad_norm": 2.0326671600341797, + "learning_rate": 0.00017451896469619163, + "loss": 0.289, + "step": 9900 + }, + { + "epoch": 0.3826402563805552, + "grad_norm": 3.2696290016174316, + "learning_rate": 0.00017449322367658985, + "loss": 0.4097, + 
"step": 9910 + }, + { + "epoch": 0.38302637167458203, + "grad_norm": 3.048860788345337, + "learning_rate": 0.00017446748265698803, + "loss": 0.5181, + "step": 9920 + }, + { + "epoch": 0.3834124869686088, + "grad_norm": 1.7899913787841797, + "learning_rate": 0.00017444174163738627, + "loss": 0.2166, + "step": 9930 + }, + { + "epoch": 0.3837986022626356, + "grad_norm": 3.6762959957122803, + "learning_rate": 0.00017441600061778446, + "loss": 0.4971, + "step": 9940 + }, + { + "epoch": 0.3841847175566624, + "grad_norm": 0.9108519554138184, + "learning_rate": 0.0001743902595981827, + "loss": 0.4974, + "step": 9950 + }, + { + "epoch": 0.3845708328506892, + "grad_norm": 4.062527656555176, + "learning_rate": 0.00017436451857858091, + "loss": 0.4448, + "step": 9960 + }, + { + "epoch": 0.38495694814471604, + "grad_norm": 3.230902671813965, + "learning_rate": 0.00017433877755897913, + "loss": 0.2977, + "step": 9970 + }, + { + "epoch": 0.3853430634387428, + "grad_norm": 3.8190758228302, + "learning_rate": 0.00017431303653937734, + "loss": 0.4887, + "step": 9980 + }, + { + "epoch": 0.38572917873276963, + "grad_norm": 0.9079695343971252, + "learning_rate": 0.00017428729551977553, + "loss": 0.271, + "step": 9990 + }, + { + "epoch": 0.3861152940267964, + "grad_norm": 3.3730807304382324, + "learning_rate": 0.00017426155450017377, + "loss": 0.3782, + "step": 10000 + }, + { + "epoch": 0.3865014093208232, + "grad_norm": 1.07533860206604, + "learning_rate": 0.00017423581348057195, + "loss": 0.3905, + "step": 10010 + }, + { + "epoch": 0.38688752461485, + "grad_norm": 1.3856415748596191, + "learning_rate": 0.0001742100724609702, + "loss": 0.3757, + "step": 10020 + }, + { + "epoch": 0.3872736399088768, + "grad_norm": 5.751671314239502, + "learning_rate": 0.0001741843314413684, + "loss": 0.6657, + "step": 10030 + }, + { + "epoch": 0.3876597552029036, + "grad_norm": 0.6837680339813232, + "learning_rate": 0.00017415859042176662, + "loss": 0.2318, + "step": 10040 + }, + { + "epoch": 0.3880458704969304, + "grad_norm": 2.770787239074707, + "learning_rate": 0.00017413284940216483, + "loss": 0.3706, + "step": 10050 + }, + { + "epoch": 0.38843198579095717, + "grad_norm": 2.3058855533599854, + "learning_rate": 0.00017410710838256302, + "loss": 0.1641, + "step": 10060 + }, + { + "epoch": 0.388818101084984, + "grad_norm": 1.894718885421753, + "learning_rate": 0.00017408136736296126, + "loss": 0.4752, + "step": 10070 + }, + { + "epoch": 0.38920421637901076, + "grad_norm": 1.8346868753433228, + "learning_rate": 0.00017405562634335947, + "loss": 0.5007, + "step": 10080 + }, + { + "epoch": 0.3895903316730376, + "grad_norm": 5.277680397033691, + "learning_rate": 0.0001740298853237577, + "loss": 0.4399, + "step": 10090 + }, + { + "epoch": 0.38997644696706435, + "grad_norm": 1.306093692779541, + "learning_rate": 0.0001740041443041559, + "loss": 0.371, + "step": 10100 + }, + { + "epoch": 0.3903625622610912, + "grad_norm": 3.0306456089019775, + "learning_rate": 0.00017397840328455411, + "loss": 0.2515, + "step": 10110 + }, + { + "epoch": 0.39074867755511794, + "grad_norm": 0.7951543927192688, + "learning_rate": 0.00017395266226495233, + "loss": 0.3775, + "step": 10120 + }, + { + "epoch": 0.39113479284914476, + "grad_norm": 5.185150146484375, + "learning_rate": 0.00017392692124535051, + "loss": 0.3591, + "step": 10130 + }, + { + "epoch": 0.39152090814317153, + "grad_norm": 1.1718593835830688, + "learning_rate": 0.00017390118022574875, + "loss": 0.5484, + "step": 10140 + }, + { + "epoch": 0.39190702343719835, + "grad_norm": 
1.6352128982543945, + "learning_rate": 0.00017387543920614697, + "loss": 0.2817, + "step": 10150 + }, + { + "epoch": 0.3922931387312251, + "grad_norm": 2.4863786697387695, + "learning_rate": 0.00017384969818654518, + "loss": 0.4027, + "step": 10160 + }, + { + "epoch": 0.39267925402525194, + "grad_norm": 2.069805383682251, + "learning_rate": 0.0001738239571669434, + "loss": 0.3559, + "step": 10170 + }, + { + "epoch": 0.3930653693192787, + "grad_norm": 1.671980619430542, + "learning_rate": 0.0001737982161473416, + "loss": 0.4405, + "step": 10180 + }, + { + "epoch": 0.39345148461330554, + "grad_norm": 4.298947334289551, + "learning_rate": 0.00017377247512773982, + "loss": 0.3005, + "step": 10190 + }, + { + "epoch": 0.3938375999073323, + "grad_norm": 0.4142851233482361, + "learning_rate": 0.000173746734108138, + "loss": 0.4248, + "step": 10200 + }, + { + "epoch": 0.3942237152013591, + "grad_norm": 3.5962865352630615, + "learning_rate": 0.00017372099308853625, + "loss": 0.27, + "step": 10210 + }, + { + "epoch": 0.39460983049538595, + "grad_norm": 2.20154070854187, + "learning_rate": 0.00017369525206893446, + "loss": 0.2858, + "step": 10220 + }, + { + "epoch": 0.3949959457894127, + "grad_norm": 0.2400553673505783, + "learning_rate": 0.00017366951104933267, + "loss": 0.2806, + "step": 10230 + }, + { + "epoch": 0.39538206108343954, + "grad_norm": 1.817741870880127, + "learning_rate": 0.0001736437700297309, + "loss": 0.3647, + "step": 10240 + }, + { + "epoch": 0.3957681763774663, + "grad_norm": 4.890044689178467, + "learning_rate": 0.0001736180290101291, + "loss": 0.4435, + "step": 10250 + }, + { + "epoch": 0.39615429167149313, + "grad_norm": 0.3407624065876007, + "learning_rate": 0.00017359228799052731, + "loss": 0.4857, + "step": 10260 + }, + { + "epoch": 0.3965404069655199, + "grad_norm": 2.4883463382720947, + "learning_rate": 0.00017356654697092553, + "loss": 0.2667, + "step": 10270 + }, + { + "epoch": 0.3969265222595467, + "grad_norm": 2.343823194503784, + "learning_rate": 0.00017354080595132374, + "loss": 0.3711, + "step": 10280 + }, + { + "epoch": 0.3973126375535735, + "grad_norm": 0.2056214064359665, + "learning_rate": 0.00017351506493172195, + "loss": 0.2695, + "step": 10290 + }, + { + "epoch": 0.3976987528476003, + "grad_norm": 0.20321065187454224, + "learning_rate": 0.00017348932391212017, + "loss": 0.3079, + "step": 10300 + }, + { + "epoch": 0.3980848681416271, + "grad_norm": 0.7993821501731873, + "learning_rate": 0.00017346358289251838, + "loss": 0.3599, + "step": 10310 + }, + { + "epoch": 0.3984709834356539, + "grad_norm": 2.0987348556518555, + "learning_rate": 0.0001734378418729166, + "loss": 0.3259, + "step": 10320 + }, + { + "epoch": 0.39885709872968067, + "grad_norm": 2.474246025085449, + "learning_rate": 0.0001734121008533148, + "loss": 0.3398, + "step": 10330 + }, + { + "epoch": 0.3992432140237075, + "grad_norm": 2.341064214706421, + "learning_rate": 0.00017338635983371302, + "loss": 0.5264, + "step": 10340 + }, + { + "epoch": 0.39962932931773426, + "grad_norm": 1.587437629699707, + "learning_rate": 0.00017336061881411123, + "loss": 0.4228, + "step": 10350 + }, + { + "epoch": 0.4000154446117611, + "grad_norm": 0.6692029237747192, + "learning_rate": 0.00017333487779450945, + "loss": 0.3576, + "step": 10360 + }, + { + "epoch": 0.40040155990578785, + "grad_norm": 2.088212251663208, + "learning_rate": 0.00017330913677490766, + "loss": 0.3096, + "step": 10370 + }, + { + "epoch": 0.4007876751998147, + "grad_norm": 1.5051954984664917, + "learning_rate": 0.00017328339575530587, + 
"loss": 0.3753, + "step": 10380 + }, + { + "epoch": 0.40117379049384144, + "grad_norm": 2.02595591545105, + "learning_rate": 0.0001732576547357041, + "loss": 0.3339, + "step": 10390 + }, + { + "epoch": 0.40155990578786827, + "grad_norm": 1.3062909841537476, + "learning_rate": 0.0001732319137161023, + "loss": 0.4301, + "step": 10400 + }, + { + "epoch": 0.40194602108189503, + "grad_norm": 2.5890421867370605, + "learning_rate": 0.00017320617269650051, + "loss": 0.3047, + "step": 10410 + }, + { + "epoch": 0.40233213637592186, + "grad_norm": 1.5994844436645508, + "learning_rate": 0.00017318043167689873, + "loss": 0.4158, + "step": 10420 + }, + { + "epoch": 0.4027182516699486, + "grad_norm": 0.5470211505889893, + "learning_rate": 0.00017315469065729694, + "loss": 0.4513, + "step": 10430 + }, + { + "epoch": 0.40310436696397545, + "grad_norm": 2.216935634613037, + "learning_rate": 0.00017312894963769515, + "loss": 0.5123, + "step": 10440 + }, + { + "epoch": 0.4034904822580022, + "grad_norm": 2.354724645614624, + "learning_rate": 0.00017310320861809337, + "loss": 0.2804, + "step": 10450 + }, + { + "epoch": 0.40387659755202904, + "grad_norm": 4.514159202575684, + "learning_rate": 0.00017307746759849158, + "loss": 0.3317, + "step": 10460 + }, + { + "epoch": 0.4042627128460558, + "grad_norm": 0.9874318242073059, + "learning_rate": 0.0001730517265788898, + "loss": 0.1948, + "step": 10470 + }, + { + "epoch": 0.40464882814008263, + "grad_norm": 2.0725696086883545, + "learning_rate": 0.000173025985559288, + "loss": 0.3627, + "step": 10480 + }, + { + "epoch": 0.40503494343410945, + "grad_norm": 2.4061577320098877, + "learning_rate": 0.00017300024453968622, + "loss": 0.3074, + "step": 10490 + }, + { + "epoch": 0.4054210587281362, + "grad_norm": 1.3369660377502441, + "learning_rate": 0.00017297450352008443, + "loss": 0.533, + "step": 10500 + }, + { + "epoch": 0.40580717402216304, + "grad_norm": 1.2730306386947632, + "learning_rate": 0.00017294876250048265, + "loss": 0.4688, + "step": 10510 + }, + { + "epoch": 0.4061932893161898, + "grad_norm": 0.6753021478652954, + "learning_rate": 0.00017292302148088086, + "loss": 0.4427, + "step": 10520 + }, + { + "epoch": 0.40657940461021663, + "grad_norm": 1.7279945611953735, + "learning_rate": 0.0001728972804612791, + "loss": 0.4921, + "step": 10530 + }, + { + "epoch": 0.4069655199042434, + "grad_norm": 0.9288708567619324, + "learning_rate": 0.0001728715394416773, + "loss": 0.363, + "step": 10540 + }, + { + "epoch": 0.4073516351982702, + "grad_norm": 0.5325084924697876, + "learning_rate": 0.0001728457984220755, + "loss": 0.4095, + "step": 10550 + }, + { + "epoch": 0.407737750492297, + "grad_norm": 1.2030489444732666, + "learning_rate": 0.0001728200574024737, + "loss": 0.3499, + "step": 10560 + }, + { + "epoch": 0.4081238657863238, + "grad_norm": 3.8157269954681396, + "learning_rate": 0.00017279431638287193, + "loss": 0.1622, + "step": 10570 + }, + { + "epoch": 0.4085099810803506, + "grad_norm": 0.6373336911201477, + "learning_rate": 0.00017276857536327017, + "loss": 0.4657, + "step": 10580 + }, + { + "epoch": 0.4088960963743774, + "grad_norm": 2.2850074768066406, + "learning_rate": 0.00017274283434366835, + "loss": 0.3585, + "step": 10590 + }, + { + "epoch": 0.40928221166840417, + "grad_norm": 0.8831659555435181, + "learning_rate": 0.0001727170933240666, + "loss": 0.293, + "step": 10600 + }, + { + "epoch": 0.409668326962431, + "grad_norm": 5.1165995597839355, + "learning_rate": 0.00017269135230446478, + "loss": 0.6539, + "step": 10610 + }, + { + "epoch": 
0.41005444225645776, + "grad_norm": 4.901204586029053, + "learning_rate": 0.000172665611284863, + "loss": 0.4628, + "step": 10620 + }, + { + "epoch": 0.4104405575504846, + "grad_norm": 2.1492419242858887, + "learning_rate": 0.0001726398702652612, + "loss": 0.277, + "step": 10630 + }, + { + "epoch": 0.41082667284451135, + "grad_norm": 3.56510853767395, + "learning_rate": 0.00017261412924565942, + "loss": 0.4696, + "step": 10640 + }, + { + "epoch": 0.4112127881385382, + "grad_norm": 2.054769992828369, + "learning_rate": 0.00017258838822605766, + "loss": 0.4093, + "step": 10650 + }, + { + "epoch": 0.41159890343256494, + "grad_norm": 2.133474826812744, + "learning_rate": 0.00017256264720645585, + "loss": 0.3604, + "step": 10660 + }, + { + "epoch": 0.41198501872659177, + "grad_norm": 2.5062367916107178, + "learning_rate": 0.0001725369061868541, + "loss": 0.3916, + "step": 10670 + }, + { + "epoch": 0.41237113402061853, + "grad_norm": 0.431570827960968, + "learning_rate": 0.00017251116516725227, + "loss": 0.4048, + "step": 10680 + }, + { + "epoch": 0.41275724931464536, + "grad_norm": 1.2092580795288086, + "learning_rate": 0.0001724854241476505, + "loss": 0.602, + "step": 10690 + }, + { + "epoch": 0.4131433646086721, + "grad_norm": 2.712398052215576, + "learning_rate": 0.00017245968312804873, + "loss": 0.4172, + "step": 10700 + }, + { + "epoch": 0.41352947990269895, + "grad_norm": 3.914670467376709, + "learning_rate": 0.0001724339421084469, + "loss": 0.3843, + "step": 10710 + }, + { + "epoch": 0.4139155951967257, + "grad_norm": 1.7062132358551025, + "learning_rate": 0.00017240820108884515, + "loss": 0.343, + "step": 10720 + }, + { + "epoch": 0.41430171049075254, + "grad_norm": 0.5837095379829407, + "learning_rate": 0.00017238246006924334, + "loss": 0.3872, + "step": 10730 + }, + { + "epoch": 0.41468782578477936, + "grad_norm": 1.098900556564331, + "learning_rate": 0.00017235671904964158, + "loss": 0.2062, + "step": 10740 + }, + { + "epoch": 0.41507394107880613, + "grad_norm": 1.2533438205718994, + "learning_rate": 0.00017233097803003977, + "loss": 0.141, + "step": 10750 + }, + { + "epoch": 0.41546005637283295, + "grad_norm": 0.8688085079193115, + "learning_rate": 0.00017230523701043798, + "loss": 0.3686, + "step": 10760 + }, + { + "epoch": 0.4158461716668597, + "grad_norm": 1.868402361869812, + "learning_rate": 0.00017227949599083622, + "loss": 0.449, + "step": 10770 + }, + { + "epoch": 0.41623228696088654, + "grad_norm": 0.7168850898742676, + "learning_rate": 0.0001722537549712344, + "loss": 0.2317, + "step": 10780 + }, + { + "epoch": 0.4166184022549133, + "grad_norm": 3.1062309741973877, + "learning_rate": 0.00017222801395163265, + "loss": 0.4655, + "step": 10790 + }, + { + "epoch": 0.41700451754894013, + "grad_norm": 2.7296605110168457, + "learning_rate": 0.00017220227293203083, + "loss": 0.3934, + "step": 10800 + }, + { + "epoch": 0.4173906328429669, + "grad_norm": 2.3148224353790283, + "learning_rate": 0.00017217653191242907, + "loss": 0.2367, + "step": 10810 + }, + { + "epoch": 0.4177767481369937, + "grad_norm": 0.7049677968025208, + "learning_rate": 0.00017215079089282726, + "loss": 0.3157, + "step": 10820 + }, + { + "epoch": 0.4181628634310205, + "grad_norm": 3.3960344791412354, + "learning_rate": 0.00017212504987322547, + "loss": 0.4945, + "step": 10830 + }, + { + "epoch": 0.4185489787250473, + "grad_norm": 2.606316566467285, + "learning_rate": 0.0001720993088536237, + "loss": 0.4056, + "step": 10840 + }, + { + "epoch": 0.4189350940190741, + "grad_norm": 1.7469319105148315, + 
"learning_rate": 0.0001720735678340219, + "loss": 0.4176, + "step": 10850 + }, + { + "epoch": 0.4193212093131009, + "grad_norm": 0.8538552522659302, + "learning_rate": 0.00017204782681442014, + "loss": 0.3025, + "step": 10860 + }, + { + "epoch": 0.4197073246071277, + "grad_norm": 1.9576159715652466, + "learning_rate": 0.00017202208579481833, + "loss": 0.5626, + "step": 10870 + }, + { + "epoch": 0.4200934399011545, + "grad_norm": 0.8435356616973877, + "learning_rate": 0.00017199634477521657, + "loss": 0.2397, + "step": 10880 + }, + { + "epoch": 0.42047955519518126, + "grad_norm": 1.3026552200317383, + "learning_rate": 0.00017197060375561478, + "loss": 0.4793, + "step": 10890 + }, + { + "epoch": 0.4208656704892081, + "grad_norm": 1.8935116529464722, + "learning_rate": 0.00017194486273601297, + "loss": 0.2459, + "step": 10900 + }, + { + "epoch": 0.42125178578323486, + "grad_norm": 0.7297415137290955, + "learning_rate": 0.0001719191217164112, + "loss": 0.4115, + "step": 10910 + }, + { + "epoch": 0.4216379010772617, + "grad_norm": 2.730445146560669, + "learning_rate": 0.0001718933806968094, + "loss": 0.3467, + "step": 10920 + }, + { + "epoch": 0.42202401637128845, + "grad_norm": 1.5462249517440796, + "learning_rate": 0.00017186763967720763, + "loss": 0.2319, + "step": 10930 + }, + { + "epoch": 0.42241013166531527, + "grad_norm": 2.173388957977295, + "learning_rate": 0.00017184189865760582, + "loss": 0.3664, + "step": 10940 + }, + { + "epoch": 0.42279624695934204, + "grad_norm": 0.9086957573890686, + "learning_rate": 0.00017181615763800406, + "loss": 0.3928, + "step": 10950 + }, + { + "epoch": 0.42318236225336886, + "grad_norm": 1.6344754695892334, + "learning_rate": 0.00017179041661840227, + "loss": 0.32, + "step": 10960 + }, + { + "epoch": 0.4235684775473956, + "grad_norm": 3.7620887756347656, + "learning_rate": 0.00017176467559880049, + "loss": 0.3998, + "step": 10970 + }, + { + "epoch": 0.42395459284142245, + "grad_norm": 2.3914058208465576, + "learning_rate": 0.0001717389345791987, + "loss": 0.3003, + "step": 10980 + }, + { + "epoch": 0.4243407081354492, + "grad_norm": 1.1183325052261353, + "learning_rate": 0.00017171319355959689, + "loss": 0.2408, + "step": 10990 + }, + { + "epoch": 0.42472682342947604, + "grad_norm": 1.5570834875106812, + "learning_rate": 0.00017168745253999513, + "loss": 0.2638, + "step": 11000 + }, + { + "epoch": 0.42511293872350286, + "grad_norm": 1.4825866222381592, + "learning_rate": 0.0001716617115203933, + "loss": 0.2774, + "step": 11010 + }, + { + "epoch": 0.42549905401752963, + "grad_norm": 1.5424071550369263, + "learning_rate": 0.00017163597050079155, + "loss": 0.3636, + "step": 11020 + }, + { + "epoch": 0.42588516931155646, + "grad_norm": 3.9182989597320557, + "learning_rate": 0.00017161022948118977, + "loss": 0.5319, + "step": 11030 + }, + { + "epoch": 0.4262712846055832, + "grad_norm": 3.7870359420776367, + "learning_rate": 0.00017158448846158798, + "loss": 0.316, + "step": 11040 + }, + { + "epoch": 0.42665739989961005, + "grad_norm": 3.6943869590759277, + "learning_rate": 0.0001715587474419862, + "loss": 0.4336, + "step": 11050 + }, + { + "epoch": 0.4270435151936368, + "grad_norm": 1.1681898832321167, + "learning_rate": 0.00017153300642238438, + "loss": 0.328, + "step": 11060 + }, + { + "epoch": 0.42742963048766364, + "grad_norm": 3.6428277492523193, + "learning_rate": 0.00017150726540278262, + "loss": 0.2609, + "step": 11070 + }, + { + "epoch": 0.4278157457816904, + "grad_norm": 0.8946434259414673, + "learning_rate": 0.00017148152438318083, + "loss": 
0.5445, + "step": 11080 + }, + { + "epoch": 0.4282018610757172, + "grad_norm": 1.8038333654403687, + "learning_rate": 0.00017145578336357905, + "loss": 0.3548, + "step": 11090 + }, + { + "epoch": 0.428587976369744, + "grad_norm": 2.3430778980255127, + "learning_rate": 0.00017143004234397726, + "loss": 0.4831, + "step": 11100 + }, + { + "epoch": 0.4289740916637708, + "grad_norm": 1.3243132829666138, + "learning_rate": 0.00017140430132437547, + "loss": 0.5759, + "step": 11110 + }, + { + "epoch": 0.4293602069577976, + "grad_norm": 1.2575668096542358, + "learning_rate": 0.00017137856030477369, + "loss": 0.3289, + "step": 11120 + }, + { + "epoch": 0.4297463222518244, + "grad_norm": 1.5581884384155273, + "learning_rate": 0.00017135281928517187, + "loss": 0.309, + "step": 11130 + }, + { + "epoch": 0.4301324375458512, + "grad_norm": 2.736063241958618, + "learning_rate": 0.0001713270782655701, + "loss": 0.3964, + "step": 11140 + }, + { + "epoch": 0.430518552839878, + "grad_norm": 1.3930561542510986, + "learning_rate": 0.00017130133724596833, + "loss": 0.2277, + "step": 11150 + }, + { + "epoch": 0.43090466813390477, + "grad_norm": 1.7932826280593872, + "learning_rate": 0.00017127559622636654, + "loss": 0.2907, + "step": 11160 + }, + { + "epoch": 0.4312907834279316, + "grad_norm": 1.7017295360565186, + "learning_rate": 0.00017124985520676475, + "loss": 0.2986, + "step": 11170 + }, + { + "epoch": 0.43167689872195836, + "grad_norm": 1.3543587923049927, + "learning_rate": 0.00017122411418716297, + "loss": 0.4872, + "step": 11180 + }, + { + "epoch": 0.4320630140159852, + "grad_norm": 2.3927829265594482, + "learning_rate": 0.00017119837316756118, + "loss": 0.5281, + "step": 11190 + }, + { + "epoch": 0.43244912931001195, + "grad_norm": 2.916257619857788, + "learning_rate": 0.0001711726321479594, + "loss": 0.5364, + "step": 11200 + }, + { + "epoch": 0.43283524460403877, + "grad_norm": 4.717250823974609, + "learning_rate": 0.0001711468911283576, + "loss": 0.3744, + "step": 11210 + }, + { + "epoch": 0.43322135989806554, + "grad_norm": 1.806577444076538, + "learning_rate": 0.00017112115010875582, + "loss": 0.3377, + "step": 11220 + }, + { + "epoch": 0.43360747519209236, + "grad_norm": 3.186603546142578, + "learning_rate": 0.00017109540908915403, + "loss": 0.3421, + "step": 11230 + }, + { + "epoch": 0.43399359048611913, + "grad_norm": 5.388319969177246, + "learning_rate": 0.00017106966806955225, + "loss": 0.3589, + "step": 11240 + }, + { + "epoch": 0.43437970578014595, + "grad_norm": 1.368312954902649, + "learning_rate": 0.00017104392704995046, + "loss": 0.2677, + "step": 11250 + }, + { + "epoch": 0.4347658210741727, + "grad_norm": 0.9010117053985596, + "learning_rate": 0.00017101818603034867, + "loss": 0.3411, + "step": 11260 + }, + { + "epoch": 0.43515193636819954, + "grad_norm": 0.46370139718055725, + "learning_rate": 0.00017099244501074689, + "loss": 0.3531, + "step": 11270 + }, + { + "epoch": 0.43553805166222637, + "grad_norm": 2.778857469558716, + "learning_rate": 0.0001709667039911451, + "loss": 0.3953, + "step": 11280 + }, + { + "epoch": 0.43592416695625313, + "grad_norm": 0.45829036831855774, + "learning_rate": 0.0001709409629715433, + "loss": 0.3117, + "step": 11290 + }, + { + "epoch": 0.43631028225027996, + "grad_norm": 2.2053589820861816, + "learning_rate": 0.00017091522195194153, + "loss": 0.5104, + "step": 11300 + }, + { + "epoch": 0.4366963975443067, + "grad_norm": 5.166933059692383, + "learning_rate": 0.00017088948093233974, + "loss": 0.6913, + "step": 11310 + }, + { + "epoch": 
0.43708251283833355, + "grad_norm": 1.5593189001083374, + "learning_rate": 0.00017086373991273795, + "loss": 0.305, + "step": 11320 + }, + { + "epoch": 0.4374686281323603, + "grad_norm": 2.01481556892395, + "learning_rate": 0.00017083799889313617, + "loss": 0.4576, + "step": 11330 + }, + { + "epoch": 0.43785474342638714, + "grad_norm": 2.200463056564331, + "learning_rate": 0.00017081225787353438, + "loss": 0.6491, + "step": 11340 + }, + { + "epoch": 0.4382408587204139, + "grad_norm": 0.8904009461402893, + "learning_rate": 0.0001707865168539326, + "loss": 0.134, + "step": 11350 + }, + { + "epoch": 0.43862697401444073, + "grad_norm": 0.7481307983398438, + "learning_rate": 0.0001707607758343308, + "loss": 0.5552, + "step": 11360 + }, + { + "epoch": 0.4390130893084675, + "grad_norm": 1.0893138647079468, + "learning_rate": 0.00017073503481472902, + "loss": 0.2369, + "step": 11370 + }, + { + "epoch": 0.4393992046024943, + "grad_norm": 3.3567726612091064, + "learning_rate": 0.00017070929379512723, + "loss": 0.1916, + "step": 11380 + }, + { + "epoch": 0.4397853198965211, + "grad_norm": 0.5970168709754944, + "learning_rate": 0.00017068355277552545, + "loss": 0.6096, + "step": 11390 + }, + { + "epoch": 0.4401714351905479, + "grad_norm": 2.880949020385742, + "learning_rate": 0.00017065781175592366, + "loss": 0.3566, + "step": 11400 + }, + { + "epoch": 0.4405575504845747, + "grad_norm": 0.5541375279426575, + "learning_rate": 0.00017063207073632187, + "loss": 0.4147, + "step": 11410 + }, + { + "epoch": 0.4409436657786015, + "grad_norm": 4.519477844238281, + "learning_rate": 0.00017060632971672009, + "loss": 0.4912, + "step": 11420 + }, + { + "epoch": 0.44132978107262827, + "grad_norm": 1.9959009885787964, + "learning_rate": 0.0001705805886971183, + "loss": 0.295, + "step": 11430 + }, + { + "epoch": 0.4417158963666551, + "grad_norm": 3.843033790588379, + "learning_rate": 0.0001705548476775165, + "loss": 0.2451, + "step": 11440 + }, + { + "epoch": 0.44210201166068186, + "grad_norm": 1.0480101108551025, + "learning_rate": 0.00017052910665791473, + "loss": 0.3113, + "step": 11450 + }, + { + "epoch": 0.4424881269547087, + "grad_norm": 2.0960068702697754, + "learning_rate": 0.00017050336563831294, + "loss": 0.5959, + "step": 11460 + }, + { + "epoch": 0.44287424224873545, + "grad_norm": 1.3062267303466797, + "learning_rate": 0.00017047762461871115, + "loss": 0.3812, + "step": 11470 + }, + { + "epoch": 0.4432603575427623, + "grad_norm": 2.2563138008117676, + "learning_rate": 0.00017045188359910937, + "loss": 0.303, + "step": 11480 + }, + { + "epoch": 0.44364647283678904, + "grad_norm": 1.518556833267212, + "learning_rate": 0.00017042614257950758, + "loss": 0.4349, + "step": 11490 + }, + { + "epoch": 0.44403258813081586, + "grad_norm": 5.704294681549072, + "learning_rate": 0.0001704004015599058, + "loss": 0.3162, + "step": 11500 + }, + { + "epoch": 0.44441870342484263, + "grad_norm": 3.3146274089813232, + "learning_rate": 0.000170374660540304, + "loss": 0.644, + "step": 11510 + }, + { + "epoch": 0.44480481871886945, + "grad_norm": 2.285374879837036, + "learning_rate": 0.00017034891952070222, + "loss": 0.3718, + "step": 11520 + }, + { + "epoch": 0.4451909340128963, + "grad_norm": 0.07299748063087463, + "learning_rate": 0.00017032317850110043, + "loss": 0.3093, + "step": 11530 + }, + { + "epoch": 0.44557704930692305, + "grad_norm": 4.159457683563232, + "learning_rate": 0.00017029743748149865, + "loss": 0.4074, + "step": 11540 + }, + { + "epoch": 0.44596316460094987, + "grad_norm": 2.2241604328155518, + 
"learning_rate": 0.00017027169646189686, + "loss": 0.2908, + "step": 11550 + }, + { + "epoch": 0.44634927989497664, + "grad_norm": 2.2086968421936035, + "learning_rate": 0.00017024595544229507, + "loss": 0.3352, + "step": 11560 + }, + { + "epoch": 0.44673539518900346, + "grad_norm": 0.2843379080295563, + "learning_rate": 0.00017022021442269329, + "loss": 0.2548, + "step": 11570 + }, + { + "epoch": 0.4471215104830302, + "grad_norm": 0.9805948734283447, + "learning_rate": 0.00017019447340309153, + "loss": 0.4134, + "step": 11580 + }, + { + "epoch": 0.44750762577705705, + "grad_norm": 2.3669955730438232, + "learning_rate": 0.0001701687323834897, + "loss": 0.3803, + "step": 11590 + }, + { + "epoch": 0.4478937410710838, + "grad_norm": 2.4062774181365967, + "learning_rate": 0.00017014299136388792, + "loss": 0.4391, + "step": 11600 + }, + { + "epoch": 0.44827985636511064, + "grad_norm": 0.4205828607082367, + "learning_rate": 0.00017011725034428614, + "loss": 0.2793, + "step": 11610 + }, + { + "epoch": 0.4486659716591374, + "grad_norm": 1.3258132934570312, + "learning_rate": 0.00017009150932468435, + "loss": 0.4219, + "step": 11620 + }, + { + "epoch": 0.44905208695316423, + "grad_norm": 3.250332832336426, + "learning_rate": 0.00017006576830508256, + "loss": 0.3638, + "step": 11630 + }, + { + "epoch": 0.449438202247191, + "grad_norm": 1.9546891450881958, + "learning_rate": 0.00017004002728548078, + "loss": 0.5473, + "step": 11640 + }, + { + "epoch": 0.4498243175412178, + "grad_norm": 0.6501532793045044, + "learning_rate": 0.00017001428626587902, + "loss": 0.197, + "step": 11650 + }, + { + "epoch": 0.4502104328352446, + "grad_norm": 0.7169322967529297, + "learning_rate": 0.0001699885452462772, + "loss": 0.2846, + "step": 11660 + }, + { + "epoch": 0.4505965481292714, + "grad_norm": 1.4476008415222168, + "learning_rate": 0.00016996280422667545, + "loss": 0.3601, + "step": 11670 + }, + { + "epoch": 0.4509826634232982, + "grad_norm": 1.562485933303833, + "learning_rate": 0.00016993706320707363, + "loss": 0.2876, + "step": 11680 + }, + { + "epoch": 0.451368778717325, + "grad_norm": 4.490612983703613, + "learning_rate": 0.00016991132218747184, + "loss": 0.5445, + "step": 11690 + }, + { + "epoch": 0.45175489401135177, + "grad_norm": 0.9045882225036621, + "learning_rate": 0.00016988558116787009, + "loss": 0.149, + "step": 11700 + }, + { + "epoch": 0.4521410093053786, + "grad_norm": 2.2920546531677246, + "learning_rate": 0.00016985984014826827, + "loss": 0.3922, + "step": 11710 + }, + { + "epoch": 0.45252712459940536, + "grad_norm": 4.459114074707031, + "learning_rate": 0.0001698340991286665, + "loss": 0.3386, + "step": 11720 + }, + { + "epoch": 0.4529132398934322, + "grad_norm": 3.0614171028137207, + "learning_rate": 0.0001698083581090647, + "loss": 0.3618, + "step": 11730 + }, + { + "epoch": 0.45329935518745895, + "grad_norm": 1.0597162246704102, + "learning_rate": 0.00016978261708946294, + "loss": 0.2012, + "step": 11740 + }, + { + "epoch": 0.4536854704814858, + "grad_norm": 0.9142243266105652, + "learning_rate": 0.00016975687606986112, + "loss": 0.3716, + "step": 11750 + }, + { + "epoch": 0.45407158577551254, + "grad_norm": 1.7402280569076538, + "learning_rate": 0.00016973113505025934, + "loss": 0.3046, + "step": 11760 + }, + { + "epoch": 0.45445770106953937, + "grad_norm": 0.7389079928398132, + "learning_rate": 0.00016970539403065758, + "loss": 0.287, + "step": 11770 + }, + { + "epoch": 0.45484381636356613, + "grad_norm": 2.0596721172332764, + "learning_rate": 0.00016967965301105576, + "loss": 0.5124, + 
"step": 11780 + }, + { + "epoch": 0.45522993165759296, + "grad_norm": 0.26743578910827637, + "learning_rate": 0.000169653911991454, + "loss": 0.6725, + "step": 11790 + }, + { + "epoch": 0.4556160469516198, + "grad_norm": 2.812441825866699, + "learning_rate": 0.0001696281709718522, + "loss": 0.4545, + "step": 11800 + }, + { + "epoch": 0.45600216224564655, + "grad_norm": 2.9857542514801025, + "learning_rate": 0.00016960242995225043, + "loss": 0.2865, + "step": 11810 + }, + { + "epoch": 0.45638827753967337, + "grad_norm": 0.4560181200504303, + "learning_rate": 0.00016957668893264862, + "loss": 0.1623, + "step": 11820 + }, + { + "epoch": 0.45677439283370014, + "grad_norm": 2.2492222785949707, + "learning_rate": 0.00016955094791304683, + "loss": 0.2813, + "step": 11830 + }, + { + "epoch": 0.45716050812772696, + "grad_norm": 1.0125524997711182, + "learning_rate": 0.00016952520689344507, + "loss": 0.2224, + "step": 11840 + }, + { + "epoch": 0.45754662342175373, + "grad_norm": 0.4849410057067871, + "learning_rate": 0.00016949946587384326, + "loss": 0.4838, + "step": 11850 + }, + { + "epoch": 0.45793273871578055, + "grad_norm": 2.313490390777588, + "learning_rate": 0.0001694737248542415, + "loss": 0.3855, + "step": 11860 + }, + { + "epoch": 0.4583188540098073, + "grad_norm": 2.3526558876037598, + "learning_rate": 0.00016944798383463968, + "loss": 0.3081, + "step": 11870 + }, + { + "epoch": 0.45870496930383414, + "grad_norm": 0.5723626613616943, + "learning_rate": 0.00016942224281503792, + "loss": 0.3126, + "step": 11880 + }, + { + "epoch": 0.4590910845978609, + "grad_norm": 4.130553722381592, + "learning_rate": 0.00016939650179543614, + "loss": 0.2295, + "step": 11890 + }, + { + "epoch": 0.45947719989188773, + "grad_norm": 1.3537687063217163, + "learning_rate": 0.00016937076077583432, + "loss": 0.3488, + "step": 11900 + }, + { + "epoch": 0.4598633151859145, + "grad_norm": 1.843441128730774, + "learning_rate": 0.00016934501975623256, + "loss": 0.6298, + "step": 11910 + }, + { + "epoch": 0.4602494304799413, + "grad_norm": 4.685494422912598, + "learning_rate": 0.00016931927873663075, + "loss": 0.4929, + "step": 11920 + }, + { + "epoch": 0.4606355457739681, + "grad_norm": 1.5470925569534302, + "learning_rate": 0.000169293537717029, + "loss": 0.3214, + "step": 11930 + }, + { + "epoch": 0.4610216610679949, + "grad_norm": 2.0928761959075928, + "learning_rate": 0.00016926779669742718, + "loss": 0.317, + "step": 11940 + }, + { + "epoch": 0.4614077763620217, + "grad_norm": 1.0828526020050049, + "learning_rate": 0.00016924205567782542, + "loss": 0.4204, + "step": 11950 + }, + { + "epoch": 0.4617938916560485, + "grad_norm": 2.2898383140563965, + "learning_rate": 0.00016921631465822363, + "loss": 0.4149, + "step": 11960 + }, + { + "epoch": 0.4621800069500753, + "grad_norm": 1.1590880155563354, + "learning_rate": 0.00016919057363862182, + "loss": 0.2372, + "step": 11970 + }, + { + "epoch": 0.4625661222441021, + "grad_norm": 0.6889861226081848, + "learning_rate": 0.00016916483261902006, + "loss": 0.4905, + "step": 11980 + }, + { + "epoch": 0.46295223753812886, + "grad_norm": 0.7974410653114319, + "learning_rate": 0.00016913909159941824, + "loss": 0.3451, + "step": 11990 + }, + { + "epoch": 0.4633383528321557, + "grad_norm": 3.254754066467285, + "learning_rate": 0.00016911335057981648, + "loss": 0.4018, + "step": 12000 + }, + { + "epoch": 0.46372446812618245, + "grad_norm": 2.660983085632324, + "learning_rate": 0.0001690876095602147, + "loss": 0.2112, + "step": 12010 + }, + { + "epoch": 0.4641105834202093, + 
"grad_norm": 1.1055381298065186, + "learning_rate": 0.0001690618685406129, + "loss": 0.2994, + "step": 12020 + }, + { + "epoch": 0.46449669871423604, + "grad_norm": 0.26787269115448, + "learning_rate": 0.00016903612752101112, + "loss": 0.2744, + "step": 12030 + }, + { + "epoch": 0.46488281400826287, + "grad_norm": 0.8832791447639465, + "learning_rate": 0.0001690103865014093, + "loss": 0.4357, + "step": 12040 + }, + { + "epoch": 0.4652689293022897, + "grad_norm": 1.0007046461105347, + "learning_rate": 0.00016898464548180755, + "loss": 0.2873, + "step": 12050 + }, + { + "epoch": 0.46565504459631646, + "grad_norm": 2.0366733074188232, + "learning_rate": 0.00016895890446220574, + "loss": 0.3016, + "step": 12060 + }, + { + "epoch": 0.4660411598903433, + "grad_norm": 2.557812213897705, + "learning_rate": 0.00016893316344260398, + "loss": 0.6375, + "step": 12070 + }, + { + "epoch": 0.46642727518437005, + "grad_norm": 1.9911783933639526, + "learning_rate": 0.0001689074224230022, + "loss": 0.5568, + "step": 12080 + }, + { + "epoch": 0.4668133904783969, + "grad_norm": 0.6905809044837952, + "learning_rate": 0.0001688816814034004, + "loss": 0.3237, + "step": 12090 + }, + { + "epoch": 0.46719950577242364, + "grad_norm": 2.3139610290527344, + "learning_rate": 0.00016885594038379862, + "loss": 0.3656, + "step": 12100 + }, + { + "epoch": 0.46758562106645046, + "grad_norm": 1.580295443534851, + "learning_rate": 0.0001688301993641968, + "loss": 0.3134, + "step": 12110 + }, + { + "epoch": 0.46797173636047723, + "grad_norm": 1.1115995645523071, + "learning_rate": 0.00016880445834459504, + "loss": 0.6167, + "step": 12120 + }, + { + "epoch": 0.46835785165450405, + "grad_norm": 3.0928075313568115, + "learning_rate": 0.00016877871732499323, + "loss": 0.3922, + "step": 12130 + }, + { + "epoch": 0.4687439669485308, + "grad_norm": 2.277163028717041, + "learning_rate": 0.00016875297630539147, + "loss": 0.2395, + "step": 12140 + }, + { + "epoch": 0.46913008224255764, + "grad_norm": 1.6578807830810547, + "learning_rate": 0.00016872723528578968, + "loss": 0.3502, + "step": 12150 + }, + { + "epoch": 0.4695161975365844, + "grad_norm": 2.9669971466064453, + "learning_rate": 0.0001687014942661879, + "loss": 0.3278, + "step": 12160 + }, + { + "epoch": 0.46990231283061124, + "grad_norm": 3.344914674758911, + "learning_rate": 0.0001686757532465861, + "loss": 0.4856, + "step": 12170 + }, + { + "epoch": 0.470288428124638, + "grad_norm": 1.512459635734558, + "learning_rate": 0.00016865001222698432, + "loss": 0.3113, + "step": 12180 + }, + { + "epoch": 0.4706745434186648, + "grad_norm": 1.8372429609298706, + "learning_rate": 0.00016862427120738254, + "loss": 0.3534, + "step": 12190 + }, + { + "epoch": 0.4710606587126916, + "grad_norm": 2.5016984939575195, + "learning_rate": 0.00016859853018778075, + "loss": 0.2129, + "step": 12200 + }, + { + "epoch": 0.4714467740067184, + "grad_norm": 2.083526134490967, + "learning_rate": 0.00016857278916817896, + "loss": 0.3011, + "step": 12210 + }, + { + "epoch": 0.4718328893007452, + "grad_norm": 3.3518013954162598, + "learning_rate": 0.00016854704814857718, + "loss": 0.2652, + "step": 12220 + }, + { + "epoch": 0.472219004594772, + "grad_norm": 0.34668633341789246, + "learning_rate": 0.0001685213071289754, + "loss": 0.3655, + "step": 12230 + }, + { + "epoch": 0.4726051198887988, + "grad_norm": 1.0674203634262085, + "learning_rate": 0.0001684955661093736, + "loss": 0.3217, + "step": 12240 + }, + { + "epoch": 0.4729912351828256, + "grad_norm": 2.9859087467193604, + "learning_rate": 
0.00016846982508977182, + "loss": 0.447, + "step": 12250 + }, + { + "epoch": 0.47337735047685237, + "grad_norm": 0.33996835350990295, + "learning_rate": 0.00016844408407017003, + "loss": 0.1915, + "step": 12260 + }, + { + "epoch": 0.4737634657708792, + "grad_norm": 0.10328155755996704, + "learning_rate": 0.00016841834305056824, + "loss": 0.2588, + "step": 12270 + }, + { + "epoch": 0.47414958106490596, + "grad_norm": 2.0321199893951416, + "learning_rate": 0.00016839260203096646, + "loss": 0.4788, + "step": 12280 + }, + { + "epoch": 0.4745356963589328, + "grad_norm": 1.5690089464187622, + "learning_rate": 0.00016836686101136467, + "loss": 0.5198, + "step": 12290 + }, + { + "epoch": 0.47492181165295955, + "grad_norm": 1.989465355873108, + "learning_rate": 0.00016834111999176288, + "loss": 0.2554, + "step": 12300 + }, + { + "epoch": 0.47530792694698637, + "grad_norm": 1.574174404144287, + "learning_rate": 0.0001683153789721611, + "loss": 0.3703, + "step": 12310 + }, + { + "epoch": 0.4756940422410132, + "grad_norm": 2.726776599884033, + "learning_rate": 0.0001682896379525593, + "loss": 0.7426, + "step": 12320 + }, + { + "epoch": 0.47608015753503996, + "grad_norm": 0.3101334571838379, + "learning_rate": 0.00016826389693295752, + "loss": 0.1861, + "step": 12330 + }, + { + "epoch": 0.4764662728290668, + "grad_norm": 5.30327033996582, + "learning_rate": 0.00016823815591335574, + "loss": 0.4261, + "step": 12340 + }, + { + "epoch": 0.47685238812309355, + "grad_norm": 0.5553661584854126, + "learning_rate": 0.00016821241489375395, + "loss": 0.7326, + "step": 12350 + }, + { + "epoch": 0.4772385034171204, + "grad_norm": 2.2244138717651367, + "learning_rate": 0.00016818667387415216, + "loss": 0.5406, + "step": 12360 + }, + { + "epoch": 0.47762461871114714, + "grad_norm": 0.5314281582832336, + "learning_rate": 0.00016816093285455038, + "loss": 0.3765, + "step": 12370 + }, + { + "epoch": 0.47801073400517397, + "grad_norm": 3.644477128982544, + "learning_rate": 0.0001681351918349486, + "loss": 0.5519, + "step": 12380 + }, + { + "epoch": 0.47839684929920073, + "grad_norm": 1.0556128025054932, + "learning_rate": 0.0001681094508153468, + "loss": 0.2128, + "step": 12390 + }, + { + "epoch": 0.47878296459322756, + "grad_norm": 2.7807135581970215, + "learning_rate": 0.00016808370979574502, + "loss": 0.1564, + "step": 12400 + }, + { + "epoch": 0.4791690798872543, + "grad_norm": 0.7862847447395325, + "learning_rate": 0.00016805796877614323, + "loss": 0.35, + "step": 12410 + }, + { + "epoch": 0.47955519518128115, + "grad_norm": 2.7445156574249268, + "learning_rate": 0.00016803222775654144, + "loss": 0.4167, + "step": 12420 + }, + { + "epoch": 0.4799413104753079, + "grad_norm": 0.6173526644706726, + "learning_rate": 0.00016800648673693966, + "loss": 0.3669, + "step": 12430 + }, + { + "epoch": 0.48032742576933474, + "grad_norm": 0.3762228488922119, + "learning_rate": 0.00016798074571733787, + "loss": 0.179, + "step": 12440 + }, + { + "epoch": 0.4807135410633615, + "grad_norm": 1.2806499004364014, + "learning_rate": 0.00016795500469773608, + "loss": 0.3926, + "step": 12450 + }, + { + "epoch": 0.48109965635738833, + "grad_norm": 1.0579105615615845, + "learning_rate": 0.0001679292636781343, + "loss": 0.3329, + "step": 12460 + }, + { + "epoch": 0.4814857716514151, + "grad_norm": 1.6502699851989746, + "learning_rate": 0.0001679035226585325, + "loss": 0.3149, + "step": 12470 + }, + { + "epoch": 0.4818718869454419, + "grad_norm": 0.8876189589500427, + "learning_rate": 0.00016787778163893072, + "loss": 0.3262, + "step": 12480 + 
}, + { + "epoch": 0.4822580022394687, + "grad_norm": 2.4398770332336426, + "learning_rate": 0.00016785204061932894, + "loss": 0.3834, + "step": 12490 + }, + { + "epoch": 0.4826441175334955, + "grad_norm": 1.275148868560791, + "learning_rate": 0.00016782629959972715, + "loss": 0.3749, + "step": 12500 + }, + { + "epoch": 0.4830302328275223, + "grad_norm": 2.1397783756256104, + "learning_rate": 0.0001678005585801254, + "loss": 0.4726, + "step": 12510 + }, + { + "epoch": 0.4834163481215491, + "grad_norm": 1.6431002616882324, + "learning_rate": 0.00016777481756052358, + "loss": 0.3537, + "step": 12520 + }, + { + "epoch": 0.48380246341557587, + "grad_norm": 1.1193108558654785, + "learning_rate": 0.0001677490765409218, + "loss": 0.3182, + "step": 12530 + }, + { + "epoch": 0.4841885787096027, + "grad_norm": 1.365897297859192, + "learning_rate": 0.00016772333552132, + "loss": 0.3455, + "step": 12540 + }, + { + "epoch": 0.48457469400362946, + "grad_norm": 0.9035172462463379, + "learning_rate": 0.00016769759450171822, + "loss": 0.2268, + "step": 12550 + }, + { + "epoch": 0.4849608092976563, + "grad_norm": 0.15492293238639832, + "learning_rate": 0.00016767185348211643, + "loss": 0.319, + "step": 12560 + }, + { + "epoch": 0.4853469245916831, + "grad_norm": 0.19394727051258087, + "learning_rate": 0.00016764611246251464, + "loss": 0.2073, + "step": 12570 + }, + { + "epoch": 0.48573303988570987, + "grad_norm": 1.4406816959381104, + "learning_rate": 0.00016762037144291288, + "loss": 0.4538, + "step": 12580 + }, + { + "epoch": 0.4861191551797367, + "grad_norm": 2.741548538208008, + "learning_rate": 0.00016759463042331107, + "loss": 0.536, + "step": 12590 + }, + { + "epoch": 0.48650527047376346, + "grad_norm": 0.5203917622566223, + "learning_rate": 0.00016756888940370928, + "loss": 0.2484, + "step": 12600 + }, + { + "epoch": 0.4868913857677903, + "grad_norm": 0.9666195511817932, + "learning_rate": 0.0001675431483841075, + "loss": 0.3834, + "step": 12610 + }, + { + "epoch": 0.48727750106181705, + "grad_norm": 1.7210304737091064, + "learning_rate": 0.0001675174073645057, + "loss": 0.3223, + "step": 12620 + }, + { + "epoch": 0.4876636163558439, + "grad_norm": 0.6125622987747192, + "learning_rate": 0.00016749166634490392, + "loss": 0.394, + "step": 12630 + }, + { + "epoch": 0.48804973164987064, + "grad_norm": 1.9612951278686523, + "learning_rate": 0.00016746592532530214, + "loss": 0.3367, + "step": 12640 + }, + { + "epoch": 0.48843584694389747, + "grad_norm": 1.5395468473434448, + "learning_rate": 0.00016744018430570038, + "loss": 0.5441, + "step": 12650 + }, + { + "epoch": 0.48882196223792423, + "grad_norm": 0.8706358671188354, + "learning_rate": 0.00016741444328609856, + "loss": 0.422, + "step": 12660 + }, + { + "epoch": 0.48920807753195106, + "grad_norm": 4.338102340698242, + "learning_rate": 0.00016738870226649678, + "loss": 0.3932, + "step": 12670 + }, + { + "epoch": 0.4895941928259778, + "grad_norm": 0.9501354694366455, + "learning_rate": 0.000167362961246895, + "loss": 0.563, + "step": 12680 + }, + { + "epoch": 0.48998030812000465, + "grad_norm": 1.3146884441375732, + "learning_rate": 0.0001673372202272932, + "loss": 0.5116, + "step": 12690 + }, + { + "epoch": 0.4903664234140314, + "grad_norm": 2.144622564315796, + "learning_rate": 0.00016731147920769144, + "loss": 0.4792, + "step": 12700 + }, + { + "epoch": 0.49075253870805824, + "grad_norm": 0.9871418476104736, + "learning_rate": 0.00016728573818808963, + "loss": 0.3802, + "step": 12710 + }, + { + "epoch": 0.491138654002085, + "grad_norm": 
1.710766315460205, + "learning_rate": 0.00016725999716848787, + "loss": 0.2859, + "step": 12720 + }, + { + "epoch": 0.49152476929611183, + "grad_norm": 2.528146505355835, + "learning_rate": 0.00016723425614888606, + "loss": 0.4565, + "step": 12730 + }, + { + "epoch": 0.4919108845901386, + "grad_norm": 0.8471786379814148, + "learning_rate": 0.00016720851512928427, + "loss": 0.3455, + "step": 12740 + }, + { + "epoch": 0.4922969998841654, + "grad_norm": 2.6623692512512207, + "learning_rate": 0.00016718277410968248, + "loss": 0.3687, + "step": 12750 + }, + { + "epoch": 0.4926831151781922, + "grad_norm": 2.9324758052825928, + "learning_rate": 0.0001671570330900807, + "loss": 0.2652, + "step": 12760 + }, + { + "epoch": 0.493069230472219, + "grad_norm": 1.7955294847488403, + "learning_rate": 0.00016713129207047894, + "loss": 0.3156, + "step": 12770 + }, + { + "epoch": 0.4934553457662458, + "grad_norm": 0.9923033118247986, + "learning_rate": 0.00016710555105087712, + "loss": 0.3042, + "step": 12780 + }, + { + "epoch": 0.4938414610602726, + "grad_norm": 0.9309022426605225, + "learning_rate": 0.00016707981003127536, + "loss": 0.2742, + "step": 12790 + }, + { + "epoch": 0.49422757635429937, + "grad_norm": 4.864802360534668, + "learning_rate": 0.00016705406901167355, + "loss": 0.6145, + "step": 12800 + }, + { + "epoch": 0.4946136916483262, + "grad_norm": 2.4508230686187744, + "learning_rate": 0.00016702832799207176, + "loss": 0.4218, + "step": 12810 + }, + { + "epoch": 0.49499980694235296, + "grad_norm": 2.0317444801330566, + "learning_rate": 0.00016700258697246998, + "loss": 0.5682, + "step": 12820 + }, + { + "epoch": 0.4953859222363798, + "grad_norm": 4.89669942855835, + "learning_rate": 0.0001669768459528682, + "loss": 0.5654, + "step": 12830 + }, + { + "epoch": 0.4957720375304066, + "grad_norm": 0.602165162563324, + "learning_rate": 0.00016695110493326643, + "loss": 0.2719, + "step": 12840 + }, + { + "epoch": 0.4961581528244334, + "grad_norm": 1.1574476957321167, + "learning_rate": 0.00016692536391366462, + "loss": 0.3872, + "step": 12850 + }, + { + "epoch": 0.4965442681184602, + "grad_norm": 0.4792019724845886, + "learning_rate": 0.00016689962289406286, + "loss": 0.2671, + "step": 12860 + }, + { + "epoch": 0.49693038341248696, + "grad_norm": 1.4611676931381226, + "learning_rate": 0.00016687388187446104, + "loss": 0.3378, + "step": 12870 + }, + { + "epoch": 0.4973164987065138, + "grad_norm": 1.183975338935852, + "learning_rate": 0.00016684814085485928, + "loss": 0.2645, + "step": 12880 + }, + { + "epoch": 0.49770261400054056, + "grad_norm": 2.1447482109069824, + "learning_rate": 0.0001668223998352575, + "loss": 0.1678, + "step": 12890 + }, + { + "epoch": 0.4980887292945674, + "grad_norm": 1.5829964876174927, + "learning_rate": 0.00016679665881565568, + "loss": 0.4113, + "step": 12900 + }, + { + "epoch": 0.49847484458859415, + "grad_norm": 1.329871654510498, + "learning_rate": 0.00016677091779605392, + "loss": 0.2639, + "step": 12910 + }, + { + "epoch": 0.49886095988262097, + "grad_norm": 4.797327518463135, + "learning_rate": 0.0001667451767764521, + "loss": 0.3396, + "step": 12920 + }, + { + "epoch": 0.49924707517664774, + "grad_norm": 2.5864250659942627, + "learning_rate": 0.00016671943575685035, + "loss": 0.4143, + "step": 12930 + }, + { + "epoch": 0.49963319047067456, + "grad_norm": 0.05073557794094086, + "learning_rate": 0.00016669369473724854, + "loss": 0.231, + "step": 12940 + }, + { + "epoch": 0.5000193057647013, + "grad_norm": 0.9530317783355713, + "learning_rate": 0.00016666795371764678, 
+ "loss": 0.3702, + "step": 12950 + }, + { + "epoch": 0.5004054210587281, + "grad_norm": 1.3573989868164062, + "learning_rate": 0.000166642212698045, + "loss": 0.2095, + "step": 12960 + }, + { + "epoch": 0.500791536352755, + "grad_norm": 3.2758514881134033, + "learning_rate": 0.00016661647167844318, + "loss": 0.3901, + "step": 12970 + }, + { + "epoch": 0.5011776516467817, + "grad_norm": 2.359602212905884, + "learning_rate": 0.00016659073065884142, + "loss": 0.6232, + "step": 12980 + }, + { + "epoch": 0.5015637669408085, + "grad_norm": 0.5743809938430786, + "learning_rate": 0.0001665649896392396, + "loss": 0.431, + "step": 12990 + }, + { + "epoch": 0.5019498822348353, + "grad_norm": 2.229215621948242, + "learning_rate": 0.00016653924861963784, + "loss": 0.33, + "step": 13000 + }, + { + "epoch": 0.5023359975288622, + "grad_norm": 3.1233408451080322, + "learning_rate": 0.00016651350760003606, + "loss": 0.3527, + "step": 13010 + }, + { + "epoch": 0.5027221128228889, + "grad_norm": 0.4846508204936981, + "learning_rate": 0.00016648776658043427, + "loss": 0.4514, + "step": 13020 + }, + { + "epoch": 0.5031082281169157, + "grad_norm": 0.807447612285614, + "learning_rate": 0.00016646202556083248, + "loss": 0.3643, + "step": 13030 + }, + { + "epoch": 0.5034943434109425, + "grad_norm": 3.226194143295288, + "learning_rate": 0.00016643628454123067, + "loss": 0.4803, + "step": 13040 + }, + { + "epoch": 0.5038804587049693, + "grad_norm": 6.127805233001709, + "learning_rate": 0.0001664105435216289, + "loss": 0.3501, + "step": 13050 + }, + { + "epoch": 0.5042665739989961, + "grad_norm": 1.3367782831192017, + "learning_rate": 0.0001663848025020271, + "loss": 0.2259, + "step": 13060 + }, + { + "epoch": 0.5046526892930229, + "grad_norm": 1.112602710723877, + "learning_rate": 0.00016635906148242534, + "loss": 0.3868, + "step": 13070 + }, + { + "epoch": 0.5050388045870496, + "grad_norm": 3.188282012939453, + "learning_rate": 0.00016633332046282355, + "loss": 0.2783, + "step": 13080 + }, + { + "epoch": 0.5054249198810765, + "grad_norm": 1.5972063541412354, + "learning_rate": 0.00016630757944322176, + "loss": 0.4661, + "step": 13090 + }, + { + "epoch": 0.5058110351751033, + "grad_norm": 0.8210055232048035, + "learning_rate": 0.00016628183842361998, + "loss": 0.2536, + "step": 13100 + }, + { + "epoch": 0.50619715046913, + "grad_norm": 2.279244899749756, + "learning_rate": 0.00016625609740401816, + "loss": 0.3158, + "step": 13110 + }, + { + "epoch": 0.5065832657631569, + "grad_norm": 2.9017488956451416, + "learning_rate": 0.0001662303563844164, + "loss": 0.4209, + "step": 13120 + }, + { + "epoch": 0.5069693810571837, + "grad_norm": 0.9567920565605164, + "learning_rate": 0.0001662046153648146, + "loss": 0.3869, + "step": 13130 + }, + { + "epoch": 0.5073554963512105, + "grad_norm": 1.3605408668518066, + "learning_rate": 0.00016617887434521283, + "loss": 0.3529, + "step": 13140 + }, + { + "epoch": 0.5077416116452372, + "grad_norm": 0.502921998500824, + "learning_rate": 0.00016615313332561104, + "loss": 0.2584, + "step": 13150 + }, + { + "epoch": 0.5081277269392641, + "grad_norm": 2.9092366695404053, + "learning_rate": 0.00016612739230600926, + "loss": 0.3494, + "step": 13160 + }, + { + "epoch": 0.5085138422332909, + "grad_norm": 0.28300145268440247, + "learning_rate": 0.00016610165128640747, + "loss": 0.3514, + "step": 13170 + }, + { + "epoch": 0.5088999575273176, + "grad_norm": 1.326134204864502, + "learning_rate": 0.00016607591026680566, + "loss": 0.1593, + "step": 13180 + }, + { + "epoch": 0.5092860728213444, + 
"grad_norm": 2.1484436988830566, + "learning_rate": 0.0001660501692472039, + "loss": 0.4468, + "step": 13190 + }, + { + "epoch": 0.5096721881153713, + "grad_norm": 1.9255646467208862, + "learning_rate": 0.0001660244282276021, + "loss": 0.3937, + "step": 13200 + }, + { + "epoch": 0.5100583034093981, + "grad_norm": 0.01497764140367508, + "learning_rate": 0.00016599868720800032, + "loss": 0.2795, + "step": 13210 + }, + { + "epoch": 0.5104444187034248, + "grad_norm": 0.992023766040802, + "learning_rate": 0.00016597294618839854, + "loss": 0.2389, + "step": 13220 + }, + { + "epoch": 0.5108305339974516, + "grad_norm": 1.517337441444397, + "learning_rate": 0.00016594720516879675, + "loss": 0.2821, + "step": 13230 + }, + { + "epoch": 0.5112166492914785, + "grad_norm": 2.355637311935425, + "learning_rate": 0.00016592146414919496, + "loss": 0.5727, + "step": 13240 + }, + { + "epoch": 0.5116027645855052, + "grad_norm": 2.450536012649536, + "learning_rate": 0.00016589572312959315, + "loss": 0.4887, + "step": 13250 + }, + { + "epoch": 0.511988879879532, + "grad_norm": 1.2966598272323608, + "learning_rate": 0.0001658699821099914, + "loss": 0.3605, + "step": 13260 + }, + { + "epoch": 0.5123749951735588, + "grad_norm": 1.3981765508651733, + "learning_rate": 0.0001658442410903896, + "loss": 0.5329, + "step": 13270 + }, + { + "epoch": 0.5127611104675857, + "grad_norm": 0.7260739803314209, + "learning_rate": 0.00016581850007078782, + "loss": 0.3447, + "step": 13280 + }, + { + "epoch": 0.5131472257616124, + "grad_norm": 1.1348093748092651, + "learning_rate": 0.00016579275905118603, + "loss": 0.3086, + "step": 13290 + }, + { + "epoch": 0.5135333410556392, + "grad_norm": 1.3015291690826416, + "learning_rate": 0.00016576701803158424, + "loss": 0.3411, + "step": 13300 + }, + { + "epoch": 0.513919456349666, + "grad_norm": 2.352766990661621, + "learning_rate": 0.00016574127701198246, + "loss": 0.1617, + "step": 13310 + }, + { + "epoch": 0.5143055716436928, + "grad_norm": 2.0096113681793213, + "learning_rate": 0.00016571553599238067, + "loss": 0.3168, + "step": 13320 + }, + { + "epoch": 0.5146916869377196, + "grad_norm": 0.8163488507270813, + "learning_rate": 0.00016568979497277888, + "loss": 0.3318, + "step": 13330 + }, + { + "epoch": 0.5150778022317464, + "grad_norm": 0.8218249082565308, + "learning_rate": 0.0001656640539531771, + "loss": 0.2303, + "step": 13340 + }, + { + "epoch": 0.5154639175257731, + "grad_norm": 3.5414462089538574, + "learning_rate": 0.0001656383129335753, + "loss": 0.2295, + "step": 13350 + }, + { + "epoch": 0.5158500328198, + "grad_norm": 2.176178216934204, + "learning_rate": 0.00016561257191397352, + "loss": 0.5767, + "step": 13360 + }, + { + "epoch": 0.5162361481138268, + "grad_norm": 1.4649319648742676, + "learning_rate": 0.00016558683089437174, + "loss": 0.174, + "step": 13370 + }, + { + "epoch": 0.5166222634078536, + "grad_norm": 2.107895851135254, + "learning_rate": 0.00016556108987476995, + "loss": 0.3444, + "step": 13380 + }, + { + "epoch": 0.5170083787018804, + "grad_norm": 1.3116638660430908, + "learning_rate": 0.00016553534885516816, + "loss": 0.2462, + "step": 13390 + }, + { + "epoch": 0.5173944939959072, + "grad_norm": 2.867553949356079, + "learning_rate": 0.00016550960783556638, + "loss": 0.3106, + "step": 13400 + }, + { + "epoch": 0.517780609289934, + "grad_norm": 3.4331533908843994, + "learning_rate": 0.0001654838668159646, + "loss": 0.5359, + "step": 13410 + }, + { + "epoch": 0.5181667245839607, + "grad_norm": 1.8306528329849243, + "learning_rate": 0.0001654581257963628, + 
"loss": 0.4225, + "step": 13420 + }, + { + "epoch": 0.5185528398779876, + "grad_norm": 1.242026448249817, + "learning_rate": 0.00016543238477676102, + "loss": 0.2679, + "step": 13430 + }, + { + "epoch": 0.5189389551720144, + "grad_norm": 0.5788571834564209, + "learning_rate": 0.00016540664375715923, + "loss": 0.369, + "step": 13440 + }, + { + "epoch": 0.5193250704660411, + "grad_norm": 0.22553350031375885, + "learning_rate": 0.00016538090273755744, + "loss": 0.422, + "step": 13450 + }, + { + "epoch": 0.5197111857600679, + "grad_norm": 1.6932384967803955, + "learning_rate": 0.00016535516171795566, + "loss": 0.3127, + "step": 13460 + }, + { + "epoch": 0.5200973010540948, + "grad_norm": 1.3747683763504028, + "learning_rate": 0.00016532942069835387, + "loss": 0.2751, + "step": 13470 + }, + { + "epoch": 0.5204834163481216, + "grad_norm": 2.0508596897125244, + "learning_rate": 0.00016530367967875208, + "loss": 0.4951, + "step": 13480 + }, + { + "epoch": 0.5208695316421483, + "grad_norm": 1.044503092765808, + "learning_rate": 0.0001652779386591503, + "loss": 0.2329, + "step": 13490 + }, + { + "epoch": 0.5212556469361751, + "grad_norm": 1.6400004625320435, + "learning_rate": 0.0001652521976395485, + "loss": 0.1846, + "step": 13500 + }, + { + "epoch": 0.521641762230202, + "grad_norm": 2.234170913696289, + "learning_rate": 0.00016522645661994672, + "loss": 0.5005, + "step": 13510 + }, + { + "epoch": 0.5220278775242287, + "grad_norm": 1.8167870044708252, + "learning_rate": 0.00016520071560034493, + "loss": 0.4629, + "step": 13520 + }, + { + "epoch": 0.5224139928182555, + "grad_norm": 1.7200794219970703, + "learning_rate": 0.00016517497458074315, + "loss": 0.4546, + "step": 13530 + }, + { + "epoch": 0.5228001081122823, + "grad_norm": 2.845244884490967, + "learning_rate": 0.00016514923356114136, + "loss": 0.2211, + "step": 13540 + }, + { + "epoch": 0.5231862234063092, + "grad_norm": 5.3078389167785645, + "learning_rate": 0.00016512349254153957, + "loss": 0.4903, + "step": 13550 + }, + { + "epoch": 0.5235723387003359, + "grad_norm": 8.109561920166016, + "learning_rate": 0.0001650977515219378, + "loss": 0.3624, + "step": 13560 + }, + { + "epoch": 0.5239584539943627, + "grad_norm": 0.537749171257019, + "learning_rate": 0.000165072010502336, + "loss": 0.2262, + "step": 13570 + }, + { + "epoch": 0.5243445692883895, + "grad_norm": 2.8182802200317383, + "learning_rate": 0.00016504626948273421, + "loss": 0.4173, + "step": 13580 + }, + { + "epoch": 0.5247306845824163, + "grad_norm": 0.6623479127883911, + "learning_rate": 0.00016502052846313243, + "loss": 0.2955, + "step": 13590 + }, + { + "epoch": 0.5251167998764431, + "grad_norm": 3.027707099914551, + "learning_rate": 0.00016499478744353064, + "loss": 0.5272, + "step": 13600 + }, + { + "epoch": 0.5255029151704699, + "grad_norm": 1.6830018758773804, + "learning_rate": 0.00016496904642392885, + "loss": 0.2169, + "step": 13610 + }, + { + "epoch": 0.5258890304644968, + "grad_norm": 3.0182113647460938, + "learning_rate": 0.00016494330540432707, + "loss": 0.5747, + "step": 13620 + }, + { + "epoch": 0.5262751457585235, + "grad_norm": 1.5487585067749023, + "learning_rate": 0.00016491756438472528, + "loss": 0.2585, + "step": 13630 + }, + { + "epoch": 0.5266612610525503, + "grad_norm": 1.2525122165679932, + "learning_rate": 0.0001648918233651235, + "loss": 0.3622, + "step": 13640 + }, + { + "epoch": 0.5270473763465771, + "grad_norm": 1.9910658597946167, + "learning_rate": 0.00016486608234552174, + "loss": 0.2049, + "step": 13650 + }, + { + "epoch": 0.5274334916406039, + 
"grad_norm": 0.7254251837730408, + "learning_rate": 0.00016484034132591992, + "loss": 0.3101, + "step": 13660 + }, + { + "epoch": 0.5278196069346307, + "grad_norm": 0.9839001893997192, + "learning_rate": 0.00016481460030631813, + "loss": 0.4723, + "step": 13670 + }, + { + "epoch": 0.5282057222286575, + "grad_norm": 2.176529884338379, + "learning_rate": 0.00016478885928671635, + "loss": 0.3669, + "step": 13680 + }, + { + "epoch": 0.5285918375226842, + "grad_norm": 2.534996509552002, + "learning_rate": 0.00016476311826711456, + "loss": 0.551, + "step": 13690 + }, + { + "epoch": 0.5289779528167111, + "grad_norm": 2.0100669860839844, + "learning_rate": 0.0001647373772475128, + "loss": 0.445, + "step": 13700 + }, + { + "epoch": 0.5293640681107379, + "grad_norm": 0.7759265899658203, + "learning_rate": 0.000164711636227911, + "loss": 0.2839, + "step": 13710 + }, + { + "epoch": 0.5297501834047647, + "grad_norm": 3.398287057876587, + "learning_rate": 0.00016468589520830923, + "loss": 0.3497, + "step": 13720 + }, + { + "epoch": 0.5301362986987914, + "grad_norm": 2.6792221069335938, + "learning_rate": 0.00016466015418870741, + "loss": 0.3435, + "step": 13730 + }, + { + "epoch": 0.5305224139928183, + "grad_norm": 0.7382081747055054, + "learning_rate": 0.00016463441316910563, + "loss": 0.367, + "step": 13740 + }, + { + "epoch": 0.5309085292868451, + "grad_norm": 0.9496407508850098, + "learning_rate": 0.00016460867214950384, + "loss": 0.305, + "step": 13750 + }, + { + "epoch": 0.5312946445808718, + "grad_norm": 1.2950342893600464, + "learning_rate": 0.00016458293112990205, + "loss": 0.2769, + "step": 13760 + }, + { + "epoch": 0.5316807598748986, + "grad_norm": 1.1744359731674194, + "learning_rate": 0.0001645571901103003, + "loss": 0.4776, + "step": 13770 + }, + { + "epoch": 0.5320668751689255, + "grad_norm": 1.1507617235183716, + "learning_rate": 0.00016453144909069848, + "loss": 0.4486, + "step": 13780 + }, + { + "epoch": 0.5324529904629522, + "grad_norm": 3.200432300567627, + "learning_rate": 0.00016450570807109672, + "loss": 0.4144, + "step": 13790 + }, + { + "epoch": 0.532839105756979, + "grad_norm": 0.991581916809082, + "learning_rate": 0.0001644799670514949, + "loss": 0.2314, + "step": 13800 + }, + { + "epoch": 0.5332252210510058, + "grad_norm": 4.800248622894287, + "learning_rate": 0.00016445422603189312, + "loss": 0.4601, + "step": 13810 + }, + { + "epoch": 0.5336113363450327, + "grad_norm": 1.2141329050064087, + "learning_rate": 0.00016442848501229136, + "loss": 0.257, + "step": 13820 + }, + { + "epoch": 0.5339974516390594, + "grad_norm": 0.8803738951683044, + "learning_rate": 0.00016440274399268955, + "loss": 0.4645, + "step": 13830 + }, + { + "epoch": 0.5343835669330862, + "grad_norm": 1.2020646333694458, + "learning_rate": 0.0001643770029730878, + "loss": 0.3751, + "step": 13840 + }, + { + "epoch": 0.534769682227113, + "grad_norm": 0.9887505173683167, + "learning_rate": 0.00016435126195348597, + "loss": 0.2266, + "step": 13850 + }, + { + "epoch": 0.5351557975211398, + "grad_norm": 0.38067731261253357, + "learning_rate": 0.00016432552093388421, + "loss": 0.3482, + "step": 13860 + }, + { + "epoch": 0.5355419128151666, + "grad_norm": 5.429462909698486, + "learning_rate": 0.0001642997799142824, + "loss": 0.4055, + "step": 13870 + }, + { + "epoch": 0.5359280281091934, + "grad_norm": 2.197861909866333, + "learning_rate": 0.00016427403889468061, + "loss": 0.1701, + "step": 13880 + }, + { + "epoch": 0.5363141434032203, + "grad_norm": 1.467132568359375, + "learning_rate": 0.00016424829787507885, + 
"loss": 0.3536, + "step": 13890 + }, + { + "epoch": 0.536700258697247, + "grad_norm": 2.0702550411224365, + "learning_rate": 0.00016422255685547704, + "loss": 0.5644, + "step": 13900 + }, + { + "epoch": 0.5370863739912738, + "grad_norm": 1.4855132102966309, + "learning_rate": 0.00016419681583587528, + "loss": 0.4068, + "step": 13910 + }, + { + "epoch": 0.5374724892853006, + "grad_norm": 0.6586676239967346, + "learning_rate": 0.00016417107481627347, + "loss": 0.3709, + "step": 13920 + }, + { + "epoch": 0.5378586045793274, + "grad_norm": 0.026774466037750244, + "learning_rate": 0.0001641453337966717, + "loss": 0.2635, + "step": 13930 + }, + { + "epoch": 0.5382447198733542, + "grad_norm": 0.9848103523254395, + "learning_rate": 0.0001641195927770699, + "loss": 0.249, + "step": 13940 + }, + { + "epoch": 0.538630835167381, + "grad_norm": 3.77512526512146, + "learning_rate": 0.0001640938517574681, + "loss": 0.5356, + "step": 13950 + }, + { + "epoch": 0.5390169504614077, + "grad_norm": 5.543573379516602, + "learning_rate": 0.00016406811073786635, + "loss": 0.6471, + "step": 13960 + }, + { + "epoch": 0.5394030657554346, + "grad_norm": 0.8161652684211731, + "learning_rate": 0.00016404236971826453, + "loss": 0.5691, + "step": 13970 + }, + { + "epoch": 0.5397891810494614, + "grad_norm": 1.5539859533309937, + "learning_rate": 0.00016401662869866277, + "loss": 0.4406, + "step": 13980 + }, + { + "epoch": 0.5401752963434882, + "grad_norm": 1.3035658597946167, + "learning_rate": 0.00016399088767906096, + "loss": 0.3086, + "step": 13990 + }, + { + "epoch": 0.5405614116375149, + "grad_norm": 0.9168418645858765, + "learning_rate": 0.0001639651466594592, + "loss": 0.1581, + "step": 14000 + }, + { + "epoch": 0.5409475269315418, + "grad_norm": 1.0382287502288818, + "learning_rate": 0.00016393940563985741, + "loss": 0.4723, + "step": 14010 + }, + { + "epoch": 0.5413336422255686, + "grad_norm": 2.896981716156006, + "learning_rate": 0.0001639136646202556, + "loss": 0.2999, + "step": 14020 + }, + { + "epoch": 0.5417197575195953, + "grad_norm": 0.7354179620742798, + "learning_rate": 0.00016388792360065384, + "loss": 0.4853, + "step": 14030 + }, + { + "epoch": 0.5421058728136221, + "grad_norm": 3.221067190170288, + "learning_rate": 0.00016386218258105203, + "loss": 0.3622, + "step": 14040 + }, + { + "epoch": 0.542491988107649, + "grad_norm": 6.591146469116211, + "learning_rate": 0.00016383644156145027, + "loss": 0.5803, + "step": 14050 + }, + { + "epoch": 0.5428781034016757, + "grad_norm": 3.1521377563476562, + "learning_rate": 0.00016381070054184845, + "loss": 0.3267, + "step": 14060 + }, + { + "epoch": 0.5432642186957025, + "grad_norm": 1.7890762090682983, + "learning_rate": 0.0001637849595222467, + "loss": 0.4584, + "step": 14070 + }, + { + "epoch": 0.5436503339897293, + "grad_norm": 1.6599558591842651, + "learning_rate": 0.0001637592185026449, + "loss": 0.298, + "step": 14080 + }, + { + "epoch": 0.5440364492837562, + "grad_norm": 3.521927833557129, + "learning_rate": 0.00016373347748304312, + "loss": 0.3743, + "step": 14090 + }, + { + "epoch": 0.5444225645777829, + "grad_norm": 3.8942599296569824, + "learning_rate": 0.00016370773646344133, + "loss": 0.3254, + "step": 14100 + }, + { + "epoch": 0.5448086798718097, + "grad_norm": 2.8547496795654297, + "learning_rate": 0.00016368199544383952, + "loss": 0.4073, + "step": 14110 + }, + { + "epoch": 0.5451947951658365, + "grad_norm": 1.0060430765151978, + "learning_rate": 0.00016365625442423776, + "loss": 0.1631, + "step": 14120 + }, + { + "epoch": 0.5455809104598633, + 
"grad_norm": 2.2001001834869385, + "learning_rate": 0.00016363051340463595, + "loss": 0.2854, + "step": 14130 + }, + { + "epoch": 0.5459670257538901, + "grad_norm": 1.3699944019317627, + "learning_rate": 0.0001636047723850342, + "loss": 0.456, + "step": 14140 + }, + { + "epoch": 0.5463531410479169, + "grad_norm": 2.1481733322143555, + "learning_rate": 0.0001635790313654324, + "loss": 0.4085, + "step": 14150 + }, + { + "epoch": 0.5467392563419438, + "grad_norm": 0.40439683198928833, + "learning_rate": 0.00016355329034583061, + "loss": 0.3932, + "step": 14160 + }, + { + "epoch": 0.5471253716359705, + "grad_norm": 2.2773404121398926, + "learning_rate": 0.00016352754932622883, + "loss": 0.2926, + "step": 14170 + }, + { + "epoch": 0.5475114869299973, + "grad_norm": 2.2974839210510254, + "learning_rate": 0.000163501808306627, + "loss": 0.2419, + "step": 14180 + }, + { + "epoch": 0.5478976022240241, + "grad_norm": 1.0429989099502563, + "learning_rate": 0.00016347606728702525, + "loss": 0.2559, + "step": 14190 + }, + { + "epoch": 0.5482837175180509, + "grad_norm": 0.8988879919052124, + "learning_rate": 0.00016345032626742347, + "loss": 0.1677, + "step": 14200 + }, + { + "epoch": 0.5486698328120777, + "grad_norm": 1.2740018367767334, + "learning_rate": 0.00016342458524782168, + "loss": 0.2452, + "step": 14210 + }, + { + "epoch": 0.5490559481061045, + "grad_norm": 1.789467692375183, + "learning_rate": 0.0001633988442282199, + "loss": 0.2272, + "step": 14220 + }, + { + "epoch": 0.5494420634001312, + "grad_norm": 3.2136781215667725, + "learning_rate": 0.0001633731032086181, + "loss": 0.4026, + "step": 14230 + }, + { + "epoch": 0.5498281786941581, + "grad_norm": 2.4747092723846436, + "learning_rate": 0.00016334736218901632, + "loss": 0.2371, + "step": 14240 + }, + { + "epoch": 0.5502142939881849, + "grad_norm": 1.5639567375183105, + "learning_rate": 0.0001633216211694145, + "loss": 0.2801, + "step": 14250 + }, + { + "epoch": 0.5506004092822117, + "grad_norm": 3.9598312377929688, + "learning_rate": 0.00016329588014981275, + "loss": 0.2583, + "step": 14260 + }, + { + "epoch": 0.5509865245762384, + "grad_norm": 1.5873563289642334, + "learning_rate": 0.00016327013913021096, + "loss": 0.2729, + "step": 14270 + }, + { + "epoch": 0.5513726398702653, + "grad_norm": 2.2313668727874756, + "learning_rate": 0.00016324439811060917, + "loss": 0.191, + "step": 14280 + }, + { + "epoch": 0.5517587551642921, + "grad_norm": 1.6087117195129395, + "learning_rate": 0.0001632186570910074, + "loss": 0.2698, + "step": 14290 + }, + { + "epoch": 0.5521448704583188, + "grad_norm": 6.5459675788879395, + "learning_rate": 0.0001631929160714056, + "loss": 0.3632, + "step": 14300 + }, + { + "epoch": 0.5525309857523456, + "grad_norm": 1.2121779918670654, + "learning_rate": 0.00016316717505180381, + "loss": 0.4541, + "step": 14310 + }, + { + "epoch": 0.5529171010463725, + "grad_norm": 2.7277257442474365, + "learning_rate": 0.00016314143403220203, + "loss": 0.1489, + "step": 14320 + }, + { + "epoch": 0.5533032163403993, + "grad_norm": 2.2566685676574707, + "learning_rate": 0.00016311569301260024, + "loss": 0.1838, + "step": 14330 + }, + { + "epoch": 0.553689331634426, + "grad_norm": 0.44783294200897217, + "learning_rate": 0.00016308995199299845, + "loss": 0.4745, + "step": 14340 + }, + { + "epoch": 0.5540754469284528, + "grad_norm": 1.0200363397598267, + "learning_rate": 0.00016306421097339667, + "loss": 0.1251, + "step": 14350 + }, + { + "epoch": 0.5544615622224797, + "grad_norm": 1.1761879920959473, + "learning_rate": 
0.00016303846995379488, + "loss": 0.6837, + "step": 14360 + }, + { + "epoch": 0.5548476775165064, + "grad_norm": 1.8275704383850098, + "learning_rate": 0.0001630127289341931, + "loss": 0.3968, + "step": 14370 + }, + { + "epoch": 0.5552337928105332, + "grad_norm": 0.7219232320785522, + "learning_rate": 0.0001629869879145913, + "loss": 0.3278, + "step": 14380 + }, + { + "epoch": 0.55561990810456, + "grad_norm": 1.9161540269851685, + "learning_rate": 0.00016296124689498952, + "loss": 0.5071, + "step": 14390 + }, + { + "epoch": 0.5560060233985868, + "grad_norm": 2.4773502349853516, + "learning_rate": 0.00016293550587538773, + "loss": 0.3268, + "step": 14400 + }, + { + "epoch": 0.5563921386926136, + "grad_norm": 1.526877760887146, + "learning_rate": 0.00016290976485578595, + "loss": 0.284, + "step": 14410 + }, + { + "epoch": 0.5567782539866404, + "grad_norm": 2.082036018371582, + "learning_rate": 0.00016288402383618416, + "loss": 0.4194, + "step": 14420 + }, + { + "epoch": 0.5571643692806673, + "grad_norm": 4.1033477783203125, + "learning_rate": 0.00016285828281658237, + "loss": 0.329, + "step": 14430 + }, + { + "epoch": 0.557550484574694, + "grad_norm": 3.344879388809204, + "learning_rate": 0.0001628325417969806, + "loss": 0.3599, + "step": 14440 + }, + { + "epoch": 0.5579365998687208, + "grad_norm": 2.6200602054595947, + "learning_rate": 0.0001628068007773788, + "loss": 0.5152, + "step": 14450 + }, + { + "epoch": 0.5583227151627476, + "grad_norm": 1.05362868309021, + "learning_rate": 0.000162781059757777, + "loss": 0.4454, + "step": 14460 + }, + { + "epoch": 0.5587088304567744, + "grad_norm": 2.557406187057495, + "learning_rate": 0.00016275531873817523, + "loss": 0.3779, + "step": 14470 + }, + { + "epoch": 0.5590949457508012, + "grad_norm": 0.8478209376335144, + "learning_rate": 0.00016272957771857344, + "loss": 0.4289, + "step": 14480 + }, + { + "epoch": 0.559481061044828, + "grad_norm": 3.543574094772339, + "learning_rate": 0.00016270383669897165, + "loss": 0.37, + "step": 14490 + }, + { + "epoch": 0.5598671763388547, + "grad_norm": 0.21068768203258514, + "learning_rate": 0.00016267809567936987, + "loss": 0.2602, + "step": 14500 + }, + { + "epoch": 0.5602532916328816, + "grad_norm": 1.1703628301620483, + "learning_rate": 0.00016265235465976808, + "loss": 0.3684, + "step": 14510 + }, + { + "epoch": 0.5606394069269084, + "grad_norm": 1.4498575925827026, + "learning_rate": 0.0001626266136401663, + "loss": 0.4089, + "step": 14520 + }, + { + "epoch": 0.5610255222209352, + "grad_norm": 1.617297887802124, + "learning_rate": 0.0001626008726205645, + "loss": 0.2759, + "step": 14530 + }, + { + "epoch": 0.5614116375149619, + "grad_norm": 0.8708978891372681, + "learning_rate": 0.00016257513160096272, + "loss": 0.2523, + "step": 14540 + }, + { + "epoch": 0.5617977528089888, + "grad_norm": 1.098026156425476, + "learning_rate": 0.00016254939058136093, + "loss": 0.3507, + "step": 14550 + }, + { + "epoch": 0.5621838681030156, + "grad_norm": 3.0867936611175537, + "learning_rate": 0.00016252364956175915, + "loss": 0.3563, + "step": 14560 + }, + { + "epoch": 0.5625699833970423, + "grad_norm": 1.4829964637756348, + "learning_rate": 0.00016249790854215736, + "loss": 0.3586, + "step": 14570 + }, + { + "epoch": 0.5629560986910691, + "grad_norm": 4.029405117034912, + "learning_rate": 0.00016247216752255557, + "loss": 0.5198, + "step": 14580 + }, + { + "epoch": 0.563342213985096, + "grad_norm": 2.5473573207855225, + "learning_rate": 0.00016244642650295379, + "loss": 0.3818, + "step": 14590 + }, + { + "epoch": 
0.5637283292791228, + "grad_norm": 1.387529730796814, + "learning_rate": 0.000162420685483352, + "loss": 0.3453, + "step": 14600 + }, + { + "epoch": 0.5641144445731495, + "grad_norm": 3.2525246143341064, + "learning_rate": 0.0001623949444637502, + "loss": 0.6096, + "step": 14610 + }, + { + "epoch": 0.5645005598671763, + "grad_norm": 1.23868989944458, + "learning_rate": 0.00016236920344414843, + "loss": 0.2785, + "step": 14620 + }, + { + "epoch": 0.5648866751612032, + "grad_norm": 1.763416051864624, + "learning_rate": 0.00016234346242454667, + "loss": 0.2313, + "step": 14630 + }, + { + "epoch": 0.5652727904552299, + "grad_norm": 2.6030027866363525, + "learning_rate": 0.00016231772140494485, + "loss": 0.5002, + "step": 14640 + }, + { + "epoch": 0.5656589057492567, + "grad_norm": 4.345195770263672, + "learning_rate": 0.00016229198038534307, + "loss": 0.3433, + "step": 14650 + }, + { + "epoch": 0.5660450210432835, + "grad_norm": 1.8660558462142944, + "learning_rate": 0.00016226623936574128, + "loss": 0.325, + "step": 14660 + }, + { + "epoch": 0.5664311363373103, + "grad_norm": 2.504354953765869, + "learning_rate": 0.0001622404983461395, + "loss": 0.3013, + "step": 14670 + }, + { + "epoch": 0.5668172516313371, + "grad_norm": 1.715135097503662, + "learning_rate": 0.0001622147573265377, + "loss": 0.2396, + "step": 14680 + }, + { + "epoch": 0.5672033669253639, + "grad_norm": 0.5195931792259216, + "learning_rate": 0.00016218901630693592, + "loss": 0.4992, + "step": 14690 + }, + { + "epoch": 0.5675894822193908, + "grad_norm": 1.076095461845398, + "learning_rate": 0.00016216327528733416, + "loss": 0.4488, + "step": 14700 + }, + { + "epoch": 0.5679755975134175, + "grad_norm": 0.42350637912750244, + "learning_rate": 0.00016213753426773235, + "loss": 0.2798, + "step": 14710 + }, + { + "epoch": 0.5683617128074443, + "grad_norm": 2.8514647483825684, + "learning_rate": 0.00016211179324813056, + "loss": 0.3108, + "step": 14720 + }, + { + "epoch": 0.5687478281014711, + "grad_norm": 1.4496532678604126, + "learning_rate": 0.00016208605222852877, + "loss": 0.4197, + "step": 14730 + }, + { + "epoch": 0.5691339433954979, + "grad_norm": 2.513998031616211, + "learning_rate": 0.00016206031120892699, + "loss": 0.4931, + "step": 14740 + }, + { + "epoch": 0.5695200586895247, + "grad_norm": 1.5905802249908447, + "learning_rate": 0.0001620345701893252, + "loss": 0.2175, + "step": 14750 + }, + { + "epoch": 0.5699061739835515, + "grad_norm": 0.4657856523990631, + "learning_rate": 0.0001620088291697234, + "loss": 0.4753, + "step": 14760 + }, + { + "epoch": 0.5702922892775782, + "grad_norm": 1.4188594818115234, + "learning_rate": 0.00016198308815012165, + "loss": 0.4849, + "step": 14770 + }, + { + "epoch": 0.5706784045716051, + "grad_norm": 0.6742203235626221, + "learning_rate": 0.00016195734713051984, + "loss": 0.3451, + "step": 14780 + }, + { + "epoch": 0.5710645198656319, + "grad_norm": 1.521262526512146, + "learning_rate": 0.00016193160611091805, + "loss": 0.6253, + "step": 14790 + }, + { + "epoch": 0.5714506351596587, + "grad_norm": 0.8657771348953247, + "learning_rate": 0.00016190586509131627, + "loss": 0.3664, + "step": 14800 + }, + { + "epoch": 0.5718367504536854, + "grad_norm": 1.0131505727767944, + "learning_rate": 0.00016188012407171448, + "loss": 0.3395, + "step": 14810 + }, + { + "epoch": 0.5722228657477123, + "grad_norm": 0.8506319522857666, + "learning_rate": 0.00016185438305211272, + "loss": 0.2769, + "step": 14820 + }, + { + "epoch": 0.5726089810417391, + "grad_norm": 3.1080141067504883, + "learning_rate": 
0.0001618286420325109, + "loss": 0.3185, + "step": 14830 + }, + { + "epoch": 0.5729950963357658, + "grad_norm": 0.8805003762245178, + "learning_rate": 0.00016180290101290915, + "loss": 0.3089, + "step": 14840 + }, + { + "epoch": 0.5733812116297926, + "grad_norm": 3.9470136165618896, + "learning_rate": 0.00016177715999330733, + "loss": 0.4552, + "step": 14850 + }, + { + "epoch": 0.5737673269238195, + "grad_norm": 1.10677969455719, + "learning_rate": 0.00016175141897370557, + "loss": 0.4624, + "step": 14860 + }, + { + "epoch": 0.5741534422178463, + "grad_norm": 1.185539960861206, + "learning_rate": 0.00016172567795410376, + "loss": 0.3144, + "step": 14870 + }, + { + "epoch": 0.574539557511873, + "grad_norm": 2.484386920928955, + "learning_rate": 0.00016169993693450197, + "loss": 0.3409, + "step": 14880 + }, + { + "epoch": 0.5749256728058998, + "grad_norm": 2.1621437072753906, + "learning_rate": 0.0001616741959149002, + "loss": 0.4421, + "step": 14890 + }, + { + "epoch": 0.5753117880999267, + "grad_norm": 1.3359025716781616, + "learning_rate": 0.0001616484548952984, + "loss": 0.2826, + "step": 14900 + }, + { + "epoch": 0.5756979033939534, + "grad_norm": 2.828157901763916, + "learning_rate": 0.00016162271387569664, + "loss": 0.4615, + "step": 14910 + }, + { + "epoch": 0.5760840186879802, + "grad_norm": 2.0543019771575928, + "learning_rate": 0.00016159697285609483, + "loss": 0.4307, + "step": 14920 + }, + { + "epoch": 0.5764701339820071, + "grad_norm": 0.3085225522518158, + "learning_rate": 0.00016157123183649307, + "loss": 0.317, + "step": 14930 + }, + { + "epoch": 0.5768562492760339, + "grad_norm": 1.459349274635315, + "learning_rate": 0.00016154549081689125, + "loss": 0.3928, + "step": 14940 + }, + { + "epoch": 0.5772423645700606, + "grad_norm": 0.6684612035751343, + "learning_rate": 0.00016151974979728947, + "loss": 0.4129, + "step": 14950 + }, + { + "epoch": 0.5776284798640874, + "grad_norm": 2.428311586380005, + "learning_rate": 0.0001614940087776877, + "loss": 0.4163, + "step": 14960 + }, + { + "epoch": 0.5780145951581143, + "grad_norm": 1.8885403871536255, + "learning_rate": 0.0001614682677580859, + "loss": 0.4311, + "step": 14970 + }, + { + "epoch": 0.578400710452141, + "grad_norm": 3.9598031044006348, + "learning_rate": 0.00016144252673848413, + "loss": 0.3103, + "step": 14980 + }, + { + "epoch": 0.5787868257461678, + "grad_norm": 1.872383713722229, + "learning_rate": 0.00016141678571888232, + "loss": 0.3592, + "step": 14990 + }, + { + "epoch": 0.5791729410401946, + "grad_norm": 1.023526668548584, + "learning_rate": 0.00016139104469928056, + "loss": 0.4185, + "step": 15000 + }, + { + "epoch": 0.5795590563342214, + "grad_norm": 1.5721429586410522, + "learning_rate": 0.00016136530367967877, + "loss": 0.4017, + "step": 15010 + }, + { + "epoch": 0.5799451716282482, + "grad_norm": 3.502350091934204, + "learning_rate": 0.00016133956266007696, + "loss": 0.397, + "step": 15020 + }, + { + "epoch": 0.580331286922275, + "grad_norm": 2.415985345840454, + "learning_rate": 0.0001613138216404752, + "loss": 0.4041, + "step": 15030 + }, + { + "epoch": 0.5807174022163017, + "grad_norm": 0.5441868901252747, + "learning_rate": 0.00016128808062087339, + "loss": 0.2395, + "step": 15040 + }, + { + "epoch": 0.5811035175103286, + "grad_norm": 2.453216552734375, + "learning_rate": 0.00016126233960127163, + "loss": 0.1586, + "step": 15050 + }, + { + "epoch": 0.5814896328043554, + "grad_norm": 3.108646869659424, + "learning_rate": 0.0001612365985816698, + "loss": 0.3996, + "step": 15060 + }, + { + "epoch": 
0.5818757480983822, + "grad_norm": 0.7707905173301697, + "learning_rate": 0.00016121085756206805, + "loss": 0.1756, + "step": 15070 + }, + { + "epoch": 0.5822618633924089, + "grad_norm": 0.42857447266578674, + "learning_rate": 0.00016118511654246627, + "loss": 0.258, + "step": 15080 + }, + { + "epoch": 0.5826479786864358, + "grad_norm": 0.7148373126983643, + "learning_rate": 0.00016115937552286445, + "loss": 0.3755, + "step": 15090 + }, + { + "epoch": 0.5830340939804626, + "grad_norm": 0.04789021611213684, + "learning_rate": 0.0001611336345032627, + "loss": 0.2087, + "step": 15100 + }, + { + "epoch": 0.5834202092744893, + "grad_norm": 5.012516975402832, + "learning_rate": 0.00016110789348366088, + "loss": 0.5406, + "step": 15110 + }, + { + "epoch": 0.5838063245685161, + "grad_norm": 1.4139299392700195, + "learning_rate": 0.00016108215246405912, + "loss": 0.407, + "step": 15120 + }, + { + "epoch": 0.584192439862543, + "grad_norm": 0.8637074828147888, + "learning_rate": 0.00016105641144445733, + "loss": 0.2987, + "step": 15130 + }, + { + "epoch": 0.5845785551565698, + "grad_norm": 0.9556403160095215, + "learning_rate": 0.00016103067042485555, + "loss": 0.4319, + "step": 15140 + }, + { + "epoch": 0.5849646704505965, + "grad_norm": 2.071455955505371, + "learning_rate": 0.00016100492940525376, + "loss": 0.4824, + "step": 15150 + }, + { + "epoch": 0.5853507857446233, + "grad_norm": 3.8130764961242676, + "learning_rate": 0.00016097918838565195, + "loss": 0.4749, + "step": 15160 + }, + { + "epoch": 0.5857369010386502, + "grad_norm": 1.290738582611084, + "learning_rate": 0.00016095344736605019, + "loss": 0.4486, + "step": 15170 + }, + { + "epoch": 0.5861230163326769, + "grad_norm": 0.9964671730995178, + "learning_rate": 0.00016092770634644837, + "loss": 0.1514, + "step": 15180 + }, + { + "epoch": 0.5865091316267037, + "grad_norm": 0.5267524123191833, + "learning_rate": 0.0001609019653268466, + "loss": 0.2298, + "step": 15190 + }, + { + "epoch": 0.5868952469207306, + "grad_norm": 2.028960704803467, + "learning_rate": 0.00016087622430724483, + "loss": 0.2925, + "step": 15200 + }, + { + "epoch": 0.5872813622147574, + "grad_norm": 0.8423904776573181, + "learning_rate": 0.00016085048328764304, + "loss": 0.4221, + "step": 15210 + }, + { + "epoch": 0.5876674775087841, + "grad_norm": 1.9663047790527344, + "learning_rate": 0.00016082474226804125, + "loss": 0.3595, + "step": 15220 + }, + { + "epoch": 0.5880535928028109, + "grad_norm": 1.2221906185150146, + "learning_rate": 0.00016079900124843944, + "loss": 0.3174, + "step": 15230 + }, + { + "epoch": 0.5884397080968378, + "grad_norm": 2.138437032699585, + "learning_rate": 0.00016077326022883768, + "loss": 0.2993, + "step": 15240 + }, + { + "epoch": 0.5888258233908645, + "grad_norm": 1.8036224842071533, + "learning_rate": 0.00016074751920923586, + "loss": 0.3897, + "step": 15250 + }, + { + "epoch": 0.5892119386848913, + "grad_norm": 2.3102879524230957, + "learning_rate": 0.0001607217781896341, + "loss": 0.3713, + "step": 15260 + }, + { + "epoch": 0.5895980539789181, + "grad_norm": 1.40048348903656, + "learning_rate": 0.00016069603717003232, + "loss": 0.2701, + "step": 15270 + }, + { + "epoch": 0.589984169272945, + "grad_norm": 1.0581787824630737, + "learning_rate": 0.00016067029615043053, + "loss": 0.2529, + "step": 15280 + }, + { + "epoch": 0.5903702845669717, + "grad_norm": 0.668211042881012, + "learning_rate": 0.00016064455513082875, + "loss": 0.221, + "step": 15290 + }, + { + "epoch": 0.5907563998609985, + "grad_norm": 0.7950372099876404, + 
"learning_rate": 0.00016061881411122696, + "loss": 0.2405, + "step": 15300 + }, + { + "epoch": 0.5911425151550252, + "grad_norm": 1.8531723022460938, + "learning_rate": 0.00016059307309162517, + "loss": 0.3423, + "step": 15310 + }, + { + "epoch": 0.5915286304490521, + "grad_norm": 0.2071121335029602, + "learning_rate": 0.00016056733207202339, + "loss": 0.2923, + "step": 15320 + }, + { + "epoch": 0.5919147457430789, + "grad_norm": 2.4298369884490967, + "learning_rate": 0.0001605415910524216, + "loss": 0.531, + "step": 15330 + }, + { + "epoch": 0.5923008610371057, + "grad_norm": 3.2297933101654053, + "learning_rate": 0.0001605158500328198, + "loss": 0.2563, + "step": 15340 + }, + { + "epoch": 0.5926869763311324, + "grad_norm": 1.533225178718567, + "learning_rate": 0.00016049010901321803, + "loss": 0.2712, + "step": 15350 + }, + { + "epoch": 0.5930730916251593, + "grad_norm": 3.6168954372406006, + "learning_rate": 0.00016046436799361624, + "loss": 0.6428, + "step": 15360 + }, + { + "epoch": 0.5934592069191861, + "grad_norm": 0.8912101984024048, + "learning_rate": 0.00016043862697401445, + "loss": 0.2882, + "step": 15370 + }, + { + "epoch": 0.5938453222132128, + "grad_norm": 0.6782923936843872, + "learning_rate": 0.00016041288595441267, + "loss": 0.3451, + "step": 15380 + }, + { + "epoch": 0.5942314375072396, + "grad_norm": 2.7575254440307617, + "learning_rate": 0.00016038714493481088, + "loss": 0.272, + "step": 15390 + }, + { + "epoch": 0.5946175528012665, + "grad_norm": 1.8348017930984497, + "learning_rate": 0.0001603614039152091, + "loss": 0.286, + "step": 15400 + }, + { + "epoch": 0.5950036680952933, + "grad_norm": 3.1459157466888428, + "learning_rate": 0.0001603356628956073, + "loss": 0.2986, + "step": 15410 + }, + { + "epoch": 0.59538978338932, + "grad_norm": 2.0769810676574707, + "learning_rate": 0.00016030992187600552, + "loss": 0.5512, + "step": 15420 + }, + { + "epoch": 0.5957758986833468, + "grad_norm": 0.5503840446472168, + "learning_rate": 0.00016028418085640373, + "loss": 0.4117, + "step": 15430 + }, + { + "epoch": 0.5961620139773737, + "grad_norm": 1.9759409427642822, + "learning_rate": 0.00016025843983680194, + "loss": 0.3619, + "step": 15440 + }, + { + "epoch": 0.5965481292714004, + "grad_norm": 2.2451424598693848, + "learning_rate": 0.00016023269881720016, + "loss": 0.2798, + "step": 15450 + }, + { + "epoch": 0.5969342445654272, + "grad_norm": 1.8537431955337524, + "learning_rate": 0.00016020695779759837, + "loss": 0.3739, + "step": 15460 + }, + { + "epoch": 0.5973203598594541, + "grad_norm": 1.7667044401168823, + "learning_rate": 0.00016018121677799658, + "loss": 0.3694, + "step": 15470 + }, + { + "epoch": 0.5977064751534809, + "grad_norm": 0.8955661654472351, + "learning_rate": 0.0001601554757583948, + "loss": 0.2036, + "step": 15480 + }, + { + "epoch": 0.5980925904475076, + "grad_norm": 0.9526143074035645, + "learning_rate": 0.000160129734738793, + "loss": 0.3728, + "step": 15490 + }, + { + "epoch": 0.5984787057415344, + "grad_norm": 0.5816594958305359, + "learning_rate": 0.00016010399371919122, + "loss": 0.3263, + "step": 15500 + }, + { + "epoch": 0.5988648210355613, + "grad_norm": 0.6841669678688049, + "learning_rate": 0.00016007825269958944, + "loss": 0.3252, + "step": 15510 + }, + { + "epoch": 0.599250936329588, + "grad_norm": 0.9375134706497192, + "learning_rate": 0.00016005251167998765, + "loss": 0.2207, + "step": 15520 + }, + { + "epoch": 0.5996370516236148, + "grad_norm": 0.39535248279571533, + "learning_rate": 0.00016002677066038586, + "loss": 0.3566, + "step": 
15530 + }, + { + "epoch": 0.6000231669176416, + "grad_norm": 0.5440202951431274, + "learning_rate": 0.00016000102964078408, + "loss": 0.2886, + "step": 15540 + }, + { + "epoch": 0.6004092822116684, + "grad_norm": 0.45111024379730225, + "learning_rate": 0.0001599752886211823, + "loss": 0.2697, + "step": 15550 + }, + { + "epoch": 0.6007953975056952, + "grad_norm": 2.372063398361206, + "learning_rate": 0.0001599495476015805, + "loss": 0.4716, + "step": 15560 + }, + { + "epoch": 0.601181512799722, + "grad_norm": 0.5841318368911743, + "learning_rate": 0.00015992380658197872, + "loss": 0.5611, + "step": 15570 + }, + { + "epoch": 0.6015676280937488, + "grad_norm": 0.323010116815567, + "learning_rate": 0.00015989806556237693, + "loss": 0.2733, + "step": 15580 + }, + { + "epoch": 0.6019537433877756, + "grad_norm": 1.4498323202133179, + "learning_rate": 0.00015987232454277514, + "loss": 0.4063, + "step": 15590 + }, + { + "epoch": 0.6023398586818024, + "grad_norm": 0.47180086374282837, + "learning_rate": 0.00015984658352317336, + "loss": 0.3322, + "step": 15600 + }, + { + "epoch": 0.6027259739758292, + "grad_norm": 1.2303547859191895, + "learning_rate": 0.00015982084250357157, + "loss": 0.2985, + "step": 15610 + }, + { + "epoch": 0.6031120892698559, + "grad_norm": 2.5056209564208984, + "learning_rate": 0.00015979510148396978, + "loss": 0.4903, + "step": 15620 + }, + { + "epoch": 0.6034982045638828, + "grad_norm": 1.344814419746399, + "learning_rate": 0.00015976936046436802, + "loss": 0.3806, + "step": 15630 + }, + { + "epoch": 0.6038843198579096, + "grad_norm": 3.2931411266326904, + "learning_rate": 0.0001597436194447662, + "loss": 0.3291, + "step": 15640 + }, + { + "epoch": 0.6042704351519363, + "grad_norm": 0.3108818829059601, + "learning_rate": 0.00015971787842516442, + "loss": 0.2866, + "step": 15650 + }, + { + "epoch": 0.6046565504459631, + "grad_norm": 2.6437489986419678, + "learning_rate": 0.00015969213740556264, + "loss": 0.2853, + "step": 15660 + }, + { + "epoch": 0.60504266573999, + "grad_norm": 0.6996239423751831, + "learning_rate": 0.00015966639638596085, + "loss": 0.2947, + "step": 15670 + }, + { + "epoch": 0.6054287810340168, + "grad_norm": 0.9377492070198059, + "learning_rate": 0.00015964065536635906, + "loss": 0.5502, + "step": 15680 + }, + { + "epoch": 0.6058148963280435, + "grad_norm": 0.3067781627178192, + "learning_rate": 0.00015961491434675728, + "loss": 0.2916, + "step": 15690 + }, + { + "epoch": 0.6062010116220703, + "grad_norm": 1.6191383600234985, + "learning_rate": 0.00015958917332715552, + "loss": 0.2536, + "step": 15700 + }, + { + "epoch": 0.6065871269160972, + "grad_norm": 0.5139639973640442, + "learning_rate": 0.0001595634323075537, + "loss": 0.2809, + "step": 15710 + }, + { + "epoch": 0.6069732422101239, + "grad_norm": 1.6476198434829712, + "learning_rate": 0.00015953769128795192, + "loss": 0.2859, + "step": 15720 + }, + { + "epoch": 0.6073593575041507, + "grad_norm": 3.895970106124878, + "learning_rate": 0.00015951195026835013, + "loss": 0.5254, + "step": 15730 + }, + { + "epoch": 0.6077454727981776, + "grad_norm": 1.1022089719772339, + "learning_rate": 0.00015948620924874834, + "loss": 0.3254, + "step": 15740 + }, + { + "epoch": 0.6081315880922044, + "grad_norm": 1.3811163902282715, + "learning_rate": 0.00015946046822914656, + "loss": 0.3259, + "step": 15750 + }, + { + "epoch": 0.6085177033862311, + "grad_norm": 1.0810881853103638, + "learning_rate": 0.00015943472720954477, + "loss": 0.3689, + "step": 15760 + }, + { + "epoch": 0.6089038186802579, + "grad_norm": 
2.1972954273223877, + "learning_rate": 0.000159408986189943, + "loss": 0.3255, + "step": 15770 + }, + { + "epoch": 0.6092899339742848, + "grad_norm": 3.945215940475464, + "learning_rate": 0.0001593832451703412, + "loss": 0.4317, + "step": 15780 + }, + { + "epoch": 0.6096760492683115, + "grad_norm": 0.5246737599372864, + "learning_rate": 0.0001593575041507394, + "loss": 0.5895, + "step": 15790 + }, + { + "epoch": 0.6100621645623383, + "grad_norm": 0.0722908228635788, + "learning_rate": 0.00015933176313113762, + "loss": 0.2078, + "step": 15800 + }, + { + "epoch": 0.6104482798563651, + "grad_norm": 1.888529658317566, + "learning_rate": 0.00015930602211153584, + "loss": 0.3629, + "step": 15810 + }, + { + "epoch": 0.610834395150392, + "grad_norm": 0.7190912365913391, + "learning_rate": 0.00015928028109193408, + "loss": 0.4715, + "step": 15820 + }, + { + "epoch": 0.6112205104444187, + "grad_norm": 1.1624583005905151, + "learning_rate": 0.00015925454007233226, + "loss": 0.3678, + "step": 15830 + }, + { + "epoch": 0.6116066257384455, + "grad_norm": 1.4570399522781372, + "learning_rate": 0.0001592287990527305, + "loss": 0.3008, + "step": 15840 + }, + { + "epoch": 0.6119927410324723, + "grad_norm": 0.45386505126953125, + "learning_rate": 0.0001592030580331287, + "loss": 0.2919, + "step": 15850 + }, + { + "epoch": 0.6123788563264991, + "grad_norm": 1.9031087160110474, + "learning_rate": 0.0001591773170135269, + "loss": 0.4336, + "step": 15860 + }, + { + "epoch": 0.6127649716205259, + "grad_norm": 0.9156181216239929, + "learning_rate": 0.00015915157599392512, + "loss": 0.2451, + "step": 15870 + }, + { + "epoch": 0.6131510869145527, + "grad_norm": 1.8031158447265625, + "learning_rate": 0.00015912583497432333, + "loss": 0.259, + "step": 15880 + }, + { + "epoch": 0.6135372022085794, + "grad_norm": 1.4521692991256714, + "learning_rate": 0.00015910009395472157, + "loss": 0.279, + "step": 15890 + }, + { + "epoch": 0.6139233175026063, + "grad_norm": 1.5064165592193604, + "learning_rate": 0.00015907435293511976, + "loss": 0.34, + "step": 15900 + }, + { + "epoch": 0.6143094327966331, + "grad_norm": 0.588637113571167, + "learning_rate": 0.000159048611915518, + "loss": 0.4886, + "step": 15910 + }, + { + "epoch": 0.6146955480906598, + "grad_norm": 0.3159797191619873, + "learning_rate": 0.00015902287089591618, + "loss": 0.3265, + "step": 15920 + }, + { + "epoch": 0.6150816633846866, + "grad_norm": 3.3988165855407715, + "learning_rate": 0.0001589971298763144, + "loss": 0.4917, + "step": 15930 + }, + { + "epoch": 0.6154677786787135, + "grad_norm": 0.5139709711074829, + "learning_rate": 0.00015897138885671264, + "loss": 0.2175, + "step": 15940 + }, + { + "epoch": 0.6158538939727403, + "grad_norm": 3.6877944469451904, + "learning_rate": 0.00015894564783711082, + "loss": 0.4674, + "step": 15950 + }, + { + "epoch": 0.616240009266767, + "grad_norm": 1.6468040943145752, + "learning_rate": 0.00015891990681750906, + "loss": 0.4375, + "step": 15960 + }, + { + "epoch": 0.6166261245607938, + "grad_norm": 0.47542962431907654, + "learning_rate": 0.00015889416579790725, + "loss": 0.3593, + "step": 15970 + }, + { + "epoch": 0.6170122398548207, + "grad_norm": 2.210597038269043, + "learning_rate": 0.0001588684247783055, + "loss": 0.3356, + "step": 15980 + }, + { + "epoch": 0.6173983551488474, + "grad_norm": 2.0030908584594727, + "learning_rate": 0.00015884268375870368, + "loss": 0.3367, + "step": 15990 + }, + { + "epoch": 0.6177844704428742, + "grad_norm": 3.438887119293213, + "learning_rate": 0.0001588169427391019, + "loss": 
0.4583, + "step": 16000 + }, + { + "epoch": 0.6181705857369011, + "grad_norm": 2.88147234916687, + "learning_rate": 0.00015879120171950013, + "loss": 0.3837, + "step": 16010 + }, + { + "epoch": 0.6185567010309279, + "grad_norm": 0.9327366948127747, + "learning_rate": 0.00015876546069989832, + "loss": 0.236, + "step": 16020 + }, + { + "epoch": 0.6189428163249546, + "grad_norm": 2.205355405807495, + "learning_rate": 0.00015873971968029656, + "loss": 0.2957, + "step": 16030 + }, + { + "epoch": 0.6193289316189814, + "grad_norm": 2.3861300945281982, + "learning_rate": 0.00015871397866069474, + "loss": 0.427, + "step": 16040 + }, + { + "epoch": 0.6197150469130083, + "grad_norm": 0.3276061415672302, + "learning_rate": 0.00015868823764109298, + "loss": 0.2301, + "step": 16050 + }, + { + "epoch": 0.620101162207035, + "grad_norm": 0.6325292587280273, + "learning_rate": 0.00015866249662149117, + "loss": 0.3915, + "step": 16060 + }, + { + "epoch": 0.6204872775010618, + "grad_norm": 1.1546003818511963, + "learning_rate": 0.0001586367556018894, + "loss": 0.4081, + "step": 16070 + }, + { + "epoch": 0.6208733927950886, + "grad_norm": 1.7260868549346924, + "learning_rate": 0.00015861101458228762, + "loss": 0.4642, + "step": 16080 + }, + { + "epoch": 0.6212595080891155, + "grad_norm": 0.30561816692352295, + "learning_rate": 0.0001585852735626858, + "loss": 0.2065, + "step": 16090 + }, + { + "epoch": 0.6216456233831422, + "grad_norm": 2.440007448196411, + "learning_rate": 0.00015855953254308405, + "loss": 0.3851, + "step": 16100 + }, + { + "epoch": 0.622031738677169, + "grad_norm": 4.06764554977417, + "learning_rate": 0.00015853379152348224, + "loss": 0.3796, + "step": 16110 + }, + { + "epoch": 0.6224178539711958, + "grad_norm": 4.970936298370361, + "learning_rate": 0.00015850805050388048, + "loss": 0.3393, + "step": 16120 + }, + { + "epoch": 0.6228039692652226, + "grad_norm": 2.0721356868743896, + "learning_rate": 0.0001584823094842787, + "loss": 0.2991, + "step": 16130 + }, + { + "epoch": 0.6231900845592494, + "grad_norm": 1.8130602836608887, + "learning_rate": 0.0001584565684646769, + "loss": 0.3132, + "step": 16140 + }, + { + "epoch": 0.6235761998532762, + "grad_norm": 1.1093038320541382, + "learning_rate": 0.00015843082744507512, + "loss": 0.3144, + "step": 16150 + }, + { + "epoch": 0.6239623151473029, + "grad_norm": 2.227937936782837, + "learning_rate": 0.0001584050864254733, + "loss": 0.4955, + "step": 16160 + }, + { + "epoch": 0.6243484304413298, + "grad_norm": 1.2541782855987549, + "learning_rate": 0.00015837934540587154, + "loss": 0.225, + "step": 16170 + }, + { + "epoch": 0.6247345457353566, + "grad_norm": 1.8113441467285156, + "learning_rate": 0.00015835360438626973, + "loss": 0.5393, + "step": 16180 + }, + { + "epoch": 0.6251206610293834, + "grad_norm": 2.198061466217041, + "learning_rate": 0.00015832786336666797, + "loss": 0.2225, + "step": 16190 + }, + { + "epoch": 0.6255067763234101, + "grad_norm": 2.4241714477539062, + "learning_rate": 0.00015830212234706618, + "loss": 0.3294, + "step": 16200 + }, + { + "epoch": 0.625892891617437, + "grad_norm": 3.0632903575897217, + "learning_rate": 0.0001582763813274644, + "loss": 0.3776, + "step": 16210 + }, + { + "epoch": 0.6262790069114638, + "grad_norm": 1.9433149099349976, + "learning_rate": 0.0001582506403078626, + "loss": 0.2964, + "step": 16220 + }, + { + "epoch": 0.6266651222054905, + "grad_norm": 1.6328935623168945, + "learning_rate": 0.0001582248992882608, + "loss": 0.2169, + "step": 16230 + }, + { + "epoch": 0.6270512374995174, + "grad_norm": 
0.926477313041687, + "learning_rate": 0.00015819915826865904, + "loss": 0.271, + "step": 16240 + }, + { + "epoch": 0.6274373527935442, + "grad_norm": 3.4526686668395996, + "learning_rate": 0.00015817341724905722, + "loss": 0.1836, + "step": 16250 + }, + { + "epoch": 0.627823468087571, + "grad_norm": 0.8149943947792053, + "learning_rate": 0.00015814767622945546, + "loss": 0.2494, + "step": 16260 + }, + { + "epoch": 0.6282095833815977, + "grad_norm": 0.8609616756439209, + "learning_rate": 0.00015812193520985368, + "loss": 0.279, + "step": 16270 + }, + { + "epoch": 0.6285956986756246, + "grad_norm": 0.9432594180107117, + "learning_rate": 0.0001580961941902519, + "loss": 0.3485, + "step": 16280 + }, + { + "epoch": 0.6289818139696514, + "grad_norm": 2.8844246864318848, + "learning_rate": 0.0001580704531706501, + "loss": 0.6163, + "step": 16290 + }, + { + "epoch": 0.6293679292636781, + "grad_norm": 3.4325804710388184, + "learning_rate": 0.0001580447121510483, + "loss": 0.3392, + "step": 16300 + }, + { + "epoch": 0.6297540445577049, + "grad_norm": 0.5320155620574951, + "learning_rate": 0.00015801897113144653, + "loss": 0.4664, + "step": 16310 + }, + { + "epoch": 0.6301401598517318, + "grad_norm": 1.075914740562439, + "learning_rate": 0.00015799323011184474, + "loss": 0.3023, + "step": 16320 + }, + { + "epoch": 0.6305262751457585, + "grad_norm": 1.3115136623382568, + "learning_rate": 0.00015796748909224296, + "loss": 0.1741, + "step": 16330 + }, + { + "epoch": 0.6309123904397853, + "grad_norm": 0.18413056433200836, + "learning_rate": 0.00015794174807264117, + "loss": 0.1966, + "step": 16340 + }, + { + "epoch": 0.6312985057338121, + "grad_norm": 1.8707069158554077, + "learning_rate": 0.00015791600705303938, + "loss": 0.3175, + "step": 16350 + }, + { + "epoch": 0.631684621027839, + "grad_norm": 2.2436699867248535, + "learning_rate": 0.0001578902660334376, + "loss": 0.2619, + "step": 16360 + }, + { + "epoch": 0.6320707363218657, + "grad_norm": 2.6100945472717285, + "learning_rate": 0.00015786452501383578, + "loss": 0.2874, + "step": 16370 + }, + { + "epoch": 0.6324568516158925, + "grad_norm": 1.455538034439087, + "learning_rate": 0.00015783878399423402, + "loss": 0.3956, + "step": 16380 + }, + { + "epoch": 0.6328429669099193, + "grad_norm": 1.1950361728668213, + "learning_rate": 0.00015781304297463224, + "loss": 0.3406, + "step": 16390 + }, + { + "epoch": 0.6332290822039461, + "grad_norm": 0.6905789971351624, + "learning_rate": 0.00015778730195503045, + "loss": 0.2788, + "step": 16400 + }, + { + "epoch": 0.6336151974979729, + "grad_norm": 1.8803350925445557, + "learning_rate": 0.00015776156093542866, + "loss": 0.5509, + "step": 16410 + }, + { + "epoch": 0.6340013127919997, + "grad_norm": 4.088913440704346, + "learning_rate": 0.00015773581991582688, + "loss": 0.5238, + "step": 16420 + }, + { + "epoch": 0.6343874280860264, + "grad_norm": 2.9464988708496094, + "learning_rate": 0.0001577100788962251, + "loss": 0.4721, + "step": 16430 + }, + { + "epoch": 0.6347735433800533, + "grad_norm": 2.005481719970703, + "learning_rate": 0.0001576843378766233, + "loss": 0.323, + "step": 16440 + }, + { + "epoch": 0.6351596586740801, + "grad_norm": 0.1693512350320816, + "learning_rate": 0.00015765859685702152, + "loss": 0.3459, + "step": 16450 + }, + { + "epoch": 0.6355457739681069, + "grad_norm": 1.6552183628082275, + "learning_rate": 0.00015763285583741973, + "loss": 0.4299, + "step": 16460 + }, + { + "epoch": 0.6359318892621336, + "grad_norm": 0.8498923182487488, + "learning_rate": 0.00015760711481781794, + "loss": 
0.3665, + "step": 16470 + }, + { + "epoch": 0.6363180045561605, + "grad_norm": 1.098840594291687, + "learning_rate": 0.00015758137379821616, + "loss": 0.318, + "step": 16480 + }, + { + "epoch": 0.6367041198501873, + "grad_norm": 2.69606876373291, + "learning_rate": 0.00015755563277861437, + "loss": 0.3566, + "step": 16490 + }, + { + "epoch": 0.637090235144214, + "grad_norm": 1.4099249839782715, + "learning_rate": 0.00015752989175901258, + "loss": 0.2658, + "step": 16500 + }, + { + "epoch": 0.6374763504382409, + "grad_norm": 0.10336513817310333, + "learning_rate": 0.0001575041507394108, + "loss": 0.4608, + "step": 16510 + }, + { + "epoch": 0.6378624657322677, + "grad_norm": 2.224609136581421, + "learning_rate": 0.000157478409719809, + "loss": 0.2875, + "step": 16520 + }, + { + "epoch": 0.6382485810262944, + "grad_norm": 1.3182893991470337, + "learning_rate": 0.00015745266870020722, + "loss": 0.2972, + "step": 16530 + }, + { + "epoch": 0.6386346963203212, + "grad_norm": 0.8028392195701599, + "learning_rate": 0.00015742692768060544, + "loss": 0.3553, + "step": 16540 + }, + { + "epoch": 0.6390208116143481, + "grad_norm": 2.6714046001434326, + "learning_rate": 0.00015740118666100365, + "loss": 0.3965, + "step": 16550 + }, + { + "epoch": 0.6394069269083749, + "grad_norm": 0.6173273921012878, + "learning_rate": 0.00015737544564140186, + "loss": 0.4278, + "step": 16560 + }, + { + "epoch": 0.6397930422024016, + "grad_norm": 0.9547831416130066, + "learning_rate": 0.00015734970462180008, + "loss": 0.3854, + "step": 16570 + }, + { + "epoch": 0.6401791574964284, + "grad_norm": 1.1336010694503784, + "learning_rate": 0.0001573239636021983, + "loss": 0.3505, + "step": 16580 + }, + { + "epoch": 0.6405652727904553, + "grad_norm": 4.911902904510498, + "learning_rate": 0.0001572982225825965, + "loss": 0.6624, + "step": 16590 + }, + { + "epoch": 0.640951388084482, + "grad_norm": 2.472303628921509, + "learning_rate": 0.00015727248156299472, + "loss": 0.4479, + "step": 16600 + }, + { + "epoch": 0.6413375033785088, + "grad_norm": 1.7077019214630127, + "learning_rate": 0.00015724674054339293, + "loss": 0.3985, + "step": 16610 + }, + { + "epoch": 0.6417236186725356, + "grad_norm": 4.096541404724121, + "learning_rate": 0.00015722099952379114, + "loss": 0.4763, + "step": 16620 + }, + { + "epoch": 0.6421097339665625, + "grad_norm": 1.5275769233703613, + "learning_rate": 0.00015719525850418936, + "loss": 0.3792, + "step": 16630 + }, + { + "epoch": 0.6424958492605892, + "grad_norm": 0.33548179268836975, + "learning_rate": 0.00015716951748458757, + "loss": 0.3276, + "step": 16640 + }, + { + "epoch": 0.642881964554616, + "grad_norm": 4.142831325531006, + "learning_rate": 0.00015714377646498578, + "loss": 0.4906, + "step": 16650 + }, + { + "epoch": 0.6432680798486428, + "grad_norm": 1.5129270553588867, + "learning_rate": 0.000157118035445384, + "loss": 0.3077, + "step": 16660 + }, + { + "epoch": 0.6436541951426696, + "grad_norm": 2.0287647247314453, + "learning_rate": 0.0001570922944257822, + "loss": 0.43, + "step": 16670 + }, + { + "epoch": 0.6440403104366964, + "grad_norm": 2.1278579235076904, + "learning_rate": 0.00015706655340618042, + "loss": 0.3822, + "step": 16680 + }, + { + "epoch": 0.6444264257307232, + "grad_norm": 0.7621383666992188, + "learning_rate": 0.00015704081238657864, + "loss": 0.2556, + "step": 16690 + }, + { + "epoch": 0.6448125410247499, + "grad_norm": 1.888422966003418, + "learning_rate": 0.00015701507136697685, + "loss": 0.4687, + "step": 16700 + }, + { + "epoch": 0.6451986563187768, + "grad_norm": 
2.09405517578125, + "learning_rate": 0.00015698933034737506, + "loss": 0.4447, + "step": 16710 + }, + { + "epoch": 0.6455847716128036, + "grad_norm": 0.8639073967933655, + "learning_rate": 0.00015696358932777328, + "loss": 0.224, + "step": 16720 + }, + { + "epoch": 0.6459708869068304, + "grad_norm": 1.1472411155700684, + "learning_rate": 0.0001569378483081715, + "loss": 0.315, + "step": 16730 + }, + { + "epoch": 0.6463570022008571, + "grad_norm": 2.4987192153930664, + "learning_rate": 0.0001569121072885697, + "loss": 0.3707, + "step": 16740 + }, + { + "epoch": 0.646743117494884, + "grad_norm": 1.661458134651184, + "learning_rate": 0.00015688636626896792, + "loss": 0.4027, + "step": 16750 + }, + { + "epoch": 0.6471292327889108, + "grad_norm": 1.5494720935821533, + "learning_rate": 0.00015686062524936613, + "loss": 0.3737, + "step": 16760 + }, + { + "epoch": 0.6475153480829375, + "grad_norm": 0.3376433253288269, + "learning_rate": 0.00015683488422976437, + "loss": 0.3468, + "step": 16770 + }, + { + "epoch": 0.6479014633769644, + "grad_norm": 0.6496528387069702, + "learning_rate": 0.00015680914321016256, + "loss": 0.2857, + "step": 16780 + }, + { + "epoch": 0.6482875786709912, + "grad_norm": 1.957741618156433, + "learning_rate": 0.00015678340219056077, + "loss": 0.5089, + "step": 16790 + }, + { + "epoch": 0.648673693965018, + "grad_norm": 3.980466842651367, + "learning_rate": 0.00015675766117095898, + "loss": 0.3558, + "step": 16800 + }, + { + "epoch": 0.6490598092590447, + "grad_norm": 3.2516696453094482, + "learning_rate": 0.0001567319201513572, + "loss": 0.6214, + "step": 16810 + }, + { + "epoch": 0.6494459245530716, + "grad_norm": 0.6847260594367981, + "learning_rate": 0.00015670617913175544, + "loss": 0.3681, + "step": 16820 + }, + { + "epoch": 0.6498320398470984, + "grad_norm": 3.0918118953704834, + "learning_rate": 0.00015668043811215362, + "loss": 0.3608, + "step": 16830 + }, + { + "epoch": 0.6502181551411251, + "grad_norm": 1.1295204162597656, + "learning_rate": 0.00015665469709255186, + "loss": 0.3189, + "step": 16840 + }, + { + "epoch": 0.6506042704351519, + "grad_norm": 1.803222417831421, + "learning_rate": 0.00015662895607295005, + "loss": 0.3138, + "step": 16850 + }, + { + "epoch": 0.6509903857291788, + "grad_norm": 3.157122850418091, + "learning_rate": 0.00015660321505334826, + "loss": 0.4342, + "step": 16860 + }, + { + "epoch": 0.6513765010232055, + "grad_norm": 2.6584184169769287, + "learning_rate": 0.00015657747403374648, + "loss": 0.211, + "step": 16870 + }, + { + "epoch": 0.6517626163172323, + "grad_norm": 0.763903021812439, + "learning_rate": 0.0001565517330141447, + "loss": 0.2873, + "step": 16880 + }, + { + "epoch": 0.6521487316112591, + "grad_norm": 2.5033602714538574, + "learning_rate": 0.00015652599199454293, + "loss": 0.4475, + "step": 16890 + }, + { + "epoch": 0.652534846905286, + "grad_norm": 2.088690757751465, + "learning_rate": 0.00015650025097494112, + "loss": 0.3022, + "step": 16900 + }, + { + "epoch": 0.6529209621993127, + "grad_norm": 1.596064567565918, + "learning_rate": 0.00015647450995533936, + "loss": 0.3771, + "step": 16910 + }, + { + "epoch": 0.6533070774933395, + "grad_norm": 1.2658660411834717, + "learning_rate": 0.00015644876893573754, + "loss": 0.4793, + "step": 16920 + }, + { + "epoch": 0.6536931927873663, + "grad_norm": 1.5343844890594482, + "learning_rate": 0.00015642302791613576, + "loss": 0.5026, + "step": 16930 + }, + { + "epoch": 0.6540793080813931, + "grad_norm": 0.4736674129962921, + "learning_rate": 0.000156397286896534, + "loss": 0.2269, 
+ "step": 16940 + }, + { + "epoch": 0.6544654233754199, + "grad_norm": 0.05510171130299568, + "learning_rate": 0.00015637154587693218, + "loss": 0.2398, + "step": 16950 + }, + { + "epoch": 0.6548515386694467, + "grad_norm": 0.641941249370575, + "learning_rate": 0.00015634580485733042, + "loss": 0.3862, + "step": 16960 + }, + { + "epoch": 0.6552376539634734, + "grad_norm": 1.5418890714645386, + "learning_rate": 0.0001563200638377286, + "loss": 0.2688, + "step": 16970 + }, + { + "epoch": 0.6556237692575003, + "grad_norm": 3.46284818649292, + "learning_rate": 0.00015629432281812685, + "loss": 0.5199, + "step": 16980 + }, + { + "epoch": 0.6560098845515271, + "grad_norm": 0.3225530683994293, + "learning_rate": 0.00015626858179852504, + "loss": 0.5035, + "step": 16990 + }, + { + "epoch": 0.6563959998455539, + "grad_norm": 0.9385218620300293, + "learning_rate": 0.00015624284077892325, + "loss": 0.2618, + "step": 17000 + }, + { + "epoch": 0.6567821151395806, + "grad_norm": 0.8849124312400818, + "learning_rate": 0.0001562170997593215, + "loss": 0.3711, + "step": 17010 + }, + { + "epoch": 0.6571682304336075, + "grad_norm": 2.2706375122070312, + "learning_rate": 0.00015619135873971968, + "loss": 0.2666, + "step": 17020 + }, + { + "epoch": 0.6575543457276343, + "grad_norm": 2.1923744678497314, + "learning_rate": 0.00015616561772011792, + "loss": 0.2038, + "step": 17030 + }, + { + "epoch": 0.657940461021661, + "grad_norm": 1.2356051206588745, + "learning_rate": 0.0001561398767005161, + "loss": 0.4103, + "step": 17040 + }, + { + "epoch": 0.6583265763156879, + "grad_norm": 1.583095669746399, + "learning_rate": 0.00015611413568091434, + "loss": 0.4164, + "step": 17050 + }, + { + "epoch": 0.6587126916097147, + "grad_norm": 1.0823155641555786, + "learning_rate": 0.00015608839466131253, + "loss": 0.3898, + "step": 17060 + }, + { + "epoch": 0.6590988069037415, + "grad_norm": 1.9568531513214111, + "learning_rate": 0.00015606265364171074, + "loss": 0.3553, + "step": 17070 + }, + { + "epoch": 0.6594849221977682, + "grad_norm": 3.576362371444702, + "learning_rate": 0.00015603691262210898, + "loss": 0.2693, + "step": 17080 + }, + { + "epoch": 0.6598710374917951, + "grad_norm": 0.2432270646095276, + "learning_rate": 0.00015601117160250717, + "loss": 0.2993, + "step": 17090 + }, + { + "epoch": 0.6602571527858219, + "grad_norm": 1.5935213565826416, + "learning_rate": 0.0001559854305829054, + "loss": 0.3295, + "step": 17100 + }, + { + "epoch": 0.6606432680798486, + "grad_norm": 0.09780561178922653, + "learning_rate": 0.0001559596895633036, + "loss": 0.2891, + "step": 17110 + }, + { + "epoch": 0.6610293833738754, + "grad_norm": 0.5332283973693848, + "learning_rate": 0.00015593394854370184, + "loss": 0.372, + "step": 17120 + }, + { + "epoch": 0.6614154986679023, + "grad_norm": 1.1921123266220093, + "learning_rate": 0.00015590820752410005, + "loss": 0.3155, + "step": 17130 + }, + { + "epoch": 0.661801613961929, + "grad_norm": 0.35267120599746704, + "learning_rate": 0.00015588246650449823, + "loss": 0.3795, + "step": 17140 + }, + { + "epoch": 0.6621877292559558, + "grad_norm": 0.4876207411289215, + "learning_rate": 0.00015585672548489648, + "loss": 0.2717, + "step": 17150 + }, + { + "epoch": 0.6625738445499826, + "grad_norm": 0.9866208434104919, + "learning_rate": 0.00015583098446529466, + "loss": 0.4121, + "step": 17160 + }, + { + "epoch": 0.6629599598440095, + "grad_norm": 3.0264835357666016, + "learning_rate": 0.0001558052434456929, + "loss": 0.5356, + "step": 17170 + }, + { + "epoch": 0.6633460751380362, + 
"grad_norm": 2.4786953926086426, + "learning_rate": 0.0001557795024260911, + "loss": 0.2086, + "step": 17180 + }, + { + "epoch": 0.663732190432063, + "grad_norm": 2.3706555366516113, + "learning_rate": 0.00015575376140648933, + "loss": 0.5224, + "step": 17190 + }, + { + "epoch": 0.6641183057260898, + "grad_norm": 2.6375296115875244, + "learning_rate": 0.00015572802038688754, + "loss": 0.3625, + "step": 17200 + }, + { + "epoch": 0.6645044210201166, + "grad_norm": 0.5592703819274902, + "learning_rate": 0.00015570227936728573, + "loss": 0.3831, + "step": 17210 + }, + { + "epoch": 0.6648905363141434, + "grad_norm": 2.309683322906494, + "learning_rate": 0.00015567653834768397, + "loss": 0.4039, + "step": 17220 + }, + { + "epoch": 0.6652766516081702, + "grad_norm": 2.3134100437164307, + "learning_rate": 0.00015565079732808215, + "loss": 0.1904, + "step": 17230 + }, + { + "epoch": 0.665662766902197, + "grad_norm": 2.232910633087158, + "learning_rate": 0.0001556250563084804, + "loss": 0.34, + "step": 17240 + }, + { + "epoch": 0.6660488821962238, + "grad_norm": 0.4798373579978943, + "learning_rate": 0.0001555993152888786, + "loss": 0.3143, + "step": 17250 + }, + { + "epoch": 0.6664349974902506, + "grad_norm": 2.071753740310669, + "learning_rate": 0.00015557357426927682, + "loss": 0.2645, + "step": 17260 + }, + { + "epoch": 0.6668211127842774, + "grad_norm": 2.1930956840515137, + "learning_rate": 0.00015554783324967504, + "loss": 0.4144, + "step": 17270 + }, + { + "epoch": 0.6672072280783041, + "grad_norm": 1.7874137163162231, + "learning_rate": 0.00015552209223007325, + "loss": 0.273, + "step": 17280 + }, + { + "epoch": 0.667593343372331, + "grad_norm": 1.264596939086914, + "learning_rate": 0.00015549635121047146, + "loss": 0.4186, + "step": 17290 + }, + { + "epoch": 0.6679794586663578, + "grad_norm": 0.5612212419509888, + "learning_rate": 0.00015547061019086965, + "loss": 0.2802, + "step": 17300 + }, + { + "epoch": 0.6683655739603845, + "grad_norm": 1.3782585859298706, + "learning_rate": 0.0001554448691712679, + "loss": 0.3712, + "step": 17310 + }, + { + "epoch": 0.6687516892544114, + "grad_norm": 1.5178605318069458, + "learning_rate": 0.0001554191281516661, + "loss": 0.1694, + "step": 17320 + }, + { + "epoch": 0.6691378045484382, + "grad_norm": 2.1221604347229004, + "learning_rate": 0.00015539338713206432, + "loss": 0.4418, + "step": 17330 + }, + { + "epoch": 0.669523919842465, + "grad_norm": 1.570734977722168, + "learning_rate": 0.00015536764611246253, + "loss": 0.4037, + "step": 17340 + }, + { + "epoch": 0.6699100351364917, + "grad_norm": 0.6928157806396484, + "learning_rate": 0.00015534190509286074, + "loss": 0.5293, + "step": 17350 + }, + { + "epoch": 0.6702961504305186, + "grad_norm": 0.8526401519775391, + "learning_rate": 0.00015531616407325895, + "loss": 0.348, + "step": 17360 + }, + { + "epoch": 0.6706822657245454, + "grad_norm": 1.7482202053070068, + "learning_rate": 0.00015529042305365714, + "loss": 0.352, + "step": 17370 + }, + { + "epoch": 0.6710683810185721, + "grad_norm": 1.724870204925537, + "learning_rate": 0.00015526468203405538, + "loss": 0.3589, + "step": 17380 + }, + { + "epoch": 0.6714544963125989, + "grad_norm": 3.125180721282959, + "learning_rate": 0.0001552389410144536, + "loss": 0.3063, + "step": 17390 + }, + { + "epoch": 0.6718406116066258, + "grad_norm": 2.0817360877990723, + "learning_rate": 0.0001552131999948518, + "loss": 0.2217, + "step": 17400 + }, + { + "epoch": 0.6722267269006525, + "grad_norm": 0.12367000430822372, + "learning_rate": 0.00015518745897525002, + 
"loss": 0.1691, + "step": 17410 + }, + { + "epoch": 0.6726128421946793, + "grad_norm": 0.23093344271183014, + "learning_rate": 0.00015516171795564823, + "loss": 0.2496, + "step": 17420 + }, + { + "epoch": 0.6729989574887061, + "grad_norm": 3.1588997840881348, + "learning_rate": 0.00015513597693604645, + "loss": 0.2868, + "step": 17430 + }, + { + "epoch": 0.673385072782733, + "grad_norm": 1.471999168395996, + "learning_rate": 0.00015511023591644466, + "loss": 0.2785, + "step": 17440 + }, + { + "epoch": 0.6737711880767597, + "grad_norm": 0.4500691294670105, + "learning_rate": 0.00015508449489684287, + "loss": 0.3218, + "step": 17450 + }, + { + "epoch": 0.6741573033707865, + "grad_norm": 2.65533709526062, + "learning_rate": 0.0001550587538772411, + "loss": 0.3194, + "step": 17460 + }, + { + "epoch": 0.6745434186648133, + "grad_norm": 0.45398348569869995, + "learning_rate": 0.0001550330128576393, + "loss": 0.199, + "step": 17470 + }, + { + "epoch": 0.6749295339588401, + "grad_norm": 0.21518200635910034, + "learning_rate": 0.00015500727183803751, + "loss": 0.3043, + "step": 17480 + }, + { + "epoch": 0.6753156492528669, + "grad_norm": 0.13117246329784393, + "learning_rate": 0.00015498153081843573, + "loss": 0.1872, + "step": 17490 + }, + { + "epoch": 0.6757017645468937, + "grad_norm": 0.4857695996761322, + "learning_rate": 0.00015495578979883394, + "loss": 0.5992, + "step": 17500 + }, + { + "epoch": 0.6760878798409204, + "grad_norm": 2.4992752075195312, + "learning_rate": 0.00015493004877923215, + "loss": 0.5057, + "step": 17510 + }, + { + "epoch": 0.6764739951349473, + "grad_norm": 1.9614732265472412, + "learning_rate": 0.00015490430775963037, + "loss": 0.3169, + "step": 17520 + }, + { + "epoch": 0.6768601104289741, + "grad_norm": 0.14168275892734528, + "learning_rate": 0.00015487856674002858, + "loss": 0.271, + "step": 17530 + }, + { + "epoch": 0.6772462257230009, + "grad_norm": 4.064804553985596, + "learning_rate": 0.0001548528257204268, + "loss": 0.3316, + "step": 17540 + }, + { + "epoch": 0.6776323410170277, + "grad_norm": 3.2959964275360107, + "learning_rate": 0.000154827084700825, + "loss": 0.5148, + "step": 17550 + }, + { + "epoch": 0.6780184563110545, + "grad_norm": 0.6234021186828613, + "learning_rate": 0.00015480134368122322, + "loss": 0.362, + "step": 17560 + }, + { + "epoch": 0.6784045716050813, + "grad_norm": 0.642573356628418, + "learning_rate": 0.00015477560266162143, + "loss": 0.2982, + "step": 17570 + }, + { + "epoch": 0.678790686899108, + "grad_norm": 1.098667025566101, + "learning_rate": 0.00015474986164201965, + "loss": 0.1875, + "step": 17580 + }, + { + "epoch": 0.6791768021931349, + "grad_norm": 2.378192186355591, + "learning_rate": 0.00015472412062241786, + "loss": 0.2533, + "step": 17590 + }, + { + "epoch": 0.6795629174871617, + "grad_norm": 1.1783161163330078, + "learning_rate": 0.00015469837960281607, + "loss": 0.402, + "step": 17600 + }, + { + "epoch": 0.6799490327811885, + "grad_norm": 0.1861846148967743, + "learning_rate": 0.0001546726385832143, + "loss": 0.2481, + "step": 17610 + }, + { + "epoch": 0.6803351480752152, + "grad_norm": 0.5785403847694397, + "learning_rate": 0.0001546468975636125, + "loss": 0.375, + "step": 17620 + }, + { + "epoch": 0.6807212633692421, + "grad_norm": 1.9201544523239136, + "learning_rate": 0.00015462115654401071, + "loss": 0.2757, + "step": 17630 + }, + { + "epoch": 0.6811073786632689, + "grad_norm": 2.461735963821411, + "learning_rate": 0.00015459541552440893, + "loss": 0.2193, + "step": 17640 + }, + { + "epoch": 0.6814934939572956, + 
"grad_norm": 2.007638454437256, + "learning_rate": 0.00015456967450480714, + "loss": 0.3561, + "step": 17650 + }, + { + "epoch": 0.6818796092513224, + "grad_norm": 1.3581938743591309, + "learning_rate": 0.00015454393348520535, + "loss": 0.3736, + "step": 17660 + }, + { + "epoch": 0.6822657245453493, + "grad_norm": 0.5637246966362, + "learning_rate": 0.00015451819246560357, + "loss": 0.3116, + "step": 17670 + }, + { + "epoch": 0.682651839839376, + "grad_norm": 1.409740924835205, + "learning_rate": 0.00015449245144600178, + "loss": 0.3281, + "step": 17680 + }, + { + "epoch": 0.6830379551334028, + "grad_norm": 3.2064149379730225, + "learning_rate": 0.0001544667104264, + "loss": 0.4427, + "step": 17690 + }, + { + "epoch": 0.6834240704274296, + "grad_norm": 0.9369992613792419, + "learning_rate": 0.0001544409694067982, + "loss": 0.3424, + "step": 17700 + }, + { + "epoch": 0.6838101857214565, + "grad_norm": 2.4149889945983887, + "learning_rate": 0.00015441522838719642, + "loss": 0.4074, + "step": 17710 + }, + { + "epoch": 0.6841963010154832, + "grad_norm": 0.688360869884491, + "learning_rate": 0.00015438948736759463, + "loss": 0.2205, + "step": 17720 + }, + { + "epoch": 0.68458241630951, + "grad_norm": 2.1444098949432373, + "learning_rate": 0.00015436374634799285, + "loss": 0.5942, + "step": 17730 + }, + { + "epoch": 0.6849685316035368, + "grad_norm": 1.8053444623947144, + "learning_rate": 0.00015433800532839106, + "loss": 0.1665, + "step": 17740 + }, + { + "epoch": 0.6853546468975636, + "grad_norm": 3.5637879371643066, + "learning_rate": 0.0001543122643087893, + "loss": 0.4972, + "step": 17750 + }, + { + "epoch": 0.6857407621915904, + "grad_norm": 1.846845030784607, + "learning_rate": 0.0001542865232891875, + "loss": 0.4105, + "step": 17760 + }, + { + "epoch": 0.6861268774856172, + "grad_norm": 2.2459189891815186, + "learning_rate": 0.0001542607822695857, + "loss": 0.253, + "step": 17770 + }, + { + "epoch": 0.686512992779644, + "grad_norm": 2.3160414695739746, + "learning_rate": 0.00015423504124998391, + "loss": 0.1905, + "step": 17780 + }, + { + "epoch": 0.6868991080736708, + "grad_norm": 1.2804152965545654, + "learning_rate": 0.00015420930023038213, + "loss": 0.2283, + "step": 17790 + }, + { + "epoch": 0.6872852233676976, + "grad_norm": 1.7174758911132812, + "learning_rate": 0.00015418355921078034, + "loss": 0.49, + "step": 17800 + }, + { + "epoch": 0.6876713386617244, + "grad_norm": 3.057098627090454, + "learning_rate": 0.00015415781819117855, + "loss": 0.4398, + "step": 17810 + }, + { + "epoch": 0.6880574539557512, + "grad_norm": 0.9112808704376221, + "learning_rate": 0.0001541320771715768, + "loss": 0.203, + "step": 17820 + }, + { + "epoch": 0.688443569249778, + "grad_norm": 2.899599313735962, + "learning_rate": 0.00015410633615197498, + "loss": 0.4212, + "step": 17830 + }, + { + "epoch": 0.6888296845438048, + "grad_norm": 1.8084157705307007, + "learning_rate": 0.0001540805951323732, + "loss": 0.4948, + "step": 17840 + }, + { + "epoch": 0.6892157998378315, + "grad_norm": 1.2151083946228027, + "learning_rate": 0.0001540548541127714, + "loss": 0.4254, + "step": 17850 + }, + { + "epoch": 0.6896019151318584, + "grad_norm": 1.20271897315979, + "learning_rate": 0.00015402911309316962, + "loss": 0.1753, + "step": 17860 + }, + { + "epoch": 0.6899880304258852, + "grad_norm": 0.7688419222831726, + "learning_rate": 0.00015400337207356783, + "loss": 0.4066, + "step": 17870 + }, + { + "epoch": 0.690374145719912, + "grad_norm": 0.8648087978363037, + "learning_rate": 0.00015397763105396605, + "loss": 
0.5405, + "step": 17880 + }, + { + "epoch": 0.6907602610139387, + "grad_norm": 1.5501036643981934, + "learning_rate": 0.0001539518900343643, + "loss": 0.3761, + "step": 17890 + }, + { + "epoch": 0.6911463763079656, + "grad_norm": 0.5476267337799072, + "learning_rate": 0.00015392614901476247, + "loss": 0.2256, + "step": 17900 + }, + { + "epoch": 0.6915324916019924, + "grad_norm": 2.0248584747314453, + "learning_rate": 0.0001539004079951607, + "loss": 0.5449, + "step": 17910 + }, + { + "epoch": 0.6919186068960191, + "grad_norm": 1.688596248626709, + "learning_rate": 0.0001538746669755589, + "loss": 0.262, + "step": 17920 + }, + { + "epoch": 0.6923047221900459, + "grad_norm": 0.42646175622940063, + "learning_rate": 0.00015384892595595711, + "loss": 0.3714, + "step": 17930 + }, + { + "epoch": 0.6926908374840728, + "grad_norm": 0.9620506167411804, + "learning_rate": 0.00015382318493635535, + "loss": 0.272, + "step": 17940 + }, + { + "epoch": 0.6930769527780996, + "grad_norm": 1.6859287023544312, + "learning_rate": 0.00015379744391675354, + "loss": 0.3123, + "step": 17950 + }, + { + "epoch": 0.6934630680721263, + "grad_norm": 0.6281775832176208, + "learning_rate": 0.00015377170289715178, + "loss": 0.2808, + "step": 17960 + }, + { + "epoch": 0.6938491833661531, + "grad_norm": 3.756242036819458, + "learning_rate": 0.00015374596187754997, + "loss": 0.4971, + "step": 17970 + }, + { + "epoch": 0.69423529866018, + "grad_norm": 0.6022955775260925, + "learning_rate": 0.0001537202208579482, + "loss": 0.2918, + "step": 17980 + }, + { + "epoch": 0.6946214139542067, + "grad_norm": 0.6843704581260681, + "learning_rate": 0.0001536944798383464, + "loss": 0.476, + "step": 17990 + }, + { + "epoch": 0.6950075292482335, + "grad_norm": 1.0234850645065308, + "learning_rate": 0.0001536687388187446, + "loss": 0.2101, + "step": 18000 + }, + { + "epoch": 0.6953936445422603, + "grad_norm": 1.0228936672210693, + "learning_rate": 0.00015364299779914285, + "loss": 0.3958, + "step": 18010 + }, + { + "epoch": 0.6957797598362871, + "grad_norm": 1.1152328252792358, + "learning_rate": 0.00015361725677954103, + "loss": 0.2967, + "step": 18020 + }, + { + "epoch": 0.6961658751303139, + "grad_norm": 1.7190260887145996, + "learning_rate": 0.00015359151575993927, + "loss": 0.5281, + "step": 18030 + }, + { + "epoch": 0.6965519904243407, + "grad_norm": 0.6654171943664551, + "learning_rate": 0.00015356577474033746, + "loss": 0.3467, + "step": 18040 + }, + { + "epoch": 0.6969381057183675, + "grad_norm": 0.3305549621582031, + "learning_rate": 0.0001535400337207357, + "loss": 0.3576, + "step": 18050 + }, + { + "epoch": 0.6973242210123943, + "grad_norm": 0.3116997480392456, + "learning_rate": 0.0001535142927011339, + "loss": 0.5372, + "step": 18060 + }, + { + "epoch": 0.6977103363064211, + "grad_norm": 0.4224954843521118, + "learning_rate": 0.0001534885516815321, + "loss": 0.3866, + "step": 18070 + }, + { + "epoch": 0.6980964516004479, + "grad_norm": 4.249162197113037, + "learning_rate": 0.00015346281066193034, + "loss": 0.4236, + "step": 18080 + }, + { + "epoch": 0.6984825668944747, + "grad_norm": 1.109113335609436, + "learning_rate": 0.00015343706964232853, + "loss": 0.2882, + "step": 18090 + }, + { + "epoch": 0.6988686821885015, + "grad_norm": 1.3546028137207031, + "learning_rate": 0.00015341132862272677, + "loss": 0.3432, + "step": 18100 + }, + { + "epoch": 0.6992547974825283, + "grad_norm": 2.943016290664673, + "learning_rate": 0.00015338558760312495, + "loss": 0.3287, + "step": 18110 + }, + { + "epoch": 0.699640912776555, + 
"grad_norm": 1.1259021759033203, + "learning_rate": 0.0001533598465835232, + "loss": 0.4613, + "step": 18120 + }, + { + "epoch": 0.7000270280705819, + "grad_norm": 1.4867910146713257, + "learning_rate": 0.0001533341055639214, + "loss": 0.4309, + "step": 18130 + }, + { + "epoch": 0.7004131433646087, + "grad_norm": 2.6913414001464844, + "learning_rate": 0.0001533083645443196, + "loss": 0.2154, + "step": 18140 + }, + { + "epoch": 0.7007992586586355, + "grad_norm": 1.495466947555542, + "learning_rate": 0.00015328262352471783, + "loss": 0.3207, + "step": 18150 + }, + { + "epoch": 0.7011853739526622, + "grad_norm": 1.023193120956421, + "learning_rate": 0.00015325688250511602, + "loss": 0.2067, + "step": 18160 + }, + { + "epoch": 0.7015714892466891, + "grad_norm": 1.603235125541687, + "learning_rate": 0.00015323114148551426, + "loss": 0.4577, + "step": 18170 + }, + { + "epoch": 0.7019576045407159, + "grad_norm": 0.5976241230964661, + "learning_rate": 0.00015320540046591245, + "loss": 0.2282, + "step": 18180 + }, + { + "epoch": 0.7023437198347426, + "grad_norm": 2.561659574508667, + "learning_rate": 0.0001531796594463107, + "loss": 0.4045, + "step": 18190 + }, + { + "epoch": 0.7027298351287694, + "grad_norm": 1.3893495798110962, + "learning_rate": 0.0001531539184267089, + "loss": 0.2419, + "step": 18200 + }, + { + "epoch": 0.7031159504227963, + "grad_norm": 0.7786352038383484, + "learning_rate": 0.00015312817740710709, + "loss": 0.1653, + "step": 18210 + }, + { + "epoch": 0.703502065716823, + "grad_norm": 0.6525956988334656, + "learning_rate": 0.00015310243638750533, + "loss": 0.5418, + "step": 18220 + }, + { + "epoch": 0.7038881810108498, + "grad_norm": 0.38933584094047546, + "learning_rate": 0.0001530766953679035, + "loss": 0.2952, + "step": 18230 + }, + { + "epoch": 0.7042742963048766, + "grad_norm": 2.0752692222595215, + "learning_rate": 0.00015305095434830175, + "loss": 0.211, + "step": 18240 + }, + { + "epoch": 0.7046604115989035, + "grad_norm": 0.9095730781555176, + "learning_rate": 0.00015302521332869997, + "loss": 0.2723, + "step": 18250 + }, + { + "epoch": 0.7050465268929302, + "grad_norm": 1.6840119361877441, + "learning_rate": 0.00015299947230909818, + "loss": 0.3362, + "step": 18260 + }, + { + "epoch": 0.705432642186957, + "grad_norm": 2.0353269577026367, + "learning_rate": 0.0001529737312894964, + "loss": 0.2407, + "step": 18270 + }, + { + "epoch": 0.7058187574809838, + "grad_norm": 3.0865590572357178, + "learning_rate": 0.00015294799026989458, + "loss": 0.3426, + "step": 18280 + }, + { + "epoch": 0.7062048727750107, + "grad_norm": 1.6488090753555298, + "learning_rate": 0.00015292224925029282, + "loss": 0.4275, + "step": 18290 + }, + { + "epoch": 0.7065909880690374, + "grad_norm": 0.5494143962860107, + "learning_rate": 0.000152896508230691, + "loss": 0.412, + "step": 18300 + }, + { + "epoch": 0.7069771033630642, + "grad_norm": 3.111301898956299, + "learning_rate": 0.00015287076721108925, + "loss": 0.4615, + "step": 18310 + }, + { + "epoch": 0.707363218657091, + "grad_norm": 1.74229097366333, + "learning_rate": 0.00015284502619148746, + "loss": 0.3194, + "step": 18320 + }, + { + "epoch": 0.7077493339511178, + "grad_norm": 1.8455474376678467, + "learning_rate": 0.00015281928517188567, + "loss": 0.2817, + "step": 18330 + }, + { + "epoch": 0.7081354492451446, + "grad_norm": 1.778723120689392, + "learning_rate": 0.0001527935441522839, + "loss": 0.441, + "step": 18340 + }, + { + "epoch": 0.7085215645391714, + "grad_norm": 0.7885593771934509, + "learning_rate": 0.00015276780313268207, + 
"loss": 0.3121, + "step": 18350 + }, + { + "epoch": 0.7089076798331982, + "grad_norm": 1.9262609481811523, + "learning_rate": 0.0001527420621130803, + "loss": 0.3271, + "step": 18360 + }, + { + "epoch": 0.709293795127225, + "grad_norm": 0.10278096795082092, + "learning_rate": 0.0001527163210934785, + "loss": 0.2602, + "step": 18370 + }, + { + "epoch": 0.7096799104212518, + "grad_norm": 1.2394765615463257, + "learning_rate": 0.00015269058007387674, + "loss": 0.2641, + "step": 18380 + }, + { + "epoch": 0.7100660257152785, + "grad_norm": 2.0335285663604736, + "learning_rate": 0.00015266483905427495, + "loss": 0.1926, + "step": 18390 + }, + { + "epoch": 0.7104521410093054, + "grad_norm": 7.205105781555176, + "learning_rate": 0.00015263909803467317, + "loss": 0.4772, + "step": 18400 + }, + { + "epoch": 0.7108382563033322, + "grad_norm": 1.1549599170684814, + "learning_rate": 0.00015261335701507138, + "loss": 0.2775, + "step": 18410 + }, + { + "epoch": 0.711224371597359, + "grad_norm": 2.8363780975341797, + "learning_rate": 0.00015258761599546957, + "loss": 0.3533, + "step": 18420 + }, + { + "epoch": 0.7116104868913857, + "grad_norm": 0.2606666684150696, + "learning_rate": 0.0001525618749758678, + "loss": 0.1421, + "step": 18430 + }, + { + "epoch": 0.7119966021854126, + "grad_norm": 1.2934225797653198, + "learning_rate": 0.00015253613395626602, + "loss": 0.3066, + "step": 18440 + }, + { + "epoch": 0.7123827174794394, + "grad_norm": 3.8246026039123535, + "learning_rate": 0.00015251039293666423, + "loss": 0.534, + "step": 18450 + }, + { + "epoch": 0.7127688327734661, + "grad_norm": 2.2535433769226074, + "learning_rate": 0.00015248465191706245, + "loss": 0.4795, + "step": 18460 + }, + { + "epoch": 0.7131549480674929, + "grad_norm": 1.6749187707901, + "learning_rate": 0.00015245891089746066, + "loss": 0.4091, + "step": 18470 + }, + { + "epoch": 0.7135410633615198, + "grad_norm": 1.1795039176940918, + "learning_rate": 0.00015243316987785887, + "loss": 0.2612, + "step": 18480 + }, + { + "epoch": 0.7139271786555466, + "grad_norm": 1.9280221462249756, + "learning_rate": 0.00015240742885825709, + "loss": 0.2068, + "step": 18490 + }, + { + "epoch": 0.7143132939495733, + "grad_norm": 1.1188548803329468, + "learning_rate": 0.0001523816878386553, + "loss": 0.3193, + "step": 18500 + }, + { + "epoch": 0.7146994092436001, + "grad_norm": 0.2429720014333725, + "learning_rate": 0.0001523559468190535, + "loss": 0.3129, + "step": 18510 + }, + { + "epoch": 0.715085524537627, + "grad_norm": 4.09410285949707, + "learning_rate": 0.00015233020579945173, + "loss": 0.2437, + "step": 18520 + }, + { + "epoch": 0.7154716398316537, + "grad_norm": 2.3252813816070557, + "learning_rate": 0.00015230446477984994, + "loss": 0.4649, + "step": 18530 + }, + { + "epoch": 0.7158577551256805, + "grad_norm": 0.5725727677345276, + "learning_rate": 0.00015227872376024815, + "loss": 0.3291, + "step": 18540 + }, + { + "epoch": 0.7162438704197073, + "grad_norm": 0.9253637194633484, + "learning_rate": 0.00015225298274064637, + "loss": 0.3486, + "step": 18550 + }, + { + "epoch": 0.7166299857137342, + "grad_norm": 2.3353309631347656, + "learning_rate": 0.00015222724172104458, + "loss": 0.253, + "step": 18560 + }, + { + "epoch": 0.7170161010077609, + "grad_norm": 0.7312389016151428, + "learning_rate": 0.0001522015007014428, + "loss": 0.2817, + "step": 18570 + }, + { + "epoch": 0.7174022163017877, + "grad_norm": 0.6564128994941711, + "learning_rate": 0.000152175759681841, + "loss": 0.2896, + "step": 18580 + }, + { + "epoch": 0.7177883315958145, + 
"grad_norm": 3.4619979858398438, + "learning_rate": 0.00015215001866223922, + "loss": 0.5028, + "step": 18590 + }, + { + "epoch": 0.7181744468898413, + "grad_norm": 6.910060882568359, + "learning_rate": 0.00015212427764263743, + "loss": 0.2467, + "step": 18600 + }, + { + "epoch": 0.7185605621838681, + "grad_norm": 2.022186279296875, + "learning_rate": 0.00015209853662303565, + "loss": 0.3406, + "step": 18610 + }, + { + "epoch": 0.7189466774778949, + "grad_norm": 1.2240760326385498, + "learning_rate": 0.00015207279560343386, + "loss": 0.3391, + "step": 18620 + }, + { + "epoch": 0.7193327927719217, + "grad_norm": 0.7356148958206177, + "learning_rate": 0.00015204705458383207, + "loss": 0.2679, + "step": 18630 + }, + { + "epoch": 0.7197189080659485, + "grad_norm": 0.963387131690979, + "learning_rate": 0.00015202131356423029, + "loss": 0.4088, + "step": 18640 + }, + { + "epoch": 0.7201050233599753, + "grad_norm": 3.0437800884246826, + "learning_rate": 0.0001519955725446285, + "loss": 0.2591, + "step": 18650 + }, + { + "epoch": 0.720491138654002, + "grad_norm": 2.5874569416046143, + "learning_rate": 0.0001519698315250267, + "loss": 0.2377, + "step": 18660 + }, + { + "epoch": 0.7208772539480289, + "grad_norm": 2.3215808868408203, + "learning_rate": 0.00015194409050542493, + "loss": 0.5335, + "step": 18670 + }, + { + "epoch": 0.7212633692420557, + "grad_norm": 1.9501638412475586, + "learning_rate": 0.00015191834948582314, + "loss": 0.4657, + "step": 18680 + }, + { + "epoch": 0.7216494845360825, + "grad_norm": 1.8396021127700806, + "learning_rate": 0.00015189260846622135, + "loss": 0.2719, + "step": 18690 + }, + { + "epoch": 0.7220355998301092, + "grad_norm": 0.9337745904922485, + "learning_rate": 0.00015186686744661957, + "loss": 0.3672, + "step": 18700 + }, + { + "epoch": 0.7224217151241361, + "grad_norm": 1.892098069190979, + "learning_rate": 0.00015184112642701778, + "loss": 0.6085, + "step": 18710 + }, + { + "epoch": 0.7228078304181629, + "grad_norm": 1.051630973815918, + "learning_rate": 0.000151815385407416, + "loss": 0.2422, + "step": 18720 + }, + { + "epoch": 0.7231939457121896, + "grad_norm": 0.8714147210121155, + "learning_rate": 0.0001517896443878142, + "loss": 0.4046, + "step": 18730 + }, + { + "epoch": 0.7235800610062164, + "grad_norm": 0.5002617835998535, + "learning_rate": 0.00015176390336821242, + "loss": 0.3708, + "step": 18740 + }, + { + "epoch": 0.7239661763002433, + "grad_norm": 1.3960262537002563, + "learning_rate": 0.00015173816234861066, + "loss": 0.3206, + "step": 18750 + }, + { + "epoch": 0.7243522915942701, + "grad_norm": 0.7899012565612793, + "learning_rate": 0.00015171242132900885, + "loss": 0.2875, + "step": 18760 + }, + { + "epoch": 0.7247384068882968, + "grad_norm": 0.7216291427612305, + "learning_rate": 0.00015168668030940706, + "loss": 0.3885, + "step": 18770 + }, + { + "epoch": 0.7251245221823236, + "grad_norm": 0.36028966307640076, + "learning_rate": 0.00015166093928980527, + "loss": 0.3542, + "step": 18780 + }, + { + "epoch": 0.7255106374763505, + "grad_norm": 1.378724455833435, + "learning_rate": 0.00015163519827020349, + "loss": 0.2149, + "step": 18790 + }, + { + "epoch": 0.7258967527703772, + "grad_norm": 1.544819712638855, + "learning_rate": 0.0001516094572506017, + "loss": 0.278, + "step": 18800 + }, + { + "epoch": 0.726282868064404, + "grad_norm": 0.5819025039672852, + "learning_rate": 0.0001515837162309999, + "loss": 0.2161, + "step": 18810 + }, + { + "epoch": 0.7266689833584308, + "grad_norm": 0.8350955843925476, + "learning_rate": 
0.00015155797521139815, + "loss": 0.4134, + "step": 18820 + }, + { + "epoch": 0.7270550986524577, + "grad_norm": 2.4110195636749268, + "learning_rate": 0.00015153223419179634, + "loss": 0.3928, + "step": 18830 + }, + { + "epoch": 0.7274412139464844, + "grad_norm": 0.7913835048675537, + "learning_rate": 0.00015150649317219455, + "loss": 0.2336, + "step": 18840 + }, + { + "epoch": 0.7278273292405112, + "grad_norm": 2.13431978225708, + "learning_rate": 0.00015148075215259277, + "loss": 0.4758, + "step": 18850 + }, + { + "epoch": 0.7282134445345381, + "grad_norm": 1.2756295204162598, + "learning_rate": 0.00015145501113299098, + "loss": 0.3139, + "step": 18860 + }, + { + "epoch": 0.7285995598285648, + "grad_norm": 0.3661370277404785, + "learning_rate": 0.0001514292701133892, + "loss": 0.3913, + "step": 18870 + }, + { + "epoch": 0.7289856751225916, + "grad_norm": 1.2238267660140991, + "learning_rate": 0.0001514035290937874, + "loss": 0.1893, + "step": 18880 + }, + { + "epoch": 0.7293717904166184, + "grad_norm": 2.8876595497131348, + "learning_rate": 0.00015137778807418565, + "loss": 0.2567, + "step": 18890 + }, + { + "epoch": 0.7297579057106452, + "grad_norm": 1.248967170715332, + "learning_rate": 0.00015135204705458383, + "loss": 0.2004, + "step": 18900 + }, + { + "epoch": 0.730144021004672, + "grad_norm": 0.9446873068809509, + "learning_rate": 0.00015132630603498205, + "loss": 0.386, + "step": 18910 + }, + { + "epoch": 0.7305301362986988, + "grad_norm": 4.592974662780762, + "learning_rate": 0.00015130056501538026, + "loss": 0.4549, + "step": 18920 + }, + { + "epoch": 0.7309162515927256, + "grad_norm": 0.8000105619430542, + "learning_rate": 0.00015127482399577847, + "loss": 0.2795, + "step": 18930 + }, + { + "epoch": 0.7313023668867524, + "grad_norm": 0.5600059628486633, + "learning_rate": 0.0001512490829761767, + "loss": 0.3682, + "step": 18940 + }, + { + "epoch": 0.7316884821807792, + "grad_norm": 0.4072086811065674, + "learning_rate": 0.0001512233419565749, + "loss": 0.3186, + "step": 18950 + }, + { + "epoch": 0.732074597474806, + "grad_norm": 3.387422561645508, + "learning_rate": 0.00015119760093697314, + "loss": 0.3195, + "step": 18960 + }, + { + "epoch": 0.7324607127688327, + "grad_norm": 0.5224191546440125, + "learning_rate": 0.00015117185991737133, + "loss": 0.1792, + "step": 18970 + }, + { + "epoch": 0.7328468280628596, + "grad_norm": 0.5431543588638306, + "learning_rate": 0.00015114611889776954, + "loss": 0.2775, + "step": 18980 + }, + { + "epoch": 0.7332329433568864, + "grad_norm": 0.13088488578796387, + "learning_rate": 0.00015112037787816775, + "loss": 0.3511, + "step": 18990 + }, + { + "epoch": 0.7336190586509131, + "grad_norm": 0.6414417624473572, + "learning_rate": 0.00015109463685856597, + "loss": 0.4042, + "step": 19000 + }, + { + "epoch": 0.7340051739449399, + "grad_norm": 0.2358855903148651, + "learning_rate": 0.0001510688958389642, + "loss": 0.2567, + "step": 19010 + }, + { + "epoch": 0.7343912892389668, + "grad_norm": 1.343703031539917, + "learning_rate": 0.0001510431548193624, + "loss": 0.3658, + "step": 19020 + }, + { + "epoch": 0.7347774045329936, + "grad_norm": 2.5982301235198975, + "learning_rate": 0.00015101741379976063, + "loss": 0.3664, + "step": 19030 + }, + { + "epoch": 0.7351635198270203, + "grad_norm": 1.3639850616455078, + "learning_rate": 0.00015099167278015882, + "loss": 0.1809, + "step": 19040 + }, + { + "epoch": 0.7355496351210471, + "grad_norm": 1.322572946548462, + "learning_rate": 0.00015096593176055703, + "loss": 0.213, + "step": 19050 + }, + { + 
"epoch": 0.735935750415074, + "grad_norm": 0.6858059763908386, + "learning_rate": 0.00015094019074095527, + "loss": 0.1494, + "step": 19060 + }, + { + "epoch": 0.7363218657091007, + "grad_norm": 3.9815866947174072, + "learning_rate": 0.00015091444972135346, + "loss": 0.4879, + "step": 19070 + }, + { + "epoch": 0.7367079810031275, + "grad_norm": 0.5155348777770996, + "learning_rate": 0.0001508887087017517, + "loss": 0.1951, + "step": 19080 + }, + { + "epoch": 0.7370940962971543, + "grad_norm": 1.1120082139968872, + "learning_rate": 0.00015086296768214988, + "loss": 0.3156, + "step": 19090 + }, + { + "epoch": 0.7374802115911812, + "grad_norm": 2.1396732330322266, + "learning_rate": 0.00015083722666254813, + "loss": 0.3528, + "step": 19100 + }, + { + "epoch": 0.7378663268852079, + "grad_norm": 1.8543074131011963, + "learning_rate": 0.0001508114856429463, + "loss": 0.2778, + "step": 19110 + }, + { + "epoch": 0.7382524421792347, + "grad_norm": 0.2955397963523865, + "learning_rate": 0.00015078574462334452, + "loss": 0.3267, + "step": 19120 + }, + { + "epoch": 0.7386385574732616, + "grad_norm": 0.6277685165405273, + "learning_rate": 0.00015076000360374277, + "loss": 0.3316, + "step": 19130 + }, + { + "epoch": 0.7390246727672883, + "grad_norm": 1.4339113235473633, + "learning_rate": 0.00015073426258414095, + "loss": 0.3227, + "step": 19140 + }, + { + "epoch": 0.7394107880613151, + "grad_norm": 2.74206805229187, + "learning_rate": 0.0001507085215645392, + "loss": 0.2086, + "step": 19150 + }, + { + "epoch": 0.7397969033553419, + "grad_norm": 1.0108954906463623, + "learning_rate": 0.00015068278054493738, + "loss": 0.2697, + "step": 19160 + }, + { + "epoch": 0.7401830186493688, + "grad_norm": 1.726650357246399, + "learning_rate": 0.00015065703952533562, + "loss": 0.1312, + "step": 19170 + }, + { + "epoch": 0.7405691339433955, + "grad_norm": 1.8226735591888428, + "learning_rate": 0.0001506312985057338, + "loss": 0.4488, + "step": 19180 + }, + { + "epoch": 0.7409552492374223, + "grad_norm": 1.4812517166137695, + "learning_rate": 0.00015060555748613205, + "loss": 0.472, + "step": 19190 + }, + { + "epoch": 0.741341364531449, + "grad_norm": 1.184036374092102, + "learning_rate": 0.00015057981646653026, + "loss": 0.1342, + "step": 19200 + }, + { + "epoch": 0.7417274798254759, + "grad_norm": 2.925368309020996, + "learning_rate": 0.00015055407544692844, + "loss": 0.4114, + "step": 19210 + }, + { + "epoch": 0.7421135951195027, + "grad_norm": 1.4891862869262695, + "learning_rate": 0.00015052833442732669, + "loss": 0.3454, + "step": 19220 + }, + { + "epoch": 0.7424997104135295, + "grad_norm": 2.9221529960632324, + "learning_rate": 0.00015050259340772487, + "loss": 0.4622, + "step": 19230 + }, + { + "epoch": 0.7428858257075562, + "grad_norm": 1.3214635848999023, + "learning_rate": 0.0001504768523881231, + "loss": 0.2783, + "step": 19240 + }, + { + "epoch": 0.7432719410015831, + "grad_norm": 1.1919734477996826, + "learning_rate": 0.00015045111136852133, + "loss": 0.3667, + "step": 19250 + }, + { + "epoch": 0.7436580562956099, + "grad_norm": 1.7075424194335938, + "learning_rate": 0.00015042537034891954, + "loss": 0.1867, + "step": 19260 + }, + { + "epoch": 0.7440441715896366, + "grad_norm": 0.6810876727104187, + "learning_rate": 0.00015039962932931775, + "loss": 0.2231, + "step": 19270 + }, + { + "epoch": 0.7444302868836634, + "grad_norm": 1.3421598672866821, + "learning_rate": 0.00015037388830971594, + "loss": 0.2138, + "step": 19280 + }, + { + "epoch": 0.7448164021776903, + "grad_norm": 1.5983080863952637, + 
"learning_rate": 0.00015034814729011418, + "loss": 0.2799, + "step": 19290 + }, + { + "epoch": 0.7452025174717171, + "grad_norm": 2.0319182872772217, + "learning_rate": 0.00015032240627051236, + "loss": 0.213, + "step": 19300 + }, + { + "epoch": 0.7455886327657438, + "grad_norm": 0.6433222889900208, + "learning_rate": 0.0001502966652509106, + "loss": 0.3604, + "step": 19310 + }, + { + "epoch": 0.7459747480597706, + "grad_norm": 1.1373825073242188, + "learning_rate": 0.00015027092423130882, + "loss": 0.3007, + "step": 19320 + }, + { + "epoch": 0.7463608633537975, + "grad_norm": 1.7230875492095947, + "learning_rate": 0.00015024518321170703, + "loss": 0.2471, + "step": 19330 + }, + { + "epoch": 0.7467469786478242, + "grad_norm": 1.73224937915802, + "learning_rate": 0.00015021944219210524, + "loss": 0.4877, + "step": 19340 + }, + { + "epoch": 0.747133093941851, + "grad_norm": 0.9023095369338989, + "learning_rate": 0.00015019370117250343, + "loss": 0.352, + "step": 19350 + }, + { + "epoch": 0.7475192092358778, + "grad_norm": 1.898983120918274, + "learning_rate": 0.00015016796015290167, + "loss": 0.3981, + "step": 19360 + }, + { + "epoch": 0.7479053245299047, + "grad_norm": 1.8544923067092896, + "learning_rate": 0.00015014221913329986, + "loss": 0.3015, + "step": 19370 + }, + { + "epoch": 0.7482914398239314, + "grad_norm": 1.1380795240402222, + "learning_rate": 0.0001501164781136981, + "loss": 0.3173, + "step": 19380 + }, + { + "epoch": 0.7486775551179582, + "grad_norm": 0.5071800351142883, + "learning_rate": 0.0001500907370940963, + "loss": 0.46, + "step": 19390 + }, + { + "epoch": 0.7490636704119851, + "grad_norm": 0.30739355087280273, + "learning_rate": 0.00015006499607449452, + "loss": 0.4942, + "step": 19400 + }, + { + "epoch": 0.7494497857060118, + "grad_norm": 1.1223585605621338, + "learning_rate": 0.00015003925505489274, + "loss": 0.3118, + "step": 19410 + }, + { + "epoch": 0.7498359010000386, + "grad_norm": 1.019545555114746, + "learning_rate": 0.00015001351403529092, + "loss": 0.3115, + "step": 19420 + }, + { + "epoch": 0.7502220162940654, + "grad_norm": 0.4567502439022064, + "learning_rate": 0.00014998777301568916, + "loss": 0.2932, + "step": 19430 + }, + { + "epoch": 0.7506081315880923, + "grad_norm": 1.669258952140808, + "learning_rate": 0.00014996203199608738, + "loss": 0.2889, + "step": 19440 + }, + { + "epoch": 0.750994246882119, + "grad_norm": 0.2787584364414215, + "learning_rate": 0.0001499362909764856, + "loss": 0.3482, + "step": 19450 + }, + { + "epoch": 0.7513803621761458, + "grad_norm": 1.6648303270339966, + "learning_rate": 0.0001499105499568838, + "loss": 0.3136, + "step": 19460 + }, + { + "epoch": 0.7517664774701726, + "grad_norm": 2.6357266902923584, + "learning_rate": 0.00014988480893728202, + "loss": 0.4643, + "step": 19470 + }, + { + "epoch": 0.7521525927641994, + "grad_norm": 1.8017394542694092, + "learning_rate": 0.00014985906791768023, + "loss": 0.469, + "step": 19480 + }, + { + "epoch": 0.7525387080582262, + "grad_norm": 1.8467847108840942, + "learning_rate": 0.00014983332689807842, + "loss": 0.1495, + "step": 19490 + }, + { + "epoch": 0.752924823352253, + "grad_norm": 2.9446980953216553, + "learning_rate": 0.00014980758587847666, + "loss": 0.3162, + "step": 19500 + }, + { + "epoch": 0.7533109386462797, + "grad_norm": 1.4076721668243408, + "learning_rate": 0.00014978184485887487, + "loss": 0.2356, + "step": 19510 + }, + { + "epoch": 0.7536970539403066, + "grad_norm": 1.2669463157653809, + "learning_rate": 0.00014975610383927308, + "loss": 0.3056, + "step": 19520 
+ }, + { + "epoch": 0.7540831692343334, + "grad_norm": 1.1390401124954224, + "learning_rate": 0.0001497303628196713, + "loss": 0.2785, + "step": 19530 + }, + { + "epoch": 0.7544692845283602, + "grad_norm": 3.095099925994873, + "learning_rate": 0.0001497046218000695, + "loss": 0.489, + "step": 19540 + }, + { + "epoch": 0.7548553998223869, + "grad_norm": 1.5737907886505127, + "learning_rate": 0.00014967888078046772, + "loss": 0.3981, + "step": 19550 + }, + { + "epoch": 0.7552415151164138, + "grad_norm": 1.4953045845031738, + "learning_rate": 0.00014965313976086594, + "loss": 0.3153, + "step": 19560 + }, + { + "epoch": 0.7556276304104406, + "grad_norm": 0.7709154486656189, + "learning_rate": 0.00014962739874126415, + "loss": 0.2904, + "step": 19570 + }, + { + "epoch": 0.7560137457044673, + "grad_norm": 2.118950366973877, + "learning_rate": 0.00014960165772166236, + "loss": 0.3736, + "step": 19580 + }, + { + "epoch": 0.7563998609984941, + "grad_norm": 2.9463138580322266, + "learning_rate": 0.00014957591670206058, + "loss": 0.3186, + "step": 19590 + }, + { + "epoch": 0.756785976292521, + "grad_norm": 1.3784689903259277, + "learning_rate": 0.0001495501756824588, + "loss": 0.3589, + "step": 19600 + }, + { + "epoch": 0.7571720915865477, + "grad_norm": 2.35467267036438, + "learning_rate": 0.000149524434662857, + "loss": 0.1282, + "step": 19610 + }, + { + "epoch": 0.7575582068805745, + "grad_norm": 0.7167999148368835, + "learning_rate": 0.00014949869364325522, + "loss": 0.3775, + "step": 19620 + }, + { + "epoch": 0.7579443221746013, + "grad_norm": 1.242785096168518, + "learning_rate": 0.00014947295262365343, + "loss": 0.3814, + "step": 19630 + }, + { + "epoch": 0.7583304374686282, + "grad_norm": 2.7875797748565674, + "learning_rate": 0.00014944721160405164, + "loss": 0.4764, + "step": 19640 + }, + { + "epoch": 0.7587165527626549, + "grad_norm": 1.2169462442398071, + "learning_rate": 0.00014942147058444986, + "loss": 0.3132, + "step": 19650 + }, + { + "epoch": 0.7591026680566817, + "grad_norm": 3.146204948425293, + "learning_rate": 0.00014939572956484807, + "loss": 0.248, + "step": 19660 + }, + { + "epoch": 0.7594887833507086, + "grad_norm": 2.2177276611328125, + "learning_rate": 0.00014936998854524628, + "loss": 0.3755, + "step": 19670 + }, + { + "epoch": 0.7598748986447353, + "grad_norm": 0.8063843250274658, + "learning_rate": 0.0001493442475256445, + "loss": 0.6341, + "step": 19680 + }, + { + "epoch": 0.7602610139387621, + "grad_norm": 0.18064215779304504, + "learning_rate": 0.0001493185065060427, + "loss": 0.3565, + "step": 19690 + }, + { + "epoch": 0.7606471292327889, + "grad_norm": 1.4401954412460327, + "learning_rate": 0.00014929276548644092, + "loss": 0.1873, + "step": 19700 + }, + { + "epoch": 0.7610332445268158, + "grad_norm": 0.7269515991210938, + "learning_rate": 0.00014926702446683914, + "loss": 0.2765, + "step": 19710 + }, + { + "epoch": 0.7614193598208425, + "grad_norm": 1.2779995203018188, + "learning_rate": 0.00014924128344723735, + "loss": 0.3633, + "step": 19720 + }, + { + "epoch": 0.7618054751148693, + "grad_norm": 1.3330426216125488, + "learning_rate": 0.00014921554242763556, + "loss": 0.3736, + "step": 19730 + }, + { + "epoch": 0.7621915904088961, + "grad_norm": 1.4269347190856934, + "learning_rate": 0.00014918980140803378, + "loss": 0.2572, + "step": 19740 + }, + { + "epoch": 0.7625777057029229, + "grad_norm": 0.9075976610183716, + "learning_rate": 0.000149164060388432, + "loss": 0.3609, + "step": 19750 + }, + { + "epoch": 0.7629638209969497, + "grad_norm": 1.7566559314727783, 
+ "learning_rate": 0.0001491383193688302, + "loss": 0.3134, + "step": 19760 + }, + { + "epoch": 0.7633499362909765, + "grad_norm": 2.4620914459228516, + "learning_rate": 0.00014911257834922842, + "loss": 0.3559, + "step": 19770 + }, + { + "epoch": 0.7637360515850032, + "grad_norm": 0.9474597573280334, + "learning_rate": 0.00014908683732962663, + "loss": 0.3821, + "step": 19780 + }, + { + "epoch": 0.7641221668790301, + "grad_norm": 0.6298363208770752, + "learning_rate": 0.00014906109631002484, + "loss": 0.2801, + "step": 19790 + }, + { + "epoch": 0.7645082821730569, + "grad_norm": 0.213288351893425, + "learning_rate": 0.00014903535529042306, + "loss": 0.368, + "step": 19800 + }, + { + "epoch": 0.7648943974670837, + "grad_norm": 1.4412375688552856, + "learning_rate": 0.00014900961427082127, + "loss": 0.3972, + "step": 19810 + }, + { + "epoch": 0.7652805127611104, + "grad_norm": 1.6808812618255615, + "learning_rate": 0.00014898387325121948, + "loss": 0.5302, + "step": 19820 + }, + { + "epoch": 0.7656666280551373, + "grad_norm": 0.922726571559906, + "learning_rate": 0.0001489581322316177, + "loss": 0.3299, + "step": 19830 + }, + { + "epoch": 0.7660527433491641, + "grad_norm": 0.5058152079582214, + "learning_rate": 0.0001489323912120159, + "loss": 0.4485, + "step": 19840 + }, + { + "epoch": 0.7664388586431908, + "grad_norm": 1.3025776147842407, + "learning_rate": 0.00014890665019241412, + "loss": 0.266, + "step": 19850 + }, + { + "epoch": 0.7668249739372176, + "grad_norm": 2.0282516479492188, + "learning_rate": 0.00014888090917281234, + "loss": 0.7752, + "step": 19860 + }, + { + "epoch": 0.7672110892312445, + "grad_norm": 0.6557582020759583, + "learning_rate": 0.00014885516815321058, + "loss": 0.256, + "step": 19870 + }, + { + "epoch": 0.7675972045252712, + "grad_norm": 0.13688494265079498, + "learning_rate": 0.00014882942713360876, + "loss": 0.263, + "step": 19880 + }, + { + "epoch": 0.767983319819298, + "grad_norm": 0.04091642051935196, + "learning_rate": 0.00014880368611400698, + "loss": 0.276, + "step": 19890 + }, + { + "epoch": 0.7683694351133248, + "grad_norm": 3.054969072341919, + "learning_rate": 0.0001487779450944052, + "loss": 0.5198, + "step": 19900 + }, + { + "epoch": 0.7687555504073517, + "grad_norm": 0.5707372426986694, + "learning_rate": 0.0001487522040748034, + "loss": 0.2715, + "step": 19910 + }, + { + "epoch": 0.7691416657013784, + "grad_norm": 0.477830708026886, + "learning_rate": 0.00014872646305520162, + "loss": 0.2517, + "step": 19920 + }, + { + "epoch": 0.7695277809954052, + "grad_norm": 1.0550785064697266, + "learning_rate": 0.00014870072203559983, + "loss": 0.2552, + "step": 19930 + }, + { + "epoch": 0.7699138962894321, + "grad_norm": 1.9678715467453003, + "learning_rate": 0.00014867498101599807, + "loss": 0.4487, + "step": 19940 + }, + { + "epoch": 0.7703000115834588, + "grad_norm": 0.4506283104419708, + "learning_rate": 0.00014864923999639626, + "loss": 0.2632, + "step": 19950 + }, + { + "epoch": 0.7706861268774856, + "grad_norm": 1.8445035219192505, + "learning_rate": 0.0001486234989767945, + "loss": 0.3315, + "step": 19960 + }, + { + "epoch": 0.7710722421715124, + "grad_norm": 2.6394076347351074, + "learning_rate": 0.00014859775795719268, + "loss": 0.3216, + "step": 19970 + }, + { + "epoch": 0.7714583574655393, + "grad_norm": 0.6717782020568848, + "learning_rate": 0.0001485720169375909, + "loss": 0.3531, + "step": 19980 + }, + { + "epoch": 0.771844472759566, + "grad_norm": 0.5959204435348511, + "learning_rate": 0.0001485462759179891, + "loss": 0.2095, + "step": 
19990 + }, + { + "epoch": 0.7722305880535928, + "grad_norm": 1.874375581741333, + "learning_rate": 0.00014852053489838732, + "loss": 0.4845, + "step": 20000 + }, + { + "epoch": 0.7726167033476196, + "grad_norm": 2.6810474395751953, + "learning_rate": 0.00014849479387878556, + "loss": 0.471, + "step": 20010 + }, + { + "epoch": 0.7730028186416464, + "grad_norm": 0.5498594045639038, + "learning_rate": 0.00014846905285918375, + "loss": 0.2265, + "step": 20020 + }, + { + "epoch": 0.7733889339356732, + "grad_norm": 0.7658601999282837, + "learning_rate": 0.000148443311839582, + "loss": 0.4459, + "step": 20030 + }, + { + "epoch": 0.7737750492297, + "grad_norm": 0.5597706437110901, + "learning_rate": 0.00014841757081998018, + "loss": 0.4317, + "step": 20040 + }, + { + "epoch": 0.7741611645237267, + "grad_norm": 0.740342915058136, + "learning_rate": 0.0001483918298003784, + "loss": 0.4158, + "step": 20050 + }, + { + "epoch": 0.7745472798177536, + "grad_norm": 0.6069484949111938, + "learning_rate": 0.00014836608878077663, + "loss": 0.2363, + "step": 20060 + }, + { + "epoch": 0.7749333951117804, + "grad_norm": 0.9331381916999817, + "learning_rate": 0.00014834034776117482, + "loss": 0.2063, + "step": 20070 + }, + { + "epoch": 0.7753195104058072, + "grad_norm": 1.298399806022644, + "learning_rate": 0.00014831460674157306, + "loss": 0.319, + "step": 20080 + }, + { + "epoch": 0.7757056256998339, + "grad_norm": 1.887229323387146, + "learning_rate": 0.00014828886572197124, + "loss": 0.3805, + "step": 20090 + }, + { + "epoch": 0.7760917409938608, + "grad_norm": 0.5734463930130005, + "learning_rate": 0.00014826312470236948, + "loss": 0.392, + "step": 20100 + }, + { + "epoch": 0.7764778562878876, + "grad_norm": 1.10410475730896, + "learning_rate": 0.00014823738368276767, + "loss": 0.3222, + "step": 20110 + }, + { + "epoch": 0.7768639715819143, + "grad_norm": 3.1354923248291016, + "learning_rate": 0.00014821164266316588, + "loss": 0.3434, + "step": 20120 + }, + { + "epoch": 0.7772500868759411, + "grad_norm": 1.9921913146972656, + "learning_rate": 0.00014818590164356412, + "loss": 0.222, + "step": 20130 + }, + { + "epoch": 0.777636202169968, + "grad_norm": 0.38615912199020386, + "learning_rate": 0.0001481601606239623, + "loss": 0.1859, + "step": 20140 + }, + { + "epoch": 0.7780223174639948, + "grad_norm": 0.919252336025238, + "learning_rate": 0.00014813441960436055, + "loss": 0.3889, + "step": 20150 + }, + { + "epoch": 0.7784084327580215, + "grad_norm": 0.9474624991416931, + "learning_rate": 0.00014810867858475874, + "loss": 0.3086, + "step": 20160 + }, + { + "epoch": 0.7787945480520484, + "grad_norm": 1.1992007493972778, + "learning_rate": 0.00014808293756515698, + "loss": 0.3949, + "step": 20170 + }, + { + "epoch": 0.7791806633460752, + "grad_norm": 2.2405364513397217, + "learning_rate": 0.00014805719654555516, + "loss": 0.4963, + "step": 20180 + }, + { + "epoch": 0.7795667786401019, + "grad_norm": 0.9163286685943604, + "learning_rate": 0.00014803145552595338, + "loss": 0.4022, + "step": 20190 + }, + { + "epoch": 0.7799528939341287, + "grad_norm": 0.995309591293335, + "learning_rate": 0.00014800571450635162, + "loss": 0.244, + "step": 20200 + }, + { + "epoch": 0.7803390092281556, + "grad_norm": 3.0966508388519287, + "learning_rate": 0.0001479799734867498, + "loss": 0.4165, + "step": 20210 + }, + { + "epoch": 0.7807251245221823, + "grad_norm": 1.7679264545440674, + "learning_rate": 0.00014795423246714804, + "loss": 0.3264, + "step": 20220 + }, + { + "epoch": 0.7811112398162091, + "grad_norm": 3.405724048614502, 
+ "learning_rate": 0.00014792849144754623, + "loss": 0.5292, + "step": 20230 + }, + { + "epoch": 0.7814973551102359, + "grad_norm": 0.5335774421691895, + "learning_rate": 0.00014790275042794447, + "loss": 0.2901, + "step": 20240 + }, + { + "epoch": 0.7818834704042628, + "grad_norm": 2.332176446914673, + "learning_rate": 0.00014787700940834268, + "loss": 0.3116, + "step": 20250 + }, + { + "epoch": 0.7822695856982895, + "grad_norm": 1.226844072341919, + "learning_rate": 0.00014785126838874087, + "loss": 0.332, + "step": 20260 + }, + { + "epoch": 0.7826557009923163, + "grad_norm": 0.8310544490814209, + "learning_rate": 0.0001478255273691391, + "loss": 0.3929, + "step": 20270 + }, + { + "epoch": 0.7830418162863431, + "grad_norm": 4.065925121307373, + "learning_rate": 0.0001477997863495373, + "loss": 0.3044, + "step": 20280 + }, + { + "epoch": 0.7834279315803699, + "grad_norm": 1.3661054372787476, + "learning_rate": 0.00014777404532993554, + "loss": 0.2396, + "step": 20290 + }, + { + "epoch": 0.7838140468743967, + "grad_norm": 2.047938346862793, + "learning_rate": 0.00014774830431033372, + "loss": 0.2954, + "step": 20300 + }, + { + "epoch": 0.7842001621684235, + "grad_norm": 2.099759817123413, + "learning_rate": 0.00014772256329073196, + "loss": 0.1987, + "step": 20310 + }, + { + "epoch": 0.7845862774624502, + "grad_norm": 4.439823627471924, + "learning_rate": 0.00014769682227113018, + "loss": 0.2449, + "step": 20320 + }, + { + "epoch": 0.7849723927564771, + "grad_norm": 1.894250750541687, + "learning_rate": 0.00014767108125152836, + "loss": 0.3335, + "step": 20330 + }, + { + "epoch": 0.7853585080505039, + "grad_norm": 2.2916924953460693, + "learning_rate": 0.0001476453402319266, + "loss": 0.3828, + "step": 20340 + }, + { + "epoch": 0.7857446233445307, + "grad_norm": 3.7847397327423096, + "learning_rate": 0.0001476195992123248, + "loss": 0.2142, + "step": 20350 + }, + { + "epoch": 0.7861307386385574, + "grad_norm": 2.7507593631744385, + "learning_rate": 0.00014759385819272303, + "loss": 0.39, + "step": 20360 + }, + { + "epoch": 0.7865168539325843, + "grad_norm": 2.500195264816284, + "learning_rate": 0.00014756811717312124, + "loss": 0.4163, + "step": 20370 + }, + { + "epoch": 0.7869029692266111, + "grad_norm": 1.3699760437011719, + "learning_rate": 0.00014754237615351946, + "loss": 0.2407, + "step": 20380 + }, + { + "epoch": 0.7872890845206378, + "grad_norm": 0.9736176133155823, + "learning_rate": 0.00014751663513391767, + "loss": 0.3644, + "step": 20390 + }, + { + "epoch": 0.7876751998146646, + "grad_norm": 0.9103217720985413, + "learning_rate": 0.00014749089411431588, + "loss": 0.1304, + "step": 20400 + }, + { + "epoch": 0.7880613151086915, + "grad_norm": 0.7736025452613831, + "learning_rate": 0.0001474651530947141, + "loss": 0.3848, + "step": 20410 + }, + { + "epoch": 0.7884474304027183, + "grad_norm": 1.6167576313018799, + "learning_rate": 0.00014743941207511228, + "loss": 0.3469, + "step": 20420 + }, + { + "epoch": 0.788833545696745, + "grad_norm": 0.1359117031097412, + "learning_rate": 0.00014741367105551052, + "loss": 0.28, + "step": 20430 + }, + { + "epoch": 0.7892196609907719, + "grad_norm": 0.20857305824756622, + "learning_rate": 0.00014738793003590874, + "loss": 0.2406, + "step": 20440 + }, + { + "epoch": 0.7896057762847987, + "grad_norm": 0.9618992805480957, + "learning_rate": 0.00014736218901630695, + "loss": 0.243, + "step": 20450 + }, + { + "epoch": 0.7899918915788254, + "grad_norm": 0.4663112163543701, + "learning_rate": 0.00014733644799670516, + "loss": 0.214, + "step": 20460 + 
}, + { + "epoch": 0.7903780068728522, + "grad_norm": 1.3922615051269531, + "learning_rate": 0.00014731070697710338, + "loss": 0.4601, + "step": 20470 + }, + { + "epoch": 0.7907641221668791, + "grad_norm": 1.5706521272659302, + "learning_rate": 0.0001472849659575016, + "loss": 0.2187, + "step": 20480 + }, + { + "epoch": 0.7911502374609058, + "grad_norm": 1.9315848350524902, + "learning_rate": 0.00014725922493789978, + "loss": 0.2691, + "step": 20490 + }, + { + "epoch": 0.7915363527549326, + "grad_norm": 2.6131045818328857, + "learning_rate": 0.00014723348391829802, + "loss": 0.335, + "step": 20500 + }, + { + "epoch": 0.7919224680489594, + "grad_norm": 2.6561343669891357, + "learning_rate": 0.00014720774289869623, + "loss": 0.2166, + "step": 20510 + }, + { + "epoch": 0.7923085833429863, + "grad_norm": 1.247574806213379, + "learning_rate": 0.00014718200187909444, + "loss": 0.4276, + "step": 20520 + }, + { + "epoch": 0.792694698637013, + "grad_norm": 0.7353020310401917, + "learning_rate": 0.00014715626085949266, + "loss": 0.3442, + "step": 20530 + }, + { + "epoch": 0.7930808139310398, + "grad_norm": 0.1100919172167778, + "learning_rate": 0.00014713051983989087, + "loss": 0.21, + "step": 20540 + }, + { + "epoch": 0.7934669292250666, + "grad_norm": 0.6608699560165405, + "learning_rate": 0.00014710477882028908, + "loss": 0.2593, + "step": 20550 + }, + { + "epoch": 0.7938530445190934, + "grad_norm": 1.0959700345993042, + "learning_rate": 0.0001470790378006873, + "loss": 0.1864, + "step": 20560 + }, + { + "epoch": 0.7942391598131202, + "grad_norm": 1.469408392906189, + "learning_rate": 0.0001470532967810855, + "loss": 0.251, + "step": 20570 + }, + { + "epoch": 0.794625275107147, + "grad_norm": 1.4239304065704346, + "learning_rate": 0.00014702755576148372, + "loss": 0.1782, + "step": 20580 + }, + { + "epoch": 0.7950113904011737, + "grad_norm": 0.5389681458473206, + "learning_rate": 0.00014700181474188194, + "loss": 0.3228, + "step": 20590 + }, + { + "epoch": 0.7953975056952006, + "grad_norm": 0.4744633436203003, + "learning_rate": 0.00014697607372228015, + "loss": 0.5448, + "step": 20600 + }, + { + "epoch": 0.7957836209892274, + "grad_norm": 1.1155211925506592, + "learning_rate": 0.00014695033270267836, + "loss": 0.3341, + "step": 20610 + }, + { + "epoch": 0.7961697362832542, + "grad_norm": 1.4301745891571045, + "learning_rate": 0.00014692459168307658, + "loss": 0.2376, + "step": 20620 + }, + { + "epoch": 0.7965558515772809, + "grad_norm": 2.0889878273010254, + "learning_rate": 0.0001468988506634748, + "loss": 0.252, + "step": 20630 + }, + { + "epoch": 0.7969419668713078, + "grad_norm": 1.420873761177063, + "learning_rate": 0.000146873109643873, + "loss": 0.4033, + "step": 20640 + }, + { + "epoch": 0.7973280821653346, + "grad_norm": 1.9909567832946777, + "learning_rate": 0.00014684736862427122, + "loss": 0.5206, + "step": 20650 + }, + { + "epoch": 0.7977141974593613, + "grad_norm": 1.3584216833114624, + "learning_rate": 0.00014682162760466943, + "loss": 0.2643, + "step": 20660 + }, + { + "epoch": 0.7981003127533881, + "grad_norm": 1.5488578081130981, + "learning_rate": 0.00014679588658506764, + "loss": 0.4098, + "step": 20670 + }, + { + "epoch": 0.798486428047415, + "grad_norm": 2.0128777027130127, + "learning_rate": 0.00014677014556546586, + "loss": 0.2498, + "step": 20680 + }, + { + "epoch": 0.7988725433414418, + "grad_norm": 0.6591671705245972, + "learning_rate": 0.00014674440454586407, + "loss": 0.3131, + "step": 20690 + }, + { + "epoch": 0.7992586586354685, + "grad_norm": 0.8321843147277832, + 
"learning_rate": 0.00014671866352626228, + "loss": 0.3849, + "step": 20700 + }, + { + "epoch": 0.7996447739294954, + "grad_norm": 1.4672768115997314, + "learning_rate": 0.0001466929225066605, + "loss": 0.3056, + "step": 20710 + }, + { + "epoch": 0.8000308892235222, + "grad_norm": 1.5972867012023926, + "learning_rate": 0.0001466671814870587, + "loss": 0.2699, + "step": 20720 + }, + { + "epoch": 0.8004170045175489, + "grad_norm": 0.685972273349762, + "learning_rate": 0.00014664144046745692, + "loss": 0.2598, + "step": 20730 + }, + { + "epoch": 0.8008031198115757, + "grad_norm": 1.2639611959457397, + "learning_rate": 0.00014661569944785514, + "loss": 0.2158, + "step": 20740 + }, + { + "epoch": 0.8011892351056026, + "grad_norm": 2.0752620697021484, + "learning_rate": 0.00014658995842825335, + "loss": 0.2782, + "step": 20750 + }, + { + "epoch": 0.8015753503996293, + "grad_norm": 4.334362983703613, + "learning_rate": 0.00014656421740865156, + "loss": 0.5083, + "step": 20760 + }, + { + "epoch": 0.8019614656936561, + "grad_norm": 2.233436107635498, + "learning_rate": 0.00014653847638904978, + "loss": 0.4378, + "step": 20770 + }, + { + "epoch": 0.8023475809876829, + "grad_norm": 0.5392621755599976, + "learning_rate": 0.000146512735369448, + "loss": 0.4467, + "step": 20780 + }, + { + "epoch": 0.8027336962817098, + "grad_norm": 0.3768058121204376, + "learning_rate": 0.0001464869943498462, + "loss": 0.3542, + "step": 20790 + }, + { + "epoch": 0.8031198115757365, + "grad_norm": 0.5712292194366455, + "learning_rate": 0.00014646125333024442, + "loss": 0.281, + "step": 20800 + }, + { + "epoch": 0.8035059268697633, + "grad_norm": 0.8674315214157104, + "learning_rate": 0.00014643551231064263, + "loss": 0.2945, + "step": 20810 + }, + { + "epoch": 0.8038920421637901, + "grad_norm": 1.6206015348434448, + "learning_rate": 0.00014640977129104084, + "loss": 0.2627, + "step": 20820 + }, + { + "epoch": 0.804278157457817, + "grad_norm": 1.8807138204574585, + "learning_rate": 0.00014638403027143906, + "loss": 0.3768, + "step": 20830 + }, + { + "epoch": 0.8046642727518437, + "grad_norm": 0.9721212387084961, + "learning_rate": 0.00014635828925183727, + "loss": 0.4511, + "step": 20840 + }, + { + "epoch": 0.8050503880458705, + "grad_norm": 0.569038987159729, + "learning_rate": 0.00014633254823223548, + "loss": 0.2235, + "step": 20850 + }, + { + "epoch": 0.8054365033398972, + "grad_norm": 0.5981199741363525, + "learning_rate": 0.0001463068072126337, + "loss": 0.1413, + "step": 20860 + }, + { + "epoch": 0.8058226186339241, + "grad_norm": 3.696936845779419, + "learning_rate": 0.00014628106619303194, + "loss": 0.3779, + "step": 20870 + }, + { + "epoch": 0.8062087339279509, + "grad_norm": 1.5039314031600952, + "learning_rate": 0.00014625532517343012, + "loss": 0.2117, + "step": 20880 + }, + { + "epoch": 0.8065948492219777, + "grad_norm": 1.7800476551055908, + "learning_rate": 0.00014622958415382834, + "loss": 0.2343, + "step": 20890 + }, + { + "epoch": 0.8069809645160044, + "grad_norm": 0.18587611615657806, + "learning_rate": 0.00014620384313422655, + "loss": 0.2229, + "step": 20900 + }, + { + "epoch": 0.8073670798100313, + "grad_norm": 3.5351600646972656, + "learning_rate": 0.00014617810211462476, + "loss": 0.3632, + "step": 20910 + }, + { + "epoch": 0.8077531951040581, + "grad_norm": 3.9036381244659424, + "learning_rate": 0.00014615236109502298, + "loss": 0.2941, + "step": 20920 + }, + { + "epoch": 0.8081393103980848, + "grad_norm": 1.9554537534713745, + "learning_rate": 0.0001461266200754212, + "loss": 0.2685, + "step": 
20930 + }, + { + "epoch": 0.8085254256921116, + "grad_norm": 1.0424940586090088, + "learning_rate": 0.00014610087905581943, + "loss": 0.2617, + "step": 20940 + }, + { + "epoch": 0.8089115409861385, + "grad_norm": 6.593061923980713, + "learning_rate": 0.00014607513803621762, + "loss": 0.5034, + "step": 20950 + }, + { + "epoch": 0.8092976562801653, + "grad_norm": 11.373255729675293, + "learning_rate": 0.00014604939701661583, + "loss": 0.2694, + "step": 20960 + }, + { + "epoch": 0.809683771574192, + "grad_norm": 1.4843833446502686, + "learning_rate": 0.00014602365599701404, + "loss": 0.4526, + "step": 20970 + }, + { + "epoch": 0.8100698868682189, + "grad_norm": 3.6086366176605225, + "learning_rate": 0.00014599791497741225, + "loss": 0.4536, + "step": 20980 + }, + { + "epoch": 0.8104560021622457, + "grad_norm": 0.6381124258041382, + "learning_rate": 0.00014597217395781047, + "loss": 0.2918, + "step": 20990 + }, + { + "epoch": 0.8108421174562724, + "grad_norm": 1.9507087469100952, + "learning_rate": 0.00014594643293820868, + "loss": 0.1625, + "step": 21000 + }, + { + "epoch": 0.8112282327502992, + "grad_norm": 0.954914391040802, + "learning_rate": 0.00014592069191860692, + "loss": 0.4204, + "step": 21010 + }, + { + "epoch": 0.8116143480443261, + "grad_norm": 2.633601427078247, + "learning_rate": 0.0001458949508990051, + "loss": 0.4017, + "step": 21020 + }, + { + "epoch": 0.8120004633383529, + "grad_norm": 0.7492280602455139, + "learning_rate": 0.00014586920987940332, + "loss": 0.3209, + "step": 21030 + }, + { + "epoch": 0.8123865786323796, + "grad_norm": 1.1670303344726562, + "learning_rate": 0.00014584346885980153, + "loss": 0.3361, + "step": 21040 + }, + { + "epoch": 0.8127726939264064, + "grad_norm": 0.8462283611297607, + "learning_rate": 0.00014581772784019975, + "loss": 0.3156, + "step": 21050 + }, + { + "epoch": 0.8131588092204333, + "grad_norm": 2.151671886444092, + "learning_rate": 0.000145791986820598, + "loss": 0.2081, + "step": 21060 + }, + { + "epoch": 0.81354492451446, + "grad_norm": 1.0742170810699463, + "learning_rate": 0.00014576624580099617, + "loss": 0.2247, + "step": 21070 + }, + { + "epoch": 0.8139310398084868, + "grad_norm": 1.2256931066513062, + "learning_rate": 0.00014574050478139442, + "loss": 0.3542, + "step": 21080 + }, + { + "epoch": 0.8143171551025136, + "grad_norm": 3.740055561065674, + "learning_rate": 0.0001457147637617926, + "loss": 0.1856, + "step": 21090 + }, + { + "epoch": 0.8147032703965404, + "grad_norm": 0.03290783613920212, + "learning_rate": 0.00014568902274219081, + "loss": 0.2616, + "step": 21100 + }, + { + "epoch": 0.8150893856905672, + "grad_norm": 0.13995541632175446, + "learning_rate": 0.00014566328172258903, + "loss": 0.2107, + "step": 21110 + }, + { + "epoch": 0.815475500984594, + "grad_norm": 0.44371533393859863, + "learning_rate": 0.00014563754070298724, + "loss": 0.3091, + "step": 21120 + }, + { + "epoch": 0.8158616162786207, + "grad_norm": 2.7269155979156494, + "learning_rate": 0.00014561179968338548, + "loss": 0.5674, + "step": 21130 + }, + { + "epoch": 0.8162477315726476, + "grad_norm": 0.7148515582084656, + "learning_rate": 0.00014558605866378367, + "loss": 0.4721, + "step": 21140 + }, + { + "epoch": 0.8166338468666744, + "grad_norm": 1.0939961671829224, + "learning_rate": 0.0001455603176441819, + "loss": 0.2666, + "step": 21150 + }, + { + "epoch": 0.8170199621607012, + "grad_norm": 1.7923939228057861, + "learning_rate": 0.0001455345766245801, + "loss": 0.2634, + "step": 21160 + }, + { + "epoch": 0.8174060774547279, + "grad_norm": 
0.725130021572113, + "learning_rate": 0.00014550883560497834, + "loss": 0.1968, + "step": 21170 + }, + { + "epoch": 0.8177921927487548, + "grad_norm": 0.443892240524292, + "learning_rate": 0.00014548309458537655, + "loss": 0.556, + "step": 21180 + }, + { + "epoch": 0.8181783080427816, + "grad_norm": 1.3551362752914429, + "learning_rate": 0.00014545735356577473, + "loss": 0.4115, + "step": 21190 + }, + { + "epoch": 0.8185644233368083, + "grad_norm": 0.6360037922859192, + "learning_rate": 0.00014543161254617297, + "loss": 0.3176, + "step": 21200 + }, + { + "epoch": 0.8189505386308351, + "grad_norm": 2.634549140930176, + "learning_rate": 0.00014540587152657116, + "loss": 0.3662, + "step": 21210 + }, + { + "epoch": 0.819336653924862, + "grad_norm": 3.267479181289673, + "learning_rate": 0.0001453801305069694, + "loss": 0.2925, + "step": 21220 + }, + { + "epoch": 0.8197227692188888, + "grad_norm": 1.3607991933822632, + "learning_rate": 0.0001453543894873676, + "loss": 0.1795, + "step": 21230 + }, + { + "epoch": 0.8201088845129155, + "grad_norm": 0.6499636769294739, + "learning_rate": 0.00014532864846776583, + "loss": 0.4451, + "step": 21240 + }, + { + "epoch": 0.8204949998069424, + "grad_norm": 0.7486141920089722, + "learning_rate": 0.00014530290744816404, + "loss": 0.3982, + "step": 21250 + }, + { + "epoch": 0.8208811151009692, + "grad_norm": 0.6481244564056396, + "learning_rate": 0.00014527716642856223, + "loss": 0.3358, + "step": 21260 + }, + { + "epoch": 0.8212672303949959, + "grad_norm": 1.0736982822418213, + "learning_rate": 0.00014525142540896047, + "loss": 0.5264, + "step": 21270 + }, + { + "epoch": 0.8216533456890227, + "grad_norm": 2.0467801094055176, + "learning_rate": 0.00014522568438935865, + "loss": 0.3645, + "step": 21280 + }, + { + "epoch": 0.8220394609830496, + "grad_norm": 2.671499013900757, + "learning_rate": 0.0001451999433697569, + "loss": 0.4779, + "step": 21290 + }, + { + "epoch": 0.8224255762770764, + "grad_norm": 1.4449695348739624, + "learning_rate": 0.00014517420235015508, + "loss": 0.3555, + "step": 21300 + }, + { + "epoch": 0.8228116915711031, + "grad_norm": 1.7484570741653442, + "learning_rate": 0.00014514846133055332, + "loss": 0.2921, + "step": 21310 + }, + { + "epoch": 0.8231978068651299, + "grad_norm": 0.9985783100128174, + "learning_rate": 0.00014512272031095153, + "loss": 0.1861, + "step": 21320 + }, + { + "epoch": 0.8235839221591568, + "grad_norm": 2.0824766159057617, + "learning_rate": 0.00014509697929134972, + "loss": 0.3582, + "step": 21330 + }, + { + "epoch": 0.8239700374531835, + "grad_norm": 0.8448216915130615, + "learning_rate": 0.00014507123827174796, + "loss": 0.3674, + "step": 21340 + }, + { + "epoch": 0.8243561527472103, + "grad_norm": 2.027111053466797, + "learning_rate": 0.00014504549725214615, + "loss": 0.3297, + "step": 21350 + }, + { + "epoch": 0.8247422680412371, + "grad_norm": 1.560604214668274, + "learning_rate": 0.0001450197562325444, + "loss": 0.3303, + "step": 21360 + }, + { + "epoch": 0.825128383335264, + "grad_norm": 2.179563045501709, + "learning_rate": 0.0001449940152129426, + "loss": 0.1704, + "step": 21370 + }, + { + "epoch": 0.8255144986292907, + "grad_norm": 1.6268993616104126, + "learning_rate": 0.00014496827419334081, + "loss": 0.3316, + "step": 21380 + }, + { + "epoch": 0.8259006139233175, + "grad_norm": 0.8986232280731201, + "learning_rate": 0.00014494253317373903, + "loss": 0.3361, + "step": 21390 + }, + { + "epoch": 0.8262867292173443, + "grad_norm": 0.8017566204071045, + "learning_rate": 0.00014491679215413721, + "loss": 
0.3992, + "step": 21400 + }, + { + "epoch": 0.8266728445113711, + "grad_norm": 0.879162073135376, + "learning_rate": 0.00014489105113453545, + "loss": 0.3747, + "step": 21410 + }, + { + "epoch": 0.8270589598053979, + "grad_norm": 1.948309302330017, + "learning_rate": 0.00014486531011493364, + "loss": 0.2323, + "step": 21420 + }, + { + "epoch": 0.8274450750994247, + "grad_norm": 1.34186851978302, + "learning_rate": 0.00014483956909533188, + "loss": 0.3856, + "step": 21430 + }, + { + "epoch": 0.8278311903934514, + "grad_norm": 1.3884105682373047, + "learning_rate": 0.0001448138280757301, + "loss": 0.3044, + "step": 21440 + }, + { + "epoch": 0.8282173056874783, + "grad_norm": 1.3283358812332153, + "learning_rate": 0.0001447880870561283, + "loss": 0.3624, + "step": 21450 + }, + { + "epoch": 0.8286034209815051, + "grad_norm": 13.829493522644043, + "learning_rate": 0.00014476234603652652, + "loss": 0.2217, + "step": 21460 + }, + { + "epoch": 0.8289895362755318, + "grad_norm": 3.4602255821228027, + "learning_rate": 0.0001447366050169247, + "loss": 0.4964, + "step": 21470 + }, + { + "epoch": 0.8293756515695587, + "grad_norm": 0.42417749762535095, + "learning_rate": 0.00014471086399732295, + "loss": 0.2262, + "step": 21480 + }, + { + "epoch": 0.8297617668635855, + "grad_norm": 3.1674726009368896, + "learning_rate": 0.00014468512297772113, + "loss": 0.3327, + "step": 21490 + }, + { + "epoch": 0.8301478821576123, + "grad_norm": 0.7226410508155823, + "learning_rate": 0.00014465938195811937, + "loss": 0.3152, + "step": 21500 + }, + { + "epoch": 0.830533997451639, + "grad_norm": 0.7477544546127319, + "learning_rate": 0.0001446336409385176, + "loss": 0.3676, + "step": 21510 + }, + { + "epoch": 0.8309201127456659, + "grad_norm": 1.6237748861312866, + "learning_rate": 0.0001446078999189158, + "loss": 0.336, + "step": 21520 + }, + { + "epoch": 0.8313062280396927, + "grad_norm": 2.8118655681610107, + "learning_rate": 0.00014458215889931401, + "loss": 0.3513, + "step": 21530 + }, + { + "epoch": 0.8316923433337194, + "grad_norm": 2.6571335792541504, + "learning_rate": 0.0001445564178797122, + "loss": 0.4008, + "step": 21540 + }, + { + "epoch": 0.8320784586277462, + "grad_norm": 0.7042214870452881, + "learning_rate": 0.00014453067686011044, + "loss": 0.3433, + "step": 21550 + }, + { + "epoch": 0.8324645739217731, + "grad_norm": 0.1551884561777115, + "learning_rate": 0.00014450493584050865, + "loss": 0.1748, + "step": 21560 + }, + { + "epoch": 0.8328506892157999, + "grad_norm": 1.2595586776733398, + "learning_rate": 0.00014447919482090687, + "loss": 0.2567, + "step": 21570 + }, + { + "epoch": 0.8332368045098266, + "grad_norm": 3.800837516784668, + "learning_rate": 0.00014445345380130508, + "loss": 0.31, + "step": 21580 + }, + { + "epoch": 0.8336229198038534, + "grad_norm": 2.2269585132598877, + "learning_rate": 0.0001444277127817033, + "loss": 0.5146, + "step": 21590 + }, + { + "epoch": 0.8340090350978803, + "grad_norm": 0.5263709425926208, + "learning_rate": 0.0001444019717621015, + "loss": 0.2369, + "step": 21600 + }, + { + "epoch": 0.834395150391907, + "grad_norm": 0.27564361691474915, + "learning_rate": 0.00014437623074249972, + "loss": 0.308, + "step": 21610 + }, + { + "epoch": 0.8347812656859338, + "grad_norm": 4.639162540435791, + "learning_rate": 0.00014435048972289793, + "loss": 0.3806, + "step": 21620 + }, + { + "epoch": 0.8351673809799606, + "grad_norm": 0.9607310891151428, + "learning_rate": 0.00014432474870329615, + "loss": 0.2245, + "step": 21630 + }, + { + "epoch": 0.8355534962739875, + 
"grad_norm": 4.01082706451416, + "learning_rate": 0.00014429900768369436, + "loss": 0.3934, + "step": 21640 + }, + { + "epoch": 0.8359396115680142, + "grad_norm": 0.9401382803916931, + "learning_rate": 0.00014427326666409257, + "loss": 0.4726, + "step": 21650 + }, + { + "epoch": 0.836325726862041, + "grad_norm": 2.1189887523651123, + "learning_rate": 0.0001442475256444908, + "loss": 0.5434, + "step": 21660 + }, + { + "epoch": 0.8367118421560678, + "grad_norm": 2.370849132537842, + "learning_rate": 0.000144221784624889, + "loss": 0.3885, + "step": 21670 + }, + { + "epoch": 0.8370979574500946, + "grad_norm": 0.595461368560791, + "learning_rate": 0.00014419604360528721, + "loss": 0.3811, + "step": 21680 + }, + { + "epoch": 0.8374840727441214, + "grad_norm": 0.9013121128082275, + "learning_rate": 0.00014417030258568543, + "loss": 0.2406, + "step": 21690 + }, + { + "epoch": 0.8378701880381482, + "grad_norm": 1.3803203105926514, + "learning_rate": 0.00014414456156608364, + "loss": 0.2197, + "step": 21700 + }, + { + "epoch": 0.8382563033321749, + "grad_norm": 1.6163750886917114, + "learning_rate": 0.00014411882054648185, + "loss": 0.2622, + "step": 21710 + }, + { + "epoch": 0.8386424186262018, + "grad_norm": 3.604384660720825, + "learning_rate": 0.00014409307952688007, + "loss": 0.3688, + "step": 21720 + }, + { + "epoch": 0.8390285339202286, + "grad_norm": 1.4415024518966675, + "learning_rate": 0.00014406733850727828, + "loss": 0.2375, + "step": 21730 + }, + { + "epoch": 0.8394146492142553, + "grad_norm": 1.4819844961166382, + "learning_rate": 0.0001440415974876765, + "loss": 0.4065, + "step": 21740 + }, + { + "epoch": 0.8398007645082822, + "grad_norm": 1.3991562128067017, + "learning_rate": 0.0001440158564680747, + "loss": 0.2942, + "step": 21750 + }, + { + "epoch": 0.840186879802309, + "grad_norm": 2.022538185119629, + "learning_rate": 0.00014399011544847292, + "loss": 0.2699, + "step": 21760 + }, + { + "epoch": 0.8405729950963358, + "grad_norm": 2.418179512023926, + "learning_rate": 0.00014396437442887113, + "loss": 0.3481, + "step": 21770 + }, + { + "epoch": 0.8409591103903625, + "grad_norm": 0.930482029914856, + "learning_rate": 0.00014393863340926935, + "loss": 0.3257, + "step": 21780 + }, + { + "epoch": 0.8413452256843894, + "grad_norm": 3.616676092147827, + "learning_rate": 0.00014391289238966756, + "loss": 0.3844, + "step": 21790 + }, + { + "epoch": 0.8417313409784162, + "grad_norm": 1.7993167638778687, + "learning_rate": 0.00014388715137006577, + "loss": 0.4569, + "step": 21800 + }, + { + "epoch": 0.8421174562724429, + "grad_norm": 1.9243824481964111, + "learning_rate": 0.000143861410350464, + "loss": 0.282, + "step": 21810 + }, + { + "epoch": 0.8425035715664697, + "grad_norm": 1.6578466892242432, + "learning_rate": 0.0001438356693308622, + "loss": 0.314, + "step": 21820 + }, + { + "epoch": 0.8428896868604966, + "grad_norm": 1.4833110570907593, + "learning_rate": 0.00014380992831126041, + "loss": 0.2698, + "step": 21830 + }, + { + "epoch": 0.8432758021545234, + "grad_norm": 1.9081813097000122, + "learning_rate": 0.00014378418729165863, + "loss": 0.2392, + "step": 21840 + }, + { + "epoch": 0.8436619174485501, + "grad_norm": 1.8436548709869385, + "learning_rate": 0.00014375844627205684, + "loss": 0.1906, + "step": 21850 + }, + { + "epoch": 0.8440480327425769, + "grad_norm": 4.679655075073242, + "learning_rate": 0.00014373270525245505, + "loss": 0.6446, + "step": 21860 + }, + { + "epoch": 0.8444341480366038, + "grad_norm": 1.8216800689697266, + "learning_rate": 0.0001437069642328533, + 
"loss": 0.3272, + "step": 21870 + }, + { + "epoch": 0.8448202633306305, + "grad_norm": 1.0107386112213135, + "learning_rate": 0.00014368122321325148, + "loss": 0.3003, + "step": 21880 + }, + { + "epoch": 0.8452063786246573, + "grad_norm": 0.9573041796684265, + "learning_rate": 0.0001436554821936497, + "loss": 0.1757, + "step": 21890 + }, + { + "epoch": 0.8455924939186841, + "grad_norm": 0.9367936253547668, + "learning_rate": 0.0001436297411740479, + "loss": 0.2166, + "step": 21900 + }, + { + "epoch": 0.845978609212711, + "grad_norm": 3.1247951984405518, + "learning_rate": 0.00014360400015444612, + "loss": 0.3488, + "step": 21910 + }, + { + "epoch": 0.8463647245067377, + "grad_norm": 3.9438281059265137, + "learning_rate": 0.00014357825913484433, + "loss": 0.4498, + "step": 21920 + }, + { + "epoch": 0.8467508398007645, + "grad_norm": 0.909572958946228, + "learning_rate": 0.00014355251811524255, + "loss": 0.2698, + "step": 21930 + }, + { + "epoch": 0.8471369550947913, + "grad_norm": 2.6619715690612793, + "learning_rate": 0.0001435267770956408, + "loss": 0.4204, + "step": 21940 + }, + { + "epoch": 0.8475230703888181, + "grad_norm": 0.6143421530723572, + "learning_rate": 0.00014350103607603897, + "loss": 0.3573, + "step": 21950 + }, + { + "epoch": 0.8479091856828449, + "grad_norm": 0.3222682476043701, + "learning_rate": 0.0001434752950564372, + "loss": 0.2172, + "step": 21960 + }, + { + "epoch": 0.8482953009768717, + "grad_norm": 1.772538185119629, + "learning_rate": 0.0001434495540368354, + "loss": 0.4203, + "step": 21970 + }, + { + "epoch": 0.8486814162708984, + "grad_norm": 1.6327133178710938, + "learning_rate": 0.0001434238130172336, + "loss": 0.2153, + "step": 21980 + }, + { + "epoch": 0.8490675315649253, + "grad_norm": 1.0445518493652344, + "learning_rate": 0.00014339807199763183, + "loss": 0.3392, + "step": 21990 + }, + { + "epoch": 0.8494536468589521, + "grad_norm": 3.6096575260162354, + "learning_rate": 0.00014337233097803004, + "loss": 0.2691, + "step": 22000 + }, + { + "epoch": 0.8498397621529789, + "grad_norm": 1.4343204498291016, + "learning_rate": 0.00014334658995842828, + "loss": 0.3118, + "step": 22010 + }, + { + "epoch": 0.8502258774470057, + "grad_norm": 1.0348806381225586, + "learning_rate": 0.00014332084893882647, + "loss": 0.2886, + "step": 22020 + }, + { + "epoch": 0.8506119927410325, + "grad_norm": 0.5164201855659485, + "learning_rate": 0.00014329510791922468, + "loss": 0.2943, + "step": 22030 + }, + { + "epoch": 0.8509981080350593, + "grad_norm": 1.8109897375106812, + "learning_rate": 0.0001432693668996229, + "loss": 0.4021, + "step": 22040 + }, + { + "epoch": 0.851384223329086, + "grad_norm": 2.7065579891204834, + "learning_rate": 0.0001432436258800211, + "loss": 0.371, + "step": 22050 + }, + { + "epoch": 0.8517703386231129, + "grad_norm": 2.3028764724731445, + "learning_rate": 0.00014321788486041935, + "loss": 0.5026, + "step": 22060 + }, + { + "epoch": 0.8521564539171397, + "grad_norm": 1.3945609331130981, + "learning_rate": 0.00014319214384081753, + "loss": 0.4444, + "step": 22070 + }, + { + "epoch": 0.8525425692111664, + "grad_norm": 2.407951593399048, + "learning_rate": 0.00014316640282121577, + "loss": 0.4465, + "step": 22080 + }, + { + "epoch": 0.8529286845051932, + "grad_norm": 4.120944976806641, + "learning_rate": 0.00014314066180161396, + "loss": 0.3142, + "step": 22090 + }, + { + "epoch": 0.8533147997992201, + "grad_norm": 1.8841919898986816, + "learning_rate": 0.00014311492078201217, + "loss": 0.3609, + "step": 22100 + }, + { + "epoch": 0.8537009150932469, + 
"grad_norm": 5.1519951820373535, + "learning_rate": 0.00014308917976241039, + "loss": 0.3062, + "step": 22110 + }, + { + "epoch": 0.8540870303872736, + "grad_norm": 2.7280924320220947, + "learning_rate": 0.0001430634387428086, + "loss": 0.3678, + "step": 22120 + }, + { + "epoch": 0.8544731456813004, + "grad_norm": 0.23237809538841248, + "learning_rate": 0.00014303769772320684, + "loss": 0.2979, + "step": 22130 + }, + { + "epoch": 0.8548592609753273, + "grad_norm": 1.0587934255599976, + "learning_rate": 0.00014301195670360503, + "loss": 0.5672, + "step": 22140 + }, + { + "epoch": 0.855245376269354, + "grad_norm": 1.854447603225708, + "learning_rate": 0.00014298621568400327, + "loss": 0.3657, + "step": 22150 + }, + { + "epoch": 0.8556314915633808, + "grad_norm": 0.9766449332237244, + "learning_rate": 0.00014296047466440145, + "loss": 0.3219, + "step": 22160 + }, + { + "epoch": 0.8560176068574076, + "grad_norm": 1.7281047105789185, + "learning_rate": 0.00014293473364479967, + "loss": 0.3485, + "step": 22170 + }, + { + "epoch": 0.8564037221514345, + "grad_norm": 1.8366886377334595, + "learning_rate": 0.0001429089926251979, + "loss": 0.2929, + "step": 22180 + }, + { + "epoch": 0.8567898374454612, + "grad_norm": 0.8708136677742004, + "learning_rate": 0.0001428832516055961, + "loss": 0.3633, + "step": 22190 + }, + { + "epoch": 0.857175952739488, + "grad_norm": 1.5010342597961426, + "learning_rate": 0.00014285751058599433, + "loss": 0.3646, + "step": 22200 + }, + { + "epoch": 0.8575620680335148, + "grad_norm": 1.7438324689865112, + "learning_rate": 0.00014283176956639252, + "loss": 0.3093, + "step": 22210 + }, + { + "epoch": 0.8579481833275416, + "grad_norm": 1.5954341888427734, + "learning_rate": 0.00014280602854679076, + "loss": 0.2435, + "step": 22220 + }, + { + "epoch": 0.8583342986215684, + "grad_norm": 2.5279555320739746, + "learning_rate": 0.00014278028752718895, + "loss": 0.2526, + "step": 22230 + }, + { + "epoch": 0.8587204139155952, + "grad_norm": 3.4773006439208984, + "learning_rate": 0.00014275454650758716, + "loss": 0.3763, + "step": 22240 + }, + { + "epoch": 0.8591065292096219, + "grad_norm": 0.25110548734664917, + "learning_rate": 0.0001427288054879854, + "loss": 0.2265, + "step": 22250 + }, + { + "epoch": 0.8594926445036488, + "grad_norm": 2.3060946464538574, + "learning_rate": 0.00014270306446838359, + "loss": 0.3756, + "step": 22260 + }, + { + "epoch": 0.8598787597976756, + "grad_norm": 2.206308364868164, + "learning_rate": 0.00014267732344878183, + "loss": 0.295, + "step": 22270 + }, + { + "epoch": 0.8602648750917024, + "grad_norm": 1.1059858798980713, + "learning_rate": 0.00014265158242918, + "loss": 0.2382, + "step": 22280 + }, + { + "epoch": 0.8606509903857292, + "grad_norm": 0.045407798141241074, + "learning_rate": 0.00014262584140957825, + "loss": 0.1725, + "step": 22290 + }, + { + "epoch": 0.861037105679756, + "grad_norm": 0.2532581686973572, + "learning_rate": 0.00014260010038997644, + "loss": 0.3089, + "step": 22300 + }, + { + "epoch": 0.8614232209737828, + "grad_norm": 0.8851459622383118, + "learning_rate": 0.00014257435937037468, + "loss": 0.2721, + "step": 22310 + }, + { + "epoch": 0.8618093362678095, + "grad_norm": 2.9988598823547363, + "learning_rate": 0.0001425486183507729, + "loss": 0.3854, + "step": 22320 + }, + { + "epoch": 0.8621954515618364, + "grad_norm": 1.888629674911499, + "learning_rate": 0.00014252287733117108, + "loss": 0.4472, + "step": 22330 + }, + { + "epoch": 0.8625815668558632, + "grad_norm": 0.9517232179641724, + "learning_rate": 
0.00014249713631156932, + "loss": 0.3094, + "step": 22340 + }, + { + "epoch": 0.86296768214989, + "grad_norm": 1.4752097129821777, + "learning_rate": 0.0001424713952919675, + "loss": 0.2649, + "step": 22350 + }, + { + "epoch": 0.8633537974439167, + "grad_norm": 1.642285704612732, + "learning_rate": 0.00014244565427236575, + "loss": 0.4418, + "step": 22360 + }, + { + "epoch": 0.8637399127379436, + "grad_norm": 2.2177469730377197, + "learning_rate": 0.00014241991325276396, + "loss": 0.6865, + "step": 22370 + }, + { + "epoch": 0.8641260280319704, + "grad_norm": 0.9089158773422241, + "learning_rate": 0.00014239417223316217, + "loss": 0.1842, + "step": 22380 + }, + { + "epoch": 0.8645121433259971, + "grad_norm": 1.108091115951538, + "learning_rate": 0.00014236843121356039, + "loss": 0.3181, + "step": 22390 + }, + { + "epoch": 0.8648982586200239, + "grad_norm": 3.1317670345306396, + "learning_rate": 0.00014234269019395857, + "loss": 0.4798, + "step": 22400 + }, + { + "epoch": 0.8652843739140508, + "grad_norm": 1.0352108478546143, + "learning_rate": 0.0001423169491743568, + "loss": 0.3473, + "step": 22410 + }, + { + "epoch": 0.8656704892080775, + "grad_norm": 0.48546215891838074, + "learning_rate": 0.000142291208154755, + "loss": 0.2731, + "step": 22420 + }, + { + "epoch": 0.8660566045021043, + "grad_norm": 1.1608140468597412, + "learning_rate": 0.00014226546713515324, + "loss": 0.3202, + "step": 22430 + }, + { + "epoch": 0.8664427197961311, + "grad_norm": 0.19237665832042694, + "learning_rate": 0.00014223972611555145, + "loss": 0.4985, + "step": 22440 + }, + { + "epoch": 0.866828835090158, + "grad_norm": 0.12056539207696915, + "learning_rate": 0.00014221398509594967, + "loss": 0.2071, + "step": 22450 + }, + { + "epoch": 0.8672149503841847, + "grad_norm": 1.416548252105713, + "learning_rate": 0.00014218824407634788, + "loss": 0.2572, + "step": 22460 + }, + { + "epoch": 0.8676010656782115, + "grad_norm": 0.816148042678833, + "learning_rate": 0.00014216250305674607, + "loss": 0.2368, + "step": 22470 + }, + { + "epoch": 0.8679871809722383, + "grad_norm": 3.2394118309020996, + "learning_rate": 0.0001421367620371443, + "loss": 0.3768, + "step": 22480 + }, + { + "epoch": 0.8683732962662651, + "grad_norm": 0.7187336087226868, + "learning_rate": 0.00014211102101754252, + "loss": 0.3297, + "step": 22490 + }, + { + "epoch": 0.8687594115602919, + "grad_norm": 0.5154927372932434, + "learning_rate": 0.00014208527999794073, + "loss": 0.3301, + "step": 22500 + }, + { + "epoch": 0.8691455268543187, + "grad_norm": 1.0461368560791016, + "learning_rate": 0.00014205953897833895, + "loss": 0.335, + "step": 22510 + }, + { + "epoch": 0.8695316421483454, + "grad_norm": 0.9720495343208313, + "learning_rate": 0.00014203379795873716, + "loss": 0.2405, + "step": 22520 + }, + { + "epoch": 0.8699177574423723, + "grad_norm": 2.147216558456421, + "learning_rate": 0.00014200805693913537, + "loss": 0.3291, + "step": 22530 + }, + { + "epoch": 0.8703038727363991, + "grad_norm": 1.162614345550537, + "learning_rate": 0.00014198231591953356, + "loss": 0.2705, + "step": 22540 + }, + { + "epoch": 0.8706899880304259, + "grad_norm": 0.5020268559455872, + "learning_rate": 0.0001419565748999318, + "loss": 0.4403, + "step": 22550 + }, + { + "epoch": 0.8710761033244527, + "grad_norm": 3.198425531387329, + "learning_rate": 0.00014193083388033, + "loss": 0.2244, + "step": 22560 + }, + { + "epoch": 0.8714622186184795, + "grad_norm": 2.4578161239624023, + "learning_rate": 0.00014190509286072823, + "loss": 0.3993, + "step": 22570 + }, + { + 
"epoch": 0.8718483339125063, + "grad_norm": 1.4559924602508545, + "learning_rate": 0.00014187935184112644, + "loss": 0.2631, + "step": 22580 + }, + { + "epoch": 0.872234449206533, + "grad_norm": 0.6834856271743774, + "learning_rate": 0.00014185361082152465, + "loss": 0.3183, + "step": 22590 + }, + { + "epoch": 0.8726205645005599, + "grad_norm": 3.9718177318573, + "learning_rate": 0.00014182786980192287, + "loss": 0.439, + "step": 22600 + }, + { + "epoch": 0.8730066797945867, + "grad_norm": 1.7797685861587524, + "learning_rate": 0.00014180212878232105, + "loss": 0.2938, + "step": 22610 + }, + { + "epoch": 0.8733927950886134, + "grad_norm": 2.137479543685913, + "learning_rate": 0.0001417763877627193, + "loss": 0.4733, + "step": 22620 + }, + { + "epoch": 0.8737789103826402, + "grad_norm": 0.7577596306800842, + "learning_rate": 0.0001417506467431175, + "loss": 0.3599, + "step": 22630 + }, + { + "epoch": 0.8741650256766671, + "grad_norm": 0.8944536447525024, + "learning_rate": 0.00014172490572351572, + "loss": 0.3393, + "step": 22640 + }, + { + "epoch": 0.8745511409706939, + "grad_norm": 1.59170663356781, + "learning_rate": 0.00014169916470391393, + "loss": 0.3578, + "step": 22650 + }, + { + "epoch": 0.8749372562647206, + "grad_norm": 0.9852517247200012, + "learning_rate": 0.00014167342368431215, + "loss": 0.4203, + "step": 22660 + }, + { + "epoch": 0.8753233715587474, + "grad_norm": 1.0319880247116089, + "learning_rate": 0.00014164768266471036, + "loss": 0.3332, + "step": 22670 + }, + { + "epoch": 0.8757094868527743, + "grad_norm": 3.713357925415039, + "learning_rate": 0.00014162194164510857, + "loss": 0.412, + "step": 22680 + }, + { + "epoch": 0.876095602146801, + "grad_norm": 1.3197567462921143, + "learning_rate": 0.00014159620062550679, + "loss": 0.286, + "step": 22690 + }, + { + "epoch": 0.8764817174408278, + "grad_norm": 3.604928493499756, + "learning_rate": 0.000141570459605905, + "loss": 0.4206, + "step": 22700 + }, + { + "epoch": 0.8768678327348546, + "grad_norm": 3.1074795722961426, + "learning_rate": 0.0001415447185863032, + "loss": 0.2261, + "step": 22710 + }, + { + "epoch": 0.8772539480288815, + "grad_norm": 2.855581760406494, + "learning_rate": 0.00014151897756670143, + "loss": 0.6825, + "step": 22720 + }, + { + "epoch": 0.8776400633229082, + "grad_norm": 3.5756995677948, + "learning_rate": 0.00014149323654709964, + "loss": 0.4283, + "step": 22730 + }, + { + "epoch": 0.878026178616935, + "grad_norm": 1.4255709648132324, + "learning_rate": 0.00014146749552749785, + "loss": 0.3243, + "step": 22740 + }, + { + "epoch": 0.8784122939109618, + "grad_norm": 0.3095746338367462, + "learning_rate": 0.00014144175450789607, + "loss": 0.1052, + "step": 22750 + }, + { + "epoch": 0.8787984092049886, + "grad_norm": 1.4129611253738403, + "learning_rate": 0.00014141601348829428, + "loss": 0.2746, + "step": 22760 + }, + { + "epoch": 0.8791845244990154, + "grad_norm": 0.6448315382003784, + "learning_rate": 0.0001413902724686925, + "loss": 0.3312, + "step": 22770 + }, + { + "epoch": 0.8795706397930422, + "grad_norm": 1.6328849792480469, + "learning_rate": 0.0001413645314490907, + "loss": 0.2346, + "step": 22780 + }, + { + "epoch": 0.879956755087069, + "grad_norm": 1.0974128246307373, + "learning_rate": 0.00014133879042948892, + "loss": 0.2446, + "step": 22790 + }, + { + "epoch": 0.8803428703810958, + "grad_norm": 2.3657541275024414, + "learning_rate": 0.00014131304940988713, + "loss": 0.3439, + "step": 22800 + }, + { + "epoch": 0.8807289856751226, + "grad_norm": 0.8959445953369141, + "learning_rate": 
0.00014128730839028535, + "loss": 0.2896, + "step": 22810 + }, + { + "epoch": 0.8811151009691494, + "grad_norm": 1.5202107429504395, + "learning_rate": 0.00014126156737068356, + "loss": 0.1951, + "step": 22820 + }, + { + "epoch": 0.8815012162631762, + "grad_norm": 1.3710687160491943, + "learning_rate": 0.00014123582635108177, + "loss": 0.3193, + "step": 22830 + }, + { + "epoch": 0.881887331557203, + "grad_norm": 2.18868088722229, + "learning_rate": 0.00014121008533147999, + "loss": 0.3535, + "step": 22840 + }, + { + "epoch": 0.8822734468512298, + "grad_norm": 0.8251023888587952, + "learning_rate": 0.0001411843443118782, + "loss": 0.2536, + "step": 22850 + }, + { + "epoch": 0.8826595621452565, + "grad_norm": 1.0674525499343872, + "learning_rate": 0.0001411586032922764, + "loss": 0.3482, + "step": 22860 + }, + { + "epoch": 0.8830456774392834, + "grad_norm": 2.1199145317077637, + "learning_rate": 0.00014113286227267463, + "loss": 0.201, + "step": 22870 + }, + { + "epoch": 0.8834317927333102, + "grad_norm": 1.1958723068237305, + "learning_rate": 0.00014110712125307284, + "loss": 0.2813, + "step": 22880 + }, + { + "epoch": 0.883817908027337, + "grad_norm": 1.7805982828140259, + "learning_rate": 0.00014108138023347105, + "loss": 0.5317, + "step": 22890 + }, + { + "epoch": 0.8842040233213637, + "grad_norm": 1.9648222923278809, + "learning_rate": 0.00014105563921386926, + "loss": 0.3022, + "step": 22900 + }, + { + "epoch": 0.8845901386153906, + "grad_norm": 0.9053369164466858, + "learning_rate": 0.00014102989819426748, + "loss": 0.2886, + "step": 22910 + }, + { + "epoch": 0.8849762539094174, + "grad_norm": 1.251861333847046, + "learning_rate": 0.0001410041571746657, + "loss": 0.2632, + "step": 22920 + }, + { + "epoch": 0.8853623692034441, + "grad_norm": 3.8411691188812256, + "learning_rate": 0.0001409784161550639, + "loss": 0.3056, + "step": 22930 + }, + { + "epoch": 0.8857484844974709, + "grad_norm": 0.5969072580337524, + "learning_rate": 0.00014095267513546212, + "loss": 0.3806, + "step": 22940 + }, + { + "epoch": 0.8861345997914978, + "grad_norm": 4.688140392303467, + "learning_rate": 0.00014092693411586033, + "loss": 0.3318, + "step": 22950 + }, + { + "epoch": 0.8865207150855245, + "grad_norm": 4.2694993019104, + "learning_rate": 0.00014090119309625854, + "loss": 0.34, + "step": 22960 + }, + { + "epoch": 0.8869068303795513, + "grad_norm": 2.4169955253601074, + "learning_rate": 0.00014087545207665676, + "loss": 0.466, + "step": 22970 + }, + { + "epoch": 0.8872929456735781, + "grad_norm": 0.044384077191352844, + "learning_rate": 0.00014084971105705497, + "loss": 0.3196, + "step": 22980 + }, + { + "epoch": 0.887679060967605, + "grad_norm": 0.6931707262992859, + "learning_rate": 0.0001408239700374532, + "loss": 0.2166, + "step": 22990 + }, + { + "epoch": 0.8880651762616317, + "grad_norm": 3.195596933364868, + "learning_rate": 0.0001407982290178514, + "loss": 0.404, + "step": 23000 + }, + { + "epoch": 0.8884512915556585, + "grad_norm": 2.055058002471924, + "learning_rate": 0.0001407724879982496, + "loss": 0.3685, + "step": 23010 + }, + { + "epoch": 0.8888374068496853, + "grad_norm": 0.7302665114402771, + "learning_rate": 0.00014074674697864782, + "loss": 0.2247, + "step": 23020 + }, + { + "epoch": 0.8892235221437121, + "grad_norm": 1.1183364391326904, + "learning_rate": 0.00014072100595904604, + "loss": 0.3699, + "step": 23030 + }, + { + "epoch": 0.8896096374377389, + "grad_norm": 1.040553092956543, + "learning_rate": 0.00014069526493944425, + "loss": 0.1791, + "step": 23040 + }, + { + "epoch": 
0.8899957527317657, + "grad_norm": 1.5321402549743652, + "learning_rate": 0.00014066952391984246, + "loss": 0.2533, + "step": 23050 + }, + { + "epoch": 0.8903818680257926, + "grad_norm": 0.298433780670166, + "learning_rate": 0.0001406437829002407, + "loss": 0.2291, + "step": 23060 + }, + { + "epoch": 0.8907679833198193, + "grad_norm": 2.563689947128296, + "learning_rate": 0.0001406180418806389, + "loss": 0.4175, + "step": 23070 + }, + { + "epoch": 0.8911540986138461, + "grad_norm": 3.0614495277404785, + "learning_rate": 0.00014059230086103713, + "loss": 0.2875, + "step": 23080 + }, + { + "epoch": 0.8915402139078729, + "grad_norm": 0.4387970268726349, + "learning_rate": 0.00014056655984143532, + "loss": 0.2982, + "step": 23090 + }, + { + "epoch": 0.8919263292018997, + "grad_norm": 2.5590367317199707, + "learning_rate": 0.00014054081882183353, + "loss": 0.2858, + "step": 23100 + }, + { + "epoch": 0.8923124444959265, + "grad_norm": 0.6369298696517944, + "learning_rate": 0.00014051507780223174, + "loss": 0.2889, + "step": 23110 + }, + { + "epoch": 0.8926985597899533, + "grad_norm": 0.5210187435150146, + "learning_rate": 0.00014048933678262996, + "loss": 0.2442, + "step": 23120 + }, + { + "epoch": 0.89308467508398, + "grad_norm": 3.5073516368865967, + "learning_rate": 0.0001404635957630282, + "loss": 0.1733, + "step": 23130 + }, + { + "epoch": 0.8934707903780069, + "grad_norm": 2.1915247440338135, + "learning_rate": 0.00014043785474342638, + "loss": 0.4282, + "step": 23140 + }, + { + "epoch": 0.8938569056720337, + "grad_norm": 0.8182128071784973, + "learning_rate": 0.00014041211372382462, + "loss": 0.5501, + "step": 23150 + }, + { + "epoch": 0.8942430209660605, + "grad_norm": 0.4261817932128906, + "learning_rate": 0.0001403863727042228, + "loss": 0.2192, + "step": 23160 + }, + { + "epoch": 0.8946291362600872, + "grad_norm": 1.244523525238037, + "learning_rate": 0.00014036063168462102, + "loss": 0.2909, + "step": 23170 + }, + { + "epoch": 0.8950152515541141, + "grad_norm": 1.1659152507781982, + "learning_rate": 0.00014033489066501926, + "loss": 0.3119, + "step": 23180 + }, + { + "epoch": 0.8954013668481409, + "grad_norm": 0.4217310845851898, + "learning_rate": 0.00014030914964541745, + "loss": 0.2454, + "step": 23190 + }, + { + "epoch": 0.8957874821421676, + "grad_norm": 1.5685316324234009, + "learning_rate": 0.0001402834086258157, + "loss": 0.3609, + "step": 23200 + }, + { + "epoch": 0.8961735974361944, + "grad_norm": 2.6524040699005127, + "learning_rate": 0.00014025766760621388, + "loss": 0.2508, + "step": 23210 + }, + { + "epoch": 0.8965597127302213, + "grad_norm": 2.4932234287261963, + "learning_rate": 0.00014023192658661212, + "loss": 0.4047, + "step": 23220 + }, + { + "epoch": 0.896945828024248, + "grad_norm": 0.5093832015991211, + "learning_rate": 0.0001402061855670103, + "loss": 0.1639, + "step": 23230 + }, + { + "epoch": 0.8973319433182748, + "grad_norm": 1.1632994413375854, + "learning_rate": 0.00014018044454740852, + "loss": 0.2457, + "step": 23240 + }, + { + "epoch": 0.8977180586123016, + "grad_norm": 2.181727647781372, + "learning_rate": 0.00014015470352780676, + "loss": 0.3784, + "step": 23250 + }, + { + "epoch": 0.8981041739063285, + "grad_norm": 3.8110599517822266, + "learning_rate": 0.00014012896250820494, + "loss": 0.282, + "step": 23260 + }, + { + "epoch": 0.8984902892003552, + "grad_norm": 2.8994619846343994, + "learning_rate": 0.00014010322148860318, + "loss": 0.3801, + "step": 23270 + }, + { + "epoch": 0.898876404494382, + "grad_norm": 1.2624458074569702, + "learning_rate": 
0.00014007748046900137, + "loss": 0.3718, + "step": 23280 + }, + { + "epoch": 0.8992625197884088, + "grad_norm": 1.5995053052902222, + "learning_rate": 0.0001400517394493996, + "loss": 0.1828, + "step": 23290 + }, + { + "epoch": 0.8996486350824356, + "grad_norm": 2.251941680908203, + "learning_rate": 0.0001400259984297978, + "loss": 0.4434, + "step": 23300 + }, + { + "epoch": 0.9000347503764624, + "grad_norm": 1.1319392919540405, + "learning_rate": 0.000140000257410196, + "loss": 0.2479, + "step": 23310 + }, + { + "epoch": 0.9004208656704892, + "grad_norm": 2.075227737426758, + "learning_rate": 0.00013997451639059425, + "loss": 0.3079, + "step": 23320 + }, + { + "epoch": 0.9008069809645161, + "grad_norm": 0.6504748463630676, + "learning_rate": 0.00013994877537099244, + "loss": 0.2899, + "step": 23330 + }, + { + "epoch": 0.9011930962585428, + "grad_norm": 2.1623177528381348, + "learning_rate": 0.00013992303435139068, + "loss": 0.2651, + "step": 23340 + }, + { + "epoch": 0.9015792115525696, + "grad_norm": 2.159290313720703, + "learning_rate": 0.00013989729333178886, + "loss": 0.2975, + "step": 23350 + }, + { + "epoch": 0.9019653268465964, + "grad_norm": 0.7650458216667175, + "learning_rate": 0.0001398715523121871, + "loss": 0.2699, + "step": 23360 + }, + { + "epoch": 0.9023514421406232, + "grad_norm": 4.838365077972412, + "learning_rate": 0.00013984581129258532, + "loss": 0.4633, + "step": 23370 + }, + { + "epoch": 0.90273755743465, + "grad_norm": 4.598055362701416, + "learning_rate": 0.0001398200702729835, + "loss": 0.5255, + "step": 23380 + }, + { + "epoch": 0.9031236727286768, + "grad_norm": 0.9883280396461487, + "learning_rate": 0.00013979432925338174, + "loss": 0.5096, + "step": 23390 + }, + { + "epoch": 0.9035097880227035, + "grad_norm": 2.1574087142944336, + "learning_rate": 0.00013976858823377993, + "loss": 0.2999, + "step": 23400 + }, + { + "epoch": 0.9038959033167304, + "grad_norm": 1.7071588039398193, + "learning_rate": 0.00013974284721417817, + "loss": 0.4066, + "step": 23410 + }, + { + "epoch": 0.9042820186107572, + "grad_norm": 1.5206272602081299, + "learning_rate": 0.00013971710619457636, + "loss": 0.2841, + "step": 23420 + }, + { + "epoch": 0.904668133904784, + "grad_norm": 0.8649633526802063, + "learning_rate": 0.0001396913651749746, + "loss": 0.2263, + "step": 23430 + }, + { + "epoch": 0.9050542491988107, + "grad_norm": 0.35130754113197327, + "learning_rate": 0.0001396656241553728, + "loss": 0.3575, + "step": 23440 + }, + { + "epoch": 0.9054403644928376, + "grad_norm": 0.6659330725669861, + "learning_rate": 0.000139639883135771, + "loss": 0.2895, + "step": 23450 + }, + { + "epoch": 0.9058264797868644, + "grad_norm": 1.1387370824813843, + "learning_rate": 0.00013961414211616924, + "loss": 0.3574, + "step": 23460 + }, + { + "epoch": 0.9062125950808911, + "grad_norm": 1.8786828517913818, + "learning_rate": 0.00013958840109656742, + "loss": 0.5127, + "step": 23470 + }, + { + "epoch": 0.9065987103749179, + "grad_norm": 1.1299179792404175, + "learning_rate": 0.00013956266007696566, + "loss": 0.2215, + "step": 23480 + }, + { + "epoch": 0.9069848256689448, + "grad_norm": 1.1256846189498901, + "learning_rate": 0.00013953691905736388, + "loss": 0.302, + "step": 23490 + }, + { + "epoch": 0.9073709409629716, + "grad_norm": 0.8697860836982727, + "learning_rate": 0.0001395111780377621, + "loss": 0.2846, + "step": 23500 + }, + { + "epoch": 0.9077570562569983, + "grad_norm": 1.4939324855804443, + "learning_rate": 0.0001394854370181603, + "loss": 0.2376, + "step": 23510 + }, + { + "epoch": 
0.9081431715510251, + "grad_norm": 0.5483170747756958, + "learning_rate": 0.00013945969599855852, + "loss": 0.2546, + "step": 23520 + }, + { + "epoch": 0.908529286845052, + "grad_norm": 1.0681931972503662, + "learning_rate": 0.00013943395497895673, + "loss": 0.2236, + "step": 23530 + }, + { + "epoch": 0.9089154021390787, + "grad_norm": 1.9246234893798828, + "learning_rate": 0.00013940821395935492, + "loss": 0.3332, + "step": 23540 + }, + { + "epoch": 0.9093015174331055, + "grad_norm": 6.114970684051514, + "learning_rate": 0.00013938247293975316, + "loss": 0.3265, + "step": 23550 + }, + { + "epoch": 0.9096876327271323, + "grad_norm": 2.369112968444824, + "learning_rate": 0.00013935673192015137, + "loss": 0.3105, + "step": 23560 + }, + { + "epoch": 0.9100737480211591, + "grad_norm": 4.402872562408447, + "learning_rate": 0.00013933099090054958, + "loss": 0.3496, + "step": 23570 + }, + { + "epoch": 0.9104598633151859, + "grad_norm": 0.6064890027046204, + "learning_rate": 0.0001393052498809478, + "loss": 0.1322, + "step": 23580 + }, + { + "epoch": 0.9108459786092127, + "grad_norm": 0.41702714562416077, + "learning_rate": 0.000139279508861346, + "loss": 0.0886, + "step": 23590 + }, + { + "epoch": 0.9112320939032396, + "grad_norm": 1.1597472429275513, + "learning_rate": 0.00013925376784174422, + "loss": 0.1967, + "step": 23600 + }, + { + "epoch": 0.9116182091972663, + "grad_norm": 1.1049001216888428, + "learning_rate": 0.0001392280268221424, + "loss": 0.2802, + "step": 23610 + }, + { + "epoch": 0.9120043244912931, + "grad_norm": 0.7986807227134705, + "learning_rate": 0.00013920228580254065, + "loss": 0.1872, + "step": 23620 + }, + { + "epoch": 0.9123904397853199, + "grad_norm": 0.548693060874939, + "learning_rate": 0.00013917654478293886, + "loss": 0.3561, + "step": 23630 + }, + { + "epoch": 0.9127765550793467, + "grad_norm": 1.5944240093231201, + "learning_rate": 0.00013915080376333708, + "loss": 0.4619, + "step": 23640 + }, + { + "epoch": 0.9131626703733735, + "grad_norm": 1.9891632795333862, + "learning_rate": 0.0001391250627437353, + "loss": 0.3883, + "step": 23650 + }, + { + "epoch": 0.9135487856674003, + "grad_norm": 0.4564145803451538, + "learning_rate": 0.0001390993217241335, + "loss": 0.233, + "step": 23660 + }, + { + "epoch": 0.913934900961427, + "grad_norm": 1.1683684587478638, + "learning_rate": 0.00013907358070453172, + "loss": 0.4692, + "step": 23670 + }, + { + "epoch": 0.9143210162554539, + "grad_norm": 5.883500099182129, + "learning_rate": 0.00013904783968492993, + "loss": 0.2134, + "step": 23680 + }, + { + "epoch": 0.9147071315494807, + "grad_norm": 0.7426010370254517, + "learning_rate": 0.00013902209866532814, + "loss": 0.3608, + "step": 23690 + }, + { + "epoch": 0.9150932468435075, + "grad_norm": 1.5476068258285522, + "learning_rate": 0.00013899635764572636, + "loss": 0.2194, + "step": 23700 + }, + { + "epoch": 0.9154793621375342, + "grad_norm": 1.5702605247497559, + "learning_rate": 0.00013897061662612457, + "loss": 0.1731, + "step": 23710 + }, + { + "epoch": 0.9158654774315611, + "grad_norm": 2.336073637008667, + "learning_rate": 0.00013894487560652278, + "loss": 0.2703, + "step": 23720 + }, + { + "epoch": 0.9162515927255879, + "grad_norm": 0.4154629111289978, + "learning_rate": 0.000138919134586921, + "loss": 0.2601, + "step": 23730 + }, + { + "epoch": 0.9166377080196146, + "grad_norm": 1.9994091987609863, + "learning_rate": 0.0001388933935673192, + "loss": 0.4536, + "step": 23740 + }, + { + "epoch": 0.9170238233136414, + "grad_norm": 0.4610597491264343, + "learning_rate": 
0.00013886765254771742, + "loss": 0.2726, + "step": 23750 + }, + { + "epoch": 0.9174099386076683, + "grad_norm": 2.19671893119812, + "learning_rate": 0.00013884191152811564, + "loss": 0.3418, + "step": 23760 + }, + { + "epoch": 0.917796053901695, + "grad_norm": 0.619023323059082, + "learning_rate": 0.00013881617050851385, + "loss": 0.2761, + "step": 23770 + }, + { + "epoch": 0.9181821691957218, + "grad_norm": 1.667083978652954, + "learning_rate": 0.00013879042948891206, + "loss": 0.2341, + "step": 23780 + }, + { + "epoch": 0.9185682844897486, + "grad_norm": 0.349020391702652, + "learning_rate": 0.00013876468846931028, + "loss": 0.244, + "step": 23790 + }, + { + "epoch": 0.9189543997837755, + "grad_norm": 3.2495415210723877, + "learning_rate": 0.0001387389474497085, + "loss": 0.2454, + "step": 23800 + }, + { + "epoch": 0.9193405150778022, + "grad_norm": 0.7900146842002869, + "learning_rate": 0.0001387132064301067, + "loss": 0.3209, + "step": 23810 + }, + { + "epoch": 0.919726630371829, + "grad_norm": 1.2435237169265747, + "learning_rate": 0.00013868746541050492, + "loss": 0.3719, + "step": 23820 + }, + { + "epoch": 0.9201127456658558, + "grad_norm": 0.7372536659240723, + "learning_rate": 0.00013866172439090313, + "loss": 0.2984, + "step": 23830 + }, + { + "epoch": 0.9204988609598826, + "grad_norm": 2.814180374145508, + "learning_rate": 0.00013863598337130134, + "loss": 0.5081, + "step": 23840 + }, + { + "epoch": 0.9208849762539094, + "grad_norm": 3.5411558151245117, + "learning_rate": 0.00013861024235169956, + "loss": 0.4475, + "step": 23850 + }, + { + "epoch": 0.9212710915479362, + "grad_norm": 0.41628485918045044, + "learning_rate": 0.00013858450133209777, + "loss": 0.1679, + "step": 23860 + }, + { + "epoch": 0.9216572068419631, + "grad_norm": 0.7951272130012512, + "learning_rate": 0.00013855876031249598, + "loss": 0.4346, + "step": 23870 + }, + { + "epoch": 0.9220433221359898, + "grad_norm": 0.6857497692108154, + "learning_rate": 0.0001385330192928942, + "loss": 0.2262, + "step": 23880 + }, + { + "epoch": 0.9224294374300166, + "grad_norm": 2.732487678527832, + "learning_rate": 0.0001385072782732924, + "loss": 0.3198, + "step": 23890 + }, + { + "epoch": 0.9228155527240434, + "grad_norm": 0.18741728365421295, + "learning_rate": 0.00013848153725369062, + "loss": 0.2409, + "step": 23900 + }, + { + "epoch": 0.9232016680180702, + "grad_norm": 0.46343281865119934, + "learning_rate": 0.00013845579623408884, + "loss": 0.2379, + "step": 23910 + }, + { + "epoch": 0.923587783312097, + "grad_norm": 1.7090940475463867, + "learning_rate": 0.00013843005521448705, + "loss": 0.2274, + "step": 23920 + }, + { + "epoch": 0.9239738986061238, + "grad_norm": 0.2000303715467453, + "learning_rate": 0.00013840431419488526, + "loss": 0.5171, + "step": 23930 + }, + { + "epoch": 0.9243600139001505, + "grad_norm": 1.6152868270874023, + "learning_rate": 0.00013837857317528348, + "loss": 0.1348, + "step": 23940 + }, + { + "epoch": 0.9247461291941774, + "grad_norm": 1.5346245765686035, + "learning_rate": 0.0001383528321556817, + "loss": 0.3708, + "step": 23950 + }, + { + "epoch": 0.9251322444882042, + "grad_norm": 2.1073787212371826, + "learning_rate": 0.0001383270911360799, + "loss": 0.3694, + "step": 23960 + }, + { + "epoch": 0.925518359782231, + "grad_norm": 12.8298921585083, + "learning_rate": 0.00013830135011647812, + "loss": 0.269, + "step": 23970 + }, + { + "epoch": 0.9259044750762577, + "grad_norm": 0.43689021468162537, + "learning_rate": 0.00013827560909687633, + "loss": 0.3099, + "step": 23980 + }, + { + 
"epoch": 0.9262905903702846, + "grad_norm": 2.084096908569336, + "learning_rate": 0.00013824986807727457, + "loss": 0.4423, + "step": 23990 + }, + { + "epoch": 0.9266767056643114, + "grad_norm": 0.9367966651916504, + "learning_rate": 0.00013822412705767276, + "loss": 0.2202, + "step": 24000 + }, + { + "epoch": 0.9270628209583381, + "grad_norm": 0.14286178350448608, + "learning_rate": 0.00013819838603807097, + "loss": 0.2302, + "step": 24010 + }, + { + "epoch": 0.9274489362523649, + "grad_norm": 0.7110779285430908, + "learning_rate": 0.00013817264501846918, + "loss": 0.3598, + "step": 24020 + }, + { + "epoch": 0.9278350515463918, + "grad_norm": 2.352980136871338, + "learning_rate": 0.0001381469039988674, + "loss": 0.4493, + "step": 24030 + }, + { + "epoch": 0.9282211668404186, + "grad_norm": 2.2235450744628906, + "learning_rate": 0.0001381211629792656, + "loss": 0.547, + "step": 24040 + }, + { + "epoch": 0.9286072821344453, + "grad_norm": 2.4419260025024414, + "learning_rate": 0.00013809542195966382, + "loss": 0.4612, + "step": 24050 + }, + { + "epoch": 0.9289933974284721, + "grad_norm": 1.3784935474395752, + "learning_rate": 0.00013806968094006206, + "loss": 0.3996, + "step": 24060 + }, + { + "epoch": 0.929379512722499, + "grad_norm": 1.019810676574707, + "learning_rate": 0.00013804393992046025, + "loss": 0.5654, + "step": 24070 + }, + { + "epoch": 0.9297656280165257, + "grad_norm": 2.399096965789795, + "learning_rate": 0.00013801819890085846, + "loss": 0.3335, + "step": 24080 + }, + { + "epoch": 0.9301517433105525, + "grad_norm": 0.28834161162376404, + "learning_rate": 0.00013799245788125668, + "loss": 0.4946, + "step": 24090 + }, + { + "epoch": 0.9305378586045794, + "grad_norm": 0.10426662117242813, + "learning_rate": 0.0001379667168616549, + "loss": 0.3275, + "step": 24100 + }, + { + "epoch": 0.9309239738986061, + "grad_norm": 2.7809340953826904, + "learning_rate": 0.0001379409758420531, + "loss": 0.3549, + "step": 24110 + }, + { + "epoch": 0.9313100891926329, + "grad_norm": 1.8708065748214722, + "learning_rate": 0.00013791523482245132, + "loss": 0.1429, + "step": 24120 + }, + { + "epoch": 0.9316962044866597, + "grad_norm": 1.9992274045944214, + "learning_rate": 0.00013788949380284956, + "loss": 0.2818, + "step": 24130 + }, + { + "epoch": 0.9320823197806866, + "grad_norm": 1.3665111064910889, + "learning_rate": 0.00013786375278324774, + "loss": 0.3565, + "step": 24140 + }, + { + "epoch": 0.9324684350747133, + "grad_norm": 3.106234550476074, + "learning_rate": 0.00013783801176364596, + "loss": 0.3706, + "step": 24150 + }, + { + "epoch": 0.9328545503687401, + "grad_norm": 1.2186559438705444, + "learning_rate": 0.00013781227074404417, + "loss": 0.1616, + "step": 24160 + }, + { + "epoch": 0.9332406656627669, + "grad_norm": 0.41551148891448975, + "learning_rate": 0.00013778652972444238, + "loss": 0.3073, + "step": 24170 + }, + { + "epoch": 0.9336267809567937, + "grad_norm": 2.015069007873535, + "learning_rate": 0.00013776078870484062, + "loss": 0.3317, + "step": 24180 + }, + { + "epoch": 0.9340128962508205, + "grad_norm": 1.9990328550338745, + "learning_rate": 0.0001377350476852388, + "loss": 0.4937, + "step": 24190 + }, + { + "epoch": 0.9343990115448473, + "grad_norm": 2.351898670196533, + "learning_rate": 0.00013770930666563705, + "loss": 0.3994, + "step": 24200 + }, + { + "epoch": 0.934785126838874, + "grad_norm": 1.4670008420944214, + "learning_rate": 0.00013768356564603524, + "loss": 0.2905, + "step": 24210 + }, + { + "epoch": 0.9351712421329009, + "grad_norm": 0.9890618920326233, + 
"learning_rate": 0.00013765782462643345, + "loss": 0.2512, + "step": 24220 + }, + { + "epoch": 0.9355573574269277, + "grad_norm": 0.3020402491092682, + "learning_rate": 0.00013763208360683166, + "loss": 0.3701, + "step": 24230 + }, + { + "epoch": 0.9359434727209545, + "grad_norm": 0.42218661308288574, + "learning_rate": 0.00013760634258722988, + "loss": 0.3395, + "step": 24240 + }, + { + "epoch": 0.9363295880149812, + "grad_norm": 1.5767306089401245, + "learning_rate": 0.00013758060156762812, + "loss": 0.3941, + "step": 24250 + }, + { + "epoch": 0.9367157033090081, + "grad_norm": 0.5416197180747986, + "learning_rate": 0.0001375548605480263, + "loss": 0.2915, + "step": 24260 + }, + { + "epoch": 0.9371018186030349, + "grad_norm": 1.175347924232483, + "learning_rate": 0.00013752911952842454, + "loss": 0.1284, + "step": 24270 + }, + { + "epoch": 0.9374879338970616, + "grad_norm": 0.8719255924224854, + "learning_rate": 0.00013750337850882273, + "loss": 0.3144, + "step": 24280 + }, + { + "epoch": 0.9378740491910884, + "grad_norm": 0.2711631655693054, + "learning_rate": 0.00013747763748922097, + "loss": 0.1561, + "step": 24290 + }, + { + "epoch": 0.9382601644851153, + "grad_norm": 3.2228004932403564, + "learning_rate": 0.00013745189646961918, + "loss": 0.5494, + "step": 24300 + }, + { + "epoch": 0.9386462797791421, + "grad_norm": 0.05647150054574013, + "learning_rate": 0.00013742615545001737, + "loss": 0.3571, + "step": 24310 + }, + { + "epoch": 0.9390323950731688, + "grad_norm": 0.8459005951881409, + "learning_rate": 0.0001374004144304156, + "loss": 0.1948, + "step": 24320 + }, + { + "epoch": 0.9394185103671956, + "grad_norm": 0.7044252157211304, + "learning_rate": 0.0001373746734108138, + "loss": 0.2511, + "step": 24330 + }, + { + "epoch": 0.9398046256612225, + "grad_norm": 0.979590654373169, + "learning_rate": 0.00013734893239121204, + "loss": 0.2158, + "step": 24340 + }, + { + "epoch": 0.9401907409552492, + "grad_norm": 1.0585628747940063, + "learning_rate": 0.00013732319137161022, + "loss": 0.1339, + "step": 24350 + }, + { + "epoch": 0.940576856249276, + "grad_norm": 0.6700488328933716, + "learning_rate": 0.00013729745035200846, + "loss": 0.2056, + "step": 24360 + }, + { + "epoch": 0.9409629715433029, + "grad_norm": 1.6656709909439087, + "learning_rate": 0.00013727170933240668, + "loss": 0.2957, + "step": 24370 + }, + { + "epoch": 0.9413490868373297, + "grad_norm": 2.963427782058716, + "learning_rate": 0.00013724596831280486, + "loss": 0.2673, + "step": 24380 + }, + { + "epoch": 0.9417352021313564, + "grad_norm": 0.8103615641593933, + "learning_rate": 0.0001372202272932031, + "loss": 0.372, + "step": 24390 + }, + { + "epoch": 0.9421213174253832, + "grad_norm": 2.3593873977661133, + "learning_rate": 0.0001371944862736013, + "loss": 0.431, + "step": 24400 + }, + { + "epoch": 0.9425074327194101, + "grad_norm": 2.8472931385040283, + "learning_rate": 0.00013716874525399953, + "loss": 0.4584, + "step": 24410 + }, + { + "epoch": 0.9428935480134368, + "grad_norm": 1.2765402793884277, + "learning_rate": 0.00013714300423439772, + "loss": 0.2543, + "step": 24420 + }, + { + "epoch": 0.9432796633074636, + "grad_norm": 1.4226797819137573, + "learning_rate": 0.00013711726321479596, + "loss": 0.2914, + "step": 24430 + }, + { + "epoch": 0.9436657786014904, + "grad_norm": 6.906572341918945, + "learning_rate": 0.00013709152219519417, + "loss": 0.4415, + "step": 24440 + }, + { + "epoch": 0.9440518938955172, + "grad_norm": 1.8387972116470337, + "learning_rate": 0.00013706578117559236, + "loss": 0.3018, + "step": 
24450 + }, + { + "epoch": 0.944438009189544, + "grad_norm": 0.7259104251861572, + "learning_rate": 0.0001370400401559906, + "loss": 0.2356, + "step": 24460 + }, + { + "epoch": 0.9448241244835708, + "grad_norm": 0.6452949643135071, + "learning_rate": 0.00013701429913638878, + "loss": 0.2382, + "step": 24470 + }, + { + "epoch": 0.9452102397775975, + "grad_norm": 4.259208679199219, + "learning_rate": 0.00013698855811678702, + "loss": 0.4736, + "step": 24480 + }, + { + "epoch": 0.9455963550716244, + "grad_norm": 2.7305455207824707, + "learning_rate": 0.00013696281709718524, + "loss": 0.5034, + "step": 24490 + }, + { + "epoch": 0.9459824703656512, + "grad_norm": 0.6123724579811096, + "learning_rate": 0.00013693707607758345, + "loss": 0.3638, + "step": 24500 + }, + { + "epoch": 0.946368585659678, + "grad_norm": 0.9821889400482178, + "learning_rate": 0.00013691133505798166, + "loss": 0.3081, + "step": 24510 + }, + { + "epoch": 0.9467547009537047, + "grad_norm": 2.0870277881622314, + "learning_rate": 0.00013688559403837985, + "loss": 0.3116, + "step": 24520 + }, + { + "epoch": 0.9471408162477316, + "grad_norm": 2.495162010192871, + "learning_rate": 0.0001368598530187781, + "loss": 0.3879, + "step": 24530 + }, + { + "epoch": 0.9475269315417584, + "grad_norm": 1.4834142923355103, + "learning_rate": 0.00013683411199917628, + "loss": 0.1975, + "step": 24540 + }, + { + "epoch": 0.9479130468357851, + "grad_norm": 0.10831606388092041, + "learning_rate": 0.00013680837097957452, + "loss": 0.3504, + "step": 24550 + }, + { + "epoch": 0.9482991621298119, + "grad_norm": 1.6975635290145874, + "learning_rate": 0.00013678262995997273, + "loss": 0.3305, + "step": 24560 + }, + { + "epoch": 0.9486852774238388, + "grad_norm": 1.982422947883606, + "learning_rate": 0.00013675688894037094, + "loss": 0.3693, + "step": 24570 + }, + { + "epoch": 0.9490713927178656, + "grad_norm": 0.3487630784511566, + "learning_rate": 0.00013673114792076916, + "loss": 0.1912, + "step": 24580 + }, + { + "epoch": 0.9494575080118923, + "grad_norm": 0.4546245336532593, + "learning_rate": 0.00013670540690116734, + "loss": 0.2519, + "step": 24590 + }, + { + "epoch": 0.9498436233059191, + "grad_norm": 0.42953622341156006, + "learning_rate": 0.00013667966588156558, + "loss": 0.1967, + "step": 24600 + }, + { + "epoch": 0.950229738599946, + "grad_norm": 0.8197507858276367, + "learning_rate": 0.00013665392486196377, + "loss": 0.3057, + "step": 24610 + }, + { + "epoch": 0.9506158538939727, + "grad_norm": 0.8627083897590637, + "learning_rate": 0.000136628183842362, + "loss": 0.1382, + "step": 24620 + }, + { + "epoch": 0.9510019691879995, + "grad_norm": 1.0003200769424438, + "learning_rate": 0.00013660244282276022, + "loss": 0.1608, + "step": 24630 + }, + { + "epoch": 0.9513880844820264, + "grad_norm": 0.4473998546600342, + "learning_rate": 0.00013657670180315844, + "loss": 0.2226, + "step": 24640 + }, + { + "epoch": 0.9517741997760532, + "grad_norm": 1.9413338899612427, + "learning_rate": 0.00013655096078355665, + "loss": 0.2323, + "step": 24650 + }, + { + "epoch": 0.9521603150700799, + "grad_norm": 1.3250267505645752, + "learning_rate": 0.00013652521976395483, + "loss": 0.1617, + "step": 24660 + }, + { + "epoch": 0.9525464303641067, + "grad_norm": 1.756535530090332, + "learning_rate": 0.00013649947874435308, + "loss": 0.2727, + "step": 24670 + }, + { + "epoch": 0.9529325456581336, + "grad_norm": 1.1905356645584106, + "learning_rate": 0.0001364737377247513, + "loss": 0.3169, + "step": 24680 + }, + { + "epoch": 0.9533186609521603, + "grad_norm": 
0.8116361498832703, + "learning_rate": 0.0001364479967051495, + "loss": 0.3999, + "step": 24690 + }, + { + "epoch": 0.9537047762461871, + "grad_norm": 0.8906353712081909, + "learning_rate": 0.00013642225568554772, + "loss": 0.3499, + "step": 24700 + }, + { + "epoch": 0.9540908915402139, + "grad_norm": 0.7422589659690857, + "learning_rate": 0.00013639651466594593, + "loss": 0.3544, + "step": 24710 + }, + { + "epoch": 0.9544770068342407, + "grad_norm": 0.21925519406795502, + "learning_rate": 0.00013637077364634414, + "loss": 0.1957, + "step": 24720 + }, + { + "epoch": 0.9548631221282675, + "grad_norm": 3.2993857860565186, + "learning_rate": 0.00013634503262674236, + "loss": 0.2625, + "step": 24730 + }, + { + "epoch": 0.9552492374222943, + "grad_norm": 1.4352943897247314, + "learning_rate": 0.00013631929160714057, + "loss": 0.1655, + "step": 24740 + }, + { + "epoch": 0.955635352716321, + "grad_norm": 1.4417182207107544, + "learning_rate": 0.00013629355058753878, + "loss": 0.2442, + "step": 24750 + }, + { + "epoch": 0.9560214680103479, + "grad_norm": 0.5024278163909912, + "learning_rate": 0.000136267809567937, + "loss": 0.2035, + "step": 24760 + }, + { + "epoch": 0.9564075833043747, + "grad_norm": 1.553472638130188, + "learning_rate": 0.0001362420685483352, + "loss": 0.22, + "step": 24770 + }, + { + "epoch": 0.9567936985984015, + "grad_norm": 1.655155897140503, + "learning_rate": 0.00013621632752873342, + "loss": 0.4051, + "step": 24780 + }, + { + "epoch": 0.9571798138924282, + "grad_norm": 0.4435586631298065, + "learning_rate": 0.00013619058650913164, + "loss": 0.3032, + "step": 24790 + }, + { + "epoch": 0.9575659291864551, + "grad_norm": 0.38589465618133545, + "learning_rate": 0.00013616484548952985, + "loss": 0.2062, + "step": 24800 + }, + { + "epoch": 0.9579520444804819, + "grad_norm": 0.6986583471298218, + "learning_rate": 0.00013613910446992806, + "loss": 0.6371, + "step": 24810 + }, + { + "epoch": 0.9583381597745086, + "grad_norm": 1.69257652759552, + "learning_rate": 0.00013611336345032627, + "loss": 0.2425, + "step": 24820 + }, + { + "epoch": 0.9587242750685354, + "grad_norm": 0.6136781573295593, + "learning_rate": 0.0001360876224307245, + "loss": 0.313, + "step": 24830 + }, + { + "epoch": 0.9591103903625623, + "grad_norm": 0.9019533395767212, + "learning_rate": 0.0001360618814111227, + "loss": 0.3723, + "step": 24840 + }, + { + "epoch": 0.9594965056565891, + "grad_norm": 4.601387977600098, + "learning_rate": 0.00013603614039152091, + "loss": 0.4228, + "step": 24850 + }, + { + "epoch": 0.9598826209506158, + "grad_norm": 0.35048994421958923, + "learning_rate": 0.00013601039937191913, + "loss": 0.2423, + "step": 24860 + }, + { + "epoch": 0.9602687362446426, + "grad_norm": 1.7177300453186035, + "learning_rate": 0.00013598465835231734, + "loss": 0.3995, + "step": 24870 + }, + { + "epoch": 0.9606548515386695, + "grad_norm": 1.3341178894042969, + "learning_rate": 0.00013595891733271555, + "loss": 0.3422, + "step": 24880 + }, + { + "epoch": 0.9610409668326962, + "grad_norm": 1.1859056949615479, + "learning_rate": 0.00013593317631311377, + "loss": 0.4204, + "step": 24890 + }, + { + "epoch": 0.961427082126723, + "grad_norm": 1.9831022024154663, + "learning_rate": 0.00013590743529351198, + "loss": 0.3623, + "step": 24900 + }, + { + "epoch": 0.9618131974207499, + "grad_norm": 0.8704162240028381, + "learning_rate": 0.0001358816942739102, + "loss": 0.2664, + "step": 24910 + }, + { + "epoch": 0.9621993127147767, + "grad_norm": 2.2464160919189453, + "learning_rate": 0.0001358559532543084, + "loss": 
0.3628, + "step": 24920 + }, + { + "epoch": 0.9625854280088034, + "grad_norm": 2.5081570148468018, + "learning_rate": 0.00013583021223470662, + "loss": 0.314, + "step": 24930 + }, + { + "epoch": 0.9629715433028302, + "grad_norm": 4.44802713394165, + "learning_rate": 0.00013580447121510483, + "loss": 0.4593, + "step": 24940 + }, + { + "epoch": 0.9633576585968571, + "grad_norm": 2.0449907779693604, + "learning_rate": 0.00013577873019550305, + "loss": 0.3941, + "step": 24950 + }, + { + "epoch": 0.9637437738908838, + "grad_norm": 2.090315818786621, + "learning_rate": 0.00013575298917590126, + "loss": 0.3713, + "step": 24960 + }, + { + "epoch": 0.9641298891849106, + "grad_norm": 1.0789872407913208, + "learning_rate": 0.00013572724815629947, + "loss": 0.2411, + "step": 24970 + }, + { + "epoch": 0.9645160044789374, + "grad_norm": 2.0463109016418457, + "learning_rate": 0.0001357015071366977, + "loss": 0.2935, + "step": 24980 + }, + { + "epoch": 0.9649021197729643, + "grad_norm": 1.331299901008606, + "learning_rate": 0.00013567576611709593, + "loss": 0.2162, + "step": 24990 + }, + { + "epoch": 0.965288235066991, + "grad_norm": 3.3949038982391357, + "learning_rate": 0.00013565002509749411, + "loss": 0.4593, + "step": 25000 + }, + { + "epoch": 0.9656743503610178, + "grad_norm": 2.4510934352874756, + "learning_rate": 0.00013562428407789233, + "loss": 0.2476, + "step": 25010 + }, + { + "epoch": 0.9660604656550446, + "grad_norm": 1.1333917379379272, + "learning_rate": 0.00013559854305829054, + "loss": 0.3035, + "step": 25020 + }, + { + "epoch": 0.9664465809490714, + "grad_norm": 2.5525829792022705, + "learning_rate": 0.00013557280203868875, + "loss": 0.4498, + "step": 25030 + }, + { + "epoch": 0.9668326962430982, + "grad_norm": 1.4862838983535767, + "learning_rate": 0.00013554706101908697, + "loss": 0.3109, + "step": 25040 + }, + { + "epoch": 0.967218811537125, + "grad_norm": 1.0053727626800537, + "learning_rate": 0.00013552131999948518, + "loss": 0.3029, + "step": 25050 + }, + { + "epoch": 0.9676049268311517, + "grad_norm": 1.4033957719802856, + "learning_rate": 0.00013549557897988342, + "loss": 0.3096, + "step": 25060 + }, + { + "epoch": 0.9679910421251786, + "grad_norm": 2.1944756507873535, + "learning_rate": 0.0001354698379602816, + "loss": 0.3428, + "step": 25070 + }, + { + "epoch": 0.9683771574192054, + "grad_norm": 3.330143928527832, + "learning_rate": 0.00013544409694067982, + "loss": 0.4026, + "step": 25080 + }, + { + "epoch": 0.9687632727132321, + "grad_norm": 1.9337730407714844, + "learning_rate": 0.00013541835592107803, + "loss": 0.5343, + "step": 25090 + }, + { + "epoch": 0.9691493880072589, + "grad_norm": 4.203855514526367, + "learning_rate": 0.00013539261490147625, + "loss": 0.4461, + "step": 25100 + }, + { + "epoch": 0.9695355033012858, + "grad_norm": 0.6582885980606079, + "learning_rate": 0.0001353668738818745, + "loss": 0.2719, + "step": 25110 + }, + { + "epoch": 0.9699216185953126, + "grad_norm": 0.8788600564002991, + "learning_rate": 0.00013534113286227267, + "loss": 0.4335, + "step": 25120 + }, + { + "epoch": 0.9703077338893393, + "grad_norm": 1.3793160915374756, + "learning_rate": 0.00013531539184267091, + "loss": 0.3126, + "step": 25130 + }, + { + "epoch": 0.9706938491833662, + "grad_norm": 3.5996806621551514, + "learning_rate": 0.0001352896508230691, + "loss": 0.5171, + "step": 25140 + }, + { + "epoch": 0.971079964477393, + "grad_norm": 1.6220872402191162, + "learning_rate": 0.00013526390980346731, + "loss": 0.3992, + "step": 25150 + }, + { + "epoch": 0.9714660797714197, + 
"grad_norm": 1.8351634740829468, + "learning_rate": 0.00013523816878386553, + "loss": 0.3159, + "step": 25160 + }, + { + "epoch": 0.9718521950654465, + "grad_norm": 0.6400974988937378, + "learning_rate": 0.00013521242776426374, + "loss": 0.3187, + "step": 25170 + }, + { + "epoch": 0.9722383103594734, + "grad_norm": 1.3507485389709473, + "learning_rate": 0.00013518668674466198, + "loss": 0.3626, + "step": 25180 + }, + { + "epoch": 0.9726244256535002, + "grad_norm": 1.4778717756271362, + "learning_rate": 0.00013516094572506017, + "loss": 0.3467, + "step": 25190 + }, + { + "epoch": 0.9730105409475269, + "grad_norm": 0.4346179664134979, + "learning_rate": 0.0001351352047054584, + "loss": 0.1995, + "step": 25200 + }, + { + "epoch": 0.9733966562415537, + "grad_norm": 2.8404130935668945, + "learning_rate": 0.0001351094636858566, + "loss": 0.4765, + "step": 25210 + }, + { + "epoch": 0.9737827715355806, + "grad_norm": 0.044492240995168686, + "learning_rate": 0.0001350837226662548, + "loss": 0.3047, + "step": 25220 + }, + { + "epoch": 0.9741688868296073, + "grad_norm": 2.258355140686035, + "learning_rate": 0.00013505798164665302, + "loss": 0.234, + "step": 25230 + }, + { + "epoch": 0.9745550021236341, + "grad_norm": 0.23794110119342804, + "learning_rate": 0.00013503224062705123, + "loss": 0.5178, + "step": 25240 + }, + { + "epoch": 0.9749411174176609, + "grad_norm": 0.5849624872207642, + "learning_rate": 0.00013500649960744947, + "loss": 0.3419, + "step": 25250 + }, + { + "epoch": 0.9753272327116878, + "grad_norm": 1.535228967666626, + "learning_rate": 0.00013498075858784766, + "loss": 0.275, + "step": 25260 + }, + { + "epoch": 0.9757133480057145, + "grad_norm": 4.943759441375732, + "learning_rate": 0.0001349550175682459, + "loss": 0.241, + "step": 25270 + }, + { + "epoch": 0.9760994632997413, + "grad_norm": 1.3046916723251343, + "learning_rate": 0.0001349292765486441, + "loss": 0.2453, + "step": 25280 + }, + { + "epoch": 0.976485578593768, + "grad_norm": 0.20991156995296478, + "learning_rate": 0.0001349035355290423, + "loss": 0.392, + "step": 25290 + }, + { + "epoch": 0.9768716938877949, + "grad_norm": 1.2106267213821411, + "learning_rate": 0.00013487779450944054, + "loss": 0.284, + "step": 25300 + }, + { + "epoch": 0.9772578091818217, + "grad_norm": 0.28197771310806274, + "learning_rate": 0.00013485205348983873, + "loss": 0.2596, + "step": 25310 + }, + { + "epoch": 0.9776439244758485, + "grad_norm": 0.8538393378257751, + "learning_rate": 0.00013482631247023697, + "loss": 0.2519, + "step": 25320 + }, + { + "epoch": 0.9780300397698752, + "grad_norm": 1.9520586729049683, + "learning_rate": 0.00013480057145063515, + "loss": 0.2581, + "step": 25330 + }, + { + "epoch": 0.9784161550639021, + "grad_norm": 0.7613987326622009, + "learning_rate": 0.0001347748304310334, + "loss": 0.1549, + "step": 25340 + }, + { + "epoch": 0.9788022703579289, + "grad_norm": 0.2045626938343048, + "learning_rate": 0.00013474908941143158, + "loss": 0.2587, + "step": 25350 + }, + { + "epoch": 0.9791883856519557, + "grad_norm": 1.634488821029663, + "learning_rate": 0.0001347233483918298, + "loss": 0.2266, + "step": 25360 + }, + { + "epoch": 0.9795745009459824, + "grad_norm": 0.6475266814231873, + "learning_rate": 0.00013469760737222803, + "loss": 0.2548, + "step": 25370 + }, + { + "epoch": 0.9799606162400093, + "grad_norm": 0.25982800126075745, + "learning_rate": 0.00013467186635262622, + "loss": 0.3398, + "step": 25380 + }, + { + "epoch": 0.9803467315340361, + "grad_norm": 2.3229706287384033, + "learning_rate": 
0.00013464612533302446, + "loss": 0.3468, + "step": 25390 + }, + { + "epoch": 0.9807328468280628, + "grad_norm": 0.6465128660202026, + "learning_rate": 0.00013462038431342265, + "loss": 0.2716, + "step": 25400 + }, + { + "epoch": 0.9811189621220897, + "grad_norm": 1.413368821144104, + "learning_rate": 0.0001345946432938209, + "loss": 0.2508, + "step": 25410 + }, + { + "epoch": 0.9815050774161165, + "grad_norm": 0.33577996492385864, + "learning_rate": 0.00013456890227421907, + "loss": 0.3323, + "step": 25420 + }, + { + "epoch": 0.9818911927101432, + "grad_norm": 0.7601230144500732, + "learning_rate": 0.0001345431612546173, + "loss": 0.2682, + "step": 25430 + }, + { + "epoch": 0.98227730800417, + "grad_norm": 4.296112060546875, + "learning_rate": 0.00013451742023501553, + "loss": 0.2886, + "step": 25440 + }, + { + "epoch": 0.9826634232981969, + "grad_norm": 1.2557302713394165, + "learning_rate": 0.00013449167921541371, + "loss": 0.2863, + "step": 25450 + }, + { + "epoch": 0.9830495385922237, + "grad_norm": 0.6168705821037292, + "learning_rate": 0.00013446593819581195, + "loss": 0.195, + "step": 25460 + }, + { + "epoch": 0.9834356538862504, + "grad_norm": 1.9064022302627563, + "learning_rate": 0.00013444019717621014, + "loss": 0.2898, + "step": 25470 + }, + { + "epoch": 0.9838217691802772, + "grad_norm": 0.3549353778362274, + "learning_rate": 0.00013441445615660838, + "loss": 0.1761, + "step": 25480 + }, + { + "epoch": 0.9842078844743041, + "grad_norm": 0.6308786869049072, + "learning_rate": 0.0001343887151370066, + "loss": 0.2169, + "step": 25490 + }, + { + "epoch": 0.9845939997683308, + "grad_norm": 0.9449920058250427, + "learning_rate": 0.0001343629741174048, + "loss": 0.2952, + "step": 25500 + }, + { + "epoch": 0.9849801150623576, + "grad_norm": 1.6993355751037598, + "learning_rate": 0.00013433723309780302, + "loss": 0.3745, + "step": 25510 + }, + { + "epoch": 0.9853662303563844, + "grad_norm": 0.8093920350074768, + "learning_rate": 0.0001343114920782012, + "loss": 0.1725, + "step": 25520 + }, + { + "epoch": 0.9857523456504113, + "grad_norm": 1.4968618154525757, + "learning_rate": 0.00013428575105859945, + "loss": 0.2843, + "step": 25530 + }, + { + "epoch": 0.986138460944438, + "grad_norm": 0.37341028451919556, + "learning_rate": 0.00013426001003899763, + "loss": 0.2462, + "step": 25540 + }, + { + "epoch": 0.9865245762384648, + "grad_norm": 2.0109541416168213, + "learning_rate": 0.00013423426901939587, + "loss": 0.4046, + "step": 25550 + }, + { + "epoch": 0.9869106915324916, + "grad_norm": 2.540151357650757, + "learning_rate": 0.0001342085279997941, + "loss": 0.4331, + "step": 25560 + }, + { + "epoch": 0.9872968068265184, + "grad_norm": 0.9178367257118225, + "learning_rate": 0.0001341827869801923, + "loss": 0.3849, + "step": 25570 + }, + { + "epoch": 0.9876829221205452, + "grad_norm": 1.4229514598846436, + "learning_rate": 0.00013415704596059051, + "loss": 0.326, + "step": 25580 + }, + { + "epoch": 0.988069037414572, + "grad_norm": 0.7699927091598511, + "learning_rate": 0.0001341313049409887, + "loss": 0.237, + "step": 25590 + }, + { + "epoch": 0.9884551527085987, + "grad_norm": 0.3460877239704132, + "learning_rate": 0.00013410556392138694, + "loss": 0.2679, + "step": 25600 + }, + { + "epoch": 0.9888412680026256, + "grad_norm": 0.25363796949386597, + "learning_rate": 0.00013407982290178515, + "loss": 0.1746, + "step": 25610 + }, + { + "epoch": 0.9892273832966524, + "grad_norm": 1.5607961416244507, + "learning_rate": 0.00013405408188218337, + "loss": 0.6328, + "step": 25620 + }, + { + 
"epoch": 0.9896134985906792, + "grad_norm": 2.3751626014709473, + "learning_rate": 0.00013402834086258158, + "loss": 0.3989, + "step": 25630 + }, + { + "epoch": 0.9899996138847059, + "grad_norm": 0.20423173904418945, + "learning_rate": 0.0001340025998429798, + "loss": 0.2836, + "step": 25640 + }, + { + "epoch": 0.9903857291787328, + "grad_norm": 1.4207524061203003, + "learning_rate": 0.000133976858823378, + "loss": 0.348, + "step": 25650 + }, + { + "epoch": 0.9907718444727596, + "grad_norm": 0.12217597663402557, + "learning_rate": 0.0001339511178037762, + "loss": 0.3515, + "step": 25660 + }, + { + "epoch": 0.9911579597667863, + "grad_norm": 0.9259626269340515, + "learning_rate": 0.00013392537678417443, + "loss": 0.3039, + "step": 25670 + }, + { + "epoch": 0.9915440750608132, + "grad_norm": 0.5700181722640991, + "learning_rate": 0.00013389963576457265, + "loss": 0.3266, + "step": 25680 + }, + { + "epoch": 0.99193019035484, + "grad_norm": 1.9731560945510864, + "learning_rate": 0.00013387389474497086, + "loss": 0.2534, + "step": 25690 + }, + { + "epoch": 0.9923163056488667, + "grad_norm": 1.3714967966079712, + "learning_rate": 0.00013384815372536907, + "loss": 0.298, + "step": 25700 + }, + { + "epoch": 0.9927024209428935, + "grad_norm": 1.2377171516418457, + "learning_rate": 0.0001338224127057673, + "loss": 0.2948, + "step": 25710 + }, + { + "epoch": 0.9930885362369204, + "grad_norm": 0.4711095690727234, + "learning_rate": 0.0001337966716861655, + "loss": 0.2295, + "step": 25720 + }, + { + "epoch": 0.9934746515309472, + "grad_norm": 0.46715909242630005, + "learning_rate": 0.00013377093066656369, + "loss": 0.3331, + "step": 25730 + }, + { + "epoch": 0.9938607668249739, + "grad_norm": 1.1080710887908936, + "learning_rate": 0.00013374518964696193, + "loss": 0.4324, + "step": 25740 + }, + { + "epoch": 0.9942468821190007, + "grad_norm": 2.3581650257110596, + "learning_rate": 0.00013371944862736014, + "loss": 0.372, + "step": 25750 + }, + { + "epoch": 0.9946329974130276, + "grad_norm": 2.448678970336914, + "learning_rate": 0.00013369370760775835, + "loss": 0.3396, + "step": 25760 + }, + { + "epoch": 0.9950191127070543, + "grad_norm": 1.4270198345184326, + "learning_rate": 0.00013366796658815657, + "loss": 0.4511, + "step": 25770 + }, + { + "epoch": 0.9954052280010811, + "grad_norm": 0.6360304951667786, + "learning_rate": 0.00013364222556855478, + "loss": 0.2427, + "step": 25780 + }, + { + "epoch": 0.9957913432951079, + "grad_norm": 2.1653332710266113, + "learning_rate": 0.000133616484548953, + "loss": 0.3057, + "step": 25790 + }, + { + "epoch": 0.9961774585891348, + "grad_norm": 2.952923536300659, + "learning_rate": 0.0001335907435293512, + "loss": 0.3076, + "step": 25800 + }, + { + "epoch": 0.9965635738831615, + "grad_norm": 0.5913527607917786, + "learning_rate": 0.00013356500250974942, + "loss": 0.399, + "step": 25810 + }, + { + "epoch": 0.9969496891771883, + "grad_norm": 0.4653400480747223, + "learning_rate": 0.00013353926149014763, + "loss": 0.2452, + "step": 25820 + }, + { + "epoch": 0.9973358044712151, + "grad_norm": 6.321722984313965, + "learning_rate": 0.00013351352047054585, + "loss": 0.3644, + "step": 25830 + }, + { + "epoch": 0.9977219197652419, + "grad_norm": 1.9944865703582764, + "learning_rate": 0.00013348777945094406, + "loss": 0.2915, + "step": 25840 + }, + { + "epoch": 0.9981080350592687, + "grad_norm": 1.1486843824386597, + "learning_rate": 0.00013346203843134227, + "loss": 0.3787, + "step": 25850 + }, + { + "epoch": 0.9984941503532955, + "grad_norm": 0.11532440781593323, + 
"learning_rate": 0.0001334362974117405, + "loss": 0.3053, + "step": 25860 + }, + { + "epoch": 0.9988802656473222, + "grad_norm": 1.3178479671478271, + "learning_rate": 0.0001334105563921387, + "loss": 0.2917, + "step": 25870 + }, + { + "epoch": 0.9992663809413491, + "grad_norm": 0.3511134088039398, + "learning_rate": 0.0001333848153725369, + "loss": 0.2657, + "step": 25880 + }, + { + "epoch": 0.9996524962353759, + "grad_norm": 1.2761729955673218, + "learning_rate": 0.00013335907435293513, + "loss": 0.3913, + "step": 25890 + }, + { + "epoch": 1.0000386115294027, + "grad_norm": 2.540947437286377, + "learning_rate": 0.00013333333333333334, + "loss": 0.3016, + "step": 25900 + }, + { + "epoch": 1.0004247268234294, + "grad_norm": 0.5865538120269775, + "learning_rate": 0.00013330759231373155, + "loss": 0.4184, + "step": 25910 + }, + { + "epoch": 1.0008108421174562, + "grad_norm": 2.7305166721343994, + "learning_rate": 0.00013328185129412977, + "loss": 0.3529, + "step": 25920 + }, + { + "epoch": 1.0011969574114832, + "grad_norm": 1.4203829765319824, + "learning_rate": 0.00013325611027452798, + "loss": 0.3345, + "step": 25930 + }, + { + "epoch": 1.00158307270551, + "grad_norm": 0.7687380313873291, + "learning_rate": 0.0001332303692549262, + "loss": 0.2622, + "step": 25940 + }, + { + "epoch": 1.0019691879995367, + "grad_norm": 0.4958217144012451, + "learning_rate": 0.0001332046282353244, + "loss": 0.3935, + "step": 25950 + }, + { + "epoch": 1.0023553032935635, + "grad_norm": 0.27102500200271606, + "learning_rate": 0.00013317888721572262, + "loss": 0.2642, + "step": 25960 + }, + { + "epoch": 1.0027414185875902, + "grad_norm": 0.6760912537574768, + "learning_rate": 0.00013315314619612083, + "loss": 0.1937, + "step": 25970 + }, + { + "epoch": 1.003127533881617, + "grad_norm": 2.5647270679473877, + "learning_rate": 0.00013312740517651905, + "loss": 0.3344, + "step": 25980 + }, + { + "epoch": 1.0035136491756438, + "grad_norm": 0.4810403883457184, + "learning_rate": 0.00013310166415691726, + "loss": 0.1783, + "step": 25990 + }, + { + "epoch": 1.0038997644696706, + "grad_norm": 2.5404248237609863, + "learning_rate": 0.00013307592313731547, + "loss": 0.3979, + "step": 26000 + }, + { + "epoch": 1.0042858797636975, + "grad_norm": 1.1135408878326416, + "learning_rate": 0.00013305018211771369, + "loss": 0.3681, + "step": 26010 + }, + { + "epoch": 1.0046719950577243, + "grad_norm": 1.2810723781585693, + "learning_rate": 0.0001330244410981119, + "loss": 0.183, + "step": 26020 + }, + { + "epoch": 1.005058110351751, + "grad_norm": 3.3486454486846924, + "learning_rate": 0.0001329987000785101, + "loss": 0.2489, + "step": 26030 + }, + { + "epoch": 1.0054442256457778, + "grad_norm": 0.7915325164794922, + "learning_rate": 0.00013297295905890833, + "loss": 0.3577, + "step": 26040 + }, + { + "epoch": 1.0058303409398046, + "grad_norm": 0.969727098941803, + "learning_rate": 0.00013294721803930654, + "loss": 0.2243, + "step": 26050 + }, + { + "epoch": 1.0062164562338314, + "grad_norm": 1.8932983875274658, + "learning_rate": 0.00013292147701970475, + "loss": 0.245, + "step": 26060 + }, + { + "epoch": 1.0066025715278581, + "grad_norm": 1.2421804666519165, + "learning_rate": 0.00013289573600010297, + "loss": 0.2087, + "step": 26070 + }, + { + "epoch": 1.006988686821885, + "grad_norm": 1.316405177116394, + "learning_rate": 0.00013286999498050118, + "loss": 0.2864, + "step": 26080 + }, + { + "epoch": 1.007374802115912, + "grad_norm": 1.9196691513061523, + "learning_rate": 0.0001328442539608994, + "loss": 0.2098, + "step": 26090 + 
}, + { + "epoch": 1.0077609174099387, + "grad_norm": 0.7522671222686768, + "learning_rate": 0.0001328185129412976, + "loss": 0.3092, + "step": 26100 + }, + { + "epoch": 1.0081470327039654, + "grad_norm": 0.322963684797287, + "learning_rate": 0.00013279277192169585, + "loss": 0.2452, + "step": 26110 + }, + { + "epoch": 1.0085331479979922, + "grad_norm": 2.243734121322632, + "learning_rate": 0.00013276703090209403, + "loss": 0.4387, + "step": 26120 + }, + { + "epoch": 1.008919263292019, + "grad_norm": 1.0588726997375488, + "learning_rate": 0.00013274128988249225, + "loss": 0.3326, + "step": 26130 + }, + { + "epoch": 1.0093053785860457, + "grad_norm": 0.7018478512763977, + "learning_rate": 0.00013271554886289046, + "loss": 0.4248, + "step": 26140 + }, + { + "epoch": 1.0096914938800725, + "grad_norm": 3.692521810531616, + "learning_rate": 0.00013268980784328867, + "loss": 0.2452, + "step": 26150 + }, + { + "epoch": 1.0100776091740993, + "grad_norm": 2.21842885017395, + "learning_rate": 0.00013266406682368689, + "loss": 0.3315, + "step": 26160 + }, + { + "epoch": 1.0104637244681263, + "grad_norm": 1.2321841716766357, + "learning_rate": 0.0001326383258040851, + "loss": 0.1042, + "step": 26170 + }, + { + "epoch": 1.010849839762153, + "grad_norm": 1.6151124238967896, + "learning_rate": 0.00013261258478448334, + "loss": 0.1934, + "step": 26180 + }, + { + "epoch": 1.0112359550561798, + "grad_norm": 0.0949881374835968, + "learning_rate": 0.00013258684376488153, + "loss": 0.1628, + "step": 26190 + }, + { + "epoch": 1.0116220703502066, + "grad_norm": 1.7337597608566284, + "learning_rate": 0.00013256110274527977, + "loss": 0.3275, + "step": 26200 + }, + { + "epoch": 1.0120081856442333, + "grad_norm": 2.1338372230529785, + "learning_rate": 0.00013253536172567795, + "loss": 0.3677, + "step": 26210 + }, + { + "epoch": 1.01239430093826, + "grad_norm": 1.803187370300293, + "learning_rate": 0.00013250962070607617, + "loss": 0.2886, + "step": 26220 + }, + { + "epoch": 1.0127804162322869, + "grad_norm": 2.022825002670288, + "learning_rate": 0.00013248387968647438, + "loss": 0.2618, + "step": 26230 + }, + { + "epoch": 1.0131665315263139, + "grad_norm": 0.48369279503822327, + "learning_rate": 0.0001324581386668726, + "loss": 0.4638, + "step": 26240 + }, + { + "epoch": 1.0135526468203406, + "grad_norm": 3.2530572414398193, + "learning_rate": 0.00013243239764727083, + "loss": 0.3824, + "step": 26250 + }, + { + "epoch": 1.0139387621143674, + "grad_norm": 1.0877609252929688, + "learning_rate": 0.00013240665662766902, + "loss": 0.1732, + "step": 26260 + }, + { + "epoch": 1.0143248774083942, + "grad_norm": 1.5585906505584717, + "learning_rate": 0.00013238091560806726, + "loss": 0.2044, + "step": 26270 + }, + { + "epoch": 1.014710992702421, + "grad_norm": 1.5326491594314575, + "learning_rate": 0.00013235517458846545, + "loss": 0.1759, + "step": 26280 + }, + { + "epoch": 1.0150971079964477, + "grad_norm": 4.376593589782715, + "learning_rate": 0.00013232943356886366, + "loss": 0.6032, + "step": 26290 + }, + { + "epoch": 1.0154832232904745, + "grad_norm": 0.8953253030776978, + "learning_rate": 0.0001323036925492619, + "loss": 0.4057, + "step": 26300 + }, + { + "epoch": 1.0158693385845012, + "grad_norm": 1.271932601928711, + "learning_rate": 0.00013227795152966009, + "loss": 0.1802, + "step": 26310 + }, + { + "epoch": 1.0162554538785282, + "grad_norm": 2.311713457107544, + "learning_rate": 0.00013225221051005833, + "loss": 0.3368, + "step": 26320 + }, + { + "epoch": 1.016641569172555, + "grad_norm": 1.386100172996521, + 
"learning_rate": 0.0001322264694904565, + "loss": 0.3101, + "step": 26330 + }, + { + "epoch": 1.0170276844665818, + "grad_norm": 1.371382236480713, + "learning_rate": 0.00013220072847085475, + "loss": 0.3804, + "step": 26340 + }, + { + "epoch": 1.0174137997606085, + "grad_norm": 0.7098391652107239, + "learning_rate": 0.00013217498745125294, + "loss": 0.2513, + "step": 26350 + }, + { + "epoch": 1.0177999150546353, + "grad_norm": 3.5874531269073486, + "learning_rate": 0.00013214924643165115, + "loss": 0.2467, + "step": 26360 + }, + { + "epoch": 1.018186030348662, + "grad_norm": 1.9853413105010986, + "learning_rate": 0.0001321235054120494, + "loss": 0.2255, + "step": 26370 + }, + { + "epoch": 1.0185721456426888, + "grad_norm": 0.5550156831741333, + "learning_rate": 0.00013209776439244758, + "loss": 0.1558, + "step": 26380 + }, + { + "epoch": 1.0189582609367156, + "grad_norm": 1.92972731590271, + "learning_rate": 0.00013207202337284582, + "loss": 0.4843, + "step": 26390 + }, + { + "epoch": 1.0193443762307426, + "grad_norm": 2.331674814224243, + "learning_rate": 0.000132046282353244, + "loss": 0.2652, + "step": 26400 + }, + { + "epoch": 1.0197304915247694, + "grad_norm": 0.809916615486145, + "learning_rate": 0.00013202054133364225, + "loss": 0.1824, + "step": 26410 + }, + { + "epoch": 1.0201166068187961, + "grad_norm": 2.6432926654815674, + "learning_rate": 0.00013199480031404046, + "loss": 0.2712, + "step": 26420 + }, + { + "epoch": 1.020502722112823, + "grad_norm": 1.6016955375671387, + "learning_rate": 0.00013196905929443865, + "loss": 0.1629, + "step": 26430 + }, + { + "epoch": 1.0208888374068497, + "grad_norm": 0.22881706058979034, + "learning_rate": 0.00013194331827483689, + "loss": 0.2977, + "step": 26440 + }, + { + "epoch": 1.0212749527008764, + "grad_norm": 1.3551994562149048, + "learning_rate": 0.00013191757725523507, + "loss": 0.1207, + "step": 26450 + }, + { + "epoch": 1.0216610679949032, + "grad_norm": 0.19062986969947815, + "learning_rate": 0.0001318918362356333, + "loss": 0.2201, + "step": 26460 + }, + { + "epoch": 1.0220471832889302, + "grad_norm": 2.014047145843506, + "learning_rate": 0.0001318660952160315, + "loss": 0.4991, + "step": 26470 + }, + { + "epoch": 1.022433298582957, + "grad_norm": 0.39503228664398193, + "learning_rate": 0.00013184035419642974, + "loss": 0.3509, + "step": 26480 + }, + { + "epoch": 1.0228194138769837, + "grad_norm": 4.807271957397461, + "learning_rate": 0.00013181461317682795, + "loss": 0.3324, + "step": 26490 + }, + { + "epoch": 1.0232055291710105, + "grad_norm": 1.4785593748092651, + "learning_rate": 0.00013178887215722614, + "loss": 0.269, + "step": 26500 + }, + { + "epoch": 1.0235916444650373, + "grad_norm": 1.851137399673462, + "learning_rate": 0.00013176313113762438, + "loss": 0.2739, + "step": 26510 + }, + { + "epoch": 1.023977759759064, + "grad_norm": 0.6200979351997375, + "learning_rate": 0.00013173739011802257, + "loss": 0.3135, + "step": 26520 + }, + { + "epoch": 1.0243638750530908, + "grad_norm": 2.411592960357666, + "learning_rate": 0.0001317116490984208, + "loss": 0.1675, + "step": 26530 + }, + { + "epoch": 1.0247499903471176, + "grad_norm": 0.6965230703353882, + "learning_rate": 0.000131685908078819, + "loss": 0.2762, + "step": 26540 + }, + { + "epoch": 1.0251361056411445, + "grad_norm": 1.3620637655258179, + "learning_rate": 0.00013166016705921723, + "loss": 0.319, + "step": 26550 + }, + { + "epoch": 1.0255222209351713, + "grad_norm": 1.4016404151916504, + "learning_rate": 0.00013163442603961545, + "loss": 0.1871, + "step": 26560 + }, 
+ { + "epoch": 1.025908336229198, + "grad_norm": 1.234718680381775, + "learning_rate": 0.00013160868502001363, + "loss": 0.1301, + "step": 26570 + }, + { + "epoch": 1.0262944515232248, + "grad_norm": 1.4657102823257446, + "learning_rate": 0.00013158294400041187, + "loss": 0.3814, + "step": 26580 + }, + { + "epoch": 1.0266805668172516, + "grad_norm": 1.606948733329773, + "learning_rate": 0.00013155720298081006, + "loss": 0.2226, + "step": 26590 + }, + { + "epoch": 1.0270666821112784, + "grad_norm": 0.4280283749103546, + "learning_rate": 0.0001315314619612083, + "loss": 0.1506, + "step": 26600 + }, + { + "epoch": 1.0274527974053052, + "grad_norm": 0.141262486577034, + "learning_rate": 0.0001315057209416065, + "loss": 0.1476, + "step": 26610 + }, + { + "epoch": 1.027838912699332, + "grad_norm": 2.7536983489990234, + "learning_rate": 0.00013147997992200473, + "loss": 0.2558, + "step": 26620 + }, + { + "epoch": 1.028225027993359, + "grad_norm": 1.052965760231018, + "learning_rate": 0.00013145423890240294, + "loss": 0.3989, + "step": 26630 + }, + { + "epoch": 1.0286111432873857, + "grad_norm": 0.3157159388065338, + "learning_rate": 0.00013142849788280112, + "loss": 0.2656, + "step": 26640 + }, + { + "epoch": 1.0289972585814124, + "grad_norm": 2.080801248550415, + "learning_rate": 0.00013140275686319937, + "loss": 0.3183, + "step": 26650 + }, + { + "epoch": 1.0293833738754392, + "grad_norm": 1.255540370941162, + "learning_rate": 0.00013137701584359755, + "loss": 0.4555, + "step": 26660 + }, + { + "epoch": 1.029769489169466, + "grad_norm": 1.0504742860794067, + "learning_rate": 0.0001313512748239958, + "loss": 0.17, + "step": 26670 + }, + { + "epoch": 1.0301556044634927, + "grad_norm": 1.8295503854751587, + "learning_rate": 0.000131325533804394, + "loss": 0.2202, + "step": 26680 + }, + { + "epoch": 1.0305417197575195, + "grad_norm": 0.9758415222167969, + "learning_rate": 0.00013129979278479222, + "loss": 0.1187, + "step": 26690 + }, + { + "epoch": 1.0309278350515463, + "grad_norm": 0.6076366901397705, + "learning_rate": 0.00013127405176519043, + "loss": 0.0668, + "step": 26700 + }, + { + "epoch": 1.0313139503455733, + "grad_norm": 0.7663784027099609, + "learning_rate": 0.00013124831074558865, + "loss": 0.2115, + "step": 26710 + }, + { + "epoch": 1.0317000656396, + "grad_norm": 1.814332365989685, + "learning_rate": 0.00013122256972598686, + "loss": 0.0901, + "step": 26720 + }, + { + "epoch": 1.0320861809336268, + "grad_norm": 2.80830454826355, + "learning_rate": 0.00013119682870638504, + "loss": 0.213, + "step": 26730 + }, + { + "epoch": 1.0324722962276536, + "grad_norm": 1.324601411819458, + "learning_rate": 0.00013117108768678328, + "loss": 0.6125, + "step": 26740 + }, + { + "epoch": 1.0328584115216803, + "grad_norm": 1.3301643133163452, + "learning_rate": 0.0001311453466671815, + "loss": 0.1986, + "step": 26750 + }, + { + "epoch": 1.033244526815707, + "grad_norm": 5.361929893493652, + "learning_rate": 0.0001311196056475797, + "loss": 0.5023, + "step": 26760 + }, + { + "epoch": 1.0336306421097339, + "grad_norm": 0.7855739593505859, + "learning_rate": 0.00013109386462797792, + "loss": 0.2549, + "step": 26770 + }, + { + "epoch": 1.0340167574037609, + "grad_norm": 0.05219104886054993, + "learning_rate": 0.00013106812360837614, + "loss": 0.2942, + "step": 26780 + }, + { + "epoch": 1.0344028726977876, + "grad_norm": 0.7680227160453796, + "learning_rate": 0.00013104238258877435, + "loss": 0.2909, + "step": 26790 + }, + { + "epoch": 1.0347889879918144, + "grad_norm": 0.559930682182312, + 
"learning_rate": 0.00013101664156917256, + "loss": 0.2876, + "step": 26800 + }, + { + "epoch": 1.0351751032858412, + "grad_norm": 0.8780495524406433, + "learning_rate": 0.00013099090054957078, + "loss": 0.2837, + "step": 26810 + }, + { + "epoch": 1.035561218579868, + "grad_norm": 2.0212693214416504, + "learning_rate": 0.000130965159529969, + "loss": 0.3008, + "step": 26820 + }, + { + "epoch": 1.0359473338738947, + "grad_norm": 2.2967641353607178, + "learning_rate": 0.0001309394185103672, + "loss": 0.2797, + "step": 26830 + }, + { + "epoch": 1.0363334491679215, + "grad_norm": 0.33352091908454895, + "learning_rate": 0.00013091367749076542, + "loss": 0.4785, + "step": 26840 + }, + { + "epoch": 1.0367195644619482, + "grad_norm": 1.5413645505905151, + "learning_rate": 0.00013088793647116363, + "loss": 0.3248, + "step": 26850 + }, + { + "epoch": 1.0371056797559752, + "grad_norm": 0.36845988035202026, + "learning_rate": 0.00013086219545156184, + "loss": 0.1723, + "step": 26860 + }, + { + "epoch": 1.037491795050002, + "grad_norm": 1.196103811264038, + "learning_rate": 0.00013083645443196006, + "loss": 0.3623, + "step": 26870 + }, + { + "epoch": 1.0378779103440288, + "grad_norm": 2.4172215461730957, + "learning_rate": 0.00013081071341235827, + "loss": 0.3428, + "step": 26880 + }, + { + "epoch": 1.0382640256380555, + "grad_norm": 1.585368275642395, + "learning_rate": 0.00013078497239275648, + "loss": 0.2791, + "step": 26890 + }, + { + "epoch": 1.0386501409320823, + "grad_norm": 2.8708138465881348, + "learning_rate": 0.0001307592313731547, + "loss": 0.3386, + "step": 26900 + }, + { + "epoch": 1.039036256226109, + "grad_norm": 1.1256765127182007, + "learning_rate": 0.0001307334903535529, + "loss": 0.2992, + "step": 26910 + }, + { + "epoch": 1.0394223715201358, + "grad_norm": 0.9846695065498352, + "learning_rate": 0.00013070774933395112, + "loss": 0.2659, + "step": 26920 + }, + { + "epoch": 1.0398084868141626, + "grad_norm": 1.5781525373458862, + "learning_rate": 0.00013068200831434934, + "loss": 0.2825, + "step": 26930 + }, + { + "epoch": 1.0401946021081896, + "grad_norm": 0.6615175008773804, + "learning_rate": 0.00013065626729474755, + "loss": 0.3608, + "step": 26940 + }, + { + "epoch": 1.0405807174022164, + "grad_norm": 1.2707170248031616, + "learning_rate": 0.00013063052627514576, + "loss": 0.3183, + "step": 26950 + }, + { + "epoch": 1.0409668326962431, + "grad_norm": 2.6396381855010986, + "learning_rate": 0.00013060478525554398, + "loss": 0.284, + "step": 26960 + }, + { + "epoch": 1.04135294799027, + "grad_norm": 1.5508041381835938, + "learning_rate": 0.0001305790442359422, + "loss": 0.2861, + "step": 26970 + }, + { + "epoch": 1.0417390632842967, + "grad_norm": 0.32720935344696045, + "learning_rate": 0.0001305533032163404, + "loss": 0.2784, + "step": 26980 + }, + { + "epoch": 1.0421251785783234, + "grad_norm": 2.3695240020751953, + "learning_rate": 0.00013052756219673862, + "loss": 0.3058, + "step": 26990 + }, + { + "epoch": 1.0425112938723502, + "grad_norm": 1.5404917001724243, + "learning_rate": 0.00013050182117713683, + "loss": 0.3636, + "step": 27000 + }, + { + "epoch": 1.0428974091663772, + "grad_norm": 1.3176417350769043, + "learning_rate": 0.00013047608015753504, + "loss": 0.2995, + "step": 27010 + }, + { + "epoch": 1.043283524460404, + "grad_norm": 1.5777894258499146, + "learning_rate": 0.00013045033913793326, + "loss": 0.3244, + "step": 27020 + }, + { + "epoch": 1.0436696397544307, + "grad_norm": 2.209652900695801, + "learning_rate": 0.00013042459811833147, + "loss": 0.2716, + "step": 
27030 + }, + { + "epoch": 1.0440557550484575, + "grad_norm": 1.7991529703140259, + "learning_rate": 0.00013039885709872968, + "loss": 0.2022, + "step": 27040 + }, + { + "epoch": 1.0444418703424843, + "grad_norm": 0.3741607069969177, + "learning_rate": 0.0001303731160791279, + "loss": 0.3808, + "step": 27050 + }, + { + "epoch": 1.044827985636511, + "grad_norm": 0.023122821003198624, + "learning_rate": 0.0001303473750595261, + "loss": 0.3209, + "step": 27060 + }, + { + "epoch": 1.0452141009305378, + "grad_norm": 2.0373151302337646, + "learning_rate": 0.00013032163403992432, + "loss": 0.3105, + "step": 27070 + }, + { + "epoch": 1.0456002162245646, + "grad_norm": 0.5242247581481934, + "learning_rate": 0.00013029589302032254, + "loss": 0.2589, + "step": 27080 + }, + { + "epoch": 1.0459863315185915, + "grad_norm": 0.8598466515541077, + "learning_rate": 0.00013027015200072075, + "loss": 0.3025, + "step": 27090 + }, + { + "epoch": 1.0463724468126183, + "grad_norm": 0.19895502924919128, + "learning_rate": 0.00013024441098111896, + "loss": 0.3278, + "step": 27100 + }, + { + "epoch": 1.046758562106645, + "grad_norm": 1.9250338077545166, + "learning_rate": 0.0001302186699615172, + "loss": 0.3272, + "step": 27110 + }, + { + "epoch": 1.0471446774006719, + "grad_norm": 0.6927512288093567, + "learning_rate": 0.0001301929289419154, + "loss": 0.3256, + "step": 27120 + }, + { + "epoch": 1.0475307926946986, + "grad_norm": 0.5463778376579285, + "learning_rate": 0.0001301671879223136, + "loss": 0.2726, + "step": 27130 + }, + { + "epoch": 1.0479169079887254, + "grad_norm": 0.24690699577331543, + "learning_rate": 0.00013014144690271182, + "loss": 0.1247, + "step": 27140 + }, + { + "epoch": 1.0483030232827522, + "grad_norm": 7.875885009765625, + "learning_rate": 0.00013011570588311003, + "loss": 0.24, + "step": 27150 + }, + { + "epoch": 1.048689138576779, + "grad_norm": 2.0821642875671387, + "learning_rate": 0.00013008996486350824, + "loss": 0.2695, + "step": 27160 + }, + { + "epoch": 1.049075253870806, + "grad_norm": 1.0324410200119019, + "learning_rate": 0.00013006422384390646, + "loss": 0.3156, + "step": 27170 + }, + { + "epoch": 1.0494613691648327, + "grad_norm": 1.3675347566604614, + "learning_rate": 0.0001300384828243047, + "loss": 0.2366, + "step": 27180 + }, + { + "epoch": 1.0498474844588594, + "grad_norm": 0.4514729082584381, + "learning_rate": 0.00013001274180470288, + "loss": 0.1541, + "step": 27190 + }, + { + "epoch": 1.0502335997528862, + "grad_norm": 1.0098782777786255, + "learning_rate": 0.0001299870007851011, + "loss": 0.3059, + "step": 27200 + }, + { + "epoch": 1.050619715046913, + "grad_norm": 0.07944436371326447, + "learning_rate": 0.0001299612597654993, + "loss": 0.1882, + "step": 27210 + }, + { + "epoch": 1.0510058303409398, + "grad_norm": 0.8160178661346436, + "learning_rate": 0.00012993551874589752, + "loss": 0.2657, + "step": 27220 + }, + { + "epoch": 1.0513919456349665, + "grad_norm": 3.881469964981079, + "learning_rate": 0.00012990977772629574, + "loss": 0.4586, + "step": 27230 + }, + { + "epoch": 1.0517780609289935, + "grad_norm": 1.7851450443267822, + "learning_rate": 0.00012988403670669395, + "loss": 0.2945, + "step": 27240 + }, + { + "epoch": 1.0521641762230203, + "grad_norm": 4.176274299621582, + "learning_rate": 0.0001298582956870922, + "loss": 0.3819, + "step": 27250 + }, + { + "epoch": 1.052550291517047, + "grad_norm": 1.43479585647583, + "learning_rate": 0.00012983255466749038, + "loss": 0.3651, + "step": 27260 + }, + { + "epoch": 1.0529364068110738, + "grad_norm": 
0.4839598536491394, + "learning_rate": 0.0001298068136478886, + "loss": 0.186, + "step": 27270 + }, + { + "epoch": 1.0533225221051006, + "grad_norm": 0.8487644791603088, + "learning_rate": 0.0001297810726282868, + "loss": 0.0851, + "step": 27280 + }, + { + "epoch": 1.0537086373991273, + "grad_norm": 3.5912392139434814, + "learning_rate": 0.00012975533160868502, + "loss": 0.3962, + "step": 27290 + }, + { + "epoch": 1.0540947526931541, + "grad_norm": 2.4018168449401855, + "learning_rate": 0.00012972959058908326, + "loss": 0.4, + "step": 27300 + }, + { + "epoch": 1.0544808679871809, + "grad_norm": 3.8987746238708496, + "learning_rate": 0.00012970384956948144, + "loss": 0.3081, + "step": 27310 + }, + { + "epoch": 1.0548669832812079, + "grad_norm": 0.4471427798271179, + "learning_rate": 0.00012967810854987968, + "loss": 0.2227, + "step": 27320 + }, + { + "epoch": 1.0552530985752346, + "grad_norm": 0.6207703351974487, + "learning_rate": 0.00012965236753027787, + "loss": 0.2066, + "step": 27330 + }, + { + "epoch": 1.0556392138692614, + "grad_norm": 1.4381637573242188, + "learning_rate": 0.00012962662651067608, + "loss": 0.2598, + "step": 27340 + }, + { + "epoch": 1.0560253291632882, + "grad_norm": 1.6057437658309937, + "learning_rate": 0.0001296008854910743, + "loss": 0.4003, + "step": 27350 + }, + { + "epoch": 1.056411444457315, + "grad_norm": 1.583121418952942, + "learning_rate": 0.0001295751444714725, + "loss": 0.2231, + "step": 27360 + }, + { + "epoch": 1.0567975597513417, + "grad_norm": 0.9260556101799011, + "learning_rate": 0.00012954940345187075, + "loss": 0.3066, + "step": 27370 + }, + { + "epoch": 1.0571836750453685, + "grad_norm": 0.530800461769104, + "learning_rate": 0.00012952366243226894, + "loss": 0.2063, + "step": 27380 + }, + { + "epoch": 1.0575697903393952, + "grad_norm": 0.3256929814815521, + "learning_rate": 0.00012949792141266718, + "loss": 0.2279, + "step": 27390 + }, + { + "epoch": 1.0579559056334222, + "grad_norm": 1.2628639936447144, + "learning_rate": 0.00012947218039306536, + "loss": 0.179, + "step": 27400 + }, + { + "epoch": 1.058342020927449, + "grad_norm": 1.168662190437317, + "learning_rate": 0.0001294464393734636, + "loss": 0.2862, + "step": 27410 + }, + { + "epoch": 1.0587281362214758, + "grad_norm": 3.8283517360687256, + "learning_rate": 0.00012942069835386182, + "loss": 0.3933, + "step": 27420 + }, + { + "epoch": 1.0591142515155025, + "grad_norm": 0.10550712049007416, + "learning_rate": 0.00012939495733426, + "loss": 0.2015, + "step": 27430 + }, + { + "epoch": 1.0595003668095293, + "grad_norm": 0.7177254557609558, + "learning_rate": 0.00012936921631465824, + "loss": 0.2126, + "step": 27440 + }, + { + "epoch": 1.059886482103556, + "grad_norm": 3.3265602588653564, + "learning_rate": 0.00012934347529505643, + "loss": 0.3734, + "step": 27450 + }, + { + "epoch": 1.0602725973975828, + "grad_norm": 0.9886051416397095, + "learning_rate": 0.00012931773427545467, + "loss": 0.2643, + "step": 27460 + }, + { + "epoch": 1.0606587126916098, + "grad_norm": 0.8335347175598145, + "learning_rate": 0.00012929199325585286, + "loss": 0.2044, + "step": 27470 + }, + { + "epoch": 1.0610448279856366, + "grad_norm": 1.16574227809906, + "learning_rate": 0.0001292662522362511, + "loss": 0.2316, + "step": 27480 + }, + { + "epoch": 1.0614309432796634, + "grad_norm": 0.1635606586933136, + "learning_rate": 0.0001292405112166493, + "loss": 0.2477, + "step": 27490 + }, + { + "epoch": 1.0618170585736901, + "grad_norm": 0.6800632476806641, + "learning_rate": 0.0001292147701970475, + "loss": 0.2831, 
+ "step": 27500 + }, + { + "epoch": 1.062203173867717, + "grad_norm": 0.5231989622116089, + "learning_rate": 0.00012918902917744574, + "loss": 0.1621, + "step": 27510 + }, + { + "epoch": 1.0625892891617437, + "grad_norm": 6.668003559112549, + "learning_rate": 0.00012916328815784392, + "loss": 0.2736, + "step": 27520 + }, + { + "epoch": 1.0629754044557704, + "grad_norm": 0.2980963885784149, + "learning_rate": 0.00012913754713824216, + "loss": 0.2563, + "step": 27530 + }, + { + "epoch": 1.0633615197497972, + "grad_norm": 0.8144646883010864, + "learning_rate": 0.00012911180611864035, + "loss": 0.3821, + "step": 27540 + }, + { + "epoch": 1.063747635043824, + "grad_norm": 0.9781578183174133, + "learning_rate": 0.0001290860650990386, + "loss": 0.3653, + "step": 27550 + }, + { + "epoch": 1.064133750337851, + "grad_norm": 1.5652499198913574, + "learning_rate": 0.0001290603240794368, + "loss": 0.325, + "step": 27560 + }, + { + "epoch": 1.0645198656318777, + "grad_norm": 2.707165002822876, + "learning_rate": 0.000129034583059835, + "loss": 0.3129, + "step": 27570 + }, + { + "epoch": 1.0649059809259045, + "grad_norm": 0.35952532291412354, + "learning_rate": 0.00012900884204023323, + "loss": 0.2093, + "step": 27580 + }, + { + "epoch": 1.0652920962199313, + "grad_norm": 1.2863729000091553, + "learning_rate": 0.00012898310102063142, + "loss": 0.1299, + "step": 27590 + }, + { + "epoch": 1.065678211513958, + "grad_norm": 0.8279618620872498, + "learning_rate": 0.00012895736000102966, + "loss": 0.2072, + "step": 27600 + }, + { + "epoch": 1.0660643268079848, + "grad_norm": 0.4006168246269226, + "learning_rate": 0.00012893161898142787, + "loss": 0.3613, + "step": 27610 + }, + { + "epoch": 1.0664504421020116, + "grad_norm": 1.2714260816574097, + "learning_rate": 0.00012890587796182608, + "loss": 0.2013, + "step": 27620 + }, + { + "epoch": 1.0668365573960386, + "grad_norm": 1.9552396535873413, + "learning_rate": 0.0001288801369422243, + "loss": 0.2837, + "step": 27630 + }, + { + "epoch": 1.0672226726900653, + "grad_norm": 2.179871082305908, + "learning_rate": 0.00012885439592262248, + "loss": 0.1521, + "step": 27640 + }, + { + "epoch": 1.067608787984092, + "grad_norm": 1.1471878290176392, + "learning_rate": 0.00012882865490302072, + "loss": 0.2532, + "step": 27650 + }, + { + "epoch": 1.0679949032781189, + "grad_norm": 3.0032637119293213, + "learning_rate": 0.0001288029138834189, + "loss": 0.467, + "step": 27660 + }, + { + "epoch": 1.0683810185721456, + "grad_norm": 2.480180025100708, + "learning_rate": 0.00012877717286381715, + "loss": 0.3601, + "step": 27670 + }, + { + "epoch": 1.0687671338661724, + "grad_norm": 1.86027991771698, + "learning_rate": 0.00012875143184421536, + "loss": 0.3689, + "step": 27680 + }, + { + "epoch": 1.0691532491601992, + "grad_norm": 0.38396087288856506, + "learning_rate": 0.00012872569082461358, + "loss": 0.1401, + "step": 27690 + }, + { + "epoch": 1.0695393644542261, + "grad_norm": 0.8882033824920654, + "learning_rate": 0.0001286999498050118, + "loss": 0.6004, + "step": 27700 + }, + { + "epoch": 1.069925479748253, + "grad_norm": 1.2111278772354126, + "learning_rate": 0.00012867420878540998, + "loss": 0.2371, + "step": 27710 + }, + { + "epoch": 1.0703115950422797, + "grad_norm": 1.7789413928985596, + "learning_rate": 0.00012864846776580822, + "loss": 0.3142, + "step": 27720 + }, + { + "epoch": 1.0706977103363065, + "grad_norm": 1.6160372495651245, + "learning_rate": 0.00012862272674620643, + "loss": 0.3444, + "step": 27730 + }, + { + "epoch": 1.0710838256303332, + "grad_norm": 
3.336289167404175, + "learning_rate": 0.00012859698572660464, + "loss": 0.1214, + "step": 27740 + }, + { + "epoch": 1.07146994092436, + "grad_norm": 4.104520320892334, + "learning_rate": 0.00012857124470700286, + "loss": 0.1752, + "step": 27750 + }, + { + "epoch": 1.0718560562183868, + "grad_norm": 0.4429762363433838, + "learning_rate": 0.00012854550368740107, + "loss": 0.5001, + "step": 27760 + }, + { + "epoch": 1.0722421715124135, + "grad_norm": 0.557033121585846, + "learning_rate": 0.00012851976266779928, + "loss": 0.3267, + "step": 27770 + }, + { + "epoch": 1.0726282868064403, + "grad_norm": 1.6847301721572876, + "learning_rate": 0.00012849402164819747, + "loss": 0.2305, + "step": 27780 + }, + { + "epoch": 1.0730144021004673, + "grad_norm": 1.0504320859909058, + "learning_rate": 0.0001284682806285957, + "loss": 0.3443, + "step": 27790 + }, + { + "epoch": 1.073400517394494, + "grad_norm": 2.728804349899292, + "learning_rate": 0.00012844253960899392, + "loss": 0.4083, + "step": 27800 + }, + { + "epoch": 1.0737866326885208, + "grad_norm": 0.6164497137069702, + "learning_rate": 0.00012841679858939214, + "loss": 0.2049, + "step": 27810 + }, + { + "epoch": 1.0741727479825476, + "grad_norm": 2.1831917762756348, + "learning_rate": 0.00012839105756979035, + "loss": 0.1674, + "step": 27820 + }, + { + "epoch": 1.0745588632765743, + "grad_norm": 0.5254467129707336, + "learning_rate": 0.00012836531655018856, + "loss": 0.3456, + "step": 27830 + }, + { + "epoch": 1.0749449785706011, + "grad_norm": 2.920846700668335, + "learning_rate": 0.00012833957553058678, + "loss": 0.4071, + "step": 27840 + }, + { + "epoch": 1.0753310938646279, + "grad_norm": 0.6006580591201782, + "learning_rate": 0.00012831383451098496, + "loss": 0.16, + "step": 27850 + }, + { + "epoch": 1.0757172091586549, + "grad_norm": 1.7163684368133545, + "learning_rate": 0.0001282880934913832, + "loss": 0.1821, + "step": 27860 + }, + { + "epoch": 1.0761033244526816, + "grad_norm": 1.8286449909210205, + "learning_rate": 0.00012826235247178142, + "loss": 0.3079, + "step": 27870 + }, + { + "epoch": 1.0764894397467084, + "grad_norm": 2.5178558826446533, + "learning_rate": 0.00012823661145217963, + "loss": 0.2158, + "step": 27880 + }, + { + "epoch": 1.0768755550407352, + "grad_norm": 0.8985245227813721, + "learning_rate": 0.00012821087043257784, + "loss": 0.198, + "step": 27890 + }, + { + "epoch": 1.077261670334762, + "grad_norm": 0.11768722534179688, + "learning_rate": 0.00012818512941297606, + "loss": 0.1661, + "step": 27900 + }, + { + "epoch": 1.0776477856287887, + "grad_norm": 1.0070226192474365, + "learning_rate": 0.00012815938839337427, + "loss": 0.2843, + "step": 27910 + }, + { + "epoch": 1.0780339009228155, + "grad_norm": 1.6243773698806763, + "learning_rate": 0.00012813364737377248, + "loss": 0.3449, + "step": 27920 + }, + { + "epoch": 1.0784200162168422, + "grad_norm": 2.8661181926727295, + "learning_rate": 0.0001281079063541707, + "loss": 0.4921, + "step": 27930 + }, + { + "epoch": 1.0788061315108692, + "grad_norm": 1.5015594959259033, + "learning_rate": 0.0001280821653345689, + "loss": 0.3347, + "step": 27940 + }, + { + "epoch": 1.079192246804896, + "grad_norm": 1.7244246006011963, + "learning_rate": 0.00012805642431496712, + "loss": 0.3342, + "step": 27950 + }, + { + "epoch": 1.0795783620989228, + "grad_norm": 0.17871785163879395, + "learning_rate": 0.00012803068329536534, + "loss": 0.051, + "step": 27960 + }, + { + "epoch": 1.0799644773929495, + "grad_norm": 1.093429684638977, + "learning_rate": 0.00012800494227576355, + "loss": 
0.2461, + "step": 27970 + }, + { + "epoch": 1.0803505926869763, + "grad_norm": 1.0775126218795776, + "learning_rate": 0.00012797920125616176, + "loss": 0.293, + "step": 27980 + }, + { + "epoch": 1.080736707981003, + "grad_norm": 2.0808680057525635, + "learning_rate": 0.00012795346023655998, + "loss": 0.2125, + "step": 27990 + }, + { + "epoch": 1.0811228232750298, + "grad_norm": 3.172473907470703, + "learning_rate": 0.0001279277192169582, + "loss": 0.3289, + "step": 28000 + }, + { + "epoch": 1.0815089385690566, + "grad_norm": 0.8227205872535706, + "learning_rate": 0.0001279019781973564, + "loss": 0.4457, + "step": 28010 + }, + { + "epoch": 1.0818950538630836, + "grad_norm": 0.4987971782684326, + "learning_rate": 0.00012787623717775462, + "loss": 0.2397, + "step": 28020 + }, + { + "epoch": 1.0822811691571104, + "grad_norm": 0.6923367381095886, + "learning_rate": 0.00012785049615815283, + "loss": 0.1833, + "step": 28030 + }, + { + "epoch": 1.0826672844511371, + "grad_norm": 0.3719552457332611, + "learning_rate": 0.00012782475513855104, + "loss": 0.4045, + "step": 28040 + }, + { + "epoch": 1.083053399745164, + "grad_norm": 0.798744261264801, + "learning_rate": 0.00012779901411894926, + "loss": 0.2218, + "step": 28050 + }, + { + "epoch": 1.0834395150391907, + "grad_norm": 0.6289515495300293, + "learning_rate": 0.00012777327309934747, + "loss": 0.2423, + "step": 28060 + }, + { + "epoch": 1.0838256303332174, + "grad_norm": 0.6853532195091248, + "learning_rate": 0.00012774753207974568, + "loss": 0.2759, + "step": 28070 + }, + { + "epoch": 1.0842117456272442, + "grad_norm": 1.3380333185195923, + "learning_rate": 0.0001277217910601439, + "loss": 0.2457, + "step": 28080 + }, + { + "epoch": 1.0845978609212712, + "grad_norm": 1.4076060056686401, + "learning_rate": 0.0001276960500405421, + "loss": 0.3065, + "step": 28090 + }, + { + "epoch": 1.084983976215298, + "grad_norm": 1.790323257446289, + "learning_rate": 0.00012767030902094032, + "loss": 0.182, + "step": 28100 + }, + { + "epoch": 1.0853700915093247, + "grad_norm": 1.9291974306106567, + "learning_rate": 0.00012764456800133854, + "loss": 0.3798, + "step": 28110 + }, + { + "epoch": 1.0857562068033515, + "grad_norm": 1.36685049533844, + "learning_rate": 0.00012761882698173675, + "loss": 0.1893, + "step": 28120 + }, + { + "epoch": 1.0861423220973783, + "grad_norm": 2.52441668510437, + "learning_rate": 0.00012759308596213496, + "loss": 0.2389, + "step": 28130 + }, + { + "epoch": 1.086528437391405, + "grad_norm": 0.9578754901885986, + "learning_rate": 0.00012756734494253318, + "loss": 0.3133, + "step": 28140 + }, + { + "epoch": 1.0869145526854318, + "grad_norm": 0.17957572638988495, + "learning_rate": 0.0001275416039229314, + "loss": 0.1708, + "step": 28150 + }, + { + "epoch": 1.0873006679794586, + "grad_norm": 1.7213740348815918, + "learning_rate": 0.0001275158629033296, + "loss": 0.3831, + "step": 28160 + }, + { + "epoch": 1.0876867832734856, + "grad_norm": 1.2460767030715942, + "learning_rate": 0.00012749012188372782, + "loss": 0.266, + "step": 28170 + }, + { + "epoch": 1.0880728985675123, + "grad_norm": 0.33691835403442383, + "learning_rate": 0.00012746438086412606, + "loss": 0.216, + "step": 28180 + }, + { + "epoch": 1.088459013861539, + "grad_norm": 0.23887981474399567, + "learning_rate": 0.00012743863984452424, + "loss": 0.2331, + "step": 28190 + }, + { + "epoch": 1.0888451291555659, + "grad_norm": 4.753499984741211, + "learning_rate": 0.00012741289882492246, + "loss": 0.1845, + "step": 28200 + }, + { + "epoch": 1.0892312444495926, + "grad_norm": 
1.055097222328186, + "learning_rate": 0.00012738715780532067, + "loss": 0.2978, + "step": 28210 + }, + { + "epoch": 1.0896173597436194, + "grad_norm": 2.3080852031707764, + "learning_rate": 0.00012736141678571888, + "loss": 0.1838, + "step": 28220 + }, + { + "epoch": 1.0900034750376462, + "grad_norm": 0.3733162581920624, + "learning_rate": 0.00012733567576611712, + "loss": 0.1941, + "step": 28230 + }, + { + "epoch": 1.090389590331673, + "grad_norm": 2.247748613357544, + "learning_rate": 0.0001273099347465153, + "loss": 0.2594, + "step": 28240 + }, + { + "epoch": 1.0907757056257, + "grad_norm": 1.644177794456482, + "learning_rate": 0.00012728419372691355, + "loss": 0.392, + "step": 28250 + }, + { + "epoch": 1.0911618209197267, + "grad_norm": 2.3522965908050537, + "learning_rate": 0.00012725845270731174, + "loss": 0.2198, + "step": 28260 + }, + { + "epoch": 1.0915479362137535, + "grad_norm": 0.2335210144519806, + "learning_rate": 0.00012723271168770995, + "loss": 0.3363, + "step": 28270 + }, + { + "epoch": 1.0919340515077802, + "grad_norm": 0.476607084274292, + "learning_rate": 0.00012720697066810816, + "loss": 0.1803, + "step": 28280 + }, + { + "epoch": 1.092320166801807, + "grad_norm": 2.1482882499694824, + "learning_rate": 0.00012718122964850638, + "loss": 0.2567, + "step": 28290 + }, + { + "epoch": 1.0927062820958338, + "grad_norm": 1.6457593441009521, + "learning_rate": 0.00012715548862890462, + "loss": 0.196, + "step": 28300 + }, + { + "epoch": 1.0930923973898605, + "grad_norm": 2.0742087364196777, + "learning_rate": 0.0001271297476093028, + "loss": 0.3955, + "step": 28310 + }, + { + "epoch": 1.0934785126838875, + "grad_norm": 0.1588711142539978, + "learning_rate": 0.00012710400658970104, + "loss": 0.2113, + "step": 28320 + }, + { + "epoch": 1.0938646279779143, + "grad_norm": 4.303687572479248, + "learning_rate": 0.00012707826557009923, + "loss": 0.2941, + "step": 28330 + }, + { + "epoch": 1.094250743271941, + "grad_norm": 2.0096209049224854, + "learning_rate": 0.00012705252455049744, + "loss": 0.3092, + "step": 28340 + }, + { + "epoch": 1.0946368585659678, + "grad_norm": 0.49071142077445984, + "learning_rate": 0.00012702678353089566, + "loss": 0.2387, + "step": 28350 + }, + { + "epoch": 1.0950229738599946, + "grad_norm": 0.9084739089012146, + "learning_rate": 0.00012700104251129387, + "loss": 0.2199, + "step": 28360 + }, + { + "epoch": 1.0954090891540214, + "grad_norm": 2.076706647872925, + "learning_rate": 0.0001269753014916921, + "loss": 0.3652, + "step": 28370 + }, + { + "epoch": 1.0957952044480481, + "grad_norm": 0.13036206364631653, + "learning_rate": 0.0001269495604720903, + "loss": 0.2514, + "step": 28380 + }, + { + "epoch": 1.0961813197420749, + "grad_norm": 0.146321102976799, + "learning_rate": 0.00012692381945248854, + "loss": 0.2721, + "step": 28390 + }, + { + "epoch": 1.0965674350361019, + "grad_norm": 0.8172006607055664, + "learning_rate": 0.00012689807843288672, + "loss": 0.16, + "step": 28400 + }, + { + "epoch": 1.0969535503301286, + "grad_norm": 1.099068522453308, + "learning_rate": 0.00012687233741328494, + "loss": 0.2489, + "step": 28410 + }, + { + "epoch": 1.0973396656241554, + "grad_norm": 0.6757088899612427, + "learning_rate": 0.00012684659639368318, + "loss": 0.1449, + "step": 28420 + }, + { + "epoch": 1.0977257809181822, + "grad_norm": 0.11124458909034729, + "learning_rate": 0.00012682085537408136, + "loss": 0.2202, + "step": 28430 + }, + { + "epoch": 1.098111896212209, + "grad_norm": 2.357466220855713, + "learning_rate": 0.0001267951143544796, + "loss": 0.2669, 
+ "step": 28440 + }, + { + "epoch": 1.0984980115062357, + "grad_norm": 4.569977760314941, + "learning_rate": 0.0001267693733348778, + "loss": 0.3054, + "step": 28450 + }, + { + "epoch": 1.0988841268002625, + "grad_norm": 1.150667667388916, + "learning_rate": 0.00012674363231527603, + "loss": 0.1519, + "step": 28460 + }, + { + "epoch": 1.0992702420942893, + "grad_norm": 2.016101360321045, + "learning_rate": 0.00012671789129567421, + "loss": 0.2307, + "step": 28470 + }, + { + "epoch": 1.0996563573883162, + "grad_norm": 1.2213127613067627, + "learning_rate": 0.00012669215027607243, + "loss": 0.2847, + "step": 28480 + }, + { + "epoch": 1.100042472682343, + "grad_norm": 2.8080902099609375, + "learning_rate": 0.00012666640925647067, + "loss": 0.2295, + "step": 28490 + }, + { + "epoch": 1.1004285879763698, + "grad_norm": 1.4878045320510864, + "learning_rate": 0.00012664066823686885, + "loss": 0.1497, + "step": 28500 + }, + { + "epoch": 1.1008147032703965, + "grad_norm": 0.7453703880310059, + "learning_rate": 0.0001266149272172671, + "loss": 0.2052, + "step": 28510 + }, + { + "epoch": 1.1012008185644233, + "grad_norm": 0.2775499224662781, + "learning_rate": 0.00012658918619766528, + "loss": 0.1576, + "step": 28520 + }, + { + "epoch": 1.10158693385845, + "grad_norm": 1.0527644157409668, + "learning_rate": 0.00012656344517806352, + "loss": 0.2957, + "step": 28530 + }, + { + "epoch": 1.1019730491524768, + "grad_norm": 0.6511454582214355, + "learning_rate": 0.0001265377041584617, + "loss": 0.2336, + "step": 28540 + }, + { + "epoch": 1.1023591644465038, + "grad_norm": 0.32867324352264404, + "learning_rate": 0.00012651196313885992, + "loss": 0.1745, + "step": 28550 + }, + { + "epoch": 1.1027452797405306, + "grad_norm": 2.4408578872680664, + "learning_rate": 0.00012648622211925816, + "loss": 0.2195, + "step": 28560 + }, + { + "epoch": 1.1031313950345574, + "grad_norm": 1.4876518249511719, + "learning_rate": 0.00012646048109965635, + "loss": 0.5123, + "step": 28570 + }, + { + "epoch": 1.1035175103285841, + "grad_norm": 1.9403778314590454, + "learning_rate": 0.0001264347400800546, + "loss": 0.2783, + "step": 28580 + }, + { + "epoch": 1.103903625622611, + "grad_norm": 1.215280532836914, + "learning_rate": 0.00012640899906045277, + "loss": 0.2661, + "step": 28590 + }, + { + "epoch": 1.1042897409166377, + "grad_norm": 0.7179967164993286, + "learning_rate": 0.00012638325804085102, + "loss": 0.1756, + "step": 28600 + }, + { + "epoch": 1.1046758562106644, + "grad_norm": 1.9051718711853027, + "learning_rate": 0.00012635751702124923, + "loss": 0.2724, + "step": 28610 + }, + { + "epoch": 1.1050619715046912, + "grad_norm": 1.5659642219543457, + "learning_rate": 0.00012633177600164744, + "loss": 0.3891, + "step": 28620 + }, + { + "epoch": 1.1054480867987182, + "grad_norm": 0.9210501313209534, + "learning_rate": 0.00012630603498204566, + "loss": 0.1369, + "step": 28630 + }, + { + "epoch": 1.105834202092745, + "grad_norm": 0.5428475141525269, + "learning_rate": 0.00012628029396244384, + "loss": 0.1173, + "step": 28640 + }, + { + "epoch": 1.1062203173867717, + "grad_norm": 1.7399749755859375, + "learning_rate": 0.00012625455294284208, + "loss": 0.3005, + "step": 28650 + }, + { + "epoch": 1.1066064326807985, + "grad_norm": 0.09703828394412994, + "learning_rate": 0.00012622881192324027, + "loss": 0.3346, + "step": 28660 + }, + { + "epoch": 1.1069925479748253, + "grad_norm": 0.5834600329399109, + "learning_rate": 0.0001262030709036385, + "loss": 0.2689, + "step": 28670 + }, + { + "epoch": 1.107378663268852, + "grad_norm": 
1.0677303075790405, + "learning_rate": 0.00012617732988403672, + "loss": 0.5609, + "step": 28680 + }, + { + "epoch": 1.1077647785628788, + "grad_norm": 1.6462419033050537, + "learning_rate": 0.00012615158886443493, + "loss": 0.1991, + "step": 28690 + }, + { + "epoch": 1.1081508938569056, + "grad_norm": 0.933779776096344, + "learning_rate": 0.00012612584784483315, + "loss": 0.2757, + "step": 28700 + }, + { + "epoch": 1.1085370091509326, + "grad_norm": 1.3413206338882446, + "learning_rate": 0.00012610010682523133, + "loss": 0.1798, + "step": 28710 + }, + { + "epoch": 1.1089231244449593, + "grad_norm": 4.479143142700195, + "learning_rate": 0.00012607436580562957, + "loss": 0.1777, + "step": 28720 + }, + { + "epoch": 1.109309239738986, + "grad_norm": 1.5768260955810547, + "learning_rate": 0.0001260486247860278, + "loss": 0.1967, + "step": 28730 + }, + { + "epoch": 1.1096953550330129, + "grad_norm": 2.1206741333007812, + "learning_rate": 0.000126022883766426, + "loss": 0.2399, + "step": 28740 + }, + { + "epoch": 1.1100814703270396, + "grad_norm": 1.4531667232513428, + "learning_rate": 0.00012599714274682421, + "loss": 0.3464, + "step": 28750 + }, + { + "epoch": 1.1104675856210664, + "grad_norm": 1.7988258600234985, + "learning_rate": 0.00012597140172722243, + "loss": 0.285, + "step": 28760 + }, + { + "epoch": 1.1108537009150932, + "grad_norm": 1.094808578491211, + "learning_rate": 0.00012594566070762064, + "loss": 0.2194, + "step": 28770 + }, + { + "epoch": 1.1112398162091202, + "grad_norm": 1.3884358406066895, + "learning_rate": 0.00012591991968801883, + "loss": 0.4267, + "step": 28780 + }, + { + "epoch": 1.111625931503147, + "grad_norm": 2.743480920791626, + "learning_rate": 0.00012589417866841707, + "loss": 0.3333, + "step": 28790 + }, + { + "epoch": 1.1120120467971737, + "grad_norm": 1.0373203754425049, + "learning_rate": 0.00012586843764881528, + "loss": 0.3941, + "step": 28800 + }, + { + "epoch": 1.1123981620912005, + "grad_norm": 2.018101692199707, + "learning_rate": 0.0001258426966292135, + "loss": 0.2928, + "step": 28810 + }, + { + "epoch": 1.1127842773852272, + "grad_norm": 2.567119836807251, + "learning_rate": 0.0001258169556096117, + "loss": 0.3597, + "step": 28820 + }, + { + "epoch": 1.113170392679254, + "grad_norm": 1.1235183477401733, + "learning_rate": 0.00012579121459000992, + "loss": 0.1807, + "step": 28830 + }, + { + "epoch": 1.1135565079732808, + "grad_norm": 1.3740451335906982, + "learning_rate": 0.00012576547357040813, + "loss": 0.2425, + "step": 28840 + }, + { + "epoch": 1.1139426232673075, + "grad_norm": 1.3751258850097656, + "learning_rate": 0.00012573973255080632, + "loss": 0.2116, + "step": 28850 + }, + { + "epoch": 1.1143287385613343, + "grad_norm": 0.9605401158332825, + "learning_rate": 0.00012571399153120456, + "loss": 0.2144, + "step": 28860 + }, + { + "epoch": 1.1147148538553613, + "grad_norm": 0.5127251148223877, + "learning_rate": 0.00012568825051160277, + "loss": 0.1902, + "step": 28870 + }, + { + "epoch": 1.115100969149388, + "grad_norm": 2.6720705032348633, + "learning_rate": 0.000125662509492001, + "loss": 0.3286, + "step": 28880 + }, + { + "epoch": 1.1154870844434148, + "grad_norm": 3.7196574211120605, + "learning_rate": 0.0001256367684723992, + "loss": 0.211, + "step": 28890 + }, + { + "epoch": 1.1158731997374416, + "grad_norm": 0.18796740472316742, + "learning_rate": 0.00012561102745279741, + "loss": 0.3351, + "step": 28900 + }, + { + "epoch": 1.1162593150314684, + "grad_norm": 1.3164410591125488, + "learning_rate": 0.00012558528643319563, + "loss": 
0.1771, + "step": 28910 + }, + { + "epoch": 1.1166454303254951, + "grad_norm": 0.9552701115608215, + "learning_rate": 0.00012555954541359384, + "loss": 0.3302, + "step": 28920 + }, + { + "epoch": 1.117031545619522, + "grad_norm": 1.0072277784347534, + "learning_rate": 0.00012553380439399205, + "loss": 0.2936, + "step": 28930 + }, + { + "epoch": 1.1174176609135489, + "grad_norm": 0.24307872354984283, + "learning_rate": 0.00012550806337439027, + "loss": 0.2034, + "step": 28940 + }, + { + "epoch": 1.1178037762075757, + "grad_norm": 2.985166311264038, + "learning_rate": 0.00012548232235478848, + "loss": 0.2628, + "step": 28950 + }, + { + "epoch": 1.1181898915016024, + "grad_norm": 0.46555295586586, + "learning_rate": 0.0001254565813351867, + "loss": 0.1904, + "step": 28960 + }, + { + "epoch": 1.1185760067956292, + "grad_norm": 0.7813409566879272, + "learning_rate": 0.0001254308403155849, + "loss": 0.33, + "step": 28970 + }, + { + "epoch": 1.118962122089656, + "grad_norm": 2.4459455013275146, + "learning_rate": 0.00012540509929598312, + "loss": 0.3183, + "step": 28980 + }, + { + "epoch": 1.1193482373836827, + "grad_norm": 0.5164415240287781, + "learning_rate": 0.00012537935827638133, + "loss": 0.3959, + "step": 28990 + }, + { + "epoch": 1.1197343526777095, + "grad_norm": 0.3853105306625366, + "learning_rate": 0.00012535361725677955, + "loss": 0.1426, + "step": 29000 + }, + { + "epoch": 1.1201204679717365, + "grad_norm": 0.5817530751228333, + "learning_rate": 0.00012532787623717776, + "loss": 0.2218, + "step": 29010 + }, + { + "epoch": 1.1205065832657632, + "grad_norm": 1.264248251914978, + "learning_rate": 0.00012530213521757597, + "loss": 0.3002, + "step": 29020 + }, + { + "epoch": 1.12089269855979, + "grad_norm": 1.999251127243042, + "learning_rate": 0.0001252763941979742, + "loss": 0.3573, + "step": 29030 + }, + { + "epoch": 1.1212788138538168, + "grad_norm": 1.760797381401062, + "learning_rate": 0.0001252506531783724, + "loss": 0.3829, + "step": 29040 + }, + { + "epoch": 1.1216649291478435, + "grad_norm": 1.4757565259933472, + "learning_rate": 0.00012522491215877061, + "loss": 0.3407, + "step": 29050 + }, + { + "epoch": 1.1220510444418703, + "grad_norm": 0.08838029205799103, + "learning_rate": 0.00012519917113916883, + "loss": 0.1899, + "step": 29060 + }, + { + "epoch": 1.122437159735897, + "grad_norm": 0.6416037678718567, + "learning_rate": 0.00012517343011956704, + "loss": 0.277, + "step": 29070 + }, + { + "epoch": 1.1228232750299239, + "grad_norm": 2.9282822608947754, + "learning_rate": 0.00012514768909996525, + "loss": 0.3001, + "step": 29080 + }, + { + "epoch": 1.1232093903239506, + "grad_norm": 2.5984582901000977, + "learning_rate": 0.00012512194808036347, + "loss": 0.2326, + "step": 29090 + }, + { + "epoch": 1.1235955056179776, + "grad_norm": 1.0622142553329468, + "learning_rate": 0.00012509620706076168, + "loss": 0.1494, + "step": 29100 + }, + { + "epoch": 1.1239816209120044, + "grad_norm": 1.5386018753051758, + "learning_rate": 0.0001250704660411599, + "loss": 0.3392, + "step": 29110 + }, + { + "epoch": 1.1243677362060311, + "grad_norm": 0.8901385068893433, + "learning_rate": 0.0001250447250215581, + "loss": 0.2651, + "step": 29120 + }, + { + "epoch": 1.124753851500058, + "grad_norm": 2.0237483978271484, + "learning_rate": 0.00012501898400195632, + "loss": 0.3764, + "step": 29130 + }, + { + "epoch": 1.1251399667940847, + "grad_norm": 1.8989384174346924, + "learning_rate": 0.00012499324298235453, + "loss": 0.2713, + "step": 29140 + }, + { + "epoch": 1.1255260820881114, + 
"grad_norm": 2.704643487930298, + "learning_rate": 0.00012496750196275275, + "loss": 0.1362, + "step": 29150 + }, + { + "epoch": 1.1259121973821382, + "grad_norm": 1.2598273754119873, + "learning_rate": 0.00012494176094315096, + "loss": 0.331, + "step": 29160 + }, + { + "epoch": 1.1262983126761652, + "grad_norm": 2.2073826789855957, + "learning_rate": 0.00012491601992354917, + "loss": 0.209, + "step": 29170 + }, + { + "epoch": 1.126684427970192, + "grad_norm": 0.8338522911071777, + "learning_rate": 0.0001248902789039474, + "loss": 0.2583, + "step": 29180 + }, + { + "epoch": 1.1270705432642187, + "grad_norm": 0.49807825684547424, + "learning_rate": 0.0001248645378843456, + "loss": 0.2702, + "step": 29190 + }, + { + "epoch": 1.1274566585582455, + "grad_norm": 2.2561802864074707, + "learning_rate": 0.00012483879686474381, + "loss": 0.2667, + "step": 29200 + }, + { + "epoch": 1.1278427738522723, + "grad_norm": 0.6450731754302979, + "learning_rate": 0.00012481305584514203, + "loss": 0.2592, + "step": 29210 + }, + { + "epoch": 1.128228889146299, + "grad_norm": 1.214436650276184, + "learning_rate": 0.00012478731482554024, + "loss": 0.2285, + "step": 29220 + }, + { + "epoch": 1.1286150044403258, + "grad_norm": 1.2677173614501953, + "learning_rate": 0.00012476157380593848, + "loss": 0.1732, + "step": 29230 + }, + { + "epoch": 1.1290011197343528, + "grad_norm": 2.034266471862793, + "learning_rate": 0.00012473583278633667, + "loss": 0.3679, + "step": 29240 + }, + { + "epoch": 1.1293872350283796, + "grad_norm": 2.005913496017456, + "learning_rate": 0.00012471009176673488, + "loss": 0.2703, + "step": 29250 + }, + { + "epoch": 1.1297733503224063, + "grad_norm": 1.818928599357605, + "learning_rate": 0.0001246843507471331, + "loss": 0.3355, + "step": 29260 + }, + { + "epoch": 1.130159465616433, + "grad_norm": 0.5393241047859192, + "learning_rate": 0.0001246586097275313, + "loss": 0.1043, + "step": 29270 + }, + { + "epoch": 1.1305455809104599, + "grad_norm": 0.5508402585983276, + "learning_rate": 0.00012463286870792952, + "loss": 0.2538, + "step": 29280 + }, + { + "epoch": 1.1309316962044866, + "grad_norm": 1.1734035015106201, + "learning_rate": 0.00012460712768832773, + "loss": 0.3082, + "step": 29290 + }, + { + "epoch": 1.1313178114985134, + "grad_norm": 0.977611243724823, + "learning_rate": 0.00012458138666872597, + "loss": 0.1962, + "step": 29300 + }, + { + "epoch": 1.1317039267925402, + "grad_norm": 0.9720492362976074, + "learning_rate": 0.00012455564564912416, + "loss": 0.3462, + "step": 29310 + }, + { + "epoch": 1.132090042086567, + "grad_norm": 1.20888352394104, + "learning_rate": 0.00012452990462952237, + "loss": 0.1229, + "step": 29320 + }, + { + "epoch": 1.132476157380594, + "grad_norm": 0.7969954609870911, + "learning_rate": 0.0001245041636099206, + "loss": 0.4213, + "step": 29330 + }, + { + "epoch": 1.1328622726746207, + "grad_norm": 0.07595942914485931, + "learning_rate": 0.0001244784225903188, + "loss": 0.2148, + "step": 29340 + }, + { + "epoch": 1.1332483879686475, + "grad_norm": 0.15456156432628632, + "learning_rate": 0.000124452681570717, + "loss": 0.4713, + "step": 29350 + }, + { + "epoch": 1.1336345032626742, + "grad_norm": 1.232366681098938, + "learning_rate": 0.00012442694055111523, + "loss": 0.2446, + "step": 29360 + }, + { + "epoch": 1.134020618556701, + "grad_norm": 2.3669209480285645, + "learning_rate": 0.00012440119953151347, + "loss": 0.3025, + "step": 29370 + }, + { + "epoch": 1.1344067338507278, + "grad_norm": 4.639179229736328, + "learning_rate": 0.00012437545851191165, + 
"loss": 0.337, + "step": 29380 + }, + { + "epoch": 1.1347928491447545, + "grad_norm": 0.700533926486969, + "learning_rate": 0.0001243497174923099, + "loss": 0.1747, + "step": 29390 + }, + { + "epoch": 1.1351789644387815, + "grad_norm": 0.5738794803619385, + "learning_rate": 0.00012432397647270808, + "loss": 0.357, + "step": 29400 + }, + { + "epoch": 1.1355650797328083, + "grad_norm": 2.620095729827881, + "learning_rate": 0.0001242982354531063, + "loss": 0.2885, + "step": 29410 + }, + { + "epoch": 1.135951195026835, + "grad_norm": 1.5040203332901, + "learning_rate": 0.00012427249443350453, + "loss": 0.2481, + "step": 29420 + }, + { + "epoch": 1.1363373103208618, + "grad_norm": 0.7409051060676575, + "learning_rate": 0.00012424675341390272, + "loss": 0.3365, + "step": 29430 + }, + { + "epoch": 1.1367234256148886, + "grad_norm": 0.6730226874351501, + "learning_rate": 0.00012422101239430096, + "loss": 0.1508, + "step": 29440 + }, + { + "epoch": 1.1371095409089154, + "grad_norm": 2.1389102935791016, + "learning_rate": 0.00012419527137469915, + "loss": 0.3832, + "step": 29450 + }, + { + "epoch": 1.1374956562029421, + "grad_norm": 0.5423761606216431, + "learning_rate": 0.0001241695303550974, + "loss": 0.2942, + "step": 29460 + }, + { + "epoch": 1.1378817714969691, + "grad_norm": 2.6076724529266357, + "learning_rate": 0.00012414378933549557, + "loss": 0.2291, + "step": 29470 + }, + { + "epoch": 1.1382678867909959, + "grad_norm": 1.0197224617004395, + "learning_rate": 0.0001241180483158938, + "loss": 0.2309, + "step": 29480 + }, + { + "epoch": 1.1386540020850227, + "grad_norm": 1.4430413246154785, + "learning_rate": 0.00012409230729629203, + "loss": 0.258, + "step": 29490 + }, + { + "epoch": 1.1390401173790494, + "grad_norm": 1.43483304977417, + "learning_rate": 0.0001240665662766902, + "loss": 0.1931, + "step": 29500 + }, + { + "epoch": 1.1394262326730762, + "grad_norm": 0.7181301116943359, + "learning_rate": 0.00012404082525708845, + "loss": 0.2675, + "step": 29510 + }, + { + "epoch": 1.139812347967103, + "grad_norm": 2.2020421028137207, + "learning_rate": 0.00012401508423748664, + "loss": 0.5094, + "step": 29520 + }, + { + "epoch": 1.1401984632611297, + "grad_norm": 0.35750746726989746, + "learning_rate": 0.00012398934321788488, + "loss": 0.3825, + "step": 29530 + }, + { + "epoch": 1.1405845785551565, + "grad_norm": 2.1792123317718506, + "learning_rate": 0.0001239636021982831, + "loss": 0.4259, + "step": 29540 + }, + { + "epoch": 1.1409706938491833, + "grad_norm": 1.2699453830718994, + "learning_rate": 0.00012393786117868128, + "loss": 0.2524, + "step": 29550 + }, + { + "epoch": 1.1413568091432102, + "grad_norm": 4.232237339019775, + "learning_rate": 0.00012391212015907952, + "loss": 0.4191, + "step": 29560 + }, + { + "epoch": 1.141742924437237, + "grad_norm": 1.5009098052978516, + "learning_rate": 0.0001238863791394777, + "loss": 0.5748, + "step": 29570 + }, + { + "epoch": 1.1421290397312638, + "grad_norm": 0.8117336630821228, + "learning_rate": 0.00012386063811987595, + "loss": 0.2309, + "step": 29580 + }, + { + "epoch": 1.1425151550252906, + "grad_norm": 0.6417378187179565, + "learning_rate": 0.00012383489710027413, + "loss": 0.3011, + "step": 29590 + }, + { + "epoch": 1.1429012703193173, + "grad_norm": 0.19958554208278656, + "learning_rate": 0.00012380915608067237, + "loss": 0.4943, + "step": 29600 + }, + { + "epoch": 1.143287385613344, + "grad_norm": 1.7980111837387085, + "learning_rate": 0.0001237834150610706, + "loss": 0.2498, + "step": 29610 + }, + { + "epoch": 1.1436735009073709, + 
"grad_norm": 0.7506774663925171, + "learning_rate": 0.00012375767404146877, + "loss": 0.1622, + "step": 29620 + }, + { + "epoch": 1.1440596162013978, + "grad_norm": 0.4346953332424164, + "learning_rate": 0.000123731933021867, + "loss": 0.2608, + "step": 29630 + }, + { + "epoch": 1.1444457314954246, + "grad_norm": 1.4491907358169556, + "learning_rate": 0.0001237061920022652, + "loss": 0.4674, + "step": 29640 + }, + { + "epoch": 1.1448318467894514, + "grad_norm": 0.26328304409980774, + "learning_rate": 0.00012368045098266344, + "loss": 0.4287, + "step": 29650 + }, + { + "epoch": 1.1452179620834781, + "grad_norm": 0.6601302027702332, + "learning_rate": 0.00012365470996306163, + "loss": 0.3235, + "step": 29660 + }, + { + "epoch": 1.145604077377505, + "grad_norm": 3.9714503288269043, + "learning_rate": 0.00012362896894345987, + "loss": 0.2969, + "step": 29670 + }, + { + "epoch": 1.1459901926715317, + "grad_norm": 2.6341910362243652, + "learning_rate": 0.00012360322792385808, + "loss": 0.2771, + "step": 29680 + }, + { + "epoch": 1.1463763079655584, + "grad_norm": 0.04610513150691986, + "learning_rate": 0.00012357748690425627, + "loss": 0.2286, + "step": 29690 + }, + { + "epoch": 1.1467624232595852, + "grad_norm": 0.18892113864421844, + "learning_rate": 0.0001235517458846545, + "loss": 0.1821, + "step": 29700 + }, + { + "epoch": 1.147148538553612, + "grad_norm": 2.186973810195923, + "learning_rate": 0.0001235260048650527, + "loss": 0.2582, + "step": 29710 + }, + { + "epoch": 1.147534653847639, + "grad_norm": 0.23074299097061157, + "learning_rate": 0.00012350026384545093, + "loss": 0.3134, + "step": 29720 + }, + { + "epoch": 1.1479207691416657, + "grad_norm": 4.021410942077637, + "learning_rate": 0.00012347452282584915, + "loss": 0.3161, + "step": 29730 + }, + { + "epoch": 1.1483068844356925, + "grad_norm": 1.0262142419815063, + "learning_rate": 0.00012344878180624736, + "loss": 0.2034, + "step": 29740 + }, + { + "epoch": 1.1486929997297193, + "grad_norm": 1.4075149297714233, + "learning_rate": 0.00012342304078664557, + "loss": 0.2799, + "step": 29750 + }, + { + "epoch": 1.149079115023746, + "grad_norm": 0.9609633684158325, + "learning_rate": 0.00012339729976704376, + "loss": 0.1358, + "step": 29760 + }, + { + "epoch": 1.1494652303177728, + "grad_norm": 2.0923006534576416, + "learning_rate": 0.000123371558747442, + "loss": 0.224, + "step": 29770 + }, + { + "epoch": 1.1498513456117996, + "grad_norm": 0.4723201096057892, + "learning_rate": 0.00012334581772784019, + "loss": 0.1231, + "step": 29780 + }, + { + "epoch": 1.1502374609058266, + "grad_norm": 0.2070016860961914, + "learning_rate": 0.00012332007670823843, + "loss": 0.2998, + "step": 29790 + }, + { + "epoch": 1.1506235761998533, + "grad_norm": 0.3871285021305084, + "learning_rate": 0.00012329433568863664, + "loss": 0.2776, + "step": 29800 + }, + { + "epoch": 1.15100969149388, + "grad_norm": 0.6765030026435852, + "learning_rate": 0.00012326859466903485, + "loss": 0.6047, + "step": 29810 + }, + { + "epoch": 1.1513958067879069, + "grad_norm": 0.22907795011997223, + "learning_rate": 0.00012324285364943307, + "loss": 0.1573, + "step": 29820 + }, + { + "epoch": 1.1517819220819336, + "grad_norm": 0.3838706612586975, + "learning_rate": 0.00012321711262983128, + "loss": 0.1877, + "step": 29830 + }, + { + "epoch": 1.1521680373759604, + "grad_norm": 1.6967968940734863, + "learning_rate": 0.0001231913716102295, + "loss": 0.2314, + "step": 29840 + }, + { + "epoch": 1.1525541526699872, + "grad_norm": 0.6754477024078369, + "learning_rate": 
0.00012316563059062768, + "loss": 0.1997, + "step": 29850 + }, + { + "epoch": 1.1529402679640142, + "grad_norm": 0.8980739712715149, + "learning_rate": 0.00012313988957102592, + "loss": 0.2729, + "step": 29860 + }, + { + "epoch": 1.153326383258041, + "grad_norm": 2.7968523502349854, + "learning_rate": 0.00012311414855142413, + "loss": 0.2805, + "step": 29870 + }, + { + "epoch": 1.1537124985520677, + "grad_norm": 1.5867468118667603, + "learning_rate": 0.00012308840753182235, + "loss": 0.3489, + "step": 29880 + }, + { + "epoch": 1.1540986138460945, + "grad_norm": 3.077193021774292, + "learning_rate": 0.00012306266651222056, + "loss": 0.3397, + "step": 29890 + }, + { + "epoch": 1.1544847291401212, + "grad_norm": 0.49896860122680664, + "learning_rate": 0.00012303692549261877, + "loss": 0.1215, + "step": 29900 + }, + { + "epoch": 1.154870844434148, + "grad_norm": 0.9598873257637024, + "learning_rate": 0.00012301118447301699, + "loss": 0.1618, + "step": 29910 + }, + { + "epoch": 1.1552569597281748, + "grad_norm": 1.2659916877746582, + "learning_rate": 0.0001229854434534152, + "loss": 0.1623, + "step": 29920 + }, + { + "epoch": 1.1556430750222015, + "grad_norm": 1.4009841680526733, + "learning_rate": 0.0001229597024338134, + "loss": 0.2205, + "step": 29930 + }, + { + "epoch": 1.1560291903162283, + "grad_norm": 0.5847800970077515, + "learning_rate": 0.00012293396141421163, + "loss": 0.3173, + "step": 29940 + }, + { + "epoch": 1.1564153056102553, + "grad_norm": 1.2930784225463867, + "learning_rate": 0.00012290822039460984, + "loss": 0.3052, + "step": 29950 + }, + { + "epoch": 1.156801420904282, + "grad_norm": 0.7307919859886169, + "learning_rate": 0.00012288247937500805, + "loss": 0.2293, + "step": 29960 + }, + { + "epoch": 1.1571875361983088, + "grad_norm": 0.2120385468006134, + "learning_rate": 0.00012285673835540627, + "loss": 0.2006, + "step": 29970 + }, + { + "epoch": 1.1575736514923356, + "grad_norm": 1.0932674407958984, + "learning_rate": 0.00012283099733580448, + "loss": 0.1716, + "step": 29980 + }, + { + "epoch": 1.1579597667863624, + "grad_norm": 0.7607210874557495, + "learning_rate": 0.0001228052563162027, + "loss": 0.346, + "step": 29990 + }, + { + "epoch": 1.1583458820803891, + "grad_norm": 4.455061435699463, + "learning_rate": 0.0001227795152966009, + "loss": 0.2588, + "step": 30000 + }, + { + "epoch": 1.158731997374416, + "grad_norm": 2.0812416076660156, + "learning_rate": 0.00012275377427699912, + "loss": 0.2958, + "step": 30010 + }, + { + "epoch": 1.159118112668443, + "grad_norm": 1.8883334398269653, + "learning_rate": 0.00012272803325739733, + "loss": 0.1687, + "step": 30020 + }, + { + "epoch": 1.1595042279624697, + "grad_norm": 1.3935341835021973, + "learning_rate": 0.00012270229223779555, + "loss": 0.2276, + "step": 30030 + }, + { + "epoch": 1.1598903432564964, + "grad_norm": 2.5193631649017334, + "learning_rate": 0.00012267655121819376, + "loss": 0.3598, + "step": 30040 + }, + { + "epoch": 1.1602764585505232, + "grad_norm": 1.9172290563583374, + "learning_rate": 0.00012265081019859197, + "loss": 0.3294, + "step": 30050 + }, + { + "epoch": 1.16066257384455, + "grad_norm": 1.2220836877822876, + "learning_rate": 0.00012262506917899019, + "loss": 0.2053, + "step": 30060 + }, + { + "epoch": 1.1610486891385767, + "grad_norm": 1.9804691076278687, + "learning_rate": 0.0001225993281593884, + "loss": 0.2091, + "step": 30070 + }, + { + "epoch": 1.1614348044326035, + "grad_norm": 1.2194398641586304, + "learning_rate": 0.0001225735871397866, + "loss": 0.2535, + "step": 30080 + }, + { + 
"epoch": 1.1618209197266305, + "grad_norm": 0.7828000783920288, + "learning_rate": 0.00012254784612018483, + "loss": 0.436, + "step": 30090 + }, + { + "epoch": 1.1622070350206573, + "grad_norm": 0.26130637526512146, + "learning_rate": 0.00012252210510058304, + "loss": 0.3351, + "step": 30100 + }, + { + "epoch": 1.162593150314684, + "grad_norm": 0.9175068736076355, + "learning_rate": 0.00012249636408098125, + "loss": 0.0682, + "step": 30110 + }, + { + "epoch": 1.1629792656087108, + "grad_norm": 0.35828933119773865, + "learning_rate": 0.00012247062306137947, + "loss": 0.3081, + "step": 30120 + }, + { + "epoch": 1.1633653809027376, + "grad_norm": 1.2477439641952515, + "learning_rate": 0.00012244488204177768, + "loss": 0.2522, + "step": 30130 + }, + { + "epoch": 1.1637514961967643, + "grad_norm": 1.0479830503463745, + "learning_rate": 0.0001224191410221759, + "loss": 0.3183, + "step": 30140 + }, + { + "epoch": 1.164137611490791, + "grad_norm": 0.9295257329940796, + "learning_rate": 0.0001223934000025741, + "loss": 0.149, + "step": 30150 + }, + { + "epoch": 1.1645237267848179, + "grad_norm": 1.4081065654754639, + "learning_rate": 0.00012236765898297232, + "loss": 0.2403, + "step": 30160 + }, + { + "epoch": 1.1649098420788446, + "grad_norm": 0.6170324087142944, + "learning_rate": 0.00012234191796337053, + "loss": 0.3526, + "step": 30170 + }, + { + "epoch": 1.1652959573728716, + "grad_norm": 3.095670461654663, + "learning_rate": 0.00012231617694376875, + "loss": 0.2873, + "step": 30180 + }, + { + "epoch": 1.1656820726668984, + "grad_norm": 1.453447937965393, + "learning_rate": 0.00012229043592416696, + "loss": 0.2753, + "step": 30190 + }, + { + "epoch": 1.1660681879609252, + "grad_norm": 1.008033275604248, + "learning_rate": 0.00012226469490456517, + "loss": 0.2926, + "step": 30200 + }, + { + "epoch": 1.166454303254952, + "grad_norm": 2.122175693511963, + "learning_rate": 0.00012223895388496339, + "loss": 0.2432, + "step": 30210 + }, + { + "epoch": 1.1668404185489787, + "grad_norm": 1.4835058450698853, + "learning_rate": 0.0001222132128653616, + "loss": 0.2891, + "step": 30220 + }, + { + "epoch": 1.1672265338430055, + "grad_norm": 1.7386225461959839, + "learning_rate": 0.00012218747184575984, + "loss": 0.2371, + "step": 30230 + }, + { + "epoch": 1.1676126491370322, + "grad_norm": 2.1769731044769287, + "learning_rate": 0.00012216173082615803, + "loss": 0.2798, + "step": 30240 + }, + { + "epoch": 1.1679987644310592, + "grad_norm": 3.3941900730133057, + "learning_rate": 0.00012213598980655624, + "loss": 0.4021, + "step": 30250 + }, + { + "epoch": 1.168384879725086, + "grad_norm": 1.099238395690918, + "learning_rate": 0.00012211024878695445, + "loss": 0.2908, + "step": 30260 + }, + { + "epoch": 1.1687709950191127, + "grad_norm": 0.6718109250068665, + "learning_rate": 0.00012208450776735267, + "loss": 0.1791, + "step": 30270 + }, + { + "epoch": 1.1691571103131395, + "grad_norm": 0.25414201617240906, + "learning_rate": 0.00012205876674775088, + "loss": 0.1322, + "step": 30280 + }, + { + "epoch": 1.1695432256071663, + "grad_norm": 2.1115262508392334, + "learning_rate": 0.00012203302572814909, + "loss": 0.3538, + "step": 30290 + }, + { + "epoch": 1.169929340901193, + "grad_norm": 2.3652501106262207, + "learning_rate": 0.00012200728470854732, + "loss": 0.3229, + "step": 30300 + }, + { + "epoch": 1.1703154561952198, + "grad_norm": 1.4749270677566528, + "learning_rate": 0.00012198154368894552, + "loss": 0.1251, + "step": 30310 + }, + { + "epoch": 1.1707015714892468, + "grad_norm": 0.6566292643547058, + 
"learning_rate": 0.00012195580266934375, + "loss": 0.2527, + "step": 30320 + }, + { + "epoch": 1.1710876867832736, + "grad_norm": 1.9602152109146118, + "learning_rate": 0.00012193006164974195, + "loss": 0.1851, + "step": 30330 + }, + { + "epoch": 1.1714738020773003, + "grad_norm": 1.6631299257278442, + "learning_rate": 0.00012190432063014017, + "loss": 0.4715, + "step": 30340 + }, + { + "epoch": 1.171859917371327, + "grad_norm": 1.1554430723190308, + "learning_rate": 0.00012187857961053839, + "loss": 0.361, + "step": 30350 + }, + { + "epoch": 1.1722460326653539, + "grad_norm": 2.5738513469696045, + "learning_rate": 0.00012185283859093659, + "loss": 0.3661, + "step": 30360 + }, + { + "epoch": 1.1726321479593806, + "grad_norm": 0.5713154077529907, + "learning_rate": 0.00012182709757133481, + "loss": 0.2468, + "step": 30370 + }, + { + "epoch": 1.1730182632534074, + "grad_norm": 0.7371454834938049, + "learning_rate": 0.00012180135655173301, + "loss": 0.2949, + "step": 30380 + }, + { + "epoch": 1.1734043785474342, + "grad_norm": 2.5442118644714355, + "learning_rate": 0.00012177561553213124, + "loss": 0.2305, + "step": 30390 + }, + { + "epoch": 1.173790493841461, + "grad_norm": 1.684951663017273, + "learning_rate": 0.00012174987451252944, + "loss": 0.2535, + "step": 30400 + }, + { + "epoch": 1.174176609135488, + "grad_norm": 1.6874382495880127, + "learning_rate": 0.00012172413349292767, + "loss": 0.4069, + "step": 30410 + }, + { + "epoch": 1.1745627244295147, + "grad_norm": 0.46226370334625244, + "learning_rate": 0.00012169839247332588, + "loss": 0.221, + "step": 30420 + }, + { + "epoch": 1.1749488397235415, + "grad_norm": 3.5687646865844727, + "learning_rate": 0.00012167265145372408, + "loss": 0.2749, + "step": 30430 + }, + { + "epoch": 1.1753349550175682, + "grad_norm": 2.9427647590637207, + "learning_rate": 0.0001216469104341223, + "loss": 0.2621, + "step": 30440 + }, + { + "epoch": 1.175721070311595, + "grad_norm": 1.3569320440292358, + "learning_rate": 0.0001216211694145205, + "loss": 0.1711, + "step": 30450 + }, + { + "epoch": 1.1761071856056218, + "grad_norm": 0.5906672477722168, + "learning_rate": 0.00012159542839491873, + "loss": 0.3111, + "step": 30460 + }, + { + "epoch": 1.1764933008996485, + "grad_norm": 1.7809525728225708, + "learning_rate": 0.00012156968737531693, + "loss": 0.198, + "step": 30470 + }, + { + "epoch": 1.1768794161936755, + "grad_norm": 1.5865052938461304, + "learning_rate": 0.00012154394635571516, + "loss": 0.3589, + "step": 30480 + }, + { + "epoch": 1.1772655314877023, + "grad_norm": 0.4852294623851776, + "learning_rate": 0.00012151820533611337, + "loss": 0.203, + "step": 30490 + }, + { + "epoch": 1.177651646781729, + "grad_norm": 4.437458515167236, + "learning_rate": 0.00012149246431651158, + "loss": 0.2886, + "step": 30500 + }, + { + "epoch": 1.1780377620757558, + "grad_norm": 2.204751491546631, + "learning_rate": 0.0001214667232969098, + "loss": 0.313, + "step": 30510 + }, + { + "epoch": 1.1784238773697826, + "grad_norm": 0.9356504678726196, + "learning_rate": 0.000121440982277308, + "loss": 0.2813, + "step": 30520 + }, + { + "epoch": 1.1788099926638094, + "grad_norm": 0.06744952499866486, + "learning_rate": 0.00012141524125770622, + "loss": 0.2009, + "step": 30530 + }, + { + "epoch": 1.1791961079578361, + "grad_norm": 0.49779242277145386, + "learning_rate": 0.00012138950023810445, + "loss": 0.1822, + "step": 30540 + }, + { + "epoch": 1.1795822232518631, + "grad_norm": 1.1115593910217285, + "learning_rate": 0.00012136375921850265, + "loss": 0.4164, + "step": 
30550 + }, + { + "epoch": 1.17996833854589, + "grad_norm": 0.2939944267272949, + "learning_rate": 0.00012133801819890086, + "loss": 0.1356, + "step": 30560 + }, + { + "epoch": 1.1803544538399167, + "grad_norm": 0.15905381739139557, + "learning_rate": 0.00012131227717929908, + "loss": 0.3479, + "step": 30570 + }, + { + "epoch": 1.1807405691339434, + "grad_norm": 3.146277666091919, + "learning_rate": 0.00012128653615969729, + "loss": 0.2722, + "step": 30580 + }, + { + "epoch": 1.1811266844279702, + "grad_norm": 0.60884690284729, + "learning_rate": 0.00012126079514009549, + "loss": 0.1778, + "step": 30590 + }, + { + "epoch": 1.181512799721997, + "grad_norm": 0.053204573690891266, + "learning_rate": 0.00012123505412049372, + "loss": 0.1397, + "step": 30600 + }, + { + "epoch": 1.1818989150160237, + "grad_norm": 1.2908227443695068, + "learning_rate": 0.00012120931310089194, + "loss": 0.1552, + "step": 30610 + }, + { + "epoch": 1.1822850303100505, + "grad_norm": 1.489575743675232, + "learning_rate": 0.00012118357208129014, + "loss": 0.2965, + "step": 30620 + }, + { + "epoch": 1.1826711456040773, + "grad_norm": 2.4103262424468994, + "learning_rate": 0.00012115783106168837, + "loss": 0.2947, + "step": 30630 + }, + { + "epoch": 1.1830572608981043, + "grad_norm": 0.3685878813266754, + "learning_rate": 0.00012113209004208657, + "loss": 0.2144, + "step": 30640 + }, + { + "epoch": 1.183443376192131, + "grad_norm": 0.29953858256340027, + "learning_rate": 0.00012110634902248478, + "loss": 0.2281, + "step": 30650 + }, + { + "epoch": 1.1838294914861578, + "grad_norm": 0.4792311191558838, + "learning_rate": 0.00012108060800288298, + "loss": 0.2727, + "step": 30660 + }, + { + "epoch": 1.1842156067801846, + "grad_norm": 0.17143972218036652, + "learning_rate": 0.00012105486698328121, + "loss": 0.2096, + "step": 30670 + }, + { + "epoch": 1.1846017220742113, + "grad_norm": 0.43678683042526245, + "learning_rate": 0.00012102912596367944, + "loss": 0.3818, + "step": 30680 + }, + { + "epoch": 1.184987837368238, + "grad_norm": 2.0908610820770264, + "learning_rate": 0.00012100338494407764, + "loss": 0.2496, + "step": 30690 + }, + { + "epoch": 1.1853739526622649, + "grad_norm": 1.5331153869628906, + "learning_rate": 0.00012097764392447586, + "loss": 0.2772, + "step": 30700 + }, + { + "epoch": 1.1857600679562919, + "grad_norm": 0.6948639154434204, + "learning_rate": 0.00012095190290487406, + "loss": 0.3896, + "step": 30710 + }, + { + "epoch": 1.1861461832503186, + "grad_norm": 2.3802030086517334, + "learning_rate": 0.00012092616188527228, + "loss": 0.1998, + "step": 30720 + }, + { + "epoch": 1.1865322985443454, + "grad_norm": 2.1358511447906494, + "learning_rate": 0.0001209004208656705, + "loss": 0.2865, + "step": 30730 + }, + { + "epoch": 1.1869184138383722, + "grad_norm": 0.8761110305786133, + "learning_rate": 0.0001208746798460687, + "loss": 0.2574, + "step": 30740 + }, + { + "epoch": 1.187304529132399, + "grad_norm": 0.6293401718139648, + "learning_rate": 0.00012084893882646693, + "loss": 0.4006, + "step": 30750 + }, + { + "epoch": 1.1876906444264257, + "grad_norm": 2.4534378051757812, + "learning_rate": 0.00012082319780686513, + "loss": 0.2281, + "step": 30760 + }, + { + "epoch": 1.1880767597204525, + "grad_norm": 0.38392508029937744, + "learning_rate": 0.00012079745678726336, + "loss": 0.3055, + "step": 30770 + }, + { + "epoch": 1.1884628750144794, + "grad_norm": 1.278108835220337, + "learning_rate": 0.00012077171576766156, + "loss": 0.2376, + "step": 30780 + }, + { + "epoch": 1.1888489903085062, + "grad_norm": 
0.2575186789035797, + "learning_rate": 0.00012074597474805977, + "loss": 0.256, + "step": 30790 + }, + { + "epoch": 1.189235105602533, + "grad_norm": 4.043684959411621, + "learning_rate": 0.000120720233728458, + "loss": 0.3601, + "step": 30800 + }, + { + "epoch": 1.1896212208965598, + "grad_norm": 0.2561960220336914, + "learning_rate": 0.0001206944927088562, + "loss": 0.2698, + "step": 30810 + }, + { + "epoch": 1.1900073361905865, + "grad_norm": 0.3477579355239868, + "learning_rate": 0.00012066875168925442, + "loss": 0.3442, + "step": 30820 + }, + { + "epoch": 1.1903934514846133, + "grad_norm": 1.0357879400253296, + "learning_rate": 0.00012064301066965262, + "loss": 0.2561, + "step": 30830 + }, + { + "epoch": 1.19077956677864, + "grad_norm": 0.14653460681438446, + "learning_rate": 0.00012061726965005085, + "loss": 0.3072, + "step": 30840 + }, + { + "epoch": 1.1911656820726668, + "grad_norm": 1.2516767978668213, + "learning_rate": 0.00012059152863044906, + "loss": 0.4203, + "step": 30850 + }, + { + "epoch": 1.1915517973666936, + "grad_norm": 0.41542065143585205, + "learning_rate": 0.00012056578761084726, + "loss": 0.2018, + "step": 30860 + }, + { + "epoch": 1.1919379126607206, + "grad_norm": 1.7187318801879883, + "learning_rate": 0.00012054004659124549, + "loss": 0.097, + "step": 30870 + }, + { + "epoch": 1.1923240279547473, + "grad_norm": 0.3913117051124573, + "learning_rate": 0.00012051430557164369, + "loss": 0.1655, + "step": 30880 + }, + { + "epoch": 1.1927101432487741, + "grad_norm": 3.708519697189331, + "learning_rate": 0.00012048856455204192, + "loss": 0.2315, + "step": 30890 + }, + { + "epoch": 1.1930962585428009, + "grad_norm": 1.506726861000061, + "learning_rate": 0.00012046282353244012, + "loss": 0.1712, + "step": 30900 + }, + { + "epoch": 1.1934823738368276, + "grad_norm": 1.5069276094436646, + "learning_rate": 0.00012043708251283834, + "loss": 0.399, + "step": 30910 + }, + { + "epoch": 1.1938684891308544, + "grad_norm": 0.09606973081827164, + "learning_rate": 0.00012041134149323656, + "loss": 0.1226, + "step": 30920 + }, + { + "epoch": 1.1942546044248812, + "grad_norm": 1.7927659749984741, + "learning_rate": 0.00012038560047363476, + "loss": 0.0975, + "step": 30930 + }, + { + "epoch": 1.1946407197189082, + "grad_norm": 2.2988364696502686, + "learning_rate": 0.00012035985945403298, + "loss": 0.2742, + "step": 30940 + }, + { + "epoch": 1.195026835012935, + "grad_norm": 0.4412599205970764, + "learning_rate": 0.00012033411843443118, + "loss": 0.2008, + "step": 30950 + }, + { + "epoch": 1.1954129503069617, + "grad_norm": 3.227698564529419, + "learning_rate": 0.00012030837741482941, + "loss": 0.2797, + "step": 30960 + }, + { + "epoch": 1.1957990656009885, + "grad_norm": 1.3904643058776855, + "learning_rate": 0.00012028263639522761, + "loss": 0.1636, + "step": 30970 + }, + { + "epoch": 1.1961851808950152, + "grad_norm": 1.6495708227157593, + "learning_rate": 0.00012025689537562584, + "loss": 0.2768, + "step": 30980 + }, + { + "epoch": 1.196571296189042, + "grad_norm": 0.3142000436782837, + "learning_rate": 0.00012023115435602405, + "loss": 0.1797, + "step": 30990 + }, + { + "epoch": 1.1969574114830688, + "grad_norm": 1.562090516090393, + "learning_rate": 0.00012020541333642225, + "loss": 0.3175, + "step": 31000 + }, + { + "epoch": 1.1973435267770955, + "grad_norm": 1.3837881088256836, + "learning_rate": 0.00012017967231682048, + "loss": 0.3137, + "step": 31010 + }, + { + "epoch": 1.1977296420711223, + "grad_norm": 1.1495468616485596, + "learning_rate": 0.00012015393129721868, + 
"loss": 0.232, + "step": 31020 + }, + { + "epoch": 1.1981157573651493, + "grad_norm": 1.562514305114746, + "learning_rate": 0.0001201281902776169, + "loss": 0.3455, + "step": 31030 + }, + { + "epoch": 1.198501872659176, + "grad_norm": 1.626610517501831, + "learning_rate": 0.00012010244925801513, + "loss": 0.2382, + "step": 31040 + }, + { + "epoch": 1.1988879879532028, + "grad_norm": 2.5863585472106934, + "learning_rate": 0.00012007670823841333, + "loss": 0.288, + "step": 31050 + }, + { + "epoch": 1.1992741032472296, + "grad_norm": 0.9751160740852356, + "learning_rate": 0.00012005096721881154, + "loss": 0.3236, + "step": 31060 + }, + { + "epoch": 1.1996602185412564, + "grad_norm": 1.7428686618804932, + "learning_rate": 0.00012002522619920974, + "loss": 0.3707, + "step": 31070 + }, + { + "epoch": 1.2000463338352831, + "grad_norm": 1.0963342189788818, + "learning_rate": 0.00011999948517960797, + "loss": 0.2991, + "step": 31080 + }, + { + "epoch": 1.20043244912931, + "grad_norm": 2.0942695140838623, + "learning_rate": 0.00011997374416000617, + "loss": 0.2611, + "step": 31090 + }, + { + "epoch": 1.200818564423337, + "grad_norm": 0.11989244073629379, + "learning_rate": 0.0001199480031404044, + "loss": 0.2071, + "step": 31100 + }, + { + "epoch": 1.2012046797173637, + "grad_norm": 0.44892773032188416, + "learning_rate": 0.00011992226212080262, + "loss": 0.2105, + "step": 31110 + }, + { + "epoch": 1.2015907950113904, + "grad_norm": 0.0861232578754425, + "learning_rate": 0.00011989652110120082, + "loss": 0.1906, + "step": 31120 + }, + { + "epoch": 1.2019769103054172, + "grad_norm": 1.9162683486938477, + "learning_rate": 0.00011987078008159904, + "loss": 0.1929, + "step": 31130 + }, + { + "epoch": 1.202363025599444, + "grad_norm": 1.3750224113464355, + "learning_rate": 0.00011984503906199725, + "loss": 0.3072, + "step": 31140 + }, + { + "epoch": 1.2027491408934707, + "grad_norm": 0.49282577633857727, + "learning_rate": 0.00011981929804239546, + "loss": 0.3082, + "step": 31150 + }, + { + "epoch": 1.2031352561874975, + "grad_norm": 1.0578473806381226, + "learning_rate": 0.00011979355702279366, + "loss": 0.2498, + "step": 31160 + }, + { + "epoch": 1.2035213714815245, + "grad_norm": 0.6070756316184998, + "learning_rate": 0.00011976781600319189, + "loss": 0.2324, + "step": 31170 + }, + { + "epoch": 1.2039074867755513, + "grad_norm": 3.2918875217437744, + "learning_rate": 0.00011974207498359012, + "loss": 0.4218, + "step": 31180 + }, + { + "epoch": 1.204293602069578, + "grad_norm": 0.3284684419631958, + "learning_rate": 0.00011971633396398832, + "loss": 0.2713, + "step": 31190 + }, + { + "epoch": 1.2046797173636048, + "grad_norm": 0.8733110427856445, + "learning_rate": 0.00011969059294438653, + "loss": 0.328, + "step": 31200 + }, + { + "epoch": 1.2050658326576316, + "grad_norm": 1.787016749382019, + "learning_rate": 0.00011966485192478474, + "loss": 0.2115, + "step": 31210 + }, + { + "epoch": 1.2054519479516583, + "grad_norm": 0.7539357542991638, + "learning_rate": 0.00011963911090518296, + "loss": 0.3585, + "step": 31220 + }, + { + "epoch": 1.205838063245685, + "grad_norm": 0.32271450757980347, + "learning_rate": 0.00011961336988558118, + "loss": 0.2472, + "step": 31230 + }, + { + "epoch": 1.2062241785397119, + "grad_norm": 0.85898756980896, + "learning_rate": 0.00011958762886597938, + "loss": 0.2367, + "step": 31240 + }, + { + "epoch": 1.2066102938337386, + "grad_norm": 0.6451173424720764, + "learning_rate": 0.00011956188784637761, + "loss": 0.2309, + "step": 31250 + }, + { + "epoch": 1.2069964091277656, 
+ "grad_norm": 2.1043314933776855, + "learning_rate": 0.00011953614682677581, + "loss": 0.316, + "step": 31260 + }, + { + "epoch": 1.2073825244217924, + "grad_norm": 2.9022254943847656, + "learning_rate": 0.00011951040580717404, + "loss": 0.3161, + "step": 31270 + }, + { + "epoch": 1.2077686397158192, + "grad_norm": 0.7839256525039673, + "learning_rate": 0.00011948466478757224, + "loss": 0.4014, + "step": 31280 + }, + { + "epoch": 1.208154755009846, + "grad_norm": 1.024190902709961, + "learning_rate": 0.00011945892376797045, + "loss": 0.2333, + "step": 31290 + }, + { + "epoch": 1.2085408703038727, + "grad_norm": 1.433605670928955, + "learning_rate": 0.00011943318274836868, + "loss": 0.2014, + "step": 31300 + }, + { + "epoch": 1.2089269855978995, + "grad_norm": 0.7208861112594604, + "learning_rate": 0.00011940744172876688, + "loss": 0.1786, + "step": 31310 + }, + { + "epoch": 1.2093131008919262, + "grad_norm": 2.5464839935302734, + "learning_rate": 0.0001193817007091651, + "loss": 0.3646, + "step": 31320 + }, + { + "epoch": 1.2096992161859532, + "grad_norm": 1.6425135135650635, + "learning_rate": 0.0001193559596895633, + "loss": 0.2603, + "step": 31330 + }, + { + "epoch": 1.21008533147998, + "grad_norm": 1.7573461532592773, + "learning_rate": 0.00011933021866996153, + "loss": 0.315, + "step": 31340 + }, + { + "epoch": 1.2104714467740068, + "grad_norm": 1.884445071220398, + "learning_rate": 0.00011930447765035974, + "loss": 0.2326, + "step": 31350 + }, + { + "epoch": 1.2108575620680335, + "grad_norm": 1.2781214714050293, + "learning_rate": 0.00011927873663075794, + "loss": 0.2248, + "step": 31360 + }, + { + "epoch": 1.2112436773620603, + "grad_norm": 0.5210689902305603, + "learning_rate": 0.00011925299561115617, + "loss": 0.2517, + "step": 31370 + }, + { + "epoch": 1.211629792656087, + "grad_norm": 0.4265996515750885, + "learning_rate": 0.00011922725459155437, + "loss": 0.257, + "step": 31380 + }, + { + "epoch": 1.2120159079501138, + "grad_norm": 0.7833511233329773, + "learning_rate": 0.0001192015135719526, + "loss": 0.3117, + "step": 31390 + }, + { + "epoch": 1.2124020232441408, + "grad_norm": 5.922067165374756, + "learning_rate": 0.0001191757725523508, + "loss": 0.2949, + "step": 31400 + }, + { + "epoch": 1.2127881385381676, + "grad_norm": 3.2208797931671143, + "learning_rate": 0.00011915003153274902, + "loss": 0.3616, + "step": 31410 + }, + { + "epoch": 1.2131742538321943, + "grad_norm": 1.6283681392669678, + "learning_rate": 0.00011912429051314724, + "loss": 0.1951, + "step": 31420 + }, + { + "epoch": 1.2135603691262211, + "grad_norm": 1.2450298070907593, + "learning_rate": 0.00011909854949354544, + "loss": 0.2313, + "step": 31430 + }, + { + "epoch": 1.2139464844202479, + "grad_norm": 0.8394368290901184, + "learning_rate": 0.00011907280847394366, + "loss": 0.2993, + "step": 31440 + }, + { + "epoch": 1.2143325997142747, + "grad_norm": 0.6099762320518494, + "learning_rate": 0.00011904706745434186, + "loss": 0.1527, + "step": 31450 + }, + { + "epoch": 1.2147187150083014, + "grad_norm": 0.7743226885795593, + "learning_rate": 0.00011902132643474009, + "loss": 0.2608, + "step": 31460 + }, + { + "epoch": 1.2151048303023282, + "grad_norm": 3.343604326248169, + "learning_rate": 0.00011899558541513829, + "loss": 0.3031, + "step": 31470 + }, + { + "epoch": 1.215490945596355, + "grad_norm": 1.016830563545227, + "learning_rate": 0.00011896984439553652, + "loss": 0.2146, + "step": 31480 + }, + { + "epoch": 1.215877060890382, + "grad_norm": 1.0264688730239868, + "learning_rate": 0.00011894410337593473, 
+ "loss": 0.2905, + "step": 31490 + }, + { + "epoch": 1.2162631761844087, + "grad_norm": 1.5718705654144287, + "learning_rate": 0.00011891836235633293, + "loss": 0.2928, + "step": 31500 + }, + { + "epoch": 1.2166492914784355, + "grad_norm": 0.9886181950569153, + "learning_rate": 0.00011889262133673116, + "loss": 0.2129, + "step": 31510 + }, + { + "epoch": 1.2170354067724622, + "grad_norm": 0.6496497392654419, + "learning_rate": 0.00011886688031712936, + "loss": 0.2388, + "step": 31520 + }, + { + "epoch": 1.217421522066489, + "grad_norm": 2.0419809818267822, + "learning_rate": 0.00011884113929752758, + "loss": 0.1835, + "step": 31530 + }, + { + "epoch": 1.2178076373605158, + "grad_norm": 0.41370299458503723, + "learning_rate": 0.00011881539827792581, + "loss": 0.3099, + "step": 31540 + }, + { + "epoch": 1.2181937526545425, + "grad_norm": 0.6608594655990601, + "learning_rate": 0.00011878965725832401, + "loss": 0.4235, + "step": 31550 + }, + { + "epoch": 1.2185798679485695, + "grad_norm": 0.4206163287162781, + "learning_rate": 0.00011876391623872222, + "loss": 0.2384, + "step": 31560 + }, + { + "epoch": 1.2189659832425963, + "grad_norm": 0.2976207137107849, + "learning_rate": 0.00011873817521912042, + "loss": 0.4792, + "step": 31570 + }, + { + "epoch": 1.219352098536623, + "grad_norm": 2.237607002258301, + "learning_rate": 0.00011871243419951865, + "loss": 0.4694, + "step": 31580 + }, + { + "epoch": 1.2197382138306498, + "grad_norm": 2.1085827350616455, + "learning_rate": 0.00011868669317991685, + "loss": 0.3475, + "step": 31590 + }, + { + "epoch": 1.2201243291246766, + "grad_norm": 1.5770317316055298, + "learning_rate": 0.00011866095216031508, + "loss": 0.3676, + "step": 31600 + }, + { + "epoch": 1.2205104444187034, + "grad_norm": 1.7337925434112549, + "learning_rate": 0.0001186352111407133, + "loss": 0.2663, + "step": 31610 + }, + { + "epoch": 1.2208965597127301, + "grad_norm": 0.23846319317817688, + "learning_rate": 0.0001186094701211115, + "loss": 0.1916, + "step": 31620 + }, + { + "epoch": 1.2212826750067571, + "grad_norm": 1.1316229104995728, + "learning_rate": 0.00011858372910150972, + "loss": 0.1924, + "step": 31630 + }, + { + "epoch": 1.221668790300784, + "grad_norm": 0.7642451524734497, + "learning_rate": 0.00011855798808190792, + "loss": 0.3786, + "step": 31640 + }, + { + "epoch": 1.2220549055948107, + "grad_norm": 1.975182056427002, + "learning_rate": 0.00011853224706230614, + "loss": 0.2646, + "step": 31650 + }, + { + "epoch": 1.2224410208888374, + "grad_norm": 1.5172406435012817, + "learning_rate": 0.00011850650604270437, + "loss": 0.2824, + "step": 31660 + }, + { + "epoch": 1.2228271361828642, + "grad_norm": 1.5113869905471802, + "learning_rate": 0.00011848076502310257, + "loss": 0.2959, + "step": 31670 + }, + { + "epoch": 1.223213251476891, + "grad_norm": 2.380364418029785, + "learning_rate": 0.0001184550240035008, + "loss": 0.2425, + "step": 31680 + }, + { + "epoch": 1.2235993667709177, + "grad_norm": 0.11588902771472931, + "learning_rate": 0.000118429282983899, + "loss": 0.1665, + "step": 31690 + }, + { + "epoch": 1.2239854820649445, + "grad_norm": 2.045466184616089, + "learning_rate": 0.00011840354196429721, + "loss": 0.2478, + "step": 31700 + }, + { + "epoch": 1.2243715973589713, + "grad_norm": 4.201779365539551, + "learning_rate": 0.00011837780094469542, + "loss": 0.3295, + "step": 31710 + }, + { + "epoch": 1.2247577126529983, + "grad_norm": 1.1080553531646729, + "learning_rate": 0.00011835205992509364, + "loss": 0.2102, + "step": 31720 + }, + { + "epoch": 
1.225143827947025, + "grad_norm": 0.3263179659843445, + "learning_rate": 0.00011832631890549186, + "loss": 0.2822, + "step": 31730 + }, + { + "epoch": 1.2255299432410518, + "grad_norm": 0.7899855971336365, + "learning_rate": 0.00011830057788589006, + "loss": 0.1462, + "step": 31740 + }, + { + "epoch": 1.2259160585350786, + "grad_norm": 1.330854058265686, + "learning_rate": 0.00011827483686628829, + "loss": 0.3883, + "step": 31750 + }, + { + "epoch": 1.2263021738291053, + "grad_norm": 0.40446966886520386, + "learning_rate": 0.00011824909584668649, + "loss": 0.1592, + "step": 31760 + }, + { + "epoch": 1.226688289123132, + "grad_norm": 0.49187901616096497, + "learning_rate": 0.0001182233548270847, + "loss": 0.0698, + "step": 31770 + }, + { + "epoch": 1.2270744044171589, + "grad_norm": 4.227303504943848, + "learning_rate": 0.00011819761380748292, + "loss": 0.3945, + "step": 31780 + }, + { + "epoch": 1.2274605197111859, + "grad_norm": 2.520578622817993, + "learning_rate": 0.00011817187278788113, + "loss": 0.4552, + "step": 31790 + }, + { + "epoch": 1.2278466350052126, + "grad_norm": 0.9486772418022156, + "learning_rate": 0.00011814613176827936, + "loss": 0.232, + "step": 31800 + }, + { + "epoch": 1.2282327502992394, + "grad_norm": 0.8336694836616516, + "learning_rate": 0.00011812039074867756, + "loss": 0.183, + "step": 31810 + }, + { + "epoch": 1.2286188655932662, + "grad_norm": 1.576710820198059, + "learning_rate": 0.00011809464972907578, + "loss": 0.4116, + "step": 31820 + }, + { + "epoch": 1.229004980887293, + "grad_norm": 0.2388358861207962, + "learning_rate": 0.00011806890870947398, + "loss": 0.3509, + "step": 31830 + }, + { + "epoch": 1.2293910961813197, + "grad_norm": 1.054880142211914, + "learning_rate": 0.00011804316768987221, + "loss": 0.1669, + "step": 31840 + }, + { + "epoch": 1.2297772114753465, + "grad_norm": 0.7959414720535278, + "learning_rate": 0.00011801742667027042, + "loss": 0.2073, + "step": 31850 + }, + { + "epoch": 1.2301633267693735, + "grad_norm": 2.480940103530884, + "learning_rate": 0.00011799168565066862, + "loss": 0.3589, + "step": 31860 + }, + { + "epoch": 1.2305494420634002, + "grad_norm": 0.9381177425384521, + "learning_rate": 0.00011796594463106685, + "loss": 0.2576, + "step": 31870 + }, + { + "epoch": 1.230935557357427, + "grad_norm": 9.548282623291016, + "learning_rate": 0.00011794020361146505, + "loss": 0.1755, + "step": 31880 + }, + { + "epoch": 1.2313216726514538, + "grad_norm": 0.4922927916049957, + "learning_rate": 0.00011791446259186328, + "loss": 0.2173, + "step": 31890 + }, + { + "epoch": 1.2317077879454805, + "grad_norm": 1.2846627235412598, + "learning_rate": 0.00011788872157226148, + "loss": 0.2039, + "step": 31900 + }, + { + "epoch": 1.2320939032395073, + "grad_norm": 0.9321876764297485, + "learning_rate": 0.0001178629805526597, + "loss": 0.2418, + "step": 31910 + }, + { + "epoch": 1.232480018533534, + "grad_norm": 1.373787760734558, + "learning_rate": 0.00011783723953305792, + "loss": 0.3065, + "step": 31920 + }, + { + "epoch": 1.2328661338275608, + "grad_norm": 1.915208339691162, + "learning_rate": 0.00011781149851345612, + "loss": 0.292, + "step": 31930 + }, + { + "epoch": 1.2332522491215876, + "grad_norm": 1.533010482788086, + "learning_rate": 0.00011778575749385434, + "loss": 0.2234, + "step": 31940 + }, + { + "epoch": 1.2336383644156146, + "grad_norm": 0.5740505456924438, + "learning_rate": 0.00011776001647425254, + "loss": 0.2513, + "step": 31950 + }, + { + "epoch": 1.2340244797096414, + "grad_norm": 3.164320230484009, + "learning_rate": 
0.00011773427545465077, + "loss": 0.4233, + "step": 31960 + }, + { + "epoch": 1.2344105950036681, + "grad_norm": 0.8309218287467957, + "learning_rate": 0.00011770853443504897, + "loss": 0.4629, + "step": 31970 + }, + { + "epoch": 1.2347967102976949, + "grad_norm": 2.0942423343658447, + "learning_rate": 0.0001176827934154472, + "loss": 0.3381, + "step": 31980 + }, + { + "epoch": 1.2351828255917217, + "grad_norm": 0.4961155354976654, + "learning_rate": 0.00011765705239584541, + "loss": 0.1265, + "step": 31990 + }, + { + "epoch": 1.2355689408857484, + "grad_norm": 1.6777870655059814, + "learning_rate": 0.00011763131137624361, + "loss": 0.2503, + "step": 32000 + }, + { + "epoch": 1.2359550561797752, + "grad_norm": 0.6016990542411804, + "learning_rate": 0.00011760557035664184, + "loss": 0.3944, + "step": 32010 + }, + { + "epoch": 1.2363411714738022, + "grad_norm": 1.9123533964157104, + "learning_rate": 0.00011757982933704004, + "loss": 0.463, + "step": 32020 + }, + { + "epoch": 1.236727286767829, + "grad_norm": 1.5328630208969116, + "learning_rate": 0.00011755408831743826, + "loss": 0.3368, + "step": 32030 + }, + { + "epoch": 1.2371134020618557, + "grad_norm": 1.0984220504760742, + "learning_rate": 0.00011752834729783649, + "loss": 0.1987, + "step": 32040 + }, + { + "epoch": 1.2374995173558825, + "grad_norm": 3.1570327281951904, + "learning_rate": 0.00011750260627823469, + "loss": 0.454, + "step": 32050 + }, + { + "epoch": 1.2378856326499093, + "grad_norm": 3.7589874267578125, + "learning_rate": 0.0001174768652586329, + "loss": 0.2953, + "step": 32060 + }, + { + "epoch": 1.238271747943936, + "grad_norm": 1.3289847373962402, + "learning_rate": 0.0001174511242390311, + "loss": 0.3058, + "step": 32070 + }, + { + "epoch": 1.2386578632379628, + "grad_norm": 0.9872431755065918, + "learning_rate": 0.00011742538321942933, + "loss": 0.1312, + "step": 32080 + }, + { + "epoch": 1.2390439785319898, + "grad_norm": 1.799133062362671, + "learning_rate": 0.00011739964219982753, + "loss": 0.3271, + "step": 32090 + }, + { + "epoch": 1.2394300938260165, + "grad_norm": 0.13501858711242676, + "learning_rate": 0.00011737390118022576, + "loss": 0.4462, + "step": 32100 + }, + { + "epoch": 1.2398162091200433, + "grad_norm": 1.53009033203125, + "learning_rate": 0.00011734816016062398, + "loss": 0.1311, + "step": 32110 + }, + { + "epoch": 1.24020232441407, + "grad_norm": 0.5737781524658203, + "learning_rate": 0.00011732241914102218, + "loss": 0.1705, + "step": 32120 + }, + { + "epoch": 1.2405884397080968, + "grad_norm": 2.095947027206421, + "learning_rate": 0.0001172966781214204, + "loss": 0.2195, + "step": 32130 + }, + { + "epoch": 1.2409745550021236, + "grad_norm": 2.2158639430999756, + "learning_rate": 0.0001172709371018186, + "loss": 0.2964, + "step": 32140 + }, + { + "epoch": 1.2413606702961504, + "grad_norm": 0.3154670000076294, + "learning_rate": 0.00011724519608221682, + "loss": 0.245, + "step": 32150 + }, + { + "epoch": 1.2417467855901771, + "grad_norm": 1.4467942714691162, + "learning_rate": 0.00011721945506261505, + "loss": 0.3469, + "step": 32160 + }, + { + "epoch": 1.242132900884204, + "grad_norm": 0.24252896010875702, + "learning_rate": 0.00011719371404301325, + "loss": 0.2217, + "step": 32170 + }, + { + "epoch": 1.242519016178231, + "grad_norm": 2.4256465435028076, + "learning_rate": 0.00011716797302341148, + "loss": 0.4707, + "step": 32180 + }, + { + "epoch": 1.2429051314722577, + "grad_norm": 0.5917278528213501, + "learning_rate": 0.00011714223200380968, + "loss": 0.2284, + "step": 32190 + }, + { + 
"epoch": 1.2432912467662844, + "grad_norm": 1.2977256774902344, + "learning_rate": 0.00011711649098420789, + "loss": 0.3612, + "step": 32200 + }, + { + "epoch": 1.2436773620603112, + "grad_norm": 1.296558141708374, + "learning_rate": 0.00011709074996460609, + "loss": 0.2231, + "step": 32210 + }, + { + "epoch": 1.244063477354338, + "grad_norm": 1.6559596061706543, + "learning_rate": 0.00011706500894500432, + "loss": 0.2636, + "step": 32220 + }, + { + "epoch": 1.2444495926483647, + "grad_norm": 1.6342560052871704, + "learning_rate": 0.00011703926792540254, + "loss": 0.1276, + "step": 32230 + }, + { + "epoch": 1.2448357079423915, + "grad_norm": 1.1173146963119507, + "learning_rate": 0.00011701352690580074, + "loss": 0.1719, + "step": 32240 + }, + { + "epoch": 1.2452218232364185, + "grad_norm": 0.29269275069236755, + "learning_rate": 0.00011698778588619897, + "loss": 0.3323, + "step": 32250 + }, + { + "epoch": 1.2456079385304453, + "grad_norm": 2.518568515777588, + "learning_rate": 0.00011696204486659717, + "loss": 0.3302, + "step": 32260 + }, + { + "epoch": 1.245994053824472, + "grad_norm": 2.535940647125244, + "learning_rate": 0.00011693630384699538, + "loss": 0.1488, + "step": 32270 + }, + { + "epoch": 1.2463801691184988, + "grad_norm": 0.384199321269989, + "learning_rate": 0.00011691056282739358, + "loss": 0.0957, + "step": 32280 + }, + { + "epoch": 1.2467662844125256, + "grad_norm": 3.7460570335388184, + "learning_rate": 0.00011688482180779181, + "loss": 0.311, + "step": 32290 + }, + { + "epoch": 1.2471523997065523, + "grad_norm": 1.285152792930603, + "learning_rate": 0.00011685908078819004, + "loss": 0.1872, + "step": 32300 + }, + { + "epoch": 1.247538515000579, + "grad_norm": 0.6118050217628479, + "learning_rate": 0.00011683333976858823, + "loss": 0.1635, + "step": 32310 + }, + { + "epoch": 1.2479246302946059, + "grad_norm": 2.9890856742858887, + "learning_rate": 0.00011680759874898646, + "loss": 0.2773, + "step": 32320 + }, + { + "epoch": 1.2483107455886326, + "grad_norm": 0.15415504574775696, + "learning_rate": 0.00011678185772938466, + "loss": 0.3014, + "step": 32330 + }, + { + "epoch": 1.2486968608826596, + "grad_norm": 1.2717432975769043, + "learning_rate": 0.00011675611670978287, + "loss": 0.2442, + "step": 32340 + }, + { + "epoch": 1.2490829761766864, + "grad_norm": 0.19885335862636566, + "learning_rate": 0.0001167303756901811, + "loss": 0.1894, + "step": 32350 + }, + { + "epoch": 1.2494690914707132, + "grad_norm": 1.425176978111267, + "learning_rate": 0.0001167046346705793, + "loss": 0.3008, + "step": 32360 + }, + { + "epoch": 1.24985520676474, + "grad_norm": 1.9033544063568115, + "learning_rate": 0.00011667889365097753, + "loss": 0.3072, + "step": 32370 + }, + { + "epoch": 1.2502413220587667, + "grad_norm": 0.6677396297454834, + "learning_rate": 0.00011665315263137573, + "loss": 0.2378, + "step": 32380 + }, + { + "epoch": 1.2506274373527935, + "grad_norm": 0.4491410255432129, + "learning_rate": 0.00011662741161177396, + "loss": 0.1071, + "step": 32390 + }, + { + "epoch": 1.2510135526468202, + "grad_norm": 2.7697031497955322, + "learning_rate": 0.00011660167059217215, + "loss": 0.3276, + "step": 32400 + }, + { + "epoch": 1.2513996679408472, + "grad_norm": 1.4374775886535645, + "learning_rate": 0.00011657592957257037, + "loss": 0.2459, + "step": 32410 + }, + { + "epoch": 1.251785783234874, + "grad_norm": 1.5245740413665771, + "learning_rate": 0.0001165501885529686, + "loss": 0.3841, + "step": 32420 + }, + { + "epoch": 1.2521718985289008, + "grad_norm": 0.5069687366485596, + 
"learning_rate": 0.0001165244475333668, + "loss": 0.1425, + "step": 32430 + }, + { + "epoch": 1.2525580138229275, + "grad_norm": 0.6181765198707581, + "learning_rate": 0.00011649870651376502, + "loss": 0.4184, + "step": 32440 + }, + { + "epoch": 1.2529441291169543, + "grad_norm": 2.007375717163086, + "learning_rate": 0.00011647296549416322, + "loss": 0.1754, + "step": 32450 + }, + { + "epoch": 1.253330244410981, + "grad_norm": 0.27261993288993835, + "learning_rate": 0.00011644722447456145, + "loss": 0.3608, + "step": 32460 + }, + { + "epoch": 1.2537163597050078, + "grad_norm": 0.3452116549015045, + "learning_rate": 0.00011642148345495965, + "loss": 0.1913, + "step": 32470 + }, + { + "epoch": 1.2541024749990348, + "grad_norm": 2.553738594055176, + "learning_rate": 0.00011639574243535787, + "loss": 0.2221, + "step": 32480 + }, + { + "epoch": 1.2544885902930616, + "grad_norm": 0.795706033706665, + "learning_rate": 0.00011637000141575609, + "loss": 0.2348, + "step": 32490 + }, + { + "epoch": 1.2548747055870884, + "grad_norm": 0.519059956073761, + "learning_rate": 0.00011634426039615429, + "loss": 0.2874, + "step": 32500 + }, + { + "epoch": 1.2552608208811151, + "grad_norm": 2.7050392627716064, + "learning_rate": 0.00011631851937655251, + "loss": 0.3821, + "step": 32510 + }, + { + "epoch": 1.255646936175142, + "grad_norm": 2.09036922454834, + "learning_rate": 0.00011629277835695071, + "loss": 0.2562, + "step": 32520 + }, + { + "epoch": 1.2560330514691687, + "grad_norm": 1.156534194946289, + "learning_rate": 0.00011626703733734894, + "loss": 0.1373, + "step": 32530 + }, + { + "epoch": 1.2564191667631954, + "grad_norm": 2.5214719772338867, + "learning_rate": 0.00011624129631774715, + "loss": 0.2955, + "step": 32540 + }, + { + "epoch": 1.2568052820572224, + "grad_norm": 2.71368408203125, + "learning_rate": 0.00011621555529814537, + "loss": 0.2832, + "step": 32550 + }, + { + "epoch": 1.257191397351249, + "grad_norm": 1.488390564918518, + "learning_rate": 0.00011618981427854358, + "loss": 0.2369, + "step": 32560 + }, + { + "epoch": 1.257577512645276, + "grad_norm": 1.7705353498458862, + "learning_rate": 0.00011616407325894178, + "loss": 0.4351, + "step": 32570 + }, + { + "epoch": 1.2579636279393027, + "grad_norm": 4.3224406242370605, + "learning_rate": 0.00011613833223934001, + "loss": 0.3365, + "step": 32580 + }, + { + "epoch": 1.2583497432333295, + "grad_norm": 0.9157351851463318, + "learning_rate": 0.00011611259121973821, + "loss": 0.2882, + "step": 32590 + }, + { + "epoch": 1.2587358585273563, + "grad_norm": 2.9030823707580566, + "learning_rate": 0.00011608685020013643, + "loss": 0.343, + "step": 32600 + }, + { + "epoch": 1.259121973821383, + "grad_norm": 0.807424783706665, + "learning_rate": 0.00011606110918053466, + "loss": 0.2413, + "step": 32610 + }, + { + "epoch": 1.2595080891154098, + "grad_norm": 1.8564451932907104, + "learning_rate": 0.00011603536816093286, + "loss": 0.5403, + "step": 32620 + }, + { + "epoch": 1.2598942044094366, + "grad_norm": 0.8169263005256653, + "learning_rate": 0.00011600962714133107, + "loss": 0.3522, + "step": 32630 + }, + { + "epoch": 1.2602803197034635, + "grad_norm": 0.9638017416000366, + "learning_rate": 0.00011598388612172927, + "loss": 0.3295, + "step": 32640 + }, + { + "epoch": 1.2606664349974903, + "grad_norm": 1.8613898754119873, + "learning_rate": 0.0001159581451021275, + "loss": 0.3107, + "step": 32650 + }, + { + "epoch": 1.261052550291517, + "grad_norm": 2.582638740539551, + "learning_rate": 0.00011593240408252573, + "loss": 0.2767, + "step": 32660 + 
}, + { + "epoch": 1.2614386655855439, + "grad_norm": 1.8227603435516357, + "learning_rate": 0.00011590666306292393, + "loss": 0.2324, + "step": 32670 + }, + { + "epoch": 1.2618247808795706, + "grad_norm": 0.3827721178531647, + "learning_rate": 0.00011588092204332215, + "loss": 0.4926, + "step": 32680 + }, + { + "epoch": 1.2622108961735974, + "grad_norm": 1.5523405075073242, + "learning_rate": 0.00011585518102372035, + "loss": 0.4475, + "step": 32690 + }, + { + "epoch": 1.2625970114676242, + "grad_norm": 0.25254619121551514, + "learning_rate": 0.00011582944000411857, + "loss": 0.2333, + "step": 32700 + }, + { + "epoch": 1.2629831267616511, + "grad_norm": 0.5530809164047241, + "learning_rate": 0.00011580369898451677, + "loss": 0.2781, + "step": 32710 + }, + { + "epoch": 1.2633692420556777, + "grad_norm": 0.3202857971191406, + "learning_rate": 0.000115777957964915, + "loss": 0.2596, + "step": 32720 + }, + { + "epoch": 1.2637553573497047, + "grad_norm": 0.25663653016090393, + "learning_rate": 0.00011575221694531322, + "loss": 0.2096, + "step": 32730 + }, + { + "epoch": 1.2641414726437314, + "grad_norm": 2.423585891723633, + "learning_rate": 0.00011572647592571142, + "loss": 0.3064, + "step": 32740 + }, + { + "epoch": 1.2645275879377582, + "grad_norm": 0.2529740631580353, + "learning_rate": 0.00011570073490610965, + "loss": 0.1426, + "step": 32750 + }, + { + "epoch": 1.264913703231785, + "grad_norm": 0.5238136053085327, + "learning_rate": 0.00011567499388650785, + "loss": 0.1598, + "step": 32760 + }, + { + "epoch": 1.2652998185258117, + "grad_norm": 0.4495049715042114, + "learning_rate": 0.00011564925286690606, + "loss": 0.1712, + "step": 32770 + }, + { + "epoch": 1.2656859338198387, + "grad_norm": 0.3863711953163147, + "learning_rate": 0.00011562351184730426, + "loss": 0.1015, + "step": 32780 + }, + { + "epoch": 1.2660720491138653, + "grad_norm": 1.3139948844909668, + "learning_rate": 0.00011559777082770249, + "loss": 0.3982, + "step": 32790 + }, + { + "epoch": 1.2664581644078923, + "grad_norm": 0.40500277280807495, + "learning_rate": 0.00011557202980810071, + "loss": 0.4211, + "step": 32800 + }, + { + "epoch": 1.266844279701919, + "grad_norm": 0.9749487042427063, + "learning_rate": 0.00011554628878849891, + "loss": 0.2773, + "step": 32810 + }, + { + "epoch": 1.2672303949959458, + "grad_norm": 1.1194144487380981, + "learning_rate": 0.00011552054776889714, + "loss": 0.2771, + "step": 32820 + }, + { + "epoch": 1.2676165102899726, + "grad_norm": 1.4098531007766724, + "learning_rate": 0.00011549480674929534, + "loss": 0.2044, + "step": 32830 + }, + { + "epoch": 1.2680026255839993, + "grad_norm": 1.3184692859649658, + "learning_rate": 0.00011546906572969355, + "loss": 0.2633, + "step": 32840 + }, + { + "epoch": 1.268388740878026, + "grad_norm": 0.9776495099067688, + "learning_rate": 0.00011544332471009178, + "loss": 0.3212, + "step": 32850 + }, + { + "epoch": 1.2687748561720529, + "grad_norm": 1.1577869653701782, + "learning_rate": 0.00011541758369048998, + "loss": 0.4271, + "step": 32860 + }, + { + "epoch": 1.2691609714660799, + "grad_norm": 2.2742063999176025, + "learning_rate": 0.00011539184267088821, + "loss": 0.2044, + "step": 32870 + }, + { + "epoch": 1.2695470867601066, + "grad_norm": 1.2156758308410645, + "learning_rate": 0.00011536610165128641, + "loss": 0.2394, + "step": 32880 + }, + { + "epoch": 1.2699332020541334, + "grad_norm": 1.9160290956497192, + "learning_rate": 0.00011534036063168463, + "loss": 0.3733, + "step": 32890 + }, + { + "epoch": 1.2703193173481602, + "grad_norm": 
1.3484338521957397, + "learning_rate": 0.00011531461961208283, + "loss": 0.3946, + "step": 32900 + }, + { + "epoch": 1.270705432642187, + "grad_norm": 1.0480244159698486, + "learning_rate": 0.00011528887859248105, + "loss": 0.166, + "step": 32910 + }, + { + "epoch": 1.2710915479362137, + "grad_norm": 1.1980483531951904, + "learning_rate": 0.00011526313757287927, + "loss": 0.3134, + "step": 32920 + }, + { + "epoch": 1.2714776632302405, + "grad_norm": 1.2777379751205444, + "learning_rate": 0.00011523739655327747, + "loss": 0.2615, + "step": 32930 + }, + { + "epoch": 1.2718637785242675, + "grad_norm": 1.559792399406433, + "learning_rate": 0.0001152116555336757, + "loss": 0.2996, + "step": 32940 + }, + { + "epoch": 1.272249893818294, + "grad_norm": 1.9589471817016602, + "learning_rate": 0.0001151859145140739, + "loss": 0.2683, + "step": 32950 + }, + { + "epoch": 1.272636009112321, + "grad_norm": 0.23336051404476166, + "learning_rate": 0.00011516017349447213, + "loss": 0.3306, + "step": 32960 + }, + { + "epoch": 1.2730221244063478, + "grad_norm": 1.5898360013961792, + "learning_rate": 0.00011513443247487034, + "loss": 0.2891, + "step": 32970 + }, + { + "epoch": 1.2734082397003745, + "grad_norm": 0.5046207904815674, + "learning_rate": 0.00011510869145526854, + "loss": 0.1843, + "step": 32980 + }, + { + "epoch": 1.2737943549944013, + "grad_norm": 0.16588473320007324, + "learning_rate": 0.00011508295043566677, + "loss": 0.1065, + "step": 32990 + }, + { + "epoch": 1.274180470288428, + "grad_norm": 1.5499800443649292, + "learning_rate": 0.00011505720941606497, + "loss": 0.2795, + "step": 33000 + }, + { + "epoch": 1.274566585582455, + "grad_norm": 0.5019121766090393, + "learning_rate": 0.0001150314683964632, + "loss": 0.1857, + "step": 33010 + }, + { + "epoch": 1.2749527008764816, + "grad_norm": 0.258952260017395, + "learning_rate": 0.0001150057273768614, + "loss": 0.11, + "step": 33020 + }, + { + "epoch": 1.2753388161705086, + "grad_norm": 0.8540348410606384, + "learning_rate": 0.00011497998635725962, + "loss": 0.1852, + "step": 33030 + }, + { + "epoch": 1.2757249314645354, + "grad_norm": 0.08689398318529129, + "learning_rate": 0.00011495424533765783, + "loss": 0.2225, + "step": 33040 + }, + { + "epoch": 1.2761110467585621, + "grad_norm": 0.42253822088241577, + "learning_rate": 0.00011492850431805605, + "loss": 0.0751, + "step": 33050 + }, + { + "epoch": 1.276497162052589, + "grad_norm": 1.2964017391204834, + "learning_rate": 0.00011490276329845426, + "loss": 0.2384, + "step": 33060 + }, + { + "epoch": 1.2768832773466157, + "grad_norm": 0.5337836146354675, + "learning_rate": 0.00011487702227885246, + "loss": 0.1415, + "step": 33070 + }, + { + "epoch": 1.2772693926406424, + "grad_norm": 2.7771682739257812, + "learning_rate": 0.00011485128125925069, + "loss": 0.378, + "step": 33080 + }, + { + "epoch": 1.2776555079346692, + "grad_norm": 1.5107232332229614, + "learning_rate": 0.00011482554023964889, + "loss": 0.2482, + "step": 33090 + }, + { + "epoch": 1.2780416232286962, + "grad_norm": 0.6885499358177185, + "learning_rate": 0.00011479979922004711, + "loss": 0.2122, + "step": 33100 + }, + { + "epoch": 1.278427738522723, + "grad_norm": 0.9016557335853577, + "learning_rate": 0.00011477405820044533, + "loss": 0.2841, + "step": 33110 + }, + { + "epoch": 1.2788138538167497, + "grad_norm": 1.9532525539398193, + "learning_rate": 0.00011474831718084354, + "loss": 0.2281, + "step": 33120 + }, + { + "epoch": 1.2791999691107765, + "grad_norm": 2.1078782081604004, + "learning_rate": 0.00011472257616124175, + 
"loss": 0.2836, + "step": 33130 + }, + { + "epoch": 1.2795860844048033, + "grad_norm": 0.19830390810966492, + "learning_rate": 0.00011469683514163995, + "loss": 0.388, + "step": 33140 + }, + { + "epoch": 1.27997219969883, + "grad_norm": 0.17538850009441376, + "learning_rate": 0.00011467109412203818, + "loss": 0.3274, + "step": 33150 + }, + { + "epoch": 1.2803583149928568, + "grad_norm": 0.7402139902114868, + "learning_rate": 0.00011464535310243641, + "loss": 0.1979, + "step": 33160 + }, + { + "epoch": 1.2807444302868838, + "grad_norm": 0.2097146362066269, + "learning_rate": 0.00011461961208283461, + "loss": 0.2464, + "step": 33170 + }, + { + "epoch": 1.2811305455809103, + "grad_norm": 1.2441083192825317, + "learning_rate": 0.00011459387106323283, + "loss": 0.266, + "step": 33180 + }, + { + "epoch": 1.2815166608749373, + "grad_norm": 2.518852710723877, + "learning_rate": 0.00011456813004363103, + "loss": 0.253, + "step": 33190 + }, + { + "epoch": 1.281902776168964, + "grad_norm": 0.8078998327255249, + "learning_rate": 0.00011454238902402925, + "loss": 0.2361, + "step": 33200 + }, + { + "epoch": 1.2822888914629909, + "grad_norm": 1.2297371625900269, + "learning_rate": 0.00011451664800442745, + "loss": 0.1974, + "step": 33210 + }, + { + "epoch": 1.2826750067570176, + "grad_norm": 0.4303855895996094, + "learning_rate": 0.00011449090698482567, + "loss": 0.3563, + "step": 33220 + }, + { + "epoch": 1.2830611220510444, + "grad_norm": 1.3215210437774658, + "learning_rate": 0.0001144651659652239, + "loss": 0.2818, + "step": 33230 + }, + { + "epoch": 1.2834472373450712, + "grad_norm": 1.546265959739685, + "learning_rate": 0.0001144394249456221, + "loss": 0.5778, + "step": 33240 + }, + { + "epoch": 1.283833352639098, + "grad_norm": 0.8895953297615051, + "learning_rate": 0.00011441368392602033, + "loss": 0.2485, + "step": 33250 + }, + { + "epoch": 1.284219467933125, + "grad_norm": 0.7534870505332947, + "learning_rate": 0.00011438794290641853, + "loss": 0.2825, + "step": 33260 + }, + { + "epoch": 1.2846055832271517, + "grad_norm": 0.052820973098278046, + "learning_rate": 0.00011436220188681674, + "loss": 0.2191, + "step": 33270 + }, + { + "epoch": 1.2849916985211784, + "grad_norm": 0.9264475107192993, + "learning_rate": 0.00011433646086721494, + "loss": 0.181, + "step": 33280 + }, + { + "epoch": 1.2853778138152052, + "grad_norm": 0.2128441333770752, + "learning_rate": 0.00011431071984761317, + "loss": 0.1819, + "step": 33290 + }, + { + "epoch": 1.285763929109232, + "grad_norm": 0.5400950312614441, + "learning_rate": 0.0001142849788280114, + "loss": 0.4316, + "step": 33300 + }, + { + "epoch": 1.2861500444032588, + "grad_norm": 0.8033271431922913, + "learning_rate": 0.00011425923780840959, + "loss": 0.2146, + "step": 33310 + }, + { + "epoch": 1.2865361596972855, + "grad_norm": 2.012575149536133, + "learning_rate": 0.00011423349678880782, + "loss": 0.4335, + "step": 33320 + }, + { + "epoch": 1.2869222749913125, + "grad_norm": 0.7352376580238342, + "learning_rate": 0.00011420775576920602, + "loss": 0.2124, + "step": 33330 + }, + { + "epoch": 1.2873083902853393, + "grad_norm": 0.769036591053009, + "learning_rate": 0.00011418201474960423, + "loss": 0.3602, + "step": 33340 + }, + { + "epoch": 1.287694505579366, + "grad_norm": 0.250592976808548, + "learning_rate": 0.00011415627373000246, + "loss": 0.1692, + "step": 33350 + }, + { + "epoch": 1.2880806208733928, + "grad_norm": 2.43820858001709, + "learning_rate": 0.00011413053271040066, + "loss": 0.2777, + "step": 33360 + }, + { + "epoch": 1.2884667361674196, + 
"grad_norm": 1.3179954290390015, + "learning_rate": 0.00011410479169079889, + "loss": 0.1794, + "step": 33370 + }, + { + "epoch": 1.2888528514614463, + "grad_norm": 1.0040466785430908, + "learning_rate": 0.00011407905067119709, + "loss": 0.3037, + "step": 33380 + }, + { + "epoch": 1.2892389667554731, + "grad_norm": 5.296288013458252, + "learning_rate": 0.00011405330965159531, + "loss": 0.2904, + "step": 33390 + }, + { + "epoch": 1.2896250820495, + "grad_norm": 0.4267273247241974, + "learning_rate": 0.00011402756863199351, + "loss": 0.2263, + "step": 33400 + }, + { + "epoch": 1.2900111973435266, + "grad_norm": 0.8817713260650635, + "learning_rate": 0.00011400182761239173, + "loss": 0.2715, + "step": 33410 + }, + { + "epoch": 1.2903973126375536, + "grad_norm": 2.7891275882720947, + "learning_rate": 0.00011397608659278995, + "loss": 0.2781, + "step": 33420 + }, + { + "epoch": 1.2907834279315804, + "grad_norm": 0.3013952374458313, + "learning_rate": 0.00011395034557318815, + "loss": 0.2563, + "step": 33430 + }, + { + "epoch": 1.2911695432256072, + "grad_norm": 1.766413927078247, + "learning_rate": 0.00011392460455358638, + "loss": 0.1854, + "step": 33440 + }, + { + "epoch": 1.291555658519634, + "grad_norm": 0.25331103801727295, + "learning_rate": 0.00011389886353398458, + "loss": 0.1206, + "step": 33450 + }, + { + "epoch": 1.2919417738136607, + "grad_norm": 0.036400288343429565, + "learning_rate": 0.0001138731225143828, + "loss": 0.4707, + "step": 33460 + }, + { + "epoch": 1.2923278891076875, + "grad_norm": 1.5620888471603394, + "learning_rate": 0.00011384738149478102, + "loss": 0.3312, + "step": 33470 + }, + { + "epoch": 1.2927140044017142, + "grad_norm": 0.6670392155647278, + "learning_rate": 0.00011382164047517922, + "loss": 0.2341, + "step": 33480 + }, + { + "epoch": 1.2931001196957412, + "grad_norm": 2.3108737468719482, + "learning_rate": 0.00011379589945557745, + "loss": 0.3843, + "step": 33490 + }, + { + "epoch": 1.293486234989768, + "grad_norm": 0.8025147318840027, + "learning_rate": 0.00011377015843597565, + "loss": 0.1982, + "step": 33500 + }, + { + "epoch": 1.2938723502837948, + "grad_norm": 1.7835719585418701, + "learning_rate": 0.00011374441741637387, + "loss": 0.3285, + "step": 33510 + }, + { + "epoch": 1.2942584655778215, + "grad_norm": 2.041508913040161, + "learning_rate": 0.00011371867639677207, + "loss": 0.2044, + "step": 33520 + }, + { + "epoch": 1.2946445808718483, + "grad_norm": 1.103378415107727, + "learning_rate": 0.0001136929353771703, + "loss": 0.1682, + "step": 33530 + }, + { + "epoch": 1.295030696165875, + "grad_norm": 0.057376351207494736, + "learning_rate": 0.00011366719435756851, + "loss": 0.1642, + "step": 33540 + }, + { + "epoch": 1.2954168114599018, + "grad_norm": 0.6539410948753357, + "learning_rate": 0.00011364145333796671, + "loss": 0.1549, + "step": 33550 + }, + { + "epoch": 1.2958029267539288, + "grad_norm": 1.250543236732483, + "learning_rate": 0.00011361571231836494, + "loss": 0.3764, + "step": 33560 + }, + { + "epoch": 1.2961890420479556, + "grad_norm": 0.23697887361049652, + "learning_rate": 0.00011358997129876314, + "loss": 0.3999, + "step": 33570 + }, + { + "epoch": 1.2965751573419824, + "grad_norm": 0.9318505525588989, + "learning_rate": 0.00011356423027916137, + "loss": 0.4156, + "step": 33580 + }, + { + "epoch": 1.2969612726360091, + "grad_norm": 1.3910777568817139, + "learning_rate": 0.00011353848925955957, + "loss": 0.3455, + "step": 33590 + }, + { + "epoch": 1.297347387930036, + "grad_norm": 1.6764451265335083, + "learning_rate": 
0.00011351274823995779, + "loss": 0.1884, + "step": 33600 + }, + { + "epoch": 1.2977335032240627, + "grad_norm": 0.9300051927566528, + "learning_rate": 0.000113487007220356, + "loss": 0.119, + "step": 33610 + }, + { + "epoch": 1.2981196185180894, + "grad_norm": 2.447462558746338, + "learning_rate": 0.00011346126620075422, + "loss": 0.4403, + "step": 33620 + }, + { + "epoch": 1.2985057338121164, + "grad_norm": 1.216407060623169, + "learning_rate": 0.00011343552518115243, + "loss": 0.2415, + "step": 33630 + }, + { + "epoch": 1.298891849106143, + "grad_norm": 2.968648910522461, + "learning_rate": 0.00011340978416155063, + "loss": 0.2899, + "step": 33640 + }, + { + "epoch": 1.29927796440017, + "grad_norm": 0.6649970412254333, + "learning_rate": 0.00011338404314194886, + "loss": 0.3809, + "step": 33650 + }, + { + "epoch": 1.2996640796941967, + "grad_norm": 1.7277917861938477, + "learning_rate": 0.00011335830212234709, + "loss": 0.3308, + "step": 33660 + }, + { + "epoch": 1.3000501949882235, + "grad_norm": 1.3269709348678589, + "learning_rate": 0.00011333256110274529, + "loss": 0.3682, + "step": 33670 + }, + { + "epoch": 1.3004363102822503, + "grad_norm": 0.20609407126903534, + "learning_rate": 0.0001133068200831435, + "loss": 0.1379, + "step": 33680 + }, + { + "epoch": 1.300822425576277, + "grad_norm": 0.6592215299606323, + "learning_rate": 0.00011328107906354171, + "loss": 0.2746, + "step": 33690 + }, + { + "epoch": 1.3012085408703038, + "grad_norm": 1.903635859489441, + "learning_rate": 0.00011325533804393993, + "loss": 0.4729, + "step": 33700 + }, + { + "epoch": 1.3015946561643306, + "grad_norm": 0.8432504534721375, + "learning_rate": 0.00011322959702433813, + "loss": 0.2835, + "step": 33710 + }, + { + "epoch": 1.3019807714583576, + "grad_norm": 0.9862542152404785, + "learning_rate": 0.00011320385600473635, + "loss": 0.1823, + "step": 33720 + }, + { + "epoch": 1.3023668867523843, + "grad_norm": 3.845738649368286, + "learning_rate": 0.00011317811498513458, + "loss": 0.2542, + "step": 33730 + }, + { + "epoch": 1.302753002046411, + "grad_norm": 0.6317747235298157, + "learning_rate": 0.00011315237396553278, + "loss": 0.22, + "step": 33740 + }, + { + "epoch": 1.3031391173404379, + "grad_norm": 2.5221354961395264, + "learning_rate": 0.000113126632945931, + "loss": 0.2253, + "step": 33750 + }, + { + "epoch": 1.3035252326344646, + "grad_norm": 1.3326247930526733, + "learning_rate": 0.0001131008919263292, + "loss": 0.2021, + "step": 33760 + }, + { + "epoch": 1.3039113479284914, + "grad_norm": 1.148047685623169, + "learning_rate": 0.00011307515090672742, + "loss": 0.3987, + "step": 33770 + }, + { + "epoch": 1.3042974632225182, + "grad_norm": 0.19721268117427826, + "learning_rate": 0.00011304940988712562, + "loss": 0.2642, + "step": 33780 + }, + { + "epoch": 1.3046835785165452, + "grad_norm": 1.4060617685317993, + "learning_rate": 0.00011302366886752385, + "loss": 0.2736, + "step": 33790 + }, + { + "epoch": 1.305069693810572, + "grad_norm": 1.0736548900604248, + "learning_rate": 0.00011299792784792207, + "loss": 0.2448, + "step": 33800 + }, + { + "epoch": 1.3054558091045987, + "grad_norm": 4.352476119995117, + "learning_rate": 0.00011297218682832027, + "loss": 0.383, + "step": 33810 + }, + { + "epoch": 1.3058419243986255, + "grad_norm": 0.2249228060245514, + "learning_rate": 0.0001129464458087185, + "loss": 0.14, + "step": 33820 + }, + { + "epoch": 1.3062280396926522, + "grad_norm": 0.4820781946182251, + "learning_rate": 0.0001129207047891167, + "loss": 0.248, + "step": 33830 + }, + { + "epoch": 
1.306614154986679, + "grad_norm": 2.2983391284942627, + "learning_rate": 0.00011289496376951491, + "loss": 0.2608, + "step": 33840 + }, + { + "epoch": 1.3070002702807058, + "grad_norm": 1.3315671682357788, + "learning_rate": 0.00011286922274991314, + "loss": 0.1727, + "step": 33850 + }, + { + "epoch": 1.3073863855747327, + "grad_norm": 2.060299873352051, + "learning_rate": 0.00011284348173031134, + "loss": 0.3097, + "step": 33860 + }, + { + "epoch": 1.3077725008687593, + "grad_norm": 2.096285581588745, + "learning_rate": 0.00011281774071070957, + "loss": 0.2305, + "step": 33870 + }, + { + "epoch": 1.3081586161627863, + "grad_norm": 0.4997636675834656, + "learning_rate": 0.00011279199969110777, + "loss": 0.1993, + "step": 33880 + }, + { + "epoch": 1.308544731456813, + "grad_norm": 0.58636474609375, + "learning_rate": 0.00011276625867150599, + "loss": 0.1873, + "step": 33890 + }, + { + "epoch": 1.3089308467508398, + "grad_norm": 0.9128592610359192, + "learning_rate": 0.00011274051765190419, + "loss": 0.1885, + "step": 33900 + }, + { + "epoch": 1.3093169620448666, + "grad_norm": 2.228043794631958, + "learning_rate": 0.0001127147766323024, + "loss": 0.3649, + "step": 33910 + }, + { + "epoch": 1.3097030773388934, + "grad_norm": 1.069002389907837, + "learning_rate": 0.00011268903561270063, + "loss": 0.5454, + "step": 33920 + }, + { + "epoch": 1.3100891926329201, + "grad_norm": 0.6207597851753235, + "learning_rate": 0.00011266329459309883, + "loss": 0.2329, + "step": 33930 + }, + { + "epoch": 1.3104753079269469, + "grad_norm": 1.262247920036316, + "learning_rate": 0.00011263755357349706, + "loss": 0.3437, + "step": 33940 + }, + { + "epoch": 1.3108614232209739, + "grad_norm": 1.7429994344711304, + "learning_rate": 0.00011261181255389526, + "loss": 0.228, + "step": 33950 + }, + { + "epoch": 1.3112475385150006, + "grad_norm": 0.646900475025177, + "learning_rate": 0.00011258607153429349, + "loss": 0.3739, + "step": 33960 + }, + { + "epoch": 1.3116336538090274, + "grad_norm": 1.8228782415390015, + "learning_rate": 0.0001125603305146917, + "loss": 0.2325, + "step": 33970 + }, + { + "epoch": 1.3120197691030542, + "grad_norm": 3.539228916168213, + "learning_rate": 0.0001125345894950899, + "loss": 0.194, + "step": 33980 + }, + { + "epoch": 1.312405884397081, + "grad_norm": 1.2801135778427124, + "learning_rate": 0.00011250884847548813, + "loss": 0.3069, + "step": 33990 + }, + { + "epoch": 1.3127919996911077, + "grad_norm": 3.6265695095062256, + "learning_rate": 0.00011248310745588633, + "loss": 0.3113, + "step": 34000 + }, + { + "epoch": 1.3131781149851345, + "grad_norm": 0.07370063662528992, + "learning_rate": 0.00011245736643628455, + "loss": 0.1449, + "step": 34010 + }, + { + "epoch": 1.3135642302791615, + "grad_norm": 1.0295637845993042, + "learning_rate": 0.00011243162541668275, + "loss": 0.22, + "step": 34020 + }, + { + "epoch": 1.313950345573188, + "grad_norm": 0.8803662061691284, + "learning_rate": 0.00011240588439708098, + "loss": 0.1368, + "step": 34030 + }, + { + "epoch": 1.314336460867215, + "grad_norm": 1.6597707271575928, + "learning_rate": 0.00011238014337747919, + "loss": 0.3038, + "step": 34040 + }, + { + "epoch": 1.3147225761612418, + "grad_norm": 2.115492343902588, + "learning_rate": 0.00011235440235787739, + "loss": 0.1754, + "step": 34050 + }, + { + "epoch": 1.3151086914552685, + "grad_norm": 0.8143919706344604, + "learning_rate": 0.00011232866133827562, + "loss": 0.3764, + "step": 34060 + }, + { + "epoch": 1.3154948067492953, + "grad_norm": 0.14369767904281616, + "learning_rate": 
0.00011230292031867382, + "loss": 0.1742, + "step": 34070 + }, + { + "epoch": 1.315880922043322, + "grad_norm": 1.0129845142364502, + "learning_rate": 0.00011227717929907205, + "loss": 0.1458, + "step": 34080 + }, + { + "epoch": 1.316267037337349, + "grad_norm": 2.7300291061401367, + "learning_rate": 0.00011225143827947025, + "loss": 0.3939, + "step": 34090 + }, + { + "epoch": 1.3166531526313756, + "grad_norm": 0.20205609500408173, + "learning_rate": 0.00011222569725986847, + "loss": 0.29, + "step": 34100 + }, + { + "epoch": 1.3170392679254026, + "grad_norm": 1.8928464651107788, + "learning_rate": 0.00011219995624026669, + "loss": 0.1742, + "step": 34110 + }, + { + "epoch": 1.3174253832194294, + "grad_norm": 0.2639687955379486, + "learning_rate": 0.00011217421522066488, + "loss": 0.1745, + "step": 34120 + }, + { + "epoch": 1.3178114985134561, + "grad_norm": 0.5906389355659485, + "learning_rate": 0.00011214847420106311, + "loss": 0.2134, + "step": 34130 + }, + { + "epoch": 1.318197613807483, + "grad_norm": 0.9190629720687866, + "learning_rate": 0.00011212273318146131, + "loss": 0.2547, + "step": 34140 + }, + { + "epoch": 1.3185837291015097, + "grad_norm": 0.5749151110649109, + "learning_rate": 0.00011209699216185954, + "loss": 0.1688, + "step": 34150 + }, + { + "epoch": 1.3189698443955364, + "grad_norm": 0.82295823097229, + "learning_rate": 0.00011207125114225777, + "loss": 0.2884, + "step": 34160 + }, + { + "epoch": 1.3193559596895632, + "grad_norm": 0.07816460728645325, + "learning_rate": 0.00011204551012265597, + "loss": 0.2418, + "step": 34170 + }, + { + "epoch": 1.3197420749835902, + "grad_norm": 0.6417407393455505, + "learning_rate": 0.00011201976910305418, + "loss": 0.2557, + "step": 34180 + }, + { + "epoch": 1.320128190277617, + "grad_norm": 6.093267440795898, + "learning_rate": 0.00011199402808345238, + "loss": 0.3088, + "step": 34190 + }, + { + "epoch": 1.3205143055716437, + "grad_norm": 1.8861887454986572, + "learning_rate": 0.0001119682870638506, + "loss": 0.2204, + "step": 34200 + }, + { + "epoch": 1.3209004208656705, + "grad_norm": 2.3272714614868164, + "learning_rate": 0.0001119425460442488, + "loss": 0.2236, + "step": 34210 + }, + { + "epoch": 1.3212865361596973, + "grad_norm": 0.9608810544013977, + "learning_rate": 0.00011191680502464703, + "loss": 0.1897, + "step": 34220 + }, + { + "epoch": 1.321672651453724, + "grad_norm": 1.2157350778579712, + "learning_rate": 0.00011189106400504526, + "loss": 0.1526, + "step": 34230 + }, + { + "epoch": 1.3220587667477508, + "grad_norm": 1.6684671640396118, + "learning_rate": 0.00011186532298544346, + "loss": 0.3394, + "step": 34240 + }, + { + "epoch": 1.3224448820417778, + "grad_norm": 2.0432374477386475, + "learning_rate": 0.00011183958196584167, + "loss": 0.2183, + "step": 34250 + }, + { + "epoch": 1.3228309973358043, + "grad_norm": 0.9436892867088318, + "learning_rate": 0.00011181384094623988, + "loss": 0.2947, + "step": 34260 + }, + { + "epoch": 1.3232171126298313, + "grad_norm": 0.23260092735290527, + "learning_rate": 0.0001117880999266381, + "loss": 0.114, + "step": 34270 + }, + { + "epoch": 1.323603227923858, + "grad_norm": 1.2291594743728638, + "learning_rate": 0.0001117623589070363, + "loss": 0.3145, + "step": 34280 + }, + { + "epoch": 1.3239893432178849, + "grad_norm": 0.41411107778549194, + "learning_rate": 0.00011173661788743452, + "loss": 0.2937, + "step": 34290 + }, + { + "epoch": 1.3243754585119116, + "grad_norm": 2.354405164718628, + "learning_rate": 0.00011171087686783275, + "loss": 0.3933, + "step": 34300 + }, + { + 
"epoch": 1.3247615738059384, + "grad_norm": 2.6997978687286377, + "learning_rate": 0.00011168513584823095, + "loss": 0.1494, + "step": 34310 + }, + { + "epoch": 1.3251476890999654, + "grad_norm": 2.8430919647216797, + "learning_rate": 0.00011165939482862916, + "loss": 0.2869, + "step": 34320 + }, + { + "epoch": 1.325533804393992, + "grad_norm": 1.1737356185913086, + "learning_rate": 0.00011163365380902738, + "loss": 0.2792, + "step": 34330 + }, + { + "epoch": 1.325919919688019, + "grad_norm": 4.123973846435547, + "learning_rate": 0.00011160791278942559, + "loss": 0.5211, + "step": 34340 + }, + { + "epoch": 1.3263060349820457, + "grad_norm": 0.8862038850784302, + "learning_rate": 0.00011158217176982382, + "loss": 0.2976, + "step": 34350 + }, + { + "epoch": 1.3266921502760725, + "grad_norm": 1.8690590858459473, + "learning_rate": 0.00011155643075022202, + "loss": 0.2485, + "step": 34360 + }, + { + "epoch": 1.3270782655700992, + "grad_norm": 2.885589599609375, + "learning_rate": 0.00011153068973062024, + "loss": 0.3084, + "step": 34370 + }, + { + "epoch": 1.327464380864126, + "grad_norm": 0.9898788928985596, + "learning_rate": 0.00011150494871101844, + "loss": 0.261, + "step": 34380 + }, + { + "epoch": 1.3278504961581528, + "grad_norm": 0.6879653930664062, + "learning_rate": 0.00011147920769141667, + "loss": 0.2082, + "step": 34390 + }, + { + "epoch": 1.3282366114521795, + "grad_norm": 1.2619003057479858, + "learning_rate": 0.00011145346667181487, + "loss": 0.2402, + "step": 34400 + }, + { + "epoch": 1.3286227267462065, + "grad_norm": 1.1212007999420166, + "learning_rate": 0.00011142772565221308, + "loss": 0.3525, + "step": 34410 + }, + { + "epoch": 1.3290088420402333, + "grad_norm": 1.8431956768035889, + "learning_rate": 0.00011140198463261131, + "loss": 0.2212, + "step": 34420 + }, + { + "epoch": 1.32939495733426, + "grad_norm": 0.6185423731803894, + "learning_rate": 0.00011137624361300951, + "loss": 0.2455, + "step": 34430 + }, + { + "epoch": 1.3297810726282868, + "grad_norm": 2.3791301250457764, + "learning_rate": 0.00011135050259340774, + "loss": 0.1763, + "step": 34440 + }, + { + "epoch": 1.3301671879223136, + "grad_norm": 0.4928603768348694, + "learning_rate": 0.00011132476157380594, + "loss": 0.2381, + "step": 34450 + }, + { + "epoch": 1.3305533032163404, + "grad_norm": 1.5636029243469238, + "learning_rate": 0.00011129902055420416, + "loss": 0.1368, + "step": 34460 + }, + { + "epoch": 1.3309394185103671, + "grad_norm": 0.9425283074378967, + "learning_rate": 0.00011127327953460238, + "loss": 0.2825, + "step": 34470 + }, + { + "epoch": 1.3313255338043941, + "grad_norm": 1.2257115840911865, + "learning_rate": 0.00011124753851500058, + "loss": 0.2547, + "step": 34480 + }, + { + "epoch": 1.3317116490984207, + "grad_norm": 0.9416170716285706, + "learning_rate": 0.0001112217974953988, + "loss": 0.2766, + "step": 34490 + }, + { + "epoch": 1.3320977643924476, + "grad_norm": 0.5123847126960754, + "learning_rate": 0.000111196056475797, + "loss": 0.4733, + "step": 34500 + }, + { + "epoch": 1.3324838796864744, + "grad_norm": 1.5581384897232056, + "learning_rate": 0.00011117031545619523, + "loss": 0.1597, + "step": 34510 + }, + { + "epoch": 1.3328699949805012, + "grad_norm": 2.377333879470825, + "learning_rate": 0.00011114457443659343, + "loss": 0.209, + "step": 34520 + }, + { + "epoch": 1.333256110274528, + "grad_norm": 1.7840913534164429, + "learning_rate": 0.00011111883341699166, + "loss": 0.1759, + "step": 34530 + }, + { + "epoch": 1.3336422255685547, + "grad_norm": 1.1825993061065674, + 
"learning_rate": 0.00011109309239738987, + "loss": 0.2464, + "step": 34540 + }, + { + "epoch": 1.3340283408625815, + "grad_norm": 1.8859659433364868, + "learning_rate": 0.00011106735137778807, + "loss": 0.3539, + "step": 34550 + }, + { + "epoch": 1.3344144561566083, + "grad_norm": 1.9698175191879272, + "learning_rate": 0.0001110416103581863, + "loss": 0.3301, + "step": 34560 + }, + { + "epoch": 1.3348005714506352, + "grad_norm": 0.7649385333061218, + "learning_rate": 0.0001110158693385845, + "loss": 0.232, + "step": 34570 + }, + { + "epoch": 1.335186686744662, + "grad_norm": 0.56386399269104, + "learning_rate": 0.00011099012831898272, + "loss": 0.3425, + "step": 34580 + }, + { + "epoch": 1.3355728020386888, + "grad_norm": 2.956003189086914, + "learning_rate": 0.00011096438729938092, + "loss": 0.1518, + "step": 34590 + }, + { + "epoch": 1.3359589173327155, + "grad_norm": 2.612029552459717, + "learning_rate": 0.00011093864627977915, + "loss": 0.2765, + "step": 34600 + }, + { + "epoch": 1.3363450326267423, + "grad_norm": 0.9674397706985474, + "learning_rate": 0.00011091290526017736, + "loss": 0.303, + "step": 34610 + }, + { + "epoch": 1.336731147920769, + "grad_norm": 0.9578921794891357, + "learning_rate": 0.00011088716424057556, + "loss": 0.1405, + "step": 34620 + }, + { + "epoch": 1.3371172632147958, + "grad_norm": 2.168065071105957, + "learning_rate": 0.00011086142322097379, + "loss": 0.1914, + "step": 34630 + }, + { + "epoch": 1.3375033785088228, + "grad_norm": 1.3166526556015015, + "learning_rate": 0.00011083568220137199, + "loss": 0.4134, + "step": 34640 + }, + { + "epoch": 1.3378894938028496, + "grad_norm": 0.9082283973693848, + "learning_rate": 0.00011080994118177022, + "loss": 0.2693, + "step": 34650 + }, + { + "epoch": 1.3382756090968764, + "grad_norm": 2.203007698059082, + "learning_rate": 0.00011078420016216844, + "loss": 0.1847, + "step": 34660 + }, + { + "epoch": 1.3386617243909031, + "grad_norm": 0.8101674914360046, + "learning_rate": 0.00011075845914256664, + "loss": 0.3111, + "step": 34670 + }, + { + "epoch": 1.33904783968493, + "grad_norm": 1.9545695781707764, + "learning_rate": 0.00011073271812296486, + "loss": 0.3761, + "step": 34680 + }, + { + "epoch": 1.3394339549789567, + "grad_norm": 1.547581672668457, + "learning_rate": 0.00011070697710336306, + "loss": 0.2374, + "step": 34690 + }, + { + "epoch": 1.3398200702729834, + "grad_norm": 3.3519034385681152, + "learning_rate": 0.00011068123608376128, + "loss": 0.1957, + "step": 34700 + }, + { + "epoch": 1.3402061855670104, + "grad_norm": 1.5508599281311035, + "learning_rate": 0.00011065549506415948, + "loss": 0.4171, + "step": 34710 + }, + { + "epoch": 1.340592300861037, + "grad_norm": 1.8547546863555908, + "learning_rate": 0.00011062975404455771, + "loss": 0.1872, + "step": 34720 + }, + { + "epoch": 1.340978416155064, + "grad_norm": 1.4600756168365479, + "learning_rate": 0.00011060401302495594, + "loss": 0.3515, + "step": 34730 + }, + { + "epoch": 1.3413645314490907, + "grad_norm": 0.05774044618010521, + "learning_rate": 0.00011057827200535414, + "loss": 0.1604, + "step": 34740 + }, + { + "epoch": 1.3417506467431175, + "grad_norm": 2.8793342113494873, + "learning_rate": 0.00011055253098575235, + "loss": 0.3095, + "step": 34750 + }, + { + "epoch": 1.3421367620371443, + "grad_norm": 2.241042375564575, + "learning_rate": 0.00011052678996615055, + "loss": 0.2511, + "step": 34760 + }, + { + "epoch": 1.342522877331171, + "grad_norm": 1.9320632219314575, + "learning_rate": 0.00011050104894654878, + "loss": 0.4493, + "step": 34770 + 
}, + { + "epoch": 1.3429089926251978, + "grad_norm": 1.6483882665634155, + "learning_rate": 0.000110475307926947, + "loss": 0.217, + "step": 34780 + }, + { + "epoch": 1.3432951079192246, + "grad_norm": 0.9635765552520752, + "learning_rate": 0.0001104495669073452, + "loss": 0.5458, + "step": 34790 + }, + { + "epoch": 1.3436812232132516, + "grad_norm": 1.2436567544937134, + "learning_rate": 0.00011042382588774343, + "loss": 0.2857, + "step": 34800 + }, + { + "epoch": 1.3440673385072783, + "grad_norm": 2.8082425594329834, + "learning_rate": 0.00011039808486814163, + "loss": 0.3439, + "step": 34810 + }, + { + "epoch": 1.344453453801305, + "grad_norm": 1.0430901050567627, + "learning_rate": 0.00011037234384853984, + "loss": 0.1404, + "step": 34820 + }, + { + "epoch": 1.3448395690953319, + "grad_norm": 1.7387149333953857, + "learning_rate": 0.00011034660282893806, + "loss": 0.395, + "step": 34830 + }, + { + "epoch": 1.3452256843893586, + "grad_norm": 1.2713748216629028, + "learning_rate": 0.00011032086180933627, + "loss": 0.29, + "step": 34840 + }, + { + "epoch": 1.3456117996833854, + "grad_norm": 0.26068204641342163, + "learning_rate": 0.0001102951207897345, + "loss": 0.1814, + "step": 34850 + }, + { + "epoch": 1.3459979149774122, + "grad_norm": 2.4163243770599365, + "learning_rate": 0.0001102693797701327, + "loss": 0.2445, + "step": 34860 + }, + { + "epoch": 1.3463840302714392, + "grad_norm": 2.2439687252044678, + "learning_rate": 0.00011024363875053092, + "loss": 0.3338, + "step": 34870 + }, + { + "epoch": 1.346770145565466, + "grad_norm": 0.2822403609752655, + "learning_rate": 0.00011021789773092912, + "loss": 0.1648, + "step": 34880 + }, + { + "epoch": 1.3471562608594927, + "grad_norm": 0.07319017499685287, + "learning_rate": 0.00011019215671132734, + "loss": 0.107, + "step": 34890 + }, + { + "epoch": 1.3475423761535195, + "grad_norm": 0.9809044003486633, + "learning_rate": 0.00011016641569172555, + "loss": 0.256, + "step": 34900 + }, + { + "epoch": 1.3479284914475462, + "grad_norm": 0.5016226768493652, + "learning_rate": 0.00011014067467212376, + "loss": 0.3229, + "step": 34910 + }, + { + "epoch": 1.348314606741573, + "grad_norm": 1.3026005029678345, + "learning_rate": 0.00011011493365252199, + "loss": 0.2145, + "step": 34920 + }, + { + "epoch": 1.3487007220355998, + "grad_norm": 1.0752215385437012, + "learning_rate": 0.00011008919263292019, + "loss": 0.2355, + "step": 34930 + }, + { + "epoch": 1.3490868373296268, + "grad_norm": 2.2703003883361816, + "learning_rate": 0.00011006345161331842, + "loss": 0.2079, + "step": 34940 + }, + { + "epoch": 1.3494729526236533, + "grad_norm": 1.1323810815811157, + "learning_rate": 0.00011003771059371662, + "loss": 0.1015, + "step": 34950 + }, + { + "epoch": 1.3498590679176803, + "grad_norm": 0.10813555121421814, + "learning_rate": 0.00011001196957411484, + "loss": 0.4214, + "step": 34960 + }, + { + "epoch": 1.350245183211707, + "grad_norm": 0.07815568149089813, + "learning_rate": 0.00010998622855451306, + "loss": 0.1211, + "step": 34970 + }, + { + "epoch": 1.3506312985057338, + "grad_norm": 0.6748234629631042, + "learning_rate": 0.00010996048753491126, + "loss": 0.3508, + "step": 34980 + }, + { + "epoch": 1.3510174137997606, + "grad_norm": 1.8556997776031494, + "learning_rate": 0.00010993474651530948, + "loss": 0.2268, + "step": 34990 + }, + { + "epoch": 1.3514035290937874, + "grad_norm": 0.8696061372756958, + "learning_rate": 0.00010990900549570768, + "loss": 0.4321, + "step": 35000 + }, + { + "epoch": 1.3517896443878141, + "grad_norm": 
0.42442765831947327, + "learning_rate": 0.00010988326447610591, + "loss": 0.1944, + "step": 35010 + }, + { + "epoch": 1.352175759681841, + "grad_norm": 1.0474554300308228, + "learning_rate": 0.00010985752345650411, + "loss": 0.1342, + "step": 35020 + }, + { + "epoch": 1.3525618749758679, + "grad_norm": 0.607037365436554, + "learning_rate": 0.00010983178243690234, + "loss": 0.2965, + "step": 35030 + }, + { + "epoch": 1.3529479902698947, + "grad_norm": 1.8160990476608276, + "learning_rate": 0.00010980604141730055, + "loss": 0.3192, + "step": 35040 + }, + { + "epoch": 1.3533341055639214, + "grad_norm": 2.0026509761810303, + "learning_rate": 0.00010978030039769875, + "loss": 0.3054, + "step": 35050 + }, + { + "epoch": 1.3537202208579482, + "grad_norm": 0.9203600883483887, + "learning_rate": 0.00010975455937809698, + "loss": 0.253, + "step": 35060 + }, + { + "epoch": 1.354106336151975, + "grad_norm": 0.33198195695877075, + "learning_rate": 0.00010972881835849518, + "loss": 0.3885, + "step": 35070 + }, + { + "epoch": 1.3544924514460017, + "grad_norm": 0.3201223611831665, + "learning_rate": 0.0001097030773388934, + "loss": 0.3029, + "step": 35080 + }, + { + "epoch": 1.3548785667400285, + "grad_norm": 1.2589943408966064, + "learning_rate": 0.0001096773363192916, + "loss": 0.4243, + "step": 35090 + }, + { + "epoch": 1.3552646820340555, + "grad_norm": 1.5106219053268433, + "learning_rate": 0.00010965159529968983, + "loss": 0.2585, + "step": 35100 + }, + { + "epoch": 1.3556507973280822, + "grad_norm": 1.429799199104309, + "learning_rate": 0.00010962585428008804, + "loss": 0.1961, + "step": 35110 + }, + { + "epoch": 1.356036912622109, + "grad_norm": 2.1211297512054443, + "learning_rate": 0.00010960011326048624, + "loss": 0.4057, + "step": 35120 + }, + { + "epoch": 1.3564230279161358, + "grad_norm": 2.5154731273651123, + "learning_rate": 0.00010957437224088447, + "loss": 0.3787, + "step": 35130 + }, + { + "epoch": 1.3568091432101625, + "grad_norm": 0.4914834201335907, + "learning_rate": 0.00010954863122128267, + "loss": 0.234, + "step": 35140 + }, + { + "epoch": 1.3571952585041893, + "grad_norm": 0.26685893535614014, + "learning_rate": 0.0001095228902016809, + "loss": 0.2841, + "step": 35150 + }, + { + "epoch": 1.357581373798216, + "grad_norm": 0.15462155640125275, + "learning_rate": 0.00010949714918207912, + "loss": 0.2269, + "step": 35160 + }, + { + "epoch": 1.357967489092243, + "grad_norm": 1.3887063264846802, + "learning_rate": 0.00010947140816247732, + "loss": 0.3455, + "step": 35170 + }, + { + "epoch": 1.3583536043862696, + "grad_norm": 0.786374032497406, + "learning_rate": 0.00010944566714287554, + "loss": 0.2897, + "step": 35180 + }, + { + "epoch": 1.3587397196802966, + "grad_norm": 1.100475549697876, + "learning_rate": 0.00010941992612327374, + "loss": 0.2892, + "step": 35190 + }, + { + "epoch": 1.3591258349743234, + "grad_norm": 0.7676102519035339, + "learning_rate": 0.00010939418510367196, + "loss": 0.1942, + "step": 35200 + }, + { + "epoch": 1.3595119502683501, + "grad_norm": 0.33462053537368774, + "learning_rate": 0.00010936844408407016, + "loss": 0.2872, + "step": 35210 + }, + { + "epoch": 1.359898065562377, + "grad_norm": 0.9294387698173523, + "learning_rate": 0.00010934270306446839, + "loss": 0.2617, + "step": 35220 + }, + { + "epoch": 1.3602841808564037, + "grad_norm": 0.3169979453086853, + "learning_rate": 0.00010931696204486662, + "loss": 0.2942, + "step": 35230 + }, + { + "epoch": 1.3606702961504304, + "grad_norm": 2.1339616775512695, + "learning_rate": 0.00010929122102526482, + 
"loss": 0.4448, + "step": 35240 + }, + { + "epoch": 1.3610564114444572, + "grad_norm": 0.9430062770843506, + "learning_rate": 0.00010926548000566303, + "loss": 0.2051, + "step": 35250 + }, + { + "epoch": 1.3614425267384842, + "grad_norm": 3.1187360286712646, + "learning_rate": 0.00010923973898606123, + "loss": 0.2274, + "step": 35260 + }, + { + "epoch": 1.361828642032511, + "grad_norm": 1.4727579355239868, + "learning_rate": 0.00010921399796645946, + "loss": 0.3757, + "step": 35270 + }, + { + "epoch": 1.3622147573265377, + "grad_norm": 2.157560348510742, + "learning_rate": 0.00010918825694685768, + "loss": 0.3096, + "step": 35280 + }, + { + "epoch": 1.3626008726205645, + "grad_norm": 0.33457377552986145, + "learning_rate": 0.00010916251592725588, + "loss": 0.1489, + "step": 35290 + }, + { + "epoch": 1.3629869879145913, + "grad_norm": 0.9005904197692871, + "learning_rate": 0.00010913677490765411, + "loss": 0.1826, + "step": 35300 + }, + { + "epoch": 1.363373103208618, + "grad_norm": 2.1222829818725586, + "learning_rate": 0.00010911103388805231, + "loss": 0.1965, + "step": 35310 + }, + { + "epoch": 1.3637592185026448, + "grad_norm": 1.3881357908248901, + "learning_rate": 0.00010908529286845052, + "loss": 0.1791, + "step": 35320 + }, + { + "epoch": 1.3641453337966718, + "grad_norm": 1.7574503421783447, + "learning_rate": 0.00010905955184884872, + "loss": 0.3316, + "step": 35330 + }, + { + "epoch": 1.3645314490906983, + "grad_norm": 0.1967727392911911, + "learning_rate": 0.00010903381082924695, + "loss": 0.2331, + "step": 35340 + }, + { + "epoch": 1.3649175643847253, + "grad_norm": 0.8974360823631287, + "learning_rate": 0.00010900806980964518, + "loss": 0.2589, + "step": 35350 + }, + { + "epoch": 1.365303679678752, + "grad_norm": 2.0996744632720947, + "learning_rate": 0.00010898232879004338, + "loss": 0.3663, + "step": 35360 + }, + { + "epoch": 1.3656897949727789, + "grad_norm": 0.5678316354751587, + "learning_rate": 0.0001089565877704416, + "loss": 0.1729, + "step": 35370 + }, + { + "epoch": 1.3660759102668056, + "grad_norm": 2.3381874561309814, + "learning_rate": 0.0001089308467508398, + "loss": 0.1615, + "step": 35380 + }, + { + "epoch": 1.3664620255608324, + "grad_norm": 1.0276836156845093, + "learning_rate": 0.00010890510573123802, + "loss": 0.3359, + "step": 35390 + }, + { + "epoch": 1.3668481408548594, + "grad_norm": 2.4374940395355225, + "learning_rate": 0.00010887936471163622, + "loss": 0.2435, + "step": 35400 + }, + { + "epoch": 1.367234256148886, + "grad_norm": 0.45221665501594543, + "learning_rate": 0.00010885362369203444, + "loss": 0.2555, + "step": 35410 + }, + { + "epoch": 1.367620371442913, + "grad_norm": 2.608090400695801, + "learning_rate": 0.00010882788267243267, + "loss": 0.2465, + "step": 35420 + }, + { + "epoch": 1.3680064867369397, + "grad_norm": 1.4186642169952393, + "learning_rate": 0.00010880214165283087, + "loss": 0.1674, + "step": 35430 + }, + { + "epoch": 1.3683926020309665, + "grad_norm": 0.659479022026062, + "learning_rate": 0.0001087764006332291, + "loss": 0.2926, + "step": 35440 + }, + { + "epoch": 1.3687787173249932, + "grad_norm": 0.9219567179679871, + "learning_rate": 0.0001087506596136273, + "loss": 0.2001, + "step": 35450 + }, + { + "epoch": 1.36916483261902, + "grad_norm": 0.8070804476737976, + "learning_rate": 0.00010872491859402551, + "loss": 0.2178, + "step": 35460 + }, + { + "epoch": 1.3695509479130468, + "grad_norm": 2.9981069564819336, + "learning_rate": 0.00010869917757442374, + "loss": 0.3079, + "step": 35470 + }, + { + "epoch": 
1.3699370632070735, + "grad_norm": 0.7891242504119873, + "learning_rate": 0.00010867343655482194, + "loss": 0.2765, + "step": 35480 + }, + { + "epoch": 1.3703231785011005, + "grad_norm": 1.448637843132019, + "learning_rate": 0.00010864769553522016, + "loss": 0.3521, + "step": 35490 + }, + { + "epoch": 1.3707092937951273, + "grad_norm": 0.07628043740987778, + "learning_rate": 0.00010862195451561836, + "loss": 0.2083, + "step": 35500 + }, + { + "epoch": 1.371095409089154, + "grad_norm": 0.7549735307693481, + "learning_rate": 0.00010859621349601659, + "loss": 0.2536, + "step": 35510 + }, + { + "epoch": 1.3714815243831808, + "grad_norm": 1.3548041582107544, + "learning_rate": 0.00010857047247641479, + "loss": 0.251, + "step": 35520 + }, + { + "epoch": 1.3718676396772076, + "grad_norm": 0.530010998249054, + "learning_rate": 0.000108544731456813, + "loss": 0.1917, + "step": 35530 + }, + { + "epoch": 1.3722537549712344, + "grad_norm": 0.4148992896080017, + "learning_rate": 0.00010851899043721123, + "loss": 0.335, + "step": 35540 + }, + { + "epoch": 1.3726398702652611, + "grad_norm": 1.5118776559829712, + "learning_rate": 0.00010849324941760943, + "loss": 0.2159, + "step": 35550 + }, + { + "epoch": 1.3730259855592881, + "grad_norm": 1.036889910697937, + "learning_rate": 0.00010846750839800766, + "loss": 0.2975, + "step": 35560 + }, + { + "epoch": 1.3734121008533147, + "grad_norm": 1.724263072013855, + "learning_rate": 0.00010844176737840586, + "loss": 0.1476, + "step": 35570 + }, + { + "epoch": 1.3737982161473417, + "grad_norm": 1.599007487297058, + "learning_rate": 0.00010841602635880408, + "loss": 0.2539, + "step": 35580 + }, + { + "epoch": 1.3741843314413684, + "grad_norm": 2.9119279384613037, + "learning_rate": 0.00010839028533920228, + "loss": 0.2688, + "step": 35590 + }, + { + "epoch": 1.3745704467353952, + "grad_norm": 1.8647874593734741, + "learning_rate": 0.00010836454431960051, + "loss": 0.4158, + "step": 35600 + }, + { + "epoch": 1.374956562029422, + "grad_norm": 3.925290822982788, + "learning_rate": 0.00010833880329999872, + "loss": 0.3333, + "step": 35610 + }, + { + "epoch": 1.3753426773234487, + "grad_norm": 0.7124634385108948, + "learning_rate": 0.00010831306228039692, + "loss": 0.1069, + "step": 35620 + }, + { + "epoch": 1.3757287926174757, + "grad_norm": 1.303579330444336, + "learning_rate": 0.00010828732126079515, + "loss": 0.2898, + "step": 35630 + }, + { + "epoch": 1.3761149079115023, + "grad_norm": 3.921804189682007, + "learning_rate": 0.00010826158024119335, + "loss": 0.4212, + "step": 35640 + }, + { + "epoch": 1.3765010232055293, + "grad_norm": 1.3194564580917358, + "learning_rate": 0.00010823583922159158, + "loss": 0.2771, + "step": 35650 + }, + { + "epoch": 1.376887138499556, + "grad_norm": 1.4237637519836426, + "learning_rate": 0.00010821009820198979, + "loss": 0.2463, + "step": 35660 + }, + { + "epoch": 1.3772732537935828, + "grad_norm": 1.8165888786315918, + "learning_rate": 0.000108184357182388, + "loss": 0.291, + "step": 35670 + }, + { + "epoch": 1.3776593690876096, + "grad_norm": 1.1056426763534546, + "learning_rate": 0.00010815861616278622, + "loss": 0.2525, + "step": 35680 + }, + { + "epoch": 1.3780454843816363, + "grad_norm": 1.483189582824707, + "learning_rate": 0.00010813287514318442, + "loss": 0.1569, + "step": 35690 + }, + { + "epoch": 1.378431599675663, + "grad_norm": 1.0666841268539429, + "learning_rate": 0.00010810713412358264, + "loss": 0.235, + "step": 35700 + }, + { + "epoch": 1.3788177149696899, + "grad_norm": 1.0299845933914185, + "learning_rate": 
0.00010808139310398084, + "loss": 0.3892, + "step": 35710 + }, + { + "epoch": 1.3792038302637168, + "grad_norm": 2.3474409580230713, + "learning_rate": 0.00010805565208437907, + "loss": 0.3417, + "step": 35720 + }, + { + "epoch": 1.3795899455577436, + "grad_norm": 1.7456315755844116, + "learning_rate": 0.0001080299110647773, + "loss": 0.2538, + "step": 35730 + }, + { + "epoch": 1.3799760608517704, + "grad_norm": 2.866103410720825, + "learning_rate": 0.0001080041700451755, + "loss": 0.1619, + "step": 35740 + }, + { + "epoch": 1.3803621761457971, + "grad_norm": 0.29136407375335693, + "learning_rate": 0.00010797842902557371, + "loss": 0.2692, + "step": 35750 + }, + { + "epoch": 1.380748291439824, + "grad_norm": 0.8046161532402039, + "learning_rate": 0.00010795268800597191, + "loss": 0.1575, + "step": 35760 + }, + { + "epoch": 1.3811344067338507, + "grad_norm": 0.6451787352561951, + "learning_rate": 0.00010792694698637014, + "loss": 0.4914, + "step": 35770 + }, + { + "epoch": 1.3815205220278775, + "grad_norm": 0.7289161086082458, + "learning_rate": 0.00010790120596676836, + "loss": 0.1895, + "step": 35780 + }, + { + "epoch": 1.3819066373219044, + "grad_norm": 0.8300430178642273, + "learning_rate": 0.00010787546494716656, + "loss": 0.3663, + "step": 35790 + }, + { + "epoch": 1.382292752615931, + "grad_norm": 0.17713364958763123, + "learning_rate": 0.00010784972392756479, + "loss": 0.3189, + "step": 35800 + }, + { + "epoch": 1.382678867909958, + "grad_norm": 0.903222918510437, + "learning_rate": 0.00010782398290796299, + "loss": 0.1577, + "step": 35810 + }, + { + "epoch": 1.3830649832039847, + "grad_norm": 0.08617932349443436, + "learning_rate": 0.0001077982418883612, + "loss": 0.2872, + "step": 35820 + }, + { + "epoch": 1.3834510984980115, + "grad_norm": 1.9590895175933838, + "learning_rate": 0.0001077725008687594, + "loss": 0.2907, + "step": 35830 + }, + { + "epoch": 1.3838372137920383, + "grad_norm": 1.2515161037445068, + "learning_rate": 0.00010774675984915763, + "loss": 0.177, + "step": 35840 + }, + { + "epoch": 1.384223329086065, + "grad_norm": 1.6171292066574097, + "learning_rate": 0.00010772101882955586, + "loss": 0.2321, + "step": 35850 + }, + { + "epoch": 1.3846094443800918, + "grad_norm": 0.13681405782699585, + "learning_rate": 0.00010769527780995406, + "loss": 0.247, + "step": 35860 + }, + { + "epoch": 1.3849955596741186, + "grad_norm": 1.1949968338012695, + "learning_rate": 0.00010766953679035228, + "loss": 0.2978, + "step": 35870 + }, + { + "epoch": 1.3853816749681456, + "grad_norm": 0.17001692950725555, + "learning_rate": 0.00010764379577075048, + "loss": 0.3241, + "step": 35880 + }, + { + "epoch": 1.3857677902621723, + "grad_norm": 0.8227952122688293, + "learning_rate": 0.0001076180547511487, + "loss": 0.3499, + "step": 35890 + }, + { + "epoch": 1.386153905556199, + "grad_norm": 1.4185482263565063, + "learning_rate": 0.0001075923137315469, + "loss": 0.3109, + "step": 35900 + }, + { + "epoch": 1.3865400208502259, + "grad_norm": 0.9533351063728333, + "learning_rate": 0.00010756657271194512, + "loss": 0.24, + "step": 35910 + }, + { + "epoch": 1.3869261361442526, + "grad_norm": 1.077789306640625, + "learning_rate": 0.00010754083169234335, + "loss": 0.2662, + "step": 35920 + }, + { + "epoch": 1.3873122514382794, + "grad_norm": 1.3528363704681396, + "learning_rate": 0.00010751509067274155, + "loss": 0.1623, + "step": 35930 + }, + { + "epoch": 1.3876983667323062, + "grad_norm": 0.25122806429862976, + "learning_rate": 0.00010748934965313978, + "loss": 0.1639, + "step": 35940 + }, + { + 
"epoch": 1.3880844820263332, + "grad_norm": 0.9446159601211548, + "learning_rate": 0.00010746360863353798, + "loss": 0.2035, + "step": 35950 + }, + { + "epoch": 1.38847059732036, + "grad_norm": 1.2258719205856323, + "learning_rate": 0.00010743786761393619, + "loss": 0.4247, + "step": 35960 + }, + { + "epoch": 1.3888567126143867, + "grad_norm": 3.117729663848877, + "learning_rate": 0.00010741212659433442, + "loss": 0.2569, + "step": 35970 + }, + { + "epoch": 1.3892428279084135, + "grad_norm": 0.6333123445510864, + "learning_rate": 0.00010738638557473262, + "loss": 0.1843, + "step": 35980 + }, + { + "epoch": 1.3896289432024402, + "grad_norm": 1.49360191822052, + "learning_rate": 0.00010736064455513084, + "loss": 0.2318, + "step": 35990 + }, + { + "epoch": 1.390015058496467, + "grad_norm": 3.9082753658294678, + "learning_rate": 0.00010733490353552904, + "loss": 0.2037, + "step": 36000 + }, + { + "epoch": 1.3904011737904938, + "grad_norm": 0.5687323808670044, + "learning_rate": 0.00010730916251592727, + "loss": 0.0871, + "step": 36010 + }, + { + "epoch": 1.3907872890845208, + "grad_norm": 0.3480868339538574, + "learning_rate": 0.00010728342149632547, + "loss": 0.4009, + "step": 36020 + }, + { + "epoch": 1.3911734043785473, + "grad_norm": 1.325042486190796, + "learning_rate": 0.00010725768047672368, + "loss": 0.5143, + "step": 36030 + }, + { + "epoch": 1.3915595196725743, + "grad_norm": 2.114786386489868, + "learning_rate": 0.00010723193945712191, + "loss": 0.2944, + "step": 36040 + }, + { + "epoch": 1.391945634966601, + "grad_norm": 1.716272234916687, + "learning_rate": 0.00010720619843752011, + "loss": 0.3793, + "step": 36050 + }, + { + "epoch": 1.3923317502606278, + "grad_norm": 2.057535171508789, + "learning_rate": 0.00010718045741791834, + "loss": 0.199, + "step": 36060 + }, + { + "epoch": 1.3927178655546546, + "grad_norm": 2.371248483657837, + "learning_rate": 0.00010715471639831653, + "loss": 0.3276, + "step": 36070 + }, + { + "epoch": 1.3931039808486814, + "grad_norm": 0.1784186065196991, + "learning_rate": 0.00010712897537871476, + "loss": 0.0922, + "step": 36080 + }, + { + "epoch": 1.3934900961427081, + "grad_norm": 0.7923040390014648, + "learning_rate": 0.00010710323435911298, + "loss": 0.1599, + "step": 36090 + }, + { + "epoch": 1.393876211436735, + "grad_norm": 1.9013831615447998, + "learning_rate": 0.00010707749333951117, + "loss": 0.4997, + "step": 36100 + }, + { + "epoch": 1.394262326730762, + "grad_norm": 1.8659415245056152, + "learning_rate": 0.0001070517523199094, + "loss": 0.1851, + "step": 36110 + }, + { + "epoch": 1.3946484420247887, + "grad_norm": 2.5775375366210938, + "learning_rate": 0.0001070260113003076, + "loss": 0.2384, + "step": 36120 + }, + { + "epoch": 1.3950345573188154, + "grad_norm": 0.21943879127502441, + "learning_rate": 0.00010700027028070583, + "loss": 0.3996, + "step": 36130 + }, + { + "epoch": 1.3954206726128422, + "grad_norm": 1.1734743118286133, + "learning_rate": 0.00010697452926110403, + "loss": 0.1737, + "step": 36140 + }, + { + "epoch": 1.395806787906869, + "grad_norm": 0.697695791721344, + "learning_rate": 0.00010694878824150225, + "loss": 0.2778, + "step": 36150 + }, + { + "epoch": 1.3961929032008957, + "grad_norm": 3.2881579399108887, + "learning_rate": 0.00010692304722190047, + "loss": 0.3198, + "step": 36160 + }, + { + "epoch": 1.3965790184949225, + "grad_norm": 0.1592467725276947, + "learning_rate": 0.00010689730620229868, + "loss": 0.2591, + "step": 36170 + }, + { + "epoch": 1.3969651337889495, + "grad_norm": 0.4579029083251953, + 
"learning_rate": 0.0001068715651826969, + "loss": 0.2984, + "step": 36180 + }, + { + "epoch": 1.3973512490829763, + "grad_norm": 0.4286015033721924, + "learning_rate": 0.0001068458241630951, + "loss": 0.4098, + "step": 36190 + }, + { + "epoch": 1.397737364377003, + "grad_norm": 1.7824127674102783, + "learning_rate": 0.00010682008314349332, + "loss": 0.2446, + "step": 36200 + }, + { + "epoch": 1.3981234796710298, + "grad_norm": 0.8584449887275696, + "learning_rate": 0.00010679434212389152, + "loss": 0.1415, + "step": 36210 + }, + { + "epoch": 1.3985095949650566, + "grad_norm": 1.1699339151382446, + "learning_rate": 0.00010676860110428975, + "loss": 0.1706, + "step": 36220 + }, + { + "epoch": 1.3988957102590833, + "grad_norm": 2.615877389907837, + "learning_rate": 0.00010674286008468796, + "loss": 0.2605, + "step": 36230 + }, + { + "epoch": 1.39928182555311, + "grad_norm": 2.182037591934204, + "learning_rate": 0.00010671711906508617, + "loss": 0.2067, + "step": 36240 + }, + { + "epoch": 1.399667940847137, + "grad_norm": 2.183263063430786, + "learning_rate": 0.00010669137804548439, + "loss": 0.4704, + "step": 36250 + }, + { + "epoch": 1.4000540561411636, + "grad_norm": 3.505791187286377, + "learning_rate": 0.00010666563702588259, + "loss": 0.3385, + "step": 36260 + }, + { + "epoch": 1.4004401714351906, + "grad_norm": 1.2262030839920044, + "learning_rate": 0.00010663989600628081, + "loss": 0.3999, + "step": 36270 + }, + { + "epoch": 1.4008262867292174, + "grad_norm": 2.4024577140808105, + "learning_rate": 0.00010661415498667904, + "loss": 0.1984, + "step": 36280 + }, + { + "epoch": 1.4012124020232442, + "grad_norm": 0.4166090786457062, + "learning_rate": 0.00010658841396707724, + "loss": 0.2448, + "step": 36290 + }, + { + "epoch": 1.401598517317271, + "grad_norm": 0.422590047121048, + "learning_rate": 0.00010656267294747547, + "loss": 0.2464, + "step": 36300 + }, + { + "epoch": 1.4019846326112977, + "grad_norm": 2.287503480911255, + "learning_rate": 0.00010653693192787367, + "loss": 0.1621, + "step": 36310 + }, + { + "epoch": 1.4023707479053245, + "grad_norm": 0.8126110434532166, + "learning_rate": 0.00010651119090827188, + "loss": 0.2097, + "step": 36320 + }, + { + "epoch": 1.4027568631993512, + "grad_norm": 0.683016836643219, + "learning_rate": 0.00010648544988867008, + "loss": 0.1512, + "step": 36330 + }, + { + "epoch": 1.4031429784933782, + "grad_norm": 1.5477893352508545, + "learning_rate": 0.00010645970886906831, + "loss": 0.2169, + "step": 36340 + }, + { + "epoch": 1.403529093787405, + "grad_norm": 2.183166265487671, + "learning_rate": 0.00010643396784946653, + "loss": 0.4307, + "step": 36350 + }, + { + "epoch": 1.4039152090814317, + "grad_norm": 1.5782747268676758, + "learning_rate": 0.00010640822682986473, + "loss": 0.291, + "step": 36360 + }, + { + "epoch": 1.4043013243754585, + "grad_norm": 1.2823392152786255, + "learning_rate": 0.00010638248581026296, + "loss": 0.315, + "step": 36370 + }, + { + "epoch": 1.4046874396694853, + "grad_norm": 2.0126500129699707, + "learning_rate": 0.00010635674479066116, + "loss": 0.2565, + "step": 36380 + }, + { + "epoch": 1.405073554963512, + "grad_norm": 2.490217447280884, + "learning_rate": 0.00010633100377105937, + "loss": 0.2665, + "step": 36390 + }, + { + "epoch": 1.4054596702575388, + "grad_norm": 1.2054855823516846, + "learning_rate": 0.00010630526275145757, + "loss": 0.5455, + "step": 36400 + }, + { + "epoch": 1.4058457855515658, + "grad_norm": 0.7968757748603821, + "learning_rate": 0.0001062795217318558, + "loss": 0.333, + "step": 36410 + }, 
+ { + "epoch": 1.4062319008455926, + "grad_norm": 1.1027718782424927, + "learning_rate": 0.00010625378071225403, + "loss": 0.2019, + "step": 36420 + }, + { + "epoch": 1.4066180161396193, + "grad_norm": 1.987302541732788, + "learning_rate": 0.00010622803969265223, + "loss": 0.3159, + "step": 36430 + }, + { + "epoch": 1.407004131433646, + "grad_norm": 1.5426512956619263, + "learning_rate": 0.00010620229867305045, + "loss": 0.3759, + "step": 36440 + }, + { + "epoch": 1.4073902467276729, + "grad_norm": 0.5228156447410583, + "learning_rate": 0.00010617655765344865, + "loss": 0.2465, + "step": 36450 + }, + { + "epoch": 1.4077763620216996, + "grad_norm": 0.46890121698379517, + "learning_rate": 0.00010615081663384687, + "loss": 0.302, + "step": 36460 + }, + { + "epoch": 1.4081624773157264, + "grad_norm": 2.1506495475769043, + "learning_rate": 0.0001061250756142451, + "loss": 0.2569, + "step": 36470 + }, + { + "epoch": 1.4085485926097534, + "grad_norm": 2.307468891143799, + "learning_rate": 0.0001060993345946433, + "loss": 0.2009, + "step": 36480 + }, + { + "epoch": 1.40893470790378, + "grad_norm": 0.07033026963472366, + "learning_rate": 0.00010607359357504152, + "loss": 0.1728, + "step": 36490 + }, + { + "epoch": 1.409320823197807, + "grad_norm": 0.3262972831726074, + "learning_rate": 0.00010604785255543972, + "loss": 0.2905, + "step": 36500 + }, + { + "epoch": 1.4097069384918337, + "grad_norm": 0.755646824836731, + "learning_rate": 0.00010602211153583795, + "loss": 0.3287, + "step": 36510 + }, + { + "epoch": 1.4100930537858605, + "grad_norm": 1.1180161237716675, + "learning_rate": 0.00010599637051623615, + "loss": 0.2743, + "step": 36520 + }, + { + "epoch": 1.4104791690798872, + "grad_norm": 1.2358392477035522, + "learning_rate": 0.00010597062949663436, + "loss": 0.2672, + "step": 36530 + }, + { + "epoch": 1.410865284373914, + "grad_norm": 1.4188988208770752, + "learning_rate": 0.00010594488847703259, + "loss": 0.2552, + "step": 36540 + }, + { + "epoch": 1.4112513996679408, + "grad_norm": 1.4727978706359863, + "learning_rate": 0.00010591914745743079, + "loss": 0.2256, + "step": 36550 + }, + { + "epoch": 1.4116375149619675, + "grad_norm": 0.08973213285207748, + "learning_rate": 0.00010589340643782901, + "loss": 0.224, + "step": 36560 + }, + { + "epoch": 1.4120236302559945, + "grad_norm": 0.9915102124214172, + "learning_rate": 0.00010586766541822721, + "loss": 0.195, + "step": 36570 + }, + { + "epoch": 1.4124097455500213, + "grad_norm": 0.8524800539016724, + "learning_rate": 0.00010584192439862544, + "loss": 0.1492, + "step": 36580 + }, + { + "epoch": 1.412795860844048, + "grad_norm": 1.8414466381072998, + "learning_rate": 0.00010581618337902365, + "loss": 0.3412, + "step": 36590 + }, + { + "epoch": 1.4131819761380748, + "grad_norm": 2.596547842025757, + "learning_rate": 0.00010579044235942185, + "loss": 0.3469, + "step": 36600 + }, + { + "epoch": 1.4135680914321016, + "grad_norm": 0.5367813110351562, + "learning_rate": 0.00010576470133982008, + "loss": 0.2274, + "step": 36610 + }, + { + "epoch": 1.4139542067261284, + "grad_norm": 2.70858097076416, + "learning_rate": 0.00010573896032021828, + "loss": 0.171, + "step": 36620 + }, + { + "epoch": 1.4143403220201551, + "grad_norm": 1.7077667713165283, + "learning_rate": 0.00010571321930061651, + "loss": 0.2638, + "step": 36630 + }, + { + "epoch": 1.4147264373141821, + "grad_norm": 0.9189953804016113, + "learning_rate": 0.00010568747828101471, + "loss": 0.2283, + "step": 36640 + }, + { + "epoch": 1.4151125526082087, + "grad_norm": 2.0240087509155273, + 
"learning_rate": 0.00010566173726141293, + "loss": 0.3166, + "step": 36650 + }, + { + "epoch": 1.4154986679022357, + "grad_norm": 3.3304507732391357, + "learning_rate": 0.00010563599624181115, + "loss": 0.3046, + "step": 36660 + }, + { + "epoch": 1.4158847831962624, + "grad_norm": 1.955929160118103, + "learning_rate": 0.00010561025522220935, + "loss": 0.2653, + "step": 36670 + }, + { + "epoch": 1.4162708984902892, + "grad_norm": 2.2465381622314453, + "learning_rate": 0.00010558451420260757, + "loss": 0.3119, + "step": 36680 + }, + { + "epoch": 1.416657013784316, + "grad_norm": 2.108614921569824, + "learning_rate": 0.00010555877318300577, + "loss": 0.2872, + "step": 36690 + }, + { + "epoch": 1.4170431290783427, + "grad_norm": 1.35493004322052, + "learning_rate": 0.000105533032163404, + "loss": 0.1491, + "step": 36700 + }, + { + "epoch": 1.4174292443723697, + "grad_norm": 0.9102393984794617, + "learning_rate": 0.0001055072911438022, + "loss": 0.1565, + "step": 36710 + }, + { + "epoch": 1.4178153596663963, + "grad_norm": 1.7878345251083374, + "learning_rate": 0.00010548155012420043, + "loss": 0.4864, + "step": 36720 + }, + { + "epoch": 1.4182014749604233, + "grad_norm": 1.4333637952804565, + "learning_rate": 0.00010545580910459864, + "loss": 0.3246, + "step": 36730 + }, + { + "epoch": 1.41858759025445, + "grad_norm": 0.0966360941529274, + "learning_rate": 0.00010543006808499684, + "loss": 0.1758, + "step": 36740 + }, + { + "epoch": 1.4189737055484768, + "grad_norm": 0.30043545365333557, + "learning_rate": 0.00010540432706539507, + "loss": 0.1811, + "step": 36750 + }, + { + "epoch": 1.4193598208425036, + "grad_norm": 1.7705951929092407, + "learning_rate": 0.00010537858604579327, + "loss": 0.1824, + "step": 36760 + }, + { + "epoch": 1.4197459361365303, + "grad_norm": 1.2025195360183716, + "learning_rate": 0.0001053528450261915, + "loss": 0.1463, + "step": 36770 + }, + { + "epoch": 1.420132051430557, + "grad_norm": 0.3154304325580597, + "learning_rate": 0.00010532710400658972, + "loss": 0.1817, + "step": 36780 + }, + { + "epoch": 1.4205181667245839, + "grad_norm": 3.392331838607788, + "learning_rate": 0.00010530136298698792, + "loss": 0.2938, + "step": 36790 + }, + { + "epoch": 1.4209042820186109, + "grad_norm": 0.7256132364273071, + "learning_rate": 0.00010527562196738613, + "loss": 0.28, + "step": 36800 + }, + { + "epoch": 1.4212903973126376, + "grad_norm": 1.9007991552352905, + "learning_rate": 0.00010524988094778435, + "loss": 0.2306, + "step": 36810 + }, + { + "epoch": 1.4216765126066644, + "grad_norm": 3.21189546585083, + "learning_rate": 0.00010522413992818256, + "loss": 0.2945, + "step": 36820 + }, + { + "epoch": 1.4220626279006912, + "grad_norm": 0.20476600527763367, + "learning_rate": 0.00010519839890858076, + "loss": 0.3129, + "step": 36830 + }, + { + "epoch": 1.422448743194718, + "grad_norm": 1.4225107431411743, + "learning_rate": 0.00010517265788897899, + "loss": 0.2262, + "step": 36840 + }, + { + "epoch": 1.4228348584887447, + "grad_norm": 1.203728437423706, + "learning_rate": 0.00010514691686937721, + "loss": 0.176, + "step": 36850 + }, + { + "epoch": 1.4232209737827715, + "grad_norm": 0.3001759648323059, + "learning_rate": 0.00010512117584977541, + "loss": 0.218, + "step": 36860 + }, + { + "epoch": 1.4236070890767984, + "grad_norm": 2.3143389225006104, + "learning_rate": 0.00010509543483017363, + "loss": 0.1562, + "step": 36870 + }, + { + "epoch": 1.423993204370825, + "grad_norm": 0.5087364912033081, + "learning_rate": 0.00010506969381057184, + "loss": 0.1207, + "step": 36880 + 
}, + { + "epoch": 1.424379319664852, + "grad_norm": 1.6521960496902466, + "learning_rate": 0.00010504395279097005, + "loss": 0.4756, + "step": 36890 + }, + { + "epoch": 1.4247654349588788, + "grad_norm": 0.09236706793308258, + "learning_rate": 0.00010501821177136825, + "loss": 0.1755, + "step": 36900 + }, + { + "epoch": 1.4251515502529055, + "grad_norm": 0.2143094390630722, + "learning_rate": 0.00010499247075176648, + "loss": 0.2126, + "step": 36910 + }, + { + "epoch": 1.4255376655469323, + "grad_norm": 1.210170865058899, + "learning_rate": 0.00010496672973216471, + "loss": 0.1391, + "step": 36920 + }, + { + "epoch": 1.425923780840959, + "grad_norm": 0.6248244047164917, + "learning_rate": 0.00010494098871256291, + "loss": 0.2898, + "step": 36930 + }, + { + "epoch": 1.426309896134986, + "grad_norm": 2.1100337505340576, + "learning_rate": 0.00010491524769296113, + "loss": 0.1198, + "step": 36940 + }, + { + "epoch": 1.4266960114290126, + "grad_norm": 2.5673348903656006, + "learning_rate": 0.00010488950667335933, + "loss": 0.2284, + "step": 36950 + }, + { + "epoch": 1.4270821267230396, + "grad_norm": 1.868195652961731, + "learning_rate": 0.00010486376565375755, + "loss": 0.3738, + "step": 36960 + }, + { + "epoch": 1.4274682420170663, + "grad_norm": 1.0951671600341797, + "learning_rate": 0.00010483802463415577, + "loss": 0.1424, + "step": 36970 + }, + { + "epoch": 1.4278543573110931, + "grad_norm": 0.4791143536567688, + "learning_rate": 0.00010481228361455397, + "loss": 0.1445, + "step": 36980 + }, + { + "epoch": 1.4282404726051199, + "grad_norm": 3.2477540969848633, + "learning_rate": 0.0001047865425949522, + "loss": 0.2676, + "step": 36990 + }, + { + "epoch": 1.4286265878991466, + "grad_norm": 0.8082342147827148, + "learning_rate": 0.0001047608015753504, + "loss": 0.2295, + "step": 37000 + }, + { + "epoch": 1.4290127031931734, + "grad_norm": 2.3048954010009766, + "learning_rate": 0.00010473506055574863, + "loss": 0.1996, + "step": 37010 + }, + { + "epoch": 1.4293988184872002, + "grad_norm": 0.42648783326148987, + "learning_rate": 0.00010470931953614683, + "loss": 0.2549, + "step": 37020 + }, + { + "epoch": 1.4297849337812272, + "grad_norm": 1.8794362545013428, + "learning_rate": 0.00010468357851654504, + "loss": 0.1929, + "step": 37030 + }, + { + "epoch": 1.430171049075254, + "grad_norm": 0.9679039120674133, + "learning_rate": 0.00010465783749694327, + "loss": 0.1544, + "step": 37040 + }, + { + "epoch": 1.4305571643692807, + "grad_norm": 0.7789367437362671, + "learning_rate": 0.00010463209647734147, + "loss": 0.2251, + "step": 37050 + }, + { + "epoch": 1.4309432796633075, + "grad_norm": 1.1705437898635864, + "learning_rate": 0.0001046063554577397, + "loss": 0.1567, + "step": 37060 + }, + { + "epoch": 1.4313293949573342, + "grad_norm": 0.13087430596351624, + "learning_rate": 0.00010458061443813789, + "loss": 0.3237, + "step": 37070 + }, + { + "epoch": 1.431715510251361, + "grad_norm": 1.0658761262893677, + "learning_rate": 0.00010455487341853612, + "loss": 0.24, + "step": 37080 + }, + { + "epoch": 1.4321016255453878, + "grad_norm": 1.686922311782837, + "learning_rate": 0.00010452913239893433, + "loss": 0.1911, + "step": 37090 + }, + { + "epoch": 1.4324877408394148, + "grad_norm": 2.010221481323242, + "learning_rate": 0.00010450339137933253, + "loss": 0.4569, + "step": 37100 + }, + { + "epoch": 1.4328738561334413, + "grad_norm": 0.8007562160491943, + "learning_rate": 0.00010447765035973076, + "loss": 0.1198, + "step": 37110 + }, + { + "epoch": 1.4332599714274683, + "grad_norm": 
0.5455211997032166, + "learning_rate": 0.00010445190934012896, + "loss": 0.2179, + "step": 37120 + }, + { + "epoch": 1.433646086721495, + "grad_norm": 1.4133542776107788, + "learning_rate": 0.00010442616832052719, + "loss": 0.45, + "step": 37130 + }, + { + "epoch": 1.4340322020155218, + "grad_norm": 1.6218222379684448, + "learning_rate": 0.00010440042730092539, + "loss": 0.19, + "step": 37140 + }, + { + "epoch": 1.4344183173095486, + "grad_norm": 0.6425970196723938, + "learning_rate": 0.00010437468628132361, + "loss": 0.2345, + "step": 37150 + }, + { + "epoch": 1.4348044326035754, + "grad_norm": 0.4344918131828308, + "learning_rate": 0.00010434894526172183, + "loss": 0.2532, + "step": 37160 + }, + { + "epoch": 1.4351905478976021, + "grad_norm": 0.6628998517990112, + "learning_rate": 0.00010432320424212003, + "loss": 0.2281, + "step": 37170 + }, + { + "epoch": 1.435576663191629, + "grad_norm": 0.8695842027664185, + "learning_rate": 0.00010429746322251825, + "loss": 0.2796, + "step": 37180 + }, + { + "epoch": 1.435962778485656, + "grad_norm": 0.16481854021549225, + "learning_rate": 0.00010427172220291645, + "loss": 0.2333, + "step": 37190 + }, + { + "epoch": 1.4363488937796827, + "grad_norm": 0.7194454073905945, + "learning_rate": 0.00010424598118331468, + "loss": 0.1413, + "step": 37200 + }, + { + "epoch": 1.4367350090737094, + "grad_norm": 4.845508575439453, + "learning_rate": 0.00010422024016371288, + "loss": 0.3944, + "step": 37210 + }, + { + "epoch": 1.4371211243677362, + "grad_norm": 2.6386618614196777, + "learning_rate": 0.0001041944991441111, + "loss": 0.3037, + "step": 37220 + }, + { + "epoch": 1.437507239661763, + "grad_norm": 0.4089922308921814, + "learning_rate": 0.00010416875812450932, + "loss": 0.2985, + "step": 37230 + }, + { + "epoch": 1.4378933549557897, + "grad_norm": 1.456944465637207, + "learning_rate": 0.00010414301710490752, + "loss": 0.3031, + "step": 37240 + }, + { + "epoch": 1.4382794702498165, + "grad_norm": 1.301829218864441, + "learning_rate": 0.00010411727608530575, + "loss": 0.2578, + "step": 37250 + }, + { + "epoch": 1.4386655855438435, + "grad_norm": 1.2072703838348389, + "learning_rate": 0.00010409153506570395, + "loss": 0.391, + "step": 37260 + }, + { + "epoch": 1.4390517008378703, + "grad_norm": 0.5538531541824341, + "learning_rate": 0.00010406579404610217, + "loss": 0.1867, + "step": 37270 + }, + { + "epoch": 1.439437816131897, + "grad_norm": 1.0898678302764893, + "learning_rate": 0.0001040400530265004, + "loss": 0.2112, + "step": 37280 + }, + { + "epoch": 1.4398239314259238, + "grad_norm": 1.5769239664077759, + "learning_rate": 0.0001040143120068986, + "loss": 0.3121, + "step": 37290 + }, + { + "epoch": 1.4402100467199506, + "grad_norm": 0.3964422345161438, + "learning_rate": 0.00010398857098729681, + "loss": 0.196, + "step": 37300 + }, + { + "epoch": 1.4405961620139773, + "grad_norm": 1.0268182754516602, + "learning_rate": 0.00010396282996769501, + "loss": 0.1785, + "step": 37310 + }, + { + "epoch": 1.440982277308004, + "grad_norm": 1.750826358795166, + "learning_rate": 0.00010393708894809324, + "loss": 0.3654, + "step": 37320 + }, + { + "epoch": 1.441368392602031, + "grad_norm": 1.1231745481491089, + "learning_rate": 0.00010391134792849144, + "loss": 0.2594, + "step": 37330 + }, + { + "epoch": 1.4417545078960576, + "grad_norm": 0.2897786498069763, + "learning_rate": 0.00010388560690888967, + "loss": 0.3483, + "step": 37340 + }, + { + "epoch": 1.4421406231900846, + "grad_norm": 0.07170752435922623, + "learning_rate": 0.00010385986588928789, + "loss": 
0.1462, + "step": 37350 + }, + { + "epoch": 1.4425267384841114, + "grad_norm": 3.326099395751953, + "learning_rate": 0.00010383412486968609, + "loss": 0.3785, + "step": 37360 + }, + { + "epoch": 1.4429128537781382, + "grad_norm": 0.5063263773918152, + "learning_rate": 0.0001038083838500843, + "loss": 0.2974, + "step": 37370 + }, + { + "epoch": 1.443298969072165, + "grad_norm": 1.160088062286377, + "learning_rate": 0.00010378264283048252, + "loss": 0.3003, + "step": 37380 + }, + { + "epoch": 1.4436850843661917, + "grad_norm": 0.7043284177780151, + "learning_rate": 0.00010375690181088073, + "loss": 0.3193, + "step": 37390 + }, + { + "epoch": 1.4440711996602185, + "grad_norm": 0.2916620373725891, + "learning_rate": 0.00010373116079127896, + "loss": 0.235, + "step": 37400 + }, + { + "epoch": 1.4444573149542452, + "grad_norm": 2.1940219402313232, + "learning_rate": 0.00010370541977167716, + "loss": 0.2657, + "step": 37410 + }, + { + "epoch": 1.4448434302482722, + "grad_norm": 1.849794626235962, + "learning_rate": 0.00010367967875207539, + "loss": 0.3121, + "step": 37420 + }, + { + "epoch": 1.445229545542299, + "grad_norm": 1.1139589548110962, + "learning_rate": 0.00010365393773247359, + "loss": 0.2179, + "step": 37430 + }, + { + "epoch": 1.4456156608363258, + "grad_norm": 0.917142927646637, + "learning_rate": 0.0001036281967128718, + "loss": 0.2406, + "step": 37440 + }, + { + "epoch": 1.4460017761303525, + "grad_norm": 1.6973673105239868, + "learning_rate": 0.00010360245569327001, + "loss": 0.3236, + "step": 37450 + }, + { + "epoch": 1.4463878914243793, + "grad_norm": 0.3979933559894562, + "learning_rate": 0.00010357671467366823, + "loss": 0.1477, + "step": 37460 + }, + { + "epoch": 1.446774006718406, + "grad_norm": 0.8938451409339905, + "learning_rate": 0.00010355097365406645, + "loss": 0.116, + "step": 37470 + }, + { + "epoch": 1.4471601220124328, + "grad_norm": 1.068787693977356, + "learning_rate": 0.00010352523263446465, + "loss": 0.2342, + "step": 37480 + }, + { + "epoch": 1.4475462373064598, + "grad_norm": 1.1985591650009155, + "learning_rate": 0.00010349949161486288, + "loss": 0.2423, + "step": 37490 + }, + { + "epoch": 1.4479323526004866, + "grad_norm": 1.0383973121643066, + "learning_rate": 0.00010347375059526108, + "loss": 0.3059, + "step": 37500 + }, + { + "epoch": 1.4483184678945134, + "grad_norm": 0.26652297377586365, + "learning_rate": 0.0001034480095756593, + "loss": 0.262, + "step": 37510 + }, + { + "epoch": 1.4487045831885401, + "grad_norm": 1.8498083353042603, + "learning_rate": 0.0001034222685560575, + "loss": 0.2122, + "step": 37520 + }, + { + "epoch": 1.4490906984825669, + "grad_norm": 1.2896068096160889, + "learning_rate": 0.00010339652753645572, + "loss": 0.2939, + "step": 37530 + }, + { + "epoch": 1.4494768137765937, + "grad_norm": 1.423343300819397, + "learning_rate": 0.00010337078651685395, + "loss": 0.3375, + "step": 37540 + }, + { + "epoch": 1.4498629290706204, + "grad_norm": 1.8248246908187866, + "learning_rate": 0.00010334504549725215, + "loss": 0.3534, + "step": 37550 + }, + { + "epoch": 1.4502490443646474, + "grad_norm": 2.3713393211364746, + "learning_rate": 0.00010331930447765037, + "loss": 0.2795, + "step": 37560 + }, + { + "epoch": 1.450635159658674, + "grad_norm": 2.1431849002838135, + "learning_rate": 0.00010329356345804857, + "loss": 0.2503, + "step": 37570 + }, + { + "epoch": 1.451021274952701, + "grad_norm": 1.6521297693252563, + "learning_rate": 0.0001032678224384468, + "loss": 0.1789, + "step": 37580 + }, + { + "epoch": 1.4514073902467277, + 
"grad_norm": 1.5589754581451416, + "learning_rate": 0.00010324208141884501, + "loss": 0.3201, + "step": 37590 + }, + { + "epoch": 1.4517935055407545, + "grad_norm": 0.4339803159236908, + "learning_rate": 0.00010321634039924321, + "loss": 0.2964, + "step": 37600 + }, + { + "epoch": 1.4521796208347812, + "grad_norm": 0.30054792761802673, + "learning_rate": 0.00010319059937964144, + "loss": 0.3477, + "step": 37610 + }, + { + "epoch": 1.452565736128808, + "grad_norm": 2.2864038944244385, + "learning_rate": 0.00010316485836003964, + "loss": 0.2849, + "step": 37620 + }, + { + "epoch": 1.4529518514228348, + "grad_norm": 1.8392651081085205, + "learning_rate": 0.00010313911734043787, + "loss": 0.2089, + "step": 37630 + }, + { + "epoch": 1.4533379667168616, + "grad_norm": 1.0444347858428955, + "learning_rate": 0.00010311337632083607, + "loss": 0.2306, + "step": 37640 + }, + { + "epoch": 1.4537240820108885, + "grad_norm": 1.265647053718567, + "learning_rate": 0.00010308763530123429, + "loss": 0.2192, + "step": 37650 + }, + { + "epoch": 1.4541101973049153, + "grad_norm": 1.0464913845062256, + "learning_rate": 0.0001030618942816325, + "loss": 0.2857, + "step": 37660 + }, + { + "epoch": 1.454496312598942, + "grad_norm": 0.5931240320205688, + "learning_rate": 0.0001030361532620307, + "loss": 0.1751, + "step": 37670 + }, + { + "epoch": 1.4548824278929688, + "grad_norm": 2.0155346393585205, + "learning_rate": 0.00010301041224242893, + "loss": 0.4124, + "step": 37680 + }, + { + "epoch": 1.4552685431869956, + "grad_norm": 2.843345880508423, + "learning_rate": 0.00010298467122282713, + "loss": 0.2119, + "step": 37690 + }, + { + "epoch": 1.4556546584810224, + "grad_norm": 1.977612018585205, + "learning_rate": 0.00010295893020322536, + "loss": 0.271, + "step": 37700 + }, + { + "epoch": 1.4560407737750491, + "grad_norm": 1.2862237691879272, + "learning_rate": 0.00010293318918362356, + "loss": 0.4147, + "step": 37710 + }, + { + "epoch": 1.4564268890690761, + "grad_norm": 0.8235340118408203, + "learning_rate": 0.00010290744816402179, + "loss": 0.1918, + "step": 37720 + }, + { + "epoch": 1.456813004363103, + "grad_norm": 1.0351753234863281, + "learning_rate": 0.00010288170714442, + "loss": 0.2343, + "step": 37730 + }, + { + "epoch": 1.4571991196571297, + "grad_norm": 0.2567160725593567, + "learning_rate": 0.0001028559661248182, + "loss": 0.2664, + "step": 37740 + }, + { + "epoch": 1.4575852349511564, + "grad_norm": 2.7498669624328613, + "learning_rate": 0.00010283022510521643, + "loss": 0.2839, + "step": 37750 + }, + { + "epoch": 1.4579713502451832, + "grad_norm": 0.7125422358512878, + "learning_rate": 0.00010280448408561463, + "loss": 0.2411, + "step": 37760 + }, + { + "epoch": 1.45835746553921, + "grad_norm": 0.720761239528656, + "learning_rate": 0.00010277874306601285, + "loss": 0.315, + "step": 37770 + }, + { + "epoch": 1.4587435808332367, + "grad_norm": 3.188563346862793, + "learning_rate": 0.00010275300204641108, + "loss": 0.3945, + "step": 37780 + }, + { + "epoch": 1.4591296961272637, + "grad_norm": 1.23422110080719, + "learning_rate": 0.00010272726102680928, + "loss": 0.1828, + "step": 37790 + }, + { + "epoch": 1.4595158114212903, + "grad_norm": 1.1572456359863281, + "learning_rate": 0.00010270152000720749, + "loss": 0.1687, + "step": 37800 + }, + { + "epoch": 1.4599019267153173, + "grad_norm": 0.6565262079238892, + "learning_rate": 0.00010267577898760569, + "loss": 0.3145, + "step": 37810 + }, + { + "epoch": 1.460288042009344, + "grad_norm": 0.6239646673202515, + "learning_rate": 0.00010265003796800392, + 
"loss": 0.2381, + "step": 37820 + }, + { + "epoch": 1.4606741573033708, + "grad_norm": 0.7578912377357483, + "learning_rate": 0.00010262429694840212, + "loss": 0.2019, + "step": 37830 + }, + { + "epoch": 1.4610602725973976, + "grad_norm": 1.0978549718856812, + "learning_rate": 0.00010259855592880035, + "loss": 0.2477, + "step": 37840 + }, + { + "epoch": 1.4614463878914243, + "grad_norm": 0.3615519404411316, + "learning_rate": 0.00010257281490919857, + "loss": 0.4241, + "step": 37850 + }, + { + "epoch": 1.461832503185451, + "grad_norm": 0.10355047881603241, + "learning_rate": 0.00010254707388959677, + "loss": 0.1181, + "step": 37860 + }, + { + "epoch": 1.4622186184794779, + "grad_norm": 0.23957425355911255, + "learning_rate": 0.00010252133286999499, + "loss": 0.1675, + "step": 37870 + }, + { + "epoch": 1.4626047337735049, + "grad_norm": 1.7581062316894531, + "learning_rate": 0.00010249559185039318, + "loss": 0.241, + "step": 37880 + }, + { + "epoch": 1.4629908490675316, + "grad_norm": 1.660989761352539, + "learning_rate": 0.00010246985083079141, + "loss": 0.2099, + "step": 37890 + }, + { + "epoch": 1.4633769643615584, + "grad_norm": 1.1359142065048218, + "learning_rate": 0.00010244410981118964, + "loss": 0.3686, + "step": 37900 + }, + { + "epoch": 1.4637630796555852, + "grad_norm": 0.9489149451255798, + "learning_rate": 0.00010241836879158784, + "loss": 0.3616, + "step": 37910 + }, + { + "epoch": 1.464149194949612, + "grad_norm": 1.5209956169128418, + "learning_rate": 0.00010239262777198607, + "loss": 0.3159, + "step": 37920 + }, + { + "epoch": 1.4645353102436387, + "grad_norm": 0.848943293094635, + "learning_rate": 0.00010236688675238427, + "loss": 0.3228, + "step": 37930 + }, + { + "epoch": 1.4649214255376655, + "grad_norm": 1.1476777791976929, + "learning_rate": 0.00010234114573278248, + "loss": 0.1873, + "step": 37940 + }, + { + "epoch": 1.4653075408316925, + "grad_norm": 1.4093862771987915, + "learning_rate": 0.00010231540471318068, + "loss": 0.5554, + "step": 37950 + }, + { + "epoch": 1.465693656125719, + "grad_norm": 0.17349161207675934, + "learning_rate": 0.0001022896636935789, + "loss": 0.2992, + "step": 37960 + }, + { + "epoch": 1.466079771419746, + "grad_norm": 0.28885993361473083, + "learning_rate": 0.00010226392267397713, + "loss": 0.2618, + "step": 37970 + }, + { + "epoch": 1.4664658867137728, + "grad_norm": 1.1087830066680908, + "learning_rate": 0.00010223818165437533, + "loss": 0.2647, + "step": 37980 + }, + { + "epoch": 1.4668520020077995, + "grad_norm": 0.08338876068592072, + "learning_rate": 0.00010221244063477356, + "loss": 0.3057, + "step": 37990 + }, + { + "epoch": 1.4672381173018263, + "grad_norm": 2.159362316131592, + "learning_rate": 0.00010218669961517176, + "loss": 0.1499, + "step": 38000 + }, + { + "epoch": 1.467624232595853, + "grad_norm": 0.8207988142967224, + "learning_rate": 0.00010216095859556997, + "loss": 0.1482, + "step": 38010 + }, + { + "epoch": 1.46801034788988, + "grad_norm": 0.5458611845970154, + "learning_rate": 0.00010213521757596818, + "loss": 0.2794, + "step": 38020 + }, + { + "epoch": 1.4683964631839066, + "grad_norm": 1.6955047845840454, + "learning_rate": 0.0001021094765563664, + "loss": 0.2627, + "step": 38030 + }, + { + "epoch": 1.4687825784779336, + "grad_norm": 0.9796440601348877, + "learning_rate": 0.00010208373553676463, + "loss": 0.1924, + "step": 38040 + }, + { + "epoch": 1.4691686937719604, + "grad_norm": 0.9906508326530457, + "learning_rate": 0.00010205799451716282, + "loss": 0.1597, + "step": 38050 + }, + { + "epoch": 
1.4695548090659871, + "grad_norm": 1.8590656518936157, + "learning_rate": 0.00010203225349756105, + "loss": 0.2874, + "step": 38060 + }, + { + "epoch": 1.469940924360014, + "grad_norm": 1.6403672695159912, + "learning_rate": 0.00010200651247795925, + "loss": 0.2926, + "step": 38070 + }, + { + "epoch": 1.4703270396540407, + "grad_norm": 0.4410895109176636, + "learning_rate": 0.00010198077145835746, + "loss": 0.1368, + "step": 38080 + }, + { + "epoch": 1.4707131549480674, + "grad_norm": 2.4955286979675293, + "learning_rate": 0.00010195503043875569, + "loss": 0.3195, + "step": 38090 + }, + { + "epoch": 1.4710992702420942, + "grad_norm": 1.1799029111862183, + "learning_rate": 0.00010192928941915389, + "loss": 0.1867, + "step": 38100 + }, + { + "epoch": 1.4714853855361212, + "grad_norm": 0.7959389090538025, + "learning_rate": 0.00010190354839955212, + "loss": 0.2988, + "step": 38110 + }, + { + "epoch": 1.471871500830148, + "grad_norm": 2.7750720977783203, + "learning_rate": 0.00010187780737995032, + "loss": 0.1886, + "step": 38120 + }, + { + "epoch": 1.4722576161241747, + "grad_norm": 1.5834373235702515, + "learning_rate": 0.00010185206636034854, + "loss": 0.3542, + "step": 38130 + }, + { + "epoch": 1.4726437314182015, + "grad_norm": 1.9757747650146484, + "learning_rate": 0.00010182632534074674, + "loss": 0.302, + "step": 38140 + }, + { + "epoch": 1.4730298467122283, + "grad_norm": 1.1752204895019531, + "learning_rate": 0.00010180058432114497, + "loss": 0.3535, + "step": 38150 + }, + { + "epoch": 1.473415962006255, + "grad_norm": 0.3877789378166199, + "learning_rate": 0.00010177484330154318, + "loss": 0.2508, + "step": 38160 + }, + { + "epoch": 1.4738020773002818, + "grad_norm": 0.13749545812606812, + "learning_rate": 0.00010174910228194138, + "loss": 0.2141, + "step": 38170 + }, + { + "epoch": 1.4741881925943088, + "grad_norm": 1.3663641214370728, + "learning_rate": 0.00010172336126233961, + "loss": 0.3231, + "step": 38180 + }, + { + "epoch": 1.4745743078883353, + "grad_norm": 1.6267393827438354, + "learning_rate": 0.00010169762024273781, + "loss": 0.3233, + "step": 38190 + }, + { + "epoch": 1.4749604231823623, + "grad_norm": 0.2993789315223694, + "learning_rate": 0.00010167187922313604, + "loss": 0.28, + "step": 38200 + }, + { + "epoch": 1.475346538476389, + "grad_norm": 0.16693222522735596, + "learning_rate": 0.00010164613820353424, + "loss": 0.188, + "step": 38210 + }, + { + "epoch": 1.4757326537704158, + "grad_norm": 0.6939979791641235, + "learning_rate": 0.00010162039718393246, + "loss": 0.263, + "step": 38220 + }, + { + "epoch": 1.4761187690644426, + "grad_norm": 0.37910985946655273, + "learning_rate": 0.00010159465616433068, + "loss": 0.1963, + "step": 38230 + }, + { + "epoch": 1.4765048843584694, + "grad_norm": 1.782188892364502, + "learning_rate": 0.00010156891514472888, + "loss": 0.3814, + "step": 38240 + }, + { + "epoch": 1.4768909996524964, + "grad_norm": 1.159278392791748, + "learning_rate": 0.0001015431741251271, + "loss": 0.2043, + "step": 38250 + }, + { + "epoch": 1.477277114946523, + "grad_norm": 1.09486985206604, + "learning_rate": 0.0001015174331055253, + "loss": 0.2128, + "step": 38260 + }, + { + "epoch": 1.47766323024055, + "grad_norm": 0.36655205488204956, + "learning_rate": 0.00010149169208592353, + "loss": 0.298, + "step": 38270 + }, + { + "epoch": 1.4780493455345767, + "grad_norm": 0.8908851742744446, + "learning_rate": 0.00010146595106632176, + "loss": 0.3707, + "step": 38280 + }, + { + "epoch": 1.4784354608286034, + "grad_norm": 0.251338928937912, + "learning_rate": 
0.00010144021004671996, + "loss": 0.2495, + "step": 38290 + }, + { + "epoch": 1.4788215761226302, + "grad_norm": 1.0613712072372437, + "learning_rate": 0.00010141446902711817, + "loss": 0.2112, + "step": 38300 + }, + { + "epoch": 1.479207691416657, + "grad_norm": 1.459799885749817, + "learning_rate": 0.00010138872800751637, + "loss": 0.2595, + "step": 38310 + }, + { + "epoch": 1.4795938067106837, + "grad_norm": 2.6898603439331055, + "learning_rate": 0.0001013629869879146, + "loss": 0.2758, + "step": 38320 + }, + { + "epoch": 1.4799799220047105, + "grad_norm": 0.19628773629665375, + "learning_rate": 0.0001013372459683128, + "loss": 0.1843, + "step": 38330 + }, + { + "epoch": 1.4803660372987375, + "grad_norm": 2.0871078968048096, + "learning_rate": 0.00010131150494871102, + "loss": 0.1661, + "step": 38340 + }, + { + "epoch": 1.4807521525927643, + "grad_norm": 0.7689336538314819, + "learning_rate": 0.00010128576392910925, + "loss": 0.157, + "step": 38350 + }, + { + "epoch": 1.481138267886791, + "grad_norm": 1.4471644163131714, + "learning_rate": 0.00010126002290950745, + "loss": 0.2159, + "step": 38360 + }, + { + "epoch": 1.4815243831808178, + "grad_norm": 2.198559522628784, + "learning_rate": 0.00010123428188990566, + "loss": 0.5609, + "step": 38370 + }, + { + "epoch": 1.4819104984748446, + "grad_norm": 0.16012130677700043, + "learning_rate": 0.00010120854087030386, + "loss": 0.1979, + "step": 38380 + }, + { + "epoch": 1.4822966137688713, + "grad_norm": 1.0222225189208984, + "learning_rate": 0.00010118279985070209, + "loss": 0.1547, + "step": 38390 + }, + { + "epoch": 1.482682729062898, + "grad_norm": 2.7192416191101074, + "learning_rate": 0.00010115705883110032, + "loss": 0.4942, + "step": 38400 + }, + { + "epoch": 1.483068844356925, + "grad_norm": 1.887128472328186, + "learning_rate": 0.00010113131781149852, + "loss": 0.177, + "step": 38410 + }, + { + "epoch": 1.4834549596509516, + "grad_norm": 2.7628560066223145, + "learning_rate": 0.00010110557679189674, + "loss": 0.2931, + "step": 38420 + }, + { + "epoch": 1.4838410749449786, + "grad_norm": 0.3852572739124298, + "learning_rate": 0.00010107983577229494, + "loss": 0.3392, + "step": 38430 + }, + { + "epoch": 1.4842271902390054, + "grad_norm": 1.047448992729187, + "learning_rate": 0.00010105409475269316, + "loss": 0.3741, + "step": 38440 + }, + { + "epoch": 1.4846133055330322, + "grad_norm": 1.4930602312088013, + "learning_rate": 0.00010102835373309136, + "loss": 0.2564, + "step": 38450 + }, + { + "epoch": 1.484999420827059, + "grad_norm": 1.3012608289718628, + "learning_rate": 0.00010100261271348958, + "loss": 0.3376, + "step": 38460 + }, + { + "epoch": 1.4853855361210857, + "grad_norm": 2.163942337036133, + "learning_rate": 0.00010097687169388781, + "loss": 0.3548, + "step": 38470 + }, + { + "epoch": 1.4857716514151125, + "grad_norm": 1.864189624786377, + "learning_rate": 0.00010095113067428601, + "loss": 0.165, + "step": 38480 + }, + { + "epoch": 1.4861577667091392, + "grad_norm": 0.5661312341690063, + "learning_rate": 0.00010092538965468424, + "loss": 0.1764, + "step": 38490 + }, + { + "epoch": 1.4865438820031662, + "grad_norm": 0.13517481088638306, + "learning_rate": 0.00010089964863508244, + "loss": 0.5223, + "step": 38500 + }, + { + "epoch": 1.486929997297193, + "grad_norm": 0.665143609046936, + "learning_rate": 0.00010087390761548065, + "loss": 0.1943, + "step": 38510 + }, + { + "epoch": 1.4873161125912198, + "grad_norm": 1.2759610414505005, + "learning_rate": 0.00010084816659587885, + "loss": 0.3023, + "step": 38520 + }, + { + 
"epoch": 1.4877022278852465, + "grad_norm": 1.3209573030471802, + "learning_rate": 0.00010082242557627708, + "loss": 0.1101, + "step": 38530 + }, + { + "epoch": 1.4880883431792733, + "grad_norm": 1.2501552104949951, + "learning_rate": 0.0001007966845566753, + "loss": 0.3931, + "step": 38540 + }, + { + "epoch": 1.4884744584733, + "grad_norm": 0.6862074732780457, + "learning_rate": 0.0001007709435370735, + "loss": 0.4093, + "step": 38550 + }, + { + "epoch": 1.4888605737673268, + "grad_norm": 1.90501070022583, + "learning_rate": 0.00010074520251747173, + "loss": 0.2553, + "step": 38560 + }, + { + "epoch": 1.4892466890613538, + "grad_norm": 1.6547000408172607, + "learning_rate": 0.00010071946149786993, + "loss": 0.1558, + "step": 38570 + }, + { + "epoch": 1.4896328043553806, + "grad_norm": 0.35097751021385193, + "learning_rate": 0.00010069372047826814, + "loss": 0.2253, + "step": 38580 + }, + { + "epoch": 1.4900189196494074, + "grad_norm": 0.15141837298870087, + "learning_rate": 0.00010066797945866637, + "loss": 0.2124, + "step": 38590 + }, + { + "epoch": 1.4904050349434341, + "grad_norm": 1.7070786952972412, + "learning_rate": 0.00010064223843906457, + "loss": 0.3181, + "step": 38600 + }, + { + "epoch": 1.490791150237461, + "grad_norm": 0.5400305390357971, + "learning_rate": 0.0001006164974194628, + "loss": 0.3203, + "step": 38610 + }, + { + "epoch": 1.4911772655314877, + "grad_norm": 1.6475050449371338, + "learning_rate": 0.000100590756399861, + "loss": 0.2965, + "step": 38620 + }, + { + "epoch": 1.4915633808255144, + "grad_norm": 0.21372176706790924, + "learning_rate": 0.00010056501538025922, + "loss": 0.1456, + "step": 38630 + }, + { + "epoch": 1.4919494961195414, + "grad_norm": 0.24179309606552124, + "learning_rate": 0.00010053927436065742, + "loss": 0.2077, + "step": 38640 + }, + { + "epoch": 1.492335611413568, + "grad_norm": 1.2079945802688599, + "learning_rate": 0.00010051353334105564, + "loss": 0.1554, + "step": 38650 + }, + { + "epoch": 1.492721726707595, + "grad_norm": 1.8915836811065674, + "learning_rate": 0.00010048779232145386, + "loss": 0.3815, + "step": 38660 + }, + { + "epoch": 1.4931078420016217, + "grad_norm": 1.8128750324249268, + "learning_rate": 0.00010046205130185206, + "loss": 0.1944, + "step": 38670 + }, + { + "epoch": 1.4934939572956485, + "grad_norm": 2.0955018997192383, + "learning_rate": 0.00010043631028225029, + "loss": 0.2468, + "step": 38680 + }, + { + "epoch": 1.4938800725896753, + "grad_norm": 3.0578064918518066, + "learning_rate": 0.00010041056926264849, + "loss": 0.2186, + "step": 38690 + }, + { + "epoch": 1.494266187883702, + "grad_norm": 0.7817699909210205, + "learning_rate": 0.00010038482824304672, + "loss": 0.1154, + "step": 38700 + }, + { + "epoch": 1.4946523031777288, + "grad_norm": 6.556485176086426, + "learning_rate": 0.00010035908722344493, + "loss": 0.4752, + "step": 38710 + }, + { + "epoch": 1.4950384184717556, + "grad_norm": 2.1970055103302, + "learning_rate": 0.00010033334620384314, + "loss": 0.3234, + "step": 38720 + }, + { + "epoch": 1.4954245337657825, + "grad_norm": 1.8929531574249268, + "learning_rate": 0.00010030760518424136, + "loss": 0.1605, + "step": 38730 + }, + { + "epoch": 1.4958106490598093, + "grad_norm": 0.3992670178413391, + "learning_rate": 0.00010028186416463956, + "loss": 0.2417, + "step": 38740 + }, + { + "epoch": 1.496196764353836, + "grad_norm": 1.7508872747421265, + "learning_rate": 0.00010025612314503778, + "loss": 0.329, + "step": 38750 + }, + { + "epoch": 1.4965828796478629, + "grad_norm": 0.24950659275054932, + 
"learning_rate": 0.00010023038212543598, + "loss": 0.1159, + "step": 38760 + }, + { + "epoch": 1.4969689949418896, + "grad_norm": 1.5187748670578003, + "learning_rate": 0.00010020464110583421, + "loss": 0.1827, + "step": 38770 + }, + { + "epoch": 1.4973551102359164, + "grad_norm": 1.1223959922790527, + "learning_rate": 0.00010017890008623242, + "loss": 0.1935, + "step": 38780 + }, + { + "epoch": 1.4977412255299432, + "grad_norm": 0.9355156421661377, + "learning_rate": 0.00010015315906663064, + "loss": 0.2225, + "step": 38790 + }, + { + "epoch": 1.4981273408239701, + "grad_norm": 0.4834296405315399, + "learning_rate": 0.00010012741804702885, + "loss": 0.3077, + "step": 38800 + }, + { + "epoch": 1.498513456117997, + "grad_norm": 1.0983386039733887, + "learning_rate": 0.00010010167702742705, + "loss": 0.183, + "step": 38810 + }, + { + "epoch": 1.4988995714120237, + "grad_norm": 0.8350847959518433, + "learning_rate": 0.00010007593600782528, + "loss": 0.3972, + "step": 38820 + }, + { + "epoch": 1.4992856867060504, + "grad_norm": 0.8200152516365051, + "learning_rate": 0.00010005019498822348, + "loss": 0.2043, + "step": 38830 + }, + { + "epoch": 1.4996718020000772, + "grad_norm": 0.9136185050010681, + "learning_rate": 0.0001000244539686217, + "loss": 0.1727, + "step": 38840 + }, + { + "epoch": 1.500057917294104, + "grad_norm": 0.8466988205909729, + "learning_rate": 9.999871294901992e-05, + "loss": 0.2119, + "step": 38850 + }, + { + "epoch": 1.5004440325881307, + "grad_norm": 0.4185144305229187, + "learning_rate": 9.997297192941813e-05, + "loss": 0.4046, + "step": 38860 + }, + { + "epoch": 1.5008301478821577, + "grad_norm": 2.232264518737793, + "learning_rate": 9.994723090981634e-05, + "loss": 0.304, + "step": 38870 + }, + { + "epoch": 1.5012162631761843, + "grad_norm": 0.13785889744758606, + "learning_rate": 9.992148989021456e-05, + "loss": 0.1045, + "step": 38880 + }, + { + "epoch": 1.5016023784702113, + "grad_norm": 1.6270711421966553, + "learning_rate": 9.989574887061277e-05, + "loss": 0.2791, + "step": 38890 + }, + { + "epoch": 1.501988493764238, + "grad_norm": 0.08486157655715942, + "learning_rate": 9.987000785101098e-05, + "loss": 0.178, + "step": 38900 + }, + { + "epoch": 1.5023746090582648, + "grad_norm": 1.862197995185852, + "learning_rate": 9.98442668314092e-05, + "loss": 0.2461, + "step": 38910 + }, + { + "epoch": 1.5027607243522916, + "grad_norm": 2.752070903778076, + "learning_rate": 9.981852581180741e-05, + "loss": 0.4299, + "step": 38920 + }, + { + "epoch": 1.5031468396463183, + "grad_norm": 2.08542537689209, + "learning_rate": 9.979278479220562e-05, + "loss": 0.2898, + "step": 38930 + }, + { + "epoch": 1.5035329549403453, + "grad_norm": 0.8629382848739624, + "learning_rate": 9.976704377260384e-05, + "loss": 0.226, + "step": 38940 + }, + { + "epoch": 1.5039190702343719, + "grad_norm": 0.5178211331367493, + "learning_rate": 9.974130275300205e-05, + "loss": 0.2444, + "step": 38950 + }, + { + "epoch": 1.5043051855283989, + "grad_norm": 0.25908491015434265, + "learning_rate": 9.971556173340026e-05, + "loss": 0.1643, + "step": 38960 + }, + { + "epoch": 1.5046913008224254, + "grad_norm": 1.1818209886550903, + "learning_rate": 9.968982071379848e-05, + "loss": 0.3187, + "step": 38970 + }, + { + "epoch": 1.5050774161164524, + "grad_norm": 0.13186976313591003, + "learning_rate": 9.966407969419669e-05, + "loss": 0.1982, + "step": 38980 + }, + { + "epoch": 1.5054635314104792, + "grad_norm": 0.18049825727939606, + "learning_rate": 9.963833867459492e-05, + "loss": 0.1288, + "step": 38990 + }, + 
{ + "epoch": 1.505849646704506, + "grad_norm": 0.30261853337287903, + "learning_rate": 9.961259765499312e-05, + "loss": 0.1704, + "step": 39000 + }, + { + "epoch": 1.5062357619985327, + "grad_norm": 2.1437973976135254, + "learning_rate": 9.958685663539133e-05, + "loss": 0.1272, + "step": 39010 + }, + { + "epoch": 1.5066218772925595, + "grad_norm": 2.2844271659851074, + "learning_rate": 9.956111561578954e-05, + "loss": 0.1314, + "step": 39020 + }, + { + "epoch": 1.5070079925865865, + "grad_norm": 1.5845297574996948, + "learning_rate": 9.953537459618776e-05, + "loss": 0.2023, + "step": 39030 + }, + { + "epoch": 1.507394107880613, + "grad_norm": 0.7256748676300049, + "learning_rate": 9.950963357658597e-05, + "loss": 0.4165, + "step": 39040 + }, + { + "epoch": 1.50778022317464, + "grad_norm": 1.7597005367279053, + "learning_rate": 9.948389255698418e-05, + "loss": 0.4209, + "step": 39050 + }, + { + "epoch": 1.5081663384686668, + "grad_norm": 1.5487171411514282, + "learning_rate": 9.945815153738241e-05, + "loss": 0.1918, + "step": 39060 + }, + { + "epoch": 1.5085524537626935, + "grad_norm": 0.1656871736049652, + "learning_rate": 9.943241051778062e-05, + "loss": 0.3513, + "step": 39070 + }, + { + "epoch": 1.5089385690567203, + "grad_norm": 0.1451992392539978, + "learning_rate": 9.940666949817882e-05, + "loss": 0.1922, + "step": 39080 + }, + { + "epoch": 1.509324684350747, + "grad_norm": 1.1572967767715454, + "learning_rate": 9.938092847857704e-05, + "loss": 0.4263, + "step": 39090 + }, + { + "epoch": 1.509710799644774, + "grad_norm": 0.8721522092819214, + "learning_rate": 9.935518745897525e-05, + "loss": 0.3499, + "step": 39100 + }, + { + "epoch": 1.5100969149388006, + "grad_norm": 2.8486688137054443, + "learning_rate": 9.932944643937346e-05, + "loss": 0.3567, + "step": 39110 + }, + { + "epoch": 1.5104830302328276, + "grad_norm": 1.1864535808563232, + "learning_rate": 9.930370541977169e-05, + "loss": 0.2546, + "step": 39120 + }, + { + "epoch": 1.5108691455268544, + "grad_norm": 0.4636247158050537, + "learning_rate": 9.92779644001699e-05, + "loss": 0.2761, + "step": 39130 + }, + { + "epoch": 1.5112552608208811, + "grad_norm": 0.6326389908790588, + "learning_rate": 9.925222338056812e-05, + "loss": 0.252, + "step": 39140 + }, + { + "epoch": 1.511641376114908, + "grad_norm": 2.1110761165618896, + "learning_rate": 9.922648236096632e-05, + "loss": 0.5427, + "step": 39150 + }, + { + "epoch": 1.5120274914089347, + "grad_norm": 1.0927456617355347, + "learning_rate": 9.920074134136453e-05, + "loss": 0.3336, + "step": 39160 + }, + { + "epoch": 1.5124136067029617, + "grad_norm": 3.6224989891052246, + "learning_rate": 9.917500032176274e-05, + "loss": 0.3689, + "step": 39170 + }, + { + "epoch": 1.5127997219969882, + "grad_norm": 2.422492265701294, + "learning_rate": 9.914925930216097e-05, + "loss": 0.3924, + "step": 39180 + }, + { + "epoch": 1.5131858372910152, + "grad_norm": 0.22901678085327148, + "learning_rate": 9.912351828255918e-05, + "loss": 0.3082, + "step": 39190 + }, + { + "epoch": 1.5135719525850417, + "grad_norm": 0.581598162651062, + "learning_rate": 9.90977772629574e-05, + "loss": 0.3007, + "step": 39200 + }, + { + "epoch": 1.5139580678790687, + "grad_norm": 0.7196664214134216, + "learning_rate": 9.907203624335561e-05, + "loss": 0.2425, + "step": 39210 + }, + { + "epoch": 1.5143441831730955, + "grad_norm": 2.5246760845184326, + "learning_rate": 9.904629522375381e-05, + "loss": 0.3085, + "step": 39220 + }, + { + "epoch": 1.5147302984671223, + "grad_norm": 1.8397210836410522, + "learning_rate": 
9.902055420415202e-05, + "loss": 0.2388, + "step": 39230 + }, + { + "epoch": 1.515116413761149, + "grad_norm": 0.5940410494804382, + "learning_rate": 9.899481318455025e-05, + "loss": 0.1184, + "step": 39240 + }, + { + "epoch": 1.5155025290551758, + "grad_norm": 1.5379250049591064, + "learning_rate": 9.896907216494846e-05, + "loss": 0.3253, + "step": 39250 + }, + { + "epoch": 1.5158886443492028, + "grad_norm": 0.8007088303565979, + "learning_rate": 9.894333114534668e-05, + "loss": 0.3057, + "step": 39260 + }, + { + "epoch": 1.5162747596432293, + "grad_norm": 0.9321600198745728, + "learning_rate": 9.891759012574489e-05, + "loss": 0.3249, + "step": 39270 + }, + { + "epoch": 1.5166608749372563, + "grad_norm": 1.5519977807998657, + "learning_rate": 9.88918491061431e-05, + "loss": 0.3368, + "step": 39280 + }, + { + "epoch": 1.517046990231283, + "grad_norm": 0.37695613503456116, + "learning_rate": 9.88661080865413e-05, + "loss": 0.1305, + "step": 39290 + }, + { + "epoch": 1.5174331055253099, + "grad_norm": 1.9956984519958496, + "learning_rate": 9.884036706693952e-05, + "loss": 0.4854, + "step": 39300 + }, + { + "epoch": 1.5178192208193366, + "grad_norm": 1.6110823154449463, + "learning_rate": 9.881462604733774e-05, + "loss": 0.2504, + "step": 39310 + }, + { + "epoch": 1.5182053361133634, + "grad_norm": 0.41702982783317566, + "learning_rate": 9.878888502773596e-05, + "loss": 0.1532, + "step": 39320 + }, + { + "epoch": 1.5185914514073904, + "grad_norm": 2.3595950603485107, + "learning_rate": 9.876314400813417e-05, + "loss": 0.309, + "step": 39330 + }, + { + "epoch": 1.518977566701417, + "grad_norm": 1.1045889854431152, + "learning_rate": 9.873740298853238e-05, + "loss": 0.2858, + "step": 39340 + }, + { + "epoch": 1.519363681995444, + "grad_norm": 1.4641762971878052, + "learning_rate": 9.87116619689306e-05, + "loss": 0.3159, + "step": 39350 + }, + { + "epoch": 1.5197497972894707, + "grad_norm": 1.0977380275726318, + "learning_rate": 9.868592094932881e-05, + "loss": 0.229, + "step": 39360 + }, + { + "epoch": 1.5201359125834975, + "grad_norm": 0.5620018839836121, + "learning_rate": 9.866017992972702e-05, + "loss": 0.2642, + "step": 39370 + }, + { + "epoch": 1.5205220278775242, + "grad_norm": 0.36996108293533325, + "learning_rate": 9.863443891012524e-05, + "loss": 0.2314, + "step": 39380 + }, + { + "epoch": 1.520908143171551, + "grad_norm": 0.9804339408874512, + "learning_rate": 9.860869789052345e-05, + "loss": 0.2399, + "step": 39390 + }, + { + "epoch": 1.521294258465578, + "grad_norm": 0.4157778024673462, + "learning_rate": 9.858295687092166e-05, + "loss": 0.3006, + "step": 39400 + }, + { + "epoch": 1.5216803737596045, + "grad_norm": 0.5548539161682129, + "learning_rate": 9.855721585131988e-05, + "loss": 0.1816, + "step": 39410 + }, + { + "epoch": 1.5220664890536315, + "grad_norm": 0.9476989507675171, + "learning_rate": 9.853147483171809e-05, + "loss": 0.3943, + "step": 39420 + }, + { + "epoch": 1.522452604347658, + "grad_norm": 0.5183500647544861, + "learning_rate": 9.85057338121163e-05, + "loss": 0.1629, + "step": 39430 + }, + { + "epoch": 1.522838719641685, + "grad_norm": 1.4146567583084106, + "learning_rate": 9.847999279251452e-05, + "loss": 0.3829, + "step": 39440 + }, + { + "epoch": 1.5232248349357118, + "grad_norm": 2.4880552291870117, + "learning_rate": 9.845425177291273e-05, + "loss": 0.3052, + "step": 39450 + }, + { + "epoch": 1.5236109502297386, + "grad_norm": 0.43657195568084717, + "learning_rate": 9.842851075331094e-05, + "loss": 0.1691, + "step": 39460 + }, + { + "epoch": 
1.5239970655237653, + "grad_norm": 0.13798825442790985, + "learning_rate": 9.840276973370916e-05, + "loss": 0.3217, + "step": 39470 + }, + { + "epoch": 1.5243831808177921, + "grad_norm": 0.8712138533592224, + "learning_rate": 9.837702871410737e-05, + "loss": 0.2702, + "step": 39480 + }, + { + "epoch": 1.524769296111819, + "grad_norm": 1.155957579612732, + "learning_rate": 9.83512876945056e-05, + "loss": 0.2693, + "step": 39490 + }, + { + "epoch": 1.5251554114058457, + "grad_norm": 1.194615364074707, + "learning_rate": 9.83255466749038e-05, + "loss": 0.1541, + "step": 39500 + }, + { + "epoch": 1.5255415266998726, + "grad_norm": 1.8287533521652222, + "learning_rate": 9.829980565530201e-05, + "loss": 0.222, + "step": 39510 + }, + { + "epoch": 1.5259276419938994, + "grad_norm": 0.5741322636604309, + "learning_rate": 9.827406463570022e-05, + "loss": 0.2348, + "step": 39520 + }, + { + "epoch": 1.5263137572879262, + "grad_norm": 0.21659214794635773, + "learning_rate": 9.824832361609844e-05, + "loss": 0.2698, + "step": 39530 + }, + { + "epoch": 1.526699872581953, + "grad_norm": 0.953101396560669, + "learning_rate": 9.822258259649665e-05, + "loss": 0.1222, + "step": 39540 + }, + { + "epoch": 1.5270859878759797, + "grad_norm": 2.764655113220215, + "learning_rate": 9.819684157689486e-05, + "loss": 0.4735, + "step": 39550 + }, + { + "epoch": 1.5274721031700067, + "grad_norm": 0.8794540166854858, + "learning_rate": 9.817110055729309e-05, + "loss": 0.1588, + "step": 39560 + }, + { + "epoch": 1.5278582184640332, + "grad_norm": 1.0971317291259766, + "learning_rate": 9.814535953769129e-05, + "loss": 0.302, + "step": 39570 + }, + { + "epoch": 1.5282443337580602, + "grad_norm": 0.6677056550979614, + "learning_rate": 9.81196185180895e-05, + "loss": 0.3318, + "step": 39580 + }, + { + "epoch": 1.528630449052087, + "grad_norm": 1.276684045791626, + "learning_rate": 9.809387749848772e-05, + "loss": 0.2329, + "step": 39590 + }, + { + "epoch": 1.5290165643461138, + "grad_norm": 0.9192230701446533, + "learning_rate": 9.806813647888593e-05, + "loss": 0.3657, + "step": 39600 + }, + { + "epoch": 1.5294026796401405, + "grad_norm": 1.15361750125885, + "learning_rate": 9.804239545928414e-05, + "loss": 0.2462, + "step": 39610 + }, + { + "epoch": 1.5297887949341673, + "grad_norm": 1.0608477592468262, + "learning_rate": 9.801665443968237e-05, + "loss": 0.1601, + "step": 39620 + }, + { + "epoch": 1.5301749102281943, + "grad_norm": 0.07210031896829605, + "learning_rate": 9.799091342008058e-05, + "loss": 0.2454, + "step": 39630 + }, + { + "epoch": 1.5305610255222208, + "grad_norm": 0.9941250681877136, + "learning_rate": 9.796517240047878e-05, + "loss": 0.2783, + "step": 39640 + }, + { + "epoch": 1.5309471408162478, + "grad_norm": 1.3414831161499023, + "learning_rate": 9.7939431380877e-05, + "loss": 0.2342, + "step": 39650 + }, + { + "epoch": 1.5313332561102744, + "grad_norm": 1.5854885578155518, + "learning_rate": 9.791369036127521e-05, + "loss": 0.3444, + "step": 39660 + }, + { + "epoch": 1.5317193714043014, + "grad_norm": 1.068955659866333, + "learning_rate": 9.788794934167342e-05, + "loss": 0.2587, + "step": 39670 + }, + { + "epoch": 1.5321054866983281, + "grad_norm": 4.630382537841797, + "learning_rate": 9.786220832207165e-05, + "loss": 0.2821, + "step": 39680 + }, + { + "epoch": 1.532491601992355, + "grad_norm": 1.2920769453048706, + "learning_rate": 9.783646730246986e-05, + "loss": 0.2169, + "step": 39690 + }, + { + "epoch": 1.5328777172863817, + "grad_norm": 1.6936739683151245, + "learning_rate": 9.781072628286808e-05, + 
"loss": 0.2538, + "step": 39700 + }, + { + "epoch": 1.5332638325804084, + "grad_norm": 4.246237277984619, + "learning_rate": 9.778498526326629e-05, + "loss": 0.305, + "step": 39710 + }, + { + "epoch": 1.5336499478744354, + "grad_norm": 2.638601779937744, + "learning_rate": 9.775924424366449e-05, + "loss": 0.2168, + "step": 39720 + }, + { + "epoch": 1.534036063168462, + "grad_norm": 1.4180443286895752, + "learning_rate": 9.77335032240627e-05, + "loss": 0.2139, + "step": 39730 + }, + { + "epoch": 1.534422178462489, + "grad_norm": 1.251378059387207, + "learning_rate": 9.770776220446093e-05, + "loss": 0.2714, + "step": 39740 + }, + { + "epoch": 1.5348082937565157, + "grad_norm": 0.5204187035560608, + "learning_rate": 9.768202118485914e-05, + "loss": 0.2375, + "step": 39750 + }, + { + "epoch": 1.5351944090505425, + "grad_norm": 0.4135078489780426, + "learning_rate": 9.765628016525736e-05, + "loss": 0.442, + "step": 39760 + }, + { + "epoch": 1.5355805243445693, + "grad_norm": 1.9822927713394165, + "learning_rate": 9.763053914565557e-05, + "loss": 0.2238, + "step": 39770 + }, + { + "epoch": 1.535966639638596, + "grad_norm": 1.6851792335510254, + "learning_rate": 9.760479812605378e-05, + "loss": 0.4886, + "step": 39780 + }, + { + "epoch": 1.536352754932623, + "grad_norm": 1.9185349941253662, + "learning_rate": 9.757905710645198e-05, + "loss": 0.4167, + "step": 39790 + }, + { + "epoch": 1.5367388702266496, + "grad_norm": 0.2348870486021042, + "learning_rate": 9.75533160868502e-05, + "loss": 0.1849, + "step": 39800 + }, + { + "epoch": 1.5371249855206766, + "grad_norm": 0.2750287652015686, + "learning_rate": 9.752757506724842e-05, + "loss": 0.4298, + "step": 39810 + }, + { + "epoch": 1.5375111008147033, + "grad_norm": 0.28703558444976807, + "learning_rate": 9.750183404764664e-05, + "loss": 0.1431, + "step": 39820 + }, + { + "epoch": 1.53789721610873, + "grad_norm": 0.743290364742279, + "learning_rate": 9.747609302804485e-05, + "loss": 0.1993, + "step": 39830 + }, + { + "epoch": 1.5382833314027569, + "grad_norm": 0.8678677082061768, + "learning_rate": 9.745035200844306e-05, + "loss": 0.1695, + "step": 39840 + }, + { + "epoch": 1.5386694466967836, + "grad_norm": 0.18160143494606018, + "learning_rate": 9.742461098884128e-05, + "loss": 0.2615, + "step": 39850 + }, + { + "epoch": 1.5390555619908106, + "grad_norm": 0.08404600620269775, + "learning_rate": 9.739886996923947e-05, + "loss": 0.2523, + "step": 39860 + }, + { + "epoch": 1.5394416772848372, + "grad_norm": 3.393118381500244, + "learning_rate": 9.73731289496377e-05, + "loss": 0.3292, + "step": 39870 + }, + { + "epoch": 1.5398277925788642, + "grad_norm": 0.3148004412651062, + "learning_rate": 9.734738793003591e-05, + "loss": 0.0808, + "step": 39880 + }, + { + "epoch": 1.5402139078728907, + "grad_norm": 0.8423801064491272, + "learning_rate": 9.732164691043413e-05, + "loss": 0.1088, + "step": 39890 + }, + { + "epoch": 1.5406000231669177, + "grad_norm": 2.407147169113159, + "learning_rate": 9.729590589083234e-05, + "loss": 0.286, + "step": 39900 + }, + { + "epoch": 1.5409861384609445, + "grad_norm": 0.8560749292373657, + "learning_rate": 9.727016487123055e-05, + "loss": 0.1674, + "step": 39910 + }, + { + "epoch": 1.5413722537549712, + "grad_norm": 0.9724945425987244, + "learning_rate": 9.724442385162877e-05, + "loss": 0.232, + "step": 39920 + }, + { + "epoch": 1.541758369048998, + "grad_norm": 1.6911234855651855, + "learning_rate": 9.721868283202698e-05, + "loss": 0.3182, + "step": 39930 + }, + { + "epoch": 1.5421444843430248, + "grad_norm": 
0.8703460693359375, + "learning_rate": 9.71929418124252e-05, + "loss": 0.3565, + "step": 39940 + }, + { + "epoch": 1.5425305996370517, + "grad_norm": 1.4052613973617554, + "learning_rate": 9.716720079282341e-05, + "loss": 0.2037, + "step": 39950 + }, + { + "epoch": 1.5429167149310783, + "grad_norm": 0.2802957594394684, + "learning_rate": 9.714145977322162e-05, + "loss": 0.2596, + "step": 39960 + }, + { + "epoch": 1.5433028302251053, + "grad_norm": 0.22114449739456177, + "learning_rate": 9.711571875361983e-05, + "loss": 0.2302, + "step": 39970 + }, + { + "epoch": 1.543688945519132, + "grad_norm": 0.8095982074737549, + "learning_rate": 9.708997773401805e-05, + "loss": 0.2428, + "step": 39980 + }, + { + "epoch": 1.5440750608131588, + "grad_norm": 1.0990866422653198, + "learning_rate": 9.706423671441626e-05, + "loss": 0.3669, + "step": 39990 + }, + { + "epoch": 1.5444611761071856, + "grad_norm": 0.8309730291366577, + "learning_rate": 9.703849569481447e-05, + "loss": 0.2038, + "step": 40000 + }, + { + "epoch": 1.5448472914012124, + "grad_norm": 2.09492826461792, + "learning_rate": 9.701275467521269e-05, + "loss": 0.2934, + "step": 40010 + }, + { + "epoch": 1.5452334066952393, + "grad_norm": 0.33550217747688293, + "learning_rate": 9.69870136556109e-05, + "loss": 0.1582, + "step": 40020 + }, + { + "epoch": 1.5456195219892659, + "grad_norm": 1.6839581727981567, + "learning_rate": 9.696127263600911e-05, + "loss": 0.2084, + "step": 40030 + }, + { + "epoch": 1.5460056372832929, + "grad_norm": 1.293013095855713, + "learning_rate": 9.693553161640733e-05, + "loss": 0.1871, + "step": 40040 + }, + { + "epoch": 1.5463917525773194, + "grad_norm": 0.08080088347196579, + "learning_rate": 9.690979059680554e-05, + "loss": 0.4942, + "step": 40050 + }, + { + "epoch": 1.5467778678713464, + "grad_norm": 1.7121747732162476, + "learning_rate": 9.688404957720377e-05, + "loss": 0.4819, + "step": 40060 + }, + { + "epoch": 1.5471639831653732, + "grad_norm": 0.6426690816879272, + "learning_rate": 9.685830855760197e-05, + "loss": 0.2393, + "step": 40070 + }, + { + "epoch": 1.5475500984594, + "grad_norm": 1.5436782836914062, + "learning_rate": 9.683256753800018e-05, + "loss": 0.2666, + "step": 40080 + }, + { + "epoch": 1.547936213753427, + "grad_norm": 1.9316864013671875, + "learning_rate": 9.68068265183984e-05, + "loss": 0.1629, + "step": 40090 + }, + { + "epoch": 1.5483223290474535, + "grad_norm": 0.7503604292869568, + "learning_rate": 9.678108549879661e-05, + "loss": 0.2038, + "step": 40100 + }, + { + "epoch": 1.5487084443414805, + "grad_norm": 0.9826010465621948, + "learning_rate": 9.675534447919482e-05, + "loss": 0.1219, + "step": 40110 + }, + { + "epoch": 1.549094559635507, + "grad_norm": 1.6828583478927612, + "learning_rate": 9.672960345959305e-05, + "loss": 0.2283, + "step": 40120 + }, + { + "epoch": 1.549480674929534, + "grad_norm": 1.688306212425232, + "learning_rate": 9.670386243999126e-05, + "loss": 0.1785, + "step": 40130 + }, + { + "epoch": 1.5498667902235608, + "grad_norm": 1.6113176345825195, + "learning_rate": 9.667812142038946e-05, + "loss": 0.4171, + "step": 40140 + }, + { + "epoch": 1.5502529055175875, + "grad_norm": 0.33999955654144287, + "learning_rate": 9.665238040078767e-05, + "loss": 0.1528, + "step": 40150 + }, + { + "epoch": 1.5506390208116143, + "grad_norm": 0.7666870355606079, + "learning_rate": 9.662663938118589e-05, + "loss": 0.2258, + "step": 40160 + }, + { + "epoch": 1.551025136105641, + "grad_norm": 1.123090386390686, + "learning_rate": 9.66008983615841e-05, + "loss": 0.207, + "step": 40170 
+ }, + { + "epoch": 1.551411251399668, + "grad_norm": 1.4472588300704956, + "learning_rate": 9.657515734198233e-05, + "loss": 0.2083, + "step": 40180 + }, + { + "epoch": 1.5517973666936946, + "grad_norm": 0.6818589568138123, + "learning_rate": 9.654941632238054e-05, + "loss": 0.1821, + "step": 40190 + }, + { + "epoch": 1.5521834819877216, + "grad_norm": 2.31847882270813, + "learning_rate": 9.652367530277875e-05, + "loss": 0.5128, + "step": 40200 + }, + { + "epoch": 1.5525695972817484, + "grad_norm": 2.492560386657715, + "learning_rate": 9.649793428317695e-05, + "loss": 0.2461, + "step": 40210 + }, + { + "epoch": 1.5529557125757751, + "grad_norm": 0.6917121410369873, + "learning_rate": 9.647219326357517e-05, + "loss": 0.2829, + "step": 40220 + }, + { + "epoch": 1.553341827869802, + "grad_norm": 1.1944900751113892, + "learning_rate": 9.644645224397338e-05, + "loss": 0.2324, + "step": 40230 + }, + { + "epoch": 1.5537279431638287, + "grad_norm": 0.12343896180391312, + "learning_rate": 9.642071122437161e-05, + "loss": 0.1258, + "step": 40240 + }, + { + "epoch": 1.5541140584578557, + "grad_norm": 1.8493744134902954, + "learning_rate": 9.639497020476982e-05, + "loss": 0.2707, + "step": 40250 + }, + { + "epoch": 1.5545001737518822, + "grad_norm": 1.4696533679962158, + "learning_rate": 9.636922918516803e-05, + "loss": 0.199, + "step": 40260 + }, + { + "epoch": 1.5548862890459092, + "grad_norm": 0.7716092467308044, + "learning_rate": 9.634348816556625e-05, + "loss": 0.2869, + "step": 40270 + }, + { + "epoch": 1.5552724043399357, + "grad_norm": 1.3153057098388672, + "learning_rate": 9.631774714596446e-05, + "loss": 0.2164, + "step": 40280 + }, + { + "epoch": 1.5556585196339627, + "grad_norm": 1.2615609169006348, + "learning_rate": 9.629200612636266e-05, + "loss": 0.3418, + "step": 40290 + }, + { + "epoch": 1.5560446349279895, + "grad_norm": 1.7999435663223267, + "learning_rate": 9.626626510676089e-05, + "loss": 0.3062, + "step": 40300 + }, + { + "epoch": 1.5564307502220163, + "grad_norm": 1.5992902517318726, + "learning_rate": 9.62405240871591e-05, + "loss": 0.3037, + "step": 40310 + }, + { + "epoch": 1.556816865516043, + "grad_norm": 0.7515442967414856, + "learning_rate": 9.621478306755731e-05, + "loss": 0.2071, + "step": 40320 + }, + { + "epoch": 1.5572029808100698, + "grad_norm": 2.5413167476654053, + "learning_rate": 9.618904204795553e-05, + "loss": 0.569, + "step": 40330 + }, + { + "epoch": 1.5575890961040968, + "grad_norm": 2.303255319595337, + "learning_rate": 9.616330102835374e-05, + "loss": 0.201, + "step": 40340 + }, + { + "epoch": 1.5579752113981233, + "grad_norm": 1.0700573921203613, + "learning_rate": 9.613756000875195e-05, + "loss": 0.1675, + "step": 40350 + }, + { + "epoch": 1.5583613266921503, + "grad_norm": 1.3028898239135742, + "learning_rate": 9.611181898915015e-05, + "loss": 0.3423, + "step": 40360 + }, + { + "epoch": 1.558747441986177, + "grad_norm": 2.0213449001312256, + "learning_rate": 9.608607796954838e-05, + "loss": 0.161, + "step": 40370 + }, + { + "epoch": 1.5591335572802039, + "grad_norm": 0.15942837297916412, + "learning_rate": 9.60603369499466e-05, + "loss": 0.2067, + "step": 40380 + }, + { + "epoch": 1.5595196725742306, + "grad_norm": 0.8567324280738831, + "learning_rate": 9.603459593034481e-05, + "loss": 0.1156, + "step": 40390 + }, + { + "epoch": 1.5599057878682574, + "grad_norm": 0.598947286605835, + "learning_rate": 9.600885491074302e-05, + "loss": 0.2741, + "step": 40400 + }, + { + "epoch": 1.5602919031622844, + "grad_norm": 0.41864535212516785, + "learning_rate": 
9.598311389114123e-05, + "loss": 0.0979, + "step": 40410 + }, + { + "epoch": 1.560678018456311, + "grad_norm": 1.304883599281311, + "learning_rate": 9.595737287153945e-05, + "loss": 0.1525, + "step": 40420 + }, + { + "epoch": 1.561064133750338, + "grad_norm": 1.666935682296753, + "learning_rate": 9.593163185193766e-05, + "loss": 0.3392, + "step": 40430 + }, + { + "epoch": 1.5614502490443647, + "grad_norm": 0.44640687108039856, + "learning_rate": 9.590589083233587e-05, + "loss": 0.1946, + "step": 40440 + }, + { + "epoch": 1.5618363643383915, + "grad_norm": 0.8123475313186646, + "learning_rate": 9.588014981273409e-05, + "loss": 0.2985, + "step": 40450 + }, + { + "epoch": 1.5622224796324182, + "grad_norm": 1.7682442665100098, + "learning_rate": 9.58544087931323e-05, + "loss": 0.215, + "step": 40460 + }, + { + "epoch": 1.562608594926445, + "grad_norm": 0.8545176982879639, + "learning_rate": 9.582866777353051e-05, + "loss": 0.2513, + "step": 40470 + }, + { + "epoch": 1.562994710220472, + "grad_norm": 0.10042224079370499, + "learning_rate": 9.580292675392873e-05, + "loss": 0.1792, + "step": 40480 + }, + { + "epoch": 1.5633808255144985, + "grad_norm": 1.0059372186660767, + "learning_rate": 9.577718573432694e-05, + "loss": 0.2236, + "step": 40490 + }, + { + "epoch": 1.5637669408085255, + "grad_norm": 1.2795478105545044, + "learning_rate": 9.575144471472515e-05, + "loss": 0.2728, + "step": 40500 + }, + { + "epoch": 1.564153056102552, + "grad_norm": 1.099183201789856, + "learning_rate": 9.572570369512337e-05, + "loss": 0.2855, + "step": 40510 + }, + { + "epoch": 1.564539171396579, + "grad_norm": 1.0743390321731567, + "learning_rate": 9.569996267552158e-05, + "loss": 0.2439, + "step": 40520 + }, + { + "epoch": 1.5649252866906058, + "grad_norm": 1.1840991973876953, + "learning_rate": 9.56742216559198e-05, + "loss": 0.1416, + "step": 40530 + }, + { + "epoch": 1.5653114019846326, + "grad_norm": 0.4638634920120239, + "learning_rate": 9.564848063631801e-05, + "loss": 0.254, + "step": 40540 + }, + { + "epoch": 1.5656975172786594, + "grad_norm": 3.084916830062866, + "learning_rate": 9.562273961671623e-05, + "loss": 0.2098, + "step": 40550 + }, + { + "epoch": 1.5660836325726861, + "grad_norm": 0.666347324848175, + "learning_rate": 9.559699859711443e-05, + "loss": 0.3649, + "step": 40560 + }, + { + "epoch": 1.5664697478667131, + "grad_norm": 1.1770634651184082, + "learning_rate": 9.557125757751265e-05, + "loss": 0.2069, + "step": 40570 + }, + { + "epoch": 1.5668558631607397, + "grad_norm": 0.9030371308326721, + "learning_rate": 9.554551655791086e-05, + "loss": 0.1174, + "step": 40580 + }, + { + "epoch": 1.5672419784547666, + "grad_norm": 0.053270868957042694, + "learning_rate": 9.551977553830907e-05, + "loss": 0.1304, + "step": 40590 + }, + { + "epoch": 1.5676280937487934, + "grad_norm": 2.4098777770996094, + "learning_rate": 9.549403451870729e-05, + "loss": 0.2654, + "step": 40600 + }, + { + "epoch": 1.5680142090428202, + "grad_norm": 0.7078404426574707, + "learning_rate": 9.54682934991055e-05, + "loss": 0.2203, + "step": 40610 + }, + { + "epoch": 1.568400324336847, + "grad_norm": 1.1814978122711182, + "learning_rate": 9.544255247950373e-05, + "loss": 0.2986, + "step": 40620 + }, + { + "epoch": 1.5687864396308737, + "grad_norm": 2.29439377784729, + "learning_rate": 9.541681145990193e-05, + "loss": 0.3143, + "step": 40630 + }, + { + "epoch": 1.5691725549249007, + "grad_norm": 0.07921203970909119, + "learning_rate": 9.539107044030014e-05, + "loss": 0.151, + "step": 40640 + }, + { + "epoch": 1.5695586702189273, 
+ "grad_norm": 0.2058558166027069, + "learning_rate": 9.536532942069835e-05, + "loss": 0.2169, + "step": 40650 + }, + { + "epoch": 1.5699447855129542, + "grad_norm": 0.4650769531726837, + "learning_rate": 9.533958840109657e-05, + "loss": 0.136, + "step": 40660 + }, + { + "epoch": 1.570330900806981, + "grad_norm": 1.0603867769241333, + "learning_rate": 9.531384738149478e-05, + "loss": 0.2396, + "step": 40670 + }, + { + "epoch": 1.5707170161010078, + "grad_norm": 2.279155731201172, + "learning_rate": 9.528810636189301e-05, + "loss": 0.2679, + "step": 40680 + }, + { + "epoch": 1.5711031313950345, + "grad_norm": 1.8186841011047363, + "learning_rate": 9.526236534229122e-05, + "loss": 0.2592, + "step": 40690 + }, + { + "epoch": 1.5714892466890613, + "grad_norm": 1.9804840087890625, + "learning_rate": 9.523662432268943e-05, + "loss": 0.3184, + "step": 40700 + }, + { + "epoch": 1.5718753619830883, + "grad_norm": 2.3474323749542236, + "learning_rate": 9.521088330308763e-05, + "loss": 0.1892, + "step": 40710 + }, + { + "epoch": 1.5722614772771148, + "grad_norm": 0.5251644253730774, + "learning_rate": 9.518514228348585e-05, + "loss": 0.1306, + "step": 40720 + }, + { + "epoch": 1.5726475925711418, + "grad_norm": 0.9444900155067444, + "learning_rate": 9.515940126388406e-05, + "loss": 0.1888, + "step": 40730 + }, + { + "epoch": 1.5730337078651684, + "grad_norm": 1.1808644533157349, + "learning_rate": 9.513366024428229e-05, + "loss": 0.1249, + "step": 40740 + }, + { + "epoch": 1.5734198231591954, + "grad_norm": 2.716831684112549, + "learning_rate": 9.51079192246805e-05, + "loss": 0.2544, + "step": 40750 + }, + { + "epoch": 1.5738059384532221, + "grad_norm": 1.257308006286621, + "learning_rate": 9.508217820507871e-05, + "loss": 0.1292, + "step": 40760 + }, + { + "epoch": 1.574192053747249, + "grad_norm": 2.080162525177002, + "learning_rate": 9.505643718547693e-05, + "loss": 0.3814, + "step": 40770 + }, + { + "epoch": 1.5745781690412757, + "grad_norm": 1.107872486114502, + "learning_rate": 9.503069616587513e-05, + "loss": 0.2074, + "step": 40780 + }, + { + "epoch": 1.5749642843353024, + "grad_norm": 1.0006227493286133, + "learning_rate": 9.500495514627334e-05, + "loss": 0.2384, + "step": 40790 + }, + { + "epoch": 1.5753503996293294, + "grad_norm": 0.3345160484313965, + "learning_rate": 9.497921412667157e-05, + "loss": 0.181, + "step": 40800 + }, + { + "epoch": 1.575736514923356, + "grad_norm": 1.2941926717758179, + "learning_rate": 9.495347310706978e-05, + "loss": 0.2312, + "step": 40810 + }, + { + "epoch": 1.576122630217383, + "grad_norm": 1.3869580030441284, + "learning_rate": 9.4927732087468e-05, + "loss": 0.2107, + "step": 40820 + }, + { + "epoch": 1.5765087455114097, + "grad_norm": 1.9948787689208984, + "learning_rate": 9.49019910678662e-05, + "loss": 0.2736, + "step": 40830 + }, + { + "epoch": 1.5768948608054365, + "grad_norm": 0.22069145739078522, + "learning_rate": 9.487625004826442e-05, + "loss": 0.3173, + "step": 40840 + }, + { + "epoch": 1.5772809760994633, + "grad_norm": 0.61549311876297, + "learning_rate": 9.485050902866262e-05, + "loss": 0.1437, + "step": 40850 + }, + { + "epoch": 1.57766709139349, + "grad_norm": 0.20520836114883423, + "learning_rate": 9.482476800906083e-05, + "loss": 0.1247, + "step": 40860 + }, + { + "epoch": 1.578053206687517, + "grad_norm": 2.1745588779449463, + "learning_rate": 9.479902698945906e-05, + "loss": 0.366, + "step": 40870 + }, + { + "epoch": 1.5784393219815436, + "grad_norm": 1.1127387285232544, + "learning_rate": 9.477328596985727e-05, + "loss": 0.1202, + 
"step": 40880 + }, + { + "epoch": 1.5788254372755706, + "grad_norm": 0.598258376121521, + "learning_rate": 9.474754495025549e-05, + "loss": 0.3917, + "step": 40890 + }, + { + "epoch": 1.5792115525695973, + "grad_norm": 1.0376511812210083, + "learning_rate": 9.47218039306537e-05, + "loss": 0.1372, + "step": 40900 + }, + { + "epoch": 1.579597667863624, + "grad_norm": 1.7164186239242554, + "learning_rate": 9.469606291105191e-05, + "loss": 0.3143, + "step": 40910 + }, + { + "epoch": 1.5799837831576509, + "grad_norm": 1.7929835319519043, + "learning_rate": 9.467032189145013e-05, + "loss": 0.2864, + "step": 40920 + }, + { + "epoch": 1.5803698984516776, + "grad_norm": 0.551222026348114, + "learning_rate": 9.464458087184834e-05, + "loss": 0.1173, + "step": 40930 + }, + { + "epoch": 1.5807560137457046, + "grad_norm": 1.5236003398895264, + "learning_rate": 9.461883985224655e-05, + "loss": 0.2031, + "step": 40940 + }, + { + "epoch": 1.5811421290397312, + "grad_norm": 0.690719723701477, + "learning_rate": 9.459309883264477e-05, + "loss": 0.1913, + "step": 40950 + }, + { + "epoch": 1.5815282443337582, + "grad_norm": 0.4199884533882141, + "learning_rate": 9.456735781304298e-05, + "loss": 0.279, + "step": 40960 + }, + { + "epoch": 1.5819143596277847, + "grad_norm": 0.7393648028373718, + "learning_rate": 9.454161679344119e-05, + "loss": 0.2315, + "step": 40970 + }, + { + "epoch": 1.5823004749218117, + "grad_norm": 0.09372472018003464, + "learning_rate": 9.45158757738394e-05, + "loss": 0.2585, + "step": 40980 + }, + { + "epoch": 1.5826865902158385, + "grad_norm": 0.37245264649391174, + "learning_rate": 9.449013475423762e-05, + "loss": 0.239, + "step": 40990 + }, + { + "epoch": 1.5830727055098652, + "grad_norm": 1.8006244897842407, + "learning_rate": 9.446439373463583e-05, + "loss": 0.3497, + "step": 41000 + }, + { + "epoch": 1.583458820803892, + "grad_norm": 1.4421888589859009, + "learning_rate": 9.443865271503405e-05, + "loss": 0.2014, + "step": 41010 + }, + { + "epoch": 1.5838449360979188, + "grad_norm": 1.8858803510665894, + "learning_rate": 9.441291169543226e-05, + "loss": 0.1816, + "step": 41020 + }, + { + "epoch": 1.5842310513919458, + "grad_norm": 1.108573317527771, + "learning_rate": 9.438717067583047e-05, + "loss": 0.1869, + "step": 41030 + }, + { + "epoch": 1.5846171666859723, + "grad_norm": 0.34841394424438477, + "learning_rate": 9.436142965622869e-05, + "loss": 0.3034, + "step": 41040 + }, + { + "epoch": 1.5850032819799993, + "grad_norm": 7.2845964431762695, + "learning_rate": 9.433568863662691e-05, + "loss": 0.2659, + "step": 41050 + }, + { + "epoch": 1.585389397274026, + "grad_norm": 0.4753032922744751, + "learning_rate": 9.430994761702511e-05, + "loss": 0.2539, + "step": 41060 + }, + { + "epoch": 1.5857755125680528, + "grad_norm": 1.590050458908081, + "learning_rate": 9.428420659742333e-05, + "loss": 0.2373, + "step": 41070 + }, + { + "epoch": 1.5861616278620796, + "grad_norm": 0.6979599595069885, + "learning_rate": 9.425846557782154e-05, + "loss": 0.1931, + "step": 41080 + }, + { + "epoch": 1.5865477431561064, + "grad_norm": 0.6384108066558838, + "learning_rate": 9.423272455821975e-05, + "loss": 0.2922, + "step": 41090 + }, + { + "epoch": 1.5869338584501334, + "grad_norm": 0.4752826392650604, + "learning_rate": 9.420698353861797e-05, + "loss": 0.2616, + "step": 41100 + }, + { + "epoch": 1.58731997374416, + "grad_norm": 0.7333683371543884, + "learning_rate": 9.418124251901618e-05, + "loss": 0.1568, + "step": 41110 + }, + { + "epoch": 1.5877060890381869, + "grad_norm": 0.09763200581073761, + 
"learning_rate": 9.41555014994144e-05, + "loss": 0.1936, + "step": 41120 + }, + { + "epoch": 1.5880922043322137, + "grad_norm": 2.4087512493133545, + "learning_rate": 9.41297604798126e-05, + "loss": 0.2408, + "step": 41130 + }, + { + "epoch": 1.5884783196262404, + "grad_norm": 2.014822244644165, + "learning_rate": 9.410401946021082e-05, + "loss": 0.2883, + "step": 41140 + }, + { + "epoch": 1.5888644349202672, + "grad_norm": 1.1764864921569824, + "learning_rate": 9.407827844060903e-05, + "loss": 0.3198, + "step": 41150 + }, + { + "epoch": 1.589250550214294, + "grad_norm": 0.807905912399292, + "learning_rate": 9.405253742100725e-05, + "loss": 0.3724, + "step": 41160 + }, + { + "epoch": 1.589636665508321, + "grad_norm": 0.9179816842079163, + "learning_rate": 9.402679640140546e-05, + "loss": 0.1429, + "step": 41170 + }, + { + "epoch": 1.5900227808023475, + "grad_norm": 1.3134746551513672, + "learning_rate": 9.400105538180369e-05, + "loss": 0.2973, + "step": 41180 + }, + { + "epoch": 1.5904088960963745, + "grad_norm": 0.08131751418113708, + "learning_rate": 9.39753143622019e-05, + "loss": 0.1627, + "step": 41190 + }, + { + "epoch": 1.590795011390401, + "grad_norm": 0.23568874597549438, + "learning_rate": 9.39495733426001e-05, + "loss": 0.3159, + "step": 41200 + }, + { + "epoch": 1.591181126684428, + "grad_norm": 1.1452207565307617, + "learning_rate": 9.392383232299831e-05, + "loss": 0.1761, + "step": 41210 + }, + { + "epoch": 1.5915672419784548, + "grad_norm": 1.5560107231140137, + "learning_rate": 9.389809130339653e-05, + "loss": 0.2173, + "step": 41220 + }, + { + "epoch": 1.5919533572724816, + "grad_norm": 1.7568162679672241, + "learning_rate": 9.387235028379474e-05, + "loss": 0.1771, + "step": 41230 + }, + { + "epoch": 1.5923394725665083, + "grad_norm": 6.235257148742676, + "learning_rate": 9.384660926419297e-05, + "loss": 0.3207, + "step": 41240 + }, + { + "epoch": 1.592725587860535, + "grad_norm": 1.2336914539337158, + "learning_rate": 9.382086824459118e-05, + "loss": 0.1691, + "step": 41250 + }, + { + "epoch": 1.593111703154562, + "grad_norm": 0.06781507283449173, + "learning_rate": 9.379512722498939e-05, + "loss": 0.1485, + "step": 41260 + }, + { + "epoch": 1.5934978184485886, + "grad_norm": 0.44770199060440063, + "learning_rate": 9.37693862053876e-05, + "loss": 0.133, + "step": 41270 + }, + { + "epoch": 1.5938839337426156, + "grad_norm": 0.3812965750694275, + "learning_rate": 9.37436451857858e-05, + "loss": 0.2305, + "step": 41280 + }, + { + "epoch": 1.5942700490366424, + "grad_norm": 1.7562031745910645, + "learning_rate": 9.371790416618402e-05, + "loss": 0.265, + "step": 41290 + }, + { + "epoch": 1.5946561643306691, + "grad_norm": 0.3089204728603363, + "learning_rate": 9.369216314658225e-05, + "loss": 0.2583, + "step": 41300 + }, + { + "epoch": 1.595042279624696, + "grad_norm": 1.5295588970184326, + "learning_rate": 9.366642212698046e-05, + "loss": 0.1201, + "step": 41310 + }, + { + "epoch": 1.5954283949187227, + "grad_norm": 0.7704429626464844, + "learning_rate": 9.364068110737867e-05, + "loss": 0.1471, + "step": 41320 + }, + { + "epoch": 1.5958145102127497, + "grad_norm": 1.825547218322754, + "learning_rate": 9.361494008777689e-05, + "loss": 0.2964, + "step": 41330 + }, + { + "epoch": 1.5962006255067762, + "grad_norm": 2.4156250953674316, + "learning_rate": 9.35891990681751e-05, + "loss": 0.4332, + "step": 41340 + }, + { + "epoch": 1.5965867408008032, + "grad_norm": 0.8711603879928589, + "learning_rate": 9.35634580485733e-05, + "loss": 0.2037, + "step": 41350 + }, + { + "epoch": 
1.5969728560948298, + "grad_norm": 0.5013506412506104, + "learning_rate": 9.353771702897151e-05, + "loss": 0.1852, + "step": 41360 + }, + { + "epoch": 1.5973589713888567, + "grad_norm": 1.9529963731765747, + "learning_rate": 9.351197600936974e-05, + "loss": 0.2809, + "step": 41370 + }, + { + "epoch": 1.5977450866828835, + "grad_norm": 1.7923181056976318, + "learning_rate": 9.348623498976795e-05, + "loss": 0.3918, + "step": 41380 + }, + { + "epoch": 1.5981312019769103, + "grad_norm": 0.45643335580825806, + "learning_rate": 9.346049397016617e-05, + "loss": 0.4161, + "step": 41390 + }, + { + "epoch": 1.5985173172709373, + "grad_norm": 0.4477383494377136, + "learning_rate": 9.343475295056438e-05, + "loss": 0.202, + "step": 41400 + }, + { + "epoch": 1.5989034325649638, + "grad_norm": 1.1428645849227905, + "learning_rate": 9.340901193096259e-05, + "loss": 0.2271, + "step": 41410 + }, + { + "epoch": 1.5992895478589908, + "grad_norm": 1.8324707746505737, + "learning_rate": 9.338327091136079e-05, + "loss": 0.2261, + "step": 41420 + }, + { + "epoch": 1.5996756631530173, + "grad_norm": 1.3735613822937012, + "learning_rate": 9.335752989175902e-05, + "loss": 0.2394, + "step": 41430 + }, + { + "epoch": 1.6000617784470443, + "grad_norm": 4.176051139831543, + "learning_rate": 9.333178887215723e-05, + "loss": 0.3454, + "step": 41440 + }, + { + "epoch": 1.600447893741071, + "grad_norm": 0.1931024044752121, + "learning_rate": 9.330604785255545e-05, + "loss": 0.3191, + "step": 41450 + }, + { + "epoch": 1.6008340090350979, + "grad_norm": 0.867579996585846, + "learning_rate": 9.328030683295366e-05, + "loss": 0.1951, + "step": 41460 + }, + { + "epoch": 1.6012201243291246, + "grad_norm": 0.4601798355579376, + "learning_rate": 9.325456581335187e-05, + "loss": 0.2682, + "step": 41470 + }, + { + "epoch": 1.6016062396231514, + "grad_norm": 0.5605349540710449, + "learning_rate": 9.322882479375009e-05, + "loss": 0.1963, + "step": 41480 + }, + { + "epoch": 1.6019923549171784, + "grad_norm": 0.4736683964729309, + "learning_rate": 9.32030837741483e-05, + "loss": 0.2347, + "step": 41490 + }, + { + "epoch": 1.602378470211205, + "grad_norm": 2.145426034927368, + "learning_rate": 9.317734275454651e-05, + "loss": 0.2782, + "step": 41500 + }, + { + "epoch": 1.602764585505232, + "grad_norm": 0.34660840034484863, + "learning_rate": 9.315160173494473e-05, + "loss": 0.1966, + "step": 41510 + }, + { + "epoch": 1.6031507007992587, + "grad_norm": 1.3674156665802002, + "learning_rate": 9.312586071534294e-05, + "loss": 0.426, + "step": 41520 + }, + { + "epoch": 1.6035368160932855, + "grad_norm": 1.078046441078186, + "learning_rate": 9.310011969574115e-05, + "loss": 0.2345, + "step": 41530 + }, + { + "epoch": 1.6039229313873122, + "grad_norm": 2.233793258666992, + "learning_rate": 9.307437867613937e-05, + "loss": 0.2605, + "step": 41540 + }, + { + "epoch": 1.604309046681339, + "grad_norm": 0.9344108700752258, + "learning_rate": 9.304863765653758e-05, + "loss": 0.114, + "step": 41550 + }, + { + "epoch": 1.604695161975366, + "grad_norm": 0.29096049070358276, + "learning_rate": 9.302289663693579e-05, + "loss": 0.3319, + "step": 41560 + }, + { + "epoch": 1.6050812772693925, + "grad_norm": 0.41333675384521484, + "learning_rate": 9.2997155617334e-05, + "loss": 0.2392, + "step": 41570 + }, + { + "epoch": 1.6054673925634195, + "grad_norm": 1.3718006610870361, + "learning_rate": 9.297141459773222e-05, + "loss": 0.2304, + "step": 41580 + }, + { + "epoch": 1.605853507857446, + "grad_norm": 1.6650983095169067, + "learning_rate": 
9.294567357813043e-05, + "loss": 0.3319, + "step": 41590 + }, + { + "epoch": 1.606239623151473, + "grad_norm": 3.4465317726135254, + "learning_rate": 9.291993255852865e-05, + "loss": 0.6168, + "step": 41600 + }, + { + "epoch": 1.6066257384454998, + "grad_norm": 1.2659804821014404, + "learning_rate": 9.289419153892687e-05, + "loss": 0.2291, + "step": 41610 + }, + { + "epoch": 1.6070118537395266, + "grad_norm": 0.4997158348560333, + "learning_rate": 9.286845051932509e-05, + "loss": 0.1619, + "step": 41620 + }, + { + "epoch": 1.6073979690335534, + "grad_norm": 0.87920743227005, + "learning_rate": 9.284270949972329e-05, + "loss": 0.1894, + "step": 41630 + }, + { + "epoch": 1.6077840843275801, + "grad_norm": 1.124824047088623, + "learning_rate": 9.28169684801215e-05, + "loss": 0.4755, + "step": 41640 + }, + { + "epoch": 1.6081701996216071, + "grad_norm": 0.2584467828273773, + "learning_rate": 9.279122746051971e-05, + "loss": 0.2281, + "step": 41650 + }, + { + "epoch": 1.6085563149156337, + "grad_norm": 0.6686498522758484, + "learning_rate": 9.276548644091793e-05, + "loss": 0.3244, + "step": 41660 + }, + { + "epoch": 1.6089424302096607, + "grad_norm": 1.991140365600586, + "learning_rate": 9.273974542131614e-05, + "loss": 0.2776, + "step": 41670 + }, + { + "epoch": 1.6093285455036874, + "grad_norm": 0.40963074564933777, + "learning_rate": 9.271400440171437e-05, + "loss": 0.1072, + "step": 41680 + }, + { + "epoch": 1.6097146607977142, + "grad_norm": 1.117253065109253, + "learning_rate": 9.268826338211258e-05, + "loss": 0.2386, + "step": 41690 + }, + { + "epoch": 1.610100776091741, + "grad_norm": 1.2031314373016357, + "learning_rate": 9.266252236251078e-05, + "loss": 0.1967, + "step": 41700 + }, + { + "epoch": 1.6104868913857677, + "grad_norm": 1.4460607767105103, + "learning_rate": 9.263678134290899e-05, + "loss": 0.2471, + "step": 41710 + }, + { + "epoch": 1.6108730066797947, + "grad_norm": 1.5548468828201294, + "learning_rate": 9.26110403233072e-05, + "loss": 0.1846, + "step": 41720 + }, + { + "epoch": 1.6112591219738213, + "grad_norm": 2.042555809020996, + "learning_rate": 9.258529930370542e-05, + "loss": 0.2791, + "step": 41730 + }, + { + "epoch": 1.6116452372678483, + "grad_norm": 0.19565679132938385, + "learning_rate": 9.255955828410365e-05, + "loss": 0.2138, + "step": 41740 + }, + { + "epoch": 1.612031352561875, + "grad_norm": 0.13099287450313568, + "learning_rate": 9.253381726450186e-05, + "loss": 0.239, + "step": 41750 + }, + { + "epoch": 1.6124174678559018, + "grad_norm": 1.7031620740890503, + "learning_rate": 9.250807624490007e-05, + "loss": 0.4618, + "step": 41760 + }, + { + "epoch": 1.6128035831499286, + "grad_norm": 1.3827983140945435, + "learning_rate": 9.248233522529827e-05, + "loss": 0.3837, + "step": 41770 + }, + { + "epoch": 1.6131896984439553, + "grad_norm": 0.29199469089508057, + "learning_rate": 9.245659420569648e-05, + "loss": 0.2054, + "step": 41780 + }, + { + "epoch": 1.6135758137379823, + "grad_norm": 0.8447692394256592, + "learning_rate": 9.24308531860947e-05, + "loss": 0.2001, + "step": 41790 + }, + { + "epoch": 1.6139619290320089, + "grad_norm": 3.2848644256591797, + "learning_rate": 9.240511216649292e-05, + "loss": 0.2177, + "step": 41800 + }, + { + "epoch": 1.6143480443260358, + "grad_norm": 0.7044230699539185, + "learning_rate": 9.237937114689114e-05, + "loss": 0.2592, + "step": 41810 + }, + { + "epoch": 1.6147341596200624, + "grad_norm": 0.6980583667755127, + "learning_rate": 9.235363012728935e-05, + "loss": 0.2133, + "step": 41820 + }, + { + "epoch": 
1.6151202749140894, + "grad_norm": 0.6349130868911743, + "learning_rate": 9.232788910768756e-05, + "loss": 0.1238, + "step": 41830 + }, + { + "epoch": 1.6155063902081161, + "grad_norm": 3.3420283794403076, + "learning_rate": 9.230214808808576e-05, + "loss": 0.4264, + "step": 41840 + }, + { + "epoch": 1.615892505502143, + "grad_norm": 1.1701698303222656, + "learning_rate": 9.227640706848398e-05, + "loss": 0.2886, + "step": 41850 + }, + { + "epoch": 1.6162786207961697, + "grad_norm": 1.7839452028274536, + "learning_rate": 9.22506660488822e-05, + "loss": 0.2431, + "step": 41860 + }, + { + "epoch": 1.6166647360901965, + "grad_norm": 0.5988792181015015, + "learning_rate": 9.222492502928042e-05, + "loss": 0.2162, + "step": 41870 + }, + { + "epoch": 1.6170508513842234, + "grad_norm": 2.511929988861084, + "learning_rate": 9.219918400967863e-05, + "loss": 0.2862, + "step": 41880 + }, + { + "epoch": 1.61743696667825, + "grad_norm": 0.524932861328125, + "learning_rate": 9.217344299007684e-05, + "loss": 0.1984, + "step": 41890 + }, + { + "epoch": 1.617823081972277, + "grad_norm": 0.42112675309181213, + "learning_rate": 9.214770197047506e-05, + "loss": 0.3873, + "step": 41900 + }, + { + "epoch": 1.6182091972663037, + "grad_norm": 2.114396810531616, + "learning_rate": 9.212196095087327e-05, + "loss": 0.209, + "step": 41910 + }, + { + "epoch": 1.6185953125603305, + "grad_norm": 1.023512601852417, + "learning_rate": 9.209621993127147e-05, + "loss": 0.1849, + "step": 41920 + }, + { + "epoch": 1.6189814278543573, + "grad_norm": 0.4894075393676758, + "learning_rate": 9.20704789116697e-05, + "loss": 0.2624, + "step": 41930 + }, + { + "epoch": 1.619367543148384, + "grad_norm": 2.8259129524230957, + "learning_rate": 9.204473789206791e-05, + "loss": 0.1016, + "step": 41940 + }, + { + "epoch": 1.619753658442411, + "grad_norm": 0.30800989270210266, + "learning_rate": 9.201899687246612e-05, + "loss": 0.2056, + "step": 41950 + }, + { + "epoch": 1.6201397737364376, + "grad_norm": 0.9112114906311035, + "learning_rate": 9.199325585286434e-05, + "loss": 0.2935, + "step": 41960 + }, + { + "epoch": 1.6205258890304646, + "grad_norm": 0.9642273783683777, + "learning_rate": 9.196751483326255e-05, + "loss": 0.2782, + "step": 41970 + }, + { + "epoch": 1.6209120043244913, + "grad_norm": 1.4163227081298828, + "learning_rate": 9.194177381366076e-05, + "loss": 0.2259, + "step": 41980 + }, + { + "epoch": 1.621298119618518, + "grad_norm": 1.3390878438949585, + "learning_rate": 9.191603279405898e-05, + "loss": 0.2118, + "step": 41990 + }, + { + "epoch": 1.6216842349125449, + "grad_norm": 0.7193337082862854, + "learning_rate": 9.189029177445719e-05, + "loss": 0.1972, + "step": 42000 + }, + { + "epoch": 1.6220703502065716, + "grad_norm": 0.9605100154876709, + "learning_rate": 9.18645507548554e-05, + "loss": 0.2109, + "step": 42010 + }, + { + "epoch": 1.6224564655005986, + "grad_norm": 1.1802191734313965, + "learning_rate": 9.183880973525362e-05, + "loss": 0.204, + "step": 42020 + }, + { + "epoch": 1.6228425807946252, + "grad_norm": 0.9422736167907715, + "learning_rate": 9.181306871565183e-05, + "loss": 0.2316, + "step": 42030 + }, + { + "epoch": 1.6232286960886522, + "grad_norm": 2.4047484397888184, + "learning_rate": 9.178732769605004e-05, + "loss": 0.3395, + "step": 42040 + }, + { + "epoch": 1.6236148113826787, + "grad_norm": 0.625637948513031, + "learning_rate": 9.176158667644826e-05, + "loss": 0.2339, + "step": 42050 + }, + { + "epoch": 1.6240009266767057, + "grad_norm": 1.628997564315796, + "learning_rate": 9.173584565684647e-05, 
+ "loss": 0.2034, + "step": 42060 + }, + { + "epoch": 1.6243870419707325, + "grad_norm": 0.46910417079925537, + "learning_rate": 9.171010463724468e-05, + "loss": 0.2874, + "step": 42070 + }, + { + "epoch": 1.6247731572647592, + "grad_norm": 1.7646992206573486, + "learning_rate": 9.16843636176429e-05, + "loss": 0.2998, + "step": 42080 + }, + { + "epoch": 1.625159272558786, + "grad_norm": 1.0245219469070435, + "learning_rate": 9.165862259804111e-05, + "loss": 0.3796, + "step": 42090 + }, + { + "epoch": 1.6255453878528128, + "grad_norm": 1.175984501838684, + "learning_rate": 9.163288157843932e-05, + "loss": 0.2683, + "step": 42100 + }, + { + "epoch": 1.6259315031468398, + "grad_norm": 0.5364359617233276, + "learning_rate": 9.160714055883755e-05, + "loss": 0.185, + "step": 42110 + }, + { + "epoch": 1.6263176184408663, + "grad_norm": 2.991917610168457, + "learning_rate": 9.158139953923575e-05, + "loss": 0.4202, + "step": 42120 + }, + { + "epoch": 1.6267037337348933, + "grad_norm": 0.6997040510177612, + "learning_rate": 9.155565851963396e-05, + "loss": 0.2833, + "step": 42130 + }, + { + "epoch": 1.62708984902892, + "grad_norm": 1.2331346273422241, + "learning_rate": 9.152991750003218e-05, + "loss": 0.2001, + "step": 42140 + }, + { + "epoch": 1.6274759643229468, + "grad_norm": 0.5714563131332397, + "learning_rate": 9.150417648043039e-05, + "loss": 0.1649, + "step": 42150 + }, + { + "epoch": 1.6278620796169736, + "grad_norm": 0.18129512667655945, + "learning_rate": 9.14784354608286e-05, + "loss": 0.2042, + "step": 42160 + }, + { + "epoch": 1.6282481949110004, + "grad_norm": 1.0198371410369873, + "learning_rate": 9.145269444122682e-05, + "loss": 0.089, + "step": 42170 + }, + { + "epoch": 1.6286343102050274, + "grad_norm": 1.7465068101882935, + "learning_rate": 9.142695342162504e-05, + "loss": 0.2767, + "step": 42180 + }, + { + "epoch": 1.629020425499054, + "grad_norm": 1.5238598585128784, + "learning_rate": 9.140121240202324e-05, + "loss": 0.2682, + "step": 42190 + }, + { + "epoch": 1.629406540793081, + "grad_norm": 0.21305501461029053, + "learning_rate": 9.137547138242146e-05, + "loss": 0.1656, + "step": 42200 + }, + { + "epoch": 1.6297926560871077, + "grad_norm": 2.7441041469573975, + "learning_rate": 9.134973036281967e-05, + "loss": 0.1734, + "step": 42210 + }, + { + "epoch": 1.6301787713811344, + "grad_norm": 1.0751773118972778, + "learning_rate": 9.132398934321788e-05, + "loss": 0.3272, + "step": 42220 + }, + { + "epoch": 1.6305648866751612, + "grad_norm": 2.0907068252563477, + "learning_rate": 9.12982483236161e-05, + "loss": 0.2322, + "step": 42230 + }, + { + "epoch": 1.630951001969188, + "grad_norm": 0.8943297266960144, + "learning_rate": 9.127250730401432e-05, + "loss": 0.1908, + "step": 42240 + }, + { + "epoch": 1.631337117263215, + "grad_norm": 0.30939817428588867, + "learning_rate": 9.124676628441254e-05, + "loss": 0.2398, + "step": 42250 + }, + { + "epoch": 1.6317232325572415, + "grad_norm": 0.07228074967861176, + "learning_rate": 9.122102526481075e-05, + "loss": 0.2146, + "step": 42260 + }, + { + "epoch": 1.6321093478512685, + "grad_norm": 1.6129286289215088, + "learning_rate": 9.119528424520895e-05, + "loss": 0.2989, + "step": 42270 + }, + { + "epoch": 1.632495463145295, + "grad_norm": 3.1282570362091064, + "learning_rate": 9.116954322560716e-05, + "loss": 0.2296, + "step": 42280 + }, + { + "epoch": 1.632881578439322, + "grad_norm": 1.9693125486373901, + "learning_rate": 9.114380220600538e-05, + "loss": 0.3122, + "step": 42290 + }, + { + "epoch": 1.6332676937333488, + "grad_norm": 
0.7935388088226318, + "learning_rate": 9.11180611864036e-05, + "loss": 0.2738, + "step": 42300 + }, + { + "epoch": 1.6336538090273756, + "grad_norm": 0.6630697250366211, + "learning_rate": 9.109232016680182e-05, + "loss": 0.1349, + "step": 42310 + }, + { + "epoch": 1.6340399243214023, + "grad_norm": 0.6689760684967041, + "learning_rate": 9.106657914720003e-05, + "loss": 0.2881, + "step": 42320 + }, + { + "epoch": 1.634426039615429, + "grad_norm": 1.3075367212295532, + "learning_rate": 9.104083812759824e-05, + "loss": 0.3599, + "step": 42330 + }, + { + "epoch": 1.634812154909456, + "grad_norm": 0.6957948803901672, + "learning_rate": 9.101509710799644e-05, + "loss": 0.2562, + "step": 42340 + }, + { + "epoch": 1.6351982702034826, + "grad_norm": 0.6405411958694458, + "learning_rate": 9.098935608839466e-05, + "loss": 0.3024, + "step": 42350 + }, + { + "epoch": 1.6355843854975096, + "grad_norm": 1.4570382833480835, + "learning_rate": 9.096361506879288e-05, + "loss": 0.2415, + "step": 42360 + }, + { + "epoch": 1.6359705007915364, + "grad_norm": 0.28187355399131775, + "learning_rate": 9.09378740491911e-05, + "loss": 0.3213, + "step": 42370 + }, + { + "epoch": 1.6363566160855632, + "grad_norm": 0.5807194113731384, + "learning_rate": 9.091213302958931e-05, + "loss": 0.2513, + "step": 42380 + }, + { + "epoch": 1.63674273137959, + "grad_norm": 2.750338554382324, + "learning_rate": 9.088639200998752e-05, + "loss": 0.2667, + "step": 42390 + }, + { + "epoch": 1.6371288466736167, + "grad_norm": 2.9717495441436768, + "learning_rate": 9.086065099038574e-05, + "loss": 0.2719, + "step": 42400 + }, + { + "epoch": 1.6375149619676437, + "grad_norm": 0.5903140306472778, + "learning_rate": 9.083490997078394e-05, + "loss": 0.2861, + "step": 42410 + }, + { + "epoch": 1.6379010772616702, + "grad_norm": 0.5791400671005249, + "learning_rate": 9.080916895118215e-05, + "loss": 0.1999, + "step": 42420 + }, + { + "epoch": 1.6382871925556972, + "grad_norm": 0.5550700426101685, + "learning_rate": 9.078342793158038e-05, + "loss": 0.2856, + "step": 42430 + }, + { + "epoch": 1.638673307849724, + "grad_norm": 0.1384456902742386, + "learning_rate": 9.075768691197859e-05, + "loss": 0.1379, + "step": 42440 + }, + { + "epoch": 1.6390594231437507, + "grad_norm": 0.4201198220252991, + "learning_rate": 9.07319458923768e-05, + "loss": 0.2401, + "step": 42450 + }, + { + "epoch": 1.6394455384377775, + "grad_norm": 0.4227651059627533, + "learning_rate": 9.070620487277502e-05, + "loss": 0.1251, + "step": 42460 + }, + { + "epoch": 1.6398316537318043, + "grad_norm": 0.5025180578231812, + "learning_rate": 9.068046385317323e-05, + "loss": 0.1911, + "step": 42470 + }, + { + "epoch": 1.6402177690258313, + "grad_norm": 1.5064163208007812, + "learning_rate": 9.065472283357144e-05, + "loss": 0.2291, + "step": 42480 + }, + { + "epoch": 1.6406038843198578, + "grad_norm": 1.0776429176330566, + "learning_rate": 9.062898181396966e-05, + "loss": 0.1429, + "step": 42490 + }, + { + "epoch": 1.6409899996138848, + "grad_norm": 1.409090280532837, + "learning_rate": 9.060324079436787e-05, + "loss": 0.1768, + "step": 42500 + }, + { + "epoch": 1.6413761149079114, + "grad_norm": 1.4606170654296875, + "learning_rate": 9.057749977476608e-05, + "loss": 0.1657, + "step": 42510 + }, + { + "epoch": 1.6417622302019383, + "grad_norm": 0.1410249024629593, + "learning_rate": 9.05517587551643e-05, + "loss": 0.1768, + "step": 42520 + }, + { + "epoch": 1.6421483454959651, + "grad_norm": 1.3056964874267578, + "learning_rate": 9.052601773556251e-05, + "loss": 0.2001, + "step": 
42530 + }, + { + "epoch": 1.6425344607899919, + "grad_norm": 0.7286831140518188, + "learning_rate": 9.050027671596072e-05, + "loss": 0.2181, + "step": 42540 + }, + { + "epoch": 1.6429205760840186, + "grad_norm": 0.7450721263885498, + "learning_rate": 9.047453569635894e-05, + "loss": 0.1961, + "step": 42550 + }, + { + "epoch": 1.6433066913780454, + "grad_norm": 0.3484252691268921, + "learning_rate": 9.044879467675715e-05, + "loss": 0.1435, + "step": 42560 + }, + { + "epoch": 1.6436928066720724, + "grad_norm": 1.4743714332580566, + "learning_rate": 9.042305365715536e-05, + "loss": 0.218, + "step": 42570 + }, + { + "epoch": 1.644078921966099, + "grad_norm": 1.9126007556915283, + "learning_rate": 9.039731263755358e-05, + "loss": 0.3114, + "step": 42580 + }, + { + "epoch": 1.644465037260126, + "grad_norm": 0.1249854639172554, + "learning_rate": 9.037157161795179e-05, + "loss": 0.1675, + "step": 42590 + }, + { + "epoch": 1.6448511525541527, + "grad_norm": 0.948403537273407, + "learning_rate": 9.034583059835e-05, + "loss": 0.1956, + "step": 42600 + }, + { + "epoch": 1.6452372678481795, + "grad_norm": 1.6507424116134644, + "learning_rate": 9.032008957874823e-05, + "loss": 0.2754, + "step": 42610 + }, + { + "epoch": 1.6456233831422062, + "grad_norm": 2.7686564922332764, + "learning_rate": 9.029434855914643e-05, + "loss": 0.3409, + "step": 42620 + }, + { + "epoch": 1.646009498436233, + "grad_norm": 2.7112228870391846, + "learning_rate": 9.026860753954464e-05, + "loss": 0.3626, + "step": 42630 + }, + { + "epoch": 1.64639561373026, + "grad_norm": 1.400976538658142, + "learning_rate": 9.024286651994286e-05, + "loss": 0.199, + "step": 42640 + }, + { + "epoch": 1.6467817290242865, + "grad_norm": 1.4591180086135864, + "learning_rate": 9.021712550034107e-05, + "loss": 0.4385, + "step": 42650 + }, + { + "epoch": 1.6471678443183135, + "grad_norm": 1.386496901512146, + "learning_rate": 9.019138448073928e-05, + "loss": 0.1433, + "step": 42660 + }, + { + "epoch": 1.64755395961234, + "grad_norm": 2.054412841796875, + "learning_rate": 9.01656434611375e-05, + "loss": 0.1602, + "step": 42670 + }, + { + "epoch": 1.647940074906367, + "grad_norm": 1.5373504161834717, + "learning_rate": 9.013990244153572e-05, + "loss": 0.1329, + "step": 42680 + }, + { + "epoch": 1.6483261902003938, + "grad_norm": 2.131410598754883, + "learning_rate": 9.011416142193392e-05, + "loss": 0.2858, + "step": 42690 + }, + { + "epoch": 1.6487123054944206, + "grad_norm": 1.5870622396469116, + "learning_rate": 9.008842040233214e-05, + "loss": 0.414, + "step": 42700 + }, + { + "epoch": 1.6490984207884476, + "grad_norm": 0.46856650710105896, + "learning_rate": 9.006267938273035e-05, + "loss": 0.1262, + "step": 42710 + }, + { + "epoch": 1.6494845360824741, + "grad_norm": 1.368762731552124, + "learning_rate": 9.003693836312856e-05, + "loss": 0.2341, + "step": 42720 + }, + { + "epoch": 1.6498706513765011, + "grad_norm": 2.3937511444091797, + "learning_rate": 9.001119734352678e-05, + "loss": 0.215, + "step": 42730 + }, + { + "epoch": 1.6502567666705277, + "grad_norm": 0.9359129071235657, + "learning_rate": 8.9985456323925e-05, + "loss": 0.2173, + "step": 42740 + }, + { + "epoch": 1.6506428819645547, + "grad_norm": 1.381408452987671, + "learning_rate": 8.995971530432322e-05, + "loss": 0.1671, + "step": 42750 + }, + { + "epoch": 1.6510289972585814, + "grad_norm": 0.5645018815994263, + "learning_rate": 8.993397428472142e-05, + "loss": 0.1943, + "step": 42760 + }, + { + "epoch": 1.6514151125526082, + "grad_norm": 1.1989009380340576, + "learning_rate": 
8.990823326511963e-05, + "loss": 0.1845, + "step": 42770 + }, + { + "epoch": 1.651801227846635, + "grad_norm": 2.2969398498535156, + "learning_rate": 8.988249224551784e-05, + "loss": 0.3469, + "step": 42780 + }, + { + "epoch": 1.6521873431406617, + "grad_norm": 1.931502342224121, + "learning_rate": 8.985675122591606e-05, + "loss": 0.3857, + "step": 42790 + }, + { + "epoch": 1.6525734584346887, + "grad_norm": 1.2291436195373535, + "learning_rate": 8.983101020631428e-05, + "loss": 0.1948, + "step": 42800 + }, + { + "epoch": 1.6529595737287153, + "grad_norm": 0.3344796597957611, + "learning_rate": 8.98052691867125e-05, + "loss": 0.2707, + "step": 42810 + }, + { + "epoch": 1.6533456890227423, + "grad_norm": 1.9174389839172363, + "learning_rate": 8.977952816711071e-05, + "loss": 0.2318, + "step": 42820 + }, + { + "epoch": 1.653731804316769, + "grad_norm": 0.49493780732154846, + "learning_rate": 8.975378714750892e-05, + "loss": 0.2615, + "step": 42830 + }, + { + "epoch": 1.6541179196107958, + "grad_norm": 0.8160524964332581, + "learning_rate": 8.972804612790712e-05, + "loss": 0.1837, + "step": 42840 + }, + { + "epoch": 1.6545040349048226, + "grad_norm": 0.571782112121582, + "learning_rate": 8.970230510830534e-05, + "loss": 0.1451, + "step": 42850 + }, + { + "epoch": 1.6548901501988493, + "grad_norm": 2.636889934539795, + "learning_rate": 8.967656408870356e-05, + "loss": 0.348, + "step": 42860 + }, + { + "epoch": 1.6552762654928763, + "grad_norm": 0.4561298191547394, + "learning_rate": 8.965082306910178e-05, + "loss": 0.3301, + "step": 42870 + }, + { + "epoch": 1.6556623807869029, + "grad_norm": 0.7691421508789062, + "learning_rate": 8.962508204949999e-05, + "loss": 0.231, + "step": 42880 + }, + { + "epoch": 1.6560484960809299, + "grad_norm": 0.2073112428188324, + "learning_rate": 8.95993410298982e-05, + "loss": 0.1358, + "step": 42890 + }, + { + "epoch": 1.6564346113749564, + "grad_norm": 0.35970741510391235, + "learning_rate": 8.957360001029642e-05, + "loss": 0.3412, + "step": 42900 + }, + { + "epoch": 1.6568207266689834, + "grad_norm": 0.6349666118621826, + "learning_rate": 8.954785899069462e-05, + "loss": 0.3279, + "step": 42910 + }, + { + "epoch": 1.6572068419630102, + "grad_norm": 1.6498395204544067, + "learning_rate": 8.952211797109284e-05, + "loss": 0.3195, + "step": 42920 + }, + { + "epoch": 1.657592957257037, + "grad_norm": 1.200462818145752, + "learning_rate": 8.949637695149106e-05, + "loss": 0.1533, + "step": 42930 + }, + { + "epoch": 1.6579790725510637, + "grad_norm": 3.063417673110962, + "learning_rate": 8.947063593188927e-05, + "loss": 0.1553, + "step": 42940 + }, + { + "epoch": 1.6583651878450905, + "grad_norm": 2.632843494415283, + "learning_rate": 8.944489491228748e-05, + "loss": 0.1819, + "step": 42950 + }, + { + "epoch": 1.6587513031391175, + "grad_norm": 1.2199878692626953, + "learning_rate": 8.94191538926857e-05, + "loss": 0.3631, + "step": 42960 + }, + { + "epoch": 1.659137418433144, + "grad_norm": 4.311095237731934, + "learning_rate": 8.939341287308391e-05, + "loss": 0.2747, + "step": 42970 + }, + { + "epoch": 1.659523533727171, + "grad_norm": 1.0289263725280762, + "learning_rate": 8.936767185348211e-05, + "loss": 0.225, + "step": 42980 + }, + { + "epoch": 1.6599096490211978, + "grad_norm": 0.17542269825935364, + "learning_rate": 8.934193083388034e-05, + "loss": 0.3562, + "step": 42990 + }, + { + "epoch": 1.6602957643152245, + "grad_norm": 1.6451623439788818, + "learning_rate": 8.931618981427855e-05, + "loss": 0.3318, + "step": 43000 + }, + { + "epoch": 1.6606818796092513, 
+ "grad_norm": 0.6164776682853699, + "learning_rate": 8.929044879467676e-05, + "loss": 0.272, + "step": 43010 + }, + { + "epoch": 1.661067994903278, + "grad_norm": 0.8627731800079346, + "learning_rate": 8.926470777507498e-05, + "loss": 0.2037, + "step": 43020 + }, + { + "epoch": 1.661454110197305, + "grad_norm": 0.9663155674934387, + "learning_rate": 8.923896675547319e-05, + "loss": 0.2291, + "step": 43030 + }, + { + "epoch": 1.6618402254913316, + "grad_norm": 1.909785509109497, + "learning_rate": 8.92132257358714e-05, + "loss": 0.357, + "step": 43040 + }, + { + "epoch": 1.6622263407853586, + "grad_norm": 1.4348317384719849, + "learning_rate": 8.918748471626962e-05, + "loss": 0.2494, + "step": 43050 + }, + { + "epoch": 1.6626124560793853, + "grad_norm": 1.8088570833206177, + "learning_rate": 8.916174369666783e-05, + "loss": 0.1663, + "step": 43060 + }, + { + "epoch": 1.6629985713734121, + "grad_norm": 0.4020337462425232, + "learning_rate": 8.913600267706604e-05, + "loss": 0.251, + "step": 43070 + }, + { + "epoch": 1.6633846866674389, + "grad_norm": 1.4719258546829224, + "learning_rate": 8.911026165746426e-05, + "loss": 0.3267, + "step": 43080 + }, + { + "epoch": 1.6637708019614657, + "grad_norm": 0.2420385479927063, + "learning_rate": 8.908452063786247e-05, + "loss": 0.2289, + "step": 43090 + }, + { + "epoch": 1.6641569172554926, + "grad_norm": 0.4697989225387573, + "learning_rate": 8.905877961826068e-05, + "loss": 0.1597, + "step": 43100 + }, + { + "epoch": 1.6645430325495192, + "grad_norm": 0.4061245918273926, + "learning_rate": 8.90330385986589e-05, + "loss": 0.2299, + "step": 43110 + }, + { + "epoch": 1.6649291478435462, + "grad_norm": 1.4116615056991577, + "learning_rate": 8.900729757905711e-05, + "loss": 0.251, + "step": 43120 + }, + { + "epoch": 1.6653152631375727, + "grad_norm": 0.2808239161968231, + "learning_rate": 8.898155655945532e-05, + "loss": 0.2204, + "step": 43130 + }, + { + "epoch": 1.6657013784315997, + "grad_norm": 0.09532297402620316, + "learning_rate": 8.895581553985354e-05, + "loss": 0.2076, + "step": 43140 + }, + { + "epoch": 1.6660874937256265, + "grad_norm": 1.6641709804534912, + "learning_rate": 8.893007452025175e-05, + "loss": 0.1697, + "step": 43150 + }, + { + "epoch": 1.6664736090196532, + "grad_norm": 0.9248488545417786, + "learning_rate": 8.890433350064996e-05, + "loss": 0.4659, + "step": 43160 + }, + { + "epoch": 1.66685972431368, + "grad_norm": 2.1415629386901855, + "learning_rate": 8.887859248104819e-05, + "loss": 0.3056, + "step": 43170 + }, + { + "epoch": 1.6672458396077068, + "grad_norm": 0.39359986782073975, + "learning_rate": 8.885285146144639e-05, + "loss": 0.2799, + "step": 43180 + }, + { + "epoch": 1.6676319549017338, + "grad_norm": 0.09023096412420273, + "learning_rate": 8.88271104418446e-05, + "loss": 0.2777, + "step": 43190 + }, + { + "epoch": 1.6680180701957603, + "grad_norm": 1.8555763959884644, + "learning_rate": 8.880136942224282e-05, + "loss": 0.3152, + "step": 43200 + }, + { + "epoch": 1.6684041854897873, + "grad_norm": 0.18823792040348053, + "learning_rate": 8.877562840264103e-05, + "loss": 0.1625, + "step": 43210 + }, + { + "epoch": 1.668790300783814, + "grad_norm": 1.5633597373962402, + "learning_rate": 8.874988738303924e-05, + "loss": 0.3041, + "step": 43220 + }, + { + "epoch": 1.6691764160778408, + "grad_norm": 1.6646497249603271, + "learning_rate": 8.872414636343746e-05, + "loss": 0.3719, + "step": 43230 + }, + { + "epoch": 1.6695625313718676, + "grad_norm": 1.569008469581604, + "learning_rate": 8.869840534383568e-05, + "loss": 
0.1797, + "step": 43240 + }, + { + "epoch": 1.6699486466658944, + "grad_norm": 0.44778671860694885, + "learning_rate": 8.86726643242339e-05, + "loss": 0.1192, + "step": 43250 + }, + { + "epoch": 1.6703347619599214, + "grad_norm": 1.936880350112915, + "learning_rate": 8.86469233046321e-05, + "loss": 0.262, + "step": 43260 + }, + { + "epoch": 1.670720877253948, + "grad_norm": 2.0454766750335693, + "learning_rate": 8.862118228503031e-05, + "loss": 0.125, + "step": 43270 + }, + { + "epoch": 1.671106992547975, + "grad_norm": 2.3878109455108643, + "learning_rate": 8.859544126542852e-05, + "loss": 0.2755, + "step": 43280 + }, + { + "epoch": 1.6714931078420017, + "grad_norm": 2.1281206607818604, + "learning_rate": 8.856970024582674e-05, + "loss": 0.3056, + "step": 43290 + }, + { + "epoch": 1.6718792231360284, + "grad_norm": 1.1602097749710083, + "learning_rate": 8.854395922622496e-05, + "loss": 0.3131, + "step": 43300 + }, + { + "epoch": 1.6722653384300552, + "grad_norm": 1.444730520248413, + "learning_rate": 8.851821820662318e-05, + "loss": 0.2922, + "step": 43310 + }, + { + "epoch": 1.672651453724082, + "grad_norm": 1.0081762075424194, + "learning_rate": 8.849247718702139e-05, + "loss": 0.2689, + "step": 43320 + }, + { + "epoch": 1.673037569018109, + "grad_norm": 2.532080888748169, + "learning_rate": 8.846673616741959e-05, + "loss": 0.4207, + "step": 43330 + }, + { + "epoch": 1.6734236843121355, + "grad_norm": 1.1733953952789307, + "learning_rate": 8.84409951478178e-05, + "loss": 0.3253, + "step": 43340 + }, + { + "epoch": 1.6738097996061625, + "grad_norm": 0.5781744122505188, + "learning_rate": 8.841525412821602e-05, + "loss": 0.165, + "step": 43350 + }, + { + "epoch": 1.674195914900189, + "grad_norm": 2.3320493698120117, + "learning_rate": 8.838951310861424e-05, + "loss": 0.1466, + "step": 43360 + }, + { + "epoch": 1.674582030194216, + "grad_norm": 0.6514772176742554, + "learning_rate": 8.836377208901246e-05, + "loss": 0.2392, + "step": 43370 + }, + { + "epoch": 1.6749681454882428, + "grad_norm": 2.446646213531494, + "learning_rate": 8.833803106941067e-05, + "loss": 0.2237, + "step": 43380 + }, + { + "epoch": 1.6753542607822696, + "grad_norm": 1.0567893981933594, + "learning_rate": 8.831229004980888e-05, + "loss": 0.4682, + "step": 43390 + }, + { + "epoch": 1.6757403760762963, + "grad_norm": 1.559910774230957, + "learning_rate": 8.828654903020708e-05, + "loss": 0.2283, + "step": 43400 + }, + { + "epoch": 1.676126491370323, + "grad_norm": 2.8934245109558105, + "learning_rate": 8.82608080106053e-05, + "loss": 0.255, + "step": 43410 + }, + { + "epoch": 1.67651260666435, + "grad_norm": 1.592612862586975, + "learning_rate": 8.823506699100352e-05, + "loss": 0.3115, + "step": 43420 + }, + { + "epoch": 1.6768987219583766, + "grad_norm": 0.19438475370407104, + "learning_rate": 8.820932597140174e-05, + "loss": 0.2549, + "step": 43430 + }, + { + "epoch": 1.6772848372524036, + "grad_norm": 1.3219093084335327, + "learning_rate": 8.818358495179995e-05, + "loss": 0.3156, + "step": 43440 + }, + { + "epoch": 1.6776709525464304, + "grad_norm": 0.23484057188034058, + "learning_rate": 8.815784393219816e-05, + "loss": 0.1866, + "step": 43450 + }, + { + "epoch": 1.6780570678404572, + "grad_norm": 1.284556269645691, + "learning_rate": 8.813210291259638e-05, + "loss": 0.2019, + "step": 43460 + }, + { + "epoch": 1.678443183134484, + "grad_norm": 1.4797706604003906, + "learning_rate": 8.810636189299459e-05, + "loss": 0.2423, + "step": 43470 + }, + { + "epoch": 1.6788292984285107, + "grad_norm": 1.6002583503723145, + 
"learning_rate": 8.808062087339279e-05, + "loss": 0.182, + "step": 43480 + }, + { + "epoch": 1.6792154137225377, + "grad_norm": 1.0503334999084473, + "learning_rate": 8.805487985379102e-05, + "loss": 0.2062, + "step": 43490 + }, + { + "epoch": 1.6796015290165642, + "grad_norm": 1.0708048343658447, + "learning_rate": 8.802913883418923e-05, + "loss": 0.1004, + "step": 43500 + }, + { + "epoch": 1.6799876443105912, + "grad_norm": 1.6162430047988892, + "learning_rate": 8.800339781458744e-05, + "loss": 0.1858, + "step": 43510 + }, + { + "epoch": 1.680373759604618, + "grad_norm": 3.5026352405548096, + "learning_rate": 8.797765679498566e-05, + "loss": 0.3431, + "step": 43520 + }, + { + "epoch": 1.6807598748986448, + "grad_norm": 1.1792393922805786, + "learning_rate": 8.795191577538387e-05, + "loss": 0.3389, + "step": 43530 + }, + { + "epoch": 1.6811459901926715, + "grad_norm": 1.0513951778411865, + "learning_rate": 8.792617475578208e-05, + "loss": 0.4476, + "step": 43540 + }, + { + "epoch": 1.6815321054866983, + "grad_norm": 1.3215738534927368, + "learning_rate": 8.79004337361803e-05, + "loss": 0.1624, + "step": 43550 + }, + { + "epoch": 1.6819182207807253, + "grad_norm": 0.4807497262954712, + "learning_rate": 8.787469271657851e-05, + "loss": 0.2777, + "step": 43560 + }, + { + "epoch": 1.6823043360747518, + "grad_norm": 1.1114505529403687, + "learning_rate": 8.784895169697672e-05, + "loss": 0.1405, + "step": 43570 + }, + { + "epoch": 1.6826904513687788, + "grad_norm": 0.8023913502693176, + "learning_rate": 8.782321067737494e-05, + "loss": 0.1744, + "step": 43580 + }, + { + "epoch": 1.6830765666628054, + "grad_norm": 1.1723856925964355, + "learning_rate": 8.779746965777315e-05, + "loss": 0.214, + "step": 43590 + }, + { + "epoch": 1.6834626819568324, + "grad_norm": 2.1813528537750244, + "learning_rate": 8.777172863817136e-05, + "loss": 0.2897, + "step": 43600 + }, + { + "epoch": 1.6838487972508591, + "grad_norm": 0.2930634319782257, + "learning_rate": 8.774598761856958e-05, + "loss": 0.121, + "step": 43610 + }, + { + "epoch": 1.6842349125448859, + "grad_norm": 0.8081830739974976, + "learning_rate": 8.772024659896779e-05, + "loss": 0.2847, + "step": 43620 + }, + { + "epoch": 1.6846210278389127, + "grad_norm": 0.8421902060508728, + "learning_rate": 8.7694505579366e-05, + "loss": 0.3151, + "step": 43630 + }, + { + "epoch": 1.6850071431329394, + "grad_norm": 1.6624525785446167, + "learning_rate": 8.766876455976421e-05, + "loss": 0.1725, + "step": 43640 + }, + { + "epoch": 1.6853932584269664, + "grad_norm": 0.6831340789794922, + "learning_rate": 8.764302354016243e-05, + "loss": 0.432, + "step": 43650 + }, + { + "epoch": 1.685779373720993, + "grad_norm": 1.0642820596694946, + "learning_rate": 8.761728252056064e-05, + "loss": 0.2035, + "step": 43660 + }, + { + "epoch": 1.68616548901502, + "grad_norm": 0.6445997357368469, + "learning_rate": 8.759154150095887e-05, + "loss": 0.2407, + "step": 43670 + }, + { + "epoch": 1.6865516043090467, + "grad_norm": 0.1020059660077095, + "learning_rate": 8.756580048135707e-05, + "loss": 0.1744, + "step": 43680 + }, + { + "epoch": 1.6869377196030735, + "grad_norm": 0.3709975481033325, + "learning_rate": 8.754005946175528e-05, + "loss": 0.3713, + "step": 43690 + }, + { + "epoch": 1.6873238348971002, + "grad_norm": 0.5910777449607849, + "learning_rate": 8.75143184421535e-05, + "loss": 0.4291, + "step": 43700 + }, + { + "epoch": 1.687709950191127, + "grad_norm": 2.377362012863159, + "learning_rate": 8.748857742255171e-05, + "loss": 0.2871, + "step": 43710 + }, + { + "epoch": 
1.688096065485154, + "grad_norm": 0.46812891960144043, + "learning_rate": 8.746283640294992e-05, + "loss": 0.1998, + "step": 43720 + }, + { + "epoch": 1.6884821807791806, + "grad_norm": 1.391098141670227, + "learning_rate": 8.743709538334813e-05, + "loss": 0.3189, + "step": 43730 + }, + { + "epoch": 1.6888682960732075, + "grad_norm": 0.11116664111614227, + "learning_rate": 8.741135436374636e-05, + "loss": 0.2538, + "step": 43740 + }, + { + "epoch": 1.6892544113672343, + "grad_norm": 0.2116546928882599, + "learning_rate": 8.738561334414456e-05, + "loss": 0.2014, + "step": 43750 + }, + { + "epoch": 1.689640526661261, + "grad_norm": 2.3815419673919678, + "learning_rate": 8.735987232454277e-05, + "loss": 0.2395, + "step": 43760 + }, + { + "epoch": 1.6900266419552878, + "grad_norm": 0.11581381410360336, + "learning_rate": 8.733413130494099e-05, + "loss": 0.2699, + "step": 43770 + }, + { + "epoch": 1.6904127572493146, + "grad_norm": 0.25713813304901123, + "learning_rate": 8.73083902853392e-05, + "loss": 0.1651, + "step": 43780 + }, + { + "epoch": 1.6907988725433416, + "grad_norm": 0.3496115803718567, + "learning_rate": 8.728264926573741e-05, + "loss": 0.3175, + "step": 43790 + }, + { + "epoch": 1.6911849878373681, + "grad_norm": 0.4966486394405365, + "learning_rate": 8.725690824613564e-05, + "loss": 0.258, + "step": 43800 + }, + { + "epoch": 1.6915711031313951, + "grad_norm": 1.2760952711105347, + "learning_rate": 8.723116722653385e-05, + "loss": 0.0882, + "step": 43810 + }, + { + "epoch": 1.6919572184254217, + "grad_norm": 1.0095762014389038, + "learning_rate": 8.720542620693207e-05, + "loss": 0.2171, + "step": 43820 + }, + { + "epoch": 1.6923433337194487, + "grad_norm": 3.596276044845581, + "learning_rate": 8.717968518733027e-05, + "loss": 0.238, + "step": 43830 + }, + { + "epoch": 1.6927294490134754, + "grad_norm": 0.3974495530128479, + "learning_rate": 8.715394416772848e-05, + "loss": 0.1459, + "step": 43840 + }, + { + "epoch": 1.6931155643075022, + "grad_norm": 1.1426684856414795, + "learning_rate": 8.71282031481267e-05, + "loss": 0.297, + "step": 43850 + }, + { + "epoch": 1.693501679601529, + "grad_norm": 0.887981116771698, + "learning_rate": 8.710246212852492e-05, + "loss": 0.1999, + "step": 43860 + }, + { + "epoch": 1.6938877948955557, + "grad_norm": 2.5697977542877197, + "learning_rate": 8.707672110892313e-05, + "loss": 0.3186, + "step": 43870 + }, + { + "epoch": 1.6942739101895827, + "grad_norm": 1.0131279230117798, + "learning_rate": 8.705098008932135e-05, + "loss": 0.182, + "step": 43880 + }, + { + "epoch": 1.6946600254836093, + "grad_norm": 0.8319138288497925, + "learning_rate": 8.702523906971956e-05, + "loss": 0.2148, + "step": 43890 + }, + { + "epoch": 1.6950461407776363, + "grad_norm": 0.7270296216011047, + "learning_rate": 8.699949805011776e-05, + "loss": 0.1471, + "step": 43900 + }, + { + "epoch": 1.695432256071663, + "grad_norm": 1.2435482740402222, + "learning_rate": 8.697375703051597e-05, + "loss": 0.1446, + "step": 43910 + }, + { + "epoch": 1.6958183713656898, + "grad_norm": 0.5655059218406677, + "learning_rate": 8.69480160109142e-05, + "loss": 0.2765, + "step": 43920 + }, + { + "epoch": 1.6962044866597166, + "grad_norm": 1.7254856824874878, + "learning_rate": 8.692227499131241e-05, + "loss": 0.3449, + "step": 43930 + }, + { + "epoch": 1.6965906019537433, + "grad_norm": 0.5344254374504089, + "learning_rate": 8.689653397171063e-05, + "loss": 0.3847, + "step": 43940 + }, + { + "epoch": 1.6969767172477703, + "grad_norm": 0.7520522475242615, + "learning_rate": 
8.687079295210884e-05, + "loss": 0.2785, + "step": 43950 + }, + { + "epoch": 1.6973628325417969, + "grad_norm": 2.708897352218628, + "learning_rate": 8.684505193250705e-05, + "loss": 0.3298, + "step": 43960 + }, + { + "epoch": 1.6977489478358239, + "grad_norm": 0.06953504681587219, + "learning_rate": 8.681931091290525e-05, + "loss": 0.094, + "step": 43970 + }, + { + "epoch": 1.6981350631298504, + "grad_norm": 0.586360514163971, + "learning_rate": 8.679356989330347e-05, + "loss": 0.2969, + "step": 43980 + }, + { + "epoch": 1.6985211784238774, + "grad_norm": 0.5543690919876099, + "learning_rate": 8.67678288737017e-05, + "loss": 0.1414, + "step": 43990 + }, + { + "epoch": 1.6989072937179042, + "grad_norm": 1.7767741680145264, + "learning_rate": 8.674208785409991e-05, + "loss": 0.3374, + "step": 44000 + }, + { + "epoch": 1.699293409011931, + "grad_norm": 1.463431477546692, + "learning_rate": 8.671634683449812e-05, + "loss": 0.2547, + "step": 44010 + }, + { + "epoch": 1.699679524305958, + "grad_norm": 0.3490108251571655, + "learning_rate": 8.669060581489633e-05, + "loss": 0.2969, + "step": 44020 + }, + { + "epoch": 1.7000656395999845, + "grad_norm": 0.4525464177131653, + "learning_rate": 8.666486479529455e-05, + "loss": 0.2982, + "step": 44030 + }, + { + "epoch": 1.7004517548940115, + "grad_norm": 0.3723921775817871, + "learning_rate": 8.663912377569276e-05, + "loss": 0.2818, + "step": 44040 + }, + { + "epoch": 1.700837870188038, + "grad_norm": 0.5301186442375183, + "learning_rate": 8.661338275609097e-05, + "loss": 0.1871, + "step": 44050 + }, + { + "epoch": 1.701223985482065, + "grad_norm": 2.890627861022949, + "learning_rate": 8.658764173648919e-05, + "loss": 0.3608, + "step": 44060 + }, + { + "epoch": 1.7016101007760918, + "grad_norm": 1.4061124324798584, + "learning_rate": 8.65619007168874e-05, + "loss": 0.203, + "step": 44070 + }, + { + "epoch": 1.7019962160701185, + "grad_norm": 2.882411241531372, + "learning_rate": 8.653615969728561e-05, + "loss": 0.2203, + "step": 44080 + }, + { + "epoch": 1.7023823313641453, + "grad_norm": 1.3079861402511597, + "learning_rate": 8.651041867768383e-05, + "loss": 0.1199, + "step": 44090 + }, + { + "epoch": 1.702768446658172, + "grad_norm": 4.284554481506348, + "learning_rate": 8.648467765808204e-05, + "loss": 0.2187, + "step": 44100 + }, + { + "epoch": 1.703154561952199, + "grad_norm": 0.14506328105926514, + "learning_rate": 8.645893663848025e-05, + "loss": 0.2822, + "step": 44110 + }, + { + "epoch": 1.7035406772462256, + "grad_norm": 0.48547646403312683, + "learning_rate": 8.643319561887847e-05, + "loss": 0.2239, + "step": 44120 + }, + { + "epoch": 1.7039267925402526, + "grad_norm": 1.2987794876098633, + "learning_rate": 8.640745459927668e-05, + "loss": 0.1554, + "step": 44130 + }, + { + "epoch": 1.7043129078342794, + "grad_norm": 0.25391751527786255, + "learning_rate": 8.63817135796749e-05, + "loss": 0.2635, + "step": 44140 + }, + { + "epoch": 1.7046990231283061, + "grad_norm": 1.7552175521850586, + "learning_rate": 8.635597256007311e-05, + "loss": 0.2762, + "step": 44150 + }, + { + "epoch": 1.705085138422333, + "grad_norm": 0.8608360290527344, + "learning_rate": 8.633023154047132e-05, + "loss": 0.2384, + "step": 44160 + }, + { + "epoch": 1.7054712537163597, + "grad_norm": 2.4618284702301025, + "learning_rate": 8.630449052086955e-05, + "loss": 0.3383, + "step": 44170 + }, + { + "epoch": 1.7058573690103866, + "grad_norm": 0.9636523127555847, + "learning_rate": 8.627874950126775e-05, + "loss": 0.2036, + "step": 44180 + }, + { + "epoch": 
1.7062434843044132, + "grad_norm": 0.3806803226470947, + "learning_rate": 8.625300848166596e-05, + "loss": 0.1378, + "step": 44190 + }, + { + "epoch": 1.7066295995984402, + "grad_norm": 0.31704220175743103, + "learning_rate": 8.622726746206417e-05, + "loss": 0.2171, + "step": 44200 + }, + { + "epoch": 1.7070157148924667, + "grad_norm": 2.3355181217193604, + "learning_rate": 8.620152644246239e-05, + "loss": 0.2983, + "step": 44210 + }, + { + "epoch": 1.7074018301864937, + "grad_norm": 1.3819530010223389, + "learning_rate": 8.61757854228606e-05, + "loss": 0.3944, + "step": 44220 + }, + { + "epoch": 1.7077879454805205, + "grad_norm": 0.5096393823623657, + "learning_rate": 8.615004440325883e-05, + "loss": 0.3142, + "step": 44230 + }, + { + "epoch": 1.7081740607745473, + "grad_norm": 0.7113396525382996, + "learning_rate": 8.612430338365704e-05, + "loss": 0.1644, + "step": 44240 + }, + { + "epoch": 1.708560176068574, + "grad_norm": 0.3259173333644867, + "learning_rate": 8.609856236405524e-05, + "loss": 0.208, + "step": 44250 + }, + { + "epoch": 1.7089462913626008, + "grad_norm": 0.1341869980096817, + "learning_rate": 8.607282134445345e-05, + "loss": 0.1642, + "step": 44260 + }, + { + "epoch": 1.7093324066566278, + "grad_norm": 1.0947731733322144, + "learning_rate": 8.604708032485167e-05, + "loss": 0.3079, + "step": 44270 + }, + { + "epoch": 1.7097185219506543, + "grad_norm": 1.065469741821289, + "learning_rate": 8.602133930524988e-05, + "loss": 0.2845, + "step": 44280 + }, + { + "epoch": 1.7101046372446813, + "grad_norm": 0.23346304893493652, + "learning_rate": 8.59955982856481e-05, + "loss": 0.1428, + "step": 44290 + }, + { + "epoch": 1.710490752538708, + "grad_norm": 0.9997304677963257, + "learning_rate": 8.596985726604632e-05, + "loss": 0.2313, + "step": 44300 + }, + { + "epoch": 1.7108768678327348, + "grad_norm": 0.2540823519229889, + "learning_rate": 8.594411624644453e-05, + "loss": 0.131, + "step": 44310 + }, + { + "epoch": 1.7112629831267616, + "grad_norm": 0.35752159357070923, + "learning_rate": 8.591837522684273e-05, + "loss": 0.2414, + "step": 44320 + }, + { + "epoch": 1.7116490984207884, + "grad_norm": 0.9997861385345459, + "learning_rate": 8.589263420724095e-05, + "loss": 0.2726, + "step": 44330 + }, + { + "epoch": 1.7120352137148154, + "grad_norm": 0.1516636461019516, + "learning_rate": 8.586689318763916e-05, + "loss": 0.0939, + "step": 44340 + }, + { + "epoch": 1.712421329008842, + "grad_norm": 1.5101124048233032, + "learning_rate": 8.584115216803737e-05, + "loss": 0.1331, + "step": 44350 + }, + { + "epoch": 1.712807444302869, + "grad_norm": 1.8988938331604004, + "learning_rate": 8.58154111484356e-05, + "loss": 0.104, + "step": 44360 + }, + { + "epoch": 1.7131935595968957, + "grad_norm": 0.50355464220047, + "learning_rate": 8.578967012883381e-05, + "loss": 0.2283, + "step": 44370 + }, + { + "epoch": 1.7135796748909224, + "grad_norm": 1.3266645669937134, + "learning_rate": 8.576392910923203e-05, + "loss": 0.2639, + "step": 44380 + }, + { + "epoch": 1.7139657901849492, + "grad_norm": 0.5714776515960693, + "learning_rate": 8.573818808963023e-05, + "loss": 0.3018, + "step": 44390 + }, + { + "epoch": 1.714351905478976, + "grad_norm": 2.2676408290863037, + "learning_rate": 8.571244707002844e-05, + "loss": 0.1851, + "step": 44400 + }, + { + "epoch": 1.714738020773003, + "grad_norm": 1.338866114616394, + "learning_rate": 8.568670605042665e-05, + "loss": 0.198, + "step": 44410 + }, + { + "epoch": 1.7151241360670295, + "grad_norm": 0.7841195464134216, + "learning_rate": 
8.566096503082488e-05, + "loss": 0.1689, + "step": 44420 + }, + { + "epoch": 1.7155102513610565, + "grad_norm": 2.0101919174194336, + "learning_rate": 8.56352240112231e-05, + "loss": 0.278, + "step": 44430 + }, + { + "epoch": 1.715896366655083, + "grad_norm": 0.3661075830459595, + "learning_rate": 8.560948299162131e-05, + "loss": 0.19, + "step": 44440 + }, + { + "epoch": 1.71628248194911, + "grad_norm": 1.3221279382705688, + "learning_rate": 8.558374197201952e-05, + "loss": 0.2621, + "step": 44450 + }, + { + "epoch": 1.7166685972431368, + "grad_norm": 1.1006388664245605, + "learning_rate": 8.555800095241773e-05, + "loss": 0.1661, + "step": 44460 + }, + { + "epoch": 1.7170547125371636, + "grad_norm": 0.6990749835968018, + "learning_rate": 8.553225993281593e-05, + "loss": 0.1234, + "step": 44470 + }, + { + "epoch": 1.7174408278311903, + "grad_norm": 1.1236909627914429, + "learning_rate": 8.550651891321416e-05, + "loss": 0.3528, + "step": 44480 + }, + { + "epoch": 1.717826943125217, + "grad_norm": 1.3502060174942017, + "learning_rate": 8.548077789361237e-05, + "loss": 0.1035, + "step": 44490 + }, + { + "epoch": 1.718213058419244, + "grad_norm": 0.6443360447883606, + "learning_rate": 8.545503687401059e-05, + "loss": 0.257, + "step": 44500 + }, + { + "epoch": 1.7185991737132706, + "grad_norm": 1.864953875541687, + "learning_rate": 8.54292958544088e-05, + "loss": 0.1791, + "step": 44510 + }, + { + "epoch": 1.7189852890072976, + "grad_norm": 0.8403190970420837, + "learning_rate": 8.540355483480701e-05, + "loss": 0.1907, + "step": 44520 + }, + { + "epoch": 1.7193714043013244, + "grad_norm": 2.308983325958252, + "learning_rate": 8.537781381520523e-05, + "loss": 0.3784, + "step": 44530 + }, + { + "epoch": 1.7197575195953512, + "grad_norm": 0.3229024112224579, + "learning_rate": 8.535207279560343e-05, + "loss": 0.2405, + "step": 44540 + }, + { + "epoch": 1.720143634889378, + "grad_norm": 0.5669896602630615, + "learning_rate": 8.532633177600165e-05, + "loss": 0.2103, + "step": 44550 + }, + { + "epoch": 1.7205297501834047, + "grad_norm": 0.9105948805809021, + "learning_rate": 8.530059075639987e-05, + "loss": 0.1816, + "step": 44560 + }, + { + "epoch": 1.7209158654774317, + "grad_norm": 0.4213886260986328, + "learning_rate": 8.527484973679808e-05, + "loss": 0.1735, + "step": 44570 + }, + { + "epoch": 1.7213019807714582, + "grad_norm": 2.9891350269317627, + "learning_rate": 8.52491087171963e-05, + "loss": 0.3105, + "step": 44580 + }, + { + "epoch": 1.7216880960654852, + "grad_norm": 2.2383408546447754, + "learning_rate": 8.52233676975945e-05, + "loss": 0.2677, + "step": 44590 + }, + { + "epoch": 1.722074211359512, + "grad_norm": 0.128885418176651, + "learning_rate": 8.519762667799272e-05, + "loss": 0.2, + "step": 44600 + }, + { + "epoch": 1.7224603266535388, + "grad_norm": 1.021690845489502, + "learning_rate": 8.517188565839093e-05, + "loss": 0.3636, + "step": 44610 + }, + { + "epoch": 1.7228464419475655, + "grad_norm": 3.629401445388794, + "learning_rate": 8.514614463878915e-05, + "loss": 0.3223, + "step": 44620 + }, + { + "epoch": 1.7232325572415923, + "grad_norm": 2.4377548694610596, + "learning_rate": 8.512040361918736e-05, + "loss": 0.2448, + "step": 44630 + }, + { + "epoch": 1.7236186725356193, + "grad_norm": 0.6085236668586731, + "learning_rate": 8.509466259958557e-05, + "loss": 0.2202, + "step": 44640 + }, + { + "epoch": 1.7240047878296458, + "grad_norm": 1.4205585718154907, + "learning_rate": 8.506892157998379e-05, + "loss": 0.1697, + "step": 44650 + }, + { + "epoch": 1.7243909031236728, + 
"grad_norm": 2.6096320152282715, + "learning_rate": 8.5043180560382e-05, + "loss": 0.1657, + "step": 44660 + }, + { + "epoch": 1.7247770184176994, + "grad_norm": 0.155767560005188, + "learning_rate": 8.501743954078021e-05, + "loss": 0.162, + "step": 44670 + }, + { + "epoch": 1.7251631337117264, + "grad_norm": 0.30843400955200195, + "learning_rate": 8.499169852117843e-05, + "loss": 0.2844, + "step": 44680 + }, + { + "epoch": 1.7255492490057531, + "grad_norm": 0.7207142114639282, + "learning_rate": 8.496595750157664e-05, + "loss": 0.3617, + "step": 44690 + }, + { + "epoch": 1.72593536429978, + "grad_norm": 0.6151508092880249, + "learning_rate": 8.494021648197485e-05, + "loss": 0.1202, + "step": 44700 + }, + { + "epoch": 1.7263214795938067, + "grad_norm": 1.712505578994751, + "learning_rate": 8.491447546237307e-05, + "loss": 0.3624, + "step": 44710 + }, + { + "epoch": 1.7267075948878334, + "grad_norm": 2.3895373344421387, + "learning_rate": 8.488873444277128e-05, + "loss": 0.3347, + "step": 44720 + }, + { + "epoch": 1.7270937101818604, + "grad_norm": 0.3279499411582947, + "learning_rate": 8.48629934231695e-05, + "loss": 0.1821, + "step": 44730 + }, + { + "epoch": 1.727479825475887, + "grad_norm": 0.9812091588973999, + "learning_rate": 8.48372524035677e-05, + "loss": 0.3173, + "step": 44740 + }, + { + "epoch": 1.727865940769914, + "grad_norm": 0.36781829595565796, + "learning_rate": 8.481151138396592e-05, + "loss": 0.2766, + "step": 44750 + }, + { + "epoch": 1.7282520560639407, + "grad_norm": 2.1118052005767822, + "learning_rate": 8.478577036436413e-05, + "loss": 0.4954, + "step": 44760 + }, + { + "epoch": 1.7286381713579675, + "grad_norm": 0.7159673571586609, + "learning_rate": 8.476002934476235e-05, + "loss": 0.3877, + "step": 44770 + }, + { + "epoch": 1.7290242866519943, + "grad_norm": 0.16327527165412903, + "learning_rate": 8.473428832516056e-05, + "loss": 0.2975, + "step": 44780 + }, + { + "epoch": 1.729410401946021, + "grad_norm": 1.2890655994415283, + "learning_rate": 8.470854730555877e-05, + "loss": 0.2056, + "step": 44790 + }, + { + "epoch": 1.729796517240048, + "grad_norm": 0.7911620140075684, + "learning_rate": 8.4682806285957e-05, + "loss": 0.2162, + "step": 44800 + }, + { + "epoch": 1.7301826325340746, + "grad_norm": 0.1271449327468872, + "learning_rate": 8.465706526635521e-05, + "loss": 0.1821, + "step": 44810 + }, + { + "epoch": 1.7305687478281016, + "grad_norm": 1.1765756607055664, + "learning_rate": 8.463132424675341e-05, + "loss": 0.1826, + "step": 44820 + }, + { + "epoch": 1.7309548631221283, + "grad_norm": 1.5291017293930054, + "learning_rate": 8.460558322715163e-05, + "loss": 0.2125, + "step": 44830 + }, + { + "epoch": 1.731340978416155, + "grad_norm": 1.1815464496612549, + "learning_rate": 8.457984220754984e-05, + "loss": 0.1488, + "step": 44840 + }, + { + "epoch": 1.7317270937101819, + "grad_norm": 2.127589464187622, + "learning_rate": 8.455410118794805e-05, + "loss": 0.3758, + "step": 44850 + }, + { + "epoch": 1.7321132090042086, + "grad_norm": 0.5369740724563599, + "learning_rate": 8.452836016834628e-05, + "loss": 0.2725, + "step": 44860 + }, + { + "epoch": 1.7324993242982356, + "grad_norm": 1.038955807685852, + "learning_rate": 8.450261914874449e-05, + "loss": 0.233, + "step": 44870 + }, + { + "epoch": 1.7328854395922622, + "grad_norm": 1.5458402633666992, + "learning_rate": 8.44768781291427e-05, + "loss": 0.3846, + "step": 44880 + }, + { + "epoch": 1.7332715548862891, + "grad_norm": 0.8488010168075562, + "learning_rate": 8.44511371095409e-05, + "loss": 0.2022, + 
"step": 44890 + }, + { + "epoch": 1.7336576701803157, + "grad_norm": 2.4855663776397705, + "learning_rate": 8.442539608993912e-05, + "loss": 0.2071, + "step": 44900 + }, + { + "epoch": 1.7340437854743427, + "grad_norm": 5.918638229370117, + "learning_rate": 8.439965507033733e-05, + "loss": 0.1962, + "step": 44910 + }, + { + "epoch": 1.7344299007683694, + "grad_norm": 1.9365994930267334, + "learning_rate": 8.437391405073556e-05, + "loss": 0.1313, + "step": 44920 + }, + { + "epoch": 1.7348160160623962, + "grad_norm": 1.1711100339889526, + "learning_rate": 8.434817303113377e-05, + "loss": 0.2938, + "step": 44930 + }, + { + "epoch": 1.735202131356423, + "grad_norm": 1.0350059270858765, + "learning_rate": 8.432243201153199e-05, + "loss": 0.1813, + "step": 44940 + }, + { + "epoch": 1.7355882466504498, + "grad_norm": 0.9831900596618652, + "learning_rate": 8.42966909919302e-05, + "loss": 0.1572, + "step": 44950 + }, + { + "epoch": 1.7359743619444767, + "grad_norm": 2.4830219745635986, + "learning_rate": 8.42709499723284e-05, + "loss": 0.2408, + "step": 44960 + }, + { + "epoch": 1.7363604772385033, + "grad_norm": 2.513667345046997, + "learning_rate": 8.424520895272661e-05, + "loss": 0.3392, + "step": 44970 + }, + { + "epoch": 1.7367465925325303, + "grad_norm": 1.1947827339172363, + "learning_rate": 8.421946793312484e-05, + "loss": 0.1314, + "step": 44980 + }, + { + "epoch": 1.737132707826557, + "grad_norm": 0.9299411773681641, + "learning_rate": 8.419372691352305e-05, + "loss": 0.1691, + "step": 44990 + }, + { + "epoch": 1.7375188231205838, + "grad_norm": 2.9388837814331055, + "learning_rate": 8.416798589392127e-05, + "loss": 0.389, + "step": 45000 + }, + { + "epoch": 1.7379049384146106, + "grad_norm": 0.20148181915283203, + "learning_rate": 8.414224487431948e-05, + "loss": 0.2589, + "step": 45010 + }, + { + "epoch": 1.7382910537086373, + "grad_norm": 1.1276886463165283, + "learning_rate": 8.411650385471769e-05, + "loss": 0.2772, + "step": 45020 + }, + { + "epoch": 1.7386771690026643, + "grad_norm": 0.14743736386299133, + "learning_rate": 8.40907628351159e-05, + "loss": 0.2235, + "step": 45030 + }, + { + "epoch": 1.7390632842966909, + "grad_norm": 1.7028512954711914, + "learning_rate": 8.40650218155141e-05, + "loss": 0.262, + "step": 45040 + }, + { + "epoch": 1.7394493995907179, + "grad_norm": 0.27506566047668457, + "learning_rate": 8.403928079591233e-05, + "loss": 0.1546, + "step": 45050 + }, + { + "epoch": 1.7398355148847446, + "grad_norm": 0.11555200815200806, + "learning_rate": 8.401353977631055e-05, + "loss": 0.2591, + "step": 45060 + }, + { + "epoch": 1.7402216301787714, + "grad_norm": 2.141800880432129, + "learning_rate": 8.398779875670876e-05, + "loss": 0.1974, + "step": 45070 + }, + { + "epoch": 1.7406077454727982, + "grad_norm": 0.8879682421684265, + "learning_rate": 8.396205773710697e-05, + "loss": 0.2192, + "step": 45080 + }, + { + "epoch": 1.740993860766825, + "grad_norm": 0.5697862505912781, + "learning_rate": 8.393631671750519e-05, + "loss": 0.1556, + "step": 45090 + }, + { + "epoch": 1.741379976060852, + "grad_norm": 1.5055205821990967, + "learning_rate": 8.39105756979034e-05, + "loss": 0.2199, + "step": 45100 + }, + { + "epoch": 1.7417660913548785, + "grad_norm": 1.4144301414489746, + "learning_rate": 8.388483467830161e-05, + "loss": 0.1873, + "step": 45110 + }, + { + "epoch": 1.7421522066489055, + "grad_norm": 2.419147253036499, + "learning_rate": 8.385909365869983e-05, + "loss": 0.3444, + "step": 45120 + }, + { + "epoch": 1.742538321942932, + "grad_norm": 1.1189093589782715, + 
"learning_rate": 8.383335263909804e-05, + "loss": 0.2641, + "step": 45130 + }, + { + "epoch": 1.742924437236959, + "grad_norm": 0.44919779896736145, + "learning_rate": 8.380761161949625e-05, + "loss": 0.1945, + "step": 45140 + }, + { + "epoch": 1.7433105525309858, + "grad_norm": 3.4231624603271484, + "learning_rate": 8.378187059989447e-05, + "loss": 0.372, + "step": 45150 + }, + { + "epoch": 1.7436966678250125, + "grad_norm": 1.382497787475586, + "learning_rate": 8.375612958029268e-05, + "loss": 0.1532, + "step": 45160 + }, + { + "epoch": 1.7440827831190393, + "grad_norm": 1.9219565391540527, + "learning_rate": 8.373038856069089e-05, + "loss": 0.3255, + "step": 45170 + }, + { + "epoch": 1.744468898413066, + "grad_norm": 1.2347924709320068, + "learning_rate": 8.37046475410891e-05, + "loss": 0.2786, + "step": 45180 + }, + { + "epoch": 1.744855013707093, + "grad_norm": 0.1424872726202011, + "learning_rate": 8.367890652148732e-05, + "loss": 0.2561, + "step": 45190 + }, + { + "epoch": 1.7452411290011196, + "grad_norm": 0.1864137351512909, + "learning_rate": 8.365316550188553e-05, + "loss": 0.4099, + "step": 45200 + }, + { + "epoch": 1.7456272442951466, + "grad_norm": 0.13210314512252808, + "learning_rate": 8.362742448228375e-05, + "loss": 0.2684, + "step": 45210 + }, + { + "epoch": 1.7460133595891734, + "grad_norm": 0.14317531883716583, + "learning_rate": 8.360168346268196e-05, + "loss": 0.1777, + "step": 45220 + }, + { + "epoch": 1.7463994748832001, + "grad_norm": 0.9194528460502625, + "learning_rate": 8.357594244308019e-05, + "loss": 0.2047, + "step": 45230 + }, + { + "epoch": 1.746785590177227, + "grad_norm": 0.13146016001701355, + "learning_rate": 8.355020142347839e-05, + "loss": 0.1659, + "step": 45240 + }, + { + "epoch": 1.7471717054712537, + "grad_norm": 0.804329514503479, + "learning_rate": 8.35244604038766e-05, + "loss": 0.1685, + "step": 45250 + }, + { + "epoch": 1.7475578207652807, + "grad_norm": 2.3637967109680176, + "learning_rate": 8.349871938427481e-05, + "loss": 0.234, + "step": 45260 + }, + { + "epoch": 1.7479439360593072, + "grad_norm": 0.550857663154602, + "learning_rate": 8.347297836467303e-05, + "loss": 0.2283, + "step": 45270 + }, + { + "epoch": 1.7483300513533342, + "grad_norm": 0.5919561982154846, + "learning_rate": 8.344723734507124e-05, + "loss": 0.229, + "step": 45280 + }, + { + "epoch": 1.7487161666473607, + "grad_norm": 2.1318750381469727, + "learning_rate": 8.342149632546945e-05, + "loss": 0.2161, + "step": 45290 + }, + { + "epoch": 1.7491022819413877, + "grad_norm": 2.0451972484588623, + "learning_rate": 8.339575530586768e-05, + "loss": 0.1845, + "step": 45300 + }, + { + "epoch": 1.7494883972354145, + "grad_norm": 0.1041000485420227, + "learning_rate": 8.337001428626588e-05, + "loss": 0.1473, + "step": 45310 + }, + { + "epoch": 1.7498745125294413, + "grad_norm": 1.8641316890716553, + "learning_rate": 8.334427326666409e-05, + "loss": 0.2287, + "step": 45320 + }, + { + "epoch": 1.7502606278234683, + "grad_norm": 0.5701905488967896, + "learning_rate": 8.33185322470623e-05, + "loss": 0.1356, + "step": 45330 + }, + { + "epoch": 1.7506467431174948, + "grad_norm": 0.78929603099823, + "learning_rate": 8.329279122746052e-05, + "loss": 0.1407, + "step": 45340 + }, + { + "epoch": 1.7510328584115218, + "grad_norm": 1.4745780229568481, + "learning_rate": 8.326705020785873e-05, + "loss": 0.2731, + "step": 45350 + }, + { + "epoch": 1.7514189737055483, + "grad_norm": 0.5546283721923828, + "learning_rate": 8.324130918825696e-05, + "loss": 0.252, + "step": 45360 + }, + { + "epoch": 
1.7518050889995753, + "grad_norm": 0.6111850142478943, + "learning_rate": 8.321556816865517e-05, + "loss": 0.3657, + "step": 45370 + }, + { + "epoch": 1.752191204293602, + "grad_norm": 0.9489399790763855, + "learning_rate": 8.318982714905339e-05, + "loss": 0.2214, + "step": 45380 + }, + { + "epoch": 1.7525773195876289, + "grad_norm": 0.2951168119907379, + "learning_rate": 8.316408612945159e-05, + "loss": 0.2425, + "step": 45390 + }, + { + "epoch": 1.7529634348816556, + "grad_norm": 0.13124701380729675, + "learning_rate": 8.31383451098498e-05, + "loss": 0.1943, + "step": 45400 + }, + { + "epoch": 1.7533495501756824, + "grad_norm": 0.787786066532135, + "learning_rate": 8.311260409024801e-05, + "loss": 0.2009, + "step": 45410 + }, + { + "epoch": 1.7537356654697094, + "grad_norm": 0.30316632986068726, + "learning_rate": 8.308686307064624e-05, + "loss": 0.2426, + "step": 45420 + }, + { + "epoch": 1.754121780763736, + "grad_norm": 1.0703738927841187, + "learning_rate": 8.306112205104445e-05, + "loss": 0.2247, + "step": 45430 + }, + { + "epoch": 1.754507896057763, + "grad_norm": 0.2669302821159363, + "learning_rate": 8.303538103144267e-05, + "loss": 0.2076, + "step": 45440 + }, + { + "epoch": 1.7548940113517897, + "grad_norm": 2.0138015747070312, + "learning_rate": 8.300964001184088e-05, + "loss": 0.3064, + "step": 45450 + }, + { + "epoch": 1.7552801266458165, + "grad_norm": 4.026096820831299, + "learning_rate": 8.298389899223908e-05, + "loss": 0.2664, + "step": 45460 + }, + { + "epoch": 1.7556662419398432, + "grad_norm": 0.7756350636482239, + "learning_rate": 8.295815797263729e-05, + "loss": 0.224, + "step": 45470 + }, + { + "epoch": 1.75605235723387, + "grad_norm": 0.8312183618545532, + "learning_rate": 8.293241695303552e-05, + "loss": 0.393, + "step": 45480 + }, + { + "epoch": 1.756438472527897, + "grad_norm": 1.6933070421218872, + "learning_rate": 8.290667593343373e-05, + "loss": 0.2006, + "step": 45490 + }, + { + "epoch": 1.7568245878219235, + "grad_norm": 0.7471343278884888, + "learning_rate": 8.288093491383195e-05, + "loss": 0.1356, + "step": 45500 + }, + { + "epoch": 1.7572107031159505, + "grad_norm": 2.5663211345672607, + "learning_rate": 8.285519389423016e-05, + "loss": 0.2545, + "step": 45510 + }, + { + "epoch": 1.757596818409977, + "grad_norm": 2.9056997299194336, + "learning_rate": 8.282945287462837e-05, + "loss": 0.2321, + "step": 45520 + }, + { + "epoch": 1.757982933704004, + "grad_norm": 0.6775566935539246, + "learning_rate": 8.280371185502657e-05, + "loss": 0.1703, + "step": 45530 + }, + { + "epoch": 1.7583690489980308, + "grad_norm": 1.0130302906036377, + "learning_rate": 8.27779708354248e-05, + "loss": 0.3204, + "step": 45540 + }, + { + "epoch": 1.7587551642920576, + "grad_norm": 0.8566673398017883, + "learning_rate": 8.275222981582301e-05, + "loss": 0.2069, + "step": 45550 + }, + { + "epoch": 1.7591412795860843, + "grad_norm": 1.0929473638534546, + "learning_rate": 8.272648879622122e-05, + "loss": 0.2631, + "step": 45560 + }, + { + "epoch": 1.7595273948801111, + "grad_norm": 1.7695512771606445, + "learning_rate": 8.270074777661944e-05, + "loss": 0.2028, + "step": 45570 + }, + { + "epoch": 1.759913510174138, + "grad_norm": 1.5217735767364502, + "learning_rate": 8.267500675701765e-05, + "loss": 0.1925, + "step": 45580 + }, + { + "epoch": 1.7602996254681647, + "grad_norm": 0.639868438243866, + "learning_rate": 8.264926573741586e-05, + "loss": 0.3473, + "step": 45590 + }, + { + "epoch": 1.7606857407621916, + "grad_norm": 0.3286214768886566, + "learning_rate": 
8.262352471781408e-05, + "loss": 0.1136, + "step": 45600 + }, + { + "epoch": 1.7610718560562184, + "grad_norm": 1.0162252187728882, + "learning_rate": 8.259778369821229e-05, + "loss": 0.1108, + "step": 45610 + }, + { + "epoch": 1.7614579713502452, + "grad_norm": 0.8821909427642822, + "learning_rate": 8.25720426786105e-05, + "loss": 0.2852, + "step": 45620 + }, + { + "epoch": 1.761844086644272, + "grad_norm": 0.7297468781471252, + "learning_rate": 8.254630165900872e-05, + "loss": 0.187, + "step": 45630 + }, + { + "epoch": 1.7622302019382987, + "grad_norm": 2.8545424938201904, + "learning_rate": 8.252056063940693e-05, + "loss": 0.2441, + "step": 45640 + }, + { + "epoch": 1.7626163172323257, + "grad_norm": 0.5221932530403137, + "learning_rate": 8.249481961980514e-05, + "loss": 0.1934, + "step": 45650 + }, + { + "epoch": 1.7630024325263522, + "grad_norm": 1.1896415948867798, + "learning_rate": 8.246907860020336e-05, + "loss": 0.2149, + "step": 45660 + }, + { + "epoch": 1.7633885478203792, + "grad_norm": 0.28229987621307373, + "learning_rate": 8.244333758060157e-05, + "loss": 0.2514, + "step": 45670 + }, + { + "epoch": 1.763774663114406, + "grad_norm": 1.2415190935134888, + "learning_rate": 8.241759656099978e-05, + "loss": 0.4076, + "step": 45680 + }, + { + "epoch": 1.7641607784084328, + "grad_norm": 2.535752296447754, + "learning_rate": 8.2391855541398e-05, + "loss": 0.1628, + "step": 45690 + }, + { + "epoch": 1.7645468937024595, + "grad_norm": 1.8700125217437744, + "learning_rate": 8.236611452179621e-05, + "loss": 0.2549, + "step": 45700 + }, + { + "epoch": 1.7649330089964863, + "grad_norm": 0.5365914702415466, + "learning_rate": 8.234037350219442e-05, + "loss": 0.0801, + "step": 45710 + }, + { + "epoch": 1.7653191242905133, + "grad_norm": 3.5979111194610596, + "learning_rate": 8.231463248259264e-05, + "loss": 0.3129, + "step": 45720 + }, + { + "epoch": 1.7657052395845398, + "grad_norm": 2.5577943325042725, + "learning_rate": 8.228889146299086e-05, + "loss": 0.2171, + "step": 45730 + }, + { + "epoch": 1.7660913548785668, + "grad_norm": 2.128424882888794, + "learning_rate": 8.226315044338906e-05, + "loss": 0.1687, + "step": 45740 + }, + { + "epoch": 1.7664774701725934, + "grad_norm": 0.41651803255081177, + "learning_rate": 8.223740942378728e-05, + "loss": 0.2342, + "step": 45750 + }, + { + "epoch": 1.7668635854666204, + "grad_norm": 0.8309571146965027, + "learning_rate": 8.221166840418549e-05, + "loss": 0.2406, + "step": 45760 + }, + { + "epoch": 1.7672497007606471, + "grad_norm": 1.7219020128250122, + "learning_rate": 8.21859273845837e-05, + "loss": 0.2764, + "step": 45770 + }, + { + "epoch": 1.767635816054674, + "grad_norm": 1.5574841499328613, + "learning_rate": 8.216018636498192e-05, + "loss": 0.3413, + "step": 45780 + }, + { + "epoch": 1.7680219313487007, + "grad_norm": 0.48963573575019836, + "learning_rate": 8.213444534538014e-05, + "loss": 0.2381, + "step": 45790 + }, + { + "epoch": 1.7684080466427274, + "grad_norm": 2.597996711730957, + "learning_rate": 8.210870432577836e-05, + "loss": 0.2625, + "step": 45800 + }, + { + "epoch": 1.7687941619367544, + "grad_norm": 1.0208812952041626, + "learning_rate": 8.208296330617656e-05, + "loss": 0.3519, + "step": 45810 + }, + { + "epoch": 1.769180277230781, + "grad_norm": 0.6976078748703003, + "learning_rate": 8.205722228657477e-05, + "loss": 0.2075, + "step": 45820 + }, + { + "epoch": 1.769566392524808, + "grad_norm": 1.534698247909546, + "learning_rate": 8.203148126697298e-05, + "loss": 0.1831, + "step": 45830 + }, + { + "epoch": 
1.7699525078188347, + "grad_norm": 1.5414319038391113, + "learning_rate": 8.20057402473712e-05, + "loss": 0.2466, + "step": 45840 + }, + { + "epoch": 1.7703386231128615, + "grad_norm": 0.6082043051719666, + "learning_rate": 8.197999922776941e-05, + "loss": 0.2598, + "step": 45850 + }, + { + "epoch": 1.7707247384068883, + "grad_norm": 1.3977060317993164, + "learning_rate": 8.195425820816764e-05, + "loss": 0.2733, + "step": 45860 + }, + { + "epoch": 1.771110853700915, + "grad_norm": 1.2991368770599365, + "learning_rate": 8.192851718856585e-05, + "loss": 0.3078, + "step": 45870 + }, + { + "epoch": 1.771496968994942, + "grad_norm": 0.2851855754852295, + "learning_rate": 8.190277616896405e-05, + "loss": 0.3315, + "step": 45880 + }, + { + "epoch": 1.7718830842889686, + "grad_norm": 0.7140517234802246, + "learning_rate": 8.187703514936226e-05, + "loss": 0.2454, + "step": 45890 + }, + { + "epoch": 1.7722691995829956, + "grad_norm": 2.3793671131134033, + "learning_rate": 8.185129412976048e-05, + "loss": 0.3749, + "step": 45900 + }, + { + "epoch": 1.7726553148770223, + "grad_norm": 0.6969908475875854, + "learning_rate": 8.182555311015869e-05, + "loss": 0.2849, + "step": 45910 + }, + { + "epoch": 1.773041430171049, + "grad_norm": 0.7409896850585938, + "learning_rate": 8.179981209055692e-05, + "loss": 0.3614, + "step": 45920 + }, + { + "epoch": 1.7734275454650759, + "grad_norm": 2.1562981605529785, + "learning_rate": 8.177407107095513e-05, + "loss": 0.3234, + "step": 45930 + }, + { + "epoch": 1.7738136607591026, + "grad_norm": 0.8214607834815979, + "learning_rate": 8.174833005135334e-05, + "loss": 0.3747, + "step": 45940 + }, + { + "epoch": 1.7741997760531296, + "grad_norm": 1.1292701959609985, + "learning_rate": 8.172258903175154e-05, + "loss": 0.2048, + "step": 45950 + }, + { + "epoch": 1.7745858913471562, + "grad_norm": 0.968303918838501, + "learning_rate": 8.169684801214976e-05, + "loss": 0.1863, + "step": 45960 + }, + { + "epoch": 1.7749720066411832, + "grad_norm": 0.1449265480041504, + "learning_rate": 8.167110699254797e-05, + "loss": 0.2583, + "step": 45970 + }, + { + "epoch": 1.7753581219352097, + "grad_norm": 0.9772675633430481, + "learning_rate": 8.16453659729462e-05, + "loss": 0.372, + "step": 45980 + }, + { + "epoch": 1.7757442372292367, + "grad_norm": 1.8147861957550049, + "learning_rate": 8.161962495334441e-05, + "loss": 0.2404, + "step": 45990 + }, + { + "epoch": 1.7761303525232635, + "grad_norm": 1.828444004058838, + "learning_rate": 8.159388393374262e-05, + "loss": 0.4309, + "step": 46000 + }, + { + "epoch": 1.7765164678172902, + "grad_norm": 0.4886840879917145, + "learning_rate": 8.156814291414084e-05, + "loss": 0.1927, + "step": 46010 + }, + { + "epoch": 1.776902583111317, + "grad_norm": 0.7208076119422913, + "learning_rate": 8.154240189453905e-05, + "loss": 0.2186, + "step": 46020 + }, + { + "epoch": 1.7772886984053438, + "grad_norm": 0.6111328601837158, + "learning_rate": 8.151666087493725e-05, + "loss": 0.2065, + "step": 46030 + }, + { + "epoch": 1.7776748136993707, + "grad_norm": 2.601994514465332, + "learning_rate": 8.149091985533548e-05, + "loss": 0.1138, + "step": 46040 + }, + { + "epoch": 1.7780609289933973, + "grad_norm": 2.0283939838409424, + "learning_rate": 8.146517883573369e-05, + "loss": 0.3461, + "step": 46050 + }, + { + "epoch": 1.7784470442874243, + "grad_norm": 1.329325795173645, + "learning_rate": 8.14394378161319e-05, + "loss": 0.1981, + "step": 46060 + }, + { + "epoch": 1.778833159581451, + "grad_norm": 0.9197964072227478, + "learning_rate": 
8.141369679653012e-05, + "loss": 0.1853, + "step": 46070 + }, + { + "epoch": 1.7792192748754778, + "grad_norm": 0.5444622039794922, + "learning_rate": 8.138795577692833e-05, + "loss": 0.194, + "step": 46080 + }, + { + "epoch": 1.7796053901695046, + "grad_norm": 0.655691385269165, + "learning_rate": 8.136221475732654e-05, + "loss": 0.3279, + "step": 46090 + }, + { + "epoch": 1.7799915054635314, + "grad_norm": 1.0497363805770874, + "learning_rate": 8.133647373772474e-05, + "loss": 0.1208, + "step": 46100 + }, + { + "epoch": 1.7803776207575583, + "grad_norm": 1.5492208003997803, + "learning_rate": 8.131073271812297e-05, + "loss": 0.1155, + "step": 46110 + }, + { + "epoch": 1.780763736051585, + "grad_norm": 0.7815436720848083, + "learning_rate": 8.128499169852118e-05, + "loss": 0.3621, + "step": 46120 + }, + { + "epoch": 1.7811498513456119, + "grad_norm": 0.2011369913816452, + "learning_rate": 8.12592506789194e-05, + "loss": 0.1412, + "step": 46130 + }, + { + "epoch": 1.7815359666396386, + "grad_norm": 0.5177563428878784, + "learning_rate": 8.123350965931761e-05, + "loss": 0.2485, + "step": 46140 + }, + { + "epoch": 1.7819220819336654, + "grad_norm": 1.1141633987426758, + "learning_rate": 8.120776863971582e-05, + "loss": 0.2376, + "step": 46150 + }, + { + "epoch": 1.7823081972276922, + "grad_norm": 0.3764217495918274, + "learning_rate": 8.118202762011404e-05, + "loss": 0.2021, + "step": 46160 + }, + { + "epoch": 1.782694312521719, + "grad_norm": 0.7420102953910828, + "learning_rate": 8.115628660051225e-05, + "loss": 0.374, + "step": 46170 + }, + { + "epoch": 1.783080427815746, + "grad_norm": 1.4147247076034546, + "learning_rate": 8.113054558091046e-05, + "loss": 0.1202, + "step": 46180 + }, + { + "epoch": 1.7834665431097725, + "grad_norm": 0.9888404011726379, + "learning_rate": 8.110480456130868e-05, + "loss": 0.2564, + "step": 46190 + }, + { + "epoch": 1.7838526584037995, + "grad_norm": 1.2556307315826416, + "learning_rate": 8.107906354170689e-05, + "loss": 0.2403, + "step": 46200 + }, + { + "epoch": 1.784238773697826, + "grad_norm": 1.5373485088348389, + "learning_rate": 8.10533225221051e-05, + "loss": 0.23, + "step": 46210 + }, + { + "epoch": 1.784624888991853, + "grad_norm": 1.1403309106826782, + "learning_rate": 8.102758150250332e-05, + "loss": 0.2383, + "step": 46220 + }, + { + "epoch": 1.7850110042858798, + "grad_norm": 1.4236074686050415, + "learning_rate": 8.100184048290153e-05, + "loss": 0.2121, + "step": 46230 + }, + { + "epoch": 1.7853971195799065, + "grad_norm": 1.9537183046340942, + "learning_rate": 8.097609946329974e-05, + "loss": 0.2512, + "step": 46240 + }, + { + "epoch": 1.7857832348739333, + "grad_norm": 0.10802419483661652, + "learning_rate": 8.095035844369796e-05, + "loss": 0.2871, + "step": 46250 + }, + { + "epoch": 1.78616935016796, + "grad_norm": 0.9327038526535034, + "learning_rate": 8.092461742409617e-05, + "loss": 0.0487, + "step": 46260 + }, + { + "epoch": 1.786555465461987, + "grad_norm": 2.114015817642212, + "learning_rate": 8.089887640449438e-05, + "loss": 0.3128, + "step": 46270 + }, + { + "epoch": 1.7869415807560136, + "grad_norm": 1.2681559324264526, + "learning_rate": 8.08731353848926e-05, + "loss": 0.2883, + "step": 46280 + }, + { + "epoch": 1.7873276960500406, + "grad_norm": 1.0767977237701416, + "learning_rate": 8.084739436529082e-05, + "loss": 0.1632, + "step": 46290 + }, + { + "epoch": 1.7877138113440674, + "grad_norm": 0.9782537221908569, + "learning_rate": 8.082165334568902e-05, + "loss": 0.2489, + "step": 46300 + }, + { + "epoch": 1.7880999266380941, 
+ "grad_norm": 0.3115275204181671, + "learning_rate": 8.079591232608724e-05, + "loss": 0.3691, + "step": 46310 + }, + { + "epoch": 1.788486041932121, + "grad_norm": 1.7753130197525024, + "learning_rate": 8.077017130648545e-05, + "loss": 0.2362, + "step": 46320 + }, + { + "epoch": 1.7888721572261477, + "grad_norm": 1.5351624488830566, + "learning_rate": 8.074443028688366e-05, + "loss": 0.4439, + "step": 46330 + }, + { + "epoch": 1.7892582725201747, + "grad_norm": 0.15024085342884064, + "learning_rate": 8.071868926728188e-05, + "loss": 0.2309, + "step": 46340 + }, + { + "epoch": 1.7896443878142012, + "grad_norm": 0.3030475974082947, + "learning_rate": 8.069294824768009e-05, + "loss": 0.1032, + "step": 46350 + }, + { + "epoch": 1.7900305031082282, + "grad_norm": 2.4676828384399414, + "learning_rate": 8.066720722807832e-05, + "loss": 0.4874, + "step": 46360 + }, + { + "epoch": 1.790416618402255, + "grad_norm": 1.4394946098327637, + "learning_rate": 8.064146620847653e-05, + "loss": 0.1635, + "step": 46370 + }, + { + "epoch": 1.7908027336962817, + "grad_norm": 0.05110672488808632, + "learning_rate": 8.061572518887473e-05, + "loss": 0.1001, + "step": 46380 + }, + { + "epoch": 1.7911888489903085, + "grad_norm": 0.9311584830284119, + "learning_rate": 8.058998416927294e-05, + "loss": 0.288, + "step": 46390 + }, + { + "epoch": 1.7915749642843353, + "grad_norm": 0.869853675365448, + "learning_rate": 8.056424314967116e-05, + "loss": 0.3104, + "step": 46400 + }, + { + "epoch": 1.7919610795783623, + "grad_norm": 1.8579514026641846, + "learning_rate": 8.053850213006937e-05, + "loss": 0.1846, + "step": 46410 + }, + { + "epoch": 1.7923471948723888, + "grad_norm": 1.0974096059799194, + "learning_rate": 8.05127611104676e-05, + "loss": 0.2144, + "step": 46420 + }, + { + "epoch": 1.7927333101664158, + "grad_norm": 1.3006702661514282, + "learning_rate": 8.048702009086581e-05, + "loss": 0.4802, + "step": 46430 + }, + { + "epoch": 1.7931194254604423, + "grad_norm": 0.41811448335647583, + "learning_rate": 8.046127907126402e-05, + "loss": 0.3352, + "step": 46440 + }, + { + "epoch": 1.7935055407544693, + "grad_norm": 1.1778554916381836, + "learning_rate": 8.043553805166222e-05, + "loss": 0.2485, + "step": 46450 + }, + { + "epoch": 1.793891656048496, + "grad_norm": 0.10794985294342041, + "learning_rate": 8.040979703206044e-05, + "loss": 0.3235, + "step": 46460 + }, + { + "epoch": 1.7942777713425229, + "grad_norm": 0.6999881267547607, + "learning_rate": 8.038405601245865e-05, + "loss": 0.2148, + "step": 46470 + }, + { + "epoch": 1.7946638866365496, + "grad_norm": 1.2335025072097778, + "learning_rate": 8.035831499285688e-05, + "loss": 0.2128, + "step": 46480 + }, + { + "epoch": 1.7950500019305764, + "grad_norm": 0.23150336742401123, + "learning_rate": 8.033257397325509e-05, + "loss": 0.3316, + "step": 46490 + }, + { + "epoch": 1.7954361172246034, + "grad_norm": 2.0523316860198975, + "learning_rate": 8.03068329536533e-05, + "loss": 0.1892, + "step": 46500 + }, + { + "epoch": 1.79582223251863, + "grad_norm": 1.127738356590271, + "learning_rate": 8.028109193405152e-05, + "loss": 0.2521, + "step": 46510 + }, + { + "epoch": 1.796208347812657, + "grad_norm": 1.5948694944381714, + "learning_rate": 8.025535091444972e-05, + "loss": 0.3407, + "step": 46520 + }, + { + "epoch": 1.7965944631066837, + "grad_norm": 0.6383172273635864, + "learning_rate": 8.022960989484793e-05, + "loss": 0.271, + "step": 46530 + }, + { + "epoch": 1.7969805784007105, + "grad_norm": 1.016988754272461, + "learning_rate": 8.020386887524616e-05, + "loss": 
0.1699, + "step": 46540 + }, + { + "epoch": 1.7973666936947372, + "grad_norm": 0.7680085897445679, + "learning_rate": 8.017812785564437e-05, + "loss": 0.193, + "step": 46550 + }, + { + "epoch": 1.797752808988764, + "grad_norm": 0.36858290433883667, + "learning_rate": 8.015238683604258e-05, + "loss": 0.2332, + "step": 46560 + }, + { + "epoch": 1.798138924282791, + "grad_norm": 6.89350700378418, + "learning_rate": 8.01266458164408e-05, + "loss": 0.6444, + "step": 46570 + }, + { + "epoch": 1.7985250395768175, + "grad_norm": 0.3459351360797882, + "learning_rate": 8.010090479683901e-05, + "loss": 0.2327, + "step": 46580 + }, + { + "epoch": 1.7989111548708445, + "grad_norm": 1.195894718170166, + "learning_rate": 8.007516377723722e-05, + "loss": 0.2167, + "step": 46590 + }, + { + "epoch": 1.799297270164871, + "grad_norm": 3.738119602203369, + "learning_rate": 8.004942275763542e-05, + "loss": 0.1946, + "step": 46600 + }, + { + "epoch": 1.799683385458898, + "grad_norm": 1.207139015197754, + "learning_rate": 8.002368173803365e-05, + "loss": 0.2888, + "step": 46610 + }, + { + "epoch": 1.8000695007529248, + "grad_norm": 0.2352428138256073, + "learning_rate": 7.999794071843186e-05, + "loss": 0.301, + "step": 46620 + }, + { + "epoch": 1.8004556160469516, + "grad_norm": 0.4833974242210388, + "learning_rate": 7.997219969883008e-05, + "loss": 0.1984, + "step": 46630 + }, + { + "epoch": 1.8008417313409786, + "grad_norm": 0.27819526195526123, + "learning_rate": 7.994645867922829e-05, + "loss": 0.229, + "step": 46640 + }, + { + "epoch": 1.8012278466350051, + "grad_norm": 1.2809014320373535, + "learning_rate": 7.99207176596265e-05, + "loss": 0.328, + "step": 46650 + }, + { + "epoch": 1.8016139619290321, + "grad_norm": 3.3877575397491455, + "learning_rate": 7.989497664002472e-05, + "loss": 0.236, + "step": 46660 + }, + { + "epoch": 1.8020000772230587, + "grad_norm": 0.5104680061340332, + "learning_rate": 7.986923562042293e-05, + "loss": 0.1853, + "step": 46670 + }, + { + "epoch": 1.8023861925170857, + "grad_norm": 0.6869563460350037, + "learning_rate": 7.984349460082114e-05, + "loss": 0.1743, + "step": 46680 + }, + { + "epoch": 1.8027723078111124, + "grad_norm": 1.9329041242599487, + "learning_rate": 7.981775358121936e-05, + "loss": 0.3057, + "step": 46690 + }, + { + "epoch": 1.8031584231051392, + "grad_norm": 0.9997574090957642, + "learning_rate": 7.979201256161757e-05, + "loss": 0.2145, + "step": 46700 + }, + { + "epoch": 1.803544538399166, + "grad_norm": 1.173745036125183, + "learning_rate": 7.976627154201578e-05, + "loss": 0.2315, + "step": 46710 + }, + { + "epoch": 1.8039306536931927, + "grad_norm": 0.9252222180366516, + "learning_rate": 7.9740530522414e-05, + "loss": 0.3173, + "step": 46720 + }, + { + "epoch": 1.8043167689872197, + "grad_norm": 1.316101312637329, + "learning_rate": 7.971478950281221e-05, + "loss": 0.1249, + "step": 46730 + }, + { + "epoch": 1.8047028842812463, + "grad_norm": 1.1243810653686523, + "learning_rate": 7.968904848321042e-05, + "loss": 0.3688, + "step": 46740 + }, + { + "epoch": 1.8050889995752732, + "grad_norm": 2.3322315216064453, + "learning_rate": 7.966330746360864e-05, + "loss": 0.2379, + "step": 46750 + }, + { + "epoch": 1.8054751148693, + "grad_norm": 1.534040927886963, + "learning_rate": 7.963756644400685e-05, + "loss": 0.365, + "step": 46760 + }, + { + "epoch": 1.8058612301633268, + "grad_norm": 1.326835036277771, + "learning_rate": 7.961182542440506e-05, + "loss": 0.1909, + "step": 46770 + }, + { + "epoch": 1.8062473454573535, + "grad_norm": 1.8357387781143188, + 
"learning_rate": 7.958608440480328e-05, + "loss": 0.3187, + "step": 46780 + }, + { + "epoch": 1.8066334607513803, + "grad_norm": 0.9270907044410706, + "learning_rate": 7.95603433852015e-05, + "loss": 0.2022, + "step": 46790 + }, + { + "epoch": 1.8070195760454073, + "grad_norm": 0.5300419330596924, + "learning_rate": 7.95346023655997e-05, + "loss": 0.2015, + "step": 46800 + }, + { + "epoch": 1.8074056913394339, + "grad_norm": 0.35859522223472595, + "learning_rate": 7.950886134599792e-05, + "loss": 0.1764, + "step": 46810 + }, + { + "epoch": 1.8077918066334608, + "grad_norm": 0.5568848252296448, + "learning_rate": 7.948312032639613e-05, + "loss": 0.1862, + "step": 46820 + }, + { + "epoch": 1.8081779219274874, + "grad_norm": 0.22550754249095917, + "learning_rate": 7.945737930679434e-05, + "loss": 0.2237, + "step": 46830 + }, + { + "epoch": 1.8085640372215144, + "grad_norm": 1.2444871664047241, + "learning_rate": 7.943163828719256e-05, + "loss": 0.2077, + "step": 46840 + }, + { + "epoch": 1.8089501525155411, + "grad_norm": 0.9609934091567993, + "learning_rate": 7.940589726759077e-05, + "loss": 0.2717, + "step": 46850 + }, + { + "epoch": 1.809336267809568, + "grad_norm": 0.2863350808620453, + "learning_rate": 7.9380156247989e-05, + "loss": 0.3284, + "step": 46860 + }, + { + "epoch": 1.8097223831035947, + "grad_norm": 1.6350208520889282, + "learning_rate": 7.93544152283872e-05, + "loss": 0.0919, + "step": 46870 + }, + { + "epoch": 1.8101084983976214, + "grad_norm": 0.5808874368667603, + "learning_rate": 7.932867420878541e-05, + "loss": 0.165, + "step": 46880 + }, + { + "epoch": 1.8104946136916484, + "grad_norm": 0.5033291578292847, + "learning_rate": 7.930293318918362e-05, + "loss": 0.192, + "step": 46890 + }, + { + "epoch": 1.810880728985675, + "grad_norm": 3.8204874992370605, + "learning_rate": 7.927719216958184e-05, + "loss": 0.3885, + "step": 46900 + }, + { + "epoch": 1.811266844279702, + "grad_norm": 0.9689306020736694, + "learning_rate": 7.925145114998005e-05, + "loss": 0.2609, + "step": 46910 + }, + { + "epoch": 1.8116529595737287, + "grad_norm": 2.7360000610351562, + "learning_rate": 7.922571013037828e-05, + "loss": 0.1955, + "step": 46920 + }, + { + "epoch": 1.8120390748677555, + "grad_norm": 1.2181187868118286, + "learning_rate": 7.919996911077649e-05, + "loss": 0.251, + "step": 46930 + }, + { + "epoch": 1.8124251901617823, + "grad_norm": 0.2421361356973648, + "learning_rate": 7.91742280911747e-05, + "loss": 0.1401, + "step": 46940 + }, + { + "epoch": 1.812811305455809, + "grad_norm": 1.1567500829696655, + "learning_rate": 7.91484870715729e-05, + "loss": 0.2303, + "step": 46950 + }, + { + "epoch": 1.813197420749836, + "grad_norm": 2.173985481262207, + "learning_rate": 7.912274605197112e-05, + "loss": 0.3141, + "step": 46960 + }, + { + "epoch": 1.8135835360438626, + "grad_norm": 0.21923333406448364, + "learning_rate": 7.909700503236933e-05, + "loss": 0.3109, + "step": 46970 + }, + { + "epoch": 1.8139696513378896, + "grad_norm": 2.7012853622436523, + "learning_rate": 7.907126401276756e-05, + "loss": 0.2912, + "step": 46980 + }, + { + "epoch": 1.8143557666319163, + "grad_norm": 1.6185250282287598, + "learning_rate": 7.904552299316577e-05, + "loss": 0.1989, + "step": 46990 + }, + { + "epoch": 1.814741881925943, + "grad_norm": 1.2078602313995361, + "learning_rate": 7.901978197356398e-05, + "loss": 0.2906, + "step": 47000 + }, + { + "epoch": 1.8151279972199699, + "grad_norm": 0.8695535063743591, + "learning_rate": 7.89940409539622e-05, + "loss": 0.2486, + "step": 47010 + }, + { + "epoch": 
1.8155141125139966, + "grad_norm": 1.7880644798278809, + "learning_rate": 7.89682999343604e-05, + "loss": 0.2963, + "step": 47020 + }, + { + "epoch": 1.8159002278080236, + "grad_norm": 1.9220221042633057, + "learning_rate": 7.894255891475861e-05, + "loss": 0.2822, + "step": 47030 + }, + { + "epoch": 1.8162863431020502, + "grad_norm": 0.2507815361022949, + "learning_rate": 7.891681789515684e-05, + "loss": 0.165, + "step": 47040 + }, + { + "epoch": 1.8166724583960772, + "grad_norm": 0.8847838640213013, + "learning_rate": 7.889107687555505e-05, + "loss": 0.1536, + "step": 47050 + }, + { + "epoch": 1.8170585736901037, + "grad_norm": 0.7972745895385742, + "learning_rate": 7.886533585595326e-05, + "loss": 0.4152, + "step": 47060 + }, + { + "epoch": 1.8174446889841307, + "grad_norm": 3.005274534225464, + "learning_rate": 7.883959483635148e-05, + "loss": 0.2491, + "step": 47070 + }, + { + "epoch": 1.8178308042781575, + "grad_norm": 0.19801591336727142, + "learning_rate": 7.881385381674969e-05, + "loss": 0.2671, + "step": 47080 + }, + { + "epoch": 1.8182169195721842, + "grad_norm": 0.43803074955940247, + "learning_rate": 7.878811279714789e-05, + "loss": 0.1101, + "step": 47090 + }, + { + "epoch": 1.818603034866211, + "grad_norm": 2.517371416091919, + "learning_rate": 7.876237177754612e-05, + "loss": 0.2759, + "step": 47100 + }, + { + "epoch": 1.8189891501602378, + "grad_norm": 1.0714353322982788, + "learning_rate": 7.873663075794433e-05, + "loss": 0.2247, + "step": 47110 + }, + { + "epoch": 1.8193752654542648, + "grad_norm": 1.3440790176391602, + "learning_rate": 7.871088973834254e-05, + "loss": 0.1309, + "step": 47120 + }, + { + "epoch": 1.8197613807482913, + "grad_norm": 0.10665372759103775, + "learning_rate": 7.868514871874076e-05, + "loss": 0.3176, + "step": 47130 + }, + { + "epoch": 1.8201474960423183, + "grad_norm": 0.6812877655029297, + "learning_rate": 7.865940769913897e-05, + "loss": 0.1709, + "step": 47140 + }, + { + "epoch": 1.820533611336345, + "grad_norm": 0.3776918649673462, + "learning_rate": 7.863366667953718e-05, + "loss": 0.1949, + "step": 47150 + }, + { + "epoch": 1.8209197266303718, + "grad_norm": 0.7111637592315674, + "learning_rate": 7.860792565993538e-05, + "loss": 0.2463, + "step": 47160 + }, + { + "epoch": 1.8213058419243986, + "grad_norm": 0.5845387578010559, + "learning_rate": 7.858218464033361e-05, + "loss": 0.285, + "step": 47170 + }, + { + "epoch": 1.8216919572184254, + "grad_norm": 0.394326776266098, + "learning_rate": 7.855644362073182e-05, + "loss": 0.1547, + "step": 47180 + }, + { + "epoch": 1.8220780725124524, + "grad_norm": 1.6686782836914062, + "learning_rate": 7.853070260113004e-05, + "loss": 0.2764, + "step": 47190 + }, + { + "epoch": 1.822464187806479, + "grad_norm": 0.4293366074562073, + "learning_rate": 7.850496158152825e-05, + "loss": 0.2277, + "step": 47200 + }, + { + "epoch": 1.8228503031005059, + "grad_norm": 0.5316643118858337, + "learning_rate": 7.847922056192646e-05, + "loss": 0.2015, + "step": 47210 + }, + { + "epoch": 1.8232364183945327, + "grad_norm": 1.1477640867233276, + "learning_rate": 7.845347954232468e-05, + "loss": 0.4081, + "step": 47220 + }, + { + "epoch": 1.8236225336885594, + "grad_norm": 1.0033808946609497, + "learning_rate": 7.842773852272289e-05, + "loss": 0.1356, + "step": 47230 + }, + { + "epoch": 1.8240086489825862, + "grad_norm": 1.0021567344665527, + "learning_rate": 7.84019975031211e-05, + "loss": 0.161, + "step": 47240 + }, + { + "epoch": 1.824394764276613, + "grad_norm": 0.733974277973175, + "learning_rate": 
7.837625648351932e-05, + "loss": 0.1246, + "step": 47250 + }, + { + "epoch": 1.82478087957064, + "grad_norm": 1.1681737899780273, + "learning_rate": 7.835051546391753e-05, + "loss": 0.3287, + "step": 47260 + }, + { + "epoch": 1.8251669948646665, + "grad_norm": 2.490657329559326, + "learning_rate": 7.832477444431574e-05, + "loss": 0.2203, + "step": 47270 + }, + { + "epoch": 1.8255531101586935, + "grad_norm": 1.1667157411575317, + "learning_rate": 7.829903342471396e-05, + "loss": 0.2827, + "step": 47280 + }, + { + "epoch": 1.82593922545272, + "grad_norm": 0.31772786378860474, + "learning_rate": 7.827329240511217e-05, + "loss": 0.1664, + "step": 47290 + }, + { + "epoch": 1.826325340746747, + "grad_norm": 1.1539255380630493, + "learning_rate": 7.824755138551038e-05, + "loss": 0.207, + "step": 47300 + }, + { + "epoch": 1.8267114560407738, + "grad_norm": 0.9592777490615845, + "learning_rate": 7.82218103659086e-05, + "loss": 0.193, + "step": 47310 + }, + { + "epoch": 1.8270975713348006, + "grad_norm": 0.6902135014533997, + "learning_rate": 7.819606934630681e-05, + "loss": 0.2119, + "step": 47320 + }, + { + "epoch": 1.8274836866288273, + "grad_norm": 1.4007855653762817, + "learning_rate": 7.817032832670502e-05, + "loss": 0.2046, + "step": 47330 + }, + { + "epoch": 1.827869801922854, + "grad_norm": 1.520080804824829, + "learning_rate": 7.814458730710324e-05, + "loss": 0.2861, + "step": 47340 + }, + { + "epoch": 1.828255917216881, + "grad_norm": 2.576406478881836, + "learning_rate": 7.811884628750146e-05, + "loss": 0.2615, + "step": 47350 + }, + { + "epoch": 1.8286420325109076, + "grad_norm": 1.499143362045288, + "learning_rate": 7.809310526789968e-05, + "loss": 0.4094, + "step": 47360 + }, + { + "epoch": 1.8290281478049346, + "grad_norm": 0.9561934471130371, + "learning_rate": 7.806736424829787e-05, + "loss": 0.2121, + "step": 47370 + }, + { + "epoch": 1.8294142630989614, + "grad_norm": 1.838971734046936, + "learning_rate": 7.804162322869609e-05, + "loss": 0.325, + "step": 47380 + }, + { + "epoch": 1.8298003783929881, + "grad_norm": 1.1759018898010254, + "learning_rate": 7.80158822090943e-05, + "loss": 0.1095, + "step": 47390 + }, + { + "epoch": 1.830186493687015, + "grad_norm": 0.4624423086643219, + "learning_rate": 7.799014118949251e-05, + "loss": 0.264, + "step": 47400 + }, + { + "epoch": 1.8305726089810417, + "grad_norm": 2.0572640895843506, + "learning_rate": 7.796440016989073e-05, + "loss": 0.3171, + "step": 47410 + }, + { + "epoch": 1.8309587242750687, + "grad_norm": 0.959831714630127, + "learning_rate": 7.793865915028896e-05, + "loss": 0.1588, + "step": 47420 + }, + { + "epoch": 1.8313448395690952, + "grad_norm": 0.7953433394432068, + "learning_rate": 7.791291813068717e-05, + "loss": 0.3992, + "step": 47430 + }, + { + "epoch": 1.8317309548631222, + "grad_norm": 0.8194203972816467, + "learning_rate": 7.788717711108537e-05, + "loss": 0.2718, + "step": 47440 + }, + { + "epoch": 1.832117070157149, + "grad_norm": 1.7283341884613037, + "learning_rate": 7.786143609148358e-05, + "loss": 0.3977, + "step": 47450 + }, + { + "epoch": 1.8325031854511757, + "grad_norm": 0.15646076202392578, + "learning_rate": 7.78356950718818e-05, + "loss": 0.2449, + "step": 47460 + }, + { + "epoch": 1.8328893007452025, + "grad_norm": 1.0211836099624634, + "learning_rate": 7.780995405228001e-05, + "loss": 0.2022, + "step": 47470 + }, + { + "epoch": 1.8332754160392293, + "grad_norm": 0.8658961653709412, + "learning_rate": 7.778421303267823e-05, + "loss": 0.2429, + "step": 47480 + }, + { + "epoch": 1.8336615313332563, + 
"grad_norm": 1.2359483242034912, + "learning_rate": 7.775847201307645e-05, + "loss": 0.3284, + "step": 47490 + }, + { + "epoch": 1.8340476466272828, + "grad_norm": 1.2980804443359375, + "learning_rate": 7.773273099347466e-05, + "loss": 0.1205, + "step": 47500 + }, + { + "epoch": 1.8344337619213098, + "grad_norm": 0.06653542071580887, + "learning_rate": 7.770698997387286e-05, + "loss": 0.1376, + "step": 47510 + }, + { + "epoch": 1.8348198772153363, + "grad_norm": 1.0570144653320312, + "learning_rate": 7.768124895427107e-05, + "loss": 0.1288, + "step": 47520 + }, + { + "epoch": 1.8352059925093633, + "grad_norm": 1.0040183067321777, + "learning_rate": 7.765550793466929e-05, + "loss": 0.1825, + "step": 47530 + }, + { + "epoch": 1.83559210780339, + "grad_norm": 2.5596699714660645, + "learning_rate": 7.762976691506751e-05, + "loss": 0.1764, + "step": 47540 + }, + { + "epoch": 1.8359782230974169, + "grad_norm": 4.138411521911621, + "learning_rate": 7.760402589546573e-05, + "loss": 0.2783, + "step": 47550 + }, + { + "epoch": 1.8363643383914436, + "grad_norm": 0.8152881860733032, + "learning_rate": 7.757828487586394e-05, + "loss": 0.1424, + "step": 47560 + }, + { + "epoch": 1.8367504536854704, + "grad_norm": 2.608886480331421, + "learning_rate": 7.755254385626215e-05, + "loss": 0.161, + "step": 47570 + }, + { + "epoch": 1.8371365689794974, + "grad_norm": 2.870406150817871, + "learning_rate": 7.752680283666037e-05, + "loss": 0.6419, + "step": 47580 + }, + { + "epoch": 1.837522684273524, + "grad_norm": 1.2127447128295898, + "learning_rate": 7.750106181705857e-05, + "loss": 0.1987, + "step": 47590 + }, + { + "epoch": 1.837908799567551, + "grad_norm": 0.44798439741134644, + "learning_rate": 7.74753207974568e-05, + "loss": 0.2313, + "step": 47600 + }, + { + "epoch": 1.8382949148615777, + "grad_norm": 0.434055894613266, + "learning_rate": 7.744957977785501e-05, + "loss": 0.1227, + "step": 47610 + }, + { + "epoch": 1.8386810301556045, + "grad_norm": 1.0160471200942993, + "learning_rate": 7.742383875825322e-05, + "loss": 0.2699, + "step": 47620 + }, + { + "epoch": 1.8390671454496312, + "grad_norm": 2.0095553398132324, + "learning_rate": 7.739809773865143e-05, + "loss": 0.2666, + "step": 47630 + }, + { + "epoch": 1.839453260743658, + "grad_norm": 2.849498748779297, + "learning_rate": 7.737235671904965e-05, + "loss": 0.2798, + "step": 47640 + }, + { + "epoch": 1.839839376037685, + "grad_norm": 2.336435556411743, + "learning_rate": 7.734661569944786e-05, + "loss": 0.3048, + "step": 47650 + }, + { + "epoch": 1.8402254913317115, + "grad_norm": 0.8438564538955688, + "learning_rate": 7.732087467984606e-05, + "loss": 0.2876, + "step": 47660 + }, + { + "epoch": 1.8406116066257385, + "grad_norm": 0.514867901802063, + "learning_rate": 7.729513366024429e-05, + "loss": 0.2249, + "step": 47670 + }, + { + "epoch": 1.8409977219197653, + "grad_norm": 4.391468048095703, + "learning_rate": 7.72693926406425e-05, + "loss": 0.3918, + "step": 47680 + }, + { + "epoch": 1.841383837213792, + "grad_norm": 2.4497931003570557, + "learning_rate": 7.724365162104071e-05, + "loss": 0.3368, + "step": 47690 + }, + { + "epoch": 1.8417699525078188, + "grad_norm": 2.884559392929077, + "learning_rate": 7.721791060143893e-05, + "loss": 0.2406, + "step": 47700 + }, + { + "epoch": 1.8421560678018456, + "grad_norm": 1.099225640296936, + "learning_rate": 7.719216958183714e-05, + "loss": 0.1759, + "step": 47710 + }, + { + "epoch": 1.8425421830958726, + "grad_norm": 0.881747305393219, + "learning_rate": 7.716642856223535e-05, + "loss": 0.2377, + 
"step": 47720 + }, + { + "epoch": 1.8429282983898991, + "grad_norm": 1.7341536283493042, + "learning_rate": 7.714068754263357e-05, + "loss": 0.1593, + "step": 47730 + }, + { + "epoch": 1.8433144136839261, + "grad_norm": 0.8997848629951477, + "learning_rate": 7.711494652303178e-05, + "loss": 0.2198, + "step": 47740 + }, + { + "epoch": 1.8437005289779527, + "grad_norm": 0.38423866033554077, + "learning_rate": 7.708920550343e-05, + "loss": 0.2539, + "step": 47750 + }, + { + "epoch": 1.8440866442719797, + "grad_norm": 1.0857203006744385, + "learning_rate": 7.706346448382821e-05, + "loss": 0.1595, + "step": 47760 + }, + { + "epoch": 1.8444727595660064, + "grad_norm": 1.5009055137634277, + "learning_rate": 7.703772346422642e-05, + "loss": 0.2478, + "step": 47770 + }, + { + "epoch": 1.8448588748600332, + "grad_norm": 0.13288016617298126, + "learning_rate": 7.701198244462463e-05, + "loss": 0.2184, + "step": 47780 + }, + { + "epoch": 1.84524499015406, + "grad_norm": 1.167777180671692, + "learning_rate": 7.698624142502285e-05, + "loss": 0.3157, + "step": 47790 + }, + { + "epoch": 1.8456311054480867, + "grad_norm": 0.5168697834014893, + "learning_rate": 7.696050040542106e-05, + "loss": 0.1543, + "step": 47800 + }, + { + "epoch": 1.8460172207421137, + "grad_norm": 0.7516908645629883, + "learning_rate": 7.693475938581927e-05, + "loss": 0.1869, + "step": 47810 + }, + { + "epoch": 1.8464033360361403, + "grad_norm": 3.0962419509887695, + "learning_rate": 7.690901836621749e-05, + "loss": 0.3891, + "step": 47820 + }, + { + "epoch": 1.8467894513301673, + "grad_norm": 0.05181325227022171, + "learning_rate": 7.68832773466157e-05, + "loss": 0.3427, + "step": 47830 + }, + { + "epoch": 1.847175566624194, + "grad_norm": 2.856113910675049, + "learning_rate": 7.685753632701391e-05, + "loss": 0.2148, + "step": 47840 + }, + { + "epoch": 1.8475616819182208, + "grad_norm": 0.4950585663318634, + "learning_rate": 7.683179530741214e-05, + "loss": 0.2689, + "step": 47850 + }, + { + "epoch": 1.8479477972122476, + "grad_norm": 2.390775680541992, + "learning_rate": 7.680605428781034e-05, + "loss": 0.3389, + "step": 47860 + }, + { + "epoch": 1.8483339125062743, + "grad_norm": 0.1536688208580017, + "learning_rate": 7.678031326820855e-05, + "loss": 0.1811, + "step": 47870 + }, + { + "epoch": 1.8487200278003013, + "grad_norm": 0.26756611466407776, + "learning_rate": 7.675457224860677e-05, + "loss": 0.1735, + "step": 47880 + }, + { + "epoch": 1.8491061430943279, + "grad_norm": 1.3034265041351318, + "learning_rate": 7.672883122900498e-05, + "loss": 0.1554, + "step": 47890 + }, + { + "epoch": 1.8494922583883548, + "grad_norm": 1.1781542301177979, + "learning_rate": 7.67030902094032e-05, + "loss": 0.4027, + "step": 47900 + }, + { + "epoch": 1.8498783736823814, + "grad_norm": 1.2090016603469849, + "learning_rate": 7.667734918980141e-05, + "loss": 0.1873, + "step": 47910 + }, + { + "epoch": 1.8502644889764084, + "grad_norm": 1.5354760885238647, + "learning_rate": 7.665160817019963e-05, + "loss": 0.2218, + "step": 47920 + }, + { + "epoch": 1.8506506042704352, + "grad_norm": 2.093831777572632, + "learning_rate": 7.662586715059785e-05, + "loss": 0.2591, + "step": 47930 + }, + { + "epoch": 1.851036719564462, + "grad_norm": 0.9484484195709229, + "learning_rate": 7.660012613099605e-05, + "loss": 0.252, + "step": 47940 + }, + { + "epoch": 1.851422834858489, + "grad_norm": 0.15661562979221344, + "learning_rate": 7.657438511139426e-05, + "loss": 0.2673, + "step": 47950 + }, + { + "epoch": 1.8518089501525155, + "grad_norm": 0.5501863360404968, + 
"learning_rate": 7.654864409179247e-05, + "loss": 0.2636, + "step": 47960 + }, + { + "epoch": 1.8521950654465424, + "grad_norm": 1.455328345298767, + "learning_rate": 7.652290307219069e-05, + "loss": 0.2085, + "step": 47970 + }, + { + "epoch": 1.852581180740569, + "grad_norm": 0.310106098651886, + "learning_rate": 7.649716205258891e-05, + "loss": 0.1839, + "step": 47980 + }, + { + "epoch": 1.852967296034596, + "grad_norm": 0.444260835647583, + "learning_rate": 7.647142103298713e-05, + "loss": 0.2434, + "step": 47990 + }, + { + "epoch": 1.8533534113286227, + "grad_norm": 0.5278909206390381, + "learning_rate": 7.644568001338534e-05, + "loss": 0.2472, + "step": 48000 + }, + { + "epoch": 1.8537395266226495, + "grad_norm": 0.4239410161972046, + "learning_rate": 7.641993899378354e-05, + "loss": 0.2854, + "step": 48010 + }, + { + "epoch": 1.8541256419166763, + "grad_norm": 0.9763671159744263, + "learning_rate": 7.639419797418175e-05, + "loss": 0.2737, + "step": 48020 + }, + { + "epoch": 1.854511757210703, + "grad_norm": 2.139054775238037, + "learning_rate": 7.636845695457997e-05, + "loss": 0.2553, + "step": 48030 + }, + { + "epoch": 1.85489787250473, + "grad_norm": 1.8417706489562988, + "learning_rate": 7.63427159349782e-05, + "loss": 0.3787, + "step": 48040 + }, + { + "epoch": 1.8552839877987566, + "grad_norm": 1.2950854301452637, + "learning_rate": 7.631697491537641e-05, + "loss": 0.1859, + "step": 48050 + }, + { + "epoch": 1.8556701030927836, + "grad_norm": 0.9440277218818665, + "learning_rate": 7.629123389577462e-05, + "loss": 0.2362, + "step": 48060 + }, + { + "epoch": 1.8560562183868103, + "grad_norm": 2.829890012741089, + "learning_rate": 7.626549287617283e-05, + "loss": 0.264, + "step": 48070 + }, + { + "epoch": 1.856442333680837, + "grad_norm": 1.5721958875656128, + "learning_rate": 7.623975185657103e-05, + "loss": 0.2834, + "step": 48080 + }, + { + "epoch": 1.8568284489748639, + "grad_norm": 0.7574679851531982, + "learning_rate": 7.621401083696925e-05, + "loss": 0.1546, + "step": 48090 + }, + { + "epoch": 1.8572145642688906, + "grad_norm": 0.4562332332134247, + "learning_rate": 7.618826981736747e-05, + "loss": 0.2419, + "step": 48100 + }, + { + "epoch": 1.8576006795629176, + "grad_norm": 1.8063342571258545, + "learning_rate": 7.616252879776569e-05, + "loss": 0.5576, + "step": 48110 + }, + { + "epoch": 1.8579867948569442, + "grad_norm": 1.7112247943878174, + "learning_rate": 7.61367877781639e-05, + "loss": 0.2379, + "step": 48120 + }, + { + "epoch": 1.8583729101509712, + "grad_norm": 0.696594774723053, + "learning_rate": 7.611104675856211e-05, + "loss": 0.2704, + "step": 48130 + }, + { + "epoch": 1.8587590254449977, + "grad_norm": 1.1555263996124268, + "learning_rate": 7.608530573896033e-05, + "loss": 0.2835, + "step": 48140 + }, + { + "epoch": 1.8591451407390247, + "grad_norm": 0.8256335854530334, + "learning_rate": 7.605956471935854e-05, + "loss": 0.3059, + "step": 48150 + }, + { + "epoch": 1.8595312560330515, + "grad_norm": 1.5511350631713867, + "learning_rate": 7.603382369975674e-05, + "loss": 0.2882, + "step": 48160 + }, + { + "epoch": 1.8599173713270782, + "grad_norm": 0.5888099074363708, + "learning_rate": 7.600808268015497e-05, + "loss": 0.2939, + "step": 48170 + }, + { + "epoch": 1.860303486621105, + "grad_norm": 1.3985711336135864, + "learning_rate": 7.598234166055318e-05, + "loss": 0.203, + "step": 48180 + }, + { + "epoch": 1.8606896019151318, + "grad_norm": 2.2253661155700684, + "learning_rate": 7.59566006409514e-05, + "loss": 0.2865, + "step": 48190 + }, + { + "epoch": 
1.8610757172091588, + "grad_norm": 1.062969446182251, + "learning_rate": 7.593085962134961e-05, + "loss": 0.3292, + "step": 48200 + }, + { + "epoch": 1.8614618325031853, + "grad_norm": 1.0917813777923584, + "learning_rate": 7.590511860174782e-05, + "loss": 0.2274, + "step": 48210 + }, + { + "epoch": 1.8618479477972123, + "grad_norm": 1.3360031843185425, + "learning_rate": 7.587937758214603e-05, + "loss": 0.2381, + "step": 48220 + }, + { + "epoch": 1.862234063091239, + "grad_norm": 0.12084411829710007, + "learning_rate": 7.585363656254425e-05, + "loss": 0.1836, + "step": 48230 + }, + { + "epoch": 1.8626201783852658, + "grad_norm": 3.0013840198516846, + "learning_rate": 7.582789554294246e-05, + "loss": 0.2514, + "step": 48240 + }, + { + "epoch": 1.8630062936792926, + "grad_norm": 0.08427372574806213, + "learning_rate": 7.580215452334067e-05, + "loss": 0.4512, + "step": 48250 + }, + { + "epoch": 1.8633924089733194, + "grad_norm": 2.2700986862182617, + "learning_rate": 7.577641350373889e-05, + "loss": 0.4929, + "step": 48260 + }, + { + "epoch": 1.8637785242673464, + "grad_norm": 1.512097716331482, + "learning_rate": 7.57506724841371e-05, + "loss": 0.2595, + "step": 48270 + }, + { + "epoch": 1.864164639561373, + "grad_norm": 0.03086630441248417, + "learning_rate": 7.572493146453531e-05, + "loss": 0.2801, + "step": 48280 + }, + { + "epoch": 1.8645507548554, + "grad_norm": 1.8089312314987183, + "learning_rate": 7.569919044493353e-05, + "loss": 0.3094, + "step": 48290 + }, + { + "epoch": 1.8649368701494267, + "grad_norm": 1.2290606498718262, + "learning_rate": 7.567344942533174e-05, + "loss": 0.2952, + "step": 48300 + }, + { + "epoch": 1.8653229854434534, + "grad_norm": 2.2351982593536377, + "learning_rate": 7.564770840572995e-05, + "loss": 0.1904, + "step": 48310 + }, + { + "epoch": 1.8657091007374802, + "grad_norm": 1.007934808731079, + "learning_rate": 7.562196738612817e-05, + "loss": 0.174, + "step": 48320 + }, + { + "epoch": 1.866095216031507, + "grad_norm": 1.8931010961532593, + "learning_rate": 7.559622636652638e-05, + "loss": 0.2265, + "step": 48330 + }, + { + "epoch": 1.866481331325534, + "grad_norm": 7.616462230682373, + "learning_rate": 7.55704853469246e-05, + "loss": 0.194, + "step": 48340 + }, + { + "epoch": 1.8668674466195605, + "grad_norm": 0.8300217390060425, + "learning_rate": 7.554474432732282e-05, + "loss": 0.172, + "step": 48350 + }, + { + "epoch": 1.8672535619135875, + "grad_norm": 0.9842997789382935, + "learning_rate": 7.551900330772102e-05, + "loss": 0.2279, + "step": 48360 + }, + { + "epoch": 1.867639677207614, + "grad_norm": 1.950230360031128, + "learning_rate": 7.549326228811923e-05, + "loss": 0.2703, + "step": 48370 + }, + { + "epoch": 1.868025792501641, + "grad_norm": 1.9885706901550293, + "learning_rate": 7.546752126851745e-05, + "loss": 0.2584, + "step": 48380 + }, + { + "epoch": 1.8684119077956678, + "grad_norm": 0.6000315546989441, + "learning_rate": 7.544178024891566e-05, + "loss": 0.2729, + "step": 48390 + }, + { + "epoch": 1.8687980230896946, + "grad_norm": 0.922893226146698, + "learning_rate": 7.541603922931387e-05, + "loss": 0.1564, + "step": 48400 + }, + { + "epoch": 1.8691841383837213, + "grad_norm": 0.4918765723705292, + "learning_rate": 7.53902982097121e-05, + "loss": 0.1723, + "step": 48410 + }, + { + "epoch": 1.869570253677748, + "grad_norm": 0.9271582365036011, + "learning_rate": 7.536455719011031e-05, + "loss": 0.2593, + "step": 48420 + }, + { + "epoch": 1.869956368971775, + "grad_norm": 0.5880617499351501, + "learning_rate": 7.533881617050851e-05, + 
"loss": 0.1738, + "step": 48430 + }, + { + "epoch": 1.8703424842658016, + "grad_norm": 2.229809522628784, + "learning_rate": 7.531307515090673e-05, + "loss": 0.2722, + "step": 48440 + }, + { + "epoch": 1.8707285995598286, + "grad_norm": 0.47200268507003784, + "learning_rate": 7.528733413130494e-05, + "loss": 0.1359, + "step": 48450 + }, + { + "epoch": 1.8711147148538554, + "grad_norm": 1.7799588441848755, + "learning_rate": 7.526159311170315e-05, + "loss": 0.1806, + "step": 48460 + }, + { + "epoch": 1.8715008301478822, + "grad_norm": 0.7878087162971497, + "learning_rate": 7.523585209210137e-05, + "loss": 0.128, + "step": 48470 + }, + { + "epoch": 1.871886945441909, + "grad_norm": 0.666887104511261, + "learning_rate": 7.521011107249959e-05, + "loss": 0.3447, + "step": 48480 + }, + { + "epoch": 1.8722730607359357, + "grad_norm": 0.13966748118400574, + "learning_rate": 7.51843700528978e-05, + "loss": 0.2694, + "step": 48490 + }, + { + "epoch": 1.8726591760299627, + "grad_norm": 0.6305252909660339, + "learning_rate": 7.5158629033296e-05, + "loss": 0.1235, + "step": 48500 + }, + { + "epoch": 1.8730452913239892, + "grad_norm": 1.4558709859848022, + "learning_rate": 7.513288801369422e-05, + "loss": 0.1251, + "step": 48510 + }, + { + "epoch": 1.8734314066180162, + "grad_norm": 1.4596049785614014, + "learning_rate": 7.510714699409243e-05, + "loss": 0.3316, + "step": 48520 + }, + { + "epoch": 1.873817521912043, + "grad_norm": 0.6161240935325623, + "learning_rate": 7.508140597449065e-05, + "loss": 0.1035, + "step": 48530 + }, + { + "epoch": 1.8742036372060698, + "grad_norm": 2.402022123336792, + "learning_rate": 7.505566495488887e-05, + "loss": 0.2569, + "step": 48540 + }, + { + "epoch": 1.8745897525000965, + "grad_norm": 1.2332879304885864, + "learning_rate": 7.502992393528709e-05, + "loss": 0.0864, + "step": 48550 + }, + { + "epoch": 1.8749758677941233, + "grad_norm": 1.636063575744629, + "learning_rate": 7.50041829156853e-05, + "loss": 0.2673, + "step": 48560 + }, + { + "epoch": 1.8753619830881503, + "grad_norm": 1.0160930156707764, + "learning_rate": 7.497844189608351e-05, + "loss": 0.2759, + "step": 48570 + }, + { + "epoch": 1.8757480983821768, + "grad_norm": 2.007415771484375, + "learning_rate": 7.495270087648171e-05, + "loss": 0.328, + "step": 48580 + }, + { + "epoch": 1.8761342136762038, + "grad_norm": 2.852415084838867, + "learning_rate": 7.492695985687993e-05, + "loss": 0.2762, + "step": 48590 + }, + { + "epoch": 1.8765203289702304, + "grad_norm": 1.5660792589187622, + "learning_rate": 7.490121883727815e-05, + "loss": 0.3858, + "step": 48600 + }, + { + "epoch": 1.8769064442642573, + "grad_norm": 1.022005319595337, + "learning_rate": 7.487547781767637e-05, + "loss": 0.2508, + "step": 48610 + }, + { + "epoch": 1.8772925595582841, + "grad_norm": 0.9244334101676941, + "learning_rate": 7.484973679807458e-05, + "loss": 0.3577, + "step": 48620 + }, + { + "epoch": 1.8776786748523109, + "grad_norm": 0.6603676080703735, + "learning_rate": 7.482399577847279e-05, + "loss": 0.1577, + "step": 48630 + }, + { + "epoch": 1.8780647901463376, + "grad_norm": 1.8789231777191162, + "learning_rate": 7.4798254758871e-05, + "loss": 0.1705, + "step": 48640 + }, + { + "epoch": 1.8784509054403644, + "grad_norm": 2.0536692142486572, + "learning_rate": 7.47725137392692e-05, + "loss": 0.3087, + "step": 48650 + }, + { + "epoch": 1.8788370207343914, + "grad_norm": 1.1918115615844727, + "learning_rate": 7.474677271966743e-05, + "loss": 0.2027, + "step": 48660 + }, + { + "epoch": 1.879223136028418, + "grad_norm": 
0.374523788690567, + "learning_rate": 7.472103170006565e-05, + "loss": 0.193, + "step": 48670 + }, + { + "epoch": 1.879609251322445, + "grad_norm": 1.481998324394226, + "learning_rate": 7.469529068046386e-05, + "loss": 0.246, + "step": 48680 + }, + { + "epoch": 1.8799953666164717, + "grad_norm": 0.9611921310424805, + "learning_rate": 7.466954966086207e-05, + "loss": 0.3557, + "step": 48690 + }, + { + "epoch": 1.8803814819104985, + "grad_norm": 2.2604222297668457, + "learning_rate": 7.464380864126029e-05, + "loss": 0.168, + "step": 48700 + }, + { + "epoch": 1.8807675972045252, + "grad_norm": 0.11883547157049179, + "learning_rate": 7.46180676216585e-05, + "loss": 0.1078, + "step": 48710 + }, + { + "epoch": 1.881153712498552, + "grad_norm": 1.0063214302062988, + "learning_rate": 7.45923266020567e-05, + "loss": 0.2871, + "step": 48720 + }, + { + "epoch": 1.881539827792579, + "grad_norm": 0.32539430260658264, + "learning_rate": 7.456658558245493e-05, + "loss": 0.2641, + "step": 48730 + }, + { + "epoch": 1.8819259430866055, + "grad_norm": 0.05262208729982376, + "learning_rate": 7.454084456285314e-05, + "loss": 0.2313, + "step": 48740 + }, + { + "epoch": 1.8823120583806325, + "grad_norm": 1.5337389707565308, + "learning_rate": 7.451510354325135e-05, + "loss": 0.2202, + "step": 48750 + }, + { + "epoch": 1.8826981736746593, + "grad_norm": 2.8400349617004395, + "learning_rate": 7.448936252364957e-05, + "loss": 0.2719, + "step": 48760 + }, + { + "epoch": 1.883084288968686, + "grad_norm": 1.0065114498138428, + "learning_rate": 7.446362150404778e-05, + "loss": 0.1904, + "step": 48770 + }, + { + "epoch": 1.8834704042627128, + "grad_norm": 3.514146089553833, + "learning_rate": 7.443788048444599e-05, + "loss": 0.4121, + "step": 48780 + }, + { + "epoch": 1.8838565195567396, + "grad_norm": 1.5249392986297607, + "learning_rate": 7.44121394648442e-05, + "loss": 0.3192, + "step": 48790 + }, + { + "epoch": 1.8842426348507666, + "grad_norm": 2.1075022220611572, + "learning_rate": 7.438639844524242e-05, + "loss": 0.257, + "step": 48800 + }, + { + "epoch": 1.8846287501447931, + "grad_norm": 1.368531584739685, + "learning_rate": 7.436065742564063e-05, + "loss": 0.246, + "step": 48810 + }, + { + "epoch": 1.8850148654388201, + "grad_norm": 0.7900007367134094, + "learning_rate": 7.433491640603885e-05, + "loss": 0.2635, + "step": 48820 + }, + { + "epoch": 1.8854009807328467, + "grad_norm": 0.8597519397735596, + "learning_rate": 7.430917538643706e-05, + "loss": 0.3996, + "step": 48830 + }, + { + "epoch": 1.8857870960268737, + "grad_norm": 1.4207600355148315, + "learning_rate": 7.428343436683527e-05, + "loss": 0.0985, + "step": 48840 + }, + { + "epoch": 1.8861732113209004, + "grad_norm": 0.43486616015434265, + "learning_rate": 7.425769334723349e-05, + "loss": 0.0675, + "step": 48850 + }, + { + "epoch": 1.8865593266149272, + "grad_norm": 0.5899690389633179, + "learning_rate": 7.42319523276317e-05, + "loss": 0.3184, + "step": 48860 + }, + { + "epoch": 1.886945441908954, + "grad_norm": 0.3843490481376648, + "learning_rate": 7.420621130802991e-05, + "loss": 0.3173, + "step": 48870 + }, + { + "epoch": 1.8873315572029807, + "grad_norm": 1.5994783639907837, + "learning_rate": 7.418047028842813e-05, + "loss": 0.1545, + "step": 48880 + }, + { + "epoch": 1.8877176724970077, + "grad_norm": 0.4738117456436157, + "learning_rate": 7.415472926882634e-05, + "loss": 0.2209, + "step": 48890 + }, + { + "epoch": 1.8881037877910343, + "grad_norm": 0.8965383768081665, + "learning_rate": 7.412898824922455e-05, + "loss": 0.0784, + "step": 48900 
+ }, + { + "epoch": 1.8884899030850613, + "grad_norm": 0.5122581124305725, + "learning_rate": 7.410324722962278e-05, + "loss": 0.2346, + "step": 48910 + }, + { + "epoch": 1.888876018379088, + "grad_norm": 0.8720236420631409, + "learning_rate": 7.407750621002099e-05, + "loss": 0.1616, + "step": 48920 + }, + { + "epoch": 1.8892621336731148, + "grad_norm": 0.8678966760635376, + "learning_rate": 7.405176519041919e-05, + "loss": 0.2346, + "step": 48930 + }, + { + "epoch": 1.8896482489671416, + "grad_norm": 1.5904022455215454, + "learning_rate": 7.40260241708174e-05, + "loss": 0.2383, + "step": 48940 + }, + { + "epoch": 1.8900343642611683, + "grad_norm": 0.8917766213417053, + "learning_rate": 7.400028315121562e-05, + "loss": 0.0761, + "step": 48950 + }, + { + "epoch": 1.8904204795551953, + "grad_norm": 2.0660765171051025, + "learning_rate": 7.397454213161383e-05, + "loss": 0.1417, + "step": 48960 + }, + { + "epoch": 1.8908065948492219, + "grad_norm": 1.085541009902954, + "learning_rate": 7.394880111201205e-05, + "loss": 0.116, + "step": 48970 + }, + { + "epoch": 1.8911927101432489, + "grad_norm": 0.5287320613861084, + "learning_rate": 7.392306009241027e-05, + "loss": 0.2035, + "step": 48980 + }, + { + "epoch": 1.8915788254372756, + "grad_norm": 0.12603731453418732, + "learning_rate": 7.389731907280849e-05, + "loss": 0.2288, + "step": 48990 + }, + { + "epoch": 1.8919649407313024, + "grad_norm": 1.6294454336166382, + "learning_rate": 7.387157805320669e-05, + "loss": 0.3106, + "step": 49000 + }, + { + "epoch": 1.8923510560253292, + "grad_norm": 0.12466654926538467, + "learning_rate": 7.38458370336049e-05, + "loss": 0.2728, + "step": 49010 + }, + { + "epoch": 1.892737171319356, + "grad_norm": 0.44524601101875305, + "learning_rate": 7.382009601400311e-05, + "loss": 0.1293, + "step": 49020 + }, + { + "epoch": 1.893123286613383, + "grad_norm": 0.7164571285247803, + "learning_rate": 7.379435499440133e-05, + "loss": 0.2331, + "step": 49030 + }, + { + "epoch": 1.8935094019074095, + "grad_norm": 0.6479294300079346, + "learning_rate": 7.376861397479955e-05, + "loss": 0.2716, + "step": 49040 + }, + { + "epoch": 1.8938955172014365, + "grad_norm": 0.5055733323097229, + "learning_rate": 7.374287295519777e-05, + "loss": 0.253, + "step": 49050 + }, + { + "epoch": 1.894281632495463, + "grad_norm": 0.14443042874336243, + "learning_rate": 7.371713193559598e-05, + "loss": 0.211, + "step": 49060 + }, + { + "epoch": 1.89466774778949, + "grad_norm": 0.21934077143669128, + "learning_rate": 7.369139091599418e-05, + "loss": 0.4524, + "step": 49070 + }, + { + "epoch": 1.8950538630835168, + "grad_norm": 0.7723036408424377, + "learning_rate": 7.366564989639239e-05, + "loss": 0.3333, + "step": 49080 + }, + { + "epoch": 1.8954399783775435, + "grad_norm": 0.561475932598114, + "learning_rate": 7.36399088767906e-05, + "loss": 0.2446, + "step": 49090 + }, + { + "epoch": 1.8958260936715703, + "grad_norm": 0.36831262707710266, + "learning_rate": 7.361416785718883e-05, + "loss": 0.3203, + "step": 49100 + }, + { + "epoch": 1.896212208965597, + "grad_norm": 1.3542941808700562, + "learning_rate": 7.358842683758705e-05, + "loss": 0.1927, + "step": 49110 + }, + { + "epoch": 1.896598324259624, + "grad_norm": 0.778232991695404, + "learning_rate": 7.356268581798526e-05, + "loss": 0.1714, + "step": 49120 + }, + { + "epoch": 1.8969844395536506, + "grad_norm": 0.1421511471271515, + "learning_rate": 7.353694479838347e-05, + "loss": 0.2391, + "step": 49130 + }, + { + "epoch": 1.8973705548476776, + "grad_norm": 0.5509871244430542, + 
"learning_rate": 7.351120377878169e-05, + "loss": 0.251, + "step": 49140 + }, + { + "epoch": 1.8977566701417043, + "grad_norm": 1.068138599395752, + "learning_rate": 7.348546275917989e-05, + "loss": 0.3913, + "step": 49150 + }, + { + "epoch": 1.8981427854357311, + "grad_norm": 0.8679132461547852, + "learning_rate": 7.345972173957811e-05, + "loss": 0.2243, + "step": 49160 + }, + { + "epoch": 1.8985289007297579, + "grad_norm": 1.1966150999069214, + "learning_rate": 7.343398071997633e-05, + "loss": 0.1426, + "step": 49170 + }, + { + "epoch": 1.8989150160237847, + "grad_norm": 2.415524482727051, + "learning_rate": 7.340823970037454e-05, + "loss": 0.2193, + "step": 49180 + }, + { + "epoch": 1.8993011313178116, + "grad_norm": 0.3287011384963989, + "learning_rate": 7.338249868077275e-05, + "loss": 0.1409, + "step": 49190 + }, + { + "epoch": 1.8996872466118382, + "grad_norm": 0.47298726439476013, + "learning_rate": 7.335675766117097e-05, + "loss": 0.3534, + "step": 49200 + }, + { + "epoch": 1.9000733619058652, + "grad_norm": 0.25453588366508484, + "learning_rate": 7.333101664156918e-05, + "loss": 0.2457, + "step": 49210 + }, + { + "epoch": 1.9004594771998917, + "grad_norm": 1.78682279586792, + "learning_rate": 7.330527562196738e-05, + "loss": 0.4379, + "step": 49220 + }, + { + "epoch": 1.9008455924939187, + "grad_norm": 0.9073999524116516, + "learning_rate": 7.32795346023656e-05, + "loss": 0.27, + "step": 49230 + }, + { + "epoch": 1.9012317077879455, + "grad_norm": 0.7788071036338806, + "learning_rate": 7.325379358276382e-05, + "loss": 0.1992, + "step": 49240 + }, + { + "epoch": 1.9016178230819722, + "grad_norm": 0.151946023106575, + "learning_rate": 7.322805256316203e-05, + "loss": 0.1526, + "step": 49250 + }, + { + "epoch": 1.9020039383759992, + "grad_norm": 1.655206561088562, + "learning_rate": 7.320231154356025e-05, + "loss": 0.2899, + "step": 49260 + }, + { + "epoch": 1.9023900536700258, + "grad_norm": 0.6136038303375244, + "learning_rate": 7.317657052395846e-05, + "loss": 0.2118, + "step": 49270 + }, + { + "epoch": 1.9027761689640528, + "grad_norm": 2.720750093460083, + "learning_rate": 7.315082950435667e-05, + "loss": 0.316, + "step": 49280 + }, + { + "epoch": 1.9031622842580793, + "grad_norm": 0.7502691149711609, + "learning_rate": 7.312508848475488e-05, + "loss": 0.2368, + "step": 49290 + }, + { + "epoch": 1.9035483995521063, + "grad_norm": 1.0178804397583008, + "learning_rate": 7.30993474651531e-05, + "loss": 0.1193, + "step": 49300 + }, + { + "epoch": 1.903934514846133, + "grad_norm": 1.0318552255630493, + "learning_rate": 7.307360644555131e-05, + "loss": 0.1236, + "step": 49310 + }, + { + "epoch": 1.9043206301401598, + "grad_norm": 0.4833224415779114, + "learning_rate": 7.304786542594952e-05, + "loss": 0.1505, + "step": 49320 + }, + { + "epoch": 1.9047067454341866, + "grad_norm": 0.8481758832931519, + "learning_rate": 7.302212440634774e-05, + "loss": 0.318, + "step": 49330 + }, + { + "epoch": 1.9050928607282134, + "grad_norm": 0.8141576051712036, + "learning_rate": 7.299638338674595e-05, + "loss": 0.2936, + "step": 49340 + }, + { + "epoch": 1.9054789760222404, + "grad_norm": 0.28531432151794434, + "learning_rate": 7.297064236714416e-05, + "loss": 0.0794, + "step": 49350 + }, + { + "epoch": 1.905865091316267, + "grad_norm": 1.2908906936645508, + "learning_rate": 7.294490134754238e-05, + "loss": 0.2753, + "step": 49360 + }, + { + "epoch": 1.906251206610294, + "grad_norm": 0.27395737171173096, + "learning_rate": 7.291916032794059e-05, + "loss": 0.0869, + "step": 49370 + }, + { + "epoch": 
1.9066373219043207, + "grad_norm": 0.5168110728263855, + "learning_rate": 7.28934193083388e-05, + "loss": 0.2033, + "step": 49380 + }, + { + "epoch": 1.9070234371983474, + "grad_norm": 0.384120911359787, + "learning_rate": 7.286767828873702e-05, + "loss": 0.2012, + "step": 49390 + }, + { + "epoch": 1.9074095524923742, + "grad_norm": 0.86110919713974, + "learning_rate": 7.284193726913523e-05, + "loss": 0.254, + "step": 49400 + }, + { + "epoch": 1.907795667786401, + "grad_norm": 0.04631857946515083, + "learning_rate": 7.281619624953346e-05, + "loss": 0.1196, + "step": 49410 + }, + { + "epoch": 1.908181783080428, + "grad_norm": 0.1448020040988922, + "learning_rate": 7.279045522993166e-05, + "loss": 0.2028, + "step": 49420 + }, + { + "epoch": 1.9085678983744545, + "grad_norm": 0.11122003197669983, + "learning_rate": 7.276471421032987e-05, + "loss": 0.2106, + "step": 49430 + }, + { + "epoch": 1.9089540136684815, + "grad_norm": 1.0059682130813599, + "learning_rate": 7.273897319072808e-05, + "loss": 0.2273, + "step": 49440 + }, + { + "epoch": 1.909340128962508, + "grad_norm": 0.5203434824943542, + "learning_rate": 7.27132321711263e-05, + "loss": 0.1357, + "step": 49450 + }, + { + "epoch": 1.909726244256535, + "grad_norm": 0.5997903347015381, + "learning_rate": 7.268749115152451e-05, + "loss": 0.204, + "step": 49460 + }, + { + "epoch": 1.9101123595505618, + "grad_norm": 0.1968044638633728, + "learning_rate": 7.266175013192272e-05, + "loss": 0.0851, + "step": 49470 + }, + { + "epoch": 1.9104984748445886, + "grad_norm": 2.3479251861572266, + "learning_rate": 7.263600911232095e-05, + "loss": 0.2232, + "step": 49480 + }, + { + "epoch": 1.9108845901386153, + "grad_norm": 1.1804332733154297, + "learning_rate": 7.261026809271916e-05, + "loss": 0.1622, + "step": 49490 + }, + { + "epoch": 1.911270705432642, + "grad_norm": 1.4964795112609863, + "learning_rate": 7.258452707311736e-05, + "loss": 0.2701, + "step": 49500 + }, + { + "epoch": 1.911656820726669, + "grad_norm": 2.1407668590545654, + "learning_rate": 7.255878605351558e-05, + "loss": 0.2944, + "step": 49510 + }, + { + "epoch": 1.9120429360206956, + "grad_norm": 0.5795183181762695, + "learning_rate": 7.253304503391379e-05, + "loss": 0.2761, + "step": 49520 + }, + { + "epoch": 1.9124290513147226, + "grad_norm": 0.8205333352088928, + "learning_rate": 7.2507304014312e-05, + "loss": 0.3897, + "step": 49530 + }, + { + "epoch": 1.9128151666087494, + "grad_norm": 4.536723613739014, + "learning_rate": 7.248156299471023e-05, + "loss": 0.3429, + "step": 49540 + }, + { + "epoch": 1.9132012819027762, + "grad_norm": 0.7611442804336548, + "learning_rate": 7.245582197510844e-05, + "loss": 0.3098, + "step": 49550 + }, + { + "epoch": 1.913587397196803, + "grad_norm": 0.7911695241928101, + "learning_rate": 7.243008095550666e-05, + "loss": 0.28, + "step": 49560 + }, + { + "epoch": 1.9139735124908297, + "grad_norm": 0.11662279069423676, + "learning_rate": 7.240433993590486e-05, + "loss": 0.1577, + "step": 49570 + }, + { + "epoch": 1.9143596277848567, + "grad_norm": 2.140101194381714, + "learning_rate": 7.237859891630307e-05, + "loss": 0.3705, + "step": 49580 + }, + { + "epoch": 1.9147457430788832, + "grad_norm": 1.8803783655166626, + "learning_rate": 7.235285789670128e-05, + "loss": 0.3168, + "step": 49590 + }, + { + "epoch": 1.9151318583729102, + "grad_norm": 0.25436753034591675, + "learning_rate": 7.232711687709951e-05, + "loss": 0.3037, + "step": 49600 + }, + { + "epoch": 1.915517973666937, + "grad_norm": 1.1993011236190796, + "learning_rate": 7.230137585749772e-05, + 
"loss": 0.2218, + "step": 49610 + }, + { + "epoch": 1.9159040889609638, + "grad_norm": 0.4995238780975342, + "learning_rate": 7.227563483789594e-05, + "loss": 0.1821, + "step": 49620 + }, + { + "epoch": 1.9162902042549905, + "grad_norm": 1.8584142923355103, + "learning_rate": 7.224989381829415e-05, + "loss": 0.2694, + "step": 49630 + }, + { + "epoch": 1.9166763195490173, + "grad_norm": 0.5736681818962097, + "learning_rate": 7.222415279869235e-05, + "loss": 0.32, + "step": 49640 + }, + { + "epoch": 1.9170624348430443, + "grad_norm": 0.2963573634624481, + "learning_rate": 7.219841177909056e-05, + "loss": 0.1986, + "step": 49650 + }, + { + "epoch": 1.9174485501370708, + "grad_norm": 0.6248067617416382, + "learning_rate": 7.217267075948879e-05, + "loss": 0.2461, + "step": 49660 + }, + { + "epoch": 1.9178346654310978, + "grad_norm": 1.037135362625122, + "learning_rate": 7.2146929739887e-05, + "loss": 0.176, + "step": 49670 + }, + { + "epoch": 1.9182207807251244, + "grad_norm": 7.093240261077881, + "learning_rate": 7.212118872028522e-05, + "loss": 0.263, + "step": 49680 + }, + { + "epoch": 1.9186068960191514, + "grad_norm": 0.9378503561019897, + "learning_rate": 7.209544770068343e-05, + "loss": 0.4105, + "step": 49690 + }, + { + "epoch": 1.9189930113131781, + "grad_norm": 2.801452875137329, + "learning_rate": 7.206970668108164e-05, + "loss": 0.1693, + "step": 49700 + }, + { + "epoch": 1.9193791266072049, + "grad_norm": 0.6257158517837524, + "learning_rate": 7.204396566147984e-05, + "loss": 0.2095, + "step": 49710 + }, + { + "epoch": 1.9197652419012317, + "grad_norm": 1.0623574256896973, + "learning_rate": 7.201822464187807e-05, + "loss": 0.2335, + "step": 49720 + }, + { + "epoch": 1.9201513571952584, + "grad_norm": 1.958388328552246, + "learning_rate": 7.199248362227628e-05, + "loss": 0.2545, + "step": 49730 + }, + { + "epoch": 1.9205374724892854, + "grad_norm": 1.790643334388733, + "learning_rate": 7.19667426026745e-05, + "loss": 0.2342, + "step": 49740 + }, + { + "epoch": 1.920923587783312, + "grad_norm": 0.19096235930919647, + "learning_rate": 7.194100158307271e-05, + "loss": 0.1812, + "step": 49750 + }, + { + "epoch": 1.921309703077339, + "grad_norm": 1.8985120058059692, + "learning_rate": 7.191526056347092e-05, + "loss": 0.294, + "step": 49760 + }, + { + "epoch": 1.9216958183713657, + "grad_norm": 2.9629859924316406, + "learning_rate": 7.188951954386914e-05, + "loss": 0.3423, + "step": 49770 + }, + { + "epoch": 1.9220819336653925, + "grad_norm": 1.475262999534607, + "learning_rate": 7.186377852426735e-05, + "loss": 0.1478, + "step": 49780 + }, + { + "epoch": 1.9224680489594193, + "grad_norm": 1.5917531251907349, + "learning_rate": 7.183803750466556e-05, + "loss": 0.1964, + "step": 49790 + }, + { + "epoch": 1.922854164253446, + "grad_norm": 0.20593854784965515, + "learning_rate": 7.181229648506378e-05, + "loss": 0.1775, + "step": 49800 + }, + { + "epoch": 1.923240279547473, + "grad_norm": 0.5509443879127502, + "learning_rate": 7.178655546546199e-05, + "loss": 0.2411, + "step": 49810 + }, + { + "epoch": 1.9236263948414996, + "grad_norm": 0.8016191720962524, + "learning_rate": 7.17608144458602e-05, + "loss": 0.0837, + "step": 49820 + }, + { + "epoch": 1.9240125101355265, + "grad_norm": 0.8763396143913269, + "learning_rate": 7.173507342625842e-05, + "loss": 0.1932, + "step": 49830 + }, + { + "epoch": 1.9243986254295533, + "grad_norm": 0.25457191467285156, + "learning_rate": 7.170933240665663e-05, + "loss": 0.0869, + "step": 49840 + }, + { + "epoch": 1.92478474072358, + "grad_norm": 
2.61993145942688, + "learning_rate": 7.168359138705484e-05, + "loss": 0.1839, + "step": 49850 + }, + { + "epoch": 1.9251708560176068, + "grad_norm": 2.2821877002716064, + "learning_rate": 7.165785036745306e-05, + "loss": 0.3863, + "step": 49860 + }, + { + "epoch": 1.9255569713116336, + "grad_norm": 1.4072798490524292, + "learning_rate": 7.163210934785127e-05, + "loss": 0.2477, + "step": 49870 + }, + { + "epoch": 1.9259430866056606, + "grad_norm": 0.8962070941925049, + "learning_rate": 7.160636832824948e-05, + "loss": 0.2936, + "step": 49880 + }, + { + "epoch": 1.9263292018996871, + "grad_norm": 3.0801923274993896, + "learning_rate": 7.15806273086477e-05, + "loss": 0.2368, + "step": 49890 + }, + { + "epoch": 1.9267153171937141, + "grad_norm": 0.5756659507751465, + "learning_rate": 7.155488628904591e-05, + "loss": 0.2774, + "step": 49900 + }, + { + "epoch": 1.9271014324877407, + "grad_norm": 2.669837236404419, + "learning_rate": 7.152914526944414e-05, + "loss": 0.2742, + "step": 49910 + }, + { + "epoch": 1.9274875477817677, + "grad_norm": 1.3064197301864624, + "learning_rate": 7.150340424984234e-05, + "loss": 0.3908, + "step": 49920 + }, + { + "epoch": 1.9278736630757944, + "grad_norm": 1.54086434841156, + "learning_rate": 7.147766323024055e-05, + "loss": 0.2302, + "step": 49930 + }, + { + "epoch": 1.9282597783698212, + "grad_norm": 2.6577224731445312, + "learning_rate": 7.145192221063876e-05, + "loss": 0.2667, + "step": 49940 + }, + { + "epoch": 1.928645893663848, + "grad_norm": 1.0387258529663086, + "learning_rate": 7.142618119103698e-05, + "loss": 0.2418, + "step": 49950 + }, + { + "epoch": 1.9290320089578747, + "grad_norm": 0.592282772064209, + "learning_rate": 7.140044017143519e-05, + "loss": 0.2499, + "step": 49960 + }, + { + "epoch": 1.9294181242519017, + "grad_norm": 0.364241361618042, + "learning_rate": 7.137469915183342e-05, + "loss": 0.2056, + "step": 49970 + }, + { + "epoch": 1.9298042395459283, + "grad_norm": 2.9593188762664795, + "learning_rate": 7.134895813223163e-05, + "loss": 0.2514, + "step": 49980 + }, + { + "epoch": 1.9301903548399553, + "grad_norm": 1.9135371446609497, + "learning_rate": 7.132321711262983e-05, + "loss": 0.124, + "step": 49990 + }, + { + "epoch": 1.930576470133982, + "grad_norm": 0.5927162170410156, + "learning_rate": 7.129747609302804e-05, + "loss": 0.2257, + "step": 50000 + }, + { + "epoch": 1.9309625854280088, + "grad_norm": 1.7300679683685303, + "learning_rate": 7.127173507342626e-05, + "loss": 0.2302, + "step": 50010 + }, + { + "epoch": 1.9313487007220356, + "grad_norm": 1.48344087600708, + "learning_rate": 7.124599405382447e-05, + "loss": 0.3121, + "step": 50020 + }, + { + "epoch": 1.9317348160160623, + "grad_norm": 1.275780439376831, + "learning_rate": 7.122025303422268e-05, + "loss": 0.2202, + "step": 50030 + }, + { + "epoch": 1.9321209313100893, + "grad_norm": 1.2625102996826172, + "learning_rate": 7.119451201462091e-05, + "loss": 0.1819, + "step": 50040 + }, + { + "epoch": 1.9325070466041159, + "grad_norm": 2.6306488513946533, + "learning_rate": 7.116877099501912e-05, + "loss": 0.3846, + "step": 50050 + }, + { + "epoch": 1.9328931618981429, + "grad_norm": 2.507249355316162, + "learning_rate": 7.114302997541732e-05, + "loss": 0.3167, + "step": 50060 + }, + { + "epoch": 1.9332792771921696, + "grad_norm": 0.6122744679450989, + "learning_rate": 7.111728895581554e-05, + "loss": 0.2486, + "step": 50070 + }, + { + "epoch": 1.9336653924861964, + "grad_norm": 0.9299182295799255, + "learning_rate": 7.109154793621375e-05, + "loss": 0.2156, + "step": 50080 
+ }, + { + "epoch": 1.9340515077802232, + "grad_norm": 2.0560238361358643, + "learning_rate": 7.106580691661196e-05, + "loss": 0.1319, + "step": 50090 + }, + { + "epoch": 1.93443762307425, + "grad_norm": 0.9602612257003784, + "learning_rate": 7.104006589701019e-05, + "loss": 0.1433, + "step": 50100 + }, + { + "epoch": 1.934823738368277, + "grad_norm": 1.2760334014892578, + "learning_rate": 7.10143248774084e-05, + "loss": 0.2019, + "step": 50110 + }, + { + "epoch": 1.9352098536623035, + "grad_norm": 0.27404239773750305, + "learning_rate": 7.098858385780662e-05, + "loss": 0.5303, + "step": 50120 + }, + { + "epoch": 1.9355959689563305, + "grad_norm": 0.6597281694412231, + "learning_rate": 7.096284283820483e-05, + "loss": 0.2328, + "step": 50130 + }, + { + "epoch": 1.935982084250357, + "grad_norm": 3.6417131423950195, + "learning_rate": 7.093710181860303e-05, + "loss": 0.343, + "step": 50140 + }, + { + "epoch": 1.936368199544384, + "grad_norm": 0.9950355887413025, + "learning_rate": 7.091136079900124e-05, + "loss": 0.1877, + "step": 50150 + }, + { + "epoch": 1.9367543148384108, + "grad_norm": 0.3848172426223755, + "learning_rate": 7.088561977939947e-05, + "loss": 0.1541, + "step": 50160 + }, + { + "epoch": 1.9371404301324375, + "grad_norm": 0.6884573698043823, + "learning_rate": 7.085987875979768e-05, + "loss": 0.1744, + "step": 50170 + }, + { + "epoch": 1.9375265454264643, + "grad_norm": 1.5233834981918335, + "learning_rate": 7.08341377401959e-05, + "loss": 0.3595, + "step": 50180 + }, + { + "epoch": 1.937912660720491, + "grad_norm": 0.11241710186004639, + "learning_rate": 7.080839672059411e-05, + "loss": 0.1313, + "step": 50190 + }, + { + "epoch": 1.938298776014518, + "grad_norm": 1.3029096126556396, + "learning_rate": 7.078265570099232e-05, + "loss": 0.3459, + "step": 50200 + }, + { + "epoch": 1.9386848913085446, + "grad_norm": 1.1150782108306885, + "learning_rate": 7.075691468139052e-05, + "loss": 0.2662, + "step": 50210 + }, + { + "epoch": 1.9390710066025716, + "grad_norm": 0.09661692380905151, + "learning_rate": 7.073117366178875e-05, + "loss": 0.2377, + "step": 50220 + }, + { + "epoch": 1.9394571218965984, + "grad_norm": 2.488790512084961, + "learning_rate": 7.070543264218696e-05, + "loss": 0.2677, + "step": 50230 + }, + { + "epoch": 1.9398432371906251, + "grad_norm": 0.407704621553421, + "learning_rate": 7.067969162258518e-05, + "loss": 0.1208, + "step": 50240 + }, + { + "epoch": 1.940229352484652, + "grad_norm": 0.4769364297389984, + "learning_rate": 7.065395060298339e-05, + "loss": 0.1748, + "step": 50250 + }, + { + "epoch": 1.9406154677786787, + "grad_norm": 2.900118112564087, + "learning_rate": 7.06282095833816e-05, + "loss": 0.1948, + "step": 50260 + }, + { + "epoch": 1.9410015830727056, + "grad_norm": 1.1749001741409302, + "learning_rate": 7.060246856377982e-05, + "loss": 0.1688, + "step": 50270 + }, + { + "epoch": 1.9413876983667322, + "grad_norm": 0.5052315592765808, + "learning_rate": 7.057672754417802e-05, + "loss": 0.4284, + "step": 50280 + }, + { + "epoch": 1.9417738136607592, + "grad_norm": 0.614936113357544, + "learning_rate": 7.055098652457624e-05, + "loss": 0.2109, + "step": 50290 + }, + { + "epoch": 1.942159928954786, + "grad_norm": 1.9683163166046143, + "learning_rate": 7.052524550497446e-05, + "loss": 0.1518, + "step": 50300 + }, + { + "epoch": 1.9425460442488127, + "grad_norm": 0.8502413630485535, + "learning_rate": 7.049950448537267e-05, + "loss": 0.281, + "step": 50310 + }, + { + "epoch": 1.9429321595428395, + "grad_norm": 2.8081016540527344, + "learning_rate": 
7.047376346577088e-05, + "loss": 0.1802, + "step": 50320 + }, + { + "epoch": 1.9433182748368663, + "grad_norm": 1.9322141408920288, + "learning_rate": 7.04480224461691e-05, + "loss": 0.2218, + "step": 50330 + }, + { + "epoch": 1.9437043901308932, + "grad_norm": 0.8338032960891724, + "learning_rate": 7.042228142656731e-05, + "loss": 0.1836, + "step": 50340 + }, + { + "epoch": 1.9440905054249198, + "grad_norm": 1.232925295829773, + "learning_rate": 7.039654040696552e-05, + "loss": 0.2025, + "step": 50350 + }, + { + "epoch": 1.9444766207189468, + "grad_norm": 0.5655641555786133, + "learning_rate": 7.037079938736374e-05, + "loss": 0.2061, + "step": 50360 + }, + { + "epoch": 1.9448627360129733, + "grad_norm": 1.398917317390442, + "learning_rate": 7.034505836776195e-05, + "loss": 0.3471, + "step": 50370 + }, + { + "epoch": 1.9452488513070003, + "grad_norm": 0.8988509178161621, + "learning_rate": 7.031931734816016e-05, + "loss": 0.3504, + "step": 50380 + }, + { + "epoch": 1.945634966601027, + "grad_norm": 0.10333681106567383, + "learning_rate": 7.029357632855838e-05, + "loss": 0.352, + "step": 50390 + }, + { + "epoch": 1.9460210818950539, + "grad_norm": 1.3678967952728271, + "learning_rate": 7.026783530895659e-05, + "loss": 0.1763, + "step": 50400 + }, + { + "epoch": 1.9464071971890806, + "grad_norm": 3.1605618000030518, + "learning_rate": 7.02420942893548e-05, + "loss": 0.4636, + "step": 50410 + }, + { + "epoch": 1.9467933124831074, + "grad_norm": 0.02575235441327095, + "learning_rate": 7.021635326975302e-05, + "loss": 0.2352, + "step": 50420 + }, + { + "epoch": 1.9471794277771344, + "grad_norm": 1.789573311805725, + "learning_rate": 7.019061225015123e-05, + "loss": 0.3358, + "step": 50430 + }, + { + "epoch": 1.947565543071161, + "grad_norm": 1.0206273794174194, + "learning_rate": 7.016487123054944e-05, + "loss": 0.1509, + "step": 50440 + }, + { + "epoch": 1.947951658365188, + "grad_norm": 3.0096218585968018, + "learning_rate": 7.013913021094766e-05, + "loss": 0.3393, + "step": 50450 + }, + { + "epoch": 1.9483377736592147, + "grad_norm": 0.7118330001831055, + "learning_rate": 7.011338919134587e-05, + "loss": 0.2159, + "step": 50460 + }, + { + "epoch": 1.9487238889532414, + "grad_norm": 0.2753995954990387, + "learning_rate": 7.00876481717441e-05, + "loss": 0.2659, + "step": 50470 + }, + { + "epoch": 1.9491100042472682, + "grad_norm": 2.5077409744262695, + "learning_rate": 7.006190715214231e-05, + "loss": 0.3032, + "step": 50480 + }, + { + "epoch": 1.949496119541295, + "grad_norm": 0.3444388806819916, + "learning_rate": 7.003616613254051e-05, + "loss": 0.2625, + "step": 50490 + }, + { + "epoch": 1.949882234835322, + "grad_norm": 1.1488401889801025, + "learning_rate": 7.001042511293872e-05, + "loss": 0.2443, + "step": 50500 + }, + { + "epoch": 1.9502683501293485, + "grad_norm": 0.6464126706123352, + "learning_rate": 6.998468409333694e-05, + "loss": 0.1268, + "step": 50510 + }, + { + "epoch": 1.9506544654233755, + "grad_norm": 0.6716893911361694, + "learning_rate": 6.995894307373515e-05, + "loss": 0.1346, + "step": 50520 + }, + { + "epoch": 1.951040580717402, + "grad_norm": 1.7599986791610718, + "learning_rate": 6.993320205413336e-05, + "loss": 0.2365, + "step": 50530 + }, + { + "epoch": 1.951426696011429, + "grad_norm": 0.7483705282211304, + "learning_rate": 6.990746103453159e-05, + "loss": 0.326, + "step": 50540 + }, + { + "epoch": 1.9518128113054558, + "grad_norm": 1.9978541135787964, + "learning_rate": 6.98817200149298e-05, + "loss": 0.2166, + "step": 50550 + }, + { + "epoch": 
1.9521989265994826, + "grad_norm": 0.50310218334198, + "learning_rate": 6.9855978995328e-05, + "loss": 0.0868, + "step": 50560 + }, + { + "epoch": 1.9525850418935096, + "grad_norm": 0.4358873963356018, + "learning_rate": 6.983023797572622e-05, + "loss": 0.2483, + "step": 50570 + }, + { + "epoch": 1.952971157187536, + "grad_norm": 1.1612942218780518, + "learning_rate": 6.980449695612443e-05, + "loss": 0.1993, + "step": 50580 + }, + { + "epoch": 1.953357272481563, + "grad_norm": 2.2015364170074463, + "learning_rate": 6.977875593652264e-05, + "loss": 0.3102, + "step": 50590 + }, + { + "epoch": 1.9537433877755896, + "grad_norm": 0.2711980938911438, + "learning_rate": 6.975301491692087e-05, + "loss": 0.1243, + "step": 50600 + }, + { + "epoch": 1.9541295030696166, + "grad_norm": 1.2215690612792969, + "learning_rate": 6.972727389731908e-05, + "loss": 0.1812, + "step": 50610 + }, + { + "epoch": 1.9545156183636434, + "grad_norm": 1.5580382347106934, + "learning_rate": 6.97015328777173e-05, + "loss": 0.2582, + "step": 50620 + }, + { + "epoch": 1.9549017336576702, + "grad_norm": 4.351020336151123, + "learning_rate": 6.96757918581155e-05, + "loss": 0.2451, + "step": 50630 + }, + { + "epoch": 1.955287848951697, + "grad_norm": 0.8245100975036621, + "learning_rate": 6.965005083851371e-05, + "loss": 0.0861, + "step": 50640 + }, + { + "epoch": 1.9556739642457237, + "grad_norm": 3.2745001316070557, + "learning_rate": 6.962430981891192e-05, + "loss": 0.3108, + "step": 50650 + }, + { + "epoch": 1.9560600795397507, + "grad_norm": 1.2234485149383545, + "learning_rate": 6.959856879931015e-05, + "loss": 0.2457, + "step": 50660 + }, + { + "epoch": 1.9564461948337772, + "grad_norm": 0.8801009058952332, + "learning_rate": 6.957282777970836e-05, + "loss": 0.1758, + "step": 50670 + }, + { + "epoch": 1.9568323101278042, + "grad_norm": 1.0892245769500732, + "learning_rate": 6.954708676010658e-05, + "loss": 0.217, + "step": 50680 + }, + { + "epoch": 1.957218425421831, + "grad_norm": 0.47810041904449463, + "learning_rate": 6.952134574050479e-05, + "loss": 0.1797, + "step": 50690 + }, + { + "epoch": 1.9576045407158578, + "grad_norm": 2.988180160522461, + "learning_rate": 6.9495604720903e-05, + "loss": 0.2899, + "step": 50700 + }, + { + "epoch": 1.9579906560098845, + "grad_norm": 1.7291783094406128, + "learning_rate": 6.94698637013012e-05, + "loss": 0.214, + "step": 50710 + }, + { + "epoch": 1.9583767713039113, + "grad_norm": 1.1876074075698853, + "learning_rate": 6.944412268169943e-05, + "loss": 0.1801, + "step": 50720 + }, + { + "epoch": 1.9587628865979383, + "grad_norm": 1.5710748434066772, + "learning_rate": 6.941838166209764e-05, + "loss": 0.393, + "step": 50730 + }, + { + "epoch": 1.9591490018919648, + "grad_norm": 1.422935128211975, + "learning_rate": 6.939264064249586e-05, + "loss": 0.2637, + "step": 50740 + }, + { + "epoch": 1.9595351171859918, + "grad_norm": 0.26971349120140076, + "learning_rate": 6.936689962289407e-05, + "loss": 0.2616, + "step": 50750 + }, + { + "epoch": 1.9599212324800184, + "grad_norm": 0.02176385000348091, + "learning_rate": 6.934115860329228e-05, + "loss": 0.2458, + "step": 50760 + }, + { + "epoch": 1.9603073477740454, + "grad_norm": 1.587498664855957, + "learning_rate": 6.93154175836905e-05, + "loss": 0.2505, + "step": 50770 + }, + { + "epoch": 1.9606934630680721, + "grad_norm": 0.7178042531013489, + "learning_rate": 6.92896765640887e-05, + "loss": 0.1028, + "step": 50780 + }, + { + "epoch": 1.961079578362099, + "grad_norm": 0.4361552894115448, + "learning_rate": 6.926393554448692e-05, + 
"loss": 0.1327, + "step": 50790 + }, + { + "epoch": 1.9614656936561257, + "grad_norm": 0.1252552568912506, + "learning_rate": 6.923819452488514e-05, + "loss": 0.2598, + "step": 50800 + }, + { + "epoch": 1.9618518089501524, + "grad_norm": 1.0288604497909546, + "learning_rate": 6.921245350528335e-05, + "loss": 0.1415, + "step": 50810 + }, + { + "epoch": 1.9622379242441794, + "grad_norm": 1.633277416229248, + "learning_rate": 6.918671248568156e-05, + "loss": 0.3346, + "step": 50820 + }, + { + "epoch": 1.962624039538206, + "grad_norm": 1.034558653831482, + "learning_rate": 6.916097146607978e-05, + "loss": 0.3273, + "step": 50830 + }, + { + "epoch": 1.963010154832233, + "grad_norm": 0.4945419132709503, + "learning_rate": 6.913523044647799e-05, + "loss": 0.2027, + "step": 50840 + }, + { + "epoch": 1.9633962701262597, + "grad_norm": 0.49989691376686096, + "learning_rate": 6.91094894268762e-05, + "loss": 0.3082, + "step": 50850 + }, + { + "epoch": 1.9637823854202865, + "grad_norm": 1.8456840515136719, + "learning_rate": 6.908374840727442e-05, + "loss": 0.102, + "step": 50860 + }, + { + "epoch": 1.9641685007143133, + "grad_norm": 1.256460428237915, + "learning_rate": 6.905800738767263e-05, + "loss": 0.1375, + "step": 50870 + }, + { + "epoch": 1.96455461600834, + "grad_norm": 2.0318634510040283, + "learning_rate": 6.903226636807084e-05, + "loss": 0.2662, + "step": 50880 + }, + { + "epoch": 1.964940731302367, + "grad_norm": 0.6381733417510986, + "learning_rate": 6.900652534846906e-05, + "loss": 0.176, + "step": 50890 + }, + { + "epoch": 1.9653268465963936, + "grad_norm": 0.2860821485519409, + "learning_rate": 6.898078432886727e-05, + "loss": 0.1307, + "step": 50900 + }, + { + "epoch": 1.9657129618904206, + "grad_norm": 1.2890506982803345, + "learning_rate": 6.895504330926548e-05, + "loss": 0.1305, + "step": 50910 + }, + { + "epoch": 1.9660990771844473, + "grad_norm": 0.13067105412483215, + "learning_rate": 6.89293022896637e-05, + "loss": 0.1777, + "step": 50920 + }, + { + "epoch": 1.966485192478474, + "grad_norm": 0.7632800340652466, + "learning_rate": 6.890356127006191e-05, + "loss": 0.1248, + "step": 50930 + }, + { + "epoch": 1.9668713077725009, + "grad_norm": 0.10640933364629745, + "learning_rate": 6.887782025046012e-05, + "loss": 0.2368, + "step": 50940 + }, + { + "epoch": 1.9672574230665276, + "grad_norm": 0.8060460686683655, + "learning_rate": 6.885207923085834e-05, + "loss": 0.1205, + "step": 50950 + }, + { + "epoch": 1.9676435383605546, + "grad_norm": 0.028001902624964714, + "learning_rate": 6.882633821125655e-05, + "loss": 0.1134, + "step": 50960 + }, + { + "epoch": 1.9680296536545812, + "grad_norm": 1.236852765083313, + "learning_rate": 6.880059719165478e-05, + "loss": 0.3189, + "step": 50970 + }, + { + "epoch": 1.9684157689486081, + "grad_norm": 2.090635299682617, + "learning_rate": 6.877485617205298e-05, + "loss": 0.265, + "step": 50980 + }, + { + "epoch": 1.9688018842426347, + "grad_norm": 1.046667218208313, + "learning_rate": 6.874911515245119e-05, + "loss": 0.2501, + "step": 50990 + }, + { + "epoch": 1.9691879995366617, + "grad_norm": 2.0770325660705566, + "learning_rate": 6.87233741328494e-05, + "loss": 0.2136, + "step": 51000 + }, + { + "epoch": 1.9695741148306884, + "grad_norm": 0.7211881279945374, + "learning_rate": 6.869763311324762e-05, + "loss": 0.2434, + "step": 51010 + }, + { + "epoch": 1.9699602301247152, + "grad_norm": 0.8447550535202026, + "learning_rate": 6.867189209364583e-05, + "loss": 0.3904, + "step": 51020 + }, + { + "epoch": 1.970346345418742, + "grad_norm": 
0.2205502986907959, + "learning_rate": 6.864615107404406e-05, + "loss": 0.1804, + "step": 51030 + }, + { + "epoch": 1.9707324607127688, + "grad_norm": 0.5322203636169434, + "learning_rate": 6.862041005444227e-05, + "loss": 0.2106, + "step": 51040 + }, + { + "epoch": 1.9711185760067957, + "grad_norm": 3.1091675758361816, + "learning_rate": 6.859466903484048e-05, + "loss": 0.3203, + "step": 51050 + }, + { + "epoch": 1.9715046913008223, + "grad_norm": 2.670405864715576, + "learning_rate": 6.856892801523868e-05, + "loss": 0.2011, + "step": 51060 + }, + { + "epoch": 1.9718908065948493, + "grad_norm": 1.0524908304214478, + "learning_rate": 6.85431869956369e-05, + "loss": 0.1144, + "step": 51070 + }, + { + "epoch": 1.972276921888876, + "grad_norm": 3.9005608558654785, + "learning_rate": 6.851744597603511e-05, + "loss": 0.3813, + "step": 51080 + }, + { + "epoch": 1.9726630371829028, + "grad_norm": 1.1112456321716309, + "learning_rate": 6.849170495643332e-05, + "loss": 0.1755, + "step": 51090 + }, + { + "epoch": 1.9730491524769296, + "grad_norm": 0.5004397630691528, + "learning_rate": 6.846596393683155e-05, + "loss": 0.17, + "step": 51100 + }, + { + "epoch": 1.9734352677709563, + "grad_norm": 0.7849172353744507, + "learning_rate": 6.844022291722976e-05, + "loss": 0.1669, + "step": 51110 + }, + { + "epoch": 1.9738213830649833, + "grad_norm": 0.7871361970901489, + "learning_rate": 6.841448189762798e-05, + "loss": 0.1466, + "step": 51120 + }, + { + "epoch": 1.9742074983590099, + "grad_norm": 1.1090983152389526, + "learning_rate": 6.838874087802617e-05, + "loss": 0.117, + "step": 51130 + }, + { + "epoch": 1.9745936136530369, + "grad_norm": 0.8283473253250122, + "learning_rate": 6.836299985842439e-05, + "loss": 0.1792, + "step": 51140 + }, + { + "epoch": 1.9749797289470636, + "grad_norm": 1.0861999988555908, + "learning_rate": 6.83372588388226e-05, + "loss": 0.1357, + "step": 51150 + }, + { + "epoch": 1.9753658442410904, + "grad_norm": 1.0975921154022217, + "learning_rate": 6.831151781922083e-05, + "loss": 0.2618, + "step": 51160 + }, + { + "epoch": 1.9757519595351172, + "grad_norm": 1.579583764076233, + "learning_rate": 6.828577679961904e-05, + "loss": 0.3458, + "step": 51170 + }, + { + "epoch": 1.976138074829144, + "grad_norm": 0.48704493045806885, + "learning_rate": 6.826003578001726e-05, + "loss": 0.1892, + "step": 51180 + }, + { + "epoch": 1.976524190123171, + "grad_norm": 0.4196261465549469, + "learning_rate": 6.823429476041547e-05, + "loss": 0.2072, + "step": 51190 + }, + { + "epoch": 1.9769103054171975, + "grad_norm": 0.5368069410324097, + "learning_rate": 6.820855374081367e-05, + "loss": 0.1758, + "step": 51200 + }, + { + "epoch": 1.9772964207112245, + "grad_norm": 3.1401877403259277, + "learning_rate": 6.818281272121188e-05, + "loss": 0.2463, + "step": 51210 + }, + { + "epoch": 1.977682536005251, + "grad_norm": 2.333087682723999, + "learning_rate": 6.815707170161011e-05, + "loss": 0.1826, + "step": 51220 + }, + { + "epoch": 1.978068651299278, + "grad_norm": 0.4700605869293213, + "learning_rate": 6.813133068200832e-05, + "loss": 0.1761, + "step": 51230 + }, + { + "epoch": 1.9784547665933048, + "grad_norm": 0.23108141124248505, + "learning_rate": 6.810558966240653e-05, + "loss": 0.3299, + "step": 51240 + }, + { + "epoch": 1.9788408818873315, + "grad_norm": 1.1515973806381226, + "learning_rate": 6.807984864280475e-05, + "loss": 0.2352, + "step": 51250 + }, + { + "epoch": 1.9792269971813583, + "grad_norm": 3.1671624183654785, + "learning_rate": 6.805410762320296e-05, + "loss": 0.18, + "step": 
51260 + }, + { + "epoch": 1.979613112475385, + "grad_norm": 1.9045623540878296, + "learning_rate": 6.802836660360116e-05, + "loss": 0.1498, + "step": 51270 + }, + { + "epoch": 1.979999227769412, + "grad_norm": 3.6761367321014404, + "learning_rate": 6.800262558399939e-05, + "loss": 0.3242, + "step": 51280 + }, + { + "epoch": 1.9803853430634386, + "grad_norm": 0.8505986332893372, + "learning_rate": 6.79768845643976e-05, + "loss": 0.1563, + "step": 51290 + }, + { + "epoch": 1.9807714583574656, + "grad_norm": 0.5191363096237183, + "learning_rate": 6.795114354479581e-05, + "loss": 0.1699, + "step": 51300 + }, + { + "epoch": 1.9811575736514924, + "grad_norm": 1.0205559730529785, + "learning_rate": 6.792540252519403e-05, + "loss": 0.1785, + "step": 51310 + }, + { + "epoch": 1.9815436889455191, + "grad_norm": 0.929551899433136, + "learning_rate": 6.789966150559224e-05, + "loss": 0.2618, + "step": 51320 + }, + { + "epoch": 1.981929804239546, + "grad_norm": 0.3799718916416168, + "learning_rate": 6.787392048599045e-05, + "loss": 0.2574, + "step": 51330 + }, + { + "epoch": 1.9823159195335727, + "grad_norm": 0.8543326258659363, + "learning_rate": 6.784817946638867e-05, + "loss": 0.2277, + "step": 51340 + }, + { + "epoch": 1.9827020348275997, + "grad_norm": 1.2967779636383057, + "learning_rate": 6.782243844678688e-05, + "loss": 0.1693, + "step": 51350 + }, + { + "epoch": 1.9830881501216262, + "grad_norm": 0.9887800216674805, + "learning_rate": 6.77966974271851e-05, + "loss": 0.151, + "step": 51360 + }, + { + "epoch": 1.9834742654156532, + "grad_norm": 1.197924256324768, + "learning_rate": 6.777095640758331e-05, + "loss": 0.2184, + "step": 51370 + }, + { + "epoch": 1.98386038070968, + "grad_norm": 1.3980039358139038, + "learning_rate": 6.774521538798152e-05, + "loss": 0.3999, + "step": 51380 + }, + { + "epoch": 1.9842464960037067, + "grad_norm": 2.041222095489502, + "learning_rate": 6.771947436837973e-05, + "loss": 0.203, + "step": 51390 + }, + { + "epoch": 1.9846326112977335, + "grad_norm": 1.3461644649505615, + "learning_rate": 6.769373334877795e-05, + "loss": 0.2735, + "step": 51400 + }, + { + "epoch": 1.9850187265917603, + "grad_norm": 1.046863079071045, + "learning_rate": 6.766799232917616e-05, + "loss": 0.3281, + "step": 51410 + }, + { + "epoch": 1.9854048418857873, + "grad_norm": 1.3338502645492554, + "learning_rate": 6.764225130957437e-05, + "loss": 0.2538, + "step": 51420 + }, + { + "epoch": 1.9857909571798138, + "grad_norm": 1.3392490148544312, + "learning_rate": 6.761651028997259e-05, + "loss": 0.2737, + "step": 51430 + }, + { + "epoch": 1.9861770724738408, + "grad_norm": 1.5787503719329834, + "learning_rate": 6.75907692703708e-05, + "loss": 0.185, + "step": 51440 + }, + { + "epoch": 1.9865631877678673, + "grad_norm": 0.5758817791938782, + "learning_rate": 6.756502825076901e-05, + "loss": 0.1879, + "step": 51450 + }, + { + "epoch": 1.9869493030618943, + "grad_norm": 1.6144100427627563, + "learning_rate": 6.753928723116723e-05, + "loss": 0.2082, + "step": 51460 + }, + { + "epoch": 1.987335418355921, + "grad_norm": 0.7562010884284973, + "learning_rate": 6.751354621156545e-05, + "loss": 0.1859, + "step": 51470 + }, + { + "epoch": 1.9877215336499479, + "grad_norm": 1.3656364679336548, + "learning_rate": 6.748780519196365e-05, + "loss": 0.1591, + "step": 51480 + }, + { + "epoch": 1.9881076489439746, + "grad_norm": 2.7086987495422363, + "learning_rate": 6.746206417236187e-05, + "loss": 0.2364, + "step": 51490 + }, + { + "epoch": 1.9884937642380014, + "grad_norm": 2.2941534519195557, + 
"learning_rate": 6.743632315276008e-05, + "loss": 0.2207, + "step": 51500 + }, + { + "epoch": 1.9888798795320284, + "grad_norm": 1.2723060846328735, + "learning_rate": 6.74105821331583e-05, + "loss": 0.2617, + "step": 51510 + }, + { + "epoch": 1.989265994826055, + "grad_norm": 2.308548927307129, + "learning_rate": 6.738484111355651e-05, + "loss": 0.1709, + "step": 51520 + }, + { + "epoch": 1.989652110120082, + "grad_norm": 0.7583028078079224, + "learning_rate": 6.735910009395473e-05, + "loss": 0.2998, + "step": 51530 + }, + { + "epoch": 1.9900382254141087, + "grad_norm": 2.3491339683532715, + "learning_rate": 6.733335907435295e-05, + "loss": 0.209, + "step": 51540 + }, + { + "epoch": 1.9904243407081355, + "grad_norm": 3.154418468475342, + "learning_rate": 6.730761805475115e-05, + "loss": 0.3855, + "step": 51550 + }, + { + "epoch": 1.9908104560021622, + "grad_norm": 0.6829432845115662, + "learning_rate": 6.728187703514936e-05, + "loss": 0.1427, + "step": 51560 + }, + { + "epoch": 1.991196571296189, + "grad_norm": 1.331217646598816, + "learning_rate": 6.725613601554757e-05, + "loss": 0.1843, + "step": 51570 + }, + { + "epoch": 1.991582686590216, + "grad_norm": 2.35197114944458, + "learning_rate": 6.723039499594579e-05, + "loss": 0.2557, + "step": 51580 + }, + { + "epoch": 1.9919688018842425, + "grad_norm": 0.5971415638923645, + "learning_rate": 6.7204653976344e-05, + "loss": 0.1782, + "step": 51590 + }, + { + "epoch": 1.9923549171782695, + "grad_norm": 1.0941762924194336, + "learning_rate": 6.717891295674223e-05, + "loss": 0.1377, + "step": 51600 + }, + { + "epoch": 1.9927410324722963, + "grad_norm": 0.43831324577331543, + "learning_rate": 6.715317193714044e-05, + "loss": 0.181, + "step": 51610 + }, + { + "epoch": 1.993127147766323, + "grad_norm": 1.4920772314071655, + "learning_rate": 6.712743091753864e-05, + "loss": 0.2724, + "step": 51620 + }, + { + "epoch": 1.9935132630603498, + "grad_norm": 0.6723024845123291, + "learning_rate": 6.710168989793685e-05, + "loss": 0.1732, + "step": 51630 + }, + { + "epoch": 1.9938993783543766, + "grad_norm": 0.925322413444519, + "learning_rate": 6.707594887833507e-05, + "loss": 0.2794, + "step": 51640 + }, + { + "epoch": 1.9942854936484036, + "grad_norm": 0.27710771560668945, + "learning_rate": 6.705020785873328e-05, + "loss": 0.2379, + "step": 51650 + }, + { + "epoch": 1.9946716089424301, + "grad_norm": 0.24498054385185242, + "learning_rate": 6.702446683913151e-05, + "loss": 0.2685, + "step": 51660 + }, + { + "epoch": 1.995057724236457, + "grad_norm": 0.43449532985687256, + "learning_rate": 6.699872581952972e-05, + "loss": 0.2799, + "step": 51670 + }, + { + "epoch": 1.9954438395304837, + "grad_norm": 1.3820387125015259, + "learning_rate": 6.697298479992793e-05, + "loss": 0.3061, + "step": 51680 + }, + { + "epoch": 1.9958299548245106, + "grad_norm": 1.3658883571624756, + "learning_rate": 6.694724378032615e-05, + "loss": 0.2033, + "step": 51690 + }, + { + "epoch": 1.9962160701185374, + "grad_norm": 0.905115008354187, + "learning_rate": 6.692150276072435e-05, + "loss": 0.1817, + "step": 51700 + }, + { + "epoch": 1.9966021854125642, + "grad_norm": 0.6158314347267151, + "learning_rate": 6.689576174112256e-05, + "loss": 0.1853, + "step": 51710 + }, + { + "epoch": 1.996988300706591, + "grad_norm": 0.145170658826828, + "learning_rate": 6.687002072152079e-05, + "loss": 0.1351, + "step": 51720 + }, + { + "epoch": 1.9973744160006177, + "grad_norm": 0.6830449104309082, + "learning_rate": 6.6844279701919e-05, + "loss": 0.1196, + "step": 51730 + }, + { + "epoch": 
1.9977605312946447, + "grad_norm": 0.5634799599647522, + "learning_rate": 6.681853868231721e-05, + "loss": 0.1919, + "step": 51740 + }, + { + "epoch": 1.9981466465886712, + "grad_norm": 1.7590057849884033, + "learning_rate": 6.679279766271543e-05, + "loss": 0.5667, + "step": 51750 + }, + { + "epoch": 1.9985327618826982, + "grad_norm": 0.638635516166687, + "learning_rate": 6.676705664311364e-05, + "loss": 0.2653, + "step": 51760 + }, + { + "epoch": 1.998918877176725, + "grad_norm": 2.008990526199341, + "learning_rate": 6.674131562351184e-05, + "loss": 0.3168, + "step": 51770 + }, + { + "epoch": 1.9993049924707518, + "grad_norm": 0.6151747107505798, + "learning_rate": 6.671557460391007e-05, + "loss": 0.2078, + "step": 51780 + }, + { + "epoch": 1.9996911077647785, + "grad_norm": 1.702333688735962, + "learning_rate": 6.668983358430828e-05, + "loss": 0.3154, + "step": 51790 + }, + { + "epoch": 2.0000772230588053, + "grad_norm": 1.1011065244674683, + "learning_rate": 6.66640925647065e-05, + "loss": 0.2318, + "step": 51800 + }, + { + "epoch": 2.0004633383528323, + "grad_norm": 0.7088577747344971, + "learning_rate": 6.663835154510471e-05, + "loss": 0.2106, + "step": 51810 + }, + { + "epoch": 2.000849453646859, + "grad_norm": 0.9032210111618042, + "learning_rate": 6.661261052550292e-05, + "loss": 0.2235, + "step": 51820 + }, + { + "epoch": 2.001235568940886, + "grad_norm": 1.8217551708221436, + "learning_rate": 6.658686950590113e-05, + "loss": 0.2385, + "step": 51830 + }, + { + "epoch": 2.0016216842349124, + "grad_norm": 0.3937009871006012, + "learning_rate": 6.656112848629933e-05, + "loss": 0.0739, + "step": 51840 + }, + { + "epoch": 2.0020077995289394, + "grad_norm": 0.1690339297056198, + "learning_rate": 6.653538746669756e-05, + "loss": 0.2361, + "step": 51850 + }, + { + "epoch": 2.0023939148229664, + "grad_norm": 0.4467180073261261, + "learning_rate": 6.650964644709577e-05, + "loss": 0.1675, + "step": 51860 + }, + { + "epoch": 2.002780030116993, + "grad_norm": 0.6363991498947144, + "learning_rate": 6.648390542749399e-05, + "loss": 0.1565, + "step": 51870 + }, + { + "epoch": 2.00316614541102, + "grad_norm": 0.6243847012519836, + "learning_rate": 6.64581644078922e-05, + "loss": 0.3038, + "step": 51880 + }, + { + "epoch": 2.0035522607050464, + "grad_norm": 1.705432415008545, + "learning_rate": 6.643242338829041e-05, + "loss": 0.1347, + "step": 51890 + }, + { + "epoch": 2.0039383759990734, + "grad_norm": 0.8589022159576416, + "learning_rate": 6.640668236868863e-05, + "loss": 0.1867, + "step": 51900 + }, + { + "epoch": 2.0043244912931, + "grad_norm": 1.468563437461853, + "learning_rate": 6.638094134908684e-05, + "loss": 0.3603, + "step": 51910 + }, + { + "epoch": 2.004710606587127, + "grad_norm": 2.0505447387695312, + "learning_rate": 6.635520032948505e-05, + "loss": 0.3336, + "step": 51920 + }, + { + "epoch": 2.0050967218811535, + "grad_norm": 0.06167216598987579, + "learning_rate": 6.632945930988327e-05, + "loss": 0.0434, + "step": 51930 + }, + { + "epoch": 2.0054828371751805, + "grad_norm": 0.43672385811805725, + "learning_rate": 6.630371829028148e-05, + "loss": 0.2008, + "step": 51940 + }, + { + "epoch": 2.0058689524692075, + "grad_norm": 0.20339979231357574, + "learning_rate": 6.62779772706797e-05, + "loss": 0.1828, + "step": 51950 + }, + { + "epoch": 2.006255067763234, + "grad_norm": 0.9884247779846191, + "learning_rate": 6.625223625107791e-05, + "loss": 0.1913, + "step": 51960 + }, + { + "epoch": 2.006641183057261, + "grad_norm": 1.9355684518814087, + "learning_rate": 6.622649523147612e-05, + 
"loss": 0.2164, + "step": 51970 + }, + { + "epoch": 2.0070272983512876, + "grad_norm": 0.6647536754608154, + "learning_rate": 6.620075421187433e-05, + "loss": 0.1993, + "step": 51980 + }, + { + "epoch": 2.0074134136453146, + "grad_norm": 0.5795693397521973, + "learning_rate": 6.617501319227255e-05, + "loss": 0.1887, + "step": 51990 + }, + { + "epoch": 2.007799528939341, + "grad_norm": 1.605940341949463, + "learning_rate": 6.614927217267076e-05, + "loss": 0.247, + "step": 52000 + }, + { + "epoch": 2.008185644233368, + "grad_norm": 1.3359249830245972, + "learning_rate": 6.612353115306897e-05, + "loss": 0.2031, + "step": 52010 + }, + { + "epoch": 2.008571759527395, + "grad_norm": 3.889131784439087, + "learning_rate": 6.609779013346719e-05, + "loss": 0.1584, + "step": 52020 + }, + { + "epoch": 2.0089578748214216, + "grad_norm": 1.9775649309158325, + "learning_rate": 6.607204911386541e-05, + "loss": 0.1041, + "step": 52030 + }, + { + "epoch": 2.0093439901154486, + "grad_norm": 0.13655538856983185, + "learning_rate": 6.604630809426363e-05, + "loss": 0.1621, + "step": 52040 + }, + { + "epoch": 2.009730105409475, + "grad_norm": 0.659899115562439, + "learning_rate": 6.602056707466183e-05, + "loss": 0.1603, + "step": 52050 + }, + { + "epoch": 2.010116220703502, + "grad_norm": 0.5897572040557861, + "learning_rate": 6.599482605506004e-05, + "loss": 0.1807, + "step": 52060 + }, + { + "epoch": 2.0105023359975287, + "grad_norm": 1.9831010103225708, + "learning_rate": 6.596908503545825e-05, + "loss": 0.1517, + "step": 52070 + }, + { + "epoch": 2.0108884512915557, + "grad_norm": 0.16144464910030365, + "learning_rate": 6.594334401585647e-05, + "loss": 0.0999, + "step": 52080 + }, + { + "epoch": 2.0112745665855822, + "grad_norm": 1.6258141994476318, + "learning_rate": 6.591760299625468e-05, + "loss": 0.2152, + "step": 52090 + }, + { + "epoch": 2.0116606818796092, + "grad_norm": 1.0857526063919067, + "learning_rate": 6.589186197665291e-05, + "loss": 0.3286, + "step": 52100 + }, + { + "epoch": 2.012046797173636, + "grad_norm": 0.2918669581413269, + "learning_rate": 6.586612095705112e-05, + "loss": 0.0839, + "step": 52110 + }, + { + "epoch": 2.0124329124676628, + "grad_norm": 0.7994667887687683, + "learning_rate": 6.584037993744932e-05, + "loss": 0.0723, + "step": 52120 + }, + { + "epoch": 2.0128190277616897, + "grad_norm": 1.1056885719299316, + "learning_rate": 6.581463891784753e-05, + "loss": 0.2936, + "step": 52130 + }, + { + "epoch": 2.0132051430557163, + "grad_norm": 1.6628743410110474, + "learning_rate": 6.578889789824575e-05, + "loss": 0.1095, + "step": 52140 + }, + { + "epoch": 2.0135912583497433, + "grad_norm": 0.7514179348945618, + "learning_rate": 6.576315687864396e-05, + "loss": 0.2834, + "step": 52150 + }, + { + "epoch": 2.01397737364377, + "grad_norm": 0.17141447961330414, + "learning_rate": 6.573741585904219e-05, + "loss": 0.0834, + "step": 52160 + }, + { + "epoch": 2.014363488937797, + "grad_norm": 1.6377034187316895, + "learning_rate": 6.57116748394404e-05, + "loss": 0.1515, + "step": 52170 + }, + { + "epoch": 2.014749604231824, + "grad_norm": 2.604389190673828, + "learning_rate": 6.568593381983861e-05, + "loss": 0.0964, + "step": 52180 + }, + { + "epoch": 2.0151357195258504, + "grad_norm": 0.737349808216095, + "learning_rate": 6.566019280023681e-05, + "loss": 0.1861, + "step": 52190 + }, + { + "epoch": 2.0155218348198773, + "grad_norm": 0.499905526638031, + "learning_rate": 6.563445178063503e-05, + "loss": 0.2146, + "step": 52200 + }, + { + "epoch": 2.015907950113904, + "grad_norm": 
1.0351229906082153, + "learning_rate": 6.560871076103324e-05, + "loss": 0.2461, + "step": 52210 + }, + { + "epoch": 2.016294065407931, + "grad_norm": 1.6009700298309326, + "learning_rate": 6.558296974143147e-05, + "loss": 0.2325, + "step": 52220 + }, + { + "epoch": 2.0166801807019574, + "grad_norm": 1.5281599760055542, + "learning_rate": 6.555722872182968e-05, + "loss": 0.2172, + "step": 52230 + }, + { + "epoch": 2.0170662959959844, + "grad_norm": 0.7039555907249451, + "learning_rate": 6.553148770222789e-05, + "loss": 0.3143, + "step": 52240 + }, + { + "epoch": 2.0174524112900114, + "grad_norm": 1.310943365097046, + "learning_rate": 6.55057466826261e-05, + "loss": 0.1785, + "step": 52250 + }, + { + "epoch": 2.017838526584038, + "grad_norm": 0.6066591143608093, + "learning_rate": 6.548000566302432e-05, + "loss": 0.1295, + "step": 52260 + }, + { + "epoch": 2.018224641878065, + "grad_norm": 2.3664653301239014, + "learning_rate": 6.545426464342252e-05, + "loss": 0.229, + "step": 52270 + }, + { + "epoch": 2.0186107571720915, + "grad_norm": 1.0997484922409058, + "learning_rate": 6.542852362382075e-05, + "loss": 0.1494, + "step": 52280 + }, + { + "epoch": 2.0189968724661185, + "grad_norm": 3.237204074859619, + "learning_rate": 6.540278260421896e-05, + "loss": 0.1952, + "step": 52290 + }, + { + "epoch": 2.019382987760145, + "grad_norm": 1.9585202932357788, + "learning_rate": 6.537704158461717e-05, + "loss": 0.1986, + "step": 52300 + }, + { + "epoch": 2.019769103054172, + "grad_norm": 0.18385589122772217, + "learning_rate": 6.535130056501539e-05, + "loss": 0.1373, + "step": 52310 + }, + { + "epoch": 2.0201552183481986, + "grad_norm": 1.4465802907943726, + "learning_rate": 6.53255595454136e-05, + "loss": 0.2492, + "step": 52320 + }, + { + "epoch": 2.0205413336422255, + "grad_norm": 0.3525356352329254, + "learning_rate": 6.529981852581181e-05, + "loss": 0.1046, + "step": 52330 + }, + { + "epoch": 2.0209274489362525, + "grad_norm": 0.10468830168247223, + "learning_rate": 6.527407750621003e-05, + "loss": 0.0702, + "step": 52340 + }, + { + "epoch": 2.021313564230279, + "grad_norm": 0.2023550570011139, + "learning_rate": 6.524833648660824e-05, + "loss": 0.2134, + "step": 52350 + }, + { + "epoch": 2.021699679524306, + "grad_norm": 1.8294217586517334, + "learning_rate": 6.522259546700645e-05, + "loss": 0.2443, + "step": 52360 + }, + { + "epoch": 2.0220857948183326, + "grad_norm": 0.3425254225730896, + "learning_rate": 6.519685444740467e-05, + "loss": 0.151, + "step": 52370 + }, + { + "epoch": 2.0224719101123596, + "grad_norm": 0.3099939227104187, + "learning_rate": 6.517111342780288e-05, + "loss": 0.1133, + "step": 52380 + }, + { + "epoch": 2.022858025406386, + "grad_norm": 2.0373382568359375, + "learning_rate": 6.514537240820109e-05, + "loss": 0.1773, + "step": 52390 + }, + { + "epoch": 2.023244140700413, + "grad_norm": 0.5817141532897949, + "learning_rate": 6.51196313885993e-05, + "loss": 0.1276, + "step": 52400 + }, + { + "epoch": 2.02363025599444, + "grad_norm": 2.0622966289520264, + "learning_rate": 6.509389036899752e-05, + "loss": 0.1889, + "step": 52410 + }, + { + "epoch": 2.0240163712884667, + "grad_norm": 1.1906920671463013, + "learning_rate": 6.506814934939573e-05, + "loss": 0.096, + "step": 52420 + }, + { + "epoch": 2.0244024865824937, + "grad_norm": 0.1317962408065796, + "learning_rate": 6.504240832979395e-05, + "loss": 0.2232, + "step": 52430 + }, + { + "epoch": 2.02478860187652, + "grad_norm": 0.13029718399047852, + "learning_rate": 6.501666731019216e-05, + "loss": 0.1511, + "step": 52440 + 
}, + { + "epoch": 2.025174717170547, + "grad_norm": 0.793836772441864, + "learning_rate": 6.499092629059037e-05, + "loss": 0.1654, + "step": 52450 + }, + { + "epoch": 2.0255608324645737, + "grad_norm": 0.5743208527565002, + "learning_rate": 6.496518527098859e-05, + "loss": 0.2694, + "step": 52460 + }, + { + "epoch": 2.0259469477586007, + "grad_norm": 0.9897276759147644, + "learning_rate": 6.49394442513868e-05, + "loss": 0.2149, + "step": 52470 + }, + { + "epoch": 2.0263330630526277, + "grad_norm": 2.601984977722168, + "learning_rate": 6.491370323178501e-05, + "loss": 0.1285, + "step": 52480 + }, + { + "epoch": 2.0267191783466543, + "grad_norm": 1.3703612089157104, + "learning_rate": 6.488796221218323e-05, + "loss": 0.1226, + "step": 52490 + }, + { + "epoch": 2.0271052936406813, + "grad_norm": 0.8976957201957703, + "learning_rate": 6.486222119258144e-05, + "loss": 0.3069, + "step": 52500 + }, + { + "epoch": 2.027491408934708, + "grad_norm": 1.4867346286773682, + "learning_rate": 6.483648017297965e-05, + "loss": 0.1804, + "step": 52510 + }, + { + "epoch": 2.027877524228735, + "grad_norm": 0.9173004031181335, + "learning_rate": 6.481073915337787e-05, + "loss": 0.193, + "step": 52520 + }, + { + "epoch": 2.0282636395227613, + "grad_norm": 1.4100719690322876, + "learning_rate": 6.478499813377609e-05, + "loss": 0.2443, + "step": 52530 + }, + { + "epoch": 2.0286497548167883, + "grad_norm": 0.08620387315750122, + "learning_rate": 6.475925711417429e-05, + "loss": 0.2858, + "step": 52540 + }, + { + "epoch": 2.029035870110815, + "grad_norm": 0.6817231178283691, + "learning_rate": 6.47335160945725e-05, + "loss": 0.1421, + "step": 52550 + }, + { + "epoch": 2.029421985404842, + "grad_norm": 0.7937541604042053, + "learning_rate": 6.470777507497072e-05, + "loss": 0.1856, + "step": 52560 + }, + { + "epoch": 2.029808100698869, + "grad_norm": 0.3391193151473999, + "learning_rate": 6.468203405536893e-05, + "loss": 0.2714, + "step": 52570 + }, + { + "epoch": 2.0301942159928954, + "grad_norm": 0.45617592334747314, + "learning_rate": 6.465629303576715e-05, + "loss": 0.1528, + "step": 52580 + }, + { + "epoch": 2.0305803312869224, + "grad_norm": 0.6412602663040161, + "learning_rate": 6.463055201616537e-05, + "loss": 0.0908, + "step": 52590 + }, + { + "epoch": 2.030966446580949, + "grad_norm": 0.5588594079017639, + "learning_rate": 6.460481099656359e-05, + "loss": 0.1448, + "step": 52600 + }, + { + "epoch": 2.031352561874976, + "grad_norm": 0.5726466774940491, + "learning_rate": 6.457906997696179e-05, + "loss": 0.2771, + "step": 52610 + }, + { + "epoch": 2.0317386771690025, + "grad_norm": 1.625110387802124, + "learning_rate": 6.455332895736e-05, + "loss": 0.1147, + "step": 52620 + }, + { + "epoch": 2.0321247924630295, + "grad_norm": 0.9171527624130249, + "learning_rate": 6.452758793775821e-05, + "loss": 0.0847, + "step": 52630 + }, + { + "epoch": 2.0325109077570565, + "grad_norm": 0.1905253678560257, + "learning_rate": 6.450184691815643e-05, + "loss": 0.2086, + "step": 52640 + }, + { + "epoch": 2.032897023051083, + "grad_norm": 0.2381320744752884, + "learning_rate": 6.447610589855464e-05, + "loss": 0.1767, + "step": 52650 + }, + { + "epoch": 2.03328313834511, + "grad_norm": 2.2482104301452637, + "learning_rate": 6.445036487895287e-05, + "loss": 0.2626, + "step": 52660 + }, + { + "epoch": 2.0336692536391365, + "grad_norm": 2.5901641845703125, + "learning_rate": 6.442462385935108e-05, + "loss": 0.21, + "step": 52670 + }, + { + "epoch": 2.0340553689331635, + "grad_norm": 3.242913007736206, + "learning_rate": 
6.439888283974929e-05, + "loss": 0.3508, + "step": 52680 + }, + { + "epoch": 2.03444148422719, + "grad_norm": 0.12099716812372208, + "learning_rate": 6.437314182014749e-05, + "loss": 0.1109, + "step": 52690 + }, + { + "epoch": 2.034827599521217, + "grad_norm": 1.384753704071045, + "learning_rate": 6.43474008005457e-05, + "loss": 0.2023, + "step": 52700 + }, + { + "epoch": 2.035213714815244, + "grad_norm": 1.1507978439331055, + "learning_rate": 6.432165978094392e-05, + "loss": 0.2656, + "step": 52710 + }, + { + "epoch": 2.0355998301092706, + "grad_norm": 0.43186309933662415, + "learning_rate": 6.429591876134215e-05, + "loss": 0.2265, + "step": 52720 + }, + { + "epoch": 2.0359859454032976, + "grad_norm": 1.2791942358016968, + "learning_rate": 6.427017774174036e-05, + "loss": 0.189, + "step": 52730 + }, + { + "epoch": 2.036372060697324, + "grad_norm": 2.613863229751587, + "learning_rate": 6.424443672213857e-05, + "loss": 0.1636, + "step": 52740 + }, + { + "epoch": 2.036758175991351, + "grad_norm": 0.9245595932006836, + "learning_rate": 6.421869570253679e-05, + "loss": 0.2844, + "step": 52750 + }, + { + "epoch": 2.0371442912853777, + "grad_norm": 0.5449417233467102, + "learning_rate": 6.419295468293499e-05, + "loss": 0.24, + "step": 52760 + }, + { + "epoch": 2.0375304065794047, + "grad_norm": 0.24012671411037445, + "learning_rate": 6.41672136633332e-05, + "loss": 0.226, + "step": 52770 + }, + { + "epoch": 2.037916521873431, + "grad_norm": 0.07777285575866699, + "learning_rate": 6.414147264373143e-05, + "loss": 0.1351, + "step": 52780 + }, + { + "epoch": 2.038302637167458, + "grad_norm": 1.4423730373382568, + "learning_rate": 6.411573162412964e-05, + "loss": 0.1358, + "step": 52790 + }, + { + "epoch": 2.038688752461485, + "grad_norm": 0.9842507243156433, + "learning_rate": 6.408999060452785e-05, + "loss": 0.1947, + "step": 52800 + }, + { + "epoch": 2.0390748677555117, + "grad_norm": 0.23728783428668976, + "learning_rate": 6.406424958492607e-05, + "loss": 0.1655, + "step": 52810 + }, + { + "epoch": 2.0394609830495387, + "grad_norm": 1.0350521802902222, + "learning_rate": 6.403850856532428e-05, + "loss": 0.2269, + "step": 52820 + }, + { + "epoch": 2.0398470983435653, + "grad_norm": 0.7008020877838135, + "learning_rate": 6.401276754572248e-05, + "loss": 0.0694, + "step": 52830 + }, + { + "epoch": 2.0402332136375922, + "grad_norm": 0.2844768464565277, + "learning_rate": 6.39870265261207e-05, + "loss": 0.2871, + "step": 52840 + }, + { + "epoch": 2.040619328931619, + "grad_norm": 0.10209562629461288, + "learning_rate": 6.396128550651892e-05, + "loss": 0.1556, + "step": 52850 + }, + { + "epoch": 2.041005444225646, + "grad_norm": 0.3833054006099701, + "learning_rate": 6.393554448691713e-05, + "loss": 0.2414, + "step": 52860 + }, + { + "epoch": 2.0413915595196728, + "grad_norm": 1.023119330406189, + "learning_rate": 6.390980346731535e-05, + "loss": 0.3514, + "step": 52870 + }, + { + "epoch": 2.0417776748136993, + "grad_norm": 0.2640077769756317, + "learning_rate": 6.388406244771356e-05, + "loss": 0.1651, + "step": 52880 + }, + { + "epoch": 2.0421637901077263, + "grad_norm": 0.5331079363822937, + "learning_rate": 6.385832142811177e-05, + "loss": 0.225, + "step": 52890 + }, + { + "epoch": 2.042549905401753, + "grad_norm": 1.8062942028045654, + "learning_rate": 6.383258040850999e-05, + "loss": 0.145, + "step": 52900 + }, + { + "epoch": 2.04293602069578, + "grad_norm": 0.5096505284309387, + "learning_rate": 6.38068393889082e-05, + "loss": 0.1353, + "step": 52910 + }, + { + "epoch": 2.0433221359898064, + 
"grad_norm": 0.6749324798583984, + "learning_rate": 6.378109836930641e-05, + "loss": 0.2563, + "step": 52920 + }, + { + "epoch": 2.0437082512838334, + "grad_norm": 2.487567186355591, + "learning_rate": 6.375535734970463e-05, + "loss": 0.1616, + "step": 52930 + }, + { + "epoch": 2.0440943665778604, + "grad_norm": 0.7904801964759827, + "learning_rate": 6.372961633010284e-05, + "loss": 0.1579, + "step": 52940 + }, + { + "epoch": 2.044480481871887, + "grad_norm": 2.078003406524658, + "learning_rate": 6.370387531050105e-05, + "loss": 0.2368, + "step": 52950 + }, + { + "epoch": 2.044866597165914, + "grad_norm": 0.778519332408905, + "learning_rate": 6.367813429089927e-05, + "loss": 0.1509, + "step": 52960 + }, + { + "epoch": 2.0452527124599404, + "grad_norm": 0.919970691204071, + "learning_rate": 6.365239327129748e-05, + "loss": 0.1493, + "step": 52970 + }, + { + "epoch": 2.0456388277539674, + "grad_norm": 0.06400478631258011, + "learning_rate": 6.362665225169569e-05, + "loss": 0.2599, + "step": 52980 + }, + { + "epoch": 2.046024943047994, + "grad_norm": 3.479253053665161, + "learning_rate": 6.36009112320939e-05, + "loss": 0.2582, + "step": 52990 + }, + { + "epoch": 2.046411058342021, + "grad_norm": 0.5447397232055664, + "learning_rate": 6.357517021249212e-05, + "loss": 0.0906, + "step": 53000 + }, + { + "epoch": 2.0467971736360475, + "grad_norm": 1.1518877744674683, + "learning_rate": 6.354942919289033e-05, + "loss": 0.1255, + "step": 53010 + }, + { + "epoch": 2.0471832889300745, + "grad_norm": 1.782228708267212, + "learning_rate": 6.352368817328855e-05, + "loss": 0.4931, + "step": 53020 + }, + { + "epoch": 2.0475694042241015, + "grad_norm": 1.1501574516296387, + "learning_rate": 6.349794715368677e-05, + "loss": 0.1242, + "step": 53030 + }, + { + "epoch": 2.047955519518128, + "grad_norm": 2.69543719291687, + "learning_rate": 6.347220613408497e-05, + "loss": 0.2079, + "step": 53040 + }, + { + "epoch": 2.048341634812155, + "grad_norm": 0.6990146040916443, + "learning_rate": 6.344646511448318e-05, + "loss": 0.1033, + "step": 53050 + }, + { + "epoch": 2.0487277501061816, + "grad_norm": 2.3171396255493164, + "learning_rate": 6.34207240948814e-05, + "loss": 0.2182, + "step": 53060 + }, + { + "epoch": 2.0491138654002086, + "grad_norm": 0.11202628165483475, + "learning_rate": 6.339498307527961e-05, + "loss": 0.1862, + "step": 53070 + }, + { + "epoch": 2.049499980694235, + "grad_norm": 0.0545661523938179, + "learning_rate": 6.336924205567782e-05, + "loss": 0.1641, + "step": 53080 + }, + { + "epoch": 2.049886095988262, + "grad_norm": 0.9861188530921936, + "learning_rate": 6.334350103607605e-05, + "loss": 0.1862, + "step": 53090 + }, + { + "epoch": 2.050272211282289, + "grad_norm": 0.6137722730636597, + "learning_rate": 6.331776001647427e-05, + "loss": 0.1213, + "step": 53100 + }, + { + "epoch": 2.0506583265763156, + "grad_norm": 0.3138205409049988, + "learning_rate": 6.329201899687246e-05, + "loss": 0.2589, + "step": 53110 + }, + { + "epoch": 2.0510444418703426, + "grad_norm": 1.4361293315887451, + "learning_rate": 6.326627797727068e-05, + "loss": 0.0854, + "step": 53120 + }, + { + "epoch": 2.051430557164369, + "grad_norm": 0.18099132180213928, + "learning_rate": 6.324053695766889e-05, + "loss": 0.173, + "step": 53130 + }, + { + "epoch": 2.051816672458396, + "grad_norm": 1.630255103111267, + "learning_rate": 6.32147959380671e-05, + "loss": 0.2573, + "step": 53140 + }, + { + "epoch": 2.0522027877524227, + "grad_norm": 1.0745834112167358, + "learning_rate": 6.318905491846532e-05, + "loss": 0.1249, + "step": 
53150 + }, + { + "epoch": 2.0525889030464497, + "grad_norm": 0.15289072692394257, + "learning_rate": 6.316331389886354e-05, + "loss": 0.204, + "step": 53160 + }, + { + "epoch": 2.0529750183404767, + "grad_norm": 0.7459616661071777, + "learning_rate": 6.313757287926176e-05, + "loss": 0.1632, + "step": 53170 + }, + { + "epoch": 2.0533611336345032, + "grad_norm": 2.7093374729156494, + "learning_rate": 6.311183185965996e-05, + "loss": 0.1477, + "step": 53180 + }, + { + "epoch": 2.0537472489285302, + "grad_norm": 0.1735665500164032, + "learning_rate": 6.308609084005817e-05, + "loss": 0.1736, + "step": 53190 + }, + { + "epoch": 2.0541333642225568, + "grad_norm": 0.3297293186187744, + "learning_rate": 6.306034982045638e-05, + "loss": 0.0974, + "step": 53200 + }, + { + "epoch": 2.0545194795165838, + "grad_norm": 0.023612016811966896, + "learning_rate": 6.30346088008546e-05, + "loss": 0.1451, + "step": 53210 + }, + { + "epoch": 2.0549055948106103, + "grad_norm": 1.3713831901550293, + "learning_rate": 6.300886778125282e-05, + "loss": 0.1103, + "step": 53220 + }, + { + "epoch": 2.0552917101046373, + "grad_norm": 0.5888402462005615, + "learning_rate": 6.298312676165104e-05, + "loss": 0.1708, + "step": 53230 + }, + { + "epoch": 2.055677825398664, + "grad_norm": 1.8231903314590454, + "learning_rate": 6.295738574204925e-05, + "loss": 0.2337, + "step": 53240 + }, + { + "epoch": 2.056063940692691, + "grad_norm": 0.5738593935966492, + "learning_rate": 6.293164472244746e-05, + "loss": 0.1951, + "step": 53250 + }, + { + "epoch": 2.056450055986718, + "grad_norm": 0.13595734536647797, + "learning_rate": 6.290590370284566e-05, + "loss": 0.1381, + "step": 53260 + }, + { + "epoch": 2.0568361712807444, + "grad_norm": 0.9126518368721008, + "learning_rate": 6.288016268324388e-05, + "loss": 0.1247, + "step": 53270 + }, + { + "epoch": 2.0572222865747714, + "grad_norm": 0.5245015621185303, + "learning_rate": 6.28544216636421e-05, + "loss": 0.2851, + "step": 53280 + }, + { + "epoch": 2.057608401868798, + "grad_norm": 0.08704059571027756, + "learning_rate": 6.282868064404032e-05, + "loss": 0.1425, + "step": 53290 + }, + { + "epoch": 2.057994517162825, + "grad_norm": 0.3915903866291046, + "learning_rate": 6.280293962443853e-05, + "loss": 0.1683, + "step": 53300 + }, + { + "epoch": 2.0583806324568514, + "grad_norm": 1.5422857999801636, + "learning_rate": 6.277719860483674e-05, + "loss": 0.178, + "step": 53310 + }, + { + "epoch": 2.0587667477508784, + "grad_norm": 0.6210331320762634, + "learning_rate": 6.275145758523496e-05, + "loss": 0.1279, + "step": 53320 + }, + { + "epoch": 2.0591528630449054, + "grad_norm": 1.8133548498153687, + "learning_rate": 6.272571656563316e-05, + "loss": 0.1276, + "step": 53330 + }, + { + "epoch": 2.059538978338932, + "grad_norm": 1.8391001224517822, + "learning_rate": 6.269997554603138e-05, + "loss": 0.1533, + "step": 53340 + }, + { + "epoch": 2.059925093632959, + "grad_norm": 0.48838570713996887, + "learning_rate": 6.26742345264296e-05, + "loss": 0.1334, + "step": 53350 + }, + { + "epoch": 2.0603112089269855, + "grad_norm": 1.6755399703979492, + "learning_rate": 6.264849350682781e-05, + "loss": 0.1874, + "step": 53360 + }, + { + "epoch": 2.0606973242210125, + "grad_norm": 0.6416677236557007, + "learning_rate": 6.262275248722602e-05, + "loss": 0.0775, + "step": 53370 + }, + { + "epoch": 2.061083439515039, + "grad_norm": 1.258190393447876, + "learning_rate": 6.259701146762424e-05, + "loss": 0.2183, + "step": 53380 + }, + { + "epoch": 2.061469554809066, + "grad_norm": 1.543530821800232, + 
"learning_rate": 6.257127044802245e-05, + "loss": 0.2124, + "step": 53390 + }, + { + "epoch": 2.0618556701030926, + "grad_norm": 1.7885076999664307, + "learning_rate": 6.254552942842065e-05, + "loss": 0.1746, + "step": 53400 + }, + { + "epoch": 2.0622417853971196, + "grad_norm": 0.7699170708656311, + "learning_rate": 6.251978840881888e-05, + "loss": 0.0489, + "step": 53410 + }, + { + "epoch": 2.0626279006911465, + "grad_norm": 0.6853735446929932, + "learning_rate": 6.249404738921709e-05, + "loss": 0.0828, + "step": 53420 + }, + { + "epoch": 2.063014015985173, + "grad_norm": 1.250551462173462, + "learning_rate": 6.24683063696153e-05, + "loss": 0.1361, + "step": 53430 + }, + { + "epoch": 2.0634001312792, + "grad_norm": 0.20269331336021423, + "learning_rate": 6.244256535001352e-05, + "loss": 0.1916, + "step": 53440 + }, + { + "epoch": 2.0637862465732266, + "grad_norm": 0.868029773235321, + "learning_rate": 6.241682433041173e-05, + "loss": 0.1665, + "step": 53450 + }, + { + "epoch": 2.0641723618672536, + "grad_norm": 0.5697484016418457, + "learning_rate": 6.239108331080994e-05, + "loss": 0.1873, + "step": 53460 + }, + { + "epoch": 2.06455847716128, + "grad_norm": 2.3145039081573486, + "learning_rate": 6.236534229120816e-05, + "loss": 0.3618, + "step": 53470 + }, + { + "epoch": 2.064944592455307, + "grad_norm": 0.3517427444458008, + "learning_rate": 6.233960127160637e-05, + "loss": 0.1844, + "step": 53480 + }, + { + "epoch": 2.065330707749334, + "grad_norm": 0.6968696117401123, + "learning_rate": 6.231386025200458e-05, + "loss": 0.1229, + "step": 53490 + }, + { + "epoch": 2.0657168230433607, + "grad_norm": 1.4328505992889404, + "learning_rate": 6.22881192324028e-05, + "loss": 0.1921, + "step": 53500 + }, + { + "epoch": 2.0661029383373877, + "grad_norm": 0.0647716224193573, + "learning_rate": 6.226237821280101e-05, + "loss": 0.1157, + "step": 53510 + }, + { + "epoch": 2.066489053631414, + "grad_norm": 0.1405775398015976, + "learning_rate": 6.223663719319922e-05, + "loss": 0.1593, + "step": 53520 + }, + { + "epoch": 2.066875168925441, + "grad_norm": 0.8191878795623779, + "learning_rate": 6.221089617359744e-05, + "loss": 0.3237, + "step": 53530 + }, + { + "epoch": 2.0672612842194678, + "grad_norm": 0.16709165275096893, + "learning_rate": 6.218515515399565e-05, + "loss": 0.1098, + "step": 53540 + }, + { + "epoch": 2.0676473995134947, + "grad_norm": 1.2044661045074463, + "learning_rate": 6.215941413439386e-05, + "loss": 0.2014, + "step": 53550 + }, + { + "epoch": 2.0680335148075217, + "grad_norm": 0.7808303236961365, + "learning_rate": 6.213367311479208e-05, + "loss": 0.1122, + "step": 53560 + }, + { + "epoch": 2.0684196301015483, + "grad_norm": 0.8508942723274231, + "learning_rate": 6.210793209519029e-05, + "loss": 0.2162, + "step": 53570 + }, + { + "epoch": 2.0688057453955753, + "grad_norm": 0.1512947827577591, + "learning_rate": 6.20821910755885e-05, + "loss": 0.1382, + "step": 53580 + }, + { + "epoch": 2.069191860689602, + "grad_norm": 0.10546062141656876, + "learning_rate": 6.205645005598673e-05, + "loss": 0.0962, + "step": 53590 + }, + { + "epoch": 2.069577975983629, + "grad_norm": 0.7787978053092957, + "learning_rate": 6.203070903638494e-05, + "loss": 0.2142, + "step": 53600 + }, + { + "epoch": 2.0699640912776553, + "grad_norm": 0.1247776597738266, + "learning_rate": 6.200496801678314e-05, + "loss": 0.1788, + "step": 53610 + }, + { + "epoch": 2.0703502065716823, + "grad_norm": 1.5636411905288696, + "learning_rate": 6.197922699718136e-05, + "loss": 0.2244, + "step": 53620 + }, + { + "epoch": 
2.0707363218657093, + "grad_norm": 1.5239825248718262, + "learning_rate": 6.195348597757957e-05, + "loss": 0.152, + "step": 53630 + }, + { + "epoch": 2.071122437159736, + "grad_norm": 0.6676948070526123, + "learning_rate": 6.192774495797778e-05, + "loss": 0.1007, + "step": 53640 + }, + { + "epoch": 2.071508552453763, + "grad_norm": 2.0146231651306152, + "learning_rate": 6.190200393837601e-05, + "loss": 0.1881, + "step": 53650 + }, + { + "epoch": 2.0718946677477894, + "grad_norm": 1.3102779388427734, + "learning_rate": 6.187626291877422e-05, + "loss": 0.2036, + "step": 53660 + }, + { + "epoch": 2.0722807830418164, + "grad_norm": 1.230490803718567, + "learning_rate": 6.185052189917244e-05, + "loss": 0.235, + "step": 53670 + }, + { + "epoch": 2.072666898335843, + "grad_norm": 1.2379542589187622, + "learning_rate": 6.182478087957064e-05, + "loss": 0.2005, + "step": 53680 + }, + { + "epoch": 2.07305301362987, + "grad_norm": 0.9768775701522827, + "learning_rate": 6.179903985996885e-05, + "loss": 0.1885, + "step": 53690 + }, + { + "epoch": 2.0734391289238965, + "grad_norm": 1.2973617315292358, + "learning_rate": 6.177329884036706e-05, + "loss": 0.1218, + "step": 53700 + }, + { + "epoch": 2.0738252442179235, + "grad_norm": 0.6314378380775452, + "learning_rate": 6.174755782076528e-05, + "loss": 0.2194, + "step": 53710 + }, + { + "epoch": 2.0742113595119505, + "grad_norm": 1.7972043752670288, + "learning_rate": 6.17218168011635e-05, + "loss": 0.2668, + "step": 53720 + }, + { + "epoch": 2.074597474805977, + "grad_norm": 1.1307156085968018, + "learning_rate": 6.169607578156172e-05, + "loss": 0.2094, + "step": 53730 + }, + { + "epoch": 2.074983590100004, + "grad_norm": 0.07750687003135681, + "learning_rate": 6.167033476195993e-05, + "loss": 0.1556, + "step": 53740 + }, + { + "epoch": 2.0753697053940305, + "grad_norm": 0.33502569794654846, + "learning_rate": 6.164459374235813e-05, + "loss": 0.1849, + "step": 53750 + }, + { + "epoch": 2.0757558206880575, + "grad_norm": 0.462332546710968, + "learning_rate": 6.161885272275634e-05, + "loss": 0.1704, + "step": 53760 + }, + { + "epoch": 2.076141935982084, + "grad_norm": 0.7019705772399902, + "learning_rate": 6.159311170315456e-05, + "loss": 0.155, + "step": 53770 + }, + { + "epoch": 2.076528051276111, + "grad_norm": 1.2807412147521973, + "learning_rate": 6.156737068355278e-05, + "loss": 0.2956, + "step": 53780 + }, + { + "epoch": 2.076914166570138, + "grad_norm": 0.9197677373886108, + "learning_rate": 6.1541629663951e-05, + "loss": 0.0723, + "step": 53790 + }, + { + "epoch": 2.0773002818641646, + "grad_norm": 0.15677478909492493, + "learning_rate": 6.151588864434921e-05, + "loss": 0.2835, + "step": 53800 + }, + { + "epoch": 2.0776863971581916, + "grad_norm": 1.7673814296722412, + "learning_rate": 6.149014762474742e-05, + "loss": 0.1127, + "step": 53810 + }, + { + "epoch": 2.078072512452218, + "grad_norm": 1.9662418365478516, + "learning_rate": 6.146440660514562e-05, + "loss": 0.1314, + "step": 53820 + }, + { + "epoch": 2.078458627746245, + "grad_norm": 0.11253755539655685, + "learning_rate": 6.143866558554384e-05, + "loss": 0.2855, + "step": 53830 + }, + { + "epoch": 2.0788447430402717, + "grad_norm": 2.4275155067443848, + "learning_rate": 6.141292456594206e-05, + "loss": 0.2794, + "step": 53840 + }, + { + "epoch": 2.0792308583342987, + "grad_norm": 0.4845966100692749, + "learning_rate": 6.138718354634028e-05, + "loss": 0.1129, + "step": 53850 + }, + { + "epoch": 2.079616973628325, + "grad_norm": 0.2127775400876999, + "learning_rate": 6.136144252673849e-05, + 
"loss": 0.1089, + "step": 53860 + }, + { + "epoch": 2.080003088922352, + "grad_norm": 1.554506778717041, + "learning_rate": 6.13357015071367e-05, + "loss": 0.1196, + "step": 53870 + }, + { + "epoch": 2.080389204216379, + "grad_norm": 0.08301983028650284, + "learning_rate": 6.130996048753492e-05, + "loss": 0.3858, + "step": 53880 + }, + { + "epoch": 2.0807753195104057, + "grad_norm": 1.0547988414764404, + "learning_rate": 6.128421946793313e-05, + "loss": 0.1348, + "step": 53890 + }, + { + "epoch": 2.0811614348044327, + "grad_norm": 0.3954383432865143, + "learning_rate": 6.125847844833134e-05, + "loss": 0.1347, + "step": 53900 + }, + { + "epoch": 2.0815475500984593, + "grad_norm": 0.25809749960899353, + "learning_rate": 6.123273742872956e-05, + "loss": 0.4701, + "step": 53910 + }, + { + "epoch": 2.0819336653924863, + "grad_norm": 0.9337195754051208, + "learning_rate": 6.120699640912777e-05, + "loss": 0.1335, + "step": 53920 + }, + { + "epoch": 2.082319780686513, + "grad_norm": 0.9139271378517151, + "learning_rate": 6.118125538952598e-05, + "loss": 0.1095, + "step": 53930 + }, + { + "epoch": 2.08270589598054, + "grad_norm": 0.31789037585258484, + "learning_rate": 6.11555143699242e-05, + "loss": 0.1116, + "step": 53940 + }, + { + "epoch": 2.083092011274567, + "grad_norm": 1.4585286378860474, + "learning_rate": 6.112977335032241e-05, + "loss": 0.2283, + "step": 53950 + }, + { + "epoch": 2.0834781265685933, + "grad_norm": 1.2569290399551392, + "learning_rate": 6.110403233072062e-05, + "loss": 0.238, + "step": 53960 + }, + { + "epoch": 2.0838642418626203, + "grad_norm": 0.08168485015630722, + "learning_rate": 6.107829131111884e-05, + "loss": 0.0965, + "step": 53970 + }, + { + "epoch": 2.084250357156647, + "grad_norm": 0.42119330167770386, + "learning_rate": 6.105255029151705e-05, + "loss": 0.288, + "step": 53980 + }, + { + "epoch": 2.084636472450674, + "grad_norm": 0.04520781710743904, + "learning_rate": 6.102680927191526e-05, + "loss": 0.1616, + "step": 53990 + }, + { + "epoch": 2.0850225877447004, + "grad_norm": 0.9019898176193237, + "learning_rate": 6.100106825231348e-05, + "loss": 0.1313, + "step": 54000 + }, + { + "epoch": 2.0854087030387274, + "grad_norm": 2.3572826385498047, + "learning_rate": 6.097532723271169e-05, + "loss": 0.219, + "step": 54010 + }, + { + "epoch": 2.0857948183327544, + "grad_norm": 0.6147291660308838, + "learning_rate": 6.0949586213109896e-05, + "loss": 0.1005, + "step": 54020 + }, + { + "epoch": 2.086180933626781, + "grad_norm": 0.3989221751689911, + "learning_rate": 6.092384519350812e-05, + "loss": 0.1923, + "step": 54030 + }, + { + "epoch": 2.086567048920808, + "grad_norm": 0.04854296147823334, + "learning_rate": 6.089810417390634e-05, + "loss": 0.3081, + "step": 54040 + }, + { + "epoch": 2.0869531642148345, + "grad_norm": 1.1020113229751587, + "learning_rate": 6.087236315430454e-05, + "loss": 0.1325, + "step": 54050 + }, + { + "epoch": 2.0873392795088614, + "grad_norm": 1.2404685020446777, + "learning_rate": 6.0846622134702756e-05, + "loss": 0.2657, + "step": 54060 + }, + { + "epoch": 2.087725394802888, + "grad_norm": 0.5539906620979309, + "learning_rate": 6.082088111510097e-05, + "loss": 0.1379, + "step": 54070 + }, + { + "epoch": 2.088111510096915, + "grad_norm": 0.550548791885376, + "learning_rate": 6.079514009549918e-05, + "loss": 0.2004, + "step": 54080 + }, + { + "epoch": 2.0884976253909415, + "grad_norm": 0.5012397766113281, + "learning_rate": 6.07693990758974e-05, + "loss": 0.1582, + "step": 54090 + }, + { + "epoch": 2.0888837406849685, + "grad_norm": 
0.7319992780685425, + "learning_rate": 6.0743658056295617e-05, + "loss": 0.1923, + "step": 54100 + }, + { + "epoch": 2.0892698559789955, + "grad_norm": 0.05006573721766472, + "learning_rate": 6.071791703669383e-05, + "loss": 0.2457, + "step": 54110 + }, + { + "epoch": 2.089655971273022, + "grad_norm": 0.06044507771730423, + "learning_rate": 6.0692176017092036e-05, + "loss": 0.1328, + "step": 54120 + }, + { + "epoch": 2.090042086567049, + "grad_norm": 1.4530203342437744, + "learning_rate": 6.066643499749025e-05, + "loss": 0.1344, + "step": 54130 + }, + { + "epoch": 2.0904282018610756, + "grad_norm": 1.0805295705795288, + "learning_rate": 6.064069397788846e-05, + "loss": 0.137, + "step": 54140 + }, + { + "epoch": 2.0908143171551026, + "grad_norm": 0.7313231825828552, + "learning_rate": 6.061495295828668e-05, + "loss": 0.1859, + "step": 54150 + }, + { + "epoch": 2.091200432449129, + "grad_norm": 1.4634814262390137, + "learning_rate": 6.0589211938684896e-05, + "loss": 0.1396, + "step": 54160 + }, + { + "epoch": 2.091586547743156, + "grad_norm": 1.1281847953796387, + "learning_rate": 6.056347091908311e-05, + "loss": 0.1935, + "step": 54170 + }, + { + "epoch": 2.091972663037183, + "grad_norm": 0.3002813756465912, + "learning_rate": 6.053772989948132e-05, + "loss": 0.2053, + "step": 54180 + }, + { + "epoch": 2.0923587783312096, + "grad_norm": 1.521639347076416, + "learning_rate": 6.0511988879879536e-05, + "loss": 0.2228, + "step": 54190 + }, + { + "epoch": 2.0927448936252366, + "grad_norm": 1.6338810920715332, + "learning_rate": 6.048624786027774e-05, + "loss": 0.0525, + "step": 54200 + }, + { + "epoch": 2.093131008919263, + "grad_norm": 1.9877723455429077, + "learning_rate": 6.0460506840675956e-05, + "loss": 0.3764, + "step": 54210 + }, + { + "epoch": 2.09351712421329, + "grad_norm": 1.1026666164398193, + "learning_rate": 6.0434765821074176e-05, + "loss": 0.0845, + "step": 54220 + }, + { + "epoch": 2.0939032395073167, + "grad_norm": 0.17072628438472748, + "learning_rate": 6.040902480147239e-05, + "loss": 0.2619, + "step": 54230 + }, + { + "epoch": 2.0942893548013437, + "grad_norm": 1.0765973329544067, + "learning_rate": 6.03832837818706e-05, + "loss": 0.2227, + "step": 54240 + }, + { + "epoch": 2.0946754700953703, + "grad_norm": 0.10422563552856445, + "learning_rate": 6.0357542762268816e-05, + "loss": 0.1371, + "step": 54250 + }, + { + "epoch": 2.0950615853893972, + "grad_norm": 0.7437000870704651, + "learning_rate": 6.033180174266703e-05, + "loss": 0.0981, + "step": 54260 + }, + { + "epoch": 2.0954477006834242, + "grad_norm": 0.12045181542634964, + "learning_rate": 6.0306060723065236e-05, + "loss": 0.1583, + "step": 54270 + }, + { + "epoch": 2.0958338159774508, + "grad_norm": 0.5264570713043213, + "learning_rate": 6.028031970346346e-05, + "loss": 0.2561, + "step": 54280 + }, + { + "epoch": 2.0962199312714778, + "grad_norm": 0.8104095458984375, + "learning_rate": 6.0254578683861676e-05, + "loss": 0.1338, + "step": 54290 + }, + { + "epoch": 2.0966060465655043, + "grad_norm": 1.8734989166259766, + "learning_rate": 6.022883766425988e-05, + "loss": 0.1911, + "step": 54300 + }, + { + "epoch": 2.0969921618595313, + "grad_norm": 0.09730927646160126, + "learning_rate": 6.0203096644658096e-05, + "loss": 0.2272, + "step": 54310 + }, + { + "epoch": 2.097378277153558, + "grad_norm": 2.5745980739593506, + "learning_rate": 6.017735562505631e-05, + "loss": 0.2252, + "step": 54320 + }, + { + "epoch": 2.097764392447585, + "grad_norm": 0.4371737539768219, + "learning_rate": 6.015161460545452e-05, + "loss": 
0.1397, + "step": 54330 + }, + { + "epoch": 2.098150507741612, + "grad_norm": 0.267517626285553, + "learning_rate": 6.012587358585274e-05, + "loss": 0.0745, + "step": 54340 + }, + { + "epoch": 2.0985366230356384, + "grad_norm": 0.8187986016273499, + "learning_rate": 6.0100132566250956e-05, + "loss": 0.2474, + "step": 54350 + }, + { + "epoch": 2.0989227383296654, + "grad_norm": 1.1416966915130615, + "learning_rate": 6.007439154664917e-05, + "loss": 0.1445, + "step": 54360 + }, + { + "epoch": 2.099308853623692, + "grad_norm": 0.4858175814151764, + "learning_rate": 6.0048650527047376e-05, + "loss": 0.1115, + "step": 54370 + }, + { + "epoch": 2.099694968917719, + "grad_norm": 0.17463591694831848, + "learning_rate": 6.002290950744559e-05, + "loss": 0.128, + "step": 54380 + }, + { + "epoch": 2.1000810842117454, + "grad_norm": 1.2394402027130127, + "learning_rate": 5.99971684878438e-05, + "loss": 0.2724, + "step": 54390 + }, + { + "epoch": 2.1004671995057724, + "grad_norm": 0.2638779282569885, + "learning_rate": 5.997142746824202e-05, + "loss": 0.0761, + "step": 54400 + }, + { + "epoch": 2.1008533147997994, + "grad_norm": 0.34836921095848083, + "learning_rate": 5.9945686448640236e-05, + "loss": 0.1303, + "step": 54410 + }, + { + "epoch": 2.101239430093826, + "grad_norm": 1.0886906385421753, + "learning_rate": 5.991994542903845e-05, + "loss": 0.1448, + "step": 54420 + }, + { + "epoch": 2.101625545387853, + "grad_norm": 1.4876662492752075, + "learning_rate": 5.989420440943666e-05, + "loss": 0.1355, + "step": 54430 + }, + { + "epoch": 2.1020116606818795, + "grad_norm": 0.5606863498687744, + "learning_rate": 5.986846338983487e-05, + "loss": 0.2719, + "step": 54440 + }, + { + "epoch": 2.1023977759759065, + "grad_norm": 2.005791664123535, + "learning_rate": 5.984272237023308e-05, + "loss": 0.2387, + "step": 54450 + }, + { + "epoch": 2.102783891269933, + "grad_norm": 1.1652408838272095, + "learning_rate": 5.9816981350631296e-05, + "loss": 0.2491, + "step": 54460 + }, + { + "epoch": 2.10317000656396, + "grad_norm": 1.9928478002548218, + "learning_rate": 5.9791240331029516e-05, + "loss": 0.2429, + "step": 54470 + }, + { + "epoch": 2.103556121857987, + "grad_norm": 1.4620413780212402, + "learning_rate": 5.976549931142773e-05, + "loss": 0.2706, + "step": 54480 + }, + { + "epoch": 2.1039422371520136, + "grad_norm": 0.40234237909317017, + "learning_rate": 5.973975829182594e-05, + "loss": 0.1181, + "step": 54490 + }, + { + "epoch": 2.1043283524460406, + "grad_norm": 0.2635735273361206, + "learning_rate": 5.9714017272224156e-05, + "loss": 0.1217, + "step": 54500 + }, + { + "epoch": 2.104714467740067, + "grad_norm": 0.5525489449501038, + "learning_rate": 5.968827625262237e-05, + "loss": 0.2949, + "step": 54510 + }, + { + "epoch": 2.105100583034094, + "grad_norm": 1.1089653968811035, + "learning_rate": 5.9662535233020576e-05, + "loss": 0.1418, + "step": 54520 + }, + { + "epoch": 2.1054866983281206, + "grad_norm": 0.21478118002414703, + "learning_rate": 5.96367942134188e-05, + "loss": 0.0913, + "step": 54530 + }, + { + "epoch": 2.1058728136221476, + "grad_norm": 1.0484806299209595, + "learning_rate": 5.9611053193817016e-05, + "loss": 0.1215, + "step": 54540 + }, + { + "epoch": 2.106258928916174, + "grad_norm": 2.487285852432251, + "learning_rate": 5.958531217421522e-05, + "loss": 0.2268, + "step": 54550 + }, + { + "epoch": 2.106645044210201, + "grad_norm": 5.35322904586792, + "learning_rate": 5.9559571154613436e-05, + "loss": 0.0894, + "step": 54560 + }, + { + "epoch": 2.107031159504228, + "grad_norm": 
3.2365424633026123, + "learning_rate": 5.953383013501165e-05, + "loss": 0.3387, + "step": 54570 + }, + { + "epoch": 2.1074172747982547, + "grad_norm": 0.9013198614120483, + "learning_rate": 5.950808911540986e-05, + "loss": 0.103, + "step": 54580 + }, + { + "epoch": 2.1078033900922817, + "grad_norm": 0.7987234592437744, + "learning_rate": 5.948234809580808e-05, + "loss": 0.1964, + "step": 54590 + }, + { + "epoch": 2.1081895053863082, + "grad_norm": 0.6315350532531738, + "learning_rate": 5.9456607076206296e-05, + "loss": 0.1523, + "step": 54600 + }, + { + "epoch": 2.108575620680335, + "grad_norm": 2.4431264400482178, + "learning_rate": 5.943086605660451e-05, + "loss": 0.255, + "step": 54610 + }, + { + "epoch": 2.1089617359743618, + "grad_norm": 1.1002070903778076, + "learning_rate": 5.9405125037002715e-05, + "loss": 0.3092, + "step": 54620 + }, + { + "epoch": 2.1093478512683888, + "grad_norm": 1.1607320308685303, + "learning_rate": 5.937938401740093e-05, + "loss": 0.1338, + "step": 54630 + }, + { + "epoch": 2.1097339665624157, + "grad_norm": 1.9413435459136963, + "learning_rate": 5.935364299779914e-05, + "loss": 0.2128, + "step": 54640 + }, + { + "epoch": 2.1101200818564423, + "grad_norm": 1.6216448545455933, + "learning_rate": 5.932790197819736e-05, + "loss": 0.2688, + "step": 54650 + }, + { + "epoch": 2.1105061971504693, + "grad_norm": 0.9825085997581482, + "learning_rate": 5.9302160958595576e-05, + "loss": 0.146, + "step": 54660 + }, + { + "epoch": 2.110892312444496, + "grad_norm": 2.0620877742767334, + "learning_rate": 5.927641993899379e-05, + "loss": 0.1987, + "step": 54670 + }, + { + "epoch": 2.111278427738523, + "grad_norm": 0.6155973076820374, + "learning_rate": 5.9250678919392e-05, + "loss": 0.0886, + "step": 54680 + }, + { + "epoch": 2.1116645430325494, + "grad_norm": 0.08519631624221802, + "learning_rate": 5.922493789979021e-05, + "loss": 0.1087, + "step": 54690 + }, + { + "epoch": 2.1120506583265763, + "grad_norm": 1.9819930791854858, + "learning_rate": 5.919919688018842e-05, + "loss": 0.2588, + "step": 54700 + }, + { + "epoch": 2.112436773620603, + "grad_norm": 0.32515060901641846, + "learning_rate": 5.9173455860586635e-05, + "loss": 0.2297, + "step": 54710 + }, + { + "epoch": 2.11282288891463, + "grad_norm": 2.5351369380950928, + "learning_rate": 5.9147714840984855e-05, + "loss": 0.1391, + "step": 54720 + }, + { + "epoch": 2.113209004208657, + "grad_norm": 0.3489625155925751, + "learning_rate": 5.912197382138307e-05, + "loss": 0.1012, + "step": 54730 + }, + { + "epoch": 2.1135951195026834, + "grad_norm": 0.4030207693576813, + "learning_rate": 5.909623280178128e-05, + "loss": 0.2788, + "step": 54740 + }, + { + "epoch": 2.1139812347967104, + "grad_norm": 1.3358521461486816, + "learning_rate": 5.9070491782179495e-05, + "loss": 0.4126, + "step": 54750 + }, + { + "epoch": 2.114367350090737, + "grad_norm": 1.6924939155578613, + "learning_rate": 5.90447507625777e-05, + "loss": 0.1319, + "step": 54760 + }, + { + "epoch": 2.114753465384764, + "grad_norm": 0.08663685619831085, + "learning_rate": 5.9019009742975915e-05, + "loss": 0.3066, + "step": 54770 + }, + { + "epoch": 2.1151395806787905, + "grad_norm": 0.2018699198961258, + "learning_rate": 5.899326872337414e-05, + "loss": 0.1316, + "step": 54780 + }, + { + "epoch": 2.1155256959728175, + "grad_norm": 1.8767002820968628, + "learning_rate": 5.896752770377235e-05, + "loss": 0.2638, + "step": 54790 + }, + { + "epoch": 2.1159118112668445, + "grad_norm": 2.719196081161499, + "learning_rate": 5.894178668417056e-05, + "loss": 0.166, + 
"step": 54800 + }, + { + "epoch": 2.116297926560871, + "grad_norm": 1.5541603565216064, + "learning_rate": 5.8916045664568775e-05, + "loss": 0.1502, + "step": 54810 + }, + { + "epoch": 2.116684041854898, + "grad_norm": 2.0174572467803955, + "learning_rate": 5.889030464496699e-05, + "loss": 0.2778, + "step": 54820 + }, + { + "epoch": 2.1170701571489245, + "grad_norm": 1.4575814008712769, + "learning_rate": 5.88645636253652e-05, + "loss": 0.2026, + "step": 54830 + }, + { + "epoch": 2.1174562724429515, + "grad_norm": 2.583587646484375, + "learning_rate": 5.883882260576342e-05, + "loss": 0.1695, + "step": 54840 + }, + { + "epoch": 2.117842387736978, + "grad_norm": 1.3352335691452026, + "learning_rate": 5.8813081586161635e-05, + "loss": 0.2557, + "step": 54850 + }, + { + "epoch": 2.118228503031005, + "grad_norm": 1.0539675951004028, + "learning_rate": 5.878734056655985e-05, + "loss": 0.2358, + "step": 54860 + }, + { + "epoch": 2.118614618325032, + "grad_norm": 1.6472233533859253, + "learning_rate": 5.8761599546958055e-05, + "loss": 0.1166, + "step": 54870 + }, + { + "epoch": 2.1190007336190586, + "grad_norm": 1.6128703355789185, + "learning_rate": 5.873585852735627e-05, + "loss": 0.1584, + "step": 54880 + }, + { + "epoch": 2.1193868489130856, + "grad_norm": 2.4982826709747314, + "learning_rate": 5.871011750775448e-05, + "loss": 0.2192, + "step": 54890 + }, + { + "epoch": 2.119772964207112, + "grad_norm": 1.7372159957885742, + "learning_rate": 5.86843764881527e-05, + "loss": 0.1824, + "step": 54900 + }, + { + "epoch": 2.120159079501139, + "grad_norm": 1.9102532863616943, + "learning_rate": 5.8658635468550915e-05, + "loss": 0.1234, + "step": 54910 + }, + { + "epoch": 2.1205451947951657, + "grad_norm": 0.9978908896446228, + "learning_rate": 5.863289444894913e-05, + "loss": 0.2699, + "step": 54920 + }, + { + "epoch": 2.1209313100891927, + "grad_norm": 0.8557146787643433, + "learning_rate": 5.860715342934734e-05, + "loss": 0.0984, + "step": 54930 + }, + { + "epoch": 2.1213174253832197, + "grad_norm": 2.5358450412750244, + "learning_rate": 5.858141240974555e-05, + "loss": 0.2159, + "step": 54940 + }, + { + "epoch": 2.121703540677246, + "grad_norm": 2.588324785232544, + "learning_rate": 5.855567139014376e-05, + "loss": 0.1311, + "step": 54950 + }, + { + "epoch": 2.122089655971273, + "grad_norm": 3.9097461700439453, + "learning_rate": 5.852993037054199e-05, + "loss": 0.2061, + "step": 54960 + }, + { + "epoch": 2.1224757712652997, + "grad_norm": 0.992247998714447, + "learning_rate": 5.8504189350940195e-05, + "loss": 0.0774, + "step": 54970 + }, + { + "epoch": 2.1228618865593267, + "grad_norm": 0.9253148436546326, + "learning_rate": 5.847844833133841e-05, + "loss": 0.1274, + "step": 54980 + }, + { + "epoch": 2.1232480018533533, + "grad_norm": 0.8236201405525208, + "learning_rate": 5.845270731173662e-05, + "loss": 0.0951, + "step": 54990 + }, + { + "epoch": 2.1236341171473803, + "grad_norm": 0.9370753169059753, + "learning_rate": 5.8426966292134835e-05, + "loss": 0.1832, + "step": 55000 + }, + { + "epoch": 2.124020232441407, + "grad_norm": 2.7415149211883545, + "learning_rate": 5.840122527253304e-05, + "loss": 0.1798, + "step": 55010 + }, + { + "epoch": 2.124406347735434, + "grad_norm": 1.4576952457427979, + "learning_rate": 5.8375484252931255e-05, + "loss": 0.2384, + "step": 55020 + }, + { + "epoch": 2.124792463029461, + "grad_norm": 2.303542137145996, + "learning_rate": 5.834974323332948e-05, + "loss": 0.2166, + "step": 55030 + }, + { + "epoch": 2.1251785783234873, + "grad_norm": 1.1065007448196411, + 
"learning_rate": 5.832400221372769e-05, + "loss": 0.1723, + "step": 55040 + }, + { + "epoch": 2.1255646936175143, + "grad_norm": 4.247042655944824, + "learning_rate": 5.82982611941259e-05, + "loss": 0.3005, + "step": 55050 + }, + { + "epoch": 2.125950808911541, + "grad_norm": 1.3860160112380981, + "learning_rate": 5.8272520174524115e-05, + "loss": 0.2504, + "step": 55060 + }, + { + "epoch": 2.126336924205568, + "grad_norm": 0.2923658788204193, + "learning_rate": 5.824677915492233e-05, + "loss": 0.1933, + "step": 55070 + }, + { + "epoch": 2.1267230394995944, + "grad_norm": 2.0818326473236084, + "learning_rate": 5.822103813532054e-05, + "loss": 0.2383, + "step": 55080 + }, + { + "epoch": 2.1271091547936214, + "grad_norm": 2.5381951332092285, + "learning_rate": 5.819529711571876e-05, + "loss": 0.1791, + "step": 55090 + }, + { + "epoch": 2.127495270087648, + "grad_norm": 0.7398497462272644, + "learning_rate": 5.8169556096116975e-05, + "loss": 0.1255, + "step": 55100 + }, + { + "epoch": 2.127881385381675, + "grad_norm": 1.7903372049331665, + "learning_rate": 5.814381507651518e-05, + "loss": 0.2066, + "step": 55110 + }, + { + "epoch": 2.128267500675702, + "grad_norm": 0.8950181007385254, + "learning_rate": 5.8118074056913395e-05, + "loss": 0.2158, + "step": 55120 + }, + { + "epoch": 2.1286536159697285, + "grad_norm": 0.39259612560272217, + "learning_rate": 5.809233303731161e-05, + "loss": 0.0752, + "step": 55130 + }, + { + "epoch": 2.1290397312637555, + "grad_norm": 1.3042824268341064, + "learning_rate": 5.806659201770982e-05, + "loss": 0.2185, + "step": 55140 + }, + { + "epoch": 2.129425846557782, + "grad_norm": 2.0668983459472656, + "learning_rate": 5.804085099810804e-05, + "loss": 0.1751, + "step": 55150 + }, + { + "epoch": 2.129811961851809, + "grad_norm": 0.634894609451294, + "learning_rate": 5.8015109978506255e-05, + "loss": 0.1275, + "step": 55160 + }, + { + "epoch": 2.1301980771458355, + "grad_norm": 0.9348855018615723, + "learning_rate": 5.798936895890447e-05, + "loss": 0.2011, + "step": 55170 + }, + { + "epoch": 2.1305841924398625, + "grad_norm": 0.686715841293335, + "learning_rate": 5.796362793930268e-05, + "loss": 0.1728, + "step": 55180 + }, + { + "epoch": 2.1309703077338895, + "grad_norm": 2.238306999206543, + "learning_rate": 5.793788691970089e-05, + "loss": 0.1944, + "step": 55190 + }, + { + "epoch": 2.131356423027916, + "grad_norm": 0.7057651281356812, + "learning_rate": 5.79121459000991e-05, + "loss": 0.1892, + "step": 55200 + }, + { + "epoch": 2.131742538321943, + "grad_norm": 2.050670862197876, + "learning_rate": 5.788640488049733e-05, + "loss": 0.1539, + "step": 55210 + }, + { + "epoch": 2.1321286536159696, + "grad_norm": 1.924126386642456, + "learning_rate": 5.7860663860895534e-05, + "loss": 0.3623, + "step": 55220 + }, + { + "epoch": 2.1325147689099966, + "grad_norm": 0.32087692618370056, + "learning_rate": 5.783492284129375e-05, + "loss": 0.1179, + "step": 55230 + }, + { + "epoch": 2.132900884204023, + "grad_norm": 2.266429901123047, + "learning_rate": 5.780918182169196e-05, + "loss": 0.1164, + "step": 55240 + }, + { + "epoch": 2.13328699949805, + "grad_norm": 0.5455263257026672, + "learning_rate": 5.7783440802090174e-05, + "loss": 0.1361, + "step": 55250 + }, + { + "epoch": 2.133673114792077, + "grad_norm": 0.6196660399436951, + "learning_rate": 5.775769978248838e-05, + "loss": 0.1986, + "step": 55260 + }, + { + "epoch": 2.1340592300861037, + "grad_norm": 0.4529377222061157, + "learning_rate": 5.7731958762886594e-05, + "loss": 0.3009, + "step": 55270 + }, + { + "epoch": 
2.1344453453801306, + "grad_norm": 0.44304555654525757, + "learning_rate": 5.770621774328482e-05, + "loss": 0.2522, + "step": 55280 + }, + { + "epoch": 2.134831460674157, + "grad_norm": 2.3878743648529053, + "learning_rate": 5.768047672368303e-05, + "loss": 0.1762, + "step": 55290 + }, + { + "epoch": 2.135217575968184, + "grad_norm": 1.3650730848312378, + "learning_rate": 5.765473570408124e-05, + "loss": 0.1596, + "step": 55300 + }, + { + "epoch": 2.1356036912622107, + "grad_norm": 1.0818227529525757, + "learning_rate": 5.7628994684479454e-05, + "loss": 0.0982, + "step": 55310 + }, + { + "epoch": 2.1359898065562377, + "grad_norm": 0.639480710029602, + "learning_rate": 5.760325366487767e-05, + "loss": 0.1165, + "step": 55320 + }, + { + "epoch": 2.1363759218502647, + "grad_norm": 0.9253720641136169, + "learning_rate": 5.7577512645275874e-05, + "loss": 0.3411, + "step": 55330 + }, + { + "epoch": 2.1367620371442912, + "grad_norm": 1.2035890817642212, + "learning_rate": 5.75517716256741e-05, + "loss": 0.2002, + "step": 55340 + }, + { + "epoch": 2.1371481524383182, + "grad_norm": 1.8806023597717285, + "learning_rate": 5.7526030606072314e-05, + "loss": 0.1834, + "step": 55350 + }, + { + "epoch": 2.137534267732345, + "grad_norm": 1.9944851398468018, + "learning_rate": 5.750028958647052e-05, + "loss": 0.2019, + "step": 55360 + }, + { + "epoch": 2.1379203830263718, + "grad_norm": 0.5148534774780273, + "learning_rate": 5.7474548566868734e-05, + "loss": 0.263, + "step": 55370 + }, + { + "epoch": 2.1383064983203983, + "grad_norm": 1.1325627565383911, + "learning_rate": 5.744880754726695e-05, + "loss": 0.1479, + "step": 55380 + }, + { + "epoch": 2.1386926136144253, + "grad_norm": 0.9628505706787109, + "learning_rate": 5.742306652766516e-05, + "loss": 0.1216, + "step": 55390 + }, + { + "epoch": 2.1390787289084523, + "grad_norm": 0.1893000602722168, + "learning_rate": 5.739732550806338e-05, + "loss": 0.161, + "step": 55400 + }, + { + "epoch": 2.139464844202479, + "grad_norm": 0.9227676391601562, + "learning_rate": 5.7371584488461594e-05, + "loss": 0.1336, + "step": 55410 + }, + { + "epoch": 2.139850959496506, + "grad_norm": 0.3534090220928192, + "learning_rate": 5.734584346885981e-05, + "loss": 0.1184, + "step": 55420 + }, + { + "epoch": 2.1402370747905324, + "grad_norm": 0.9937344193458557, + "learning_rate": 5.7320102449258014e-05, + "loss": 0.1053, + "step": 55430 + }, + { + "epoch": 2.1406231900845594, + "grad_norm": 1.7365370988845825, + "learning_rate": 5.729436142965623e-05, + "loss": 0.1863, + "step": 55440 + }, + { + "epoch": 2.141009305378586, + "grad_norm": 1.2345154285430908, + "learning_rate": 5.726862041005444e-05, + "loss": 0.3243, + "step": 55450 + }, + { + "epoch": 2.141395420672613, + "grad_norm": 0.36491262912750244, + "learning_rate": 5.724287939045266e-05, + "loss": 0.3046, + "step": 55460 + }, + { + "epoch": 2.1417815359666394, + "grad_norm": 0.6416808366775513, + "learning_rate": 5.7217138370850874e-05, + "loss": 0.1608, + "step": 55470 + }, + { + "epoch": 2.1421676512606664, + "grad_norm": 1.3553400039672852, + "learning_rate": 5.719139735124909e-05, + "loss": 0.2223, + "step": 55480 + }, + { + "epoch": 2.1425537665546934, + "grad_norm": 1.049273133277893, + "learning_rate": 5.71656563316473e-05, + "loss": 0.1196, + "step": 55490 + }, + { + "epoch": 2.14293988184872, + "grad_norm": 0.8493034243583679, + "learning_rate": 5.7139915312045514e-05, + "loss": 0.2165, + "step": 55500 + }, + { + "epoch": 2.143325997142747, + "grad_norm": 0.6411147117614746, + "learning_rate": 
5.711417429244372e-05, + "loss": 0.1442, + "step": 55510 + }, + { + "epoch": 2.1437121124367735, + "grad_norm": 0.7366828322410583, + "learning_rate": 5.7088433272841934e-05, + "loss": 0.3625, + "step": 55520 + }, + { + "epoch": 2.1440982277308005, + "grad_norm": 0.7757991552352905, + "learning_rate": 5.706269225324016e-05, + "loss": 0.1393, + "step": 55530 + }, + { + "epoch": 2.144484343024827, + "grad_norm": 1.496213436126709, + "learning_rate": 5.703695123363837e-05, + "loss": 0.2351, + "step": 55540 + }, + { + "epoch": 2.144870458318854, + "grad_norm": 1.1395788192749023, + "learning_rate": 5.701121021403658e-05, + "loss": 0.1933, + "step": 55550 + }, + { + "epoch": 2.1452565736128806, + "grad_norm": 3.914621353149414, + "learning_rate": 5.6985469194434794e-05, + "loss": 0.2532, + "step": 55560 + }, + { + "epoch": 2.1456426889069076, + "grad_norm": 2.1900668144226074, + "learning_rate": 5.695972817483301e-05, + "loss": 0.2192, + "step": 55570 + }, + { + "epoch": 2.1460288042009346, + "grad_norm": 2.3883585929870605, + "learning_rate": 5.6933987155231214e-05, + "loss": 0.2819, + "step": 55580 + }, + { + "epoch": 2.146414919494961, + "grad_norm": 0.28431379795074463, + "learning_rate": 5.690824613562944e-05, + "loss": 0.2381, + "step": 55590 + }, + { + "epoch": 2.146801034788988, + "grad_norm": 0.39490944147109985, + "learning_rate": 5.6882505116027654e-05, + "loss": 0.1224, + "step": 55600 + }, + { + "epoch": 2.1471871500830146, + "grad_norm": 0.5814546346664429, + "learning_rate": 5.685676409642586e-05, + "loss": 0.0878, + "step": 55610 + }, + { + "epoch": 2.1475732653770416, + "grad_norm": 3.863250255584717, + "learning_rate": 5.6831023076824074e-05, + "loss": 0.2972, + "step": 55620 + }, + { + "epoch": 2.147959380671068, + "grad_norm": 1.5579304695129395, + "learning_rate": 5.680528205722229e-05, + "loss": 0.2035, + "step": 55630 + }, + { + "epoch": 2.148345495965095, + "grad_norm": 1.3953123092651367, + "learning_rate": 5.67795410376205e-05, + "loss": 0.2039, + "step": 55640 + }, + { + "epoch": 2.148731611259122, + "grad_norm": 0.30549386143684387, + "learning_rate": 5.675380001801872e-05, + "loss": 0.1311, + "step": 55650 + }, + { + "epoch": 2.1491177265531487, + "grad_norm": 1.704403281211853, + "learning_rate": 5.6728058998416934e-05, + "loss": 0.1683, + "step": 55660 + }, + { + "epoch": 2.1495038418471757, + "grad_norm": 0.47322070598602295, + "learning_rate": 5.670231797881515e-05, + "loss": 0.0995, + "step": 55670 + }, + { + "epoch": 2.1498899571412022, + "grad_norm": 0.8710082769393921, + "learning_rate": 5.6676576959213354e-05, + "loss": 0.1737, + "step": 55680 + }, + { + "epoch": 2.1502760724352292, + "grad_norm": 3.141096353530884, + "learning_rate": 5.665083593961157e-05, + "loss": 0.1693, + "step": 55690 + }, + { + "epoch": 2.1506621877292558, + "grad_norm": 1.8428922891616821, + "learning_rate": 5.662509492000978e-05, + "loss": 0.0868, + "step": 55700 + }, + { + "epoch": 2.1510483030232828, + "grad_norm": 0.3056959807872772, + "learning_rate": 5.6599353900408e-05, + "loss": 0.2256, + "step": 55710 + }, + { + "epoch": 2.1514344183173097, + "grad_norm": 0.14365683495998383, + "learning_rate": 5.6573612880806214e-05, + "loss": 0.2421, + "step": 55720 + }, + { + "epoch": 2.1518205336113363, + "grad_norm": 0.34138041734695435, + "learning_rate": 5.654787186120443e-05, + "loss": 0.2089, + "step": 55730 + }, + { + "epoch": 2.1522066489053633, + "grad_norm": 1.2336843013763428, + "learning_rate": 5.652213084160264e-05, + "loss": 0.2282, + "step": 55740 + }, + { + "epoch": 
2.15259276419939, + "grad_norm": 0.121715247631073, + "learning_rate": 5.6496389822000854e-05, + "loss": 0.341, + "step": 55750 + }, + { + "epoch": 2.152978879493417, + "grad_norm": 1.271396279335022, + "learning_rate": 5.647064880239906e-05, + "loss": 0.346, + "step": 55760 + }, + { + "epoch": 2.1533649947874434, + "grad_norm": 0.39105209708213806, + "learning_rate": 5.6444907782797273e-05, + "loss": 0.2369, + "step": 55770 + }, + { + "epoch": 2.1537511100814704, + "grad_norm": 2.396703004837036, + "learning_rate": 5.6419166763195493e-05, + "loss": 0.1595, + "step": 55780 + }, + { + "epoch": 2.1541372253754973, + "grad_norm": 1.4576066732406616, + "learning_rate": 5.639342574359371e-05, + "loss": 0.1757, + "step": 55790 + }, + { + "epoch": 2.154523340669524, + "grad_norm": 0.5846558213233948, + "learning_rate": 5.636768472399192e-05, + "loss": 0.0961, + "step": 55800 + }, + { + "epoch": 2.154909455963551, + "grad_norm": 0.2891974449157715, + "learning_rate": 5.6341943704390133e-05, + "loss": 0.1381, + "step": 55810 + }, + { + "epoch": 2.1552955712575774, + "grad_norm": 1.4091805219650269, + "learning_rate": 5.631620268478835e-05, + "loss": 0.1934, + "step": 55820 + }, + { + "epoch": 2.1556816865516044, + "grad_norm": 0.5410944223403931, + "learning_rate": 5.629046166518655e-05, + "loss": 0.2414, + "step": 55830 + }, + { + "epoch": 2.156067801845631, + "grad_norm": 0.7418326735496521, + "learning_rate": 5.626472064558478e-05, + "loss": 0.0405, + "step": 55840 + }, + { + "epoch": 2.156453917139658, + "grad_norm": 0.03413806110620499, + "learning_rate": 5.6238979625982993e-05, + "loss": 0.1131, + "step": 55850 + }, + { + "epoch": 2.1568400324336845, + "grad_norm": 0.11657452583312988, + "learning_rate": 5.62132386063812e-05, + "loss": 0.2259, + "step": 55860 + }, + { + "epoch": 2.1572261477277115, + "grad_norm": 0.9248818755149841, + "learning_rate": 5.618749758677941e-05, + "loss": 0.1091, + "step": 55870 + }, + { + "epoch": 2.1576122630217385, + "grad_norm": 0.19540861248970032, + "learning_rate": 5.6161756567177627e-05, + "loss": 0.1831, + "step": 55880 + }, + { + "epoch": 2.157998378315765, + "grad_norm": 1.0880403518676758, + "learning_rate": 5.613601554757584e-05, + "loss": 0.2051, + "step": 55890 + }, + { + "epoch": 2.158384493609792, + "grad_norm": 0.766243577003479, + "learning_rate": 5.611027452797406e-05, + "loss": 0.123, + "step": 55900 + }, + { + "epoch": 2.1587706089038186, + "grad_norm": 0.7406583428382874, + "learning_rate": 5.608453350837227e-05, + "loss": 0.1137, + "step": 55910 + }, + { + "epoch": 2.1591567241978455, + "grad_norm": 0.5550261735916138, + "learning_rate": 5.605879248877049e-05, + "loss": 0.1978, + "step": 55920 + }, + { + "epoch": 2.159542839491872, + "grad_norm": 1.202231526374817, + "learning_rate": 5.603305146916869e-05, + "loss": 0.2157, + "step": 55930 + }, + { + "epoch": 2.159928954785899, + "grad_norm": 0.7214229702949524, + "learning_rate": 5.6007310449566906e-05, + "loss": 0.3131, + "step": 55940 + }, + { + "epoch": 2.160315070079926, + "grad_norm": 0.6656380295753479, + "learning_rate": 5.598156942996512e-05, + "loss": 0.1293, + "step": 55950 + }, + { + "epoch": 2.1607011853739526, + "grad_norm": 0.15865078568458557, + "learning_rate": 5.595582841036334e-05, + "loss": 0.045, + "step": 55960 + }, + { + "epoch": 2.1610873006679796, + "grad_norm": 0.6749983429908752, + "learning_rate": 5.593008739076155e-05, + "loss": 0.1891, + "step": 55970 + }, + { + "epoch": 2.161473415962006, + "grad_norm": 1.1434985399246216, + "learning_rate": 
5.5904346371159766e-05, + "loss": 0.0775, + "step": 55980 + }, + { + "epoch": 2.161859531256033, + "grad_norm": 1.1395485401153564, + "learning_rate": 5.587860535155798e-05, + "loss": 0.1444, + "step": 55990 + }, + { + "epoch": 2.1622456465500597, + "grad_norm": 0.44319289922714233, + "learning_rate": 5.5852864331956186e-05, + "loss": 0.2506, + "step": 56000 + }, + { + "epoch": 2.1626317618440867, + "grad_norm": 0.9017069935798645, + "learning_rate": 5.58271233123544e-05, + "loss": 0.2104, + "step": 56010 + }, + { + "epoch": 2.163017877138113, + "grad_norm": 2.420107126235962, + "learning_rate": 5.580138229275261e-05, + "loss": 0.2541, + "step": 56020 + }, + { + "epoch": 2.16340399243214, + "grad_norm": 0.5543047785758972, + "learning_rate": 5.577564127315083e-05, + "loss": 0.1408, + "step": 56030 + }, + { + "epoch": 2.163790107726167, + "grad_norm": 0.8099603652954102, + "learning_rate": 5.5749900253549046e-05, + "loss": 0.0781, + "step": 56040 + }, + { + "epoch": 2.1641762230201937, + "grad_norm": 0.972820520401001, + "learning_rate": 5.572415923394726e-05, + "loss": 0.0957, + "step": 56050 + }, + { + "epoch": 2.1645623383142207, + "grad_norm": 0.5426781177520752, + "learning_rate": 5.569841821434547e-05, + "loss": 0.1632, + "step": 56060 + }, + { + "epoch": 2.1649484536082473, + "grad_norm": 2.088747501373291, + "learning_rate": 5.5672677194743686e-05, + "loss": 0.143, + "step": 56070 + }, + { + "epoch": 2.1653345689022743, + "grad_norm": 0.3575989007949829, + "learning_rate": 5.564693617514189e-05, + "loss": 0.2365, + "step": 56080 + }, + { + "epoch": 2.165720684196301, + "grad_norm": 1.2159044742584229, + "learning_rate": 5.562119515554012e-05, + "loss": 0.2034, + "step": 56090 + }, + { + "epoch": 2.166106799490328, + "grad_norm": 1.2994232177734375, + "learning_rate": 5.559545413593833e-05, + "loss": 0.074, + "step": 56100 + }, + { + "epoch": 2.166492914784355, + "grad_norm": 0.2585364878177643, + "learning_rate": 5.556971311633654e-05, + "loss": 0.1147, + "step": 56110 + }, + { + "epoch": 2.1668790300783813, + "grad_norm": 0.18736127018928528, + "learning_rate": 5.554397209673475e-05, + "loss": 0.0895, + "step": 56120 + }, + { + "epoch": 2.1672651453724083, + "grad_norm": 0.43447959423065186, + "learning_rate": 5.5518231077132966e-05, + "loss": 0.1981, + "step": 56130 + }, + { + "epoch": 2.167651260666435, + "grad_norm": 1.2077672481536865, + "learning_rate": 5.549249005753118e-05, + "loss": 0.1432, + "step": 56140 + }, + { + "epoch": 2.168037375960462, + "grad_norm": 1.672919511795044, + "learning_rate": 5.54667490379294e-05, + "loss": 0.1381, + "step": 56150 + }, + { + "epoch": 2.1684234912544884, + "grad_norm": 0.6553566455841064, + "learning_rate": 5.544100801832761e-05, + "loss": 0.1089, + "step": 56160 + }, + { + "epoch": 2.1688096065485154, + "grad_norm": 0.4206780791282654, + "learning_rate": 5.5415266998725826e-05, + "loss": 0.1392, + "step": 56170 + }, + { + "epoch": 2.1691957218425424, + "grad_norm": 1.482874870300293, + "learning_rate": 5.538952597912403e-05, + "loss": 0.2027, + "step": 56180 + }, + { + "epoch": 2.169581837136569, + "grad_norm": 1.802695631980896, + "learning_rate": 5.5363784959522246e-05, + "loss": 0.2954, + "step": 56190 + }, + { + "epoch": 2.169967952430596, + "grad_norm": 0.7268577218055725, + "learning_rate": 5.533804393992046e-05, + "loss": 0.182, + "step": 56200 + }, + { + "epoch": 2.1703540677246225, + "grad_norm": 0.604767918586731, + "learning_rate": 5.531230292031868e-05, + "loss": 0.1247, + "step": 56210 + }, + { + "epoch": 
2.1707401830186495, + "grad_norm": 2.211203098297119, + "learning_rate": 5.528656190071689e-05, + "loss": 0.2143, + "step": 56220 + }, + { + "epoch": 2.171126298312676, + "grad_norm": 0.03701888397336006, + "learning_rate": 5.5260820881115106e-05, + "loss": 0.2246, + "step": 56230 + }, + { + "epoch": 2.171512413606703, + "grad_norm": 3.4111924171447754, + "learning_rate": 5.523507986151332e-05, + "loss": 0.3147, + "step": 56240 + }, + { + "epoch": 2.17189852890073, + "grad_norm": 0.509873628616333, + "learning_rate": 5.5209338841911526e-05, + "loss": 0.1482, + "step": 56250 + }, + { + "epoch": 2.1722846441947565, + "grad_norm": 1.0144810676574707, + "learning_rate": 5.518359782230974e-05, + "loss": 0.161, + "step": 56260 + }, + { + "epoch": 2.1726707594887835, + "grad_norm": 1.7236958742141724, + "learning_rate": 5.5157856802707966e-05, + "loss": 0.2863, + "step": 56270 + }, + { + "epoch": 2.17305687478281, + "grad_norm": 2.028493881225586, + "learning_rate": 5.513211578310617e-05, + "loss": 0.1263, + "step": 56280 + }, + { + "epoch": 2.173442990076837, + "grad_norm": 0.18114915490150452, + "learning_rate": 5.5106374763504386e-05, + "loss": 0.1941, + "step": 56290 + }, + { + "epoch": 2.1738291053708636, + "grad_norm": 2.091604471206665, + "learning_rate": 5.50806337439026e-05, + "loss": 0.1418, + "step": 56300 + }, + { + "epoch": 2.1742152206648906, + "grad_norm": 3.5671277046203613, + "learning_rate": 5.505489272430081e-05, + "loss": 0.1645, + "step": 56310 + }, + { + "epoch": 2.174601335958917, + "grad_norm": 2.093780040740967, + "learning_rate": 5.502915170469902e-05, + "loss": 0.2723, + "step": 56320 + }, + { + "epoch": 2.174987451252944, + "grad_norm": 1.0060350894927979, + "learning_rate": 5.500341068509723e-05, + "loss": 0.1432, + "step": 56330 + }, + { + "epoch": 2.175373566546971, + "grad_norm": 1.280118465423584, + "learning_rate": 5.497766966549546e-05, + "loss": 0.2152, + "step": 56340 + }, + { + "epoch": 2.1757596818409977, + "grad_norm": 0.14088940620422363, + "learning_rate": 5.4951928645893666e-05, + "loss": 0.2147, + "step": 56350 + }, + { + "epoch": 2.1761457971350247, + "grad_norm": 1.0671783685684204, + "learning_rate": 5.492618762629188e-05, + "loss": 0.1269, + "step": 56360 + }, + { + "epoch": 2.176531912429051, + "grad_norm": 0.20585323870182037, + "learning_rate": 5.490044660669009e-05, + "loss": 0.1455, + "step": 56370 + }, + { + "epoch": 2.176918027723078, + "grad_norm": 1.8759623765945435, + "learning_rate": 5.4874705587088306e-05, + "loss": 0.3711, + "step": 56380 + }, + { + "epoch": 2.1773041430171047, + "grad_norm": 1.1874949932098389, + "learning_rate": 5.484896456748652e-05, + "loss": 0.1062, + "step": 56390 + }, + { + "epoch": 2.1776902583111317, + "grad_norm": 1.0083370208740234, + "learning_rate": 5.482322354788474e-05, + "loss": 0.1121, + "step": 56400 + }, + { + "epoch": 2.1780763736051583, + "grad_norm": 0.7510607838630676, + "learning_rate": 5.479748252828295e-05, + "loss": 0.2032, + "step": 56410 + }, + { + "epoch": 2.1784624888991853, + "grad_norm": 0.307444304227829, + "learning_rate": 5.4771741508681166e-05, + "loss": 0.1786, + "step": 56420 + }, + { + "epoch": 2.1788486041932122, + "grad_norm": 0.1072758212685585, + "learning_rate": 5.474600048907937e-05, + "loss": 0.3205, + "step": 56430 + }, + { + "epoch": 2.179234719487239, + "grad_norm": 0.78147292137146, + "learning_rate": 5.4720259469477586e-05, + "loss": 0.1552, + "step": 56440 + }, + { + "epoch": 2.179620834781266, + "grad_norm": 0.4287649393081665, + "learning_rate": 
5.46945184498758e-05, + "loss": 0.0457, + "step": 56450 + }, + { + "epoch": 2.1800069500752923, + "grad_norm": 2.8946595191955566, + "learning_rate": 5.466877743027402e-05, + "loss": 0.1814, + "step": 56460 + }, + { + "epoch": 2.1803930653693193, + "grad_norm": 0.446044385433197, + "learning_rate": 5.464303641067223e-05, + "loss": 0.1898, + "step": 56470 + }, + { + "epoch": 2.180779180663346, + "grad_norm": 2.351010799407959, + "learning_rate": 5.4617295391070446e-05, + "loss": 0.1929, + "step": 56480 + }, + { + "epoch": 2.181165295957373, + "grad_norm": 1.1475882530212402, + "learning_rate": 5.459155437146866e-05, + "loss": 0.0972, + "step": 56490 + }, + { + "epoch": 2.1815514112514, + "grad_norm": 1.1613543033599854, + "learning_rate": 5.4565813351866865e-05, + "loss": 0.1397, + "step": 56500 + }, + { + "epoch": 2.1819375265454264, + "grad_norm": 1.2021968364715576, + "learning_rate": 5.454007233226508e-05, + "loss": 0.2538, + "step": 56510 + }, + { + "epoch": 2.1823236418394534, + "grad_norm": 1.1156634092330933, + "learning_rate": 5.4514331312663306e-05, + "loss": 0.1325, + "step": 56520 + }, + { + "epoch": 2.18270975713348, + "grad_norm": 3.0149824619293213, + "learning_rate": 5.448859029306151e-05, + "loss": 0.1596, + "step": 56530 + }, + { + "epoch": 2.183095872427507, + "grad_norm": 2.628236770629883, + "learning_rate": 5.4462849273459725e-05, + "loss": 0.3042, + "step": 56540 + }, + { + "epoch": 2.1834819877215335, + "grad_norm": 3.887352705001831, + "learning_rate": 5.443710825385794e-05, + "loss": 0.2112, + "step": 56550 + }, + { + "epoch": 2.1838681030155604, + "grad_norm": 1.9219342470169067, + "learning_rate": 5.441136723425615e-05, + "loss": 0.1709, + "step": 56560 + }, + { + "epoch": 2.1842542183095874, + "grad_norm": 1.5730615854263306, + "learning_rate": 5.438562621465436e-05, + "loss": 0.184, + "step": 56570 + }, + { + "epoch": 2.184640333603614, + "grad_norm": 1.306178331375122, + "learning_rate": 5.435988519505257e-05, + "loss": 0.2147, + "step": 56580 + }, + { + "epoch": 2.185026448897641, + "grad_norm": 0.3093883693218231, + "learning_rate": 5.43341441754508e-05, + "loss": 0.189, + "step": 56590 + }, + { + "epoch": 2.1854125641916675, + "grad_norm": 1.672884225845337, + "learning_rate": 5.4308403155849005e-05, + "loss": 0.1502, + "step": 56600 + }, + { + "epoch": 2.1857986794856945, + "grad_norm": 0.44754695892333984, + "learning_rate": 5.428266213624722e-05, + "loss": 0.1941, + "step": 56610 + }, + { + "epoch": 2.186184794779721, + "grad_norm": 0.3943333625793457, + "learning_rate": 5.425692111664543e-05, + "loss": 0.1494, + "step": 56620 + }, + { + "epoch": 2.186570910073748, + "grad_norm": 1.8399711847305298, + "learning_rate": 5.4231180097043645e-05, + "loss": 0.2462, + "step": 56630 + }, + { + "epoch": 2.186957025367775, + "grad_norm": 0.7934846878051758, + "learning_rate": 5.420543907744186e-05, + "loss": 0.1305, + "step": 56640 + }, + { + "epoch": 2.1873431406618016, + "grad_norm": 3.170630693435669, + "learning_rate": 5.417969805784008e-05, + "loss": 0.1825, + "step": 56650 + }, + { + "epoch": 2.1877292559558286, + "grad_norm": 1.4730361700057983, + "learning_rate": 5.415395703823829e-05, + "loss": 0.1929, + "step": 56660 + }, + { + "epoch": 2.188115371249855, + "grad_norm": 0.38193902373313904, + "learning_rate": 5.41282160186365e-05, + "loss": 0.2001, + "step": 56670 + }, + { + "epoch": 2.188501486543882, + "grad_norm": 1.1314163208007812, + "learning_rate": 5.410247499903471e-05, + "loss": 0.1913, + "step": 56680 + }, + { + "epoch": 2.1888876018379086, + 
"grad_norm": 0.40177929401397705, + "learning_rate": 5.4076733979432925e-05, + "loss": 0.1646, + "step": 56690 + }, + { + "epoch": 2.1892737171319356, + "grad_norm": 4.792402744293213, + "learning_rate": 5.405099295983114e-05, + "loss": 0.2481, + "step": 56700 + }, + { + "epoch": 2.1896598324259626, + "grad_norm": 2.82281756401062, + "learning_rate": 5.402525194022936e-05, + "loss": 0.142, + "step": 56710 + }, + { + "epoch": 2.190045947719989, + "grad_norm": 2.4064247608184814, + "learning_rate": 5.399951092062757e-05, + "loss": 0.2333, + "step": 56720 + }, + { + "epoch": 2.190432063014016, + "grad_norm": 1.3720029592514038, + "learning_rate": 5.3973769901025785e-05, + "loss": 0.1812, + "step": 56730 + }, + { + "epoch": 2.1908181783080427, + "grad_norm": 0.5120772123336792, + "learning_rate": 5.3948028881424e-05, + "loss": 0.2093, + "step": 56740 + }, + { + "epoch": 2.1912042936020697, + "grad_norm": 1.755660057067871, + "learning_rate": 5.3922287861822205e-05, + "loss": 0.1866, + "step": 56750 + }, + { + "epoch": 2.1915904088960962, + "grad_norm": 0.6418548226356506, + "learning_rate": 5.389654684222042e-05, + "loss": 0.1122, + "step": 56760 + }, + { + "epoch": 2.1919765241901232, + "grad_norm": 0.29100701212882996, + "learning_rate": 5.3870805822618645e-05, + "loss": 0.1508, + "step": 56770 + }, + { + "epoch": 2.1923626394841498, + "grad_norm": 1.2336047887802124, + "learning_rate": 5.384506480301685e-05, + "loss": 0.2027, + "step": 56780 + }, + { + "epoch": 2.1927487547781768, + "grad_norm": 1.7961387634277344, + "learning_rate": 5.3819323783415065e-05, + "loss": 0.0698, + "step": 56790 + }, + { + "epoch": 2.1931348700722038, + "grad_norm": 0.45203906297683716, + "learning_rate": 5.379358276381328e-05, + "loss": 0.1201, + "step": 56800 + }, + { + "epoch": 2.1935209853662303, + "grad_norm": 2.4944546222686768, + "learning_rate": 5.376784174421149e-05, + "loss": 0.252, + "step": 56810 + }, + { + "epoch": 2.1939071006602573, + "grad_norm": 0.6468565464019775, + "learning_rate": 5.37421007246097e-05, + "loss": 0.3302, + "step": 56820 + }, + { + "epoch": 2.194293215954284, + "grad_norm": 0.6524060368537903, + "learning_rate": 5.371635970500791e-05, + "loss": 0.1588, + "step": 56830 + }, + { + "epoch": 2.194679331248311, + "grad_norm": 1.2810111045837402, + "learning_rate": 5.369061868540614e-05, + "loss": 0.2929, + "step": 56840 + }, + { + "epoch": 2.1950654465423374, + "grad_norm": 1.5758986473083496, + "learning_rate": 5.3664877665804345e-05, + "loss": 0.2013, + "step": 56850 + }, + { + "epoch": 2.1954515618363644, + "grad_norm": 0.8895549774169922, + "learning_rate": 5.363913664620256e-05, + "loss": 0.1539, + "step": 56860 + }, + { + "epoch": 2.195837677130391, + "grad_norm": 0.0427737757563591, + "learning_rate": 5.361339562660077e-05, + "loss": 0.0949, + "step": 56870 + }, + { + "epoch": 2.196223792424418, + "grad_norm": 0.9843714237213135, + "learning_rate": 5.3587654606998985e-05, + "loss": 0.1784, + "step": 56880 + }, + { + "epoch": 2.196609907718445, + "grad_norm": 0.9936504364013672, + "learning_rate": 5.356191358739719e-05, + "loss": 0.1075, + "step": 56890 + }, + { + "epoch": 2.1969960230124714, + "grad_norm": 0.3362007737159729, + "learning_rate": 5.353617256779542e-05, + "loss": 0.1496, + "step": 56900 + }, + { + "epoch": 2.1973821383064984, + "grad_norm": 0.34201017022132874, + "learning_rate": 5.351043154819363e-05, + "loss": 0.1036, + "step": 56910 + }, + { + "epoch": 2.197768253600525, + "grad_norm": 1.8447175025939941, + "learning_rate": 5.348469052859184e-05, + "loss": 
0.1161, + "step": 56920 + }, + { + "epoch": 2.198154368894552, + "grad_norm": 1.2634321451187134, + "learning_rate": 5.345894950899005e-05, + "loss": 0.1302, + "step": 56930 + }, + { + "epoch": 2.1985404841885785, + "grad_norm": 1.3948713541030884, + "learning_rate": 5.3433208489388265e-05, + "loss": 0.2252, + "step": 56940 + }, + { + "epoch": 2.1989265994826055, + "grad_norm": 0.7020501494407654, + "learning_rate": 5.340746746978648e-05, + "loss": 0.1938, + "step": 56950 + }, + { + "epoch": 2.1993127147766325, + "grad_norm": 4.047187805175781, + "learning_rate": 5.33817264501847e-05, + "loss": 0.2477, + "step": 56960 + }, + { + "epoch": 2.199698830070659, + "grad_norm": 0.5025122761726379, + "learning_rate": 5.335598543058291e-05, + "loss": 0.1044, + "step": 56970 + }, + { + "epoch": 2.200084945364686, + "grad_norm": 1.8918673992156982, + "learning_rate": 5.3330244410981125e-05, + "loss": 0.3172, + "step": 56980 + }, + { + "epoch": 2.2004710606587126, + "grad_norm": 3.515730857849121, + "learning_rate": 5.330450339137933e-05, + "loss": 0.0818, + "step": 56990 + }, + { + "epoch": 2.2008571759527396, + "grad_norm": 2.531858444213867, + "learning_rate": 5.3278762371777545e-05, + "loss": 0.2849, + "step": 57000 + }, + { + "epoch": 2.201243291246766, + "grad_norm": 3.145490884780884, + "learning_rate": 5.325302135217576e-05, + "loss": 0.2319, + "step": 57010 + }, + { + "epoch": 2.201629406540793, + "grad_norm": 1.8957561254501343, + "learning_rate": 5.322728033257398e-05, + "loss": 0.2513, + "step": 57020 + }, + { + "epoch": 2.20201552183482, + "grad_norm": 1.326156497001648, + "learning_rate": 5.320153931297219e-05, + "loss": 0.295, + "step": 57030 + }, + { + "epoch": 2.2024016371288466, + "grad_norm": 0.8725142478942871, + "learning_rate": 5.3175798293370405e-05, + "loss": 0.1235, + "step": 57040 + }, + { + "epoch": 2.2027877524228736, + "grad_norm": 0.8360647559165955, + "learning_rate": 5.315005727376862e-05, + "loss": 0.224, + "step": 57050 + }, + { + "epoch": 2.2031738677169, + "grad_norm": 1.381373643875122, + "learning_rate": 5.312431625416683e-05, + "loss": 0.1003, + "step": 57060 + }, + { + "epoch": 2.203559983010927, + "grad_norm": 2.6999964714050293, + "learning_rate": 5.309857523456504e-05, + "loss": 0.2118, + "step": 57070 + }, + { + "epoch": 2.2039460983049537, + "grad_norm": 2.1584982872009277, + "learning_rate": 5.307283421496325e-05, + "loss": 0.2965, + "step": 57080 + }, + { + "epoch": 2.2043322135989807, + "grad_norm": 1.3775367736816406, + "learning_rate": 5.304709319536148e-05, + "loss": 0.2494, + "step": 57090 + }, + { + "epoch": 2.2047183288930077, + "grad_norm": 2.1069607734680176, + "learning_rate": 5.3021352175759684e-05, + "loss": 0.3412, + "step": 57100 + }, + { + "epoch": 2.205104444187034, + "grad_norm": 1.3556911945343018, + "learning_rate": 5.29956111561579e-05, + "loss": 0.2076, + "step": 57110 + }, + { + "epoch": 2.205490559481061, + "grad_norm": 0.4328407049179077, + "learning_rate": 5.296987013655611e-05, + "loss": 0.0691, + "step": 57120 + }, + { + "epoch": 2.2058766747750878, + "grad_norm": 0.24479885399341583, + "learning_rate": 5.2944129116954324e-05, + "loss": 0.3407, + "step": 57130 + }, + { + "epoch": 2.2062627900691147, + "grad_norm": 0.4531087279319763, + "learning_rate": 5.291838809735253e-05, + "loss": 0.1471, + "step": 57140 + }, + { + "epoch": 2.2066489053631413, + "grad_norm": 1.258487582206726, + "learning_rate": 5.289264707775076e-05, + "loss": 0.2213, + "step": 57150 + }, + { + "epoch": 2.2070350206571683, + "grad_norm": 1.8605122566223145, 
+ "learning_rate": 5.286690605814897e-05, + "loss": 0.1839, + "step": 57160 + }, + { + "epoch": 2.207421135951195, + "grad_norm": 0.20423386991024017, + "learning_rate": 5.284116503854718e-05, + "loss": 0.1898, + "step": 57170 + }, + { + "epoch": 2.207807251245222, + "grad_norm": 1.366576910018921, + "learning_rate": 5.281542401894539e-05, + "loss": 0.1533, + "step": 57180 + }, + { + "epoch": 2.208193366539249, + "grad_norm": 2.0091841220855713, + "learning_rate": 5.2789682999343604e-05, + "loss": 0.2834, + "step": 57190 + }, + { + "epoch": 2.2085794818332753, + "grad_norm": 1.271532654762268, + "learning_rate": 5.276394197974182e-05, + "loss": 0.2215, + "step": 57200 + }, + { + "epoch": 2.2089655971273023, + "grad_norm": 1.3751137256622314, + "learning_rate": 5.273820096014004e-05, + "loss": 0.1753, + "step": 57210 + }, + { + "epoch": 2.209351712421329, + "grad_norm": 1.6233354806900024, + "learning_rate": 5.271245994053825e-05, + "loss": 0.425, + "step": 57220 + }, + { + "epoch": 2.209737827715356, + "grad_norm": 0.05391040816903114, + "learning_rate": 5.2686718920936464e-05, + "loss": 0.1759, + "step": 57230 + }, + { + "epoch": 2.2101239430093824, + "grad_norm": 0.0809585228562355, + "learning_rate": 5.266097790133467e-05, + "loss": 0.1902, + "step": 57240 + }, + { + "epoch": 2.2105100583034094, + "grad_norm": 0.05576219782233238, + "learning_rate": 5.2635236881732884e-05, + "loss": 0.1104, + "step": 57250 + }, + { + "epoch": 2.2108961735974364, + "grad_norm": 0.2295994609594345, + "learning_rate": 5.26094958621311e-05, + "loss": 0.1119, + "step": 57260 + }, + { + "epoch": 2.211282288891463, + "grad_norm": 2.042689085006714, + "learning_rate": 5.258375484252932e-05, + "loss": 0.1703, + "step": 57270 + }, + { + "epoch": 2.21166840418549, + "grad_norm": 0.13086611032485962, + "learning_rate": 5.255801382292753e-05, + "loss": 0.2181, + "step": 57280 + }, + { + "epoch": 2.2120545194795165, + "grad_norm": 0.7772855758666992, + "learning_rate": 5.2532272803325744e-05, + "loss": 0.2975, + "step": 57290 + }, + { + "epoch": 2.2124406347735435, + "grad_norm": 1.100147008895874, + "learning_rate": 5.250653178372396e-05, + "loss": 0.1446, + "step": 57300 + }, + { + "epoch": 2.21282675006757, + "grad_norm": 2.2804689407348633, + "learning_rate": 5.248079076412217e-05, + "loss": 0.1779, + "step": 57310 + }, + { + "epoch": 2.213212865361597, + "grad_norm": 0.909376323223114, + "learning_rate": 5.245504974452038e-05, + "loss": 0.1557, + "step": 57320 + }, + { + "epoch": 2.2135989806556235, + "grad_norm": 0.808570921421051, + "learning_rate": 5.242930872491859e-05, + "loss": 0.2107, + "step": 57330 + }, + { + "epoch": 2.2139850959496505, + "grad_norm": 1.6458532810211182, + "learning_rate": 5.240356770531681e-05, + "loss": 0.1729, + "step": 57340 + }, + { + "epoch": 2.2143712112436775, + "grad_norm": 1.928828477859497, + "learning_rate": 5.2377826685715024e-05, + "loss": 0.1234, + "step": 57350 + }, + { + "epoch": 2.214757326537704, + "grad_norm": 1.076491117477417, + "learning_rate": 5.235208566611324e-05, + "loss": 0.2457, + "step": 57360 + }, + { + "epoch": 2.215143441831731, + "grad_norm": 3.043621778488159, + "learning_rate": 5.232634464651145e-05, + "loss": 0.2592, + "step": 57370 + }, + { + "epoch": 2.2155295571257576, + "grad_norm": 0.1770264357328415, + "learning_rate": 5.2300603626909664e-05, + "loss": 0.1214, + "step": 57380 + }, + { + "epoch": 2.2159156724197846, + "grad_norm": 0.12781330943107605, + "learning_rate": 5.227486260730787e-05, + "loss": 0.2213, + "step": 57390 + }, + { + 
"epoch": 2.216301787713811, + "grad_norm": 1.3419160842895508, + "learning_rate": 5.22491215877061e-05, + "loss": 0.1806, + "step": 57400 + }, + { + "epoch": 2.216687903007838, + "grad_norm": 0.9998745322227478, + "learning_rate": 5.222338056810431e-05, + "loss": 0.1452, + "step": 57410 + }, + { + "epoch": 2.217074018301865, + "grad_norm": 0.5411838889122009, + "learning_rate": 5.219763954850252e-05, + "loss": 0.0978, + "step": 57420 + }, + { + "epoch": 2.2174601335958917, + "grad_norm": 0.505660891532898, + "learning_rate": 5.217189852890073e-05, + "loss": 0.0553, + "step": 57430 + }, + { + "epoch": 2.2178462488899187, + "grad_norm": 2.207895278930664, + "learning_rate": 5.2146157509298944e-05, + "loss": 0.2327, + "step": 57440 + }, + { + "epoch": 2.218232364183945, + "grad_norm": 2.272740364074707, + "learning_rate": 5.212041648969716e-05, + "loss": 0.1773, + "step": 57450 + }, + { + "epoch": 2.218618479477972, + "grad_norm": 1.0580307245254517, + "learning_rate": 5.209467547009538e-05, + "loss": 0.1024, + "step": 57460 + }, + { + "epoch": 2.2190045947719987, + "grad_norm": 0.3714104890823364, + "learning_rate": 5.206893445049359e-05, + "loss": 0.1082, + "step": 57470 + }, + { + "epoch": 2.2193907100660257, + "grad_norm": 0.35416433215141296, + "learning_rate": 5.2043193430891804e-05, + "loss": 0.1082, + "step": 57480 + }, + { + "epoch": 2.2197768253600527, + "grad_norm": 1.1985892057418823, + "learning_rate": 5.201745241129001e-05, + "loss": 0.1227, + "step": 57490 + }, + { + "epoch": 2.2201629406540793, + "grad_norm": 0.7527439594268799, + "learning_rate": 5.1991711391688224e-05, + "loss": 0.2785, + "step": 57500 + }, + { + "epoch": 2.2205490559481063, + "grad_norm": 2.3120546340942383, + "learning_rate": 5.196597037208644e-05, + "loss": 0.1503, + "step": 57510 + }, + { + "epoch": 2.220935171242133, + "grad_norm": 1.3414112329483032, + "learning_rate": 5.194022935248466e-05, + "loss": 0.1436, + "step": 57520 + }, + { + "epoch": 2.22132128653616, + "grad_norm": 0.9937017560005188, + "learning_rate": 5.191448833288287e-05, + "loss": 0.1424, + "step": 57530 + }, + { + "epoch": 2.2217074018301863, + "grad_norm": 0.952620267868042, + "learning_rate": 5.1888747313281084e-05, + "loss": 0.1701, + "step": 57540 + }, + { + "epoch": 2.2220935171242133, + "grad_norm": 0.421085000038147, + "learning_rate": 5.18630062936793e-05, + "loss": 0.1187, + "step": 57550 + }, + { + "epoch": 2.2224796324182403, + "grad_norm": 1.5724862813949585, + "learning_rate": 5.1837265274077504e-05, + "loss": 0.1509, + "step": 57560 + }, + { + "epoch": 2.222865747712267, + "grad_norm": 1.705536127090454, + "learning_rate": 5.181152425447572e-05, + "loss": 0.1546, + "step": 57570 + }, + { + "epoch": 2.223251863006294, + "grad_norm": 0.6752486824989319, + "learning_rate": 5.1785783234873944e-05, + "loss": 0.1785, + "step": 57580 + }, + { + "epoch": 2.2236379783003204, + "grad_norm": 0.1227736845612526, + "learning_rate": 5.176004221527215e-05, + "loss": 0.1198, + "step": 57590 + }, + { + "epoch": 2.2240240935943474, + "grad_norm": 1.0653119087219238, + "learning_rate": 5.1734301195670364e-05, + "loss": 0.2688, + "step": 57600 + }, + { + "epoch": 2.224410208888374, + "grad_norm": 2.1538949012756348, + "learning_rate": 5.170856017606858e-05, + "loss": 0.1402, + "step": 57610 + }, + { + "epoch": 2.224796324182401, + "grad_norm": 2.1059763431549072, + "learning_rate": 5.168281915646679e-05, + "loss": 0.1319, + "step": 57620 + }, + { + "epoch": 2.2251824394764275, + "grad_norm": 1.8453232049942017, + "learning_rate": 
5.1657078136865004e-05, + "loss": 0.154, + "step": 57630 + }, + { + "epoch": 2.2255685547704545, + "grad_norm": 1.7324992418289185, + "learning_rate": 5.163133711726321e-05, + "loss": 0.2807, + "step": 57640 + }, + { + "epoch": 2.2259546700644814, + "grad_norm": 0.4680674374103546, + "learning_rate": 5.160559609766144e-05, + "loss": 0.1648, + "step": 57650 + }, + { + "epoch": 2.226340785358508, + "grad_norm": 0.2356865406036377, + "learning_rate": 5.1579855078059643e-05, + "loss": 0.2068, + "step": 57660 + }, + { + "epoch": 2.226726900652535, + "grad_norm": 1.2691845893859863, + "learning_rate": 5.155411405845786e-05, + "loss": 0.425, + "step": 57670 + }, + { + "epoch": 2.2271130159465615, + "grad_norm": 0.09415816515684128, + "learning_rate": 5.152837303885607e-05, + "loss": 0.1015, + "step": 57680 + }, + { + "epoch": 2.2274991312405885, + "grad_norm": 1.1072195768356323, + "learning_rate": 5.1502632019254283e-05, + "loss": 0.2324, + "step": 57690 + }, + { + "epoch": 2.227885246534615, + "grad_norm": 1.774086594581604, + "learning_rate": 5.14768909996525e-05, + "loss": 0.1382, + "step": 57700 + }, + { + "epoch": 2.228271361828642, + "grad_norm": 1.3065643310546875, + "learning_rate": 5.145114998005072e-05, + "loss": 0.1843, + "step": 57710 + }, + { + "epoch": 2.2286574771226686, + "grad_norm": 1.4786202907562256, + "learning_rate": 5.142540896044893e-05, + "loss": 0.2328, + "step": 57720 + }, + { + "epoch": 2.2290435924166956, + "grad_norm": 1.2334314584732056, + "learning_rate": 5.1399667940847143e-05, + "loss": 0.2064, + "step": 57730 + }, + { + "epoch": 2.2294297077107226, + "grad_norm": 0.6082472801208496, + "learning_rate": 5.137392692124535e-05, + "loss": 0.1278, + "step": 57740 + }, + { + "epoch": 2.229815823004749, + "grad_norm": 1.2659168243408203, + "learning_rate": 5.134818590164356e-05, + "loss": 0.1877, + "step": 57750 + }, + { + "epoch": 2.230201938298776, + "grad_norm": 1.652754783630371, + "learning_rate": 5.1322444882041777e-05, + "loss": 0.2277, + "step": 57760 + }, + { + "epoch": 2.2305880535928027, + "grad_norm": 1.6349531412124634, + "learning_rate": 5.129670386244e-05, + "loss": 0.2374, + "step": 57770 + }, + { + "epoch": 2.2309741688868296, + "grad_norm": 1.2513495683670044, + "learning_rate": 5.127096284283821e-05, + "loss": 0.2055, + "step": 57780 + }, + { + "epoch": 2.231360284180856, + "grad_norm": 2.0995755195617676, + "learning_rate": 5.124522182323642e-05, + "loss": 0.1968, + "step": 57790 + }, + { + "epoch": 2.231746399474883, + "grad_norm": 2.2957067489624023, + "learning_rate": 5.1219480803634637e-05, + "loss": 0.233, + "step": 57800 + }, + { + "epoch": 2.23213251476891, + "grad_norm": 2.303072452545166, + "learning_rate": 5.119373978403284e-05, + "loss": 0.0924, + "step": 57810 + }, + { + "epoch": 2.2325186300629367, + "grad_norm": 0.6360287070274353, + "learning_rate": 5.1167998764431056e-05, + "loss": 0.1557, + "step": 57820 + }, + { + "epoch": 2.2329047453569637, + "grad_norm": 0.720551073551178, + "learning_rate": 5.114225774482928e-05, + "loss": 0.1619, + "step": 57830 + }, + { + "epoch": 2.2332908606509903, + "grad_norm": 0.332627534866333, + "learning_rate": 5.111651672522749e-05, + "loss": 0.1529, + "step": 57840 + }, + { + "epoch": 2.2336769759450172, + "grad_norm": 2.1180593967437744, + "learning_rate": 5.10907757056257e-05, + "loss": 0.2647, + "step": 57850 + }, + { + "epoch": 2.234063091239044, + "grad_norm": 0.023406701162457466, + "learning_rate": 5.1065034686023916e-05, + "loss": 0.2461, + "step": 57860 + }, + { + "epoch": 
2.2344492065330708, + "grad_norm": 0.887008011341095, + "learning_rate": 5.103929366642213e-05, + "loss": 0.191, + "step": 57870 + }, + { + "epoch": 2.2348353218270978, + "grad_norm": 0.9116653203964233, + "learning_rate": 5.1013552646820336e-05, + "loss": 0.2199, + "step": 57880 + }, + { + "epoch": 2.2352214371211243, + "grad_norm": 2.72094464302063, + "learning_rate": 5.098781162721855e-05, + "loss": 0.1219, + "step": 57890 + }, + { + "epoch": 2.2356075524151513, + "grad_norm": 1.7832390069961548, + "learning_rate": 5.0962070607616776e-05, + "loss": 0.1516, + "step": 57900 + }, + { + "epoch": 2.235993667709178, + "grad_norm": 1.892029881477356, + "learning_rate": 5.093632958801498e-05, + "loss": 0.1943, + "step": 57910 + }, + { + "epoch": 2.236379783003205, + "grad_norm": 1.284244418144226, + "learning_rate": 5.0910588568413196e-05, + "loss": 0.0762, + "step": 57920 + }, + { + "epoch": 2.2367658982972314, + "grad_norm": 2.9343578815460205, + "learning_rate": 5.088484754881141e-05, + "loss": 0.1623, + "step": 57930 + }, + { + "epoch": 2.2371520135912584, + "grad_norm": 2.6697938442230225, + "learning_rate": 5.085910652920962e-05, + "loss": 0.1781, + "step": 57940 + }, + { + "epoch": 2.2375381288852854, + "grad_norm": 0.14455921947956085, + "learning_rate": 5.0833365509607836e-05, + "loss": 0.3026, + "step": 57950 + }, + { + "epoch": 2.237924244179312, + "grad_norm": 0.3427145183086395, + "learning_rate": 5.0807624490006056e-05, + "loss": 0.047, + "step": 57960 + }, + { + "epoch": 2.238310359473339, + "grad_norm": 0.4725586473941803, + "learning_rate": 5.078188347040427e-05, + "loss": 0.1794, + "step": 57970 + }, + { + "epoch": 2.2386964747673654, + "grad_norm": 2.0606446266174316, + "learning_rate": 5.075614245080248e-05, + "loss": 0.0948, + "step": 57980 + }, + { + "epoch": 2.2390825900613924, + "grad_norm": 0.08911284059286118, + "learning_rate": 5.073040143120069e-05, + "loss": 0.1171, + "step": 57990 + }, + { + "epoch": 2.239468705355419, + "grad_norm": 0.05132399871945381, + "learning_rate": 5.07046604115989e-05, + "loss": 0.1201, + "step": 58000 + }, + { + "epoch": 2.239854820649446, + "grad_norm": 0.5799759030342102, + "learning_rate": 5.0678919391997116e-05, + "loss": 0.1702, + "step": 58010 + }, + { + "epoch": 2.240240935943473, + "grad_norm": 1.5331569910049438, + "learning_rate": 5.0653178372395336e-05, + "loss": 0.2916, + "step": 58020 + }, + { + "epoch": 2.2406270512374995, + "grad_norm": 0.31285667419433594, + "learning_rate": 5.062743735279355e-05, + "loss": 0.1659, + "step": 58030 + }, + { + "epoch": 2.2410131665315265, + "grad_norm": 1.9137883186340332, + "learning_rate": 5.060169633319176e-05, + "loss": 0.0994, + "step": 58040 + }, + { + "epoch": 2.241399281825553, + "grad_norm": 0.0040522972121834755, + "learning_rate": 5.0575955313589976e-05, + "loss": 0.1047, + "step": 58050 + }, + { + "epoch": 2.24178539711958, + "grad_norm": 1.4532781839370728, + "learning_rate": 5.055021429398818e-05, + "loss": 0.1351, + "step": 58060 + }, + { + "epoch": 2.2421715124136066, + "grad_norm": 1.1458393335342407, + "learning_rate": 5.0524473274386396e-05, + "loss": 0.0966, + "step": 58070 + }, + { + "epoch": 2.2425576277076336, + "grad_norm": 0.4871302545070648, + "learning_rate": 5.049873225478462e-05, + "loss": 0.2297, + "step": 58080 + }, + { + "epoch": 2.24294374300166, + "grad_norm": 0.8895847201347351, + "learning_rate": 5.047299123518283e-05, + "loss": 0.1101, + "step": 58090 + }, + { + "epoch": 2.243329858295687, + "grad_norm": 1.5819259881973267, + "learning_rate": 
5.044725021558104e-05, + "loss": 0.165, + "step": 58100 + }, + { + "epoch": 2.243715973589714, + "grad_norm": 0.4520101249217987, + "learning_rate": 5.0421509195979256e-05, + "loss": 0.2857, + "step": 58110 + }, + { + "epoch": 2.2441020888837406, + "grad_norm": 0.6979352235794067, + "learning_rate": 5.039576817637747e-05, + "loss": 0.1135, + "step": 58120 + }, + { + "epoch": 2.2444882041777676, + "grad_norm": 0.10534228384494781, + "learning_rate": 5.0370027156775676e-05, + "loss": 0.1788, + "step": 58130 + }, + { + "epoch": 2.244874319471794, + "grad_norm": 1.593078851699829, + "learning_rate": 5.034428613717389e-05, + "loss": 0.0948, + "step": 58140 + }, + { + "epoch": 2.245260434765821, + "grad_norm": 0.7897083163261414, + "learning_rate": 5.0318545117572116e-05, + "loss": 0.1155, + "step": 58150 + }, + { + "epoch": 2.2456465500598477, + "grad_norm": 0.17938394844532013, + "learning_rate": 5.029280409797032e-05, + "loss": 0.246, + "step": 58160 + }, + { + "epoch": 2.2460326653538747, + "grad_norm": 0.9242120385169983, + "learning_rate": 5.0267063078368536e-05, + "loss": 0.2686, + "step": 58170 + }, + { + "epoch": 2.2464187806479012, + "grad_norm": 0.46744218468666077, + "learning_rate": 5.024132205876675e-05, + "loss": 0.105, + "step": 58180 + }, + { + "epoch": 2.2468048959419282, + "grad_norm": 1.0429635047912598, + "learning_rate": 5.021558103916496e-05, + "loss": 0.0922, + "step": 58190 + }, + { + "epoch": 2.247191011235955, + "grad_norm": 2.889759063720703, + "learning_rate": 5.018984001956317e-05, + "loss": 0.2586, + "step": 58200 + }, + { + "epoch": 2.2475771265299818, + "grad_norm": 1.0298150777816772, + "learning_rate": 5.0164098999961396e-05, + "loss": 0.1373, + "step": 58210 + }, + { + "epoch": 2.2479632418240088, + "grad_norm": 0.14992554485797882, + "learning_rate": 5.013835798035961e-05, + "loss": 0.2644, + "step": 58220 + }, + { + "epoch": 2.2483493571180353, + "grad_norm": 0.8929703831672668, + "learning_rate": 5.0112616960757816e-05, + "loss": 0.2109, + "step": 58230 + }, + { + "epoch": 2.2487354724120623, + "grad_norm": 0.8829396367073059, + "learning_rate": 5.008687594115603e-05, + "loss": 0.2353, + "step": 58240 + }, + { + "epoch": 2.249121587706089, + "grad_norm": 0.3709293305873871, + "learning_rate": 5.006113492155424e-05, + "loss": 0.099, + "step": 58250 + }, + { + "epoch": 2.249507703000116, + "grad_norm": 0.17572759091854095, + "learning_rate": 5.0035393901952456e-05, + "loss": 0.1161, + "step": 58260 + }, + { + "epoch": 2.249893818294143, + "grad_norm": 0.3241714537143707, + "learning_rate": 5.0009652882350676e-05, + "loss": 0.2081, + "step": 58270 + }, + { + "epoch": 2.2502799335881694, + "grad_norm": 0.5595920085906982, + "learning_rate": 4.998391186274888e-05, + "loss": 0.2376, + "step": 58280 + }, + { + "epoch": 2.2506660488821963, + "grad_norm": 0.8801298141479492, + "learning_rate": 4.99581708431471e-05, + "loss": 0.1423, + "step": 58290 + }, + { + "epoch": 2.251052164176223, + "grad_norm": 1.4857895374298096, + "learning_rate": 4.9932429823545316e-05, + "loss": 0.169, + "step": 58300 + }, + { + "epoch": 2.25143827947025, + "grad_norm": 1.0327515602111816, + "learning_rate": 4.990668880394352e-05, + "loss": 0.1127, + "step": 58310 + }, + { + "epoch": 2.2518243947642764, + "grad_norm": 0.19778093695640564, + "learning_rate": 4.988094778434174e-05, + "loss": 0.2655, + "step": 58320 + }, + { + "epoch": 2.2522105100583034, + "grad_norm": 1.3672188520431519, + "learning_rate": 4.9855206764739956e-05, + "loss": 0.1252, + "step": 58330 + }, + { + "epoch": 
2.2525966253523304, + "grad_norm": 3.6712214946746826, + "learning_rate": 4.982946574513816e-05, + "loss": 0.2721, + "step": 58340 + }, + { + "epoch": 2.252982740646357, + "grad_norm": 0.19810612499713898, + "learning_rate": 4.980372472553638e-05, + "loss": 0.1008, + "step": 58350 + }, + { + "epoch": 2.253368855940384, + "grad_norm": 0.5414086580276489, + "learning_rate": 4.9777983705934596e-05, + "loss": 0.1898, + "step": 58360 + }, + { + "epoch": 2.2537549712344105, + "grad_norm": 1.883710503578186, + "learning_rate": 4.975224268633281e-05, + "loss": 0.1841, + "step": 58370 + }, + { + "epoch": 2.2541410865284375, + "grad_norm": 0.3979630172252655, + "learning_rate": 4.972650166673102e-05, + "loss": 0.1712, + "step": 58380 + }, + { + "epoch": 2.254527201822464, + "grad_norm": 1.2606881856918335, + "learning_rate": 4.9700760647129236e-05, + "loss": 0.1772, + "step": 58390 + }, + { + "epoch": 2.254913317116491, + "grad_norm": 0.6021280288696289, + "learning_rate": 4.967501962752745e-05, + "loss": 0.1662, + "step": 58400 + }, + { + "epoch": 2.255299432410518, + "grad_norm": 0.4324108362197876, + "learning_rate": 4.964927860792566e-05, + "loss": 0.138, + "step": 58410 + }, + { + "epoch": 2.2556855477045445, + "grad_norm": 1.147596001625061, + "learning_rate": 4.9623537588323875e-05, + "loss": 0.1956, + "step": 58420 + }, + { + "epoch": 2.2560716629985715, + "grad_norm": 2.516636371612549, + "learning_rate": 4.959779656872209e-05, + "loss": 0.2031, + "step": 58430 + }, + { + "epoch": 2.256457778292598, + "grad_norm": 1.1109521389007568, + "learning_rate": 4.95720555491203e-05, + "loss": 0.2845, + "step": 58440 + }, + { + "epoch": 2.256843893586625, + "grad_norm": 0.3227555453777313, + "learning_rate": 4.9546314529518515e-05, + "loss": 0.0596, + "step": 58450 + }, + { + "epoch": 2.2572300088806516, + "grad_norm": 2.5064280033111572, + "learning_rate": 4.952057350991673e-05, + "loss": 0.289, + "step": 58460 + }, + { + "epoch": 2.2576161241746786, + "grad_norm": 1.0245225429534912, + "learning_rate": 4.949483249031495e-05, + "loss": 0.1458, + "step": 58470 + }, + { + "epoch": 2.2580022394687056, + "grad_norm": 0.058567408472299576, + "learning_rate": 4.9469091470713155e-05, + "loss": 0.2719, + "step": 58480 + }, + { + "epoch": 2.258388354762732, + "grad_norm": 2.1362061500549316, + "learning_rate": 4.944335045111137e-05, + "loss": 0.3814, + "step": 58490 + }, + { + "epoch": 2.258774470056759, + "grad_norm": 0.363843709230423, + "learning_rate": 4.941760943150959e-05, + "loss": 0.0669, + "step": 58500 + }, + { + "epoch": 2.2591605853507857, + "grad_norm": 0.1753295511007309, + "learning_rate": 4.93918684119078e-05, + "loss": 0.1246, + "step": 58510 + }, + { + "epoch": 2.2595467006448127, + "grad_norm": 1.6673377752304077, + "learning_rate": 4.936612739230601e-05, + "loss": 0.2781, + "step": 58520 + }, + { + "epoch": 2.259932815938839, + "grad_norm": 3.1135804653167725, + "learning_rate": 4.934038637270422e-05, + "loss": 0.1951, + "step": 58530 + }, + { + "epoch": 2.260318931232866, + "grad_norm": 0.8234933614730835, + "learning_rate": 4.931464535310244e-05, + "loss": 0.11, + "step": 58540 + }, + { + "epoch": 2.2607050465268927, + "grad_norm": 1.0099560022354126, + "learning_rate": 4.928890433350065e-05, + "loss": 0.2741, + "step": 58550 + }, + { + "epoch": 2.2610911618209197, + "grad_norm": 2.2589969635009766, + "learning_rate": 4.926316331389886e-05, + "loss": 0.1243, + "step": 58560 + }, + { + "epoch": 2.2614772771149463, + "grad_norm": 0.5491199493408203, + "learning_rate": 
4.923742229429708e-05, + "loss": 0.1302, + "step": 58570 + }, + { + "epoch": 2.2618633924089733, + "grad_norm": 0.9286119937896729, + "learning_rate": 4.9211681274695295e-05, + "loss": 0.1047, + "step": 58580 + }, + { + "epoch": 2.2622495077030003, + "grad_norm": 0.6178199052810669, + "learning_rate": 4.91859402550935e-05, + "loss": 0.1143, + "step": 58590 + }, + { + "epoch": 2.262635622997027, + "grad_norm": 1.6250818967819214, + "learning_rate": 4.916019923549172e-05, + "loss": 0.2461, + "step": 58600 + }, + { + "epoch": 2.263021738291054, + "grad_norm": 1.1366840600967407, + "learning_rate": 4.9134458215889935e-05, + "loss": 0.2128, + "step": 58610 + }, + { + "epoch": 2.2634078535850803, + "grad_norm": 0.38859716057777405, + "learning_rate": 4.910871719628815e-05, + "loss": 0.1476, + "step": 58620 + }, + { + "epoch": 2.2637939688791073, + "grad_norm": 0.02930479310452938, + "learning_rate": 4.908297617668636e-05, + "loss": 0.1901, + "step": 58630 + }, + { + "epoch": 2.264180084173134, + "grad_norm": 1.4426459074020386, + "learning_rate": 4.9057235157084575e-05, + "loss": 0.1736, + "step": 58640 + }, + { + "epoch": 2.264566199467161, + "grad_norm": 1.103959321975708, + "learning_rate": 4.903149413748279e-05, + "loss": 0.2207, + "step": 58650 + }, + { + "epoch": 2.264952314761188, + "grad_norm": 3.1351921558380127, + "learning_rate": 4.9005753117881e-05, + "loss": 0.394, + "step": 58660 + }, + { + "epoch": 2.2653384300552144, + "grad_norm": 0.33870574831962585, + "learning_rate": 4.8980012098279215e-05, + "loss": 0.1958, + "step": 58670 + }, + { + "epoch": 2.2657245453492414, + "grad_norm": 0.08599444478750229, + "learning_rate": 4.895427107867743e-05, + "loss": 0.0842, + "step": 58680 + }, + { + "epoch": 2.266110660643268, + "grad_norm": 0.7150046229362488, + "learning_rate": 4.892853005907564e-05, + "loss": 0.1917, + "step": 58690 + }, + { + "epoch": 2.266496775937295, + "grad_norm": 1.561062216758728, + "learning_rate": 4.8902789039473855e-05, + "loss": 0.2327, + "step": 58700 + }, + { + "epoch": 2.2668828912313215, + "grad_norm": 1.3899431228637695, + "learning_rate": 4.887704801987207e-05, + "loss": 0.1456, + "step": 58710 + }, + { + "epoch": 2.2672690065253485, + "grad_norm": 0.5647567510604858, + "learning_rate": 4.885130700027029e-05, + "loss": 0.2036, + "step": 58720 + }, + { + "epoch": 2.2676551218193755, + "grad_norm": 0.2155967652797699, + "learning_rate": 4.8825565980668495e-05, + "loss": 0.1159, + "step": 58730 + }, + { + "epoch": 2.268041237113402, + "grad_norm": 1.0128939151763916, + "learning_rate": 4.879982496106671e-05, + "loss": 0.1342, + "step": 58740 + }, + { + "epoch": 2.268427352407429, + "grad_norm": 1.079142689704895, + "learning_rate": 4.877408394146493e-05, + "loss": 0.1689, + "step": 58750 + }, + { + "epoch": 2.2688134677014555, + "grad_norm": 1.2162476778030396, + "learning_rate": 4.874834292186314e-05, + "loss": 0.2256, + "step": 58760 + }, + { + "epoch": 2.2691995829954825, + "grad_norm": 1.6972836256027222, + "learning_rate": 4.872260190226135e-05, + "loss": 0.1517, + "step": 58770 + }, + { + "epoch": 2.269585698289509, + "grad_norm": 1.4847822189331055, + "learning_rate": 4.869686088265956e-05, + "loss": 0.2296, + "step": 58780 + }, + { + "epoch": 2.269971813583536, + "grad_norm": 1.7321871519088745, + "learning_rate": 4.867111986305778e-05, + "loss": 0.2396, + "step": 58790 + }, + { + "epoch": 2.270357928877563, + "grad_norm": 1.468248724937439, + "learning_rate": 4.864537884345599e-05, + "loss": 0.1501, + "step": 58800 + }, + { + "epoch": 
2.2707440441715896, + "grad_norm": 1.125684380531311, + "learning_rate": 4.86196378238542e-05, + "loss": 0.2444, + "step": 58810 + }, + { + "epoch": 2.2711301594656166, + "grad_norm": 2.3958170413970947, + "learning_rate": 4.859389680425242e-05, + "loss": 0.3088, + "step": 58820 + }, + { + "epoch": 2.271516274759643, + "grad_norm": 0.8419416546821594, + "learning_rate": 4.8568155784650635e-05, + "loss": 0.1215, + "step": 58830 + }, + { + "epoch": 2.27190239005367, + "grad_norm": 0.3124147057533264, + "learning_rate": 4.854241476504884e-05, + "loss": 0.2069, + "step": 58840 + }, + { + "epoch": 2.2722885053476967, + "grad_norm": 0.6150888204574585, + "learning_rate": 4.851667374544706e-05, + "loss": 0.0483, + "step": 58850 + }, + { + "epoch": 2.2726746206417237, + "grad_norm": 0.7708920836448669, + "learning_rate": 4.8490932725845275e-05, + "loss": 0.2787, + "step": 58860 + }, + { + "epoch": 2.2730607359357506, + "grad_norm": 1.123910665512085, + "learning_rate": 4.846519170624348e-05, + "loss": 0.1875, + "step": 58870 + }, + { + "epoch": 2.273446851229777, + "grad_norm": 1.7842246294021606, + "learning_rate": 4.84394506866417e-05, + "loss": 0.2003, + "step": 58880 + }, + { + "epoch": 2.273832966523804, + "grad_norm": 0.09984418004751205, + "learning_rate": 4.8413709667039915e-05, + "loss": 0.0445, + "step": 58890 + }, + { + "epoch": 2.2742190818178307, + "grad_norm": 0.9539859890937805, + "learning_rate": 4.838796864743813e-05, + "loss": 0.1308, + "step": 58900 + }, + { + "epoch": 2.2746051971118577, + "grad_norm": 2.2655584812164307, + "learning_rate": 4.836222762783634e-05, + "loss": 0.1728, + "step": 58910 + }, + { + "epoch": 2.2749913124058843, + "grad_norm": 0.8873695731163025, + "learning_rate": 4.8336486608234555e-05, + "loss": 0.2559, + "step": 58920 + }, + { + "epoch": 2.2753774276999112, + "grad_norm": 0.6130178570747375, + "learning_rate": 4.831074558863277e-05, + "loss": 0.0859, + "step": 58930 + }, + { + "epoch": 2.2757635429939382, + "grad_norm": 2.61225962638855, + "learning_rate": 4.828500456903098e-05, + "loss": 0.1612, + "step": 58940 + }, + { + "epoch": 2.276149658287965, + "grad_norm": 1.187378168106079, + "learning_rate": 4.8259263549429195e-05, + "loss": 0.1222, + "step": 58950 + }, + { + "epoch": 2.2765357735819918, + "grad_norm": 0.3115352392196655, + "learning_rate": 4.823352252982741e-05, + "loss": 0.2081, + "step": 58960 + }, + { + "epoch": 2.2769218888760183, + "grad_norm": 1.1406041383743286, + "learning_rate": 4.820778151022563e-05, + "loss": 0.1137, + "step": 58970 + }, + { + "epoch": 2.2773080041700453, + "grad_norm": 2.415290355682373, + "learning_rate": 4.8182040490623834e-05, + "loss": 0.2498, + "step": 58980 + }, + { + "epoch": 2.277694119464072, + "grad_norm": 0.7312545776367188, + "learning_rate": 4.815629947102205e-05, + "loss": 0.2019, + "step": 58990 + }, + { + "epoch": 2.278080234758099, + "grad_norm": 0.3095935583114624, + "learning_rate": 4.813055845142027e-05, + "loss": 0.0833, + "step": 59000 + }, + { + "epoch": 2.2784663500521254, + "grad_norm": 2.551358938217163, + "learning_rate": 4.8104817431818474e-05, + "loss": 0.1599, + "step": 59010 + }, + { + "epoch": 2.2788524653461524, + "grad_norm": 0.8552582859992981, + "learning_rate": 4.807907641221669e-05, + "loss": 0.1445, + "step": 59020 + }, + { + "epoch": 2.279238580640179, + "grad_norm": 0.5667589902877808, + "learning_rate": 4.80533353926149e-05, + "loss": 0.1746, + "step": 59030 + }, + { + "epoch": 2.279624695934206, + "grad_norm": 1.4083415269851685, + "learning_rate": 
4.802759437301312e-05, + "loss": 0.1645, + "step": 59040 + }, + { + "epoch": 2.280010811228233, + "grad_norm": 0.04097180813550949, + "learning_rate": 4.800185335341133e-05, + "loss": 0.1898, + "step": 59050 + }, + { + "epoch": 2.2803969265222594, + "grad_norm": 0.019375400617718697, + "learning_rate": 4.797611233380954e-05, + "loss": 0.1723, + "step": 59060 + }, + { + "epoch": 2.2807830418162864, + "grad_norm": 1.4372104406356812, + "learning_rate": 4.795037131420776e-05, + "loss": 0.1243, + "step": 59070 + }, + { + "epoch": 2.281169157110313, + "grad_norm": 0.9807009696960449, + "learning_rate": 4.7924630294605974e-05, + "loss": 0.2997, + "step": 59080 + }, + { + "epoch": 2.28155527240434, + "grad_norm": 0.48975950479507446, + "learning_rate": 4.789888927500418e-05, + "loss": 0.1613, + "step": 59090 + }, + { + "epoch": 2.2819413876983665, + "grad_norm": 2.779517412185669, + "learning_rate": 4.78731482554024e-05, + "loss": 0.2774, + "step": 59100 + }, + { + "epoch": 2.2823275029923935, + "grad_norm": 1.5717260837554932, + "learning_rate": 4.7847407235800614e-05, + "loss": 0.1473, + "step": 59110 + }, + { + "epoch": 2.2827136182864205, + "grad_norm": 0.19590778648853302, + "learning_rate": 4.782166621619882e-05, + "loss": 0.2223, + "step": 59120 + }, + { + "epoch": 2.283099733580447, + "grad_norm": 1.9272565841674805, + "learning_rate": 4.779592519659704e-05, + "loss": 0.1101, + "step": 59130 + }, + { + "epoch": 2.283485848874474, + "grad_norm": 1.9476497173309326, + "learning_rate": 4.7770184176995254e-05, + "loss": 0.1627, + "step": 59140 + }, + { + "epoch": 2.2838719641685006, + "grad_norm": 4.072344779968262, + "learning_rate": 4.774444315739347e-05, + "loss": 0.2169, + "step": 59150 + }, + { + "epoch": 2.2842580794625276, + "grad_norm": 2.180009603500366, + "learning_rate": 4.771870213779168e-05, + "loss": 0.1348, + "step": 59160 + }, + { + "epoch": 2.284644194756554, + "grad_norm": 1.6288330554962158, + "learning_rate": 4.7692961118189894e-05, + "loss": 0.157, + "step": 59170 + }, + { + "epoch": 2.285030310050581, + "grad_norm": 0.8936790227890015, + "learning_rate": 4.766722009858811e-05, + "loss": 0.2966, + "step": 59180 + }, + { + "epoch": 2.285416425344608, + "grad_norm": 0.34459662437438965, + "learning_rate": 4.764147907898632e-05, + "loss": 0.2497, + "step": 59190 + }, + { + "epoch": 2.2858025406386346, + "grad_norm": 1.2364985942840576, + "learning_rate": 4.7615738059384534e-05, + "loss": 0.1951, + "step": 59200 + }, + { + "epoch": 2.2861886559326616, + "grad_norm": 0.43685224652290344, + "learning_rate": 4.758999703978275e-05, + "loss": 0.1707, + "step": 59210 + }, + { + "epoch": 2.286574771226688, + "grad_norm": 1.5797430276870728, + "learning_rate": 4.756425602018096e-05, + "loss": 0.1328, + "step": 59220 + }, + { + "epoch": 2.286960886520715, + "grad_norm": 1.0080262422561646, + "learning_rate": 4.7538515000579174e-05, + "loss": 0.2743, + "step": 59230 + }, + { + "epoch": 2.2873470018147417, + "grad_norm": 2.561823844909668, + "learning_rate": 4.751277398097739e-05, + "loss": 0.3414, + "step": 59240 + }, + { + "epoch": 2.2877331171087687, + "grad_norm": 1.4492895603179932, + "learning_rate": 4.748703296137561e-05, + "loss": 0.1942, + "step": 59250 + }, + { + "epoch": 2.2881192324027957, + "grad_norm": 2.6348021030426025, + "learning_rate": 4.7461291941773814e-05, + "loss": 0.2293, + "step": 59260 + }, + { + "epoch": 2.2885053476968222, + "grad_norm": 0.5275348424911499, + "learning_rate": 4.743555092217203e-05, + "loss": 0.1436, + "step": 59270 + }, + { + "epoch": 
2.2888914629908492, + "grad_norm": 0.3320735991001129, + "learning_rate": 4.740980990257025e-05, + "loss": 0.3213, + "step": 59280 + }, + { + "epoch": 2.2892775782848758, + "grad_norm": 0.33580052852630615, + "learning_rate": 4.738406888296846e-05, + "loss": 0.0988, + "step": 59290 + }, + { + "epoch": 2.2896636935789028, + "grad_norm": 0.6209644079208374, + "learning_rate": 4.735832786336667e-05, + "loss": 0.0969, + "step": 59300 + }, + { + "epoch": 2.2900498088729293, + "grad_norm": 0.8504312038421631, + "learning_rate": 4.733258684376488e-05, + "loss": 0.1328, + "step": 59310 + }, + { + "epoch": 2.2904359241669563, + "grad_norm": 1.8116223812103271, + "learning_rate": 4.73068458241631e-05, + "loss": 0.2032, + "step": 59320 + }, + { + "epoch": 2.2908220394609833, + "grad_norm": 3.810727596282959, + "learning_rate": 4.728110480456131e-05, + "loss": 0.2162, + "step": 59330 + }, + { + "epoch": 2.29120815475501, + "grad_norm": 0.39549925923347473, + "learning_rate": 4.725536378495952e-05, + "loss": 0.1421, + "step": 59340 + }, + { + "epoch": 2.291594270049037, + "grad_norm": 0.6922689080238342, + "learning_rate": 4.722962276535774e-05, + "loss": 0.23, + "step": 59350 + }, + { + "epoch": 2.2919803853430634, + "grad_norm": 2.8557536602020264, + "learning_rate": 4.7203881745755954e-05, + "loss": 0.2824, + "step": 59360 + }, + { + "epoch": 2.2923665006370904, + "grad_norm": 1.2936065196990967, + "learning_rate": 4.717814072615416e-05, + "loss": 0.2515, + "step": 59370 + }, + { + "epoch": 2.292752615931117, + "grad_norm": 0.17199493944644928, + "learning_rate": 4.715239970655238e-05, + "loss": 0.1029, + "step": 59380 + }, + { + "epoch": 2.293138731225144, + "grad_norm": 0.06957222521305084, + "learning_rate": 4.7126658686950594e-05, + "loss": 0.1535, + "step": 59390 + }, + { + "epoch": 2.2935248465191704, + "grad_norm": 0.24227716028690338, + "learning_rate": 4.710091766734881e-05, + "loss": 0.2497, + "step": 59400 + }, + { + "epoch": 2.2939109618131974, + "grad_norm": 0.49250227212905884, + "learning_rate": 4.707517664774702e-05, + "loss": 0.1416, + "step": 59410 + }, + { + "epoch": 2.294297077107224, + "grad_norm": 0.2832399010658264, + "learning_rate": 4.7049435628145234e-05, + "loss": 0.1156, + "step": 59420 + }, + { + "epoch": 2.294683192401251, + "grad_norm": 0.6916882395744324, + "learning_rate": 4.702369460854345e-05, + "loss": 0.315, + "step": 59430 + }, + { + "epoch": 2.295069307695278, + "grad_norm": 2.099567174911499, + "learning_rate": 4.699795358894166e-05, + "loss": 0.2208, + "step": 59440 + }, + { + "epoch": 2.2954554229893045, + "grad_norm": 0.9960312843322754, + "learning_rate": 4.6972212569339874e-05, + "loss": 0.2152, + "step": 59450 + }, + { + "epoch": 2.2958415382833315, + "grad_norm": 0.8338409662246704, + "learning_rate": 4.694647154973809e-05, + "loss": 0.1238, + "step": 59460 + }, + { + "epoch": 2.296227653577358, + "grad_norm": 1.1573169231414795, + "learning_rate": 4.69207305301363e-05, + "loss": 0.1141, + "step": 59470 + }, + { + "epoch": 2.296613768871385, + "grad_norm": 0.639020562171936, + "learning_rate": 4.6894989510534514e-05, + "loss": 0.1143, + "step": 59480 + }, + { + "epoch": 2.2969998841654116, + "grad_norm": 1.2339776754379272, + "learning_rate": 4.686924849093273e-05, + "loss": 0.0832, + "step": 59490 + }, + { + "epoch": 2.2973859994594386, + "grad_norm": 0.5958113074302673, + "learning_rate": 4.684350747133095e-05, + "loss": 0.1435, + "step": 59500 + }, + { + "epoch": 2.2977721147534655, + "grad_norm": 0.36563804745674133, + "learning_rate": 
4.6817766451729154e-05, + "loss": 0.21, + "step": 59510 + }, + { + "epoch": 2.298158230047492, + "grad_norm": 0.8008506894111633, + "learning_rate": 4.679202543212737e-05, + "loss": 0.2525, + "step": 59520 + }, + { + "epoch": 2.298544345341519, + "grad_norm": 1.644982099533081, + "learning_rate": 4.676628441252559e-05, + "loss": 0.1489, + "step": 59530 + }, + { + "epoch": 2.2989304606355456, + "grad_norm": 0.8418545126914978, + "learning_rate": 4.6740543392923793e-05, + "loss": 0.1275, + "step": 59540 + }, + { + "epoch": 2.2993165759295726, + "grad_norm": 1.7769790887832642, + "learning_rate": 4.671480237332201e-05, + "loss": 0.2149, + "step": 59550 + }, + { + "epoch": 2.299702691223599, + "grad_norm": 1.3088769912719727, + "learning_rate": 4.668906135372022e-05, + "loss": 0.3798, + "step": 59560 + }, + { + "epoch": 2.300088806517626, + "grad_norm": 0.0827949270606041, + "learning_rate": 4.666332033411844e-05, + "loss": 0.2128, + "step": 59570 + }, + { + "epoch": 2.300474921811653, + "grad_norm": 0.7772637605667114, + "learning_rate": 4.663757931451665e-05, + "loss": 0.1731, + "step": 59580 + }, + { + "epoch": 2.3008610371056797, + "grad_norm": 3.8065452575683594, + "learning_rate": 4.661183829491486e-05, + "loss": 0.3464, + "step": 59590 + }, + { + "epoch": 2.3012471523997067, + "grad_norm": 0.10512294620275497, + "learning_rate": 4.658609727531308e-05, + "loss": 0.1584, + "step": 59600 + }, + { + "epoch": 2.301633267693733, + "grad_norm": 1.227293848991394, + "learning_rate": 4.6560356255711293e-05, + "loss": 0.2381, + "step": 59610 + }, + { + "epoch": 2.30201938298776, + "grad_norm": 0.2790459394454956, + "learning_rate": 4.65346152361095e-05, + "loss": 0.162, + "step": 59620 + }, + { + "epoch": 2.3024054982817868, + "grad_norm": 0.571444034576416, + "learning_rate": 4.650887421650772e-05, + "loss": 0.1354, + "step": 59630 + }, + { + "epoch": 2.3027916135758137, + "grad_norm": 0.6388635635375977, + "learning_rate": 4.648313319690593e-05, + "loss": 0.0969, + "step": 59640 + }, + { + "epoch": 2.3031777288698407, + "grad_norm": 3.020249843597412, + "learning_rate": 4.645739217730414e-05, + "loss": 0.15, + "step": 59650 + }, + { + "epoch": 2.3035638441638673, + "grad_norm": 1.6712716817855835, + "learning_rate": 4.643165115770236e-05, + "loss": 0.1445, + "step": 59660 + }, + { + "epoch": 2.3039499594578943, + "grad_norm": 0.4937114119529724, + "learning_rate": 4.640591013810057e-05, + "loss": 0.0823, + "step": 59670 + }, + { + "epoch": 2.304336074751921, + "grad_norm": 1.7938934564590454, + "learning_rate": 4.6380169118498787e-05, + "loss": 0.164, + "step": 59680 + }, + { + "epoch": 2.304722190045948, + "grad_norm": 1.9819937944412231, + "learning_rate": 4.6354428098897e-05, + "loss": 0.1753, + "step": 59690 + }, + { + "epoch": 2.3051083053399744, + "grad_norm": 1.2775839567184448, + "learning_rate": 4.632868707929521e-05, + "loss": 0.1875, + "step": 59700 + }, + { + "epoch": 2.3054944206340013, + "grad_norm": 0.775601863861084, + "learning_rate": 4.6302946059693427e-05, + "loss": 0.0737, + "step": 59710 + }, + { + "epoch": 2.3058805359280283, + "grad_norm": 0.9071961045265198, + "learning_rate": 4.627720504009164e-05, + "loss": 0.1632, + "step": 59720 + }, + { + "epoch": 2.306266651222055, + "grad_norm": 1.4232882261276245, + "learning_rate": 4.625146402048985e-05, + "loss": 0.2169, + "step": 59730 + }, + { + "epoch": 2.306652766516082, + "grad_norm": 0.5946634411811829, + "learning_rate": 4.6225723000888066e-05, + "loss": 0.1919, + "step": 59740 + }, + { + "epoch": 2.3070388818101084, + 
"grad_norm": 0.14276131987571716, + "learning_rate": 4.6199981981286287e-05, + "loss": 0.1415, + "step": 59750 + }, + { + "epoch": 2.3074249971041354, + "grad_norm": 1.5616459846496582, + "learning_rate": 4.617424096168449e-05, + "loss": 0.1668, + "step": 59760 + }, + { + "epoch": 2.307811112398162, + "grad_norm": 0.34254691004753113, + "learning_rate": 4.6148499942082706e-05, + "loss": 0.2122, + "step": 59770 + }, + { + "epoch": 2.308197227692189, + "grad_norm": 0.34764161705970764, + "learning_rate": 4.6122758922480926e-05, + "loss": 0.1604, + "step": 59780 + }, + { + "epoch": 2.308583342986216, + "grad_norm": 3.1887755393981934, + "learning_rate": 4.609701790287913e-05, + "loss": 0.124, + "step": 59790 + }, + { + "epoch": 2.3089694582802425, + "grad_norm": 0.8155665397644043, + "learning_rate": 4.6071276883277346e-05, + "loss": 0.1095, + "step": 59800 + }, + { + "epoch": 2.3093555735742695, + "grad_norm": 0.2734861373901367, + "learning_rate": 4.6045535863675566e-05, + "loss": 0.2389, + "step": 59810 + }, + { + "epoch": 2.309741688868296, + "grad_norm": 0.5696996450424194, + "learning_rate": 4.601979484407378e-05, + "loss": 0.0787, + "step": 59820 + }, + { + "epoch": 2.310127804162323, + "grad_norm": 0.49777019023895264, + "learning_rate": 4.5994053824471986e-05, + "loss": 0.1815, + "step": 59830 + }, + { + "epoch": 2.3105139194563495, + "grad_norm": 0.001843929523602128, + "learning_rate": 4.59683128048702e-05, + "loss": 0.0834, + "step": 59840 + }, + { + "epoch": 2.3109000347503765, + "grad_norm": 0.1088326945900917, + "learning_rate": 4.594257178526842e-05, + "loss": 0.1821, + "step": 59850 + }, + { + "epoch": 2.311286150044403, + "grad_norm": 0.4867718815803528, + "learning_rate": 4.591683076566663e-05, + "loss": 0.2175, + "step": 59860 + }, + { + "epoch": 2.31167226533843, + "grad_norm": 0.759501576423645, + "learning_rate": 4.589108974606484e-05, + "loss": 0.1041, + "step": 59870 + }, + { + "epoch": 2.3120583806324566, + "grad_norm": 1.3260136842727661, + "learning_rate": 4.586534872646306e-05, + "loss": 0.2017, + "step": 59880 + }, + { + "epoch": 2.3124444959264836, + "grad_norm": 3.664853572845459, + "learning_rate": 4.583960770686127e-05, + "loss": 0.2322, + "step": 59890 + }, + { + "epoch": 2.3128306112205106, + "grad_norm": 0.9447748064994812, + "learning_rate": 4.581386668725948e-05, + "loss": 0.1124, + "step": 59900 + }, + { + "epoch": 2.313216726514537, + "grad_norm": 2.8684070110321045, + "learning_rate": 4.57881256676577e-05, + "loss": 0.1271, + "step": 59910 + }, + { + "epoch": 2.313602841808564, + "grad_norm": 2.4595460891723633, + "learning_rate": 4.576238464805591e-05, + "loss": 0.1695, + "step": 59920 + }, + { + "epoch": 2.3139889571025907, + "grad_norm": 0.8317721486091614, + "learning_rate": 4.5736643628454126e-05, + "loss": 0.1341, + "step": 59930 + }, + { + "epoch": 2.3143750723966177, + "grad_norm": 1.8924741744995117, + "learning_rate": 4.571090260885234e-05, + "loss": 0.3389, + "step": 59940 + }, + { + "epoch": 2.314761187690644, + "grad_norm": 0.22777517139911652, + "learning_rate": 4.568516158925055e-05, + "loss": 0.1406, + "step": 59950 + }, + { + "epoch": 2.315147302984671, + "grad_norm": 1.9150850772857666, + "learning_rate": 4.5659420569648766e-05, + "loss": 0.1615, + "step": 59960 + }, + { + "epoch": 2.315533418278698, + "grad_norm": 1.361603856086731, + "learning_rate": 4.563367955004698e-05, + "loss": 0.1616, + "step": 59970 + }, + { + "epoch": 2.3159195335727247, + "grad_norm": 1.1094087362289429, + "learning_rate": 4.560793853044519e-05, + "loss": 
0.1923, + "step": 59980 + }, + { + "epoch": 2.3163056488667517, + "grad_norm": 0.5429170727729797, + "learning_rate": 4.5582197510843406e-05, + "loss": 0.2126, + "step": 59990 + }, + { + "epoch": 2.3166917641607783, + "grad_norm": 0.8391672968864441, + "learning_rate": 4.555645649124162e-05, + "loss": 0.246, + "step": 60000 + }, + { + "epoch": 2.3170778794548053, + "grad_norm": 0.6325327157974243, + "learning_rate": 4.553071547163983e-05, + "loss": 0.1385, + "step": 60010 + }, + { + "epoch": 2.317463994748832, + "grad_norm": 0.4999966025352478, + "learning_rate": 4.5504974452038046e-05, + "loss": 0.2083, + "step": 60020 + }, + { + "epoch": 2.317850110042859, + "grad_norm": 2.6003923416137695, + "learning_rate": 4.5479233432436266e-05, + "loss": 0.1553, + "step": 60030 + }, + { + "epoch": 2.318236225336886, + "grad_norm": 2.107546091079712, + "learning_rate": 4.545349241283447e-05, + "loss": 0.2252, + "step": 60040 + }, + { + "epoch": 2.3186223406309123, + "grad_norm": 0.4469972550868988, + "learning_rate": 4.5427751393232686e-05, + "loss": 0.1913, + "step": 60050 + }, + { + "epoch": 2.3190084559249393, + "grad_norm": 1.0861988067626953, + "learning_rate": 4.5402010373630906e-05, + "loss": 0.16, + "step": 60060 + }, + { + "epoch": 2.319394571218966, + "grad_norm": 1.6397405862808228, + "learning_rate": 4.537626935402912e-05, + "loss": 0.1527, + "step": 60070 + }, + { + "epoch": 2.319780686512993, + "grad_norm": 2.6963932514190674, + "learning_rate": 4.5350528334427326e-05, + "loss": 0.2445, + "step": 60080 + }, + { + "epoch": 2.3201668018070194, + "grad_norm": 1.8489919900894165, + "learning_rate": 4.532478731482554e-05, + "loss": 0.1972, + "step": 60090 + }, + { + "epoch": 2.3205529171010464, + "grad_norm": 1.108892560005188, + "learning_rate": 4.529904629522376e-05, + "loss": 0.1301, + "step": 60100 + }, + { + "epoch": 2.3209390323950734, + "grad_norm": 1.8046364784240723, + "learning_rate": 4.5273305275621966e-05, + "loss": 0.184, + "step": 60110 + }, + { + "epoch": 2.3213251476891, + "grad_norm": 0.346176415681839, + "learning_rate": 4.524756425602018e-05, + "loss": 0.0723, + "step": 60120 + }, + { + "epoch": 2.321711262983127, + "grad_norm": 0.8505159020423889, + "learning_rate": 4.52218232364184e-05, + "loss": 0.1994, + "step": 60130 + }, + { + "epoch": 2.3220973782771535, + "grad_norm": 1.0866034030914307, + "learning_rate": 4.519608221681661e-05, + "loss": 0.097, + "step": 60140 + }, + { + "epoch": 2.3224834935711804, + "grad_norm": 2.6288974285125732, + "learning_rate": 4.517034119721482e-05, + "loss": 0.1419, + "step": 60150 + }, + { + "epoch": 2.322869608865207, + "grad_norm": 0.4779375195503235, + "learning_rate": 4.514460017761304e-05, + "loss": 0.0999, + "step": 60160 + }, + { + "epoch": 2.323255724159234, + "grad_norm": 1.1789641380310059, + "learning_rate": 4.511885915801125e-05, + "loss": 0.1184, + "step": 60170 + }, + { + "epoch": 2.323641839453261, + "grad_norm": 0.409001886844635, + "learning_rate": 4.5093118138409466e-05, + "loss": 0.1913, + "step": 60180 + }, + { + "epoch": 2.3240279547472875, + "grad_norm": 1.5853062868118286, + "learning_rate": 4.506737711880768e-05, + "loss": 0.1941, + "step": 60190 + }, + { + "epoch": 2.3244140700413145, + "grad_norm": 3.547631025314331, + "learning_rate": 4.504163609920589e-05, + "loss": 0.2272, + "step": 60200 + }, + { + "epoch": 2.324800185335341, + "grad_norm": 0.3084076941013336, + "learning_rate": 4.5015895079604106e-05, + "loss": 0.1543, + "step": 60210 + }, + { + "epoch": 2.325186300629368, + "grad_norm": 
0.055422622710466385, + "learning_rate": 4.499015406000232e-05, + "loss": 0.1878, + "step": 60220 + }, + { + "epoch": 2.3255724159233946, + "grad_norm": 0.7743698954582214, + "learning_rate": 4.496441304040053e-05, + "loss": 0.2342, + "step": 60230 + }, + { + "epoch": 2.3259585312174216, + "grad_norm": 0.940558671951294, + "learning_rate": 4.4938672020798746e-05, + "loss": 0.3158, + "step": 60240 + }, + { + "epoch": 2.3263446465114486, + "grad_norm": 0.35941219329833984, + "learning_rate": 4.491293100119696e-05, + "loss": 0.15, + "step": 60250 + }, + { + "epoch": 2.326730761805475, + "grad_norm": 0.5877255201339722, + "learning_rate": 4.488718998159517e-05, + "loss": 0.2713, + "step": 60260 + }, + { + "epoch": 2.327116877099502, + "grad_norm": 1.0649851560592651, + "learning_rate": 4.4861448961993386e-05, + "loss": 0.1789, + "step": 60270 + }, + { + "epoch": 2.3275029923935286, + "grad_norm": 0.9025186896324158, + "learning_rate": 4.4835707942391606e-05, + "loss": 0.0883, + "step": 60280 + }, + { + "epoch": 2.3278891076875556, + "grad_norm": 0.08007670938968658, + "learning_rate": 4.480996692278981e-05, + "loss": 0.2019, + "step": 60290 + }, + { + "epoch": 2.328275222981582, + "grad_norm": 0.2300902009010315, + "learning_rate": 4.4784225903188025e-05, + "loss": 0.1448, + "step": 60300 + }, + { + "epoch": 2.328661338275609, + "grad_norm": 0.3912908732891083, + "learning_rate": 4.4758484883586246e-05, + "loss": 0.1869, + "step": 60310 + }, + { + "epoch": 2.3290474535696357, + "grad_norm": 0.5869823098182678, + "learning_rate": 4.473274386398445e-05, + "loss": 0.0829, + "step": 60320 + }, + { + "epoch": 2.3294335688636627, + "grad_norm": 0.5597032308578491, + "learning_rate": 4.4707002844382665e-05, + "loss": 0.1639, + "step": 60330 + }, + { + "epoch": 2.3298196841576893, + "grad_norm": 0.3138620853424072, + "learning_rate": 4.468126182478088e-05, + "loss": 0.1028, + "step": 60340 + }, + { + "epoch": 2.3302057994517162, + "grad_norm": 0.10838694870471954, + "learning_rate": 4.46555208051791e-05, + "loss": 0.1406, + "step": 60350 + }, + { + "epoch": 2.3305919147457432, + "grad_norm": 1.1807544231414795, + "learning_rate": 4.4629779785577305e-05, + "loss": 0.1183, + "step": 60360 + }, + { + "epoch": 2.3309780300397698, + "grad_norm": 0.2713087499141693, + "learning_rate": 4.460403876597552e-05, + "loss": 0.108, + "step": 60370 + }, + { + "epoch": 2.3313641453337968, + "grad_norm": 0.8787125945091248, + "learning_rate": 4.457829774637374e-05, + "loss": 0.2446, + "step": 60380 + }, + { + "epoch": 2.3317502606278233, + "grad_norm": 1.3905388116836548, + "learning_rate": 4.455255672677195e-05, + "loss": 0.1485, + "step": 60390 + }, + { + "epoch": 2.3321363759218503, + "grad_norm": 1.5408064126968384, + "learning_rate": 4.452681570717016e-05, + "loss": 0.2076, + "step": 60400 + }, + { + "epoch": 2.332522491215877, + "grad_norm": 0.034131214022636414, + "learning_rate": 4.450107468756838e-05, + "loss": 0.0905, + "step": 60410 + }, + { + "epoch": 2.332908606509904, + "grad_norm": 1.067578673362732, + "learning_rate": 4.447533366796659e-05, + "loss": 0.1221, + "step": 60420 + }, + { + "epoch": 2.333294721803931, + "grad_norm": 1.5126134157180786, + "learning_rate": 4.44495926483648e-05, + "loss": 0.5139, + "step": 60430 + }, + { + "epoch": 2.3336808370979574, + "grad_norm": 3.148021697998047, + "learning_rate": 4.442385162876302e-05, + "loss": 0.2504, + "step": 60440 + }, + { + "epoch": 2.3340669523919844, + "grad_norm": 0.7882575392723083, + "learning_rate": 4.439811060916123e-05, + "loss": 0.2593, + 
"step": 60450 + }, + { + "epoch": 2.334453067686011, + "grad_norm": 1.124514102935791, + "learning_rate": 4.4372369589559445e-05, + "loss": 0.303, + "step": 60460 + }, + { + "epoch": 2.334839182980038, + "grad_norm": 0.064745232462883, + "learning_rate": 4.434662856995766e-05, + "loss": 0.1734, + "step": 60470 + }, + { + "epoch": 2.3352252982740644, + "grad_norm": 0.08392655104398727, + "learning_rate": 4.432088755035587e-05, + "loss": 0.1492, + "step": 60480 + }, + { + "epoch": 2.3356114135680914, + "grad_norm": 0.9123765230178833, + "learning_rate": 4.4295146530754085e-05, + "loss": 0.1689, + "step": 60490 + }, + { + "epoch": 2.3359975288621184, + "grad_norm": 1.0740617513656616, + "learning_rate": 4.42694055111523e-05, + "loss": 0.1954, + "step": 60500 + }, + { + "epoch": 2.336383644156145, + "grad_norm": 0.409637987613678, + "learning_rate": 4.424366449155051e-05, + "loss": 0.2164, + "step": 60510 + }, + { + "epoch": 2.336769759450172, + "grad_norm": 2.4724161624908447, + "learning_rate": 4.4217923471948725e-05, + "loss": 0.2736, + "step": 60520 + }, + { + "epoch": 2.3371558747441985, + "grad_norm": 1.966937780380249, + "learning_rate": 4.4192182452346945e-05, + "loss": 0.2463, + "step": 60530 + }, + { + "epoch": 2.3375419900382255, + "grad_norm": 1.0093505382537842, + "learning_rate": 4.416644143274515e-05, + "loss": 0.1524, + "step": 60540 + }, + { + "epoch": 2.337928105332252, + "grad_norm": 0.9283536076545715, + "learning_rate": 4.4140700413143365e-05, + "loss": 0.1174, + "step": 60550 + }, + { + "epoch": 2.338314220626279, + "grad_norm": 1.4851809740066528, + "learning_rate": 4.4114959393541585e-05, + "loss": 0.1088, + "step": 60560 + }, + { + "epoch": 2.338700335920306, + "grad_norm": 2.666454315185547, + "learning_rate": 4.408921837393979e-05, + "loss": 0.186, + "step": 60570 + }, + { + "epoch": 2.3390864512143326, + "grad_norm": 0.34516963362693787, + "learning_rate": 4.4063477354338005e-05, + "loss": 0.1364, + "step": 60580 + }, + { + "epoch": 2.3394725665083596, + "grad_norm": 1.697920560836792, + "learning_rate": 4.4037736334736225e-05, + "loss": 0.1708, + "step": 60590 + }, + { + "epoch": 2.339858681802386, + "grad_norm": 1.542663812637329, + "learning_rate": 4.401199531513444e-05, + "loss": 0.2926, + "step": 60600 + }, + { + "epoch": 2.340244797096413, + "grad_norm": 1.080894112586975, + "learning_rate": 4.3986254295532645e-05, + "loss": 0.1114, + "step": 60610 + }, + { + "epoch": 2.3406309123904396, + "grad_norm": 0.7464519739151001, + "learning_rate": 4.396051327593086e-05, + "loss": 0.2846, + "step": 60620 + }, + { + "epoch": 2.3410170276844666, + "grad_norm": 1.4161779880523682, + "learning_rate": 4.393477225632908e-05, + "loss": 0.1181, + "step": 60630 + }, + { + "epoch": 2.3414031429784936, + "grad_norm": 0.5969855785369873, + "learning_rate": 4.390903123672729e-05, + "loss": 0.2574, + "step": 60640 + }, + { + "epoch": 2.34178925827252, + "grad_norm": 2.8204824924468994, + "learning_rate": 4.38832902171255e-05, + "loss": 0.1496, + "step": 60650 + }, + { + "epoch": 2.342175373566547, + "grad_norm": 0.5998751521110535, + "learning_rate": 4.385754919752372e-05, + "loss": 0.1849, + "step": 60660 + }, + { + "epoch": 2.3425614888605737, + "grad_norm": 0.5183271169662476, + "learning_rate": 4.383180817792193e-05, + "loss": 0.1577, + "step": 60670 + }, + { + "epoch": 2.3429476041546007, + "grad_norm": 3.008211374282837, + "learning_rate": 4.380606715832014e-05, + "loss": 0.2106, + "step": 60680 + }, + { + "epoch": 2.3433337194486272, + "grad_norm": 0.026534082368016243, + 
"learning_rate": 4.378032613871836e-05, + "loss": 0.0859, + "step": 60690 + }, + { + "epoch": 2.343719834742654, + "grad_norm": 0.7964476943016052, + "learning_rate": 4.375458511911657e-05, + "loss": 0.2276, + "step": 60700 + }, + { + "epoch": 2.3441059500366808, + "grad_norm": 0.9255203008651733, + "learning_rate": 4.3728844099514785e-05, + "loss": 0.1278, + "step": 60710 + }, + { + "epoch": 2.3444920653307078, + "grad_norm": 0.06721694767475128, + "learning_rate": 4.3703103079913e-05, + "loss": 0.188, + "step": 60720 + }, + { + "epoch": 2.3448781806247343, + "grad_norm": 1.5369101762771606, + "learning_rate": 4.367736206031121e-05, + "loss": 0.2286, + "step": 60730 + }, + { + "epoch": 2.3452642959187613, + "grad_norm": 2.3550243377685547, + "learning_rate": 4.3651621040709425e-05, + "loss": 0.1924, + "step": 60740 + }, + { + "epoch": 2.3456504112127883, + "grad_norm": 1.436240553855896, + "learning_rate": 4.362588002110764e-05, + "loss": 0.179, + "step": 60750 + }, + { + "epoch": 2.346036526506815, + "grad_norm": 4.520357131958008, + "learning_rate": 4.360013900150585e-05, + "loss": 0.3386, + "step": 60760 + }, + { + "epoch": 2.346422641800842, + "grad_norm": 2.983982563018799, + "learning_rate": 4.3574397981904065e-05, + "loss": 0.178, + "step": 60770 + }, + { + "epoch": 2.3468087570948684, + "grad_norm": 0.429884672164917, + "learning_rate": 4.354865696230228e-05, + "loss": 0.0962, + "step": 60780 + }, + { + "epoch": 2.3471948723888953, + "grad_norm": 0.5854440927505493, + "learning_rate": 4.352291594270049e-05, + "loss": 0.1934, + "step": 60790 + }, + { + "epoch": 2.347580987682922, + "grad_norm": 1.087031602859497, + "learning_rate": 4.3497174923098705e-05, + "loss": 0.1191, + "step": 60800 + }, + { + "epoch": 2.347967102976949, + "grad_norm": 1.5751805305480957, + "learning_rate": 4.3471433903496925e-05, + "loss": 0.147, + "step": 60810 + }, + { + "epoch": 2.348353218270976, + "grad_norm": 0.03129373490810394, + "learning_rate": 4.344569288389513e-05, + "loss": 0.1388, + "step": 60820 + }, + { + "epoch": 2.3487393335650024, + "grad_norm": 1.380611777305603, + "learning_rate": 4.3419951864293344e-05, + "loss": 0.2517, + "step": 60830 + }, + { + "epoch": 2.3491254488590294, + "grad_norm": 0.5196431279182434, + "learning_rate": 4.3394210844691565e-05, + "loss": 0.1825, + "step": 60840 + }, + { + "epoch": 2.349511564153056, + "grad_norm": 0.729242205619812, + "learning_rate": 4.336846982508978e-05, + "loss": 0.1716, + "step": 60850 + }, + { + "epoch": 2.349897679447083, + "grad_norm": 0.4489123225212097, + "learning_rate": 4.3342728805487984e-05, + "loss": 0.1318, + "step": 60860 + }, + { + "epoch": 2.3502837947411095, + "grad_norm": 0.41065114736557007, + "learning_rate": 4.33169877858862e-05, + "loss": 0.1579, + "step": 60870 + }, + { + "epoch": 2.3506699100351365, + "grad_norm": 1.2845816612243652, + "learning_rate": 4.329124676628442e-05, + "loss": 0.1613, + "step": 60880 + }, + { + "epoch": 2.3510560253291635, + "grad_norm": 1.17366623878479, + "learning_rate": 4.3265505746682624e-05, + "loss": 0.147, + "step": 60890 + }, + { + "epoch": 2.35144214062319, + "grad_norm": 1.3020472526550293, + "learning_rate": 4.323976472708084e-05, + "loss": 0.284, + "step": 60900 + }, + { + "epoch": 2.351828255917217, + "grad_norm": 0.9030712842941284, + "learning_rate": 4.321402370747906e-05, + "loss": 0.2745, + "step": 60910 + }, + { + "epoch": 2.3522143712112435, + "grad_norm": 0.5040395259857178, + "learning_rate": 4.318828268787727e-05, + "loss": 0.1001, + "step": 60920 + }, + { + "epoch": 
2.3526004865052705, + "grad_norm": 0.2728300094604492, + "learning_rate": 4.316254166827548e-05, + "loss": 0.1126, + "step": 60930 + }, + { + "epoch": 2.352986601799297, + "grad_norm": 0.5255390405654907, + "learning_rate": 4.31368006486737e-05, + "loss": 0.1743, + "step": 60940 + }, + { + "epoch": 2.353372717093324, + "grad_norm": 0.05678205192089081, + "learning_rate": 4.311105962907191e-05, + "loss": 0.2214, + "step": 60950 + }, + { + "epoch": 2.353758832387351, + "grad_norm": 0.9271873235702515, + "learning_rate": 4.3085318609470124e-05, + "loss": 0.2263, + "step": 60960 + }, + { + "epoch": 2.3541449476813776, + "grad_norm": 1.0438083410263062, + "learning_rate": 4.305957758986834e-05, + "loss": 0.3506, + "step": 60970 + }, + { + "epoch": 2.3545310629754046, + "grad_norm": 1.231704831123352, + "learning_rate": 4.303383657026655e-05, + "loss": 0.2058, + "step": 60980 + }, + { + "epoch": 2.354917178269431, + "grad_norm": 1.4421464204788208, + "learning_rate": 4.3008095550664764e-05, + "loss": 0.182, + "step": 60990 + }, + { + "epoch": 2.355303293563458, + "grad_norm": 0.883007287979126, + "learning_rate": 4.298235453106298e-05, + "loss": 0.1714, + "step": 61000 + }, + { + "epoch": 2.3556894088574847, + "grad_norm": 0.49014022946357727, + "learning_rate": 4.295661351146119e-05, + "loss": 0.0846, + "step": 61010 + }, + { + "epoch": 2.3560755241515117, + "grad_norm": 1.3218421936035156, + "learning_rate": 4.2930872491859404e-05, + "loss": 0.0946, + "step": 61020 + }, + { + "epoch": 2.3564616394455387, + "grad_norm": 0.08953634649515152, + "learning_rate": 4.290513147225762e-05, + "loss": 0.2867, + "step": 61030 + }, + { + "epoch": 2.356847754739565, + "grad_norm": 1.4117354154586792, + "learning_rate": 4.287939045265583e-05, + "loss": 0.2493, + "step": 61040 + }, + { + "epoch": 2.357233870033592, + "grad_norm": 0.7844822406768799, + "learning_rate": 4.2853649433054044e-05, + "loss": 0.2385, + "step": 61050 + }, + { + "epoch": 2.3576199853276187, + "grad_norm": 0.0865604355931282, + "learning_rate": 4.2827908413452264e-05, + "loss": 0.0795, + "step": 61060 + }, + { + "epoch": 2.3580061006216457, + "grad_norm": 0.49625343084335327, + "learning_rate": 4.280216739385047e-05, + "loss": 0.0894, + "step": 61070 + }, + { + "epoch": 2.3583922159156723, + "grad_norm": 0.039102040231227875, + "learning_rate": 4.2776426374248684e-05, + "loss": 0.1134, + "step": 61080 + }, + { + "epoch": 2.3587783312096993, + "grad_norm": 0.6378281116485596, + "learning_rate": 4.2750685354646904e-05, + "loss": 0.2716, + "step": 61090 + }, + { + "epoch": 2.3591644465037263, + "grad_norm": 1.9550119638442993, + "learning_rate": 4.272494433504511e-05, + "loss": 0.2008, + "step": 61100 + }, + { + "epoch": 2.359550561797753, + "grad_norm": 0.18372145295143127, + "learning_rate": 4.2699203315443324e-05, + "loss": 0.1762, + "step": 61110 + }, + { + "epoch": 2.35993667709178, + "grad_norm": 0.24232423305511475, + "learning_rate": 4.2673462295841544e-05, + "loss": 0.3316, + "step": 61120 + }, + { + "epoch": 2.3603227923858063, + "grad_norm": 1.474071741104126, + "learning_rate": 4.264772127623976e-05, + "loss": 0.1938, + "step": 61130 + }, + { + "epoch": 2.3607089076798333, + "grad_norm": 0.43742164969444275, + "learning_rate": 4.2621980256637964e-05, + "loss": 0.1667, + "step": 61140 + }, + { + "epoch": 2.36109502297386, + "grad_norm": 0.505805253982544, + "learning_rate": 4.259623923703618e-05, + "loss": 0.2649, + "step": 61150 + }, + { + "epoch": 2.361481138267887, + "grad_norm": 0.8793296813964844, + "learning_rate": 
4.25704982174344e-05, + "loss": 0.1231, + "step": 61160 + }, + { + "epoch": 2.3618672535619134, + "grad_norm": 0.19883646070957184, + "learning_rate": 4.254475719783261e-05, + "loss": 0.163, + "step": 61170 + }, + { + "epoch": 2.3622533688559404, + "grad_norm": 0.4133305847644806, + "learning_rate": 4.251901617823082e-05, + "loss": 0.1632, + "step": 61180 + }, + { + "epoch": 2.362639484149967, + "grad_norm": 0.8530174493789673, + "learning_rate": 4.249327515862904e-05, + "loss": 0.0851, + "step": 61190 + }, + { + "epoch": 2.363025599443994, + "grad_norm": 1.6462198495864868, + "learning_rate": 4.246753413902725e-05, + "loss": 0.2051, + "step": 61200 + }, + { + "epoch": 2.363411714738021, + "grad_norm": 1.284153699874878, + "learning_rate": 4.244179311942546e-05, + "loss": 0.227, + "step": 61210 + }, + { + "epoch": 2.3637978300320475, + "grad_norm": 0.5583304166793823, + "learning_rate": 4.241605209982368e-05, + "loss": 0.0665, + "step": 61220 + }, + { + "epoch": 2.3641839453260745, + "grad_norm": 1.0726197957992554, + "learning_rate": 4.239031108022189e-05, + "loss": 0.1465, + "step": 61230 + }, + { + "epoch": 2.364570060620101, + "grad_norm": 0.15589381754398346, + "learning_rate": 4.2364570060620104e-05, + "loss": 0.1599, + "step": 61240 + }, + { + "epoch": 2.364956175914128, + "grad_norm": 1.9759862422943115, + "learning_rate": 4.233882904101832e-05, + "loss": 0.2904, + "step": 61250 + }, + { + "epoch": 2.3653422912081545, + "grad_norm": 0.20566493272781372, + "learning_rate": 4.231308802141653e-05, + "loss": 0.2447, + "step": 61260 + }, + { + "epoch": 2.3657284065021815, + "grad_norm": 0.33343741297721863, + "learning_rate": 4.2287347001814744e-05, + "loss": 0.2157, + "step": 61270 + }, + { + "epoch": 2.3661145217962085, + "grad_norm": 0.6890573501586914, + "learning_rate": 4.226160598221296e-05, + "loss": 0.1866, + "step": 61280 + }, + { + "epoch": 2.366500637090235, + "grad_norm": 0.1372109055519104, + "learning_rate": 4.223586496261117e-05, + "loss": 0.1851, + "step": 61290 + }, + { + "epoch": 2.366886752384262, + "grad_norm": 0.8812543749809265, + "learning_rate": 4.2210123943009384e-05, + "loss": 0.1186, + "step": 61300 + }, + { + "epoch": 2.3672728676782886, + "grad_norm": 0.7651077508926392, + "learning_rate": 4.2184382923407604e-05, + "loss": 0.1168, + "step": 61310 + }, + { + "epoch": 2.3676589829723156, + "grad_norm": 0.886715292930603, + "learning_rate": 4.215864190380581e-05, + "loss": 0.1403, + "step": 61320 + }, + { + "epoch": 2.368045098266342, + "grad_norm": 1.4525467157363892, + "learning_rate": 4.2132900884204024e-05, + "loss": 0.0951, + "step": 61330 + }, + { + "epoch": 2.368431213560369, + "grad_norm": 1.490551233291626, + "learning_rate": 4.2107159864602244e-05, + "loss": 0.1127, + "step": 61340 + }, + { + "epoch": 2.368817328854396, + "grad_norm": 1.7452077865600586, + "learning_rate": 4.208141884500045e-05, + "loss": 0.0958, + "step": 61350 + }, + { + "epoch": 2.3692034441484227, + "grad_norm": 1.6857271194458008, + "learning_rate": 4.2055677825398664e-05, + "loss": 0.1731, + "step": 61360 + }, + { + "epoch": 2.3695895594424496, + "grad_norm": 0.5354145765304565, + "learning_rate": 4.2029936805796884e-05, + "loss": 0.1051, + "step": 61370 + }, + { + "epoch": 2.369975674736476, + "grad_norm": 0.18171580135822296, + "learning_rate": 4.20041957861951e-05, + "loss": 0.1761, + "step": 61380 + }, + { + "epoch": 2.370361790030503, + "grad_norm": 1.021549940109253, + "learning_rate": 4.1978454766593303e-05, + "loss": 0.1949, + "step": 61390 + }, + { + "epoch": 
2.3707479053245297, + "grad_norm": 1.4387668371200562, + "learning_rate": 4.195271374699152e-05, + "loss": 0.1864, + "step": 61400 + }, + { + "epoch": 2.3711340206185567, + "grad_norm": 0.24176666140556335, + "learning_rate": 4.192697272738974e-05, + "loss": 0.2683, + "step": 61410 + }, + { + "epoch": 2.3715201359125837, + "grad_norm": 1.2240315675735474, + "learning_rate": 4.190123170778795e-05, + "loss": 0.195, + "step": 61420 + }, + { + "epoch": 2.3719062512066102, + "grad_norm": 2.242389440536499, + "learning_rate": 4.187549068818616e-05, + "loss": 0.1074, + "step": 61430 + }, + { + "epoch": 2.3722923665006372, + "grad_norm": 0.7379412055015564, + "learning_rate": 4.184974966858438e-05, + "loss": 0.1394, + "step": 61440 + }, + { + "epoch": 2.372678481794664, + "grad_norm": 1.3384835720062256, + "learning_rate": 4.182400864898259e-05, + "loss": 0.248, + "step": 61450 + }, + { + "epoch": 2.3730645970886908, + "grad_norm": 0.23063971102237701, + "learning_rate": 4.17982676293808e-05, + "loss": 0.1458, + "step": 61460 + }, + { + "epoch": 2.3734507123827173, + "grad_norm": 0.6873703598976135, + "learning_rate": 4.177252660977902e-05, + "loss": 0.1315, + "step": 61470 + }, + { + "epoch": 2.3738368276767443, + "grad_norm": 1.462497591972351, + "learning_rate": 4.174678559017723e-05, + "loss": 0.1031, + "step": 61480 + }, + { + "epoch": 2.3742229429707713, + "grad_norm": 1.403594732284546, + "learning_rate": 4.172104457057544e-05, + "loss": 0.1962, + "step": 61490 + }, + { + "epoch": 2.374609058264798, + "grad_norm": 3.3132827281951904, + "learning_rate": 4.169530355097366e-05, + "loss": 0.243, + "step": 61500 + }, + { + "epoch": 2.374995173558825, + "grad_norm": 0.5474012494087219, + "learning_rate": 4.166956253137187e-05, + "loss": 0.1087, + "step": 61510 + }, + { + "epoch": 2.3753812888528514, + "grad_norm": 1.2518501281738281, + "learning_rate": 4.164382151177008e-05, + "loss": 0.1119, + "step": 61520 + }, + { + "epoch": 2.3757674041468784, + "grad_norm": 0.10591934621334076, + "learning_rate": 4.16180804921683e-05, + "loss": 0.238, + "step": 61530 + }, + { + "epoch": 2.376153519440905, + "grad_norm": 0.7095358967781067, + "learning_rate": 4.159233947256651e-05, + "loss": 0.1082, + "step": 61540 + }, + { + "epoch": 2.376539634734932, + "grad_norm": 0.09203200787305832, + "learning_rate": 4.156659845296472e-05, + "loss": 0.1178, + "step": 61550 + }, + { + "epoch": 2.376925750028959, + "grad_norm": 1.4663885831832886, + "learning_rate": 4.1540857433362937e-05, + "loss": 0.2273, + "step": 61560 + }, + { + "epoch": 2.3773118653229854, + "grad_norm": 0.9895615577697754, + "learning_rate": 4.151511641376115e-05, + "loss": 0.1222, + "step": 61570 + }, + { + "epoch": 2.3776979806170124, + "grad_norm": 0.7987017631530762, + "learning_rate": 4.148937539415936e-05, + "loss": 0.2083, + "step": 61580 + }, + { + "epoch": 2.378084095911039, + "grad_norm": 0.662470817565918, + "learning_rate": 4.146363437455758e-05, + "loss": 0.1489, + "step": 61590 + }, + { + "epoch": 2.378470211205066, + "grad_norm": 0.703076183795929, + "learning_rate": 4.143789335495579e-05, + "loss": 0.2176, + "step": 61600 + }, + { + "epoch": 2.3788563264990925, + "grad_norm": 0.6900975704193115, + "learning_rate": 4.1412152335354e-05, + "loss": 0.0967, + "step": 61610 + }, + { + "epoch": 2.3792424417931195, + "grad_norm": 0.4006218910217285, + "learning_rate": 4.138641131575222e-05, + "loss": 0.1943, + "step": 61620 + }, + { + "epoch": 2.379628557087146, + "grad_norm": 0.8131549954414368, + "learning_rate": 4.1360670296150437e-05, 
+ "loss": 0.1458, + "step": 61630 + }, + { + "epoch": 2.380014672381173, + "grad_norm": 2.572120189666748, + "learning_rate": 4.133492927654864e-05, + "loss": 0.1892, + "step": 61640 + }, + { + "epoch": 2.3804007876751996, + "grad_norm": 1.40338134765625, + "learning_rate": 4.1309188256946856e-05, + "loss": 0.1081, + "step": 61650 + }, + { + "epoch": 2.3807869029692266, + "grad_norm": 2.1713101863861084, + "learning_rate": 4.1283447237345076e-05, + "loss": 0.1839, + "step": 61660 + }, + { + "epoch": 2.3811730182632536, + "grad_norm": 1.1773313283920288, + "learning_rate": 4.125770621774328e-05, + "loss": 0.1065, + "step": 61670 + }, + { + "epoch": 2.38155913355728, + "grad_norm": 2.314040184020996, + "learning_rate": 4.1231965198141496e-05, + "loss": 0.2298, + "step": 61680 + }, + { + "epoch": 2.381945248851307, + "grad_norm": 1.7708461284637451, + "learning_rate": 4.1206224178539716e-05, + "loss": 0.1068, + "step": 61690 + }, + { + "epoch": 2.3823313641453336, + "grad_norm": 0.168818861246109, + "learning_rate": 4.118048315893793e-05, + "loss": 0.0979, + "step": 61700 + }, + { + "epoch": 2.3827174794393606, + "grad_norm": 2.80302357673645, + "learning_rate": 4.1154742139336136e-05, + "loss": 0.3275, + "step": 61710 + }, + { + "epoch": 2.383103594733387, + "grad_norm": 0.5709852576255798, + "learning_rate": 4.1129001119734356e-05, + "loss": 0.1523, + "step": 61720 + }, + { + "epoch": 2.383489710027414, + "grad_norm": 0.4733193516731262, + "learning_rate": 4.110326010013257e-05, + "loss": 0.0958, + "step": 61730 + }, + { + "epoch": 2.383875825321441, + "grad_norm": 0.3468289375305176, + "learning_rate": 4.107751908053078e-05, + "loss": 0.1437, + "step": 61740 + }, + { + "epoch": 2.3842619406154677, + "grad_norm": 0.7683085203170776, + "learning_rate": 4.1051778060928996e-05, + "loss": 0.1915, + "step": 61750 + }, + { + "epoch": 2.3846480559094947, + "grad_norm": 0.9539376497268677, + "learning_rate": 4.102603704132721e-05, + "loss": 0.13, + "step": 61760 + }, + { + "epoch": 2.3850341712035212, + "grad_norm": 1.247579574584961, + "learning_rate": 4.100029602172542e-05, + "loss": 0.1212, + "step": 61770 + }, + { + "epoch": 2.3854202864975482, + "grad_norm": 0.5379541516304016, + "learning_rate": 4.0974555002123636e-05, + "loss": 0.4552, + "step": 61780 + }, + { + "epoch": 2.3858064017915748, + "grad_norm": 1.4555822610855103, + "learning_rate": 4.094881398252185e-05, + "loss": 0.1677, + "step": 61790 + }, + { + "epoch": 2.3861925170856018, + "grad_norm": 2.210245132446289, + "learning_rate": 4.092307296292006e-05, + "loss": 0.1099, + "step": 61800 + }, + { + "epoch": 2.3865786323796288, + "grad_norm": 0.2830033004283905, + "learning_rate": 4.0897331943318276e-05, + "loss": 0.0844, + "step": 61810 + }, + { + "epoch": 2.3869647476736553, + "grad_norm": 0.5923789739608765, + "learning_rate": 4.087159092371649e-05, + "loss": 0.0668, + "step": 61820 + }, + { + "epoch": 2.3873508629676823, + "grad_norm": 2.1576321125030518, + "learning_rate": 4.08458499041147e-05, + "loss": 0.2333, + "step": 61830 + }, + { + "epoch": 2.387736978261709, + "grad_norm": 0.09188230335712433, + "learning_rate": 4.082010888451292e-05, + "loss": 0.1849, + "step": 61840 + }, + { + "epoch": 2.388123093555736, + "grad_norm": 0.5188024640083313, + "learning_rate": 4.079436786491113e-05, + "loss": 0.0781, + "step": 61850 + }, + { + "epoch": 2.3885092088497624, + "grad_norm": 0.33355507254600525, + "learning_rate": 4.076862684530934e-05, + "loss": 0.1886, + "step": 61860 + }, + { + "epoch": 2.3888953241437894, + "grad_norm": 
0.25753054022789, + "learning_rate": 4.074288582570756e-05, + "loss": 0.1492, + "step": 61870 + }, + { + "epoch": 2.3892814394378163, + "grad_norm": 0.47389110922813416, + "learning_rate": 4.071714480610577e-05, + "loss": 0.1576, + "step": 61880 + }, + { + "epoch": 2.389667554731843, + "grad_norm": 0.6938667297363281, + "learning_rate": 4.069140378650398e-05, + "loss": 0.1554, + "step": 61890 + }, + { + "epoch": 2.39005367002587, + "grad_norm": 0.5270907282829285, + "learning_rate": 4.06656627669022e-05, + "loss": 0.0905, + "step": 61900 + }, + { + "epoch": 2.3904397853198964, + "grad_norm": 0.972940981388092, + "learning_rate": 4.0639921747300416e-05, + "loss": 0.1008, + "step": 61910 + }, + { + "epoch": 2.3908259006139234, + "grad_norm": 0.3339834213256836, + "learning_rate": 4.061418072769862e-05, + "loss": 0.124, + "step": 61920 + }, + { + "epoch": 2.39121201590795, + "grad_norm": 0.40384066104888916, + "learning_rate": 4.0588439708096836e-05, + "loss": 0.1318, + "step": 61930 + }, + { + "epoch": 2.391598131201977, + "grad_norm": 1.3399138450622559, + "learning_rate": 4.0562698688495056e-05, + "loss": 0.2285, + "step": 61940 + }, + { + "epoch": 2.391984246496004, + "grad_norm": 1.2441486120224, + "learning_rate": 4.053695766889327e-05, + "loss": 0.1842, + "step": 61950 + }, + { + "epoch": 2.3923703617900305, + "grad_norm": 0.18500332534313202, + "learning_rate": 4.0511216649291476e-05, + "loss": 0.124, + "step": 61960 + }, + { + "epoch": 2.3927564770840575, + "grad_norm": 4.335320949554443, + "learning_rate": 4.0485475629689696e-05, + "loss": 0.2101, + "step": 61970 + }, + { + "epoch": 2.393142592378084, + "grad_norm": 1.7215917110443115, + "learning_rate": 4.045973461008791e-05, + "loss": 0.1828, + "step": 61980 + }, + { + "epoch": 2.393528707672111, + "grad_norm": 1.3829667568206787, + "learning_rate": 4.0433993590486116e-05, + "loss": 0.1872, + "step": 61990 + }, + { + "epoch": 2.3939148229661376, + "grad_norm": 0.8047557473182678, + "learning_rate": 4.0408252570884336e-05, + "loss": 0.195, + "step": 62000 + }, + { + "epoch": 2.3943009382601645, + "grad_norm": 0.04885184019804001, + "learning_rate": 4.038251155128255e-05, + "loss": 0.1502, + "step": 62010 + }, + { + "epoch": 2.394687053554191, + "grad_norm": 2.9263839721679688, + "learning_rate": 4.035677053168076e-05, + "loss": 0.2479, + "step": 62020 + }, + { + "epoch": 2.395073168848218, + "grad_norm": 1.2394524812698364, + "learning_rate": 4.0331029512078976e-05, + "loss": 0.1221, + "step": 62030 + }, + { + "epoch": 2.3954592841422446, + "grad_norm": 1.1224110126495361, + "learning_rate": 4.030528849247719e-05, + "loss": 0.1172, + "step": 62040 + }, + { + "epoch": 2.3958453994362716, + "grad_norm": 1.0132677555084229, + "learning_rate": 4.02795474728754e-05, + "loss": 0.2585, + "step": 62050 + }, + { + "epoch": 2.3962315147302986, + "grad_norm": 1.7612736225128174, + "learning_rate": 4.0253806453273616e-05, + "loss": 0.1449, + "step": 62060 + }, + { + "epoch": 2.396617630024325, + "grad_norm": 2.687474012374878, + "learning_rate": 4.022806543367183e-05, + "loss": 0.2116, + "step": 62070 + }, + { + "epoch": 2.397003745318352, + "grad_norm": 0.9632325768470764, + "learning_rate": 4.020232441407004e-05, + "loss": 0.233, + "step": 62080 + }, + { + "epoch": 2.3973898606123787, + "grad_norm": 0.385966956615448, + "learning_rate": 4.017658339446826e-05, + "loss": 0.2557, + "step": 62090 + }, + { + "epoch": 2.3977759759064057, + "grad_norm": 0.18261398375034332, + "learning_rate": 4.015084237486647e-05, + "loss": 0.0792, + "step": 62100 
+ }, + { + "epoch": 2.3981620912004322, + "grad_norm": 0.07081570476293564, + "learning_rate": 4.012510135526468e-05, + "loss": 0.121, + "step": 62110 + }, + { + "epoch": 2.398548206494459, + "grad_norm": 1.5726689100265503, + "learning_rate": 4.00993603356629e-05, + "loss": 0.235, + "step": 62120 + }, + { + "epoch": 2.398934321788486, + "grad_norm": 1.1276930570602417, + "learning_rate": 4.007361931606111e-05, + "loss": 0.2191, + "step": 62130 + }, + { + "epoch": 2.3993204370825127, + "grad_norm": 1.8213441371917725, + "learning_rate": 4.004787829645932e-05, + "loss": 0.2588, + "step": 62140 + }, + { + "epoch": 2.3997065523765397, + "grad_norm": 1.2340245246887207, + "learning_rate": 4.002213727685754e-05, + "loss": 0.2816, + "step": 62150 + }, + { + "epoch": 2.4000926676705663, + "grad_norm": 1.6360499858856201, + "learning_rate": 3.9996396257255756e-05, + "loss": 0.1142, + "step": 62160 + }, + { + "epoch": 2.4004787829645933, + "grad_norm": 0.48215198516845703, + "learning_rate": 3.997065523765396e-05, + "loss": 0.1013, + "step": 62170 + }, + { + "epoch": 2.40086489825862, + "grad_norm": 0.04493289813399315, + "learning_rate": 3.9944914218052175e-05, + "loss": 0.2127, + "step": 62180 + }, + { + "epoch": 2.401251013552647, + "grad_norm": 0.3863857686519623, + "learning_rate": 3.9919173198450396e-05, + "loss": 0.1712, + "step": 62190 + }, + { + "epoch": 2.401637128846674, + "grad_norm": 2.209010362625122, + "learning_rate": 3.98934321788486e-05, + "loss": 0.1541, + "step": 62200 + }, + { + "epoch": 2.4020232441407003, + "grad_norm": 0.5304957032203674, + "learning_rate": 3.9867691159246815e-05, + "loss": 0.2147, + "step": 62210 + }, + { + "epoch": 2.4024093594347273, + "grad_norm": 0.660261332988739, + "learning_rate": 3.9841950139645035e-05, + "loss": 0.054, + "step": 62220 + }, + { + "epoch": 2.402795474728754, + "grad_norm": 0.3104497194290161, + "learning_rate": 3.981620912004325e-05, + "loss": 0.1164, + "step": 62230 + }, + { + "epoch": 2.403181590022781, + "grad_norm": 0.6475027799606323, + "learning_rate": 3.9790468100441455e-05, + "loss": 0.1258, + "step": 62240 + }, + { + "epoch": 2.4035677053168074, + "grad_norm": 0.16110478341579437, + "learning_rate": 3.9764727080839675e-05, + "loss": 0.1574, + "step": 62250 + }, + { + "epoch": 2.4039538206108344, + "grad_norm": 2.295118808746338, + "learning_rate": 3.973898606123789e-05, + "loss": 0.1732, + "step": 62260 + }, + { + "epoch": 2.4043399359048614, + "grad_norm": 1.4980134963989258, + "learning_rate": 3.97132450416361e-05, + "loss": 0.2441, + "step": 62270 + }, + { + "epoch": 2.404726051198888, + "grad_norm": 1.1637049913406372, + "learning_rate": 3.9687504022034315e-05, + "loss": 0.2177, + "step": 62280 + }, + { + "epoch": 2.405112166492915, + "grad_norm": 0.2586102783679962, + "learning_rate": 3.966176300243253e-05, + "loss": 0.1486, + "step": 62290 + }, + { + "epoch": 2.4054982817869415, + "grad_norm": 1.9430426359176636, + "learning_rate": 3.963602198283074e-05, + "loss": 0.1609, + "step": 62300 + }, + { + "epoch": 2.4058843970809685, + "grad_norm": 1.1216020584106445, + "learning_rate": 3.9610280963228955e-05, + "loss": 0.1192, + "step": 62310 + }, + { + "epoch": 2.406270512374995, + "grad_norm": 0.34984323382377625, + "learning_rate": 3.958453994362717e-05, + "loss": 0.104, + "step": 62320 + }, + { + "epoch": 2.406656627669022, + "grad_norm": 3.057056427001953, + "learning_rate": 3.955879892402538e-05, + "loss": 0.259, + "step": 62330 + }, + { + "epoch": 2.407042742963049, + "grad_norm": 1.8370370864868164, + 
"learning_rate": 3.9533057904423595e-05, + "loss": 0.2345, + "step": 62340 + }, + { + "epoch": 2.4074288582570755, + "grad_norm": 0.6045883297920227, + "learning_rate": 3.950731688482181e-05, + "loss": 0.188, + "step": 62350 + }, + { + "epoch": 2.4078149735511025, + "grad_norm": 0.20036596059799194, + "learning_rate": 3.948157586522002e-05, + "loss": 0.1039, + "step": 62360 + }, + { + "epoch": 2.408201088845129, + "grad_norm": 0.1816219538450241, + "learning_rate": 3.945583484561824e-05, + "loss": 0.1453, + "step": 62370 + }, + { + "epoch": 2.408587204139156, + "grad_norm": 2.150385856628418, + "learning_rate": 3.943009382601645e-05, + "loss": 0.1803, + "step": 62380 + }, + { + "epoch": 2.4089733194331826, + "grad_norm": 1.5039875507354736, + "learning_rate": 3.940435280641466e-05, + "loss": 0.1951, + "step": 62390 + }, + { + "epoch": 2.4093594347272096, + "grad_norm": 1.9124608039855957, + "learning_rate": 3.937861178681288e-05, + "loss": 0.1488, + "step": 62400 + }, + { + "epoch": 2.4097455500212366, + "grad_norm": 0.5086666345596313, + "learning_rate": 3.9352870767211095e-05, + "loss": 0.2198, + "step": 62410 + }, + { + "epoch": 2.410131665315263, + "grad_norm": 0.7198240160942078, + "learning_rate": 3.93271297476093e-05, + "loss": 0.104, + "step": 62420 + }, + { + "epoch": 2.41051778060929, + "grad_norm": 0.22373056411743164, + "learning_rate": 3.930138872800752e-05, + "loss": 0.1572, + "step": 62430 + }, + { + "epoch": 2.4109038959033167, + "grad_norm": 0.58324134349823, + "learning_rate": 3.9275647708405735e-05, + "loss": 0.1558, + "step": 62440 + }, + { + "epoch": 2.4112900111973437, + "grad_norm": 0.5554331541061401, + "learning_rate": 3.924990668880394e-05, + "loss": 0.1776, + "step": 62450 + }, + { + "epoch": 2.41167612649137, + "grad_norm": 0.4414098262786865, + "learning_rate": 3.9224165669202155e-05, + "loss": 0.1029, + "step": 62460 + }, + { + "epoch": 2.412062241785397, + "grad_norm": 1.496374487876892, + "learning_rate": 3.9198424649600375e-05, + "loss": 0.1948, + "step": 62470 + }, + { + "epoch": 2.4124483570794237, + "grad_norm": 1.6268385648727417, + "learning_rate": 3.917268362999859e-05, + "loss": 0.1853, + "step": 62480 + }, + { + "epoch": 2.4128344723734507, + "grad_norm": 2.2693099975585938, + "learning_rate": 3.9146942610396795e-05, + "loss": 0.2076, + "step": 62490 + }, + { + "epoch": 2.4132205876674773, + "grad_norm": 1.1219795942306519, + "learning_rate": 3.9121201590795015e-05, + "loss": 0.1186, + "step": 62500 + }, + { + "epoch": 2.4136067029615043, + "grad_norm": 0.7887373566627502, + "learning_rate": 3.909546057119323e-05, + "loss": 0.1996, + "step": 62510 + }, + { + "epoch": 2.4139928182555312, + "grad_norm": 0.256099671125412, + "learning_rate": 3.906971955159144e-05, + "loss": 0.0926, + "step": 62520 + }, + { + "epoch": 2.414378933549558, + "grad_norm": 0.6383737921714783, + "learning_rate": 3.9043978531989655e-05, + "loss": 0.1383, + "step": 62530 + }, + { + "epoch": 2.414765048843585, + "grad_norm": 1.5678856372833252, + "learning_rate": 3.901823751238787e-05, + "loss": 0.1486, + "step": 62540 + }, + { + "epoch": 2.4151511641376113, + "grad_norm": 0.11567826569080353, + "learning_rate": 3.899249649278608e-05, + "loss": 0.1031, + "step": 62550 + }, + { + "epoch": 2.4155372794316383, + "grad_norm": 1.3567986488342285, + "learning_rate": 3.8966755473184295e-05, + "loss": 0.1964, + "step": 62560 + }, + { + "epoch": 2.415923394725665, + "grad_norm": 0.1885988563299179, + "learning_rate": 3.894101445358251e-05, + "loss": 0.0998, + "step": 62570 + }, + { + 
"epoch": 2.416309510019692, + "grad_norm": 0.7068611979484558, + "learning_rate": 3.891527343398072e-05, + "loss": 0.161, + "step": 62580 + }, + { + "epoch": 2.416695625313719, + "grad_norm": 0.34557539224624634, + "learning_rate": 3.8889532414378935e-05, + "loss": 0.143, + "step": 62590 + }, + { + "epoch": 2.4170817406077454, + "grad_norm": 1.6846903562545776, + "learning_rate": 3.886379139477715e-05, + "loss": 0.189, + "step": 62600 + }, + { + "epoch": 2.4174678559017724, + "grad_norm": 0.9236536026000977, + "learning_rate": 3.883805037517536e-05, + "loss": 0.2385, + "step": 62610 + }, + { + "epoch": 2.417853971195799, + "grad_norm": 2.0287728309631348, + "learning_rate": 3.881230935557358e-05, + "loss": 0.3649, + "step": 62620 + }, + { + "epoch": 2.418240086489826, + "grad_norm": 1.3650734424591064, + "learning_rate": 3.878656833597179e-05, + "loss": 0.1766, + "step": 62630 + }, + { + "epoch": 2.4186262017838525, + "grad_norm": 0.44368478655815125, + "learning_rate": 3.876082731637e-05, + "loss": 0.1456, + "step": 62640 + }, + { + "epoch": 2.4190123170778794, + "grad_norm": 1.93278169631958, + "learning_rate": 3.873508629676822e-05, + "loss": 0.1568, + "step": 62650 + }, + { + "epoch": 2.4193984323719064, + "grad_norm": 0.22828684747219086, + "learning_rate": 3.870934527716643e-05, + "loss": 0.1003, + "step": 62660 + }, + { + "epoch": 2.419784547665933, + "grad_norm": 0.796909749507904, + "learning_rate": 3.868360425756464e-05, + "loss": 0.0777, + "step": 62670 + }, + { + "epoch": 2.42017066295996, + "grad_norm": 0.8624141812324524, + "learning_rate": 3.865786323796286e-05, + "loss": 0.2598, + "step": 62680 + }, + { + "epoch": 2.4205567782539865, + "grad_norm": 1.9208048582077026, + "learning_rate": 3.8632122218361075e-05, + "loss": 0.3543, + "step": 62690 + }, + { + "epoch": 2.4209428935480135, + "grad_norm": 0.8410032987594604, + "learning_rate": 3.860638119875928e-05, + "loss": 0.2511, + "step": 62700 + }, + { + "epoch": 2.42132900884204, + "grad_norm": 0.2925935387611389, + "learning_rate": 3.8580640179157494e-05, + "loss": 0.1085, + "step": 62710 + }, + { + "epoch": 2.421715124136067, + "grad_norm": 2.3135852813720703, + "learning_rate": 3.8554899159555715e-05, + "loss": 0.235, + "step": 62720 + }, + { + "epoch": 2.422101239430094, + "grad_norm": 1.9191985130310059, + "learning_rate": 3.852915813995393e-05, + "loss": 0.1893, + "step": 62730 + }, + { + "epoch": 2.4224873547241206, + "grad_norm": 1.2339379787445068, + "learning_rate": 3.8503417120352134e-05, + "loss": 0.1778, + "step": 62740 + }, + { + "epoch": 2.4228734700181476, + "grad_norm": 0.09992465376853943, + "learning_rate": 3.8477676100750355e-05, + "loss": 0.1301, + "step": 62750 + }, + { + "epoch": 2.423259585312174, + "grad_norm": 0.13811521232128143, + "learning_rate": 3.845193508114857e-05, + "loss": 0.1062, + "step": 62760 + }, + { + "epoch": 2.423645700606201, + "grad_norm": 2.9451167583465576, + "learning_rate": 3.8426194061546774e-05, + "loss": 0.086, + "step": 62770 + }, + { + "epoch": 2.4240318159002276, + "grad_norm": 0.23093783855438232, + "learning_rate": 3.8400453041944994e-05, + "loss": 0.0962, + "step": 62780 + }, + { + "epoch": 2.4244179311942546, + "grad_norm": 0.10238637775182724, + "learning_rate": 3.837471202234321e-05, + "loss": 0.1338, + "step": 62790 + }, + { + "epoch": 2.4248040464882816, + "grad_norm": 0.4013136029243469, + "learning_rate": 3.834897100274142e-05, + "loss": 0.3002, + "step": 62800 + }, + { + "epoch": 2.425190161782308, + "grad_norm": 0.3969825804233551, + "learning_rate": 
3.8323229983139634e-05, + "loss": 0.2349, + "step": 62810 + }, + { + "epoch": 2.425576277076335, + "grad_norm": 0.9879517555236816, + "learning_rate": 3.829748896353785e-05, + "loss": 0.2564, + "step": 62820 + }, + { + "epoch": 2.4259623923703617, + "grad_norm": 1.5865511894226074, + "learning_rate": 3.827174794393606e-05, + "loss": 0.19, + "step": 62830 + }, + { + "epoch": 2.4263485076643887, + "grad_norm": 2.8750438690185547, + "learning_rate": 3.8246006924334274e-05, + "loss": 0.1515, + "step": 62840 + }, + { + "epoch": 2.4267346229584152, + "grad_norm": 0.9555363059043884, + "learning_rate": 3.822026590473249e-05, + "loss": 0.137, + "step": 62850 + }, + { + "epoch": 2.4271207382524422, + "grad_norm": 0.10716754198074341, + "learning_rate": 3.81945248851307e-05, + "loss": 0.2019, + "step": 62860 + }, + { + "epoch": 2.4275068535464692, + "grad_norm": 2.1091630458831787, + "learning_rate": 3.8168783865528914e-05, + "loss": 0.2126, + "step": 62870 + }, + { + "epoch": 2.4278929688404958, + "grad_norm": 2.2296948432922363, + "learning_rate": 3.814304284592713e-05, + "loss": 0.2098, + "step": 62880 + }, + { + "epoch": 2.4282790841345228, + "grad_norm": 1.1145920753479004, + "learning_rate": 3.811730182632534e-05, + "loss": 0.0998, + "step": 62890 + }, + { + "epoch": 2.4286651994285493, + "grad_norm": 0.6394558548927307, + "learning_rate": 3.809156080672356e-05, + "loss": 0.1766, + "step": 62900 + }, + { + "epoch": 2.4290513147225763, + "grad_norm": 0.16177436709403992, + "learning_rate": 3.806581978712177e-05, + "loss": 0.1776, + "step": 62910 + }, + { + "epoch": 2.429437430016603, + "grad_norm": 1.643153190612793, + "learning_rate": 3.804007876751998e-05, + "loss": 0.1908, + "step": 62920 + }, + { + "epoch": 2.42982354531063, + "grad_norm": 3.011587619781494, + "learning_rate": 3.80143377479182e-05, + "loss": 0.1595, + "step": 62930 + }, + { + "epoch": 2.4302096606046564, + "grad_norm": 1.1857898235321045, + "learning_rate": 3.7988596728316414e-05, + "loss": 0.1117, + "step": 62940 + }, + { + "epoch": 2.4305957758986834, + "grad_norm": 0.984836995601654, + "learning_rate": 3.796285570871462e-05, + "loss": 0.0888, + "step": 62950 + }, + { + "epoch": 2.43098189119271, + "grad_norm": 0.2273918092250824, + "learning_rate": 3.7937114689112834e-05, + "loss": 0.0998, + "step": 62960 + }, + { + "epoch": 2.431368006486737, + "grad_norm": 0.6913338303565979, + "learning_rate": 3.7911373669511054e-05, + "loss": 0.1549, + "step": 62970 + }, + { + "epoch": 2.431754121780764, + "grad_norm": 0.2670879364013672, + "learning_rate": 3.788563264990926e-05, + "loss": 0.1931, + "step": 62980 + }, + { + "epoch": 2.4321402370747904, + "grad_norm": 0.3435567021369934, + "learning_rate": 3.7859891630307474e-05, + "loss": 0.1872, + "step": 62990 + }, + { + "epoch": 2.4325263523688174, + "grad_norm": 2.289534091949463, + "learning_rate": 3.7834150610705694e-05, + "loss": 0.1371, + "step": 63000 + }, + { + "epoch": 2.432912467662844, + "grad_norm": 0.5831142067909241, + "learning_rate": 3.780840959110391e-05, + "loss": 0.1779, + "step": 63010 + }, + { + "epoch": 2.433298582956871, + "grad_norm": 0.4865301847457886, + "learning_rate": 3.7782668571502114e-05, + "loss": 0.2069, + "step": 63020 + }, + { + "epoch": 2.4336846982508975, + "grad_norm": 0.9294113516807556, + "learning_rate": 3.7756927551900334e-05, + "loss": 0.2059, + "step": 63030 + }, + { + "epoch": 2.4340708135449245, + "grad_norm": 1.044704794883728, + "learning_rate": 3.773118653229855e-05, + "loss": 0.2075, + "step": 63040 + }, + { + "epoch": 
2.4344569288389515, + "grad_norm": 0.07156316190958023, + "learning_rate": 3.770544551269676e-05, + "loss": 0.0827, + "step": 63050 + }, + { + "epoch": 2.434843044132978, + "grad_norm": 1.0131940841674805, + "learning_rate": 3.7679704493094974e-05, + "loss": 0.0786, + "step": 63060 + }, + { + "epoch": 2.435229159427005, + "grad_norm": 0.8649851679801941, + "learning_rate": 3.765396347349319e-05, + "loss": 0.1476, + "step": 63070 + }, + { + "epoch": 2.4356152747210316, + "grad_norm": 4.939096927642822, + "learning_rate": 3.76282224538914e-05, + "loss": 0.2188, + "step": 63080 + }, + { + "epoch": 2.4360013900150586, + "grad_norm": 0.4139706790447235, + "learning_rate": 3.7602481434289614e-05, + "loss": 0.281, + "step": 63090 + }, + { + "epoch": 2.436387505309085, + "grad_norm": 0.0399312861263752, + "learning_rate": 3.757674041468783e-05, + "loss": 0.3068, + "step": 63100 + }, + { + "epoch": 2.436773620603112, + "grad_norm": 0.7237934470176697, + "learning_rate": 3.755099939508604e-05, + "loss": 0.2222, + "step": 63110 + }, + { + "epoch": 2.437159735897139, + "grad_norm": 0.16770142316818237, + "learning_rate": 3.7525258375484254e-05, + "loss": 0.1302, + "step": 63120 + }, + { + "epoch": 2.4375458511911656, + "grad_norm": 1.9887669086456299, + "learning_rate": 3.749951735588247e-05, + "loss": 0.0794, + "step": 63130 + }, + { + "epoch": 2.4379319664851926, + "grad_norm": 2.856318712234497, + "learning_rate": 3.747377633628068e-05, + "loss": 0.0996, + "step": 63140 + }, + { + "epoch": 2.438318081779219, + "grad_norm": 0.9471076130867004, + "learning_rate": 3.74480353166789e-05, + "loss": 0.1915, + "step": 63150 + }, + { + "epoch": 2.438704197073246, + "grad_norm": 1.4671097993850708, + "learning_rate": 3.742229429707711e-05, + "loss": 0.1851, + "step": 63160 + }, + { + "epoch": 2.4390903123672727, + "grad_norm": 0.3654942810535431, + "learning_rate": 3.739655327747532e-05, + "loss": 0.1091, + "step": 63170 + }, + { + "epoch": 2.4394764276612997, + "grad_norm": 0.8947110176086426, + "learning_rate": 3.737081225787354e-05, + "loss": 0.1741, + "step": 63180 + }, + { + "epoch": 2.4398625429553267, + "grad_norm": 1.3367135524749756, + "learning_rate": 3.7345071238271754e-05, + "loss": 0.2007, + "step": 63190 + }, + { + "epoch": 2.440248658249353, + "grad_norm": 2.484145164489746, + "learning_rate": 3.731933021866996e-05, + "loss": 0.1794, + "step": 63200 + }, + { + "epoch": 2.44063477354338, + "grad_norm": 1.0260628461837769, + "learning_rate": 3.729358919906818e-05, + "loss": 0.3053, + "step": 63210 + }, + { + "epoch": 2.4410208888374068, + "grad_norm": 1.0322519540786743, + "learning_rate": 3.7267848179466394e-05, + "loss": 0.1826, + "step": 63220 + }, + { + "epoch": 2.4414070041314337, + "grad_norm": 0.6321549415588379, + "learning_rate": 3.72421071598646e-05, + "loss": 0.1561, + "step": 63230 + }, + { + "epoch": 2.4417931194254603, + "grad_norm": 1.2646952867507935, + "learning_rate": 3.7216366140262814e-05, + "loss": 0.0833, + "step": 63240 + }, + { + "epoch": 2.4421792347194873, + "grad_norm": 1.1415789127349854, + "learning_rate": 3.7190625120661034e-05, + "loss": 0.3039, + "step": 63250 + }, + { + "epoch": 2.4425653500135143, + "grad_norm": 1.143133521080017, + "learning_rate": 3.716488410105925e-05, + "loss": 0.1585, + "step": 63260 + }, + { + "epoch": 2.442951465307541, + "grad_norm": 0.32217004895210266, + "learning_rate": 3.7139143081457453e-05, + "loss": 0.1776, + "step": 63270 + }, + { + "epoch": 2.443337580601568, + "grad_norm": 0.04204453527927399, + "learning_rate": 
3.7113402061855674e-05, + "loss": 0.1966, + "step": 63280 + }, + { + "epoch": 2.4437236958955943, + "grad_norm": 3.0139830112457275, + "learning_rate": 3.708766104225389e-05, + "loss": 0.2323, + "step": 63290 + }, + { + "epoch": 2.4441098111896213, + "grad_norm": 0.6170455813407898, + "learning_rate": 3.70619200226521e-05, + "loss": 0.18, + "step": 63300 + }, + { + "epoch": 2.444495926483648, + "grad_norm": 0.08699564635753632, + "learning_rate": 3.7036179003050313e-05, + "loss": 0.2548, + "step": 63310 + }, + { + "epoch": 2.444882041777675, + "grad_norm": 0.18672427535057068, + "learning_rate": 3.701043798344853e-05, + "loss": 0.2081, + "step": 63320 + }, + { + "epoch": 2.4452681570717014, + "grad_norm": 0.32661938667297363, + "learning_rate": 3.698469696384674e-05, + "loss": 0.1391, + "step": 63330 + }, + { + "epoch": 2.4456542723657284, + "grad_norm": 0.521452009677887, + "learning_rate": 3.6958955944244953e-05, + "loss": 0.1688, + "step": 63340 + }, + { + "epoch": 2.446040387659755, + "grad_norm": 1.4832172393798828, + "learning_rate": 3.693321492464317e-05, + "loss": 0.1051, + "step": 63350 + }, + { + "epoch": 2.446426502953782, + "grad_norm": 0.4200538694858551, + "learning_rate": 3.690747390504138e-05, + "loss": 0.1822, + "step": 63360 + }, + { + "epoch": 2.446812618247809, + "grad_norm": 0.48908549547195435, + "learning_rate": 3.688173288543959e-05, + "loss": 0.0922, + "step": 63370 + }, + { + "epoch": 2.4471987335418355, + "grad_norm": 1.988203525543213, + "learning_rate": 3.685599186583781e-05, + "loss": 0.1412, + "step": 63380 + }, + { + "epoch": 2.4475848488358625, + "grad_norm": 0.8950991630554199, + "learning_rate": 3.683025084623602e-05, + "loss": 0.2411, + "step": 63390 + }, + { + "epoch": 2.447970964129889, + "grad_norm": 1.0535110235214233, + "learning_rate": 3.680450982663424e-05, + "loss": 0.1539, + "step": 63400 + }, + { + "epoch": 2.448357079423916, + "grad_norm": 1.7036796808242798, + "learning_rate": 3.6778768807032447e-05, + "loss": 0.2954, + "step": 63410 + }, + { + "epoch": 2.4487431947179426, + "grad_norm": 0.32762983441352844, + "learning_rate": 3.675302778743066e-05, + "loss": 0.1721, + "step": 63420 + }, + { + "epoch": 2.4491293100119695, + "grad_norm": 3.3198564052581787, + "learning_rate": 3.672728676782888e-05, + "loss": 0.3087, + "step": 63430 + }, + { + "epoch": 2.4495154253059965, + "grad_norm": 2.290881872177124, + "learning_rate": 3.6701545748227087e-05, + "loss": 0.0957, + "step": 63440 + }, + { + "epoch": 2.449901540600023, + "grad_norm": 1.985274076461792, + "learning_rate": 3.66758047286253e-05, + "loss": 0.146, + "step": 63450 + }, + { + "epoch": 2.45028765589405, + "grad_norm": 1.5870091915130615, + "learning_rate": 3.665006370902352e-05, + "loss": 0.179, + "step": 63460 + }, + { + "epoch": 2.4506737711880766, + "grad_norm": 0.29243603348731995, + "learning_rate": 3.662432268942173e-05, + "loss": 0.2421, + "step": 63470 + }, + { + "epoch": 2.4510598864821036, + "grad_norm": 1.050746202468872, + "learning_rate": 3.659858166981994e-05, + "loss": 0.1841, + "step": 63480 + }, + { + "epoch": 2.45144600177613, + "grad_norm": 0.9252954721450806, + "learning_rate": 3.657284065021815e-05, + "loss": 0.294, + "step": 63490 + }, + { + "epoch": 2.451832117070157, + "grad_norm": 1.6930452585220337, + "learning_rate": 3.654709963061637e-05, + "loss": 0.083, + "step": 63500 + }, + { + "epoch": 2.452218232364184, + "grad_norm": 1.2885856628417969, + "learning_rate": 3.6521358611014586e-05, + "loss": 0.1526, + "step": 63510 + }, + { + "epoch": 2.4526043476582107, 
+ "grad_norm": 0.19372563064098358, + "learning_rate": 3.649561759141279e-05, + "loss": 0.1417, + "step": 63520 + }, + { + "epoch": 2.4529904629522377, + "grad_norm": 0.9957149624824524, + "learning_rate": 3.646987657181101e-05, + "loss": 0.2321, + "step": 63530 + }, + { + "epoch": 2.453376578246264, + "grad_norm": 0.8055297136306763, + "learning_rate": 3.6444135552209226e-05, + "loss": 0.178, + "step": 63540 + }, + { + "epoch": 2.453762693540291, + "grad_norm": 0.37042102217674255, + "learning_rate": 3.641839453260743e-05, + "loss": 0.1533, + "step": 63550 + }, + { + "epoch": 2.4541488088343177, + "grad_norm": 0.09681963920593262, + "learning_rate": 3.639265351300565e-05, + "loss": 0.0807, + "step": 63560 + }, + { + "epoch": 2.4545349241283447, + "grad_norm": 0.17449086904525757, + "learning_rate": 3.6366912493403866e-05, + "loss": 0.0819, + "step": 63570 + }, + { + "epoch": 2.4549210394223717, + "grad_norm": 0.9616051316261292, + "learning_rate": 3.634117147380208e-05, + "loss": 0.0952, + "step": 63580 + }, + { + "epoch": 2.4553071547163983, + "grad_norm": 3.450044870376587, + "learning_rate": 3.631543045420029e-05, + "loss": 0.247, + "step": 63590 + }, + { + "epoch": 2.4556932700104253, + "grad_norm": 0.5883270502090454, + "learning_rate": 3.6289689434598506e-05, + "loss": 0.105, + "step": 63600 + }, + { + "epoch": 2.456079385304452, + "grad_norm": 1.2351908683776855, + "learning_rate": 3.626394841499672e-05, + "loss": 0.1978, + "step": 63610 + }, + { + "epoch": 2.456465500598479, + "grad_norm": 0.08615940809249878, + "learning_rate": 3.623820739539493e-05, + "loss": 0.0366, + "step": 63620 + }, + { + "epoch": 2.4568516158925053, + "grad_norm": 0.758092999458313, + "learning_rate": 3.6212466375793146e-05, + "loss": 0.0538, + "step": 63630 + }, + { + "epoch": 2.4572377311865323, + "grad_norm": 0.2542964220046997, + "learning_rate": 3.618672535619136e-05, + "loss": 0.217, + "step": 63640 + }, + { + "epoch": 2.4576238464805593, + "grad_norm": 1.8404854536056519, + "learning_rate": 3.616098433658957e-05, + "loss": 0.1246, + "step": 63650 + }, + { + "epoch": 2.458009961774586, + "grad_norm": 0.06438548862934113, + "learning_rate": 3.6135243316987786e-05, + "loss": 0.1816, + "step": 63660 + }, + { + "epoch": 2.458396077068613, + "grad_norm": 1.4888163805007935, + "learning_rate": 3.6109502297386e-05, + "loss": 0.1496, + "step": 63670 + }, + { + "epoch": 2.4587821923626394, + "grad_norm": 0.06935808062553406, + "learning_rate": 3.608376127778422e-05, + "loss": 0.1095, + "step": 63680 + }, + { + "epoch": 2.4591683076566664, + "grad_norm": 0.9536172151565552, + "learning_rate": 3.6058020258182426e-05, + "loss": 0.1572, + "step": 63690 + }, + { + "epoch": 2.459554422950693, + "grad_norm": 3.4437315464019775, + "learning_rate": 3.603227923858064e-05, + "loss": 0.2611, + "step": 63700 + }, + { + "epoch": 2.45994053824472, + "grad_norm": 2.3220934867858887, + "learning_rate": 3.600653821897886e-05, + "loss": 0.2999, + "step": 63710 + }, + { + "epoch": 2.460326653538747, + "grad_norm": 1.1044209003448486, + "learning_rate": 3.598079719937707e-05, + "loss": 0.0936, + "step": 63720 + }, + { + "epoch": 2.4607127688327735, + "grad_norm": 2.439162015914917, + "learning_rate": 3.595505617977528e-05, + "loss": 0.2898, + "step": 63730 + }, + { + "epoch": 2.4610988841268004, + "grad_norm": 0.9153732061386108, + "learning_rate": 3.59293151601735e-05, + "loss": 0.0976, + "step": 63740 + }, + { + "epoch": 2.461484999420827, + "grad_norm": 0.019965412095189095, + "learning_rate": 3.590357414057171e-05, + "loss": 
0.1132, + "step": 63750 + }, + { + "epoch": 2.461871114714854, + "grad_norm": 1.2829655408859253, + "learning_rate": 3.587783312096992e-05, + "loss": 0.109, + "step": 63760 + }, + { + "epoch": 2.4622572300088805, + "grad_norm": 1.2905359268188477, + "learning_rate": 3.585209210136813e-05, + "loss": 0.288, + "step": 63770 + }, + { + "epoch": 2.4626433453029075, + "grad_norm": 0.6175247430801392, + "learning_rate": 3.582635108176635e-05, + "loss": 0.1877, + "step": 63780 + }, + { + "epoch": 2.463029460596934, + "grad_norm": 0.15092957019805908, + "learning_rate": 3.5800610062164566e-05, + "loss": 0.1871, + "step": 63790 + }, + { + "epoch": 2.463415575890961, + "grad_norm": 0.4543861746788025, + "learning_rate": 3.577486904256277e-05, + "loss": 0.1566, + "step": 63800 + }, + { + "epoch": 2.4638016911849876, + "grad_norm": 2.4032862186431885, + "learning_rate": 3.574912802296099e-05, + "loss": 0.338, + "step": 63810 + }, + { + "epoch": 2.4641878064790146, + "grad_norm": 0.4644568860530853, + "learning_rate": 3.5723387003359206e-05, + "loss": 0.4347, + "step": 63820 + }, + { + "epoch": 2.4645739217730416, + "grad_norm": 2.190516233444214, + "learning_rate": 3.569764598375742e-05, + "loss": 0.1055, + "step": 63830 + }, + { + "epoch": 2.464960037067068, + "grad_norm": 0.18661876022815704, + "learning_rate": 3.567190496415563e-05, + "loss": 0.4476, + "step": 63840 + }, + { + "epoch": 2.465346152361095, + "grad_norm": 0.7265095114707947, + "learning_rate": 3.5646163944553846e-05, + "loss": 0.1121, + "step": 63850 + }, + { + "epoch": 2.4657322676551217, + "grad_norm": 0.2888505160808563, + "learning_rate": 3.562042292495206e-05, + "loss": 0.1208, + "step": 63860 + }, + { + "epoch": 2.4661183829491486, + "grad_norm": 0.9269798398017883, + "learning_rate": 3.559468190535027e-05, + "loss": 0.1376, + "step": 63870 + }, + { + "epoch": 2.466504498243175, + "grad_norm": 0.048346057534217834, + "learning_rate": 3.5568940885748486e-05, + "loss": 0.3057, + "step": 63880 + }, + { + "epoch": 2.466890613537202, + "grad_norm": 1.5524492263793945, + "learning_rate": 3.55431998661467e-05, + "loss": 0.077, + "step": 63890 + }, + { + "epoch": 2.467276728831229, + "grad_norm": 0.7234669327735901, + "learning_rate": 3.551745884654491e-05, + "loss": 0.1701, + "step": 63900 + }, + { + "epoch": 2.4676628441252557, + "grad_norm": 0.9384757280349731, + "learning_rate": 3.5491717826943126e-05, + "loss": 0.1085, + "step": 63910 + }, + { + "epoch": 2.4680489594192827, + "grad_norm": 1.6186715364456177, + "learning_rate": 3.546597680734134e-05, + "loss": 0.1556, + "step": 63920 + }, + { + "epoch": 2.4684350747133093, + "grad_norm": 1.2746638059616089, + "learning_rate": 3.544023578773956e-05, + "loss": 0.2306, + "step": 63930 + }, + { + "epoch": 2.4688211900073362, + "grad_norm": 2.7122621536254883, + "learning_rate": 3.5414494768137766e-05, + "loss": 0.1482, + "step": 63940 + }, + { + "epoch": 2.469207305301363, + "grad_norm": 1.9252265691757202, + "learning_rate": 3.538875374853598e-05, + "loss": 0.2278, + "step": 63950 + }, + { + "epoch": 2.4695934205953898, + "grad_norm": 1.1436023712158203, + "learning_rate": 3.53630127289342e-05, + "loss": 0.1518, + "step": 63960 + }, + { + "epoch": 2.4699795358894168, + "grad_norm": 0.40255314111709595, + "learning_rate": 3.533727170933241e-05, + "loss": 0.067, + "step": 63970 + }, + { + "epoch": 2.4703656511834433, + "grad_norm": 0.5762872695922852, + "learning_rate": 3.531153068973062e-05, + "loss": 0.2584, + "step": 63980 + }, + { + "epoch": 2.4707517664774703, + "grad_norm": 
0.977530837059021, + "learning_rate": 3.528578967012884e-05, + "loss": 0.113, + "step": 63990 + }, + { + "epoch": 2.471137881771497, + "grad_norm": 0.5977995991706848, + "learning_rate": 3.526004865052705e-05, + "loss": 0.2179, + "step": 64000 + }, + { + "epoch": 2.471523997065524, + "grad_norm": 0.33924487233161926, + "learning_rate": 3.523430763092526e-05, + "loss": 0.1102, + "step": 64010 + }, + { + "epoch": 2.4719101123595504, + "grad_norm": 1.6377263069152832, + "learning_rate": 3.520856661132347e-05, + "loss": 0.1848, + "step": 64020 + }, + { + "epoch": 2.4722962276535774, + "grad_norm": 0.3962012827396393, + "learning_rate": 3.518282559172169e-05, + "loss": 0.2144, + "step": 64030 + }, + { + "epoch": 2.4726823429476044, + "grad_norm": 2.4445207118988037, + "learning_rate": 3.5157084572119906e-05, + "loss": 0.0798, + "step": 64040 + }, + { + "epoch": 2.473068458241631, + "grad_norm": 1.1374174356460571, + "learning_rate": 3.513134355251811e-05, + "loss": 0.0999, + "step": 64050 + }, + { + "epoch": 2.473454573535658, + "grad_norm": 1.128503680229187, + "learning_rate": 3.510560253291633e-05, + "loss": 0.0811, + "step": 64060 + }, + { + "epoch": 2.4738406888296844, + "grad_norm": 0.7874391078948975, + "learning_rate": 3.5079861513314545e-05, + "loss": 0.1042, + "step": 64070 + }, + { + "epoch": 2.4742268041237114, + "grad_norm": 0.36891883611679077, + "learning_rate": 3.505412049371276e-05, + "loss": 0.2693, + "step": 64080 + }, + { + "epoch": 2.474612919417738, + "grad_norm": 1.4663618803024292, + "learning_rate": 3.502837947411097e-05, + "loss": 0.1469, + "step": 64090 + }, + { + "epoch": 2.474999034711765, + "grad_norm": 1.6910697221755981, + "learning_rate": 3.5002638454509185e-05, + "loss": 0.1506, + "step": 64100 + }, + { + "epoch": 2.475385150005792, + "grad_norm": 2.6914491653442383, + "learning_rate": 3.49768974349074e-05, + "loss": 0.1182, + "step": 64110 + }, + { + "epoch": 2.4757712652998185, + "grad_norm": 0.7562038898468018, + "learning_rate": 3.495115641530561e-05, + "loss": 0.1935, + "step": 64120 + }, + { + "epoch": 2.4761573805938455, + "grad_norm": 0.9019849300384521, + "learning_rate": 3.4925415395703825e-05, + "loss": 0.1456, + "step": 64130 + }, + { + "epoch": 2.476543495887872, + "grad_norm": 1.4131752252578735, + "learning_rate": 3.489967437610204e-05, + "loss": 0.1502, + "step": 64140 + }, + { + "epoch": 2.476929611181899, + "grad_norm": 1.1619501113891602, + "learning_rate": 3.487393335650025e-05, + "loss": 0.1559, + "step": 64150 + }, + { + "epoch": 2.4773157264759256, + "grad_norm": 0.38123640418052673, + "learning_rate": 3.4848192336898465e-05, + "loss": 0.1273, + "step": 64160 + }, + { + "epoch": 2.4777018417699526, + "grad_norm": 2.8837268352508545, + "learning_rate": 3.482245131729668e-05, + "loss": 0.1264, + "step": 64170 + }, + { + "epoch": 2.4780879570639796, + "grad_norm": 4.011549949645996, + "learning_rate": 3.47967102976949e-05, + "loss": 0.3243, + "step": 64180 + }, + { + "epoch": 2.478474072358006, + "grad_norm": 1.187883973121643, + "learning_rate": 3.4770969278093105e-05, + "loss": 0.1948, + "step": 64190 + }, + { + "epoch": 2.478860187652033, + "grad_norm": 1.1129032373428345, + "learning_rate": 3.474522825849132e-05, + "loss": 0.1226, + "step": 64200 + }, + { + "epoch": 2.4792463029460596, + "grad_norm": 0.3412770926952362, + "learning_rate": 3.471948723888954e-05, + "loss": 0.1282, + "step": 64210 + }, + { + "epoch": 2.4796324182400866, + "grad_norm": 1.1567891836166382, + "learning_rate": 3.4693746219287745e-05, + "loss": 0.2034, + "step": 
64220 + }, + { + "epoch": 2.480018533534113, + "grad_norm": 0.05797566846013069, + "learning_rate": 3.466800519968596e-05, + "loss": 0.1438, + "step": 64230 + }, + { + "epoch": 2.48040464882814, + "grad_norm": 0.8097573518753052, + "learning_rate": 3.464226418008418e-05, + "loss": 0.2128, + "step": 64240 + }, + { + "epoch": 2.4807907641221667, + "grad_norm": 1.383527398109436, + "learning_rate": 3.461652316048239e-05, + "loss": 0.2379, + "step": 64250 + }, + { + "epoch": 2.4811768794161937, + "grad_norm": 1.1194959878921509, + "learning_rate": 3.45907821408806e-05, + "loss": 0.3002, + "step": 64260 + }, + { + "epoch": 2.4815629947102202, + "grad_norm": 2.7457661628723145, + "learning_rate": 3.456504112127881e-05, + "loss": 0.2035, + "step": 64270 + }, + { + "epoch": 2.4819491100042472, + "grad_norm": 1.852704405784607, + "learning_rate": 3.453930010167703e-05, + "loss": 0.1338, + "step": 64280 + }, + { + "epoch": 2.482335225298274, + "grad_norm": 1.2378255128860474, + "learning_rate": 3.4513559082075245e-05, + "loss": 0.1083, + "step": 64290 + }, + { + "epoch": 2.4827213405923008, + "grad_norm": 1.2217592000961304, + "learning_rate": 3.448781806247345e-05, + "loss": 0.2626, + "step": 64300 + }, + { + "epoch": 2.4831074558863278, + "grad_norm": 0.8216773271560669, + "learning_rate": 3.446207704287167e-05, + "loss": 0.1455, + "step": 64310 + }, + { + "epoch": 2.4834935711803543, + "grad_norm": 1.122636318206787, + "learning_rate": 3.4436336023269885e-05, + "loss": 0.2029, + "step": 64320 + }, + { + "epoch": 2.4838796864743813, + "grad_norm": 0.4117072820663452, + "learning_rate": 3.441059500366809e-05, + "loss": 0.2123, + "step": 64330 + }, + { + "epoch": 2.484265801768408, + "grad_norm": 0.04222499951720238, + "learning_rate": 3.438485398406631e-05, + "loss": 0.2791, + "step": 64340 + }, + { + "epoch": 2.484651917062435, + "grad_norm": 1.2753795385360718, + "learning_rate": 3.4359112964464525e-05, + "loss": 0.213, + "step": 64350 + }, + { + "epoch": 2.485038032356462, + "grad_norm": 0.8051297068595886, + "learning_rate": 3.433337194486274e-05, + "loss": 0.2523, + "step": 64360 + }, + { + "epoch": 2.4854241476504884, + "grad_norm": 2.218684196472168, + "learning_rate": 3.430763092526095e-05, + "loss": 0.2185, + "step": 64370 + }, + { + "epoch": 2.4858102629445153, + "grad_norm": 2.2917842864990234, + "learning_rate": 3.4281889905659165e-05, + "loss": 0.1521, + "step": 64380 + }, + { + "epoch": 2.486196378238542, + "grad_norm": 1.0776972770690918, + "learning_rate": 3.425614888605738e-05, + "loss": 0.2217, + "step": 64390 + }, + { + "epoch": 2.486582493532569, + "grad_norm": 1.0344847440719604, + "learning_rate": 3.423040786645559e-05, + "loss": 0.1871, + "step": 64400 + }, + { + "epoch": 2.4869686088265954, + "grad_norm": 0.6094161868095398, + "learning_rate": 3.4204666846853805e-05, + "loss": 0.1073, + "step": 64410 + }, + { + "epoch": 2.4873547241206224, + "grad_norm": 0.8258315920829773, + "learning_rate": 3.417892582725202e-05, + "loss": 0.1897, + "step": 64420 + }, + { + "epoch": 2.4877408394146494, + "grad_norm": 0.6779630184173584, + "learning_rate": 3.415318480765023e-05, + "loss": 0.0847, + "step": 64430 + }, + { + "epoch": 2.488126954708676, + "grad_norm": 1.7514374256134033, + "learning_rate": 3.4127443788048445e-05, + "loss": 0.2243, + "step": 64440 + }, + { + "epoch": 2.488513070002703, + "grad_norm": 0.5367060899734497, + "learning_rate": 3.410170276844666e-05, + "loss": 0.3064, + "step": 64450 + }, + { + "epoch": 2.4888991852967295, + "grad_norm": 1.0501765012741089, + 
"learning_rate": 3.407596174884488e-05, + "loss": 0.1908, + "step": 64460 + }, + { + "epoch": 2.4892853005907565, + "grad_norm": 0.8501892685890198, + "learning_rate": 3.4050220729243085e-05, + "loss": 0.1238, + "step": 64470 + }, + { + "epoch": 2.489671415884783, + "grad_norm": 0.2177915871143341, + "learning_rate": 3.40244797096413e-05, + "loss": 0.114, + "step": 64480 + }, + { + "epoch": 2.49005753117881, + "grad_norm": 0.04332759603857994, + "learning_rate": 3.399873869003952e-05, + "loss": 0.2304, + "step": 64490 + }, + { + "epoch": 2.490443646472837, + "grad_norm": 0.6862795352935791, + "learning_rate": 3.397299767043773e-05, + "loss": 0.0624, + "step": 64500 + }, + { + "epoch": 2.4908297617668635, + "grad_norm": 0.7046298980712891, + "learning_rate": 3.394725665083594e-05, + "loss": 0.1957, + "step": 64510 + }, + { + "epoch": 2.4912158770608905, + "grad_norm": 0.5339323878288269, + "learning_rate": 3.392151563123416e-05, + "loss": 0.2889, + "step": 64520 + }, + { + "epoch": 2.491601992354917, + "grad_norm": 0.28813856840133667, + "learning_rate": 3.389577461163237e-05, + "loss": 0.0679, + "step": 64530 + }, + { + "epoch": 2.491988107648944, + "grad_norm": 0.6499680876731873, + "learning_rate": 3.387003359203058e-05, + "loss": 0.1949, + "step": 64540 + }, + { + "epoch": 2.4923742229429706, + "grad_norm": 0.26736152172088623, + "learning_rate": 3.384429257242879e-05, + "loss": 0.1647, + "step": 64550 + }, + { + "epoch": 2.4927603382369976, + "grad_norm": 2.9154460430145264, + "learning_rate": 3.381855155282701e-05, + "loss": 0.2578, + "step": 64560 + }, + { + "epoch": 2.4931464535310246, + "grad_norm": 0.23644079267978668, + "learning_rate": 3.3792810533225225e-05, + "loss": 0.1555, + "step": 64570 + }, + { + "epoch": 2.493532568825051, + "grad_norm": 1.459173560142517, + "learning_rate": 3.376706951362343e-05, + "loss": 0.1637, + "step": 64580 + }, + { + "epoch": 2.493918684119078, + "grad_norm": 0.5812413692474365, + "learning_rate": 3.374132849402165e-05, + "loss": 0.1067, + "step": 64590 + }, + { + "epoch": 2.4943047994131047, + "grad_norm": 1.358742594718933, + "learning_rate": 3.3715587474419865e-05, + "loss": 0.2211, + "step": 64600 + }, + { + "epoch": 2.4946909147071317, + "grad_norm": 1.94925856590271, + "learning_rate": 3.368984645481808e-05, + "loss": 0.4372, + "step": 64610 + }, + { + "epoch": 2.495077030001158, + "grad_norm": 0.4507911801338196, + "learning_rate": 3.366410543521629e-05, + "loss": 0.1736, + "step": 64620 + }, + { + "epoch": 2.495463145295185, + "grad_norm": 0.7500709891319275, + "learning_rate": 3.3638364415614504e-05, + "loss": 0.2025, + "step": 64630 + }, + { + "epoch": 2.4958492605892117, + "grad_norm": 2.6973438262939453, + "learning_rate": 3.361262339601272e-05, + "loss": 0.1173, + "step": 64640 + }, + { + "epoch": 2.4962353758832387, + "grad_norm": 2.0454306602478027, + "learning_rate": 3.358688237641093e-05, + "loss": 0.1304, + "step": 64650 + }, + { + "epoch": 2.4966214911772653, + "grad_norm": 2.469876527786255, + "learning_rate": 3.3561141356809144e-05, + "loss": 0.1758, + "step": 64660 + }, + { + "epoch": 2.4970076064712923, + "grad_norm": 0.8067110180854797, + "learning_rate": 3.353540033720736e-05, + "loss": 0.0747, + "step": 64670 + }, + { + "epoch": 2.4973937217653193, + "grad_norm": 1.2773382663726807, + "learning_rate": 3.350965931760557e-05, + "loss": 0.1151, + "step": 64680 + }, + { + "epoch": 2.497779837059346, + "grad_norm": 0.1975592076778412, + "learning_rate": 3.3483918298003784e-05, + "loss": 0.2104, + "step": 64690 + }, + { + 
"epoch": 2.498165952353373, + "grad_norm": 0.6423165202140808, + "learning_rate": 3.3458177278402e-05, + "loss": 0.1885, + "step": 64700 + }, + { + "epoch": 2.4985520676473993, + "grad_norm": 1.818458914756775, + "learning_rate": 3.343243625880022e-05, + "loss": 0.1506, + "step": 64710 + }, + { + "epoch": 2.4989381829414263, + "grad_norm": 0.30381113290786743, + "learning_rate": 3.3406695239198424e-05, + "loss": 0.1791, + "step": 64720 + }, + { + "epoch": 2.499324298235453, + "grad_norm": 2.867393732070923, + "learning_rate": 3.338095421959664e-05, + "loss": 0.1854, + "step": 64730 + }, + { + "epoch": 2.49971041352948, + "grad_norm": 0.22897863388061523, + "learning_rate": 3.335521319999486e-05, + "loss": 0.1313, + "step": 64740 + }, + { + "epoch": 2.500096528823507, + "grad_norm": 2.5336554050445557, + "learning_rate": 3.332947218039307e-05, + "loss": 0.1814, + "step": 64750 + }, + { + "epoch": 2.5004826441175334, + "grad_norm": 0.10779833793640137, + "learning_rate": 3.330373116079128e-05, + "loss": 0.1563, + "step": 64760 + }, + { + "epoch": 2.5008687594115604, + "grad_norm": 1.1376827955245972, + "learning_rate": 3.32779901411895e-05, + "loss": 0.1495, + "step": 64770 + }, + { + "epoch": 2.501254874705587, + "grad_norm": 0.7156823873519897, + "learning_rate": 3.325224912158771e-05, + "loss": 0.2531, + "step": 64780 + }, + { + "epoch": 2.501640989999614, + "grad_norm": 0.7690051198005676, + "learning_rate": 3.322650810198592e-05, + "loss": 0.1204, + "step": 64790 + }, + { + "epoch": 2.5020271052936405, + "grad_norm": 0.0718945860862732, + "learning_rate": 3.320076708238413e-05, + "loss": 0.1044, + "step": 64800 + }, + { + "epoch": 2.5024132205876675, + "grad_norm": 0.12632794678211212, + "learning_rate": 3.317502606278235e-05, + "loss": 0.1007, + "step": 64810 + }, + { + "epoch": 2.5027993358816945, + "grad_norm": 0.9840032458305359, + "learning_rate": 3.3149285043180564e-05, + "loss": 0.2399, + "step": 64820 + }, + { + "epoch": 2.503185451175721, + "grad_norm": 0.3271815776824951, + "learning_rate": 3.312354402357877e-05, + "loss": 0.117, + "step": 64830 + }, + { + "epoch": 2.503571566469748, + "grad_norm": 2.1266753673553467, + "learning_rate": 3.309780300397699e-05, + "loss": 0.1819, + "step": 64840 + }, + { + "epoch": 2.5039576817637745, + "grad_norm": 0.5041390061378479, + "learning_rate": 3.3072061984375204e-05, + "loss": 0.0579, + "step": 64850 + }, + { + "epoch": 2.5043437970578015, + "grad_norm": 0.2855200469493866, + "learning_rate": 3.304632096477341e-05, + "loss": 0.1814, + "step": 64860 + }, + { + "epoch": 2.504729912351828, + "grad_norm": 1.9890060424804688, + "learning_rate": 3.302057994517163e-05, + "loss": 0.3038, + "step": 64870 + }, + { + "epoch": 2.505116027645855, + "grad_norm": 0.5688171982765198, + "learning_rate": 3.2994838925569844e-05, + "loss": 0.0947, + "step": 64880 + }, + { + "epoch": 2.505502142939882, + "grad_norm": 3.265097141265869, + "learning_rate": 3.296909790596806e-05, + "loss": 0.1722, + "step": 64890 + }, + { + "epoch": 2.5058882582339086, + "grad_norm": 1.0011316537857056, + "learning_rate": 3.294335688636627e-05, + "loss": 0.217, + "step": 64900 + }, + { + "epoch": 2.5062743735279356, + "grad_norm": 2.054866075515747, + "learning_rate": 3.2917615866764484e-05, + "loss": 0.1862, + "step": 64910 + }, + { + "epoch": 2.506660488821962, + "grad_norm": 1.2730998992919922, + "learning_rate": 3.28918748471627e-05, + "loss": 0.1023, + "step": 64920 + }, + { + "epoch": 2.507046604115989, + "grad_norm": 1.5231776237487793, + "learning_rate": 
3.286613382756091e-05, + "loss": 0.1922, + "step": 64930 + }, + { + "epoch": 2.5074327194100157, + "grad_norm": 0.7604933977127075, + "learning_rate": 3.2840392807959124e-05, + "loss": 0.1329, + "step": 64940 + }, + { + "epoch": 2.5078188347040427, + "grad_norm": 1.243506669998169, + "learning_rate": 3.281465178835734e-05, + "loss": 0.2142, + "step": 64950 + }, + { + "epoch": 2.5082049499980696, + "grad_norm": 3.0195231437683105, + "learning_rate": 3.278891076875556e-05, + "loss": 0.3474, + "step": 64960 + }, + { + "epoch": 2.508591065292096, + "grad_norm": 2.2186105251312256, + "learning_rate": 3.2763169749153764e-05, + "loss": 0.2125, + "step": 64970 + }, + { + "epoch": 2.508977180586123, + "grad_norm": 1.1483558416366577, + "learning_rate": 3.273742872955198e-05, + "loss": 0.1427, + "step": 64980 + }, + { + "epoch": 2.5093632958801497, + "grad_norm": 1.3046457767486572, + "learning_rate": 3.27116877099502e-05, + "loss": 0.0859, + "step": 64990 + }, + { + "epoch": 2.5097494111741767, + "grad_norm": 0.04392600804567337, + "learning_rate": 3.2685946690348404e-05, + "loss": 0.146, + "step": 65000 + }, + { + "epoch": 2.5101355264682033, + "grad_norm": 0.8138188123703003, + "learning_rate": 3.266020567074662e-05, + "loss": 0.1809, + "step": 65010 + }, + { + "epoch": 2.5105216417622302, + "grad_norm": 2.0875182151794434, + "learning_rate": 3.263446465114484e-05, + "loss": 0.1925, + "step": 65020 + }, + { + "epoch": 2.5109077570562572, + "grad_norm": 0.4890693426132202, + "learning_rate": 3.260872363154305e-05, + "loss": 0.1189, + "step": 65030 + }, + { + "epoch": 2.511293872350284, + "grad_norm": 0.21475251019001007, + "learning_rate": 3.258298261194126e-05, + "loss": 0.2583, + "step": 65040 + }, + { + "epoch": 2.5116799876443103, + "grad_norm": 1.272985577583313, + "learning_rate": 3.255724159233947e-05, + "loss": 0.3891, + "step": 65050 + }, + { + "epoch": 2.5120661029383373, + "grad_norm": 0.8311867713928223, + "learning_rate": 3.253150057273769e-05, + "loss": 0.1536, + "step": 65060 + }, + { + "epoch": 2.5124522182323643, + "grad_norm": 2.999817371368408, + "learning_rate": 3.2505759553135904e-05, + "loss": 0.2331, + "step": 65070 + }, + { + "epoch": 2.512838333526391, + "grad_norm": 0.4586697518825531, + "learning_rate": 3.248001853353411e-05, + "loss": 0.1496, + "step": 65080 + }, + { + "epoch": 2.513224448820418, + "grad_norm": 0.38037627935409546, + "learning_rate": 3.245427751393233e-05, + "loss": 0.1769, + "step": 65090 + }, + { + "epoch": 2.513610564114445, + "grad_norm": 0.9884114265441895, + "learning_rate": 3.2428536494330544e-05, + "loss": 0.157, + "step": 65100 + }, + { + "epoch": 2.5139966794084714, + "grad_norm": 2.8576581478118896, + "learning_rate": 3.240279547472875e-05, + "loss": 0.1484, + "step": 65110 + }, + { + "epoch": 2.514382794702498, + "grad_norm": 1.7361352443695068, + "learning_rate": 3.237705445512697e-05, + "loss": 0.1315, + "step": 65120 + }, + { + "epoch": 2.514768909996525, + "grad_norm": 1.4289588928222656, + "learning_rate": 3.2351313435525184e-05, + "loss": 0.1235, + "step": 65130 + }, + { + "epoch": 2.515155025290552, + "grad_norm": 2.256578207015991, + "learning_rate": 3.23255724159234e-05, + "loss": 0.1026, + "step": 65140 + }, + { + "epoch": 2.5155411405845785, + "grad_norm": 2.3927180767059326, + "learning_rate": 3.229983139632161e-05, + "loss": 0.1264, + "step": 65150 + }, + { + "epoch": 2.5159272558786054, + "grad_norm": 0.7600728869438171, + "learning_rate": 3.2274090376719824e-05, + "loss": 0.1091, + "step": 65160 + }, + { + "epoch": 
2.516313371172632, + "grad_norm": 1.196343183517456, + "learning_rate": 3.224834935711804e-05, + "loss": 0.1419, + "step": 65170 + }, + { + "epoch": 2.516699486466659, + "grad_norm": 1.8273401260375977, + "learning_rate": 3.222260833751625e-05, + "loss": 0.1519, + "step": 65180 + }, + { + "epoch": 2.5170856017606855, + "grad_norm": 0.586053192615509, + "learning_rate": 3.2196867317914463e-05, + "loss": 0.1348, + "step": 65190 + }, + { + "epoch": 2.5174717170547125, + "grad_norm": 0.34410399198532104, + "learning_rate": 3.217112629831268e-05, + "loss": 0.2406, + "step": 65200 + }, + { + "epoch": 2.5178578323487395, + "grad_norm": 1.9117828607559204, + "learning_rate": 3.214538527871089e-05, + "loss": 0.1669, + "step": 65210 + }, + { + "epoch": 2.518243947642766, + "grad_norm": 0.3416088819503784, + "learning_rate": 3.2119644259109103e-05, + "loss": 0.0738, + "step": 65220 + }, + { + "epoch": 2.518630062936793, + "grad_norm": 2.7562408447265625, + "learning_rate": 3.209390323950732e-05, + "loss": 0.2793, + "step": 65230 + }, + { + "epoch": 2.5190161782308196, + "grad_norm": 0.18978220224380493, + "learning_rate": 3.206816221990554e-05, + "loss": 0.2145, + "step": 65240 + }, + { + "epoch": 2.5194022935248466, + "grad_norm": 1.9624252319335938, + "learning_rate": 3.204242120030374e-05, + "loss": 0.2203, + "step": 65250 + }, + { + "epoch": 2.519788408818873, + "grad_norm": 1.9377204179763794, + "learning_rate": 3.201668018070196e-05, + "loss": 0.1587, + "step": 65260 + }, + { + "epoch": 2.5201745241129, + "grad_norm": 0.5279117226600647, + "learning_rate": 3.199093916110018e-05, + "loss": 0.1874, + "step": 65270 + }, + { + "epoch": 2.520560639406927, + "grad_norm": 1.3398821353912354, + "learning_rate": 3.196519814149839e-05, + "loss": 0.1258, + "step": 65280 + }, + { + "epoch": 2.5209467547009536, + "grad_norm": 3.5301899909973145, + "learning_rate": 3.1939457121896597e-05, + "loss": 0.2119, + "step": 65290 + }, + { + "epoch": 2.5213328699949806, + "grad_norm": 1.9934186935424805, + "learning_rate": 3.191371610229482e-05, + "loss": 0.1813, + "step": 65300 + }, + { + "epoch": 2.521718985289007, + "grad_norm": 0.049756068736314774, + "learning_rate": 3.188797508269303e-05, + "loss": 0.1224, + "step": 65310 + }, + { + "epoch": 2.522105100583034, + "grad_norm": 0.14521420001983643, + "learning_rate": 3.1862234063091237e-05, + "loss": 0.2149, + "step": 65320 + }, + { + "epoch": 2.5224912158770607, + "grad_norm": 1.602318525314331, + "learning_rate": 3.183649304348945e-05, + "loss": 0.2355, + "step": 65330 + }, + { + "epoch": 2.5228773311710877, + "grad_norm": 0.8942916989326477, + "learning_rate": 3.181075202388767e-05, + "loss": 0.0873, + "step": 65340 + }, + { + "epoch": 2.5232634464651147, + "grad_norm": 1.184981107711792, + "learning_rate": 3.178501100428588e-05, + "loss": 0.2573, + "step": 65350 + }, + { + "epoch": 2.5236495617591412, + "grad_norm": 1.4945077896118164, + "learning_rate": 3.175926998468409e-05, + "loss": 0.131, + "step": 65360 + }, + { + "epoch": 2.5240356770531682, + "grad_norm": 0.2049487978219986, + "learning_rate": 3.173352896508231e-05, + "loss": 0.0353, + "step": 65370 + }, + { + "epoch": 2.5244217923471948, + "grad_norm": 0.9006851315498352, + "learning_rate": 3.170778794548052e-05, + "loss": 0.1341, + "step": 65380 + }, + { + "epoch": 2.5248079076412218, + "grad_norm": 0.08327820897102356, + "learning_rate": 3.1682046925878736e-05, + "loss": 0.1907, + "step": 65390 + }, + { + "epoch": 2.5251940229352483, + "grad_norm": 0.19701172411441803, + "learning_rate": 
3.165630590627695e-05, + "loss": 0.131, + "step": 65400 + }, + { + "epoch": 2.5255801382292753, + "grad_norm": 2.7013745307922363, + "learning_rate": 3.163056488667516e-05, + "loss": 0.3517, + "step": 65410 + }, + { + "epoch": 2.5259662535233023, + "grad_norm": 0.20807845890522003, + "learning_rate": 3.1604823867073376e-05, + "loss": 0.0998, + "step": 65420 + }, + { + "epoch": 2.526352368817329, + "grad_norm": 0.6886903643608093, + "learning_rate": 3.157908284747159e-05, + "loss": 0.1197, + "step": 65430 + }, + { + "epoch": 2.5267384841113554, + "grad_norm": 0.42514756321907043, + "learning_rate": 3.15533418278698e-05, + "loss": 0.1692, + "step": 65440 + }, + { + "epoch": 2.5271245994053824, + "grad_norm": 0.26355358958244324, + "learning_rate": 3.1527600808268016e-05, + "loss": 0.121, + "step": 65450 + }, + { + "epoch": 2.5275107146994094, + "grad_norm": 0.14379052817821503, + "learning_rate": 3.150185978866623e-05, + "loss": 0.2221, + "step": 65460 + }, + { + "epoch": 2.527896829993436, + "grad_norm": 0.0977015420794487, + "learning_rate": 3.147611876906444e-05, + "loss": 0.158, + "step": 65470 + }, + { + "epoch": 2.528282945287463, + "grad_norm": 0.6228841543197632, + "learning_rate": 3.1450377749462656e-05, + "loss": 0.2043, + "step": 65480 + }, + { + "epoch": 2.52866906058149, + "grad_norm": 0.10137589275836945, + "learning_rate": 3.1424636729860876e-05, + "loss": 0.2369, + "step": 65490 + }, + { + "epoch": 2.5290551758755164, + "grad_norm": 0.20083734393119812, + "learning_rate": 3.139889571025908e-05, + "loss": 0.1989, + "step": 65500 + }, + { + "epoch": 2.529441291169543, + "grad_norm": 0.35362812876701355, + "learning_rate": 3.1373154690657296e-05, + "loss": 0.2544, + "step": 65510 + }, + { + "epoch": 2.52982740646357, + "grad_norm": 0.8700111508369446, + "learning_rate": 3.1347413671055516e-05, + "loss": 0.112, + "step": 65520 + }, + { + "epoch": 2.530213521757597, + "grad_norm": 0.0802641287446022, + "learning_rate": 3.132167265145372e-05, + "loss": 0.0596, + "step": 65530 + }, + { + "epoch": 2.5305996370516235, + "grad_norm": 1.380710244178772, + "learning_rate": 3.1295931631851936e-05, + "loss": 0.1295, + "step": 65540 + }, + { + "epoch": 2.5309857523456505, + "grad_norm": 0.9784635901451111, + "learning_rate": 3.1270190612250156e-05, + "loss": 0.2425, + "step": 65550 + }, + { + "epoch": 2.5313718676396775, + "grad_norm": 0.09004099667072296, + "learning_rate": 3.124444959264837e-05, + "loss": 0.0659, + "step": 65560 + }, + { + "epoch": 2.531757982933704, + "grad_norm": 0.33668985962867737, + "learning_rate": 3.1218708573046576e-05, + "loss": 0.1979, + "step": 65570 + }, + { + "epoch": 2.5321440982277306, + "grad_norm": 0.39336487650871277, + "learning_rate": 3.119296755344479e-05, + "loss": 0.17, + "step": 65580 + }, + { + "epoch": 2.5325302135217576, + "grad_norm": 0.6237707734107971, + "learning_rate": 3.116722653384301e-05, + "loss": 0.0943, + "step": 65590 + }, + { + "epoch": 2.5329163288157845, + "grad_norm": 0.1977011263370514, + "learning_rate": 3.114148551424122e-05, + "loss": 0.1265, + "step": 65600 + }, + { + "epoch": 2.533302444109811, + "grad_norm": 0.7314585447311401, + "learning_rate": 3.111574449463943e-05, + "loss": 0.1159, + "step": 65610 + }, + { + "epoch": 2.533688559403838, + "grad_norm": 1.6389861106872559, + "learning_rate": 3.109000347503765e-05, + "loss": 0.1515, + "step": 65620 + }, + { + "epoch": 2.5340746746978646, + "grad_norm": 0.25401124358177185, + "learning_rate": 3.106426245543586e-05, + "loss": 0.2054, + "step": 65630 + }, + { + "epoch": 
2.5344607899918916, + "grad_norm": 0.7747787833213806, + "learning_rate": 3.103852143583407e-05, + "loss": 0.0944, + "step": 65640 + }, + { + "epoch": 2.534846905285918, + "grad_norm": 2.0066640377044678, + "learning_rate": 3.101278041623229e-05, + "loss": 0.1223, + "step": 65650 + }, + { + "epoch": 2.535233020579945, + "grad_norm": 0.3246127665042877, + "learning_rate": 3.09870393966305e-05, + "loss": 0.2998, + "step": 65660 + }, + { + "epoch": 2.535619135873972, + "grad_norm": 0.6192534565925598, + "learning_rate": 3.0961298377028716e-05, + "loss": 0.0864, + "step": 65670 + }, + { + "epoch": 2.5360052511679987, + "grad_norm": 1.500116229057312, + "learning_rate": 3.093555735742693e-05, + "loss": 0.1384, + "step": 65680 + }, + { + "epoch": 2.5363913664620257, + "grad_norm": 1.661163091659546, + "learning_rate": 3.090981633782514e-05, + "loss": 0.1907, + "step": 65690 + }, + { + "epoch": 2.536777481756052, + "grad_norm": 0.46657463908195496, + "learning_rate": 3.0884075318223356e-05, + "loss": 0.2575, + "step": 65700 + }, + { + "epoch": 2.537163597050079, + "grad_norm": 1.0207927227020264, + "learning_rate": 3.085833429862157e-05, + "loss": 0.1799, + "step": 65710 + }, + { + "epoch": 2.5375497123441058, + "grad_norm": 0.5044147372245789, + "learning_rate": 3.083259327901978e-05, + "loss": 0.2646, + "step": 65720 + }, + { + "epoch": 2.5379358276381327, + "grad_norm": 0.6330855488777161, + "learning_rate": 3.0806852259417996e-05, + "loss": 0.2243, + "step": 65730 + }, + { + "epoch": 2.5383219429321597, + "grad_norm": 0.12088367342948914, + "learning_rate": 3.0781111239816216e-05, + "loss": 0.1685, + "step": 65740 + }, + { + "epoch": 2.5387080582261863, + "grad_norm": 1.5174821615219116, + "learning_rate": 3.075537022021442e-05, + "loss": 0.081, + "step": 65750 + }, + { + "epoch": 2.5390941735202133, + "grad_norm": 0.5644610524177551, + "learning_rate": 3.0729629200612636e-05, + "loss": 0.1046, + "step": 65760 + }, + { + "epoch": 2.53948028881424, + "grad_norm": 1.9365503787994385, + "learning_rate": 3.0703888181010856e-05, + "loss": 0.1162, + "step": 65770 + }, + { + "epoch": 2.539866404108267, + "grad_norm": 0.46223533153533936, + "learning_rate": 3.067814716140906e-05, + "loss": 0.1218, + "step": 65780 + }, + { + "epoch": 2.5402525194022934, + "grad_norm": 2.0017831325531006, + "learning_rate": 3.0652406141807276e-05, + "loss": 0.2885, + "step": 65790 + }, + { + "epoch": 2.5406386346963203, + "grad_norm": 1.7874171733856201, + "learning_rate": 3.0626665122205496e-05, + "loss": 0.1561, + "step": 65800 + }, + { + "epoch": 2.5410247499903473, + "grad_norm": 0.6481704115867615, + "learning_rate": 3.060092410260371e-05, + "loss": 0.188, + "step": 65810 + }, + { + "epoch": 2.541410865284374, + "grad_norm": 1.0805027484893799, + "learning_rate": 3.0575183083001916e-05, + "loss": 0.1335, + "step": 65820 + }, + { + "epoch": 2.541796980578401, + "grad_norm": 2.5876686573028564, + "learning_rate": 3.0549442063400136e-05, + "loss": 0.0836, + "step": 65830 + }, + { + "epoch": 2.5421830958724274, + "grad_norm": 0.9550105929374695, + "learning_rate": 3.052370104379835e-05, + "loss": 0.2, + "step": 65840 + }, + { + "epoch": 2.5425692111664544, + "grad_norm": 0.48512884974479675, + "learning_rate": 3.049796002419656e-05, + "loss": 0.089, + "step": 65850 + }, + { + "epoch": 2.542955326460481, + "grad_norm": 0.8065679669380188, + "learning_rate": 3.0472219004594772e-05, + "loss": 0.1408, + "step": 65860 + }, + { + "epoch": 2.543341441754508, + "grad_norm": 0.32501792907714844, + "learning_rate": 
3.044647798499299e-05, + "loss": 0.3102, + "step": 65870 + }, + { + "epoch": 2.543727557048535, + "grad_norm": 0.521720826625824, + "learning_rate": 3.04207369653912e-05, + "loss": 0.1617, + "step": 65880 + }, + { + "epoch": 2.5441136723425615, + "grad_norm": 0.4469434320926666, + "learning_rate": 3.0394995945789412e-05, + "loss": 0.0619, + "step": 65890 + }, + { + "epoch": 2.544499787636588, + "grad_norm": 1.0342704057693481, + "learning_rate": 3.036925492618763e-05, + "loss": 0.1, + "step": 65900 + }, + { + "epoch": 2.544885902930615, + "grad_norm": 0.5880559086799622, + "learning_rate": 3.0343513906585842e-05, + "loss": 0.1031, + "step": 65910 + }, + { + "epoch": 2.545272018224642, + "grad_norm": 0.065493643283844, + "learning_rate": 3.0317772886984052e-05, + "loss": 0.1029, + "step": 65920 + }, + { + "epoch": 2.5456581335186685, + "grad_norm": 0.6159729361534119, + "learning_rate": 3.029203186738227e-05, + "loss": 0.0852, + "step": 65930 + }, + { + "epoch": 2.5460442488126955, + "grad_norm": 1.618788242340088, + "learning_rate": 3.0266290847780482e-05, + "loss": 0.2189, + "step": 65940 + }, + { + "epoch": 2.5464303641067225, + "grad_norm": 4.030269145965576, + "learning_rate": 3.0240549828178692e-05, + "loss": 0.3216, + "step": 65950 + }, + { + "epoch": 2.546816479400749, + "grad_norm": 1.813103437423706, + "learning_rate": 3.0214808808576912e-05, + "loss": 0.2134, + "step": 65960 + }, + { + "epoch": 2.5472025946947756, + "grad_norm": 0.5134888887405396, + "learning_rate": 3.0189067788975122e-05, + "loss": 0.1133, + "step": 65970 + }, + { + "epoch": 2.5475887099888026, + "grad_norm": 0.027485152706503868, + "learning_rate": 3.0163326769373335e-05, + "loss": 0.0954, + "step": 65980 + }, + { + "epoch": 2.5479748252828296, + "grad_norm": 1.0274910926818848, + "learning_rate": 3.0137585749771552e-05, + "loss": 0.3188, + "step": 65990 + }, + { + "epoch": 2.548360940576856, + "grad_norm": 0.556398332118988, + "learning_rate": 3.0111844730169762e-05, + "loss": 0.0638, + "step": 66000 + }, + { + "epoch": 2.548747055870883, + "grad_norm": 0.16730284690856934, + "learning_rate": 3.0086103710567975e-05, + "loss": 0.1002, + "step": 66010 + }, + { + "epoch": 2.54913317116491, + "grad_norm": 0.401558518409729, + "learning_rate": 3.0060362690966192e-05, + "loss": 0.1537, + "step": 66020 + }, + { + "epoch": 2.5495192864589367, + "grad_norm": 0.8240194916725159, + "learning_rate": 3.0034621671364405e-05, + "loss": 0.2183, + "step": 66030 + }, + { + "epoch": 2.549905401752963, + "grad_norm": 0.34985408186912537, + "learning_rate": 3.0008880651762615e-05, + "loss": 0.2507, + "step": 66040 + }, + { + "epoch": 2.55029151704699, + "grad_norm": 1.4406944513320923, + "learning_rate": 2.9983139632160835e-05, + "loss": 0.2086, + "step": 66050 + }, + { + "epoch": 2.550677632341017, + "grad_norm": 1.0392922163009644, + "learning_rate": 2.9957398612559045e-05, + "loss": 0.2622, + "step": 66060 + }, + { + "epoch": 2.5510637476350437, + "grad_norm": 0.07720334827899933, + "learning_rate": 2.993165759295726e-05, + "loss": 0.2646, + "step": 66070 + }, + { + "epoch": 2.5514498629290707, + "grad_norm": 1.9083623886108398, + "learning_rate": 2.9905916573355475e-05, + "loss": 0.1832, + "step": 66080 + }, + { + "epoch": 2.5518359782230973, + "grad_norm": 0.7062809467315674, + "learning_rate": 2.9880175553753685e-05, + "loss": 0.2515, + "step": 66090 + }, + { + "epoch": 2.5522220935171243, + "grad_norm": 2.6205830574035645, + "learning_rate": 2.98544345341519e-05, + "loss": 0.1872, + "step": 66100 + }, + { + "epoch": 
2.552608208811151, + "grad_norm": 0.6591269373893738, + "learning_rate": 2.982869351455011e-05, + "loss": 0.2101, + "step": 66110 + }, + { + "epoch": 2.552994324105178, + "grad_norm": 0.11720894277095795, + "learning_rate": 2.980295249494833e-05, + "loss": 0.0773, + "step": 66120 + }, + { + "epoch": 2.553380439399205, + "grad_norm": 0.4956444799900055, + "learning_rate": 2.977721147534654e-05, + "loss": 0.0854, + "step": 66130 + }, + { + "epoch": 2.5537665546932313, + "grad_norm": 0.11073987185955048, + "learning_rate": 2.9751470455744752e-05, + "loss": 0.0968, + "step": 66140 + }, + { + "epoch": 2.5541526699872583, + "grad_norm": 0.1464674174785614, + "learning_rate": 2.972572943614297e-05, + "loss": 0.1609, + "step": 66150 + }, + { + "epoch": 2.554538785281285, + "grad_norm": 1.9735543727874756, + "learning_rate": 2.9699988416541182e-05, + "loss": 0.2884, + "step": 66160 + }, + { + "epoch": 2.554924900575312, + "grad_norm": 0.5370622873306274, + "learning_rate": 2.9674247396939392e-05, + "loss": 0.1345, + "step": 66170 + }, + { + "epoch": 2.5553110158693384, + "grad_norm": 1.845221996307373, + "learning_rate": 2.964850637733761e-05, + "loss": 0.1761, + "step": 66180 + }, + { + "epoch": 2.5556971311633654, + "grad_norm": 0.051780425012111664, + "learning_rate": 2.9622765357735822e-05, + "loss": 0.1886, + "step": 66190 + }, + { + "epoch": 2.5560832464573924, + "grad_norm": 2.163306713104248, + "learning_rate": 2.959702433813403e-05, + "loss": 0.2559, + "step": 66200 + }, + { + "epoch": 2.556469361751419, + "grad_norm": 1.9034422636032104, + "learning_rate": 2.9571283318532252e-05, + "loss": 0.1655, + "step": 66210 + }, + { + "epoch": 2.556855477045446, + "grad_norm": 0.1645793616771698, + "learning_rate": 2.954554229893046e-05, + "loss": 0.0944, + "step": 66220 + }, + { + "epoch": 2.5572415923394725, + "grad_norm": 0.0783633440732956, + "learning_rate": 2.9519801279328675e-05, + "loss": 0.1487, + "step": 66230 + }, + { + "epoch": 2.5576277076334994, + "grad_norm": 1.8718386888504028, + "learning_rate": 2.949406025972689e-05, + "loss": 0.1607, + "step": 66240 + }, + { + "epoch": 2.558013822927526, + "grad_norm": 0.23518329858779907, + "learning_rate": 2.94683192401251e-05, + "loss": 0.0665, + "step": 66250 + }, + { + "epoch": 2.558399938221553, + "grad_norm": 2.0301125049591064, + "learning_rate": 2.9442578220523315e-05, + "loss": 0.2067, + "step": 66260 + }, + { + "epoch": 2.55878605351558, + "grad_norm": 0.8637131452560425, + "learning_rate": 2.941683720092153e-05, + "loss": 0.1247, + "step": 66270 + }, + { + "epoch": 2.5591721688096065, + "grad_norm": 3.191856861114502, + "learning_rate": 2.9391096181319745e-05, + "loss": 0.1793, + "step": 66280 + }, + { + "epoch": 2.5595582841036335, + "grad_norm": 2.5240116119384766, + "learning_rate": 2.9365355161717955e-05, + "loss": 0.2405, + "step": 66290 + }, + { + "epoch": 2.55994439939766, + "grad_norm": 0.651969313621521, + "learning_rate": 2.933961414211617e-05, + "loss": 0.236, + "step": 66300 + }, + { + "epoch": 2.560330514691687, + "grad_norm": 0.6139543056488037, + "learning_rate": 2.9313873122514385e-05, + "loss": 0.0929, + "step": 66310 + }, + { + "epoch": 2.5607166299857136, + "grad_norm": 2.219248056411743, + "learning_rate": 2.9288132102912598e-05, + "loss": 0.1579, + "step": 66320 + }, + { + "epoch": 2.5611027452797406, + "grad_norm": 1.0484322309494019, + "learning_rate": 2.9262391083310815e-05, + "loss": 0.0948, + "step": 66330 + }, + { + "epoch": 2.5614888605737676, + "grad_norm": 4.444892406463623, + "learning_rate": 
2.9236650063709025e-05, + "loss": 0.2459, + "step": 66340 + }, + { + "epoch": 2.561874975867794, + "grad_norm": 0.5715140700340271, + "learning_rate": 2.9210909044107238e-05, + "loss": 0.1527, + "step": 66350 + }, + { + "epoch": 2.5622610911618207, + "grad_norm": 1.5631526708602905, + "learning_rate": 2.9185168024505448e-05, + "loss": 0.2262, + "step": 66360 + }, + { + "epoch": 2.5626472064558476, + "grad_norm": 0.5048274397850037, + "learning_rate": 2.9159427004903668e-05, + "loss": 0.1175, + "step": 66370 + }, + { + "epoch": 2.5630333217498746, + "grad_norm": 1.8589451313018799, + "learning_rate": 2.9133685985301878e-05, + "loss": 0.1788, + "step": 66380 + }, + { + "epoch": 2.563419437043901, + "grad_norm": 0.7230979800224304, + "learning_rate": 2.910794496570009e-05, + "loss": 0.1978, + "step": 66390 + }, + { + "epoch": 2.563805552337928, + "grad_norm": 0.34469518065452576, + "learning_rate": 2.9082203946098308e-05, + "loss": 0.2338, + "step": 66400 + }, + { + "epoch": 2.564191667631955, + "grad_norm": 0.396876722574234, + "learning_rate": 2.9056462926496518e-05, + "loss": 0.1059, + "step": 66410 + }, + { + "epoch": 2.5645777829259817, + "grad_norm": 0.10169263184070587, + "learning_rate": 2.903072190689473e-05, + "loss": 0.1637, + "step": 66420 + }, + { + "epoch": 2.5649638982200083, + "grad_norm": 0.9067368507385254, + "learning_rate": 2.9004980887292948e-05, + "loss": 0.1774, + "step": 66430 + }, + { + "epoch": 2.5653500135140352, + "grad_norm": 0.03508066385984421, + "learning_rate": 2.897923986769116e-05, + "loss": 0.1696, + "step": 66440 + }, + { + "epoch": 2.5657361288080622, + "grad_norm": 0.8411409258842468, + "learning_rate": 2.895349884808937e-05, + "loss": 0.1355, + "step": 66450 + }, + { + "epoch": 2.566122244102089, + "grad_norm": 9.54201602935791, + "learning_rate": 2.8927757828487588e-05, + "loss": 0.2578, + "step": 66460 + }, + { + "epoch": 2.5665083593961158, + "grad_norm": 0.5490165948867798, + "learning_rate": 2.89020168088858e-05, + "loss": 0.2095, + "step": 66470 + }, + { + "epoch": 2.5668944746901423, + "grad_norm": 0.1423688530921936, + "learning_rate": 2.8876275789284015e-05, + "loss": 0.1465, + "step": 66480 + }, + { + "epoch": 2.5672805899841693, + "grad_norm": 1.456730604171753, + "learning_rate": 2.885053476968223e-05, + "loss": 0.1963, + "step": 66490 + }, + { + "epoch": 2.567666705278196, + "grad_norm": 1.5556591749191284, + "learning_rate": 2.882479375008044e-05, + "loss": 0.2851, + "step": 66500 + }, + { + "epoch": 2.568052820572223, + "grad_norm": 1.825986385345459, + "learning_rate": 2.8799052730478654e-05, + "loss": 0.2956, + "step": 66510 + }, + { + "epoch": 2.56843893586625, + "grad_norm": 1.5602234601974487, + "learning_rate": 2.877331171087687e-05, + "loss": 0.1656, + "step": 66520 + }, + { + "epoch": 2.5688250511602764, + "grad_norm": 0.17269374430179596, + "learning_rate": 2.8747570691275084e-05, + "loss": 0.2931, + "step": 66530 + }, + { + "epoch": 2.5692111664543034, + "grad_norm": 2.0602357387542725, + "learning_rate": 2.8721829671673294e-05, + "loss": 0.1741, + "step": 66540 + }, + { + "epoch": 2.56959728174833, + "grad_norm": 0.8411749005317688, + "learning_rate": 2.869608865207151e-05, + "loss": 0.2062, + "step": 66550 + }, + { + "epoch": 2.569983397042357, + "grad_norm": 1.9933018684387207, + "learning_rate": 2.8670347632469724e-05, + "loss": 0.0564, + "step": 66560 + }, + { + "epoch": 2.5703695123363834, + "grad_norm": 0.6383324861526489, + "learning_rate": 2.8644606612867934e-05, + "loss": 0.1954, + "step": 66570 + }, + { + "epoch": 
2.5707556276304104, + "grad_norm": 0.6832618713378906, + "learning_rate": 2.8618865593266154e-05, + "loss": 0.1364, + "step": 66580 + }, + { + "epoch": 2.5711417429244374, + "grad_norm": 0.13984490931034088, + "learning_rate": 2.8593124573664364e-05, + "loss": 0.2051, + "step": 66590 + }, + { + "epoch": 2.571527858218464, + "grad_norm": 0.9289367198944092, + "learning_rate": 2.8567383554062578e-05, + "loss": 0.167, + "step": 66600 + }, + { + "epoch": 2.571913973512491, + "grad_norm": 2.27067232131958, + "learning_rate": 2.8541642534460794e-05, + "loss": 0.2125, + "step": 66610 + }, + { + "epoch": 2.5723000888065175, + "grad_norm": 2.3731513023376465, + "learning_rate": 2.8515901514859004e-05, + "loss": 0.1417, + "step": 66620 + }, + { + "epoch": 2.5726862041005445, + "grad_norm": 2.030726194381714, + "learning_rate": 2.8490160495257218e-05, + "loss": 0.1292, + "step": 66630 + }, + { + "epoch": 2.573072319394571, + "grad_norm": 2.3475704193115234, + "learning_rate": 2.846441947565543e-05, + "loss": 0.2046, + "step": 66640 + }, + { + "epoch": 2.573458434688598, + "grad_norm": 1.8660598993301392, + "learning_rate": 2.8438678456053648e-05, + "loss": 0.1539, + "step": 66650 + }, + { + "epoch": 2.573844549982625, + "grad_norm": 2.3513095378875732, + "learning_rate": 2.8412937436451858e-05, + "loss": 0.3371, + "step": 66660 + }, + { + "epoch": 2.5742306652766516, + "grad_norm": 0.40551525354385376, + "learning_rate": 2.838719641685007e-05, + "loss": 0.1595, + "step": 66670 + }, + { + "epoch": 2.5746167805706786, + "grad_norm": 0.5435059070587158, + "learning_rate": 2.8361455397248288e-05, + "loss": 0.1139, + "step": 66680 + }, + { + "epoch": 2.575002895864705, + "grad_norm": 0.13456226885318756, + "learning_rate": 2.83357143776465e-05, + "loss": 0.1576, + "step": 66690 + }, + { + "epoch": 2.575389011158732, + "grad_norm": 0.8292468786239624, + "learning_rate": 2.830997335804471e-05, + "loss": 0.3232, + "step": 66700 + }, + { + "epoch": 2.5757751264527586, + "grad_norm": 2.613285541534424, + "learning_rate": 2.8284232338442927e-05, + "loss": 0.3047, + "step": 66710 + }, + { + "epoch": 2.5761612417467856, + "grad_norm": 0.11251250654459, + "learning_rate": 2.825849131884114e-05, + "loss": 0.2049, + "step": 66720 + }, + { + "epoch": 2.5765473570408126, + "grad_norm": 0.4605161249637604, + "learning_rate": 2.823275029923935e-05, + "loss": 0.0768, + "step": 66730 + }, + { + "epoch": 2.576933472334839, + "grad_norm": 1.2005031108856201, + "learning_rate": 2.820700927963757e-05, + "loss": 0.2257, + "step": 66740 + }, + { + "epoch": 2.5773195876288657, + "grad_norm": 1.1498386859893799, + "learning_rate": 2.818126826003578e-05, + "loss": 0.0817, + "step": 66750 + }, + { + "epoch": 2.5777057029228927, + "grad_norm": 0.5442838072776794, + "learning_rate": 2.8155527240433994e-05, + "loss": 0.1027, + "step": 66760 + }, + { + "epoch": 2.5780918182169197, + "grad_norm": 0.026386337354779243, + "learning_rate": 2.812978622083221e-05, + "loss": 0.2398, + "step": 66770 + }, + { + "epoch": 2.5784779335109462, + "grad_norm": 1.8325613737106323, + "learning_rate": 2.810404520123042e-05, + "loss": 0.1616, + "step": 66780 + }, + { + "epoch": 2.578864048804973, + "grad_norm": 4.571000576019287, + "learning_rate": 2.8078304181628634e-05, + "loss": 0.327, + "step": 66790 + }, + { + "epoch": 2.579250164099, + "grad_norm": 0.88929682970047, + "learning_rate": 2.805256316202685e-05, + "loss": 0.128, + "step": 66800 + }, + { + "epoch": 2.5796362793930268, + "grad_norm": 0.5320045351982117, + "learning_rate": 
2.8026822142425064e-05, + "loss": 0.043, + "step": 66810 + }, + { + "epoch": 2.5800223946870533, + "grad_norm": 2.667931079864502, + "learning_rate": 2.8001081122823274e-05, + "loss": 0.1023, + "step": 66820 + }, + { + "epoch": 2.5804085099810803, + "grad_norm": 0.5052358508110046, + "learning_rate": 2.7975340103221494e-05, + "loss": 0.1913, + "step": 66830 + }, + { + "epoch": 2.5807946252751073, + "grad_norm": 0.03477906435728073, + "learning_rate": 2.7949599083619704e-05, + "loss": 0.0771, + "step": 66840 + }, + { + "epoch": 2.581180740569134, + "grad_norm": 1.756790280342102, + "learning_rate": 2.7923858064017917e-05, + "loss": 0.1997, + "step": 66850 + }, + { + "epoch": 2.581566855863161, + "grad_norm": 0.2687755227088928, + "learning_rate": 2.7898117044416134e-05, + "loss": 0.1979, + "step": 66860 + }, + { + "epoch": 2.581952971157188, + "grad_norm": 0.3334960341453552, + "learning_rate": 2.7872376024814344e-05, + "loss": 0.1232, + "step": 66870 + }, + { + "epoch": 2.5823390864512143, + "grad_norm": 0.06918884068727493, + "learning_rate": 2.7846635005212557e-05, + "loss": 0.1476, + "step": 66880 + }, + { + "epoch": 2.582725201745241, + "grad_norm": 2.113374710083008, + "learning_rate": 2.7820893985610767e-05, + "loss": 0.2019, + "step": 66890 + }, + { + "epoch": 2.583111317039268, + "grad_norm": 0.7421366572380066, + "learning_rate": 2.7795152966008987e-05, + "loss": 0.1316, + "step": 66900 + }, + { + "epoch": 2.583497432333295, + "grad_norm": 0.5632963180541992, + "learning_rate": 2.7769411946407197e-05, + "loss": 0.2162, + "step": 66910 + }, + { + "epoch": 2.5838835476273214, + "grad_norm": 1.951395869255066, + "learning_rate": 2.774367092680541e-05, + "loss": 0.2977, + "step": 66920 + }, + { + "epoch": 2.5842696629213484, + "grad_norm": 1.139452338218689, + "learning_rate": 2.7717929907203627e-05, + "loss": 0.1505, + "step": 66930 + }, + { + "epoch": 2.584655778215375, + "grad_norm": 1.4778863191604614, + "learning_rate": 2.769218888760184e-05, + "loss": 0.167, + "step": 66940 + }, + { + "epoch": 2.585041893509402, + "grad_norm": 0.3687165379524231, + "learning_rate": 2.766644786800005e-05, + "loss": 0.0876, + "step": 66950 + }, + { + "epoch": 2.5854280088034285, + "grad_norm": 0.3774222433567047, + "learning_rate": 2.7640706848398267e-05, + "loss": 0.0574, + "step": 66960 + }, + { + "epoch": 2.5858141240974555, + "grad_norm": 1.060309886932373, + "learning_rate": 2.761496582879648e-05, + "loss": 0.1978, + "step": 66970 + }, + { + "epoch": 2.5862002393914825, + "grad_norm": 0.041595011949539185, + "learning_rate": 2.758922480919469e-05, + "loss": 0.1617, + "step": 66980 + }, + { + "epoch": 2.586586354685509, + "grad_norm": 1.0350271463394165, + "learning_rate": 2.756348378959291e-05, + "loss": 0.1443, + "step": 66990 + }, + { + "epoch": 2.586972469979536, + "grad_norm": 3.072813034057617, + "learning_rate": 2.753774276999112e-05, + "loss": 0.1084, + "step": 67000 + }, + { + "epoch": 2.5873585852735626, + "grad_norm": 1.4295400381088257, + "learning_rate": 2.7512001750389334e-05, + "loss": 0.2752, + "step": 67010 + }, + { + "epoch": 2.5877447005675895, + "grad_norm": 0.904931366443634, + "learning_rate": 2.748626073078755e-05, + "loss": 0.1908, + "step": 67020 + }, + { + "epoch": 2.588130815861616, + "grad_norm": 0.12692487239837646, + "learning_rate": 2.746051971118576e-05, + "loss": 0.1956, + "step": 67030 + }, + { + "epoch": 2.588516931155643, + "grad_norm": 3.306731700897217, + "learning_rate": 2.7434778691583974e-05, + "loss": 0.1552, + "step": 67040 + }, + { + "epoch": 
2.58890304644967, + "grad_norm": 0.17633609473705292, + "learning_rate": 2.740903767198219e-05, + "loss": 0.2643, + "step": 67050 + }, + { + "epoch": 2.5892891617436966, + "grad_norm": 1.683794617652893, + "learning_rate": 2.7383296652380404e-05, + "loss": 0.1609, + "step": 67060 + }, + { + "epoch": 2.5896752770377236, + "grad_norm": 0.799902617931366, + "learning_rate": 2.7357555632778613e-05, + "loss": 0.1787, + "step": 67070 + }, + { + "epoch": 2.59006139233175, + "grad_norm": 0.41263818740844727, + "learning_rate": 2.733181461317683e-05, + "loss": 0.144, + "step": 67080 + }, + { + "epoch": 2.590447507625777, + "grad_norm": 0.018304159864783287, + "learning_rate": 2.7306073593575043e-05, + "loss": 0.3317, + "step": 67090 + }, + { + "epoch": 2.5908336229198037, + "grad_norm": 0.5893455743789673, + "learning_rate": 2.7280332573973257e-05, + "loss": 0.1116, + "step": 67100 + }, + { + "epoch": 2.5912197382138307, + "grad_norm": 0.04571494832634926, + "learning_rate": 2.7254591554371473e-05, + "loss": 0.0919, + "step": 67110 + }, + { + "epoch": 2.5916058535078577, + "grad_norm": 1.2573976516723633, + "learning_rate": 2.7228850534769683e-05, + "loss": 0.0927, + "step": 67120 + }, + { + "epoch": 2.591991968801884, + "grad_norm": 2.4016544818878174, + "learning_rate": 2.7203109515167897e-05, + "loss": 0.2308, + "step": 67130 + }, + { + "epoch": 2.592378084095911, + "grad_norm": 0.6153950691223145, + "learning_rate": 2.7177368495566113e-05, + "loss": 0.3398, + "step": 67140 + }, + { + "epoch": 2.5927641993899377, + "grad_norm": 0.638940155506134, + "learning_rate": 2.7151627475964327e-05, + "loss": 0.0721, + "step": 67150 + }, + { + "epoch": 2.5931503146839647, + "grad_norm": 2.8470890522003174, + "learning_rate": 2.7125886456362537e-05, + "loss": 0.3437, + "step": 67160 + }, + { + "epoch": 2.5935364299779913, + "grad_norm": 0.21816271543502808, + "learning_rate": 2.710014543676075e-05, + "loss": 0.2749, + "step": 67170 + }, + { + "epoch": 2.5939225452720183, + "grad_norm": 1.3057670593261719, + "learning_rate": 2.7074404417158967e-05, + "loss": 0.2564, + "step": 67180 + }, + { + "epoch": 2.5943086605660453, + "grad_norm": 3.9401612281799316, + "learning_rate": 2.7048663397557177e-05, + "loss": 0.1827, + "step": 67190 + }, + { + "epoch": 2.594694775860072, + "grad_norm": 0.09398512542247772, + "learning_rate": 2.702292237795539e-05, + "loss": 0.2714, + "step": 67200 + }, + { + "epoch": 2.5950808911540983, + "grad_norm": 1.3454128503799438, + "learning_rate": 2.6997181358353607e-05, + "loss": 0.1409, + "step": 67210 + }, + { + "epoch": 2.5954670064481253, + "grad_norm": 3.312215566635132, + "learning_rate": 2.697144033875182e-05, + "loss": 0.1655, + "step": 67220 + }, + { + "epoch": 2.5958531217421523, + "grad_norm": 0.40052536129951477, + "learning_rate": 2.694569931915003e-05, + "loss": 0.095, + "step": 67230 + }, + { + "epoch": 2.596239237036179, + "grad_norm": 2.1148762702941895, + "learning_rate": 2.6919958299548247e-05, + "loss": 0.1167, + "step": 67240 + }, + { + "epoch": 2.596625352330206, + "grad_norm": 0.539953887462616, + "learning_rate": 2.689421727994646e-05, + "loss": 0.2198, + "step": 67250 + }, + { + "epoch": 2.597011467624233, + "grad_norm": 0.6172623634338379, + "learning_rate": 2.6868476260344673e-05, + "loss": 0.0879, + "step": 67260 + }, + { + "epoch": 2.5973975829182594, + "grad_norm": 0.9759122729301453, + "learning_rate": 2.684273524074289e-05, + "loss": 0.1162, + "step": 67270 + }, + { + "epoch": 2.597783698212286, + "grad_norm": 1.2510347366333008, + "learning_rate": 
2.68169942211411e-05, + "loss": 0.1106, + "step": 67280 + }, + { + "epoch": 2.598169813506313, + "grad_norm": 1.3545809984207153, + "learning_rate": 2.6791253201539313e-05, + "loss": 0.1538, + "step": 67290 + }, + { + "epoch": 2.59855592880034, + "grad_norm": 0.4334702789783478, + "learning_rate": 2.676551218193753e-05, + "loss": 0.1178, + "step": 67300 + }, + { + "epoch": 2.5989420440943665, + "grad_norm": 0.7618227601051331, + "learning_rate": 2.6739771162335743e-05, + "loss": 0.158, + "step": 67310 + }, + { + "epoch": 2.5993281593883935, + "grad_norm": 2.0803728103637695, + "learning_rate": 2.6714030142733953e-05, + "loss": 0.2248, + "step": 67320 + }, + { + "epoch": 2.5997142746824204, + "grad_norm": 0.7592846751213074, + "learning_rate": 2.668828912313217e-05, + "loss": 0.139, + "step": 67330 + }, + { + "epoch": 2.600100389976447, + "grad_norm": 2.20271372795105, + "learning_rate": 2.6662548103530383e-05, + "loss": 0.2735, + "step": 67340 + }, + { + "epoch": 2.6004865052704735, + "grad_norm": 2.25789213180542, + "learning_rate": 2.6636807083928593e-05, + "loss": 0.1816, + "step": 67350 + }, + { + "epoch": 2.6008726205645005, + "grad_norm": 1.8669871091842651, + "learning_rate": 2.6611066064326813e-05, + "loss": 0.2003, + "step": 67360 + }, + { + "epoch": 2.6012587358585275, + "grad_norm": 2.2639665603637695, + "learning_rate": 2.6585325044725023e-05, + "loss": 0.2869, + "step": 67370 + }, + { + "epoch": 2.601644851152554, + "grad_norm": 0.5188022255897522, + "learning_rate": 2.6559584025123236e-05, + "loss": 0.1267, + "step": 67380 + }, + { + "epoch": 2.602030966446581, + "grad_norm": 0.7478063106536865, + "learning_rate": 2.6533843005521453e-05, + "loss": 0.1506, + "step": 67390 + }, + { + "epoch": 2.6024170817406076, + "grad_norm": 0.05027804523706436, + "learning_rate": 2.6508101985919663e-05, + "loss": 0.1469, + "step": 67400 + }, + { + "epoch": 2.6028031970346346, + "grad_norm": 1.4990183115005493, + "learning_rate": 2.6482360966317876e-05, + "loss": 0.0823, + "step": 67410 + }, + { + "epoch": 2.603189312328661, + "grad_norm": 0.23909913003444672, + "learning_rate": 2.645661994671609e-05, + "loss": 0.1971, + "step": 67420 + }, + { + "epoch": 2.603575427622688, + "grad_norm": 0.03295808658003807, + "learning_rate": 2.6430878927114306e-05, + "loss": 0.0741, + "step": 67430 + }, + { + "epoch": 2.603961542916715, + "grad_norm": 1.407315731048584, + "learning_rate": 2.6405137907512516e-05, + "loss": 0.115, + "step": 67440 + }, + { + "epoch": 2.6043476582107417, + "grad_norm": 2.2319045066833496, + "learning_rate": 2.637939688791073e-05, + "loss": 0.2297, + "step": 67450 + }, + { + "epoch": 2.6047337735047686, + "grad_norm": 0.40752896666526794, + "learning_rate": 2.6353655868308946e-05, + "loss": 0.132, + "step": 67460 + }, + { + "epoch": 2.605119888798795, + "grad_norm": 1.6817177534103394, + "learning_rate": 2.632791484870716e-05, + "loss": 0.1508, + "step": 67470 + }, + { + "epoch": 2.605506004092822, + "grad_norm": 1.18791663646698, + "learning_rate": 2.630217382910537e-05, + "loss": 0.1663, + "step": 67480 + }, + { + "epoch": 2.6058921193868487, + "grad_norm": 2.44256329536438, + "learning_rate": 2.6276432809503586e-05, + "loss": 0.1656, + "step": 67490 + }, + { + "epoch": 2.6062782346808757, + "grad_norm": 0.49040651321411133, + "learning_rate": 2.62506917899018e-05, + "loss": 0.227, + "step": 67500 + }, + { + "epoch": 2.6066643499749027, + "grad_norm": 0.6817883849143982, + "learning_rate": 2.622495077030001e-05, + "loss": 0.0833, + "step": 67510 + }, + { + "epoch": 
2.6070504652689293, + "grad_norm": 1.4250826835632324, + "learning_rate": 2.619920975069823e-05, + "loss": 0.0818, + "step": 67520 + }, + { + "epoch": 2.6074365805629562, + "grad_norm": 5.752524375915527, + "learning_rate": 2.617346873109644e-05, + "loss": 0.1824, + "step": 67530 + }, + { + "epoch": 2.607822695856983, + "grad_norm": 1.8009400367736816, + "learning_rate": 2.6147727711494653e-05, + "loss": 0.1534, + "step": 67540 + }, + { + "epoch": 2.6082088111510098, + "grad_norm": 0.7888918519020081, + "learning_rate": 2.612198669189287e-05, + "loss": 0.2632, + "step": 67550 + }, + { + "epoch": 2.6085949264450363, + "grad_norm": 1.1485899686813354, + "learning_rate": 2.609624567229108e-05, + "loss": 0.2014, + "step": 67560 + }, + { + "epoch": 2.6089810417390633, + "grad_norm": 0.017272522673010826, + "learning_rate": 2.6070504652689293e-05, + "loss": 0.1698, + "step": 67570 + }, + { + "epoch": 2.6093671570330903, + "grad_norm": 0.23312939703464508, + "learning_rate": 2.604476363308751e-05, + "loss": 0.11, + "step": 67580 + }, + { + "epoch": 2.609753272327117, + "grad_norm": 1.5409551858901978, + "learning_rate": 2.6019022613485723e-05, + "loss": 0.2031, + "step": 67590 + }, + { + "epoch": 2.610139387621144, + "grad_norm": 2.757416248321533, + "learning_rate": 2.5993281593883933e-05, + "loss": 0.2326, + "step": 67600 + }, + { + "epoch": 2.6105255029151704, + "grad_norm": 0.03714454174041748, + "learning_rate": 2.5967540574282153e-05, + "loss": 0.305, + "step": 67610 + }, + { + "epoch": 2.6109116182091974, + "grad_norm": 1.5993083715438843, + "learning_rate": 2.5941799554680363e-05, + "loss": 0.0492, + "step": 67620 + }, + { + "epoch": 2.611297733503224, + "grad_norm": 3.3023600578308105, + "learning_rate": 2.5916058535078576e-05, + "loss": 0.1256, + "step": 67630 + }, + { + "epoch": 2.611683848797251, + "grad_norm": 2.0448055267333984, + "learning_rate": 2.5890317515476793e-05, + "loss": 0.1496, + "step": 67640 + }, + { + "epoch": 2.612069964091278, + "grad_norm": 2.056248188018799, + "learning_rate": 2.5864576495875002e-05, + "loss": 0.1396, + "step": 67650 + }, + { + "epoch": 2.6124560793853044, + "grad_norm": 0.7189445495605469, + "learning_rate": 2.5838835476273216e-05, + "loss": 0.1375, + "step": 67660 + }, + { + "epoch": 2.612842194679331, + "grad_norm": 0.3849039077758789, + "learning_rate": 2.5813094456671426e-05, + "loss": 0.0882, + "step": 67670 + }, + { + "epoch": 2.613228309973358, + "grad_norm": 1.2116458415985107, + "learning_rate": 2.5787353437069646e-05, + "loss": 0.0892, + "step": 67680 + }, + { + "epoch": 2.613614425267385, + "grad_norm": 0.5601721405982971, + "learning_rate": 2.5761612417467856e-05, + "loss": 0.148, + "step": 67690 + }, + { + "epoch": 2.6140005405614115, + "grad_norm": 1.1883691549301147, + "learning_rate": 2.573587139786607e-05, + "loss": 0.1673, + "step": 67700 + }, + { + "epoch": 2.6143866558554385, + "grad_norm": 0.8299083113670349, + "learning_rate": 2.5710130378264286e-05, + "loss": 0.1871, + "step": 67710 + }, + { + "epoch": 2.6147727711494655, + "grad_norm": 0.6316946744918823, + "learning_rate": 2.5684389358662496e-05, + "loss": 0.0707, + "step": 67720 + }, + { + "epoch": 2.615158886443492, + "grad_norm": 0.5716143846511841, + "learning_rate": 2.565864833906071e-05, + "loss": 0.0993, + "step": 67730 + }, + { + "epoch": 2.6155450017375186, + "grad_norm": 0.6665957570075989, + "learning_rate": 2.5632907319458926e-05, + "loss": 0.1402, + "step": 67740 + }, + { + "epoch": 2.6159311170315456, + "grad_norm": 1.1837033033370972, + "learning_rate": 
2.560716629985714e-05, + "loss": 0.0963, + "step": 67750 + }, + { + "epoch": 2.6163172323255726, + "grad_norm": 0.7204211354255676, + "learning_rate": 2.558142528025535e-05, + "loss": 0.1109, + "step": 67760 + }, + { + "epoch": 2.616703347619599, + "grad_norm": 0.04177774861454964, + "learning_rate": 2.555568426065357e-05, + "loss": 0.206, + "step": 67770 + }, + { + "epoch": 2.617089462913626, + "grad_norm": 0.8528016805648804, + "learning_rate": 2.552994324105178e-05, + "loss": 0.1477, + "step": 67780 + }, + { + "epoch": 2.6174755782076526, + "grad_norm": 1.8284223079681396, + "learning_rate": 2.5504202221449992e-05, + "loss": 0.2925, + "step": 67790 + }, + { + "epoch": 2.6178616935016796, + "grad_norm": 2.03399658203125, + "learning_rate": 2.547846120184821e-05, + "loss": 0.1716, + "step": 67800 + }, + { + "epoch": 2.618247808795706, + "grad_norm": 1.1871380805969238, + "learning_rate": 2.545272018224642e-05, + "loss": 0.1387, + "step": 67810 + }, + { + "epoch": 2.618633924089733, + "grad_norm": 0.3045734465122223, + "learning_rate": 2.5426979162644632e-05, + "loss": 0.1624, + "step": 67820 + }, + { + "epoch": 2.61902003938376, + "grad_norm": 1.4469716548919678, + "learning_rate": 2.540123814304285e-05, + "loss": 0.1534, + "step": 67830 + }, + { + "epoch": 2.6194061546777867, + "grad_norm": 0.7630550861358643, + "learning_rate": 2.5375497123441062e-05, + "loss": 0.1266, + "step": 67840 + }, + { + "epoch": 2.6197922699718137, + "grad_norm": 1.3296400308609009, + "learning_rate": 2.5349756103839272e-05, + "loss": 0.3268, + "step": 67850 + }, + { + "epoch": 2.6201783852658402, + "grad_norm": 0.7620146870613098, + "learning_rate": 2.532401508423749e-05, + "loss": 0.1747, + "step": 67860 + }, + { + "epoch": 2.6205645005598672, + "grad_norm": 1.4850629568099976, + "learning_rate": 2.5298274064635702e-05, + "loss": 0.1165, + "step": 67870 + }, + { + "epoch": 2.6209506158538938, + "grad_norm": 0.30754703283309937, + "learning_rate": 2.5272533045033915e-05, + "loss": 0.1603, + "step": 67880 + }, + { + "epoch": 2.6213367311479208, + "grad_norm": 0.47044405341148376, + "learning_rate": 2.5246792025432132e-05, + "loss": 0.1471, + "step": 67890 + }, + { + "epoch": 2.6217228464419478, + "grad_norm": 1.428301453590393, + "learning_rate": 2.5221051005830342e-05, + "loss": 0.1448, + "step": 67900 + }, + { + "epoch": 2.6221089617359743, + "grad_norm": 0.3132546544075012, + "learning_rate": 2.5195309986228555e-05, + "loss": 0.1899, + "step": 67910 + }, + { + "epoch": 2.6224950770300013, + "grad_norm": 0.36817577481269836, + "learning_rate": 2.5169568966626772e-05, + "loss": 0.2864, + "step": 67920 + }, + { + "epoch": 2.622881192324028, + "grad_norm": 0.8652348518371582, + "learning_rate": 2.5143827947024985e-05, + "loss": 0.0987, + "step": 67930 + }, + { + "epoch": 2.623267307618055, + "grad_norm": 0.5235974788665771, + "learning_rate": 2.5118086927423195e-05, + "loss": 0.3079, + "step": 67940 + }, + { + "epoch": 2.6236534229120814, + "grad_norm": 0.2852037847042084, + "learning_rate": 2.509234590782141e-05, + "loss": 0.1217, + "step": 67950 + }, + { + "epoch": 2.6240395382061084, + "grad_norm": 0.6256119608879089, + "learning_rate": 2.5066604888219625e-05, + "loss": 0.1645, + "step": 67960 + }, + { + "epoch": 2.6244256535001353, + "grad_norm": 1.0917539596557617, + "learning_rate": 2.5040863868617835e-05, + "loss": 0.1589, + "step": 67970 + }, + { + "epoch": 2.624811768794162, + "grad_norm": 1.1311125755310059, + "learning_rate": 2.501512284901605e-05, + "loss": 0.2388, + "step": 67980 + }, + { + "epoch": 
2.625197884088189, + "grad_norm": 0.806238055229187, + "learning_rate": 2.4989381829414265e-05, + "loss": 0.2127, + "step": 67990 + }, + { + "epoch": 2.6255839993822154, + "grad_norm": 1.4400973320007324, + "learning_rate": 2.496364080981248e-05, + "loss": 0.0687, + "step": 68000 + }, + { + "epoch": 2.6259701146762424, + "grad_norm": 0.050153911113739014, + "learning_rate": 2.4937899790210692e-05, + "loss": 0.0564, + "step": 68010 + }, + { + "epoch": 2.626356229970269, + "grad_norm": 1.139260172843933, + "learning_rate": 2.4912158770608905e-05, + "loss": 0.0868, + "step": 68020 + }, + { + "epoch": 2.626742345264296, + "grad_norm": 1.2839637994766235, + "learning_rate": 2.488641775100712e-05, + "loss": 0.1589, + "step": 68030 + }, + { + "epoch": 2.627128460558323, + "grad_norm": 0.19808660447597504, + "learning_rate": 2.4860676731405332e-05, + "loss": 0.2015, + "step": 68040 + }, + { + "epoch": 2.6275145758523495, + "grad_norm": 0.38413748145103455, + "learning_rate": 2.4834935711803545e-05, + "loss": 0.1181, + "step": 68050 + }, + { + "epoch": 2.627900691146376, + "grad_norm": 0.5252083539962769, + "learning_rate": 2.480919469220176e-05, + "loss": 0.2141, + "step": 68060 + }, + { + "epoch": 2.628286806440403, + "grad_norm": 0.3906213641166687, + "learning_rate": 2.4783453672599975e-05, + "loss": 0.3083, + "step": 68070 + }, + { + "epoch": 2.62867292173443, + "grad_norm": 2.1290669441223145, + "learning_rate": 2.4757712652998185e-05, + "loss": 0.2155, + "step": 68080 + }, + { + "epoch": 2.6290590370284566, + "grad_norm": 1.0176451206207275, + "learning_rate": 2.4731971633396402e-05, + "loss": 0.1852, + "step": 68090 + }, + { + "epoch": 2.6294451523224835, + "grad_norm": 2.1066977977752686, + "learning_rate": 2.4706230613794615e-05, + "loss": 0.2177, + "step": 68100 + }, + { + "epoch": 2.6298312676165105, + "grad_norm": 2.267906427383423, + "learning_rate": 2.4680489594192825e-05, + "loss": 0.1359, + "step": 68110 + }, + { + "epoch": 2.630217382910537, + "grad_norm": 1.4150601625442505, + "learning_rate": 2.465474857459104e-05, + "loss": 0.1805, + "step": 68120 + }, + { + "epoch": 2.6306034982045636, + "grad_norm": 0.41347402334213257, + "learning_rate": 2.462900755498925e-05, + "loss": 0.095, + "step": 68130 + }, + { + "epoch": 2.6309896134985906, + "grad_norm": 1.3140255212783813, + "learning_rate": 2.4603266535387468e-05, + "loss": 0.0693, + "step": 68140 + }, + { + "epoch": 2.6313757287926176, + "grad_norm": 1.2731821537017822, + "learning_rate": 2.457752551578568e-05, + "loss": 0.2528, + "step": 68150 + }, + { + "epoch": 2.631761844086644, + "grad_norm": 2.4566001892089844, + "learning_rate": 2.4551784496183895e-05, + "loss": 0.1169, + "step": 68160 + }, + { + "epoch": 2.632147959380671, + "grad_norm": 2.265305280685425, + "learning_rate": 2.4526043476582108e-05, + "loss": 0.154, + "step": 68170 + }, + { + "epoch": 2.632534074674698, + "grad_norm": 0.5196200609207153, + "learning_rate": 2.450030245698032e-05, + "loss": 0.1029, + "step": 68180 + }, + { + "epoch": 2.6329201899687247, + "grad_norm": 2.5574257373809814, + "learning_rate": 2.4474561437378535e-05, + "loss": 0.2491, + "step": 68190 + }, + { + "epoch": 2.6333063052627512, + "grad_norm": 0.40821412205696106, + "learning_rate": 2.4448820417776748e-05, + "loss": 0.1842, + "step": 68200 + }, + { + "epoch": 2.633692420556778, + "grad_norm": 0.9594093561172485, + "learning_rate": 2.4423079398174965e-05, + "loss": 0.2135, + "step": 68210 + }, + { + "epoch": 2.634078535850805, + "grad_norm": 0.3707121014595032, + "learning_rate": 
2.4397338378573175e-05, + "loss": 0.1715, + "step": 68220 + }, + { + "epoch": 2.6344646511448317, + "grad_norm": 0.9233579039573669, + "learning_rate": 2.437159735897139e-05, + "loss": 0.3378, + "step": 68230 + }, + { + "epoch": 2.6348507664388587, + "grad_norm": 1.8800396919250488, + "learning_rate": 2.4345856339369605e-05, + "loss": 0.0902, + "step": 68240 + }, + { + "epoch": 2.6352368817328853, + "grad_norm": 1.0025197267532349, + "learning_rate": 2.4320115319767818e-05, + "loss": 0.2189, + "step": 68250 + }, + { + "epoch": 2.6356229970269123, + "grad_norm": 0.03650035336613655, + "learning_rate": 2.429437430016603e-05, + "loss": 0.2025, + "step": 68260 + }, + { + "epoch": 2.636009112320939, + "grad_norm": 0.20000745356082916, + "learning_rate": 2.4268633280564245e-05, + "loss": 0.1395, + "step": 68270 + }, + { + "epoch": 2.636395227614966, + "grad_norm": 0.7981158494949341, + "learning_rate": 2.4242892260962458e-05, + "loss": 0.0875, + "step": 68280 + }, + { + "epoch": 2.636781342908993, + "grad_norm": 1.6767163276672363, + "learning_rate": 2.421715124136067e-05, + "loss": 0.2558, + "step": 68290 + }, + { + "epoch": 2.6371674582030193, + "grad_norm": 2.023684024810791, + "learning_rate": 2.4191410221758885e-05, + "loss": 0.2657, + "step": 68300 + }, + { + "epoch": 2.6375535734970463, + "grad_norm": 1.0396549701690674, + "learning_rate": 2.4165669202157098e-05, + "loss": 0.1199, + "step": 68310 + }, + { + "epoch": 2.637939688791073, + "grad_norm": 0.8373544216156006, + "learning_rate": 2.4139928182555315e-05, + "loss": 0.1273, + "step": 68320 + }, + { + "epoch": 2.6383258040851, + "grad_norm": 1.0113970041275024, + "learning_rate": 2.4114187162953525e-05, + "loss": 0.1111, + "step": 68330 + }, + { + "epoch": 2.6387119193791264, + "grad_norm": 0.08151128888130188, + "learning_rate": 2.4088446143351738e-05, + "loss": 0.1949, + "step": 68340 + }, + { + "epoch": 2.6390980346731534, + "grad_norm": 1.629394292831421, + "learning_rate": 2.4062705123749955e-05, + "loss": 0.1135, + "step": 68350 + }, + { + "epoch": 2.6394841499671804, + "grad_norm": 0.11504169553518295, + "learning_rate": 2.4036964104148165e-05, + "loss": 0.1591, + "step": 68360 + }, + { + "epoch": 2.639870265261207, + "grad_norm": 1.0481438636779785, + "learning_rate": 2.401122308454638e-05, + "loss": 0.1151, + "step": 68370 + }, + { + "epoch": 2.640256380555234, + "grad_norm": 1.7637771368026733, + "learning_rate": 2.3985482064944595e-05, + "loss": 0.2499, + "step": 68380 + }, + { + "epoch": 2.6406424958492605, + "grad_norm": 0.43667173385620117, + "learning_rate": 2.3959741045342808e-05, + "loss": 0.0512, + "step": 68390 + }, + { + "epoch": 2.6410286111432875, + "grad_norm": 0.9862222075462341, + "learning_rate": 2.393400002574102e-05, + "loss": 0.1, + "step": 68400 + }, + { + "epoch": 2.641414726437314, + "grad_norm": 0.996530294418335, + "learning_rate": 2.3908259006139234e-05, + "loss": 0.1679, + "step": 68410 + }, + { + "epoch": 2.641800841731341, + "grad_norm": 1.451374888420105, + "learning_rate": 2.3882517986537448e-05, + "loss": 0.2103, + "step": 68420 + }, + { + "epoch": 2.642186957025368, + "grad_norm": 0.2730307877063751, + "learning_rate": 2.385677696693566e-05, + "loss": 0.1522, + "step": 68430 + }, + { + "epoch": 2.6425730723193945, + "grad_norm": 0.06040269508957863, + "learning_rate": 2.3831035947333874e-05, + "loss": 0.1278, + "step": 68440 + }, + { + "epoch": 2.6429591876134215, + "grad_norm": 1.716790795326233, + "learning_rate": 2.3805294927732088e-05, + "loss": 0.2005, + "step": 68450 + }, + { + "epoch": 
2.643345302907448, + "grad_norm": 1.4266630411148071, + "learning_rate": 2.3779553908130304e-05, + "loss": 0.084, + "step": 68460 + }, + { + "epoch": 2.643731418201475, + "grad_norm": 0.24637824296951294, + "learning_rate": 2.3753812888528514e-05, + "loss": 0.1616, + "step": 68470 + }, + { + "epoch": 2.6441175334955016, + "grad_norm": 0.1911696344614029, + "learning_rate": 2.372807186892673e-05, + "loss": 0.2609, + "step": 68480 + }, + { + "epoch": 2.6445036487895286, + "grad_norm": 0.5890191793441772, + "learning_rate": 2.3702330849324944e-05, + "loss": 0.0889, + "step": 68490 + }, + { + "epoch": 2.6448897640835556, + "grad_norm": 1.8803762197494507, + "learning_rate": 2.3676589829723154e-05, + "loss": 0.1945, + "step": 68500 + }, + { + "epoch": 2.645275879377582, + "grad_norm": 0.13470906019210815, + "learning_rate": 2.365084881012137e-05, + "loss": 0.2507, + "step": 68510 + }, + { + "epoch": 2.6456619946716087, + "grad_norm": 0.5220600962638855, + "learning_rate": 2.362510779051958e-05, + "loss": 0.1619, + "step": 68520 + }, + { + "epoch": 2.6460481099656357, + "grad_norm": 0.04346944019198418, + "learning_rate": 2.3599366770917798e-05, + "loss": 0.162, + "step": 68530 + }, + { + "epoch": 2.6464342252596627, + "grad_norm": 1.4460773468017578, + "learning_rate": 2.357362575131601e-05, + "loss": 0.2857, + "step": 68540 + }, + { + "epoch": 2.646820340553689, + "grad_norm": 1.9554592370986938, + "learning_rate": 2.3547884731714224e-05, + "loss": 0.1108, + "step": 68550 + }, + { + "epoch": 2.647206455847716, + "grad_norm": 0.596594512462616, + "learning_rate": 2.3522143712112438e-05, + "loss": 0.1234, + "step": 68560 + }, + { + "epoch": 2.647592571141743, + "grad_norm": 0.4433450996875763, + "learning_rate": 2.349640269251065e-05, + "loss": 0.2316, + "step": 68570 + }, + { + "epoch": 2.6479786864357697, + "grad_norm": 0.5461844801902771, + "learning_rate": 2.3470661672908864e-05, + "loss": 0.3235, + "step": 68580 + }, + { + "epoch": 2.6483648017297963, + "grad_norm": 0.3693888187408447, + "learning_rate": 2.3444920653307077e-05, + "loss": 0.2776, + "step": 68590 + }, + { + "epoch": 2.6487509170238233, + "grad_norm": 0.19041050970554352, + "learning_rate": 2.3419179633705294e-05, + "loss": 0.14, + "step": 68600 + }, + { + "epoch": 2.6491370323178502, + "grad_norm": 0.6536568999290466, + "learning_rate": 2.3393438614103504e-05, + "loss": 0.1418, + "step": 68610 + }, + { + "epoch": 2.649523147611877, + "grad_norm": 0.32842710614204407, + "learning_rate": 2.336769759450172e-05, + "loss": 0.1917, + "step": 68620 + }, + { + "epoch": 2.649909262905904, + "grad_norm": 1.8785744905471802, + "learning_rate": 2.3341956574899934e-05, + "loss": 0.0986, + "step": 68630 + }, + { + "epoch": 2.6502953781999308, + "grad_norm": 1.0904650688171387, + "learning_rate": 2.3316215555298147e-05, + "loss": 0.1419, + "step": 68640 + }, + { + "epoch": 2.6506814934939573, + "grad_norm": 0.49260103702545166, + "learning_rate": 2.329047453569636e-05, + "loss": 0.1906, + "step": 68650 + }, + { + "epoch": 2.651067608787984, + "grad_norm": 0.6473127007484436, + "learning_rate": 2.326473351609457e-05, + "loss": 0.1585, + "step": 68660 + }, + { + "epoch": 2.651453724082011, + "grad_norm": 0.6146073937416077, + "learning_rate": 2.3238992496492787e-05, + "loss": 0.1425, + "step": 68670 + }, + { + "epoch": 2.651839839376038, + "grad_norm": 1.9327075481414795, + "learning_rate": 2.3213251476891e-05, + "loss": 0.1849, + "step": 68680 + }, + { + "epoch": 2.6522259546700644, + "grad_norm": 2.2953224182128906, + "learning_rate": 
2.3187510457289214e-05, + "loss": 0.2707, + "step": 68690 + }, + { + "epoch": 2.6526120699640914, + "grad_norm": 0.1470266729593277, + "learning_rate": 2.3161769437687427e-05, + "loss": 0.1072, + "step": 68700 + }, + { + "epoch": 2.652998185258118, + "grad_norm": 0.8499718308448792, + "learning_rate": 2.3136028418085644e-05, + "loss": 0.1817, + "step": 68710 + }, + { + "epoch": 2.653384300552145, + "grad_norm": 0.09872210770845413, + "learning_rate": 2.3110287398483854e-05, + "loss": 0.0622, + "step": 68720 + }, + { + "epoch": 2.6537704158461715, + "grad_norm": 0.6662464737892151, + "learning_rate": 2.3084546378882067e-05, + "loss": 0.1477, + "step": 68730 + }, + { + "epoch": 2.6541565311401984, + "grad_norm": 0.008908030577003956, + "learning_rate": 2.3058805359280284e-05, + "loss": 0.1977, + "step": 68740 + }, + { + "epoch": 2.6545426464342254, + "grad_norm": 0.6658633947372437, + "learning_rate": 2.3033064339678494e-05, + "loss": 0.1416, + "step": 68750 + }, + { + "epoch": 2.654928761728252, + "grad_norm": 2.4253549575805664, + "learning_rate": 2.300732332007671e-05, + "loss": 0.223, + "step": 68760 + }, + { + "epoch": 2.655314877022279, + "grad_norm": 0.08092183619737625, + "learning_rate": 2.2981582300474924e-05, + "loss": 0.1287, + "step": 68770 + }, + { + "epoch": 2.6557009923163055, + "grad_norm": 1.8364213705062866, + "learning_rate": 2.2955841280873137e-05, + "loss": 0.1968, + "step": 68780 + }, + { + "epoch": 2.6560871076103325, + "grad_norm": 0.2436826527118683, + "learning_rate": 2.293010026127135e-05, + "loss": 0.1102, + "step": 68790 + }, + { + "epoch": 2.656473222904359, + "grad_norm": 1.2653074264526367, + "learning_rate": 2.2904359241669564e-05, + "loss": 0.2286, + "step": 68800 + }, + { + "epoch": 2.656859338198386, + "grad_norm": 0.14631232619285583, + "learning_rate": 2.2878618222067777e-05, + "loss": 0.1099, + "step": 68810 + }, + { + "epoch": 2.657245453492413, + "grad_norm": 0.02367425337433815, + "learning_rate": 2.285287720246599e-05, + "loss": 0.1272, + "step": 68820 + }, + { + "epoch": 2.6576315687864396, + "grad_norm": 0.3960202932357788, + "learning_rate": 2.2827136182864204e-05, + "loss": 0.1486, + "step": 68830 + }, + { + "epoch": 2.6580176840804666, + "grad_norm": 0.7019187808036804, + "learning_rate": 2.2801395163262417e-05, + "loss": 0.137, + "step": 68840 + }, + { + "epoch": 2.658403799374493, + "grad_norm": 1.1136587858200073, + "learning_rate": 2.2775654143660634e-05, + "loss": 0.1244, + "step": 68850 + }, + { + "epoch": 2.65878991466852, + "grad_norm": 0.8158296942710876, + "learning_rate": 2.2749913124058844e-05, + "loss": 0.0528, + "step": 68860 + }, + { + "epoch": 2.6591760299625467, + "grad_norm": 2.001655101776123, + "learning_rate": 2.272417210445706e-05, + "loss": 0.0705, + "step": 68870 + }, + { + "epoch": 2.6595621452565736, + "grad_norm": 0.2512793242931366, + "learning_rate": 2.2698431084855274e-05, + "loss": 0.0891, + "step": 68880 + }, + { + "epoch": 2.6599482605506006, + "grad_norm": 2.8973586559295654, + "learning_rate": 2.2672690065253484e-05, + "loss": 0.0921, + "step": 68890 + }, + { + "epoch": 2.660334375844627, + "grad_norm": 2.393480062484741, + "learning_rate": 2.26469490456517e-05, + "loss": 0.2726, + "step": 68900 + }, + { + "epoch": 2.660720491138654, + "grad_norm": 3.534479856491089, + "learning_rate": 2.2621208026049914e-05, + "loss": 0.2297, + "step": 68910 + }, + { + "epoch": 2.6611066064326807, + "grad_norm": 1.4901084899902344, + "learning_rate": 2.2595467006448127e-05, + "loss": 0.1104, + "step": 68920 + }, + { + 
"epoch": 2.6614927217267077, + "grad_norm": 1.3615870475769043, + "learning_rate": 2.256972598684634e-05, + "loss": 0.2267, + "step": 68930 + }, + { + "epoch": 2.6618788370207342, + "grad_norm": 0.26768797636032104, + "learning_rate": 2.2543984967244554e-05, + "loss": 0.0842, + "step": 68940 + }, + { + "epoch": 2.6622649523147612, + "grad_norm": 0.5720809102058411, + "learning_rate": 2.2518243947642767e-05, + "loss": 0.0955, + "step": 68950 + }, + { + "epoch": 2.6626510676087882, + "grad_norm": 0.8448322415351868, + "learning_rate": 2.249250292804098e-05, + "loss": 0.1251, + "step": 68960 + }, + { + "epoch": 2.6630371829028148, + "grad_norm": 2.9490509033203125, + "learning_rate": 2.2466761908439193e-05, + "loss": 0.1593, + "step": 68970 + }, + { + "epoch": 2.6634232981968413, + "grad_norm": 1.1557024717330933, + "learning_rate": 2.2441020888837407e-05, + "loss": 0.1336, + "step": 68980 + }, + { + "epoch": 2.6638094134908683, + "grad_norm": 2.981727361679077, + "learning_rate": 2.2415279869235623e-05, + "loss": 0.1487, + "step": 68990 + }, + { + "epoch": 2.6641955287848953, + "grad_norm": 0.5381894707679749, + "learning_rate": 2.2389538849633833e-05, + "loss": 0.1814, + "step": 69000 + }, + { + "epoch": 2.664581644078922, + "grad_norm": 0.833191990852356, + "learning_rate": 2.236379783003205e-05, + "loss": 0.1515, + "step": 69010 + }, + { + "epoch": 2.664967759372949, + "grad_norm": 0.1587102711200714, + "learning_rate": 2.2338056810430263e-05, + "loss": 0.0568, + "step": 69020 + }, + { + "epoch": 2.665353874666976, + "grad_norm": 1.5014970302581787, + "learning_rate": 2.2312315790828477e-05, + "loss": 0.1992, + "step": 69030 + }, + { + "epoch": 2.6657399899610024, + "grad_norm": 0.060449715703725815, + "learning_rate": 2.228657477122669e-05, + "loss": 0.1523, + "step": 69040 + }, + { + "epoch": 2.666126105255029, + "grad_norm": 1.7199037075042725, + "learning_rate": 2.22608337516249e-05, + "loss": 0.1056, + "step": 69050 + }, + { + "epoch": 2.666512220549056, + "grad_norm": 0.430899441242218, + "learning_rate": 2.2235092732023117e-05, + "loss": 0.1376, + "step": 69060 + }, + { + "epoch": 2.666898335843083, + "grad_norm": 0.16108714044094086, + "learning_rate": 2.220935171242133e-05, + "loss": 0.1109, + "step": 69070 + }, + { + "epoch": 2.6672844511371094, + "grad_norm": 3.1773228645324707, + "learning_rate": 2.2183610692819543e-05, + "loss": 0.3158, + "step": 69080 + }, + { + "epoch": 2.6676705664311364, + "grad_norm": 1.568304419517517, + "learning_rate": 2.2157869673217757e-05, + "loss": 0.2408, + "step": 69090 + }, + { + "epoch": 2.668056681725163, + "grad_norm": 1.8924100399017334, + "learning_rate": 2.2132128653615973e-05, + "loss": 0.2268, + "step": 69100 + }, + { + "epoch": 2.66844279701919, + "grad_norm": 2.1422247886657715, + "learning_rate": 2.2106387634014183e-05, + "loss": 0.1685, + "step": 69110 + }, + { + "epoch": 2.6688289123132165, + "grad_norm": 0.727570652961731, + "learning_rate": 2.2080646614412396e-05, + "loss": 0.3019, + "step": 69120 + }, + { + "epoch": 2.6692150276072435, + "grad_norm": 1.1987897157669067, + "learning_rate": 2.2054905594810613e-05, + "loss": 0.1511, + "step": 69130 + }, + { + "epoch": 2.6696011429012705, + "grad_norm": 2.25412654876709, + "learning_rate": 2.2029164575208823e-05, + "loss": 0.1538, + "step": 69140 + }, + { + "epoch": 2.669987258195297, + "grad_norm": 0.4829877018928528, + "learning_rate": 2.200342355560704e-05, + "loss": 0.2204, + "step": 69150 + }, + { + "epoch": 2.670373373489324, + "grad_norm": 0.8249949812889099, + 
"learning_rate": 2.1977682536005253e-05, + "loss": 0.1574, + "step": 69160 + }, + { + "epoch": 2.6707594887833506, + "grad_norm": 0.47408896684646606, + "learning_rate": 2.1951941516403466e-05, + "loss": 0.131, + "step": 69170 + }, + { + "epoch": 2.6711456040773776, + "grad_norm": 3.192263126373291, + "learning_rate": 2.192620049680168e-05, + "loss": 0.1284, + "step": 69180 + }, + { + "epoch": 2.671531719371404, + "grad_norm": 1.6318609714508057, + "learning_rate": 2.1900459477199893e-05, + "loss": 0.182, + "step": 69190 + }, + { + "epoch": 2.671917834665431, + "grad_norm": 2.031730890274048, + "learning_rate": 2.1874718457598106e-05, + "loss": 0.1774, + "step": 69200 + }, + { + "epoch": 2.672303949959458, + "grad_norm": 0.053225722163915634, + "learning_rate": 2.184897743799632e-05, + "loss": 0.2002, + "step": 69210 + }, + { + "epoch": 2.6726900652534846, + "grad_norm": 1.1087912321090698, + "learning_rate": 2.1823236418394533e-05, + "loss": 0.3255, + "step": 69220 + }, + { + "epoch": 2.6730761805475116, + "grad_norm": 1.7376277446746826, + "learning_rate": 2.1797495398792746e-05, + "loss": 0.1705, + "step": 69230 + }, + { + "epoch": 2.673462295841538, + "grad_norm": 0.7733955383300781, + "learning_rate": 2.1771754379190963e-05, + "loss": 0.2284, + "step": 69240 + }, + { + "epoch": 2.673848411135565, + "grad_norm": 2.198826313018799, + "learning_rate": 2.1746013359589173e-05, + "loss": 0.2463, + "step": 69250 + }, + { + "epoch": 2.6742345264295917, + "grad_norm": 0.9791239500045776, + "learning_rate": 2.172027233998739e-05, + "loss": 0.365, + "step": 69260 + }, + { + "epoch": 2.6746206417236187, + "grad_norm": 1.0145782232284546, + "learning_rate": 2.1694531320385603e-05, + "loss": 0.1168, + "step": 69270 + }, + { + "epoch": 2.6750067570176457, + "grad_norm": 1.506508708000183, + "learning_rate": 2.1668790300783813e-05, + "loss": 0.1254, + "step": 69280 + }, + { + "epoch": 2.675392872311672, + "grad_norm": 0.9859924912452698, + "learning_rate": 2.164304928118203e-05, + "loss": 0.2124, + "step": 69290 + }, + { + "epoch": 2.675778987605699, + "grad_norm": 1.4127247333526611, + "learning_rate": 2.1617308261580243e-05, + "loss": 0.1644, + "step": 69300 + }, + { + "epoch": 2.6761651028997258, + "grad_norm": 0.8753447532653809, + "learning_rate": 2.1591567241978456e-05, + "loss": 0.1948, + "step": 69310 + }, + { + "epoch": 2.6765512181937527, + "grad_norm": 0.18299230933189392, + "learning_rate": 2.156582622237667e-05, + "loss": 0.1042, + "step": 69320 + }, + { + "epoch": 2.6769373334877793, + "grad_norm": 0.9009674191474915, + "learning_rate": 2.1540085202774883e-05, + "loss": 0.0899, + "step": 69330 + }, + { + "epoch": 2.6773234487818063, + "grad_norm": 0.8263937830924988, + "learning_rate": 2.1514344183173096e-05, + "loss": 0.0843, + "step": 69340 + }, + { + "epoch": 2.6777095640758333, + "grad_norm": 0.40176376700401306, + "learning_rate": 2.148860316357131e-05, + "loss": 0.1645, + "step": 69350 + }, + { + "epoch": 2.67809567936986, + "grad_norm": 1.2371177673339844, + "learning_rate": 2.1462862143969523e-05, + "loss": 0.1491, + "step": 69360 + }, + { + "epoch": 2.6784817946638864, + "grad_norm": 0.6874446272850037, + "learning_rate": 2.1437121124367736e-05, + "loss": 0.1887, + "step": 69370 + }, + { + "epoch": 2.6788679099579134, + "grad_norm": 0.08807168155908585, + "learning_rate": 2.1411380104765953e-05, + "loss": 0.1049, + "step": 69380 + }, + { + "epoch": 2.6792540252519403, + "grad_norm": 2.0971579551696777, + "learning_rate": 2.1385639085164163e-05, + "loss": 0.135, + "step": 69390 
+ }, + { + "epoch": 2.679640140545967, + "grad_norm": 0.9297891855239868, + "learning_rate": 2.135989806556238e-05, + "loss": 0.2356, + "step": 69400 + }, + { + "epoch": 2.680026255839994, + "grad_norm": 2.100465774536133, + "learning_rate": 2.1334157045960593e-05, + "loss": 0.225, + "step": 69410 + }, + { + "epoch": 2.680412371134021, + "grad_norm": 0.14785470068454742, + "learning_rate": 2.1308416026358806e-05, + "loss": 0.1806, + "step": 69420 + }, + { + "epoch": 2.6807984864280474, + "grad_norm": 0.03883717209100723, + "learning_rate": 2.128267500675702e-05, + "loss": 0.0575, + "step": 69430 + }, + { + "epoch": 2.681184601722074, + "grad_norm": 0.515643835067749, + "learning_rate": 2.125693398715523e-05, + "loss": 0.1095, + "step": 69440 + }, + { + "epoch": 2.681570717016101, + "grad_norm": 0.21258410811424255, + "learning_rate": 2.1231192967553446e-05, + "loss": 0.1496, + "step": 69450 + }, + { + "epoch": 2.681956832310128, + "grad_norm": 1.14195716381073, + "learning_rate": 2.120545194795166e-05, + "loss": 0.182, + "step": 69460 + }, + { + "epoch": 2.6823429476041545, + "grad_norm": 0.43386051058769226, + "learning_rate": 2.1179710928349873e-05, + "loss": 0.2241, + "step": 69470 + }, + { + "epoch": 2.6827290628981815, + "grad_norm": 0.4654422700405121, + "learning_rate": 2.1153969908748086e-05, + "loss": 0.2264, + "step": 69480 + }, + { + "epoch": 2.6831151781922085, + "grad_norm": 0.8086020350456238, + "learning_rate": 2.1128228889146303e-05, + "loss": 0.1634, + "step": 69490 + }, + { + "epoch": 2.683501293486235, + "grad_norm": 0.03701169416308403, + "learning_rate": 2.1102487869544512e-05, + "loss": 0.1117, + "step": 69500 + }, + { + "epoch": 2.6838874087802616, + "grad_norm": 0.9567661285400391, + "learning_rate": 2.1076746849942726e-05, + "loss": 0.113, + "step": 69510 + }, + { + "epoch": 2.6842735240742885, + "grad_norm": 1.7322033643722534, + "learning_rate": 2.1051005830340943e-05, + "loss": 0.1443, + "step": 69520 + }, + { + "epoch": 2.6846596393683155, + "grad_norm": 1.8574343919754028, + "learning_rate": 2.1025264810739152e-05, + "loss": 0.0919, + "step": 69530 + }, + { + "epoch": 2.685045754662342, + "grad_norm": 0.0813397541642189, + "learning_rate": 2.099952379113737e-05, + "loss": 0.061, + "step": 69540 + }, + { + "epoch": 2.685431869956369, + "grad_norm": 0.09124821424484253, + "learning_rate": 2.0973782771535582e-05, + "loss": 0.0703, + "step": 69550 + }, + { + "epoch": 2.6858179852503956, + "grad_norm": 1.2731401920318604, + "learning_rate": 2.0948041751933796e-05, + "loss": 0.1788, + "step": 69560 + }, + { + "epoch": 2.6862041005444226, + "grad_norm": 0.38222697377204895, + "learning_rate": 2.092230073233201e-05, + "loss": 0.22, + "step": 69570 + }, + { + "epoch": 2.686590215838449, + "grad_norm": 0.7840344905853271, + "learning_rate": 2.0896559712730222e-05, + "loss": 0.0511, + "step": 69580 + }, + { + "epoch": 2.686976331132476, + "grad_norm": 0.5814514756202698, + "learning_rate": 2.0870818693128436e-05, + "loss": 0.1282, + "step": 69590 + }, + { + "epoch": 2.687362446426503, + "grad_norm": 2.049823045730591, + "learning_rate": 2.084507767352665e-05, + "loss": 0.1928, + "step": 69600 + }, + { + "epoch": 2.6877485617205297, + "grad_norm": 1.726441502571106, + "learning_rate": 2.0819336653924862e-05, + "loss": 0.2271, + "step": 69610 + }, + { + "epoch": 2.6881346770145567, + "grad_norm": 0.6086135506629944, + "learning_rate": 2.0793595634323076e-05, + "loss": 0.0826, + "step": 69620 + }, + { + "epoch": 2.688520792308583, + "grad_norm": 0.025057394057512283, + 
"learning_rate": 2.0767854614721292e-05, + "loss": 0.0945, + "step": 69630 + }, + { + "epoch": 2.68890690760261, + "grad_norm": 0.5811958312988281, + "learning_rate": 2.0742113595119502e-05, + "loss": 0.115, + "step": 69640 + }, + { + "epoch": 2.6892930228966367, + "grad_norm": 1.4226329326629639, + "learning_rate": 2.071637257551772e-05, + "loss": 0.1394, + "step": 69650 + }, + { + "epoch": 2.6896791381906637, + "grad_norm": 0.06111827492713928, + "learning_rate": 2.0690631555915932e-05, + "loss": 0.0785, + "step": 69660 + }, + { + "epoch": 2.6900652534846907, + "grad_norm": 1.7739264965057373, + "learning_rate": 2.0664890536314142e-05, + "loss": 0.0966, + "step": 69670 + }, + { + "epoch": 2.6904513687787173, + "grad_norm": 0.12530933320522308, + "learning_rate": 2.063914951671236e-05, + "loss": 0.1142, + "step": 69680 + }, + { + "epoch": 2.6908374840727443, + "grad_norm": 0.44947731494903564, + "learning_rate": 2.0613408497110572e-05, + "loss": 0.1069, + "step": 69690 + }, + { + "epoch": 2.691223599366771, + "grad_norm": 1.1874277591705322, + "learning_rate": 2.0587667477508786e-05, + "loss": 0.2498, + "step": 69700 + }, + { + "epoch": 2.691609714660798, + "grad_norm": 0.2770039141178131, + "learning_rate": 2.0561926457907e-05, + "loss": 0.0918, + "step": 69710 + }, + { + "epoch": 2.6919958299548243, + "grad_norm": 0.6367407441139221, + "learning_rate": 2.0536185438305212e-05, + "loss": 0.1444, + "step": 69720 + }, + { + "epoch": 2.6923819452488513, + "grad_norm": 4.309720993041992, + "learning_rate": 2.0510444418703425e-05, + "loss": 0.1751, + "step": 69730 + }, + { + "epoch": 2.6927680605428783, + "grad_norm": 0.19197171926498413, + "learning_rate": 2.048470339910164e-05, + "loss": 0.3638, + "step": 69740 + }, + { + "epoch": 2.693154175836905, + "grad_norm": 1.1299902200698853, + "learning_rate": 2.0458962379499852e-05, + "loss": 0.2047, + "step": 69750 + }, + { + "epoch": 2.693540291130932, + "grad_norm": 2.2639973163604736, + "learning_rate": 2.0433221359898065e-05, + "loss": 0.1683, + "step": 69760 + }, + { + "epoch": 2.6939264064249584, + "grad_norm": 0.7595259547233582, + "learning_rate": 2.0407480340296282e-05, + "loss": 0.1393, + "step": 69770 + }, + { + "epoch": 2.6943125217189854, + "grad_norm": 0.1113772988319397, + "learning_rate": 2.0381739320694492e-05, + "loss": 0.1775, + "step": 69780 + }, + { + "epoch": 2.694698637013012, + "grad_norm": 0.8597696423530579, + "learning_rate": 2.035599830109271e-05, + "loss": 0.092, + "step": 69790 + }, + { + "epoch": 2.695084752307039, + "grad_norm": 0.9622846841812134, + "learning_rate": 2.0330257281490922e-05, + "loss": 0.1407, + "step": 69800 + }, + { + "epoch": 2.695470867601066, + "grad_norm": 1.840627908706665, + "learning_rate": 2.0304516261889135e-05, + "loss": 0.1769, + "step": 69810 + }, + { + "epoch": 2.6958569828950925, + "grad_norm": 0.8552238941192627, + "learning_rate": 2.027877524228735e-05, + "loss": 0.263, + "step": 69820 + }, + { + "epoch": 2.696243098189119, + "grad_norm": 0.16129668056964874, + "learning_rate": 2.025303422268556e-05, + "loss": 0.188, + "step": 69830 + }, + { + "epoch": 2.696629213483146, + "grad_norm": 0.46955424547195435, + "learning_rate": 2.0227293203083775e-05, + "loss": 0.0623, + "step": 69840 + }, + { + "epoch": 2.697015328777173, + "grad_norm": 1.0460457801818848, + "learning_rate": 2.020155218348199e-05, + "loss": 0.1932, + "step": 69850 + }, + { + "epoch": 2.6974014440711995, + "grad_norm": 4.531238555908203, + "learning_rate": 2.0175811163880202e-05, + "loss": 0.323, + "step": 69860 + }, + 
{ + "epoch": 2.6977875593652265, + "grad_norm": 1.188908338546753, + "learning_rate": 2.0150070144278415e-05, + "loss": 0.1389, + "step": 69870 + }, + { + "epoch": 2.6981736746592535, + "grad_norm": 0.3120017349720001, + "learning_rate": 2.0124329124676632e-05, + "loss": 0.1732, + "step": 69880 + }, + { + "epoch": 2.69855978995328, + "grad_norm": 1.2681177854537964, + "learning_rate": 2.0098588105074842e-05, + "loss": 0.1452, + "step": 69890 + }, + { + "epoch": 2.6989459052473066, + "grad_norm": 0.07245191186666489, + "learning_rate": 2.0072847085473055e-05, + "loss": 0.0982, + "step": 69900 + }, + { + "epoch": 2.6993320205413336, + "grad_norm": 1.7727508544921875, + "learning_rate": 2.0047106065871272e-05, + "loss": 0.0806, + "step": 69910 + }, + { + "epoch": 2.6997181358353606, + "grad_norm": 0.6012092232704163, + "learning_rate": 2.0021365046269482e-05, + "loss": 0.1377, + "step": 69920 + }, + { + "epoch": 2.700104251129387, + "grad_norm": 0.6156259775161743, + "learning_rate": 1.99956240266677e-05, + "loss": 0.1788, + "step": 69930 + }, + { + "epoch": 2.700490366423414, + "grad_norm": 1.6917505264282227, + "learning_rate": 1.9969883007065912e-05, + "loss": 0.1588, + "step": 69940 + }, + { + "epoch": 2.700876481717441, + "grad_norm": 2.0406925678253174, + "learning_rate": 1.9944141987464125e-05, + "loss": 0.1541, + "step": 69950 + }, + { + "epoch": 2.7012625970114676, + "grad_norm": 3.067919969558716, + "learning_rate": 1.991840096786234e-05, + "loss": 0.1052, + "step": 69960 + }, + { + "epoch": 2.701648712305494, + "grad_norm": 0.7679221034049988, + "learning_rate": 1.989265994826055e-05, + "loss": 0.2131, + "step": 69970 + }, + { + "epoch": 2.702034827599521, + "grad_norm": 0.9475175738334656, + "learning_rate": 1.9866918928658765e-05, + "loss": 0.1016, + "step": 69980 + }, + { + "epoch": 2.702420942893548, + "grad_norm": 1.2485641241073608, + "learning_rate": 1.984117790905698e-05, + "loss": 0.0922, + "step": 69990 + }, + { + "epoch": 2.7028070581875747, + "grad_norm": 0.9329742789268494, + "learning_rate": 1.981543688945519e-05, + "loss": 0.3189, + "step": 70000 + }, + { + "epoch": 2.7031931734816017, + "grad_norm": 0.6140137314796448, + "learning_rate": 1.9789695869853405e-05, + "loss": 0.1054, + "step": 70010 + }, + { + "epoch": 2.7035792887756283, + "grad_norm": 0.938637375831604, + "learning_rate": 1.976395485025162e-05, + "loss": 0.1276, + "step": 70020 + }, + { + "epoch": 2.7039654040696552, + "grad_norm": 1.2453144788742065, + "learning_rate": 1.973821383064983e-05, + "loss": 0.2376, + "step": 70030 + }, + { + "epoch": 2.704351519363682, + "grad_norm": 0.7409077286720276, + "learning_rate": 1.9712472811048048e-05, + "loss": 0.1269, + "step": 70040 + }, + { + "epoch": 2.704737634657709, + "grad_norm": 1.8768031597137451, + "learning_rate": 1.968673179144626e-05, + "loss": 0.235, + "step": 70050 + }, + { + "epoch": 2.7051237499517358, + "grad_norm": 0.23621875047683716, + "learning_rate": 1.966099077184447e-05, + "loss": 0.1042, + "step": 70060 + }, + { + "epoch": 2.7055098652457623, + "grad_norm": 0.2492808699607849, + "learning_rate": 1.9635249752242688e-05, + "loss": 0.2042, + "step": 70070 + }, + { + "epoch": 2.7058959805397893, + "grad_norm": 1.5175273418426514, + "learning_rate": 1.96095087326409e-05, + "loss": 0.2068, + "step": 70080 + }, + { + "epoch": 2.706282095833816, + "grad_norm": 0.9314035773277283, + "learning_rate": 1.9583767713039115e-05, + "loss": 0.1587, + "step": 70090 + }, + { + "epoch": 2.706668211127843, + "grad_norm": 1.7147942781448364, + 
"learning_rate": 1.9558026693437328e-05, + "loss": 0.1391, + "step": 70100 + }, + { + "epoch": 2.7070543264218694, + "grad_norm": 0.36276572942733765, + "learning_rate": 1.953228567383554e-05, + "loss": 0.1275, + "step": 70110 + }, + { + "epoch": 2.7074404417158964, + "grad_norm": 0.8741244077682495, + "learning_rate": 1.9506544654233755e-05, + "loss": 0.3746, + "step": 70120 + }, + { + "epoch": 2.7078265570099234, + "grad_norm": 1.3060029745101929, + "learning_rate": 1.9480803634631968e-05, + "loss": 0.1076, + "step": 70130 + }, + { + "epoch": 2.70821267230395, + "grad_norm": 0.10746710747480392, + "learning_rate": 1.945506261503018e-05, + "loss": 0.1494, + "step": 70140 + }, + { + "epoch": 2.708598787597977, + "grad_norm": 1.486136555671692, + "learning_rate": 1.9429321595428395e-05, + "loss": 0.1223, + "step": 70150 + }, + { + "epoch": 2.7089849028920034, + "grad_norm": 1.184791088104248, + "learning_rate": 1.940358057582661e-05, + "loss": 0.1055, + "step": 70160 + }, + { + "epoch": 2.7093710181860304, + "grad_norm": 0.10033337771892548, + "learning_rate": 1.937783955622482e-05, + "loss": 0.083, + "step": 70170 + }, + { + "epoch": 2.709757133480057, + "grad_norm": 1.3200103044509888, + "learning_rate": 1.9352098536623038e-05, + "loss": 0.2181, + "step": 70180 + }, + { + "epoch": 2.710143248774084, + "grad_norm": 0.013393727131187916, + "learning_rate": 1.932635751702125e-05, + "loss": 0.2415, + "step": 70190 + }, + { + "epoch": 2.710529364068111, + "grad_norm": 0.5986078381538391, + "learning_rate": 1.9300616497419465e-05, + "loss": 0.3203, + "step": 70200 + }, + { + "epoch": 2.7109154793621375, + "grad_norm": 0.18333138525485992, + "learning_rate": 1.9274875477817678e-05, + "loss": 0.1043, + "step": 70210 + }, + { + "epoch": 2.7113015946561645, + "grad_norm": 1.0568320751190186, + "learning_rate": 1.924913445821589e-05, + "loss": 0.1491, + "step": 70220 + }, + { + "epoch": 2.711687709950191, + "grad_norm": 4.519015312194824, + "learning_rate": 1.9223393438614105e-05, + "loss": 0.2732, + "step": 70230 + }, + { + "epoch": 2.712073825244218, + "grad_norm": 0.753288209438324, + "learning_rate": 1.9197652419012318e-05, + "loss": 0.1793, + "step": 70240 + }, + { + "epoch": 2.7124599405382446, + "grad_norm": 0.8923632502555847, + "learning_rate": 1.917191139941053e-05, + "loss": 0.3293, + "step": 70250 + }, + { + "epoch": 2.7128460558322716, + "grad_norm": 1.5100219249725342, + "learning_rate": 1.9146170379808744e-05, + "loss": 0.2252, + "step": 70260 + }, + { + "epoch": 2.7132321711262986, + "grad_norm": 3.496548891067505, + "learning_rate": 1.912042936020696e-05, + "loss": 0.3877, + "step": 70270 + }, + { + "epoch": 2.713618286420325, + "grad_norm": 1.0033410787582397, + "learning_rate": 1.909468834060517e-05, + "loss": 0.1854, + "step": 70280 + }, + { + "epoch": 2.7140044017143516, + "grad_norm": 2.1113343238830566, + "learning_rate": 1.9068947321003384e-05, + "loss": 0.2561, + "step": 70290 + }, + { + "epoch": 2.7143905170083786, + "grad_norm": 3.4603283405303955, + "learning_rate": 1.90432063014016e-05, + "loss": 0.2673, + "step": 70300 + }, + { + "epoch": 2.7147766323024056, + "grad_norm": 0.4921800196170807, + "learning_rate": 1.901746528179981e-05, + "loss": 0.1819, + "step": 70310 + }, + { + "epoch": 2.715162747596432, + "grad_norm": 0.12639844417572021, + "learning_rate": 1.8991724262198028e-05, + "loss": 0.1302, + "step": 70320 + }, + { + "epoch": 2.715548862890459, + "grad_norm": 0.988345205783844, + "learning_rate": 1.896598324259624e-05, + "loss": 0.2827, + "step": 70330 + }, + 
{ + "epoch": 2.715934978184486, + "grad_norm": 1.432824730873108, + "learning_rate": 1.8940242222994454e-05, + "loss": 0.1046, + "step": 70340 + }, + { + "epoch": 2.7163210934785127, + "grad_norm": 0.4591884911060333, + "learning_rate": 1.8914501203392668e-05, + "loss": 0.2663, + "step": 70350 + }, + { + "epoch": 2.7167072087725392, + "grad_norm": 0.23119209706783295, + "learning_rate": 1.888876018379088e-05, + "loss": 0.3555, + "step": 70360 + }, + { + "epoch": 2.7170933240665662, + "grad_norm": 1.9221980571746826, + "learning_rate": 1.8863019164189094e-05, + "loss": 0.1279, + "step": 70370 + }, + { + "epoch": 2.717479439360593, + "grad_norm": 0.09880539029836655, + "learning_rate": 1.8837278144587308e-05, + "loss": 0.0905, + "step": 70380 + }, + { + "epoch": 2.7178655546546198, + "grad_norm": 0.16270965337753296, + "learning_rate": 1.881153712498552e-05, + "loss": 0.1619, + "step": 70390 + }, + { + "epoch": 2.7182516699486468, + "grad_norm": 2.006127119064331, + "learning_rate": 1.8785796105383734e-05, + "loss": 0.107, + "step": 70400 + }, + { + "epoch": 2.7186377852426733, + "grad_norm": 3.8377106189727783, + "learning_rate": 1.876005508578195e-05, + "loss": 0.1109, + "step": 70410 + }, + { + "epoch": 2.7190239005367003, + "grad_norm": 0.5417147278785706, + "learning_rate": 1.873431406618016e-05, + "loss": 0.0836, + "step": 70420 + }, + { + "epoch": 2.719410015830727, + "grad_norm": 0.2515392601490021, + "learning_rate": 1.8708573046578378e-05, + "loss": 0.1107, + "step": 70430 + }, + { + "epoch": 2.719796131124754, + "grad_norm": 1.813265323638916, + "learning_rate": 1.868283202697659e-05, + "loss": 0.125, + "step": 70440 + }, + { + "epoch": 2.720182246418781, + "grad_norm": 1.873964548110962, + "learning_rate": 1.86570910073748e-05, + "loss": 0.1649, + "step": 70450 + }, + { + "epoch": 2.7205683617128074, + "grad_norm": 0.23251821100711823, + "learning_rate": 1.8631349987773017e-05, + "loss": 0.1394, + "step": 70460 + }, + { + "epoch": 2.7209544770068343, + "grad_norm": 3.282196521759033, + "learning_rate": 1.860560896817123e-05, + "loss": 0.3037, + "step": 70470 + }, + { + "epoch": 2.721340592300861, + "grad_norm": 1.481994867324829, + "learning_rate": 1.8579867948569444e-05, + "loss": 0.194, + "step": 70480 + }, + { + "epoch": 2.721726707594888, + "grad_norm": 2.3261420726776123, + "learning_rate": 1.8554126928967657e-05, + "loss": 0.2485, + "step": 70490 + }, + { + "epoch": 2.7221128228889144, + "grad_norm": 0.7159029841423035, + "learning_rate": 1.852838590936587e-05, + "loss": 0.0882, + "step": 70500 + }, + { + "epoch": 2.7224989381829414, + "grad_norm": 1.13942289352417, + "learning_rate": 1.8502644889764084e-05, + "loss": 0.2973, + "step": 70510 + }, + { + "epoch": 2.7228850534769684, + "grad_norm": 0.5620355606079102, + "learning_rate": 1.8476903870162297e-05, + "loss": 0.1388, + "step": 70520 + }, + { + "epoch": 2.723271168770995, + "grad_norm": 0.3864080607891083, + "learning_rate": 1.845116285056051e-05, + "loss": 0.3104, + "step": 70530 + }, + { + "epoch": 2.723657284065022, + "grad_norm": 0.19849848747253418, + "learning_rate": 1.8425421830958724e-05, + "loss": 0.1077, + "step": 70540 + }, + { + "epoch": 2.7240433993590485, + "grad_norm": 0.5163066983222961, + "learning_rate": 1.839968081135694e-05, + "loss": 0.1412, + "step": 70550 + }, + { + "epoch": 2.7244295146530755, + "grad_norm": 2.6426632404327393, + "learning_rate": 1.837393979175515e-05, + "loss": 0.1635, + "step": 70560 + }, + { + "epoch": 2.724815629947102, + "grad_norm": 0.09032654017210007, + 
"learning_rate": 1.8348198772153367e-05, + "loss": 0.1136, + "step": 70570 + }, + { + "epoch": 2.725201745241129, + "grad_norm": 0.20428933203220367, + "learning_rate": 1.832245775255158e-05, + "loss": 0.0685, + "step": 70580 + }, + { + "epoch": 2.725587860535156, + "grad_norm": 0.16030457615852356, + "learning_rate": 1.8296716732949794e-05, + "loss": 0.236, + "step": 70590 + }, + { + "epoch": 2.7259739758291825, + "grad_norm": 0.4269642233848572, + "learning_rate": 1.8270975713348007e-05, + "loss": 0.1099, + "step": 70600 + }, + { + "epoch": 2.7263600911232095, + "grad_norm": 0.821434736251831, + "learning_rate": 1.824523469374622e-05, + "loss": 0.0975, + "step": 70610 + }, + { + "epoch": 2.726746206417236, + "grad_norm": 0.6720656156539917, + "learning_rate": 1.8219493674144434e-05, + "loss": 0.286, + "step": 70620 + }, + { + "epoch": 2.727132321711263, + "grad_norm": 1.6225451231002808, + "learning_rate": 1.8193752654542647e-05, + "loss": 0.1615, + "step": 70630 + }, + { + "epoch": 2.7275184370052896, + "grad_norm": 2.2176315784454346, + "learning_rate": 1.816801163494086e-05, + "loss": 0.2519, + "step": 70640 + }, + { + "epoch": 2.7279045522993166, + "grad_norm": 0.7203749418258667, + "learning_rate": 1.8142270615339074e-05, + "loss": 0.0635, + "step": 70650 + }, + { + "epoch": 2.7282906675933436, + "grad_norm": 1.0711476802825928, + "learning_rate": 1.8116529595737287e-05, + "loss": 0.1388, + "step": 70660 + }, + { + "epoch": 2.72867678288737, + "grad_norm": 0.3595193922519684, + "learning_rate": 1.80907885761355e-05, + "loss": 0.3019, + "step": 70670 + }, + { + "epoch": 2.7290628981813967, + "grad_norm": 2.369717597961426, + "learning_rate": 1.8065047556533714e-05, + "loss": 0.202, + "step": 70680 + }, + { + "epoch": 2.7294490134754237, + "grad_norm": 0.9249016046524048, + "learning_rate": 1.803930653693193e-05, + "loss": 0.1571, + "step": 70690 + }, + { + "epoch": 2.7298351287694507, + "grad_norm": 1.1907705068588257, + "learning_rate": 1.801356551733014e-05, + "loss": 0.1, + "step": 70700 + }, + { + "epoch": 2.730221244063477, + "grad_norm": 1.934824824333191, + "learning_rate": 1.7987824497728357e-05, + "loss": 0.121, + "step": 70710 + }, + { + "epoch": 2.730607359357504, + "grad_norm": 0.8659215569496155, + "learning_rate": 1.796208347812657e-05, + "loss": 0.1184, + "step": 70720 + }, + { + "epoch": 2.730993474651531, + "grad_norm": 4.406744956970215, + "learning_rate": 1.7936342458524784e-05, + "loss": 0.1405, + "step": 70730 + }, + { + "epoch": 2.7313795899455577, + "grad_norm": 1.3139662742614746, + "learning_rate": 1.7910601438922997e-05, + "loss": 0.1341, + "step": 70740 + }, + { + "epoch": 2.7317657052395843, + "grad_norm": 1.3049808740615845, + "learning_rate": 1.788486041932121e-05, + "loss": 0.096, + "step": 70750 + }, + { + "epoch": 2.7321518205336113, + "grad_norm": 0.05369478836655617, + "learning_rate": 1.7859119399719424e-05, + "loss": 0.1148, + "step": 70760 + }, + { + "epoch": 2.7325379358276383, + "grad_norm": 0.028254307806491852, + "learning_rate": 1.7833378380117637e-05, + "loss": 0.1491, + "step": 70770 + }, + { + "epoch": 2.732924051121665, + "grad_norm": 0.44366562366485596, + "learning_rate": 1.780763736051585e-05, + "loss": 0.1294, + "step": 70780 + }, + { + "epoch": 2.733310166415692, + "grad_norm": 1.80902099609375, + "learning_rate": 1.7781896340914064e-05, + "loss": 0.4409, + "step": 70790 + }, + { + "epoch": 2.733696281709719, + "grad_norm": 0.48230284452438354, + "learning_rate": 1.775615532131228e-05, + "loss": 0.1516, + "step": 70800 + }, + { + 
"epoch": 2.7340823970037453, + "grad_norm": 1.295810341835022, + "learning_rate": 1.773041430171049e-05, + "loss": 0.1375, + "step": 70810 + }, + { + "epoch": 2.734468512297772, + "grad_norm": 0.05213301628828049, + "learning_rate": 1.7704673282108707e-05, + "loss": 0.1678, + "step": 70820 + }, + { + "epoch": 2.734854627591799, + "grad_norm": 1.5852004289627075, + "learning_rate": 1.767893226250692e-05, + "loss": 0.2333, + "step": 70830 + }, + { + "epoch": 2.735240742885826, + "grad_norm": 0.05190286785364151, + "learning_rate": 1.765319124290513e-05, + "loss": 0.1131, + "step": 70840 + }, + { + "epoch": 2.7356268581798524, + "grad_norm": 0.9715459942817688, + "learning_rate": 1.7627450223303347e-05, + "loss": 0.2, + "step": 70850 + }, + { + "epoch": 2.7360129734738794, + "grad_norm": 1.0015023946762085, + "learning_rate": 1.760170920370156e-05, + "loss": 0.1492, + "step": 70860 + }, + { + "epoch": 2.736399088767906, + "grad_norm": 0.4785858392715454, + "learning_rate": 1.7575968184099773e-05, + "loss": 0.1157, + "step": 70870 + }, + { + "epoch": 2.736785204061933, + "grad_norm": 1.2634512186050415, + "learning_rate": 1.7550227164497987e-05, + "loss": 0.196, + "step": 70880 + }, + { + "epoch": 2.7371713193559595, + "grad_norm": 0.08982031047344208, + "learning_rate": 1.75244861448962e-05, + "loss": 0.1772, + "step": 70890 + }, + { + "epoch": 2.7375574346499865, + "grad_norm": 0.2539536952972412, + "learning_rate": 1.7498745125294413e-05, + "loss": 0.2373, + "step": 70900 + }, + { + "epoch": 2.7379435499440135, + "grad_norm": 1.94331955909729, + "learning_rate": 1.7473004105692627e-05, + "loss": 0.1113, + "step": 70910 + }, + { + "epoch": 2.73832966523804, + "grad_norm": 1.7793807983398438, + "learning_rate": 1.744726308609084e-05, + "loss": 0.1222, + "step": 70920 + }, + { + "epoch": 2.738715780532067, + "grad_norm": 0.9183433055877686, + "learning_rate": 1.7421522066489053e-05, + "loss": 0.0982, + "step": 70930 + }, + { + "epoch": 2.7391018958260935, + "grad_norm": 0.7785767316818237, + "learning_rate": 1.739578104688727e-05, + "loss": 0.2077, + "step": 70940 + }, + { + "epoch": 2.7394880111201205, + "grad_norm": 0.363359272480011, + "learning_rate": 1.737004002728548e-05, + "loss": 0.2365, + "step": 70950 + }, + { + "epoch": 2.739874126414147, + "grad_norm": 0.026698095723986626, + "learning_rate": 1.7344299007683697e-05, + "loss": 0.185, + "step": 70960 + }, + { + "epoch": 2.740260241708174, + "grad_norm": 0.3695981502532959, + "learning_rate": 1.731855798808191e-05, + "loss": 0.1889, + "step": 70970 + }, + { + "epoch": 2.740646357002201, + "grad_norm": 0.43547266721725464, + "learning_rate": 1.7292816968480123e-05, + "loss": 0.1945, + "step": 70980 + }, + { + "epoch": 2.7410324722962276, + "grad_norm": 0.815490186214447, + "learning_rate": 1.7267075948878337e-05, + "loss": 0.1461, + "step": 70990 + }, + { + "epoch": 2.7414185875902546, + "grad_norm": 0.9073535799980164, + "learning_rate": 1.724133492927655e-05, + "loss": 0.3003, + "step": 71000 + }, + { + "epoch": 2.741804702884281, + "grad_norm": 1.8418182134628296, + "learning_rate": 1.7215593909674763e-05, + "loss": 0.1591, + "step": 71010 + }, + { + "epoch": 2.742190818178308, + "grad_norm": 0.6584638953208923, + "learning_rate": 1.7189852890072976e-05, + "loss": 0.0845, + "step": 71020 + }, + { + "epoch": 2.7425769334723347, + "grad_norm": 0.15774297714233398, + "learning_rate": 1.716411187047119e-05, + "loss": 0.185, + "step": 71030 + }, + { + "epoch": 2.7429630487663617, + "grad_norm": 1.1900436878204346, + "learning_rate": 
1.7138370850869403e-05, + "loss": 0.2387, + "step": 71040 + }, + { + "epoch": 2.7433491640603886, + "grad_norm": 1.486275553703308, + "learning_rate": 1.7112629831267616e-05, + "loss": 0.1802, + "step": 71050 + }, + { + "epoch": 2.743735279354415, + "grad_norm": 1.8878792524337769, + "learning_rate": 1.708688881166583e-05, + "loss": 0.1771, + "step": 71060 + }, + { + "epoch": 2.744121394648442, + "grad_norm": 0.4045495390892029, + "learning_rate": 1.7061147792064043e-05, + "loss": 0.0617, + "step": 71070 + }, + { + "epoch": 2.7445075099424687, + "grad_norm": 1.6910227537155151, + "learning_rate": 1.703540677246226e-05, + "loss": 0.2295, + "step": 71080 + }, + { + "epoch": 2.7448936252364957, + "grad_norm": 1.3064563274383545, + "learning_rate": 1.700966575286047e-05, + "loss": 0.189, + "step": 71090 + }, + { + "epoch": 2.7452797405305223, + "grad_norm": 0.26879480481147766, + "learning_rate": 1.6983924733258686e-05, + "loss": 0.2041, + "step": 71100 + }, + { + "epoch": 2.7456658558245493, + "grad_norm": 0.09380711615085602, + "learning_rate": 1.69581837136569e-05, + "loss": 0.2032, + "step": 71110 + }, + { + "epoch": 2.7460519711185762, + "grad_norm": 0.4047906696796417, + "learning_rate": 1.6932442694055113e-05, + "loss": 0.1808, + "step": 71120 + }, + { + "epoch": 2.746438086412603, + "grad_norm": 0.14023207128047943, + "learning_rate": 1.6906701674453326e-05, + "loss": 0.0477, + "step": 71130 + }, + { + "epoch": 2.7468242017066293, + "grad_norm": 0.09656676650047302, + "learning_rate": 1.688096065485154e-05, + "loss": 0.1482, + "step": 71140 + }, + { + "epoch": 2.7472103170006563, + "grad_norm": 0.62394118309021, + "learning_rate": 1.6855219635249753e-05, + "loss": 0.0637, + "step": 71150 + }, + { + "epoch": 2.7475964322946833, + "grad_norm": 0.028015749529004097, + "learning_rate": 1.6829478615647966e-05, + "loss": 0.1396, + "step": 71160 + }, + { + "epoch": 2.74798254758871, + "grad_norm": 0.6252595782279968, + "learning_rate": 1.680373759604618e-05, + "loss": 0.1989, + "step": 71170 + }, + { + "epoch": 2.748368662882737, + "grad_norm": 1.6278966665267944, + "learning_rate": 1.6777996576444393e-05, + "loss": 0.2828, + "step": 71180 + }, + { + "epoch": 2.748754778176764, + "grad_norm": 0.7790352702140808, + "learning_rate": 1.675225555684261e-05, + "loss": 0.1542, + "step": 71190 + }, + { + "epoch": 2.7491408934707904, + "grad_norm": 0.41583356261253357, + "learning_rate": 1.672651453724082e-05, + "loss": 0.1853, + "step": 71200 + }, + { + "epoch": 2.749527008764817, + "grad_norm": 0.22601169347763062, + "learning_rate": 1.6700773517639036e-05, + "loss": 0.1429, + "step": 71210 + }, + { + "epoch": 2.749913124058844, + "grad_norm": 0.7268449068069458, + "learning_rate": 1.667503249803725e-05, + "loss": 0.156, + "step": 71220 + }, + { + "epoch": 2.750299239352871, + "grad_norm": 0.8059683442115784, + "learning_rate": 1.664929147843546e-05, + "loss": 0.1616, + "step": 71230 + }, + { + "epoch": 2.7506853546468975, + "grad_norm": 1.2625138759613037, + "learning_rate": 1.6623550458833676e-05, + "loss": 0.115, + "step": 71240 + }, + { + "epoch": 2.7510714699409244, + "grad_norm": 0.4710187315940857, + "learning_rate": 1.659780943923189e-05, + "loss": 0.1266, + "step": 71250 + }, + { + "epoch": 2.7514575852349514, + "grad_norm": 1.3373891115188599, + "learning_rate": 1.6572068419630103e-05, + "loss": 0.1423, + "step": 71260 + }, + { + "epoch": 2.751843700528978, + "grad_norm": 0.5606533288955688, + "learning_rate": 1.6546327400028316e-05, + "loss": 0.0912, + "step": 71270 + }, + { + "epoch": 
2.7522298158230045, + "grad_norm": 1.2201918363571167, + "learning_rate": 1.652058638042653e-05, + "loss": 0.2536, + "step": 71280 + }, + { + "epoch": 2.7526159311170315, + "grad_norm": 1.017829418182373, + "learning_rate": 1.6494845360824743e-05, + "loss": 0.0917, + "step": 71290 + }, + { + "epoch": 2.7530020464110585, + "grad_norm": 0.6897231340408325, + "learning_rate": 1.6469104341222956e-05, + "loss": 0.1222, + "step": 71300 + }, + { + "epoch": 2.753388161705085, + "grad_norm": 5.349620342254639, + "learning_rate": 1.644336332162117e-05, + "loss": 0.1121, + "step": 71310 + }, + { + "epoch": 2.753774276999112, + "grad_norm": 1.6144895553588867, + "learning_rate": 1.6417622302019383e-05, + "loss": 0.146, + "step": 71320 + }, + { + "epoch": 2.7541603922931386, + "grad_norm": 0.5989459753036499, + "learning_rate": 1.63918812824176e-05, + "loss": 0.0598, + "step": 71330 + }, + { + "epoch": 2.7545465075871656, + "grad_norm": 0.7278910279273987, + "learning_rate": 1.636614026281581e-05, + "loss": 0.1559, + "step": 71340 + }, + { + "epoch": 2.754932622881192, + "grad_norm": 1.0088047981262207, + "learning_rate": 1.6340399243214026e-05, + "loss": 0.0795, + "step": 71350 + }, + { + "epoch": 2.755318738175219, + "grad_norm": 1.3781206607818604, + "learning_rate": 1.631465822361224e-05, + "loss": 0.0628, + "step": 71360 + }, + { + "epoch": 2.755704853469246, + "grad_norm": 1.498246431350708, + "learning_rate": 1.6288917204010453e-05, + "loss": 0.1908, + "step": 71370 + }, + { + "epoch": 2.7560909687632726, + "grad_norm": 0.2840694785118103, + "learning_rate": 1.6263176184408666e-05, + "loss": 0.0357, + "step": 71380 + }, + { + "epoch": 2.7564770840572996, + "grad_norm": 0.6458057761192322, + "learning_rate": 1.623743516480688e-05, + "loss": 0.215, + "step": 71390 + }, + { + "epoch": 2.756863199351326, + "grad_norm": 0.42117947340011597, + "learning_rate": 1.6211694145205092e-05, + "loss": 0.1362, + "step": 71400 + }, + { + "epoch": 2.757249314645353, + "grad_norm": 3.0162267684936523, + "learning_rate": 1.6185953125603306e-05, + "loss": 0.1965, + "step": 71410 + }, + { + "epoch": 2.7576354299393797, + "grad_norm": 0.930474579334259, + "learning_rate": 1.616021210600152e-05, + "loss": 0.2395, + "step": 71420 + }, + { + "epoch": 2.7580215452334067, + "grad_norm": 0.7894459366798401, + "learning_rate": 1.6134471086399732e-05, + "loss": 0.2285, + "step": 71430 + }, + { + "epoch": 2.7584076605274337, + "grad_norm": 3.2045016288757324, + "learning_rate": 1.6108730066797946e-05, + "loss": 0.1819, + "step": 71440 + }, + { + "epoch": 2.7587937758214602, + "grad_norm": 0.7269306778907776, + "learning_rate": 1.608298904719616e-05, + "loss": 0.1594, + "step": 71450 + }, + { + "epoch": 2.7591798911154872, + "grad_norm": 1.333078145980835, + "learning_rate": 1.6057248027594372e-05, + "loss": 0.2131, + "step": 71460 + }, + { + "epoch": 2.7595660064095138, + "grad_norm": 0.6923009753227234, + "learning_rate": 1.603150700799259e-05, + "loss": 0.1174, + "step": 71470 + }, + { + "epoch": 2.7599521217035408, + "grad_norm": 3.510756254196167, + "learning_rate": 1.60057659883908e-05, + "loss": 0.1368, + "step": 71480 + }, + { + "epoch": 2.7603382369975673, + "grad_norm": 0.035045500844717026, + "learning_rate": 1.5980024968789016e-05, + "loss": 0.2163, + "step": 71490 + }, + { + "epoch": 2.7607243522915943, + "grad_norm": 0.9699954390525818, + "learning_rate": 1.595428394918723e-05, + "loss": 0.1279, + "step": 71500 + }, + { + "epoch": 2.7611104675856213, + "grad_norm": 1.3624379634857178, + "learning_rate": 
1.5928542929585442e-05, + "loss": 0.1458, + "step": 71510 + }, + { + "epoch": 2.761496582879648, + "grad_norm": 2.585167646408081, + "learning_rate": 1.5902801909983656e-05, + "loss": 0.2321, + "step": 71520 + }, + { + "epoch": 2.761882698173675, + "grad_norm": 2.018916130065918, + "learning_rate": 1.587706089038187e-05, + "loss": 0.1301, + "step": 71530 + }, + { + "epoch": 2.7622688134677014, + "grad_norm": 0.1349097490310669, + "learning_rate": 1.5851319870780082e-05, + "loss": 0.1449, + "step": 71540 + }, + { + "epoch": 2.7626549287617284, + "grad_norm": 0.651360273361206, + "learning_rate": 1.5825578851178296e-05, + "loss": 0.0588, + "step": 71550 + }, + { + "epoch": 2.763041044055755, + "grad_norm": 1.081132411956787, + "learning_rate": 1.579983783157651e-05, + "loss": 0.2273, + "step": 71560 + }, + { + "epoch": 2.763427159349782, + "grad_norm": 0.9000619649887085, + "learning_rate": 1.5774096811974722e-05, + "loss": 0.126, + "step": 71570 + }, + { + "epoch": 2.763813274643809, + "grad_norm": 1.7903470993041992, + "learning_rate": 1.574835579237294e-05, + "loss": 0.1583, + "step": 71580 + }, + { + "epoch": 2.7641993899378354, + "grad_norm": 0.5552549362182617, + "learning_rate": 1.572261477277115e-05, + "loss": 0.1613, + "step": 71590 + }, + { + "epoch": 2.764585505231862, + "grad_norm": 0.24077007174491882, + "learning_rate": 1.5696873753169365e-05, + "loss": 0.1225, + "step": 71600 + }, + { + "epoch": 2.764971620525889, + "grad_norm": 2.2357699871063232, + "learning_rate": 1.567113273356758e-05, + "loss": 0.1241, + "step": 71610 + }, + { + "epoch": 2.765357735819916, + "grad_norm": 0.49114760756492615, + "learning_rate": 1.564539171396579e-05, + "loss": 0.3685, + "step": 71620 + }, + { + "epoch": 2.7657438511139425, + "grad_norm": 0.5270382761955261, + "learning_rate": 1.5619650694364005e-05, + "loss": 0.231, + "step": 71630 + }, + { + "epoch": 2.7661299664079695, + "grad_norm": 1.494850993156433, + "learning_rate": 1.559390967476222e-05, + "loss": 0.1456, + "step": 71640 + }, + { + "epoch": 2.7665160817019965, + "grad_norm": 0.32450973987579346, + "learning_rate": 1.5568168655160432e-05, + "loss": 0.2541, + "step": 71650 + }, + { + "epoch": 2.766902196996023, + "grad_norm": 0.1868717074394226, + "learning_rate": 1.5542427635558645e-05, + "loss": 0.2315, + "step": 71660 + }, + { + "epoch": 2.7672883122900496, + "grad_norm": 1.8473451137542725, + "learning_rate": 1.551668661595686e-05, + "loss": 0.2918, + "step": 71670 + }, + { + "epoch": 2.7676744275840766, + "grad_norm": 1.959158182144165, + "learning_rate": 1.5490945596355072e-05, + "loss": 0.1368, + "step": 71680 + }, + { + "epoch": 2.7680605428781035, + "grad_norm": 1.8263370990753174, + "learning_rate": 1.5465204576753285e-05, + "loss": 0.2409, + "step": 71690 + }, + { + "epoch": 2.76844665817213, + "grad_norm": 0.5502326488494873, + "learning_rate": 1.54394635571515e-05, + "loss": 0.1233, + "step": 71700 + }, + { + "epoch": 2.768832773466157, + "grad_norm": 1.971820592880249, + "learning_rate": 1.5413722537549712e-05, + "loss": 0.2207, + "step": 71710 + }, + { + "epoch": 2.7692188887601836, + "grad_norm": 1.1263493299484253, + "learning_rate": 1.538798151794793e-05, + "loss": 0.1296, + "step": 71720 + }, + { + "epoch": 2.7696050040542106, + "grad_norm": 0.28595641255378723, + "learning_rate": 1.536224049834614e-05, + "loss": 0.1539, + "step": 71730 + }, + { + "epoch": 2.769991119348237, + "grad_norm": 1.1686561107635498, + "learning_rate": 1.5336499478744355e-05, + "loss": 0.2165, + "step": 71740 + }, + { + "epoch": 
2.770377234642264, + "grad_norm": 0.6559491157531738, + "learning_rate": 1.531075845914257e-05, + "loss": 0.1464, + "step": 71750 + }, + { + "epoch": 2.770763349936291, + "grad_norm": 0.5865970253944397, + "learning_rate": 1.5285017439540782e-05, + "loss": 0.1076, + "step": 71760 + }, + { + "epoch": 2.7711494652303177, + "grad_norm": 1.1443376541137695, + "learning_rate": 1.5259276419938995e-05, + "loss": 0.1497, + "step": 71770 + }, + { + "epoch": 2.7715355805243447, + "grad_norm": 1.307176947593689, + "learning_rate": 1.523353540033721e-05, + "loss": 0.1435, + "step": 71780 + }, + { + "epoch": 2.7719216958183712, + "grad_norm": 0.014405300840735435, + "learning_rate": 1.5207794380735422e-05, + "loss": 0.1249, + "step": 71790 + }, + { + "epoch": 2.772307811112398, + "grad_norm": 1.4182642698287964, + "learning_rate": 1.5182053361133635e-05, + "loss": 0.1405, + "step": 71800 + }, + { + "epoch": 2.7726939264064248, + "grad_norm": 0.27843913435935974, + "learning_rate": 1.5156312341531847e-05, + "loss": 0.2303, + "step": 71810 + }, + { + "epoch": 2.7730800417004517, + "grad_norm": 2.4468517303466797, + "learning_rate": 1.5130571321930062e-05, + "loss": 0.1322, + "step": 71820 + }, + { + "epoch": 2.7734661569944787, + "grad_norm": 1.2800359725952148, + "learning_rate": 1.5104830302328277e-05, + "loss": 0.1375, + "step": 71830 + }, + { + "epoch": 2.7738522722885053, + "grad_norm": 0.3998767137527466, + "learning_rate": 1.5079089282726488e-05, + "loss": 0.1066, + "step": 71840 + }, + { + "epoch": 2.7742383875825323, + "grad_norm": 0.5334371328353882, + "learning_rate": 1.5053348263124703e-05, + "loss": 0.1777, + "step": 71850 + }, + { + "epoch": 2.774624502876559, + "grad_norm": 0.4894556403160095, + "learning_rate": 1.5027607243522918e-05, + "loss": 0.1134, + "step": 71860 + }, + { + "epoch": 2.775010618170586, + "grad_norm": 0.6681411266326904, + "learning_rate": 1.500186622392113e-05, + "loss": 0.1274, + "step": 71870 + }, + { + "epoch": 2.7753967334646124, + "grad_norm": 0.8148763179779053, + "learning_rate": 1.4976125204319343e-05, + "loss": 0.184, + "step": 71880 + }, + { + "epoch": 2.7757828487586393, + "grad_norm": 2.98481822013855, + "learning_rate": 1.4950384184717558e-05, + "loss": 0.1414, + "step": 71890 + }, + { + "epoch": 2.7761689640526663, + "grad_norm": 0.24513117969036102, + "learning_rate": 1.492464316511577e-05, + "loss": 0.1361, + "step": 71900 + }, + { + "epoch": 2.776555079346693, + "grad_norm": 1.320607304573059, + "learning_rate": 1.4898902145513985e-05, + "loss": 0.159, + "step": 71910 + }, + { + "epoch": 2.77694119464072, + "grad_norm": 0.09256679564714432, + "learning_rate": 1.48731611259122e-05, + "loss": 0.1017, + "step": 71920 + }, + { + "epoch": 2.7773273099347464, + "grad_norm": 0.888762891292572, + "learning_rate": 1.4847420106310412e-05, + "loss": 0.1709, + "step": 71930 + }, + { + "epoch": 2.7777134252287734, + "grad_norm": 1.0178054571151733, + "learning_rate": 1.4821679086708627e-05, + "loss": 0.1705, + "step": 71940 + }, + { + "epoch": 2.7780995405228, + "grad_norm": 1.133257508277893, + "learning_rate": 1.4795938067106838e-05, + "loss": 0.1384, + "step": 71950 + }, + { + "epoch": 2.778485655816827, + "grad_norm": 0.41183799505233765, + "learning_rate": 1.4770197047505051e-05, + "loss": 0.1516, + "step": 71960 + }, + { + "epoch": 2.778871771110854, + "grad_norm": 2.172168731689453, + "learning_rate": 1.4744456027903266e-05, + "loss": 0.1806, + "step": 71970 + }, + { + "epoch": 2.7792578864048805, + "grad_norm": 0.4734342098236084, + "learning_rate": 
1.4718715008301478e-05, + "loss": 0.093, + "step": 71980 + }, + { + "epoch": 2.779644001698907, + "grad_norm": 0.38913142681121826, + "learning_rate": 1.4692973988699693e-05, + "loss": 0.1117, + "step": 71990 + }, + { + "epoch": 2.780030116992934, + "grad_norm": 1.2493480443954468, + "learning_rate": 1.4667232969097908e-05, + "loss": 0.2148, + "step": 72000 + }, + { + "epoch": 2.780416232286961, + "grad_norm": 0.6025747060775757, + "learning_rate": 1.464149194949612e-05, + "loss": 0.1705, + "step": 72010 + }, + { + "epoch": 2.7808023475809875, + "grad_norm": 1.4697037935256958, + "learning_rate": 1.4615750929894335e-05, + "loss": 0.1062, + "step": 72020 + }, + { + "epoch": 2.7811884628750145, + "grad_norm": 0.40200480818748474, + "learning_rate": 1.4590009910292548e-05, + "loss": 0.1152, + "step": 72030 + }, + { + "epoch": 2.7815745781690415, + "grad_norm": 0.5441505908966064, + "learning_rate": 1.456426889069076e-05, + "loss": 0.1488, + "step": 72040 + }, + { + "epoch": 2.781960693463068, + "grad_norm": 0.45176851749420166, + "learning_rate": 1.4538527871088975e-05, + "loss": 0.0965, + "step": 72050 + }, + { + "epoch": 2.7823468087570946, + "grad_norm": 0.7421501278877258, + "learning_rate": 1.4512786851487186e-05, + "loss": 0.1045, + "step": 72060 + }, + { + "epoch": 2.7827329240511216, + "grad_norm": 0.9444339871406555, + "learning_rate": 1.4487045831885401e-05, + "loss": 0.164, + "step": 72070 + }, + { + "epoch": 2.7831190393451486, + "grad_norm": 1.1744027137756348, + "learning_rate": 1.4461304812283616e-05, + "loss": 0.1624, + "step": 72080 + }, + { + "epoch": 2.783505154639175, + "grad_norm": 1.2394273281097412, + "learning_rate": 1.4435563792681828e-05, + "loss": 0.2018, + "step": 72090 + }, + { + "epoch": 2.783891269933202, + "grad_norm": 1.3852754831314087, + "learning_rate": 1.4409822773080043e-05, + "loss": 0.3339, + "step": 72100 + }, + { + "epoch": 2.784277385227229, + "grad_norm": 0.030897267162799835, + "learning_rate": 1.4384081753478256e-05, + "loss": 0.1089, + "step": 72110 + }, + { + "epoch": 2.7846635005212557, + "grad_norm": 0.41380831599235535, + "learning_rate": 1.4358340733876468e-05, + "loss": 0.0955, + "step": 72120 + }, + { + "epoch": 2.785049615815282, + "grad_norm": 0.40943947434425354, + "learning_rate": 1.4332599714274683e-05, + "loss": 0.1309, + "step": 72130 + }, + { + "epoch": 2.785435731109309, + "grad_norm": 5.614469528198242, + "learning_rate": 1.4306858694672898e-05, + "loss": 0.2242, + "step": 72140 + }, + { + "epoch": 2.785821846403336, + "grad_norm": 1.1512348651885986, + "learning_rate": 1.428111767507111e-05, + "loss": 0.1386, + "step": 72150 + }, + { + "epoch": 2.7862079616973627, + "grad_norm": 1.467037558555603, + "learning_rate": 1.4255376655469324e-05, + "loss": 0.1531, + "step": 72160 + }, + { + "epoch": 2.7865940769913897, + "grad_norm": 0.7877001762390137, + "learning_rate": 1.422963563586754e-05, + "loss": 0.1488, + "step": 72170 + }, + { + "epoch": 2.7869801922854163, + "grad_norm": 0.7077778577804565, + "learning_rate": 1.4203894616265751e-05, + "loss": 0.1213, + "step": 72180 + }, + { + "epoch": 2.7873663075794433, + "grad_norm": 0.6909589171409607, + "learning_rate": 1.4178153596663964e-05, + "loss": 0.2211, + "step": 72190 + }, + { + "epoch": 2.78775242287347, + "grad_norm": 0.23613958060741425, + "learning_rate": 1.4152412577062176e-05, + "loss": 0.1896, + "step": 72200 + }, + { + "epoch": 2.788138538167497, + "grad_norm": 1.218856930732727, + "learning_rate": 1.4126671557460391e-05, + "loss": 0.0989, + "step": 72210 + }, + { + 
"epoch": 2.788524653461524, + "grad_norm": 1.9225094318389893, + "learning_rate": 1.4100930537858606e-05, + "loss": 0.1355, + "step": 72220 + }, + { + "epoch": 2.7889107687555503, + "grad_norm": 1.3572889566421509, + "learning_rate": 1.4075189518256818e-05, + "loss": 0.2708, + "step": 72230 + }, + { + "epoch": 2.7892968840495773, + "grad_norm": 1.4339534044265747, + "learning_rate": 1.4049448498655033e-05, + "loss": 0.173, + "step": 72240 + }, + { + "epoch": 2.789682999343604, + "grad_norm": 1.6303379535675049, + "learning_rate": 1.4023707479053248e-05, + "loss": 0.1791, + "step": 72250 + }, + { + "epoch": 2.790069114637631, + "grad_norm": 0.25583216547966003, + "learning_rate": 1.399796645945146e-05, + "loss": 0.0694, + "step": 72260 + }, + { + "epoch": 2.7904552299316574, + "grad_norm": 0.29900923371315, + "learning_rate": 1.3972225439849673e-05, + "loss": 0.0942, + "step": 72270 + }, + { + "epoch": 2.7908413452256844, + "grad_norm": 0.4300585687160492, + "learning_rate": 1.3946484420247888e-05, + "loss": 0.1378, + "step": 72280 + }, + { + "epoch": 2.7912274605197114, + "grad_norm": 2.40873384475708, + "learning_rate": 1.39207434006461e-05, + "loss": 0.2227, + "step": 72290 + }, + { + "epoch": 2.791613575813738, + "grad_norm": 1.8558450937271118, + "learning_rate": 1.3895002381044314e-05, + "loss": 0.1489, + "step": 72300 + }, + { + "epoch": 2.791999691107765, + "grad_norm": 0.7937175631523132, + "learning_rate": 1.386926136144253e-05, + "loss": 0.2045, + "step": 72310 + }, + { + "epoch": 2.7923858064017915, + "grad_norm": 1.4006129503250122, + "learning_rate": 1.3843520341840741e-05, + "loss": 0.205, + "step": 72320 + }, + { + "epoch": 2.7927719216958184, + "grad_norm": 0.765356183052063, + "learning_rate": 1.3817779322238956e-05, + "loss": 0.0647, + "step": 72330 + }, + { + "epoch": 2.793158036989845, + "grad_norm": 0.9293017983436584, + "learning_rate": 1.3792038302637167e-05, + "loss": 0.149, + "step": 72340 + }, + { + "epoch": 2.793544152283872, + "grad_norm": 0.1647326946258545, + "learning_rate": 1.376629728303538e-05, + "loss": 0.0957, + "step": 72350 + }, + { + "epoch": 2.793930267577899, + "grad_norm": 1.7377187013626099, + "learning_rate": 1.3740556263433596e-05, + "loss": 0.1945, + "step": 72360 + }, + { + "epoch": 2.7943163828719255, + "grad_norm": 1.4326138496398926, + "learning_rate": 1.3714815243831807e-05, + "loss": 0.1362, + "step": 72370 + }, + { + "epoch": 2.7947024981659525, + "grad_norm": 0.07872216403484344, + "learning_rate": 1.3689074224230022e-05, + "loss": 0.1186, + "step": 72380 + }, + { + "epoch": 2.795088613459979, + "grad_norm": 2.5933990478515625, + "learning_rate": 1.3663333204628237e-05, + "loss": 0.3499, + "step": 72390 + }, + { + "epoch": 2.795474728754006, + "grad_norm": 0.8128255009651184, + "learning_rate": 1.3637592185026449e-05, + "loss": 0.1798, + "step": 72400 + }, + { + "epoch": 2.7958608440480326, + "grad_norm": 1.3631271123886108, + "learning_rate": 1.3611851165424664e-05, + "loss": 0.1602, + "step": 72410 + }, + { + "epoch": 2.7962469593420596, + "grad_norm": 0.2131202220916748, + "learning_rate": 1.3586110145822877e-05, + "loss": 0.2031, + "step": 72420 + }, + { + "epoch": 2.7966330746360866, + "grad_norm": 2.3542559146881104, + "learning_rate": 1.3560369126221089e-05, + "loss": 0.2783, + "step": 72430 + }, + { + "epoch": 2.797019189930113, + "grad_norm": 1.075270175933838, + "learning_rate": 1.3534628106619304e-05, + "loss": 0.2014, + "step": 72440 + }, + { + "epoch": 2.7974053052241397, + "grad_norm": 1.6369863748550415, + 
"learning_rate": 1.3508887087017516e-05, + "loss": 0.1512, + "step": 72450 + }, + { + "epoch": 2.7977914205181666, + "grad_norm": 2.450411081314087, + "learning_rate": 1.348314606741573e-05, + "loss": 0.1899, + "step": 72460 + }, + { + "epoch": 2.7981775358121936, + "grad_norm": 0.5244776010513306, + "learning_rate": 1.3457405047813946e-05, + "loss": 0.1652, + "step": 72470 + }, + { + "epoch": 2.79856365110622, + "grad_norm": 1.206304907798767, + "learning_rate": 1.3431664028212157e-05, + "loss": 0.1695, + "step": 72480 + }, + { + "epoch": 2.798949766400247, + "grad_norm": 1.7752883434295654, + "learning_rate": 1.3405923008610372e-05, + "loss": 0.2092, + "step": 72490 + }, + { + "epoch": 2.799335881694274, + "grad_norm": 2.458888530731201, + "learning_rate": 1.3380181989008586e-05, + "loss": 0.1963, + "step": 72500 + }, + { + "epoch": 2.7997219969883007, + "grad_norm": 0.24970018863677979, + "learning_rate": 1.3354440969406797e-05, + "loss": 0.1438, + "step": 72510 + }, + { + "epoch": 2.8001081122823273, + "grad_norm": 1.6026058197021484, + "learning_rate": 1.3328699949805012e-05, + "loss": 0.1702, + "step": 72520 + }, + { + "epoch": 2.8004942275763542, + "grad_norm": 0.6461538076400757, + "learning_rate": 1.3302958930203227e-05, + "loss": 0.1725, + "step": 72530 + }, + { + "epoch": 2.8008803428703812, + "grad_norm": 0.30602967739105225, + "learning_rate": 1.3277217910601439e-05, + "loss": 0.1018, + "step": 72540 + }, + { + "epoch": 2.801266458164408, + "grad_norm": 0.5502751469612122, + "learning_rate": 1.3251476890999654e-05, + "loss": 0.1793, + "step": 72550 + }, + { + "epoch": 2.8016525734584348, + "grad_norm": 0.2644082307815552, + "learning_rate": 1.3225735871397869e-05, + "loss": 0.0423, + "step": 72560 + }, + { + "epoch": 2.8020386887524618, + "grad_norm": 2.0505154132843018, + "learning_rate": 1.319999485179608e-05, + "loss": 0.2418, + "step": 72570 + }, + { + "epoch": 2.8024248040464883, + "grad_norm": 3.0608208179473877, + "learning_rate": 1.3174253832194294e-05, + "loss": 0.1613, + "step": 72580 + }, + { + "epoch": 2.802810919340515, + "grad_norm": 0.05718081071972847, + "learning_rate": 1.3148512812592505e-05, + "loss": 0.132, + "step": 72590 + }, + { + "epoch": 2.803197034634542, + "grad_norm": 2.007830858230591, + "learning_rate": 1.312277179299072e-05, + "loss": 0.138, + "step": 72600 + }, + { + "epoch": 2.803583149928569, + "grad_norm": 0.4181762933731079, + "learning_rate": 1.3097030773388935e-05, + "loss": 0.0742, + "step": 72610 + }, + { + "epoch": 2.8039692652225954, + "grad_norm": 1.4194786548614502, + "learning_rate": 1.3071289753787147e-05, + "loss": 0.1517, + "step": 72620 + }, + { + "epoch": 2.8043553805166224, + "grad_norm": 0.03250798210501671, + "learning_rate": 1.3045548734185362e-05, + "loss": 0.1, + "step": 72630 + }, + { + "epoch": 2.804741495810649, + "grad_norm": 0.983828067779541, + "learning_rate": 1.3019807714583577e-05, + "loss": 0.1765, + "step": 72640 + }, + { + "epoch": 2.805127611104676, + "grad_norm": 0.4670206904411316, + "learning_rate": 1.2994066694981789e-05, + "loss": 0.0954, + "step": 72650 + }, + { + "epoch": 2.8055137263987024, + "grad_norm": 0.09260097146034241, + "learning_rate": 1.2968325675380002e-05, + "loss": 0.119, + "step": 72660 + }, + { + "epoch": 2.8058998416927294, + "grad_norm": 2.12126088142395, + "learning_rate": 1.2942584655778217e-05, + "loss": 0.1673, + "step": 72670 + }, + { + "epoch": 2.8062859569867564, + "grad_norm": 0.11968256533145905, + "learning_rate": 1.2916843636176429e-05, + "loss": 0.0941, + "step": 72680 + 
}, + { + "epoch": 2.806672072280783, + "grad_norm": 1.229608416557312, + "learning_rate": 1.2891102616574644e-05, + "loss": 0.2319, + "step": 72690 + }, + { + "epoch": 2.80705818757481, + "grad_norm": 0.7404507994651794, + "learning_rate": 1.2865361596972859e-05, + "loss": 0.1878, + "step": 72700 + }, + { + "epoch": 2.8074443028688365, + "grad_norm": 0.15742874145507812, + "learning_rate": 1.283962057737107e-05, + "loss": 0.2041, + "step": 72710 + }, + { + "epoch": 2.8078304181628635, + "grad_norm": 1.860520601272583, + "learning_rate": 1.2813879557769285e-05, + "loss": 0.1575, + "step": 72720 + }, + { + "epoch": 2.80821653345689, + "grad_norm": 2.309978723526001, + "learning_rate": 1.2788138538167497e-05, + "loss": 0.104, + "step": 72730 + }, + { + "epoch": 2.808602648750917, + "grad_norm": 0.4069162905216217, + "learning_rate": 1.276239751856571e-05, + "loss": 0.1167, + "step": 72740 + }, + { + "epoch": 2.808988764044944, + "grad_norm": 0.9853174090385437, + "learning_rate": 1.2736656498963925e-05, + "loss": 0.1826, + "step": 72750 + }, + { + "epoch": 2.8093748793389706, + "grad_norm": 0.29024359583854675, + "learning_rate": 1.2710915479362137e-05, + "loss": 0.1662, + "step": 72760 + }, + { + "epoch": 2.8097609946329976, + "grad_norm": 1.698085069656372, + "learning_rate": 1.2685174459760352e-05, + "loss": 0.2987, + "step": 72770 + }, + { + "epoch": 2.810147109927024, + "grad_norm": 0.6112163066864014, + "learning_rate": 1.2659433440158567e-05, + "loss": 0.1074, + "step": 72780 + }, + { + "epoch": 2.810533225221051, + "grad_norm": 0.6659224033355713, + "learning_rate": 1.2633692420556778e-05, + "loss": 0.1514, + "step": 72790 + }, + { + "epoch": 2.8109193405150776, + "grad_norm": 0.40598243474960327, + "learning_rate": 1.2607951400954993e-05, + "loss": 0.1022, + "step": 72800 + }, + { + "epoch": 2.8113054558091046, + "grad_norm": 0.3016485571861267, + "learning_rate": 1.2582210381353207e-05, + "loss": 0.1226, + "step": 72810 + }, + { + "epoch": 2.8116915711031316, + "grad_norm": 0.8959118127822876, + "learning_rate": 1.2556469361751418e-05, + "loss": 0.1283, + "step": 72820 + }, + { + "epoch": 2.812077686397158, + "grad_norm": 0.1264275461435318, + "learning_rate": 1.2530728342149633e-05, + "loss": 0.1191, + "step": 72830 + }, + { + "epoch": 2.812463801691185, + "grad_norm": 0.06748595088720322, + "learning_rate": 1.2504987322547848e-05, + "loss": 0.1728, + "step": 72840 + }, + { + "epoch": 2.8128499169852117, + "grad_norm": 1.8376634120941162, + "learning_rate": 1.247924630294606e-05, + "loss": 0.1773, + "step": 72850 + }, + { + "epoch": 2.8132360322792387, + "grad_norm": 1.0151314735412598, + "learning_rate": 1.2453505283344275e-05, + "loss": 0.0979, + "step": 72860 + }, + { + "epoch": 2.8136221475732652, + "grad_norm": 0.274681955575943, + "learning_rate": 1.2427764263742488e-05, + "loss": 0.1806, + "step": 72870 + }, + { + "epoch": 2.814008262867292, + "grad_norm": 3.2869608402252197, + "learning_rate": 1.2402023244140702e-05, + "loss": 0.1469, + "step": 72880 + }, + { + "epoch": 2.814394378161319, + "grad_norm": 0.7580883502960205, + "learning_rate": 1.2376282224538915e-05, + "loss": 0.175, + "step": 72890 + }, + { + "epoch": 2.8147804934553458, + "grad_norm": 0.26281633973121643, + "learning_rate": 1.2350541204937128e-05, + "loss": 0.1801, + "step": 72900 + }, + { + "epoch": 2.8151666087493723, + "grad_norm": 0.5014416575431824, + "learning_rate": 1.2324800185335341e-05, + "loss": 0.1159, + "step": 72910 + }, + { + "epoch": 2.8155527240433993, + "grad_norm": 0.5112303495407104, + 
"learning_rate": 1.2299059165733555e-05, + "loss": 0.0988, + "step": 72920 + }, + { + "epoch": 2.8159388393374263, + "grad_norm": 1.928898811340332, + "learning_rate": 1.227331814613177e-05, + "loss": 0.1261, + "step": 72930 + }, + { + "epoch": 2.816324954631453, + "grad_norm": 2.8064088821411133, + "learning_rate": 1.2247577126529983e-05, + "loss": 0.1811, + "step": 72940 + }, + { + "epoch": 2.81671106992548, + "grad_norm": 1.5859709978103638, + "learning_rate": 1.2221836106928196e-05, + "loss": 0.2235, + "step": 72950 + }, + { + "epoch": 2.817097185219507, + "grad_norm": 0.6035090684890747, + "learning_rate": 1.219609508732641e-05, + "loss": 0.0931, + "step": 72960 + }, + { + "epoch": 2.8174833005135334, + "grad_norm": 0.19035843014717102, + "learning_rate": 1.2170354067724623e-05, + "loss": 0.182, + "step": 72970 + }, + { + "epoch": 2.81786941580756, + "grad_norm": 0.2380947321653366, + "learning_rate": 1.2144613048122836e-05, + "loss": 0.0744, + "step": 72980 + }, + { + "epoch": 2.818255531101587, + "grad_norm": 0.5143213868141174, + "learning_rate": 1.211887202852105e-05, + "loss": 0.1252, + "step": 72990 + }, + { + "epoch": 2.818641646395614, + "grad_norm": 1.6957656145095825, + "learning_rate": 1.2093131008919265e-05, + "loss": 0.1701, + "step": 73000 + }, + { + "epoch": 2.8190277616896404, + "grad_norm": 0.31234198808670044, + "learning_rate": 1.2067389989317478e-05, + "loss": 0.0638, + "step": 73010 + }, + { + "epoch": 2.8194138769836674, + "grad_norm": 1.2418551445007324, + "learning_rate": 1.2041648969715691e-05, + "loss": 0.1972, + "step": 73020 + }, + { + "epoch": 2.819799992277694, + "grad_norm": 0.08601387590169907, + "learning_rate": 1.2015907950113905e-05, + "loss": 0.1808, + "step": 73030 + }, + { + "epoch": 2.820186107571721, + "grad_norm": 0.7416751384735107, + "learning_rate": 1.1990166930512118e-05, + "loss": 0.0543, + "step": 73040 + }, + { + "epoch": 2.8205722228657475, + "grad_norm": 0.957653284072876, + "learning_rate": 1.1964425910910331e-05, + "loss": 0.125, + "step": 73050 + }, + { + "epoch": 2.8209583381597745, + "grad_norm": 0.34786149859428406, + "learning_rate": 1.1938684891308545e-05, + "loss": 0.2388, + "step": 73060 + }, + { + "epoch": 2.8213444534538015, + "grad_norm": 1.0569926500320435, + "learning_rate": 1.1912943871706758e-05, + "loss": 0.1199, + "step": 73070 + }, + { + "epoch": 2.821730568747828, + "grad_norm": 1.948347568511963, + "learning_rate": 1.1887202852104973e-05, + "loss": 0.195, + "step": 73080 + }, + { + "epoch": 2.822116684041855, + "grad_norm": 0.18845289945602417, + "learning_rate": 1.1861461832503186e-05, + "loss": 0.295, + "step": 73090 + }, + { + "epoch": 2.8225027993358816, + "grad_norm": 1.7377262115478516, + "learning_rate": 1.18357208129014e-05, + "loss": 0.3067, + "step": 73100 + }, + { + "epoch": 2.8228889146299085, + "grad_norm": 1.232576847076416, + "learning_rate": 1.1809979793299614e-05, + "loss": 0.1849, + "step": 73110 + }, + { + "epoch": 2.823275029923935, + "grad_norm": 1.6318284273147583, + "learning_rate": 1.1784238773697826e-05, + "loss": 0.0626, + "step": 73120 + }, + { + "epoch": 2.823661145217962, + "grad_norm": 0.5187623500823975, + "learning_rate": 1.175849775409604e-05, + "loss": 0.1255, + "step": 73130 + }, + { + "epoch": 2.824047260511989, + "grad_norm": 2.4732282161712646, + "learning_rate": 1.1732756734494253e-05, + "loss": 0.1433, + "step": 73140 + }, + { + "epoch": 2.8244333758060156, + "grad_norm": 2.080509901046753, + "learning_rate": 1.1707015714892468e-05, + "loss": 0.1533, + "step": 73150 + }, + { 
+ "epoch": 2.8248194911000426, + "grad_norm": 0.61173415184021, + "learning_rate": 1.1681274695290681e-05, + "loss": 0.0807, + "step": 73160 + }, + { + "epoch": 2.825205606394069, + "grad_norm": 0.1210317462682724, + "learning_rate": 1.1655533675688894e-05, + "loss": 0.1433, + "step": 73170 + }, + { + "epoch": 2.825591721688096, + "grad_norm": 0.699754536151886, + "learning_rate": 1.162979265608711e-05, + "loss": 0.091, + "step": 73180 + }, + { + "epoch": 2.8259778369821227, + "grad_norm": 1.1935967206954956, + "learning_rate": 1.1604051636485323e-05, + "loss": 0.1567, + "step": 73190 + }, + { + "epoch": 2.8263639522761497, + "grad_norm": 0.14032036066055298, + "learning_rate": 1.1578310616883534e-05, + "loss": 0.1283, + "step": 73200 + }, + { + "epoch": 2.8267500675701767, + "grad_norm": 0.1003938615322113, + "learning_rate": 1.1552569597281748e-05, + "loss": 0.241, + "step": 73210 + }, + { + "epoch": 2.827136182864203, + "grad_norm": 0.08814160525798798, + "learning_rate": 1.1526828577679963e-05, + "loss": 0.1191, + "step": 73220 + }, + { + "epoch": 2.82752229815823, + "grad_norm": 2.224803924560547, + "learning_rate": 1.1501087558078176e-05, + "loss": 0.1395, + "step": 73230 + }, + { + "epoch": 2.8279084134522567, + "grad_norm": 0.7424294948577881, + "learning_rate": 1.147534653847639e-05, + "loss": 0.2086, + "step": 73240 + }, + { + "epoch": 2.8282945287462837, + "grad_norm": 2.4858903884887695, + "learning_rate": 1.1449605518874604e-05, + "loss": 0.3264, + "step": 73250 + }, + { + "epoch": 2.8286806440403103, + "grad_norm": 0.09597936272621155, + "learning_rate": 1.1423864499272818e-05, + "loss": 0.1849, + "step": 73260 + }, + { + "epoch": 2.8290667593343373, + "grad_norm": 0.14344246685504913, + "learning_rate": 1.1398123479671031e-05, + "loss": 0.0898, + "step": 73270 + }, + { + "epoch": 2.8294528746283643, + "grad_norm": 1.6673258543014526, + "learning_rate": 1.1372382460069242e-05, + "loss": 0.1218, + "step": 73280 + }, + { + "epoch": 2.829838989922391, + "grad_norm": 0.7718226313591003, + "learning_rate": 1.1346641440467457e-05, + "loss": 0.246, + "step": 73290 + }, + { + "epoch": 2.8302251052164173, + "grad_norm": 1.073569416999817, + "learning_rate": 1.132090042086567e-05, + "loss": 0.1469, + "step": 73300 + }, + { + "epoch": 2.8306112205104443, + "grad_norm": 0.6750389933586121, + "learning_rate": 1.1295159401263884e-05, + "loss": 0.0572, + "step": 73310 + }, + { + "epoch": 2.8309973358044713, + "grad_norm": 0.3106151819229126, + "learning_rate": 1.1269418381662099e-05, + "loss": 0.1877, + "step": 73320 + }, + { + "epoch": 2.831383451098498, + "grad_norm": 2.519272804260254, + "learning_rate": 1.1243677362060312e-05, + "loss": 0.3173, + "step": 73330 + }, + { + "epoch": 2.831769566392525, + "grad_norm": 0.1696769744157791, + "learning_rate": 1.1217936342458526e-05, + "loss": 0.0998, + "step": 73340 + }, + { + "epoch": 2.832155681686552, + "grad_norm": 1.1009591817855835, + "learning_rate": 1.1192195322856739e-05, + "loss": 0.1534, + "step": 73350 + }, + { + "epoch": 2.8325417969805784, + "grad_norm": 0.5018852949142456, + "learning_rate": 1.1166454303254952e-05, + "loss": 0.1132, + "step": 73360 + }, + { + "epoch": 2.832927912274605, + "grad_norm": 0.75879967212677, + "learning_rate": 1.1140713283653166e-05, + "loss": 0.2494, + "step": 73370 + }, + { + "epoch": 2.833314027568632, + "grad_norm": 0.6770151853561401, + "learning_rate": 1.1114972264051379e-05, + "loss": 0.127, + "step": 73380 + }, + { + "epoch": 2.833700142862659, + "grad_norm": 0.7802016139030457, + 
"learning_rate": 1.1089231244449594e-05, + "loss": 0.0463, + "step": 73390 + }, + { + "epoch": 2.8340862581566855, + "grad_norm": 0.6405506134033203, + "learning_rate": 1.1063490224847807e-05, + "loss": 0.1039, + "step": 73400 + }, + { + "epoch": 2.8344723734507125, + "grad_norm": 1.5618057250976562, + "learning_rate": 1.103774920524602e-05, + "loss": 0.2303, + "step": 73410 + }, + { + "epoch": 2.8348584887447394, + "grad_norm": 0.07669465243816376, + "learning_rate": 1.1012008185644234e-05, + "loss": 0.0619, + "step": 73420 + }, + { + "epoch": 2.835244604038766, + "grad_norm": 1.495162010192871, + "learning_rate": 1.0986267166042447e-05, + "loss": 0.3976, + "step": 73430 + }, + { + "epoch": 2.8356307193327925, + "grad_norm": 1.0150856971740723, + "learning_rate": 1.096052614644066e-05, + "loss": 0.1209, + "step": 73440 + }, + { + "epoch": 2.8360168346268195, + "grad_norm": 0.0810672789812088, + "learning_rate": 1.0934785126838874e-05, + "loss": 0.1443, + "step": 73450 + }, + { + "epoch": 2.8364029499208465, + "grad_norm": 1.9854507446289062, + "learning_rate": 1.0909044107237089e-05, + "loss": 0.0724, + "step": 73460 + }, + { + "epoch": 2.836789065214873, + "grad_norm": 1.1847221851348877, + "learning_rate": 1.0883303087635302e-05, + "loss": 0.1215, + "step": 73470 + }, + { + "epoch": 2.8371751805089, + "grad_norm": 0.1890803575515747, + "learning_rate": 1.0857562068033515e-05, + "loss": 0.0693, + "step": 73480 + }, + { + "epoch": 2.8375612958029266, + "grad_norm": 1.4246773719787598, + "learning_rate": 1.0831821048431729e-05, + "loss": 0.1458, + "step": 73490 + }, + { + "epoch": 2.8379474110969536, + "grad_norm": 0.9759969115257263, + "learning_rate": 1.0806080028829944e-05, + "loss": 0.0894, + "step": 73500 + }, + { + "epoch": 2.83833352639098, + "grad_norm": 0.06872682273387909, + "learning_rate": 1.0780339009228155e-05, + "loss": 0.0911, + "step": 73510 + }, + { + "epoch": 2.838719641685007, + "grad_norm": 0.9503278732299805, + "learning_rate": 1.0754597989626369e-05, + "loss": 0.0367, + "step": 73520 + }, + { + "epoch": 2.839105756979034, + "grad_norm": 0.08986163884401321, + "learning_rate": 1.0728856970024582e-05, + "loss": 0.113, + "step": 73530 + }, + { + "epoch": 2.8394918722730607, + "grad_norm": 0.4085181951522827, + "learning_rate": 1.0703115950422797e-05, + "loss": 0.0858, + "step": 73540 + }, + { + "epoch": 2.8398779875670876, + "grad_norm": 2.045522689819336, + "learning_rate": 1.067737493082101e-05, + "loss": 0.0585, + "step": 73550 + }, + { + "epoch": 2.840264102861114, + "grad_norm": 1.701407551765442, + "learning_rate": 1.0651633911219224e-05, + "loss": 0.2093, + "step": 73560 + }, + { + "epoch": 2.840650218155141, + "grad_norm": 0.5299584865570068, + "learning_rate": 1.0625892891617439e-05, + "loss": 0.1673, + "step": 73570 + }, + { + "epoch": 2.8410363334491677, + "grad_norm": 1.289556622505188, + "learning_rate": 1.0600151872015652e-05, + "loss": 0.0876, + "step": 73580 + }, + { + "epoch": 2.8414224487431947, + "grad_norm": 1.0992724895477295, + "learning_rate": 1.0574410852413864e-05, + "loss": 0.1526, + "step": 73590 + }, + { + "epoch": 2.8418085640372217, + "grad_norm": 1.4800548553466797, + "learning_rate": 1.0548669832812077e-05, + "loss": 0.2792, + "step": 73600 + }, + { + "epoch": 2.8421946793312483, + "grad_norm": 0.7440020442008972, + "learning_rate": 1.0522928813210292e-05, + "loss": 0.0838, + "step": 73610 + }, + { + "epoch": 2.8425807946252752, + "grad_norm": 0.5010614395141602, + "learning_rate": 1.0497187793608505e-05, + "loss": 0.2615, + "step": 73620 
+ }, + { + "epoch": 2.842966909919302, + "grad_norm": 0.6573401093482971, + "learning_rate": 1.0471446774006719e-05, + "loss": 0.1924, + "step": 73630 + }, + { + "epoch": 2.8433530252133288, + "grad_norm": 1.2293144464492798, + "learning_rate": 1.0445705754404934e-05, + "loss": 0.1723, + "step": 73640 + }, + { + "epoch": 2.8437391405073553, + "grad_norm": 1.2916191816329956, + "learning_rate": 1.0419964734803147e-05, + "loss": 0.2163, + "step": 73650 + }, + { + "epoch": 2.8441252558013823, + "grad_norm": 0.48834675550460815, + "learning_rate": 1.039422371520136e-05, + "loss": 0.3001, + "step": 73660 + }, + { + "epoch": 2.8445113710954093, + "grad_norm": 0.4397851228713989, + "learning_rate": 1.0368482695599572e-05, + "loss": 0.1354, + "step": 73670 + }, + { + "epoch": 2.844897486389436, + "grad_norm": 1.6258771419525146, + "learning_rate": 1.0342741675997787e-05, + "loss": 0.1677, + "step": 73680 + }, + { + "epoch": 2.845283601683463, + "grad_norm": 0.5988297462463379, + "learning_rate": 1.0317000656396e-05, + "loss": 0.0911, + "step": 73690 + }, + { + "epoch": 2.8456697169774894, + "grad_norm": 2.7825276851654053, + "learning_rate": 1.0291259636794213e-05, + "loss": 0.2635, + "step": 73700 + }, + { + "epoch": 2.8460558322715164, + "grad_norm": 0.0774473026394844, + "learning_rate": 1.0265518617192428e-05, + "loss": 0.1239, + "step": 73710 + }, + { + "epoch": 2.846441947565543, + "grad_norm": 0.22317875921726227, + "learning_rate": 1.0239777597590642e-05, + "loss": 0.2164, + "step": 73720 + }, + { + "epoch": 2.84682806285957, + "grad_norm": 0.15649321675300598, + "learning_rate": 1.0214036577988855e-05, + "loss": 0.0442, + "step": 73730 + }, + { + "epoch": 2.847214178153597, + "grad_norm": 1.7008354663848877, + "learning_rate": 1.0188295558387068e-05, + "loss": 0.0829, + "step": 73740 + }, + { + "epoch": 2.8476002934476234, + "grad_norm": 0.9527981281280518, + "learning_rate": 1.0162554538785282e-05, + "loss": 0.1655, + "step": 73750 + }, + { + "epoch": 2.84798640874165, + "grad_norm": 0.27073028683662415, + "learning_rate": 1.0136813519183495e-05, + "loss": 0.0909, + "step": 73760 + }, + { + "epoch": 2.848372524035677, + "grad_norm": 0.6584774851799011, + "learning_rate": 1.0111072499581708e-05, + "loss": 0.1365, + "step": 73770 + }, + { + "epoch": 2.848758639329704, + "grad_norm": 0.06433244794607162, + "learning_rate": 1.0085331479979923e-05, + "loss": 0.0936, + "step": 73780 + }, + { + "epoch": 2.8491447546237305, + "grad_norm": 0.31640946865081787, + "learning_rate": 1.0059590460378137e-05, + "loss": 0.053, + "step": 73790 + }, + { + "epoch": 2.8495308699177575, + "grad_norm": 1.378275752067566, + "learning_rate": 1.003384944077635e-05, + "loss": 0.2072, + "step": 73800 + }, + { + "epoch": 2.8499169852117845, + "grad_norm": 0.40181395411491394, + "learning_rate": 1.0008108421174563e-05, + "loss": 0.0458, + "step": 73810 + }, + { + "epoch": 2.850303100505811, + "grad_norm": 0.6035460233688354, + "learning_rate": 9.982367401572777e-06, + "loss": 0.2126, + "step": 73820 + }, + { + "epoch": 2.8506892157998376, + "grad_norm": 1.581748366355896, + "learning_rate": 9.95662638197099e-06, + "loss": 0.2245, + "step": 73830 + }, + { + "epoch": 2.8510753310938646, + "grad_norm": 0.15285348892211914, + "learning_rate": 9.930885362369203e-06, + "loss": 0.2124, + "step": 73840 + }, + { + "epoch": 2.8514614463878916, + "grad_norm": 0.5655555725097656, + "learning_rate": 9.905144342767418e-06, + "loss": 0.215, + "step": 73850 + }, + { + "epoch": 2.851847561681918, + "grad_norm": 0.91652512550354, + 
"learning_rate": 9.879403323165631e-06, + "loss": 0.0512, + "step": 73860 + }, + { + "epoch": 2.852233676975945, + "grad_norm": 0.4889742136001587, + "learning_rate": 9.853662303563845e-06, + "loss": 0.0565, + "step": 73870 + }, + { + "epoch": 2.852619792269972, + "grad_norm": 0.5656816959381104, + "learning_rate": 9.827921283962058e-06, + "loss": 0.1392, + "step": 73880 + }, + { + "epoch": 2.8530059075639986, + "grad_norm": 0.5749841332435608, + "learning_rate": 9.802180264360273e-06, + "loss": 0.1548, + "step": 73890 + }, + { + "epoch": 2.853392022858025, + "grad_norm": 0.40446341037750244, + "learning_rate": 9.776439244758485e-06, + "loss": 0.1078, + "step": 73900 + }, + { + "epoch": 2.853778138152052, + "grad_norm": 0.7668073773384094, + "learning_rate": 9.750698225156698e-06, + "loss": 0.1282, + "step": 73910 + }, + { + "epoch": 2.854164253446079, + "grad_norm": 1.973522424697876, + "learning_rate": 9.724957205554913e-06, + "loss": 0.1002, + "step": 73920 + }, + { + "epoch": 2.8545503687401057, + "grad_norm": 3.882335662841797, + "learning_rate": 9.699216185953126e-06, + "loss": 0.3315, + "step": 73930 + }, + { + "epoch": 2.8549364840341327, + "grad_norm": 3.6236727237701416, + "learning_rate": 9.67347516635134e-06, + "loss": 0.0749, + "step": 73940 + }, + { + "epoch": 2.8553225993281592, + "grad_norm": 3.9697139263153076, + "learning_rate": 9.647734146749553e-06, + "loss": 0.165, + "step": 73950 + }, + { + "epoch": 2.8557087146221862, + "grad_norm": 0.2589983344078064, + "learning_rate": 9.621993127147768e-06, + "loss": 0.0949, + "step": 73960 + }, + { + "epoch": 2.8560948299162128, + "grad_norm": 1.9424326419830322, + "learning_rate": 9.596252107545981e-06, + "loss": 0.194, + "step": 73970 + }, + { + "epoch": 2.8564809452102398, + "grad_norm": 1.8792887926101685, + "learning_rate": 9.570511087944193e-06, + "loss": 0.1259, + "step": 73980 + }, + { + "epoch": 2.8568670605042668, + "grad_norm": 0.29986900091171265, + "learning_rate": 9.544770068342406e-06, + "loss": 0.0672, + "step": 73990 + }, + { + "epoch": 2.8572531757982933, + "grad_norm": 0.5949634909629822, + "learning_rate": 9.519029048740621e-06, + "loss": 0.1991, + "step": 74000 + }, + { + "epoch": 2.8576392910923203, + "grad_norm": 0.7105257511138916, + "learning_rate": 9.493288029138835e-06, + "loss": 0.1142, + "step": 74010 + }, + { + "epoch": 2.858025406386347, + "grad_norm": 0.1612206995487213, + "learning_rate": 9.467547009537048e-06, + "loss": 0.0743, + "step": 74020 + }, + { + "epoch": 2.858411521680374, + "grad_norm": 0.96357262134552, + "learning_rate": 9.441805989935263e-06, + "loss": 0.1514, + "step": 74030 + }, + { + "epoch": 2.8587976369744004, + "grad_norm": 0.10844029486179352, + "learning_rate": 9.416064970333476e-06, + "loss": 0.0921, + "step": 74040 + }, + { + "epoch": 2.8591837522684274, + "grad_norm": 1.9250043630599976, + "learning_rate": 9.39032395073169e-06, + "loss": 0.2404, + "step": 74050 + }, + { + "epoch": 2.8595698675624543, + "grad_norm": 0.9182831048965454, + "learning_rate": 9.364582931129901e-06, + "loss": 0.1312, + "step": 74060 + }, + { + "epoch": 2.859955982856481, + "grad_norm": 0.45169830322265625, + "learning_rate": 9.338841911528116e-06, + "loss": 0.1484, + "step": 74070 + }, + { + "epoch": 2.860342098150508, + "grad_norm": 1.3599480390548706, + "learning_rate": 9.31310089192633e-06, + "loss": 0.1648, + "step": 74080 + }, + { + "epoch": 2.8607282134445344, + "grad_norm": 0.7859013676643372, + "learning_rate": 9.287359872324543e-06, + "loss": 0.1339, + "step": 74090 + }, + { + "epoch": 
2.8611143287385614, + "grad_norm": 0.2625623047351837, + "learning_rate": 9.261618852722758e-06, + "loss": 0.1635, + "step": 74100 + }, + { + "epoch": 2.861500444032588, + "grad_norm": 3.6348588466644287, + "learning_rate": 9.235877833120971e-06, + "loss": 0.1958, + "step": 74110 + }, + { + "epoch": 2.861886559326615, + "grad_norm": 0.2913823127746582, + "learning_rate": 9.210136813519184e-06, + "loss": 0.1228, + "step": 74120 + }, + { + "epoch": 2.862272674620642, + "grad_norm": 0.8785780072212219, + "learning_rate": 9.184395793917398e-06, + "loss": 0.1284, + "step": 74130 + }, + { + "epoch": 2.8626587899146685, + "grad_norm": 0.20822127163410187, + "learning_rate": 9.158654774315611e-06, + "loss": 0.2853, + "step": 74140 + }, + { + "epoch": 2.8630449052086955, + "grad_norm": 0.12501594424247742, + "learning_rate": 9.132913754713824e-06, + "loss": 0.2773, + "step": 74150 + }, + { + "epoch": 2.863431020502722, + "grad_norm": 0.7225301861763, + "learning_rate": 9.107172735112038e-06, + "loss": 0.158, + "step": 74160 + }, + { + "epoch": 2.863817135796749, + "grad_norm": 0.15984690189361572, + "learning_rate": 9.081431715510253e-06, + "loss": 0.2105, + "step": 74170 + }, + { + "epoch": 2.8642032510907756, + "grad_norm": 0.9793509840965271, + "learning_rate": 9.055690695908466e-06, + "loss": 0.231, + "step": 74180 + }, + { + "epoch": 2.8645893663848025, + "grad_norm": 1.3881072998046875, + "learning_rate": 9.02994967630668e-06, + "loss": 0.1013, + "step": 74190 + }, + { + "epoch": 2.8649754816788295, + "grad_norm": 2.9956090450286865, + "learning_rate": 9.004208656704893e-06, + "loss": 0.1328, + "step": 74200 + }, + { + "epoch": 2.865361596972856, + "grad_norm": 1.1672799587249756, + "learning_rate": 8.978467637103106e-06, + "loss": 0.1454, + "step": 74210 + }, + { + "epoch": 2.8657477122668826, + "grad_norm": 0.5341854095458984, + "learning_rate": 8.952726617501319e-06, + "loss": 0.0753, + "step": 74220 + }, + { + "epoch": 2.8661338275609096, + "grad_norm": 0.19304673373699188, + "learning_rate": 8.926985597899532e-06, + "loss": 0.1962, + "step": 74230 + }, + { + "epoch": 2.8665199428549366, + "grad_norm": 0.33846932649612427, + "learning_rate": 8.901244578297747e-06, + "loss": 0.1089, + "step": 74240 + }, + { + "epoch": 2.866906058148963, + "grad_norm": 1.007706642150879, + "learning_rate": 8.87550355869596e-06, + "loss": 0.186, + "step": 74250 + }, + { + "epoch": 2.86729217344299, + "grad_norm": 1.7284183502197266, + "learning_rate": 8.849762539094174e-06, + "loss": 0.2445, + "step": 74260 + }, + { + "epoch": 2.867678288737017, + "grad_norm": 3.7386040687561035, + "learning_rate": 8.824021519492387e-06, + "loss": 0.1044, + "step": 74270 + }, + { + "epoch": 2.8680644040310437, + "grad_norm": 0.5097699761390686, + "learning_rate": 8.7982804998906e-06, + "loss": 0.1357, + "step": 74280 + }, + { + "epoch": 2.8684505193250702, + "grad_norm": 1.620544195175171, + "learning_rate": 8.772539480288814e-06, + "loss": 0.1567, + "step": 74290 + }, + { + "epoch": 2.868836634619097, + "grad_norm": 0.6678496599197388, + "learning_rate": 8.746798460687027e-06, + "loss": 0.1495, + "step": 74300 + }, + { + "epoch": 2.869222749913124, + "grad_norm": 0.5393665432929993, + "learning_rate": 8.721057441085242e-06, + "loss": 0.1611, + "step": 74310 + }, + { + "epoch": 2.8696088652071507, + "grad_norm": 2.98551344871521, + "learning_rate": 8.695316421483456e-06, + "loss": 0.2125, + "step": 74320 + }, + { + "epoch": 2.8699949805011777, + "grad_norm": 0.36715471744537354, + "learning_rate": 8.669575401881669e-06, + 
"loss": 0.0974, + "step": 74330 + }, + { + "epoch": 2.8703810957952043, + "grad_norm": 2.0747640132904053, + "learning_rate": 8.643834382279882e-06, + "loss": 0.1571, + "step": 74340 + }, + { + "epoch": 2.8707672110892313, + "grad_norm": 3.940426826477051, + "learning_rate": 8.618093362678097e-06, + "loss": 0.1706, + "step": 74350 + }, + { + "epoch": 2.871153326383258, + "grad_norm": 0.07064225524663925, + "learning_rate": 8.59235234307631e-06, + "loss": 0.1104, + "step": 74360 + }, + { + "epoch": 2.871539441677285, + "grad_norm": 2.7614259719848633, + "learning_rate": 8.566611323474522e-06, + "loss": 0.2461, + "step": 74370 + }, + { + "epoch": 2.871925556971312, + "grad_norm": 0.1670389324426651, + "learning_rate": 8.540870303872736e-06, + "loss": 0.0632, + "step": 74380 + }, + { + "epoch": 2.8723116722653383, + "grad_norm": 0.16744600236415863, + "learning_rate": 8.51512928427095e-06, + "loss": 0.0861, + "step": 74390 + }, + { + "epoch": 2.8726977875593653, + "grad_norm": 1.4196829795837402, + "learning_rate": 8.489388264669164e-06, + "loss": 0.1127, + "step": 74400 + }, + { + "epoch": 2.873083902853392, + "grad_norm": 0.9786797165870667, + "learning_rate": 8.463647245067377e-06, + "loss": 0.1925, + "step": 74410 + }, + { + "epoch": 2.873470018147419, + "grad_norm": 1.3412476778030396, + "learning_rate": 8.437906225465592e-06, + "loss": 0.2199, + "step": 74420 + }, + { + "epoch": 2.8738561334414454, + "grad_norm": 0.7068845629692078, + "learning_rate": 8.412165205863805e-06, + "loss": 0.1286, + "step": 74430 + }, + { + "epoch": 2.8742422487354724, + "grad_norm": 0.9745468497276306, + "learning_rate": 8.386424186262019e-06, + "loss": 0.2113, + "step": 74440 + }, + { + "epoch": 2.8746283640294994, + "grad_norm": 1.4095795154571533, + "learning_rate": 8.36068316666023e-06, + "loss": 0.1474, + "step": 74450 + }, + { + "epoch": 2.875014479323526, + "grad_norm": 0.5530760884284973, + "learning_rate": 8.334942147058445e-06, + "loss": 0.1351, + "step": 74460 + }, + { + "epoch": 2.875400594617553, + "grad_norm": 1.8106993436813354, + "learning_rate": 8.309201127456659e-06, + "loss": 0.1643, + "step": 74470 + }, + { + "epoch": 2.8757867099115795, + "grad_norm": 0.3154134154319763, + "learning_rate": 8.283460107854872e-06, + "loss": 0.052, + "step": 74480 + }, + { + "epoch": 2.8761728252056065, + "grad_norm": 0.2410293072462082, + "learning_rate": 8.257719088253087e-06, + "loss": 0.0817, + "step": 74490 + }, + { + "epoch": 2.876558940499633, + "grad_norm": 0.7377256155014038, + "learning_rate": 8.2319780686513e-06, + "loss": 0.1561, + "step": 74500 + }, + { + "epoch": 2.87694505579366, + "grad_norm": 3.0138823986053467, + "learning_rate": 8.206237049049514e-06, + "loss": 0.1963, + "step": 74510 + }, + { + "epoch": 2.877331171087687, + "grad_norm": 2.0430660247802734, + "learning_rate": 8.180496029447727e-06, + "loss": 0.0937, + "step": 74520 + }, + { + "epoch": 2.8777172863817135, + "grad_norm": 0.4840744435787201, + "learning_rate": 8.15475500984594e-06, + "loss": 0.1026, + "step": 74530 + }, + { + "epoch": 2.8781034016757405, + "grad_norm": 1.995553731918335, + "learning_rate": 8.129013990244154e-06, + "loss": 0.1112, + "step": 74540 + }, + { + "epoch": 2.878489516969767, + "grad_norm": 2.139451742172241, + "learning_rate": 8.103272970642367e-06, + "loss": 0.2066, + "step": 74550 + }, + { + "epoch": 2.878875632263794, + "grad_norm": 1.6176917552947998, + "learning_rate": 8.077531951040582e-06, + "loss": 0.1588, + "step": 74560 + }, + { + "epoch": 2.8792617475578206, + "grad_norm": 
0.965374231338501, + "learning_rate": 8.051790931438795e-06, + "loss": 0.1274, + "step": 74570 + }, + { + "epoch": 2.8796478628518476, + "grad_norm": 2.842817544937134, + "learning_rate": 8.026049911837009e-06, + "loss": 0.1503, + "step": 74580 + }, + { + "epoch": 2.8800339781458746, + "grad_norm": 0.3894018232822418, + "learning_rate": 8.000308892235222e-06, + "loss": 0.1052, + "step": 74590 + }, + { + "epoch": 2.880420093439901, + "grad_norm": 1.3701835870742798, + "learning_rate": 7.974567872633435e-06, + "loss": 0.2016, + "step": 74600 + }, + { + "epoch": 2.8808062087339277, + "grad_norm": 1.6000498533248901, + "learning_rate": 7.948826853031648e-06, + "loss": 0.1766, + "step": 74610 + }, + { + "epoch": 2.8811923240279547, + "grad_norm": 0.3492911159992218, + "learning_rate": 7.923085833429862e-06, + "loss": 0.2132, + "step": 74620 + }, + { + "epoch": 2.8815784393219817, + "grad_norm": 2.516202926635742, + "learning_rate": 7.897344813828077e-06, + "loss": 0.1888, + "step": 74630 + }, + { + "epoch": 2.881964554616008, + "grad_norm": 1.8804141283035278, + "learning_rate": 7.87160379422629e-06, + "loss": 0.2125, + "step": 74640 + }, + { + "epoch": 2.882350669910035, + "grad_norm": 1.4559141397476196, + "learning_rate": 7.845862774624503e-06, + "loss": 0.2118, + "step": 74650 + }, + { + "epoch": 2.882736785204062, + "grad_norm": 1.0181102752685547, + "learning_rate": 7.820121755022717e-06, + "loss": 0.2107, + "step": 74660 + }, + { + "epoch": 2.8831229004980887, + "grad_norm": 0.8501084446907043, + "learning_rate": 7.79438073542093e-06, + "loss": 0.1405, + "step": 74670 + }, + { + "epoch": 2.8835090157921153, + "grad_norm": 0.6537768244743347, + "learning_rate": 7.768639715819143e-06, + "loss": 0.0951, + "step": 74680 + }, + { + "epoch": 2.8838951310861423, + "grad_norm": 1.6745322942733765, + "learning_rate": 7.742898696217357e-06, + "loss": 0.1451, + "step": 74690 + }, + { + "epoch": 2.8842812463801693, + "grad_norm": 0.046329010277986526, + "learning_rate": 7.717157676615572e-06, + "loss": 0.0637, + "step": 74700 + }, + { + "epoch": 2.884667361674196, + "grad_norm": 1.1959903240203857, + "learning_rate": 7.691416657013785e-06, + "loss": 0.1841, + "step": 74710 + }, + { + "epoch": 2.885053476968223, + "grad_norm": 2.5898563861846924, + "learning_rate": 7.665675637411998e-06, + "loss": 0.1309, + "step": 74720 + }, + { + "epoch": 2.8854395922622498, + "grad_norm": 0.05841336399316788, + "learning_rate": 7.639934617810212e-06, + "loss": 0.0825, + "step": 74730 + }, + { + "epoch": 2.8858257075562763, + "grad_norm": 1.5657339096069336, + "learning_rate": 7.614193598208426e-06, + "loss": 0.0816, + "step": 74740 + }, + { + "epoch": 2.886211822850303, + "grad_norm": 1.2975622415542603, + "learning_rate": 7.588452578606639e-06, + "loss": 0.2084, + "step": 74750 + }, + { + "epoch": 2.88659793814433, + "grad_norm": 0.8901941180229187, + "learning_rate": 7.562711559004852e-06, + "loss": 0.0946, + "step": 74760 + }, + { + "epoch": 2.886984053438357, + "grad_norm": 0.13277596235275269, + "learning_rate": 7.5369705394030665e-06, + "loss": 0.1313, + "step": 74770 + }, + { + "epoch": 2.8873701687323834, + "grad_norm": 0.6199597120285034, + "learning_rate": 7.51122951980128e-06, + "loss": 0.1139, + "step": 74780 + }, + { + "epoch": 2.8877562840264104, + "grad_norm": 0.041490960866212845, + "learning_rate": 7.485488500199493e-06, + "loss": 0.0945, + "step": 74790 + }, + { + "epoch": 2.888142399320437, + "grad_norm": 0.06974820047616959, + "learning_rate": 7.4597474805977065e-06, + "loss": 0.1619, + "step": 
74800 + }, + { + "epoch": 2.888528514614464, + "grad_norm": 0.5965213775634766, + "learning_rate": 7.434006460995921e-06, + "loss": 0.1294, + "step": 74810 + }, + { + "epoch": 2.8889146299084905, + "grad_norm": 1.50202214717865, + "learning_rate": 7.408265441394134e-06, + "loss": 0.2038, + "step": 74820 + }, + { + "epoch": 2.8893007452025175, + "grad_norm": 0.2680859863758087, + "learning_rate": 7.382524421792347e-06, + "loss": 0.1566, + "step": 74830 + }, + { + "epoch": 2.8896868604965444, + "grad_norm": 0.8714569211006165, + "learning_rate": 7.3567834021905606e-06, + "loss": 0.1103, + "step": 74840 + }, + { + "epoch": 2.890072975790571, + "grad_norm": 1.0597981214523315, + "learning_rate": 7.331042382588775e-06, + "loss": 0.1141, + "step": 74850 + }, + { + "epoch": 2.890459091084598, + "grad_norm": 1.1880320310592651, + "learning_rate": 7.305301362986988e-06, + "loss": 0.1203, + "step": 74860 + }, + { + "epoch": 2.8908452063786245, + "grad_norm": 0.8900028467178345, + "learning_rate": 7.279560343385201e-06, + "loss": 0.2258, + "step": 74870 + }, + { + "epoch": 2.8912313216726515, + "grad_norm": 0.6879392266273499, + "learning_rate": 7.253819323783416e-06, + "loss": 0.1561, + "step": 74880 + }, + { + "epoch": 2.891617436966678, + "grad_norm": 0.3370697498321533, + "learning_rate": 7.228078304181629e-06, + "loss": 0.2218, + "step": 74890 + }, + { + "epoch": 2.892003552260705, + "grad_norm": 2.0778462886810303, + "learning_rate": 7.202337284579842e-06, + "loss": 0.2195, + "step": 74900 + }, + { + "epoch": 2.892389667554732, + "grad_norm": 0.09895554929971695, + "learning_rate": 7.1765962649780554e-06, + "loss": 0.148, + "step": 74910 + }, + { + "epoch": 2.8927757828487586, + "grad_norm": 3.897048234939575, + "learning_rate": 7.1508552453762704e-06, + "loss": 0.1535, + "step": 74920 + }, + { + "epoch": 2.8931618981427856, + "grad_norm": 0.4604206383228302, + "learning_rate": 7.125114225774483e-06, + "loss": 0.1037, + "step": 74930 + }, + { + "epoch": 2.893548013436812, + "grad_norm": 1.1230722665786743, + "learning_rate": 7.099373206172696e-06, + "loss": 0.2625, + "step": 74940 + }, + { + "epoch": 2.893934128730839, + "grad_norm": 0.0566195473074913, + "learning_rate": 7.073632186570911e-06, + "loss": 0.1807, + "step": 74950 + }, + { + "epoch": 2.8943202440248657, + "grad_norm": 0.08504597842693329, + "learning_rate": 7.0478911669691245e-06, + "loss": 0.1639, + "step": 74960 + }, + { + "epoch": 2.8947063593188926, + "grad_norm": 1.4992380142211914, + "learning_rate": 7.022150147367337e-06, + "loss": 0.219, + "step": 74970 + }, + { + "epoch": 2.8950924746129196, + "grad_norm": 1.0938425064086914, + "learning_rate": 6.99640912776555e-06, + "loss": 0.1467, + "step": 74980 + }, + { + "epoch": 2.895478589906946, + "grad_norm": 2.4818408489227295, + "learning_rate": 6.970668108163765e-06, + "loss": 0.107, + "step": 74990 + }, + { + "epoch": 2.895864705200973, + "grad_norm": 0.18132860958576202, + "learning_rate": 6.944927088561979e-06, + "loss": 0.0966, + "step": 75000 + }, + { + "epoch": 2.8962508204949997, + "grad_norm": 1.1011055707931519, + "learning_rate": 6.919186068960191e-06, + "loss": 0.177, + "step": 75010 + }, + { + "epoch": 2.8966369357890267, + "grad_norm": 0.2061525285243988, + "learning_rate": 6.893445049358406e-06, + "loss": 0.1528, + "step": 75020 + }, + { + "epoch": 2.8970230510830532, + "grad_norm": 0.651801347732544, + "learning_rate": 6.867704029756619e-06, + "loss": 0.2042, + "step": 75030 + }, + { + "epoch": 2.8974091663770802, + "grad_norm": 0.5397346019744873, + 
"learning_rate": 6.841963010154833e-06, + "loss": 0.2099, + "step": 75040 + }, + { + "epoch": 2.8977952816711072, + "grad_norm": 2.301893949508667, + "learning_rate": 6.816221990553045e-06, + "loss": 0.107, + "step": 75050 + }, + { + "epoch": 2.8981813969651338, + "grad_norm": 0.7261497378349304, + "learning_rate": 6.79048097095126e-06, + "loss": 0.222, + "step": 75060 + }, + { + "epoch": 2.8985675122591603, + "grad_norm": 2.5633339881896973, + "learning_rate": 6.7647399513494735e-06, + "loss": 0.2573, + "step": 75070 + }, + { + "epoch": 2.8989536275531873, + "grad_norm": 1.5711263418197632, + "learning_rate": 6.738998931747687e-06, + "loss": 0.22, + "step": 75080 + }, + { + "epoch": 2.8993397428472143, + "grad_norm": 0.3720754086971283, + "learning_rate": 6.713257912145901e-06, + "loss": 0.2248, + "step": 75090 + }, + { + "epoch": 2.899725858141241, + "grad_norm": 6.319779396057129, + "learning_rate": 6.687516892544114e-06, + "loss": 0.1005, + "step": 75100 + }, + { + "epoch": 2.900111973435268, + "grad_norm": 0.8868811130523682, + "learning_rate": 6.661775872942328e-06, + "loss": 0.2097, + "step": 75110 + }, + { + "epoch": 2.900498088729295, + "grad_norm": 0.3579706847667694, + "learning_rate": 6.636034853340541e-06, + "loss": 0.1209, + "step": 75120 + }, + { + "epoch": 2.9008842040233214, + "grad_norm": 2.9907472133636475, + "learning_rate": 6.610293833738755e-06, + "loss": 0.1789, + "step": 75130 + }, + { + "epoch": 2.901270319317348, + "grad_norm": 0.06285133957862854, + "learning_rate": 6.584552814136968e-06, + "loss": 0.1437, + "step": 75140 + }, + { + "epoch": 2.901656434611375, + "grad_norm": 1.304377555847168, + "learning_rate": 6.558811794535182e-06, + "loss": 0.1315, + "step": 75150 + }, + { + "epoch": 2.902042549905402, + "grad_norm": 0.7678247690200806, + "learning_rate": 6.533070774933396e-06, + "loss": 0.1163, + "step": 75160 + }, + { + "epoch": 2.9024286651994284, + "grad_norm": 0.30619505047798157, + "learning_rate": 6.507329755331609e-06, + "loss": 0.0777, + "step": 75170 + }, + { + "epoch": 2.9028147804934554, + "grad_norm": 2.499606132507324, + "learning_rate": 6.4815887357298225e-06, + "loss": 0.2395, + "step": 75180 + }, + { + "epoch": 2.9032008957874824, + "grad_norm": 0.9318193793296814, + "learning_rate": 6.455847716128036e-06, + "loss": 0.152, + "step": 75190 + }, + { + "epoch": 2.903587011081509, + "grad_norm": 1.8113256692886353, + "learning_rate": 6.43010669652625e-06, + "loss": 0.1505, + "step": 75200 + }, + { + "epoch": 2.9039731263755355, + "grad_norm": 1.9092323780059814, + "learning_rate": 6.404365676924463e-06, + "loss": 0.2304, + "step": 75210 + }, + { + "epoch": 2.9043592416695625, + "grad_norm": 0.5695223212242126, + "learning_rate": 6.3786246573226765e-06, + "loss": 0.1125, + "step": 75220 + }, + { + "epoch": 2.9047453569635895, + "grad_norm": 2.3485186100006104, + "learning_rate": 6.35288363772089e-06, + "loss": 0.2701, + "step": 75230 + }, + { + "epoch": 2.905131472257616, + "grad_norm": 1.5487502813339233, + "learning_rate": 6.327142618119104e-06, + "loss": 0.1471, + "step": 75240 + }, + { + "epoch": 2.905517587551643, + "grad_norm": 2.2842421531677246, + "learning_rate": 6.301401598517317e-06, + "loss": 0.1211, + "step": 75250 + }, + { + "epoch": 2.9059037028456696, + "grad_norm": 0.5499130487442017, + "learning_rate": 6.275660578915531e-06, + "loss": 0.1515, + "step": 75260 + }, + { + "epoch": 2.9062898181396966, + "grad_norm": 0.7656468749046326, + "learning_rate": 6.249919559313745e-06, + "loss": 0.0743, + "step": 75270 + }, + { + "epoch": 
2.906675933433723, + "grad_norm": 1.4212474822998047, + "learning_rate": 6.224178539711958e-06, + "loss": 0.1706, + "step": 75280 + }, + { + "epoch": 2.90706204872775, + "grad_norm": 0.5614858269691467, + "learning_rate": 6.1984375201101714e-06, + "loss": 0.1551, + "step": 75290 + }, + { + "epoch": 2.907448164021777, + "grad_norm": 0.2481578141450882, + "learning_rate": 6.172696500508386e-06, + "loss": 0.1594, + "step": 75300 + }, + { + "epoch": 2.9078342793158036, + "grad_norm": 0.774730920791626, + "learning_rate": 6.1469554809066e-06, + "loss": 0.2387, + "step": 75310 + }, + { + "epoch": 2.9082203946098306, + "grad_norm": 0.12540464103221893, + "learning_rate": 6.121214461304812e-06, + "loss": 0.1436, + "step": 75320 + }, + { + "epoch": 2.908606509903857, + "grad_norm": 0.36908024549484253, + "learning_rate": 6.095473441703026e-06, + "loss": 0.1046, + "step": 75330 + }, + { + "epoch": 2.908992625197884, + "grad_norm": 0.5687906742095947, + "learning_rate": 6.06973242210124e-06, + "loss": 0.1454, + "step": 75340 + }, + { + "epoch": 2.9093787404919107, + "grad_norm": 1.373570442199707, + "learning_rate": 6.043991402499454e-06, + "loss": 0.203, + "step": 75350 + }, + { + "epoch": 2.9097648557859377, + "grad_norm": 0.40615278482437134, + "learning_rate": 6.018250382897666e-06, + "loss": 0.0836, + "step": 75360 + }, + { + "epoch": 2.9101509710799647, + "grad_norm": 1.5747426748275757, + "learning_rate": 5.9925093632958805e-06, + "loss": 0.1206, + "step": 75370 + }, + { + "epoch": 2.9105370863739912, + "grad_norm": 0.9298601746559143, + "learning_rate": 5.966768343694094e-06, + "loss": 0.1219, + "step": 75380 + }, + { + "epoch": 2.910923201668018, + "grad_norm": 0.7346408367156982, + "learning_rate": 5.941027324092308e-06, + "loss": 0.1776, + "step": 75390 + }, + { + "epoch": 2.9113093169620448, + "grad_norm": 0.6600155234336853, + "learning_rate": 5.915286304490521e-06, + "loss": 0.1511, + "step": 75400 + }, + { + "epoch": 2.9116954322560717, + "grad_norm": 0.3379638195037842, + "learning_rate": 5.8895452848887345e-06, + "loss": 0.1602, + "step": 75410 + }, + { + "epoch": 2.9120815475500983, + "grad_norm": 0.7085258960723877, + "learning_rate": 5.863804265286949e-06, + "loss": 0.0688, + "step": 75420 + }, + { + "epoch": 2.9124676628441253, + "grad_norm": 0.379912793636322, + "learning_rate": 5.838063245685162e-06, + "loss": 0.2218, + "step": 75430 + }, + { + "epoch": 2.9128537781381523, + "grad_norm": 1.7112829685211182, + "learning_rate": 5.812322226083375e-06, + "loss": 0.0864, + "step": 75440 + }, + { + "epoch": 2.913239893432179, + "grad_norm": 0.9781870245933533, + "learning_rate": 5.786581206481589e-06, + "loss": 0.1837, + "step": 75450 + }, + { + "epoch": 2.913626008726206, + "grad_norm": 0.6391132473945618, + "learning_rate": 5.760840186879803e-06, + "loss": 0.0902, + "step": 75460 + }, + { + "epoch": 2.9140121240202324, + "grad_norm": 1.400060772895813, + "learning_rate": 5.735099167278016e-06, + "loss": 0.1623, + "step": 75470 + }, + { + "epoch": 2.9143982393142593, + "grad_norm": 1.2771530151367188, + "learning_rate": 5.709358147676229e-06, + "loss": 0.1589, + "step": 75480 + }, + { + "epoch": 2.914784354608286, + "grad_norm": 0.37462353706359863, + "learning_rate": 5.683617128074444e-06, + "loss": 0.1241, + "step": 75490 + }, + { + "epoch": 2.915170469902313, + "grad_norm": 1.5162663459777832, + "learning_rate": 5.657876108472657e-06, + "loss": 0.0891, + "step": 75500 + } + ], + "logging_steps": 10, + "max_steps": 77697, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + 
"save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 4.597632073728e+16, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/training_args.bin b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..51ec8c074aec62c737bdc86f3c7f1d33bd5739dc Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-75500/training_args.bin differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/added_tokens.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/added_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..ea36ca9a30d42cfe00f964ed2b450595386671dc --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/added_tokens.json @@ -0,0 +1,98 @@ +{ + "": 36095, + "": 36085, + "": 36084, + "": 36083, + "": 36082, + "": 36081, + "": 36080, + "": 36079, + "": 36078, + "": 36077, + "": 36076, + "": 36094, + "": 36075, + "": 36074, + "": 36073, + "": 36072, + "": 36071, + "": 36070, + "": 36069, + "": 36068, + "": 36067, + "": 36066, + "": 36093, + "": 36065, + "": 36064, + "": 36063, + "": 36062, + "": 36061, + "": 36060, + "": 36059, + "": 36058, + "": 36057, + "": 36056, + "": 36092, + "": 36055, + "": 36054, + "": 36053, + "": 36052, + "": 36051, + "": 36050, + "": 36049, + "": 36048, + "": 36047, + "": 36046, + "": 36091, + "": 36045, + "": 36044, + "": 36043, + "": 36042, + "": 36041, + "": 36040, + "": 36039, + "": 36038, + "": 36037, + "": 36036, + "": 36090, + "": 36035, + "": 36034, + "": 36033, + "": 36032, + "": 36031, + "": 36030, + "": 36029, + "": 36028, + "": 36027, + "": 36026, + "": 36089, + "": 36025, + "": 36024, + "": 36023, + "": 36022, + "": 36021, + "": 36020, + "": 36019, + "": 36018, + "": 36017, + "": 36016, + "": 36088, + "": 36015, + "": 36014, + "": 36013, + "": 36012, + "": 36011, + "": 36010, + "": 36009, + "": 36008, + "": 36007, + "": 36006, + "": 36087, + "": 36005, + "": 36004, + "": 36003, + "": 36002, + "": 36001, + "": 36000, + "": 36086 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..402574816dbd44d7c00b58882614b0cba909b7da --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/config.json @@ -0,0 +1,31 @@ +{ + "architectures": [ + "T5ForConditionalGeneration" + ], + "classifier_dropout": 0.0, + "d_ff": 3072, + "d_kv": 64, + "d_model": 768, + "decoder_start_token_id": 0, + "dense_act_fn": "relu", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "relu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": false, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "n_positions": 512, + "num_decoder_layers": 12, + "num_heads": 12, + "num_layers": 12, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "torch_dtype": "float32", + "transformers_version": "4.53.2", + "use_cache": true, + "vocab_size": 36096 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/generation_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/generation_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..88a411aa38dffcace543088cff3153003de6e01a --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/generation_config.json @@ -0,0 +1,7 @@ +{ + "_from_model_config": true, + "decoder_start_token_id": 0, + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.53.2" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/model.safetensors b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ab0a978ce5d7da660d78e9c556afb8fea8cf31f9 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1152910614c30ca1d5428ef0a7087d93122d191dafc9f30ac883d967c9016061 +size 903834408 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/optimizer.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..393ef61e1875f0ba59fefea8137f12f8d65182a0 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1904a764d7d9e50411105ceb705ab84b808979bd5e9c573aef1568b1f58dbcd3 +size 1807824651 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/rng_state.pth b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..a11feaba69e6fa56bf984c286a82800e3def0a12 Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/rng_state.pth differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/scheduler.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..1eeb3506af187e718fd6a80827e9cfc5caf13246 Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/scheduler.pt differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/special_tokens_map.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..843b3344f47b1783c48b5ac91bb6015ae9d3c4be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/special_tokens_map.json @@ -0,0 +1,121 @@ +{ + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/spiece.model b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/spiece.model new file mode 100644 index 
0000000000000000000000000000000000000000..f8bddaf892bdf23d2148f3a3b358f16c5c45c7be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/spiece.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59986b62f9f0b90edafb9b073ea7b93d21114a5841219a1ea2399ade73f729c6 +size 820370 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/tokenizer_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7da9aea82b39809d9fbe6214e6f4fa2340b695f8 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/tokenizer_config.json @@ -0,0 +1,905 @@ +{ + "add_prefix_space": true, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "36000": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36001": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36002": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36003": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36004": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36005": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36006": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36007": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36008": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36009": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36010": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36011": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36012": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36013": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36014": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36015": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36016": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36017": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": 
false, + "special": true + }, + "36018": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36019": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36020": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36021": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36022": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36023": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36024": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36025": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36026": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36027": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36028": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36029": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36030": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36031": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36032": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36033": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36034": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36035": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36036": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36037": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36038": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36039": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36040": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36041": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36042": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36043": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + 
"special": true + }, + "36044": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36045": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36046": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36047": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36048": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36049": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36050": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36051": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36052": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36053": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36054": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36055": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36056": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36057": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36058": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36059": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36060": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36061": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36062": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36063": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36064": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36065": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36066": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36067": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36068": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36069": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + 
}, + "36070": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36071": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36072": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36073": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36074": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36075": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36076": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36077": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36078": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36079": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36080": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36081": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36082": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36083": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36084": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36085": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36086": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36087": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36088": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36089": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36090": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36091": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36092": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36093": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36094": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36095": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + } + }, + 
"additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_ids": 96, + "extra_special_tokens": {}, + "legacy": true, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "tokenizer_class": "T5Tokenizer", + "unk_token": "" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/trainer_state.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..8e73d1e57666a3c26634b6c27f2559a0072397f2 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/trainer_state.json @@ -0,0 +1,53234 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.9344762346036526, + "eval_steps": 500, + "global_step": 76000, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0003861152940267964, + "grad_norm": 8.003422737121582, + "learning_rate": 0.0001999768330823584, + "loss": 5.0107, + "step": 10 + }, + { + "epoch": 0.0007722305880535929, + "grad_norm": 11.965606689453125, + "learning_rate": 0.0001999510920627566, + "loss": 0.9013, + "step": 20 + }, + { + "epoch": 0.0011583458820803893, + "grad_norm": 9.783374786376953, + "learning_rate": 0.00019992535104315483, + "loss": 0.5506, + "step": 30 + }, + { + "epoch": 0.0015444611761071857, + "grad_norm": 5.720436096191406, + "learning_rate": 0.00019989961002355304, + "loss": 0.501, + "step": 40 + }, + { + "epoch": 0.001930576470133982, + "grad_norm": 11.18126392364502, + "learning_rate": 0.00019987386900395125, + "loss": 0.8176, + "step": 50 + }, + { + "epoch": 0.0023166917641607786, + "grad_norm": 8.88875675201416, + "learning_rate": 0.00019984812798434947, + "loss": 0.5404, + "step": 60 + }, + { + "epoch": 0.0027028070581875748, + "grad_norm": 2.9886066913604736, + "learning_rate": 0.00019982238696474768, + "loss": 0.5295, + "step": 70 + }, + { + "epoch": 0.0030889223522143714, + "grad_norm": 8.936307907104492, + "learning_rate": 0.0001997966459451459, + "loss": 0.7398, + "step": 80 + }, + { + "epoch": 0.0034750376462411676, + "grad_norm": 11.393534660339355, + "learning_rate": 0.0001997709049255441, + "loss": 0.6333, + "step": 90 + }, + { + "epoch": 0.003861152940267964, + "grad_norm": 8.725994110107422, + "learning_rate": 0.00019974516390594235, + "loss": 0.4689, + "step": 100 + }, + { + "epoch": 0.00424726823429476, + "grad_norm": 20.316652297973633, + "learning_rate": 0.00019971942288634053, + "loss": 0.8522, + "step": 110 + }, + { + "epoch": 0.004633383528321557, + "grad_norm": 4.232663631439209, + "learning_rate": 0.00019969368186673875, + "loss": 0.5041, + "step": 120 + }, + { + "epoch": 0.005019498822348353, + "grad_norm": 2.609255313873291, + "learning_rate": 0.00019966794084713696, + "loss": 0.7439, + "step": 130 + }, + { + "epoch": 0.0054056141163751495, + "grad_norm": 
10.063919067382812, + "learning_rate": 0.00019964219982753517, + "loss": 0.6702, + "step": 140 + }, + { + "epoch": 0.005791729410401946, + "grad_norm": 5.799802303314209, + "learning_rate": 0.00019961645880793339, + "loss": 0.7356, + "step": 150 + }, + { + "epoch": 0.006177844704428743, + "grad_norm": 12.664258003234863, + "learning_rate": 0.0001995907177883316, + "loss": 0.4067, + "step": 160 + }, + { + "epoch": 0.006563959998455539, + "grad_norm": 9.42366886138916, + "learning_rate": 0.00019956497676872984, + "loss": 0.4767, + "step": 170 + }, + { + "epoch": 0.006950075292482335, + "grad_norm": 5.382272243499756, + "learning_rate": 0.00019953923574912803, + "loss": 0.536, + "step": 180 + }, + { + "epoch": 0.0073361905865091314, + "grad_norm": 9.797371864318848, + "learning_rate": 0.00019951349472952624, + "loss": 0.4735, + "step": 190 + }, + { + "epoch": 0.007722305880535928, + "grad_norm": 7.965329647064209, + "learning_rate": 0.00019948775370992445, + "loss": 0.3881, + "step": 200 + }, + { + "epoch": 0.008108421174562725, + "grad_norm": 4.075791835784912, + "learning_rate": 0.00019946201269032267, + "loss": 0.5564, + "step": 210 + }, + { + "epoch": 0.00849453646858952, + "grad_norm": 24.367305755615234, + "learning_rate": 0.0001994362716707209, + "loss": 0.9795, + "step": 220 + }, + { + "epoch": 0.008880651762616317, + "grad_norm": 9.627866744995117, + "learning_rate": 0.0001994105306511191, + "loss": 0.4528, + "step": 230 + }, + { + "epoch": 0.009266767056643114, + "grad_norm": 7.469555854797363, + "learning_rate": 0.00019938478963151733, + "loss": 0.447, + "step": 240 + }, + { + "epoch": 0.00965288235066991, + "grad_norm": 7.426730155944824, + "learning_rate": 0.00019935904861191552, + "loss": 0.6026, + "step": 250 + }, + { + "epoch": 0.010038997644696707, + "grad_norm": 6.999317169189453, + "learning_rate": 0.00019933330759231373, + "loss": 0.4962, + "step": 260 + }, + { + "epoch": 0.010425112938723502, + "grad_norm": 10.492286682128906, + "learning_rate": 0.00019930756657271194, + "loss": 0.7987, + "step": 270 + }, + { + "epoch": 0.010811228232750299, + "grad_norm": 7.079407215118408, + "learning_rate": 0.00019928182555311016, + "loss": 0.4395, + "step": 280 + }, + { + "epoch": 0.011197343526777096, + "grad_norm": 9.610014915466309, + "learning_rate": 0.0001992560845335084, + "loss": 0.748, + "step": 290 + }, + { + "epoch": 0.011583458820803891, + "grad_norm": 5.993048667907715, + "learning_rate": 0.00019923034351390658, + "loss": 0.4328, + "step": 300 + }, + { + "epoch": 0.011969574114830689, + "grad_norm": 7.336791515350342, + "learning_rate": 0.00019920460249430483, + "loss": 0.4104, + "step": 310 + }, + { + "epoch": 0.012355689408857486, + "grad_norm": 7.967221736907959, + "learning_rate": 0.000199178861474703, + "loss": 0.4662, + "step": 320 + }, + { + "epoch": 0.012741804702884281, + "grad_norm": 4.464987754821777, + "learning_rate": 0.00019915312045510125, + "loss": 0.725, + "step": 330 + }, + { + "epoch": 0.013127919996911078, + "grad_norm": 8.669449806213379, + "learning_rate": 0.00019912737943549944, + "loss": 0.4256, + "step": 340 + }, + { + "epoch": 0.013514035290937873, + "grad_norm": 4.114014148712158, + "learning_rate": 0.00019910163841589765, + "loss": 0.4477, + "step": 350 + }, + { + "epoch": 0.01390015058496467, + "grad_norm": 9.254106521606445, + "learning_rate": 0.0001990758973962959, + "loss": 0.514, + "step": 360 + }, + { + "epoch": 0.014286265878991468, + "grad_norm": 0.8039970993995667, + "learning_rate": 0.00019905015637669408, + "loss": 0.5802, + "step": 
370 + }, + { + "epoch": 0.014672381173018263, + "grad_norm": 3.9931838512420654, + "learning_rate": 0.00019902441535709232, + "loss": 0.8973, + "step": 380 + }, + { + "epoch": 0.01505849646704506, + "grad_norm": 1.7645355463027954, + "learning_rate": 0.0001989986743374905, + "loss": 0.7108, + "step": 390 + }, + { + "epoch": 0.015444611761071855, + "grad_norm": 6.8542866706848145, + "learning_rate": 0.00019897293331788875, + "loss": 0.5796, + "step": 400 + }, + { + "epoch": 0.015830727055098654, + "grad_norm": 5.278103828430176, + "learning_rate": 0.00019894719229828696, + "loss": 0.3841, + "step": 410 + }, + { + "epoch": 0.01621684234912545, + "grad_norm": 9.00206184387207, + "learning_rate": 0.00019892145127868514, + "loss": 0.5891, + "step": 420 + }, + { + "epoch": 0.016602957643152245, + "grad_norm": 7.684702396392822, + "learning_rate": 0.00019889571025908339, + "loss": 0.4868, + "step": 430 + }, + { + "epoch": 0.01698907293717904, + "grad_norm": 4.198502540588379, + "learning_rate": 0.00019886996923948157, + "loss": 0.571, + "step": 440 + }, + { + "epoch": 0.01737518823120584, + "grad_norm": 7.454501628875732, + "learning_rate": 0.0001988442282198798, + "loss": 0.5133, + "step": 450 + }, + { + "epoch": 0.017761303525232634, + "grad_norm": 13.236722946166992, + "learning_rate": 0.000198818487200278, + "loss": 0.4139, + "step": 460 + }, + { + "epoch": 0.01814741881925943, + "grad_norm": 6.4592390060424805, + "learning_rate": 0.00019879274618067624, + "loss": 0.6078, + "step": 470 + }, + { + "epoch": 0.01853353411328623, + "grad_norm": 11.73417854309082, + "learning_rate": 0.00019876700516107445, + "loss": 0.5472, + "step": 480 + }, + { + "epoch": 0.018919649407313024, + "grad_norm": 2.5162808895111084, + "learning_rate": 0.00019874126414147264, + "loss": 0.6611, + "step": 490 + }, + { + "epoch": 0.01930576470133982, + "grad_norm": 4.9637837409973145, + "learning_rate": 0.00019871552312187088, + "loss": 0.6472, + "step": 500 + }, + { + "epoch": 0.019691879995366618, + "grad_norm": 11.545489311218262, + "learning_rate": 0.00019868978210226906, + "loss": 0.5304, + "step": 510 + }, + { + "epoch": 0.020077995289393413, + "grad_norm": 5.197858810424805, + "learning_rate": 0.0001986640410826673, + "loss": 0.605, + "step": 520 + }, + { + "epoch": 0.02046411058342021, + "grad_norm": 4.935055255889893, + "learning_rate": 0.0001986383000630655, + "loss": 0.6524, + "step": 530 + }, + { + "epoch": 0.020850225877447004, + "grad_norm": 5.838052749633789, + "learning_rate": 0.00019861255904346373, + "loss": 0.4957, + "step": 540 + }, + { + "epoch": 0.021236341171473803, + "grad_norm": 4.682408809661865, + "learning_rate": 0.00019858681802386194, + "loss": 0.8523, + "step": 550 + }, + { + "epoch": 0.021622456465500598, + "grad_norm": 10.720857620239258, + "learning_rate": 0.00019856107700426013, + "loss": 0.516, + "step": 560 + }, + { + "epoch": 0.022008571759527393, + "grad_norm": 6.515562534332275, + "learning_rate": 0.00019853533598465837, + "loss": 0.6095, + "step": 570 + }, + { + "epoch": 0.022394687053554192, + "grad_norm": 3.204960584640503, + "learning_rate": 0.00019850959496505656, + "loss": 0.6624, + "step": 580 + }, + { + "epoch": 0.022780802347580988, + "grad_norm": 2.305497884750366, + "learning_rate": 0.0001984838539454548, + "loss": 0.5986, + "step": 590 + }, + { + "epoch": 0.023166917641607783, + "grad_norm": 13.07105541229248, + "learning_rate": 0.000198458112925853, + "loss": 0.337, + "step": 600 + }, + { + "epoch": 0.023553032935634582, + "grad_norm": 3.1491329669952393, + 
"learning_rate": 0.00019843237190625122, + "loss": 0.5466, + "step": 610 + }, + { + "epoch": 0.023939148229661377, + "grad_norm": 1.92014479637146, + "learning_rate": 0.00019840663088664944, + "loss": 0.5525, + "step": 620 + }, + { + "epoch": 0.024325263523688172, + "grad_norm": 2.206550121307373, + "learning_rate": 0.00019838088986704762, + "loss": 0.5069, + "step": 630 + }, + { + "epoch": 0.02471137881771497, + "grad_norm": 2.386288642883301, + "learning_rate": 0.00019835514884744586, + "loss": 0.3749, + "step": 640 + }, + { + "epoch": 0.025097494111741767, + "grad_norm": 7.393959045410156, + "learning_rate": 0.00019832940782784405, + "loss": 0.7144, + "step": 650 + }, + { + "epoch": 0.025483609405768562, + "grad_norm": 5.7293171882629395, + "learning_rate": 0.0001983036668082423, + "loss": 0.5052, + "step": 660 + }, + { + "epoch": 0.025869724699795357, + "grad_norm": 6.440220832824707, + "learning_rate": 0.0001982779257886405, + "loss": 0.4343, + "step": 670 + }, + { + "epoch": 0.026255839993822156, + "grad_norm": 0.8553487658500671, + "learning_rate": 0.00019825218476903872, + "loss": 0.7562, + "step": 680 + }, + { + "epoch": 0.02664195528784895, + "grad_norm": 3.762784719467163, + "learning_rate": 0.00019822644374943693, + "loss": 0.5593, + "step": 690 + }, + { + "epoch": 0.027028070581875747, + "grad_norm": 11.674392700195312, + "learning_rate": 0.00019820070272983512, + "loss": 0.6069, + "step": 700 + }, + { + "epoch": 0.027414185875902546, + "grad_norm": 8.631232261657715, + "learning_rate": 0.00019817496171023336, + "loss": 0.3584, + "step": 710 + }, + { + "epoch": 0.02780030116992934, + "grad_norm": 5.7163920402526855, + "learning_rate": 0.00019814922069063157, + "loss": 0.5563, + "step": 720 + }, + { + "epoch": 0.028186416463956136, + "grad_norm": 8.186172485351562, + "learning_rate": 0.00019812347967102978, + "loss": 0.5289, + "step": 730 + }, + { + "epoch": 0.028572531757982935, + "grad_norm": 7.287814140319824, + "learning_rate": 0.000198097738651428, + "loss": 0.4543, + "step": 740 + }, + { + "epoch": 0.02895864705200973, + "grad_norm": 6.621245384216309, + "learning_rate": 0.0001980719976318262, + "loss": 0.3244, + "step": 750 + }, + { + "epoch": 0.029344762346036526, + "grad_norm": 3.5209403038024902, + "learning_rate": 0.00019804625661222442, + "loss": 0.5385, + "step": 760 + }, + { + "epoch": 0.029730877640063325, + "grad_norm": 2.562343120574951, + "learning_rate": 0.00019802051559262264, + "loss": 0.4868, + "step": 770 + }, + { + "epoch": 0.03011699293409012, + "grad_norm": 7.782780647277832, + "learning_rate": 0.00019799477457302085, + "loss": 0.5682, + "step": 780 + }, + { + "epoch": 0.030503108228116915, + "grad_norm": 8.173531532287598, + "learning_rate": 0.00019796903355341906, + "loss": 0.3557, + "step": 790 + }, + { + "epoch": 0.03088922352214371, + "grad_norm": 4.502675533294678, + "learning_rate": 0.00019794329253381728, + "loss": 0.517, + "step": 800 + }, + { + "epoch": 0.031275338816170506, + "grad_norm": 6.314894199371338, + "learning_rate": 0.0001979175515142155, + "loss": 0.6392, + "step": 810 + }, + { + "epoch": 0.03166145411019731, + "grad_norm": 7.054763317108154, + "learning_rate": 0.0001978918104946137, + "loss": 0.5823, + "step": 820 + }, + { + "epoch": 0.032047569404224104, + "grad_norm": 1.7847551107406616, + "learning_rate": 0.00019786606947501192, + "loss": 0.4495, + "step": 830 + }, + { + "epoch": 0.0324336846982509, + "grad_norm": 5.268612861633301, + "learning_rate": 0.00019784032845541013, + "loss": 0.4379, + "step": 840 + }, + { + "epoch": 
0.032819799992277694, + "grad_norm": 4.834717273712158, + "learning_rate": 0.00019781458743580834, + "loss": 0.5379, + "step": 850 + }, + { + "epoch": 0.03320591528630449, + "grad_norm": 6.077203273773193, + "learning_rate": 0.00019778884641620656, + "loss": 0.5666, + "step": 860 + }, + { + "epoch": 0.033592030580331285, + "grad_norm": 0.9583851099014282, + "learning_rate": 0.00019776310539660477, + "loss": 0.8146, + "step": 870 + }, + { + "epoch": 0.03397814587435808, + "grad_norm": 0.8457469940185547, + "learning_rate": 0.00019773736437700298, + "loss": 0.3497, + "step": 880 + }, + { + "epoch": 0.03436426116838488, + "grad_norm": 5.251153945922852, + "learning_rate": 0.0001977116233574012, + "loss": 0.6299, + "step": 890 + }, + { + "epoch": 0.03475037646241168, + "grad_norm": 4.057605266571045, + "learning_rate": 0.0001976858823377994, + "loss": 0.5829, + "step": 900 + }, + { + "epoch": 0.03513649175643847, + "grad_norm": 7.625199794769287, + "learning_rate": 0.00019766014131819762, + "loss": 0.452, + "step": 910 + }, + { + "epoch": 0.03552260705046527, + "grad_norm": 4.618866443634033, + "learning_rate": 0.00019763440029859584, + "loss": 0.5693, + "step": 920 + }, + { + "epoch": 0.035908722344492064, + "grad_norm": 8.480955123901367, + "learning_rate": 0.00019760865927899405, + "loss": 0.4894, + "step": 930 + }, + { + "epoch": 0.03629483763851886, + "grad_norm": 6.537581920623779, + "learning_rate": 0.00019758291825939226, + "loss": 0.7396, + "step": 940 + }, + { + "epoch": 0.03668095293254566, + "grad_norm": 8.093205451965332, + "learning_rate": 0.00019755717723979048, + "loss": 0.3702, + "step": 950 + }, + { + "epoch": 0.03706706822657246, + "grad_norm": 2.523141860961914, + "learning_rate": 0.0001975314362201887, + "loss": 0.3746, + "step": 960 + }, + { + "epoch": 0.03745318352059925, + "grad_norm": 1.2707194089889526, + "learning_rate": 0.0001975056952005869, + "loss": 0.3211, + "step": 970 + }, + { + "epoch": 0.03783929881462605, + "grad_norm": 2.818798780441284, + "learning_rate": 0.00019747995418098512, + "loss": 0.6594, + "step": 980 + }, + { + "epoch": 0.03822541410865284, + "grad_norm": 7.373154640197754, + "learning_rate": 0.00019745421316138333, + "loss": 0.7825, + "step": 990 + }, + { + "epoch": 0.03861152940267964, + "grad_norm": 2.8925669193267822, + "learning_rate": 0.00019742847214178154, + "loss": 0.4847, + "step": 1000 + }, + { + "epoch": 0.038997644696706434, + "grad_norm": 10.87833023071289, + "learning_rate": 0.00019740273112217976, + "loss": 0.5098, + "step": 1010 + }, + { + "epoch": 0.039383759990733236, + "grad_norm": 3.8262317180633545, + "learning_rate": 0.00019737699010257797, + "loss": 0.5168, + "step": 1020 + }, + { + "epoch": 0.03976987528476003, + "grad_norm": 2.5567004680633545, + "learning_rate": 0.00019735124908297618, + "loss": 0.5597, + "step": 1030 + }, + { + "epoch": 0.04015599057878683, + "grad_norm": 4.385695457458496, + "learning_rate": 0.0001973255080633744, + "loss": 0.3187, + "step": 1040 + }, + { + "epoch": 0.04054210587281362, + "grad_norm": 1.8186907768249512, + "learning_rate": 0.0001972997670437726, + "loss": 0.6274, + "step": 1050 + }, + { + "epoch": 0.04092822116684042, + "grad_norm": 7.446481704711914, + "learning_rate": 0.00019727402602417082, + "loss": 0.4365, + "step": 1060 + }, + { + "epoch": 0.04131433646086721, + "grad_norm": 8.973576545715332, + "learning_rate": 0.00019724828500456904, + "loss": 0.6518, + "step": 1070 + }, + { + "epoch": 0.04170045175489401, + "grad_norm": 4.984101295471191, + "learning_rate": 
0.00019722254398496725, + "loss": 0.4694, + "step": 1080 + }, + { + "epoch": 0.04208656704892081, + "grad_norm": 8.2625093460083, + "learning_rate": 0.00019719680296536546, + "loss": 0.5532, + "step": 1090 + }, + { + "epoch": 0.042472682342947606, + "grad_norm": 2.2425265312194824, + "learning_rate": 0.0001971710619457637, + "loss": 0.4011, + "step": 1100 + }, + { + "epoch": 0.0428587976369744, + "grad_norm": 1.4552969932556152, + "learning_rate": 0.0001971453209261619, + "loss": 0.396, + "step": 1110 + }, + { + "epoch": 0.043244912931001196, + "grad_norm": 2.0675857067108154, + "learning_rate": 0.0001971195799065601, + "loss": 0.3727, + "step": 1120 + }, + { + "epoch": 0.04363102822502799, + "grad_norm": 5.24460506439209, + "learning_rate": 0.00019709383888695832, + "loss": 0.5016, + "step": 1130 + }, + { + "epoch": 0.04401714351905479, + "grad_norm": 5.524955749511719, + "learning_rate": 0.00019706809786735653, + "loss": 0.5866, + "step": 1140 + }, + { + "epoch": 0.04440325881308159, + "grad_norm": 4.2749199867248535, + "learning_rate": 0.00019704235684775474, + "loss": 0.4591, + "step": 1150 + }, + { + "epoch": 0.044789374107108385, + "grad_norm": 3.460395336151123, + "learning_rate": 0.00019701661582815296, + "loss": 0.5275, + "step": 1160 + }, + { + "epoch": 0.04517548940113518, + "grad_norm": 1.3979772329330444, + "learning_rate": 0.0001969908748085512, + "loss": 0.3375, + "step": 1170 + }, + { + "epoch": 0.045561604695161975, + "grad_norm": 2.29923939704895, + "learning_rate": 0.00019696513378894938, + "loss": 0.5683, + "step": 1180 + }, + { + "epoch": 0.04594771998918877, + "grad_norm": 3.211496353149414, + "learning_rate": 0.0001969393927693476, + "loss": 0.7122, + "step": 1190 + }, + { + "epoch": 0.046333835283215566, + "grad_norm": 4.18447208404541, + "learning_rate": 0.0001969136517497458, + "loss": 0.5149, + "step": 1200 + }, + { + "epoch": 0.04671995057724236, + "grad_norm": 14.650918960571289, + "learning_rate": 0.00019688791073014402, + "loss": 0.6384, + "step": 1210 + }, + { + "epoch": 0.047106065871269164, + "grad_norm": 4.956239700317383, + "learning_rate": 0.00019686216971054226, + "loss": 0.3602, + "step": 1220 + }, + { + "epoch": 0.04749218116529596, + "grad_norm": 4.0486860275268555, + "learning_rate": 0.00019683642869094045, + "loss": 0.7719, + "step": 1230 + }, + { + "epoch": 0.047878296459322754, + "grad_norm": 4.587133407592773, + "learning_rate": 0.0001968106876713387, + "loss": 0.4079, + "step": 1240 + }, + { + "epoch": 0.04826441175334955, + "grad_norm": 0.7830008268356323, + "learning_rate": 0.00019678494665173688, + "loss": 0.5841, + "step": 1250 + }, + { + "epoch": 0.048650527047376345, + "grad_norm": 5.378068447113037, + "learning_rate": 0.0001967592056321351, + "loss": 0.3226, + "step": 1260 + }, + { + "epoch": 0.04903664234140314, + "grad_norm": 4.002605438232422, + "learning_rate": 0.0001967334646125333, + "loss": 0.4411, + "step": 1270 + }, + { + "epoch": 0.04942275763542994, + "grad_norm": 4.695134162902832, + "learning_rate": 0.00019670772359293152, + "loss": 0.415, + "step": 1280 + }, + { + "epoch": 0.04980887292945674, + "grad_norm": 6.046143054962158, + "learning_rate": 0.00019668198257332976, + "loss": 0.463, + "step": 1290 + }, + { + "epoch": 0.05019498822348353, + "grad_norm": 1.8300361633300781, + "learning_rate": 0.00019665624155372794, + "loss": 0.408, + "step": 1300 + }, + { + "epoch": 0.05058110351751033, + "grad_norm": 5.80141544342041, + "learning_rate": 0.00019663050053412618, + "loss": 0.4481, + "step": 1310 + }, + { + "epoch": 
0.050967218811537124, + "grad_norm": 4.103593349456787, + "learning_rate": 0.00019660475951452437, + "loss": 0.3054, + "step": 1320 + }, + { + "epoch": 0.05135333410556392, + "grad_norm": 9.129929542541504, + "learning_rate": 0.00019657901849492258, + "loss": 0.5554, + "step": 1330 + }, + { + "epoch": 0.051739449399590715, + "grad_norm": 6.979663372039795, + "learning_rate": 0.0001965532774753208, + "loss": 0.388, + "step": 1340 + }, + { + "epoch": 0.05212556469361752, + "grad_norm": 6.329915523529053, + "learning_rate": 0.000196527536455719, + "loss": 0.8894, + "step": 1350 + }, + { + "epoch": 0.05251167998764431, + "grad_norm": 0.2686227262020111, + "learning_rate": 0.00019650179543611725, + "loss": 0.5032, + "step": 1360 + }, + { + "epoch": 0.05289779528167111, + "grad_norm": 4.818896770477295, + "learning_rate": 0.00019647605441651544, + "loss": 0.3417, + "step": 1370 + }, + { + "epoch": 0.0532839105756979, + "grad_norm": 6.252008438110352, + "learning_rate": 0.00019645031339691368, + "loss": 0.398, + "step": 1380 + }, + { + "epoch": 0.0536700258697247, + "grad_norm": 1.2734620571136475, + "learning_rate": 0.00019642457237731186, + "loss": 0.5369, + "step": 1390 + }, + { + "epoch": 0.054056141163751494, + "grad_norm": 6.511690616607666, + "learning_rate": 0.00019639883135771008, + "loss": 0.4646, + "step": 1400 + }, + { + "epoch": 0.054442256457778296, + "grad_norm": 3.2352371215820312, + "learning_rate": 0.00019637309033810832, + "loss": 0.3805, + "step": 1410 + }, + { + "epoch": 0.05482837175180509, + "grad_norm": 1.0574132204055786, + "learning_rate": 0.0001963473493185065, + "loss": 0.4572, + "step": 1420 + }, + { + "epoch": 0.055214487045831886, + "grad_norm": 2.8525452613830566, + "learning_rate": 0.00019632160829890474, + "loss": 0.4417, + "step": 1430 + }, + { + "epoch": 0.05560060233985868, + "grad_norm": 3.588179111480713, + "learning_rate": 0.00019629586727930293, + "loss": 0.6214, + "step": 1440 + }, + { + "epoch": 0.05598671763388548, + "grad_norm": 3.969320058822632, + "learning_rate": 0.00019627012625970117, + "loss": 0.6114, + "step": 1450 + }, + { + "epoch": 0.05637283292791227, + "grad_norm": 3.465053081512451, + "learning_rate": 0.00019624438524009936, + "loss": 0.6066, + "step": 1460 + }, + { + "epoch": 0.05675894822193907, + "grad_norm": 3.5419201850891113, + "learning_rate": 0.00019621864422049757, + "loss": 0.3906, + "step": 1470 + }, + { + "epoch": 0.05714506351596587, + "grad_norm": 3.4580233097076416, + "learning_rate": 0.0001961929032008958, + "loss": 0.9283, + "step": 1480 + }, + { + "epoch": 0.057531178809992665, + "grad_norm": 4.222144603729248, + "learning_rate": 0.000196167162181294, + "loss": 0.4225, + "step": 1490 + }, + { + "epoch": 0.05791729410401946, + "grad_norm": 0.8072681427001953, + "learning_rate": 0.00019614142116169224, + "loss": 0.5012, + "step": 1500 + }, + { + "epoch": 0.058303409398046256, + "grad_norm": 2.827258348464966, + "learning_rate": 0.00019611568014209042, + "loss": 0.4333, + "step": 1510 + }, + { + "epoch": 0.05868952469207305, + "grad_norm": 1.3494776487350464, + "learning_rate": 0.00019608993912248866, + "loss": 0.3019, + "step": 1520 + }, + { + "epoch": 0.05907563998609985, + "grad_norm": 4.3279900550842285, + "learning_rate": 0.00019606419810288688, + "loss": 0.4807, + "step": 1530 + }, + { + "epoch": 0.05946175528012665, + "grad_norm": 3.8996474742889404, + "learning_rate": 0.0001960384570832851, + "loss": 0.4876, + "step": 1540 + }, + { + "epoch": 0.059847870574153444, + "grad_norm": 5.255978584289551, + "learning_rate": 
0.0001960127160636833, + "loss": 0.4661, + "step": 1550 + }, + { + "epoch": 0.06023398586818024, + "grad_norm": 5.172120094299316, + "learning_rate": 0.0001959869750440815, + "loss": 0.4885, + "step": 1560 + }, + { + "epoch": 0.060620101162207035, + "grad_norm": 5.385959625244141, + "learning_rate": 0.00019596123402447973, + "loss": 0.2995, + "step": 1570 + }, + { + "epoch": 0.06100621645623383, + "grad_norm": 3.9922871589660645, + "learning_rate": 0.00019593549300487792, + "loss": 0.4568, + "step": 1580 + }, + { + "epoch": 0.061392331750260626, + "grad_norm": 6.048642158508301, + "learning_rate": 0.00019590975198527616, + "loss": 0.4649, + "step": 1590 + }, + { + "epoch": 0.06177844704428742, + "grad_norm": 1.0315563678741455, + "learning_rate": 0.00019588401096567437, + "loss": 0.5175, + "step": 1600 + }, + { + "epoch": 0.06216456233831422, + "grad_norm": 12.403678894042969, + "learning_rate": 0.00019585826994607258, + "loss": 0.523, + "step": 1610 + }, + { + "epoch": 0.06255067763234101, + "grad_norm": 6.127188205718994, + "learning_rate": 0.0001958325289264708, + "loss": 0.5861, + "step": 1620 + }, + { + "epoch": 0.06293679292636781, + "grad_norm": 6.398592948913574, + "learning_rate": 0.00019580678790686898, + "loss": 0.7471, + "step": 1630 + }, + { + "epoch": 0.06332290822039462, + "grad_norm": 4.127200603485107, + "learning_rate": 0.00019578104688726722, + "loss": 0.4921, + "step": 1640 + }, + { + "epoch": 0.06370902351442141, + "grad_norm": 4.601541042327881, + "learning_rate": 0.0001957553058676654, + "loss": 0.3682, + "step": 1650 + }, + { + "epoch": 0.06409513880844821, + "grad_norm": 6.32781457901001, + "learning_rate": 0.00019572956484806365, + "loss": 0.3748, + "step": 1660 + }, + { + "epoch": 0.064481254102475, + "grad_norm": 3.7280173301696777, + "learning_rate": 0.00019570382382846186, + "loss": 0.5912, + "step": 1670 + }, + { + "epoch": 0.0648673693965018, + "grad_norm": 6.7821946144104, + "learning_rate": 0.00019567808280886008, + "loss": 0.4073, + "step": 1680 + }, + { + "epoch": 0.0652534846905286, + "grad_norm": 1.4645791053771973, + "learning_rate": 0.0001956523417892583, + "loss": 0.7164, + "step": 1690 + }, + { + "epoch": 0.06563959998455539, + "grad_norm": 2.367361545562744, + "learning_rate": 0.00019562660076965648, + "loss": 0.3859, + "step": 1700 + }, + { + "epoch": 0.06602571527858218, + "grad_norm": 2.198493480682373, + "learning_rate": 0.00019560085975005472, + "loss": 0.4928, + "step": 1710 + }, + { + "epoch": 0.06641183057260898, + "grad_norm": 1.882567048072815, + "learning_rate": 0.00019557511873045293, + "loss": 0.5861, + "step": 1720 + }, + { + "epoch": 0.06679794586663577, + "grad_norm": 6.324089527130127, + "learning_rate": 0.00019554937771085114, + "loss": 0.6249, + "step": 1730 + }, + { + "epoch": 0.06718406116066257, + "grad_norm": 4.283392906188965, + "learning_rate": 0.00019552363669124936, + "loss": 0.5403, + "step": 1740 + }, + { + "epoch": 0.06757017645468937, + "grad_norm": 4.464428424835205, + "learning_rate": 0.00019549789567164757, + "loss": 0.5815, + "step": 1750 + }, + { + "epoch": 0.06795629174871616, + "grad_norm": 0.32923218607902527, + "learning_rate": 0.00019547215465204578, + "loss": 0.3791, + "step": 1760 + }, + { + "epoch": 0.06834240704274297, + "grad_norm": 5.255763053894043, + "learning_rate": 0.00019544641363244397, + "loss": 0.4252, + "step": 1770 + }, + { + "epoch": 0.06872852233676977, + "grad_norm": 2.1615116596221924, + "learning_rate": 0.0001954206726128422, + "loss": 0.4457, + "step": 1780 + }, + { + "epoch": 
0.06911463763079656, + "grad_norm": 2.0209217071533203, + "learning_rate": 0.00019539493159324042, + "loss": 0.4377, + "step": 1790 + }, + { + "epoch": 0.06950075292482336, + "grad_norm": 8.12317943572998, + "learning_rate": 0.00019536919057363864, + "loss": 0.4025, + "step": 1800 + }, + { + "epoch": 0.06988686821885015, + "grad_norm": 1.7034660577774048, + "learning_rate": 0.00019534344955403685, + "loss": 0.2915, + "step": 1810 + }, + { + "epoch": 0.07027298351287695, + "grad_norm": 4.640942096710205, + "learning_rate": 0.00019531770853443506, + "loss": 0.6588, + "step": 1820 + }, + { + "epoch": 0.07065909880690374, + "grad_norm": 4.202883243560791, + "learning_rate": 0.00019529196751483328, + "loss": 0.4442, + "step": 1830 + }, + { + "epoch": 0.07104521410093054, + "grad_norm": 3.26898193359375, + "learning_rate": 0.00019526622649523146, + "loss": 0.5065, + "step": 1840 + }, + { + "epoch": 0.07143132939495733, + "grad_norm": 8.189995765686035, + "learning_rate": 0.0001952404854756297, + "loss": 0.5258, + "step": 1850 + }, + { + "epoch": 0.07181744468898413, + "grad_norm": 3.2618284225463867, + "learning_rate": 0.00019521474445602792, + "loss": 0.5037, + "step": 1860 + }, + { + "epoch": 0.07220355998301092, + "grad_norm": 2.168548583984375, + "learning_rate": 0.00019518900343642613, + "loss": 0.4887, + "step": 1870 + }, + { + "epoch": 0.07258967527703772, + "grad_norm": 2.2029404640197754, + "learning_rate": 0.00019516326241682434, + "loss": 0.4646, + "step": 1880 + }, + { + "epoch": 0.07297579057106451, + "grad_norm": 1.561713695526123, + "learning_rate": 0.00019513752139722256, + "loss": 0.432, + "step": 1890 + }, + { + "epoch": 0.07336190586509132, + "grad_norm": 3.428372621536255, + "learning_rate": 0.00019511178037762077, + "loss": 0.4124, + "step": 1900 + }, + { + "epoch": 0.07374802115911812, + "grad_norm": 5.6706671714782715, + "learning_rate": 0.00019508603935801898, + "loss": 0.4431, + "step": 1910 + }, + { + "epoch": 0.07413413645314491, + "grad_norm": 8.872734069824219, + "learning_rate": 0.0001950602983384172, + "loss": 0.7001, + "step": 1920 + }, + { + "epoch": 0.07452025174717171, + "grad_norm": 1.6821974515914917, + "learning_rate": 0.0001950345573188154, + "loss": 0.4204, + "step": 1930 + }, + { + "epoch": 0.0749063670411985, + "grad_norm": 3.117480993270874, + "learning_rate": 0.00019500881629921362, + "loss": 0.3748, + "step": 1940 + }, + { + "epoch": 0.0752924823352253, + "grad_norm": 0.8384984731674194, + "learning_rate": 0.00019498307527961184, + "loss": 0.2636, + "step": 1950 + }, + { + "epoch": 0.0756785976292521, + "grad_norm": 2.8956708908081055, + "learning_rate": 0.00019495733426001005, + "loss": 0.4514, + "step": 1960 + }, + { + "epoch": 0.07606471292327889, + "grad_norm": 11.233087539672852, + "learning_rate": 0.00019493159324040826, + "loss": 0.4002, + "step": 1970 + }, + { + "epoch": 0.07645082821730569, + "grad_norm": 4.066893100738525, + "learning_rate": 0.00019490585222080648, + "loss": 0.4449, + "step": 1980 + }, + { + "epoch": 0.07683694351133248, + "grad_norm": 4.854077339172363, + "learning_rate": 0.0001948801112012047, + "loss": 0.4961, + "step": 1990 + }, + { + "epoch": 0.07722305880535928, + "grad_norm": 2.5722827911376953, + "learning_rate": 0.0001948543701816029, + "loss": 0.3743, + "step": 2000 + }, + { + "epoch": 0.07760917409938607, + "grad_norm": 5.842077255249023, + "learning_rate": 0.00019482862916200112, + "loss": 0.2906, + "step": 2010 + }, + { + "epoch": 0.07799528939341287, + "grad_norm": 6.163092136383057, + "learning_rate": 
0.00019480288814239933, + "loss": 0.4374, + "step": 2020 + }, + { + "epoch": 0.07838140468743968, + "grad_norm": 4.589334487915039, + "learning_rate": 0.00019477714712279754, + "loss": 0.484, + "step": 2030 + }, + { + "epoch": 0.07876751998146647, + "grad_norm": 6.951212406158447, + "learning_rate": 0.00019475140610319576, + "loss": 0.5767, + "step": 2040 + }, + { + "epoch": 0.07915363527549327, + "grad_norm": 3.3662521839141846, + "learning_rate": 0.00019472566508359397, + "loss": 0.5566, + "step": 2050 + }, + { + "epoch": 0.07953975056952006, + "grad_norm": 1.6602391004562378, + "learning_rate": 0.00019469992406399218, + "loss": 0.1436, + "step": 2060 + }, + { + "epoch": 0.07992586586354686, + "grad_norm": 6.451857089996338, + "learning_rate": 0.0001946741830443904, + "loss": 0.3778, + "step": 2070 + }, + { + "epoch": 0.08031198115757365, + "grad_norm": 3.7249560356140137, + "learning_rate": 0.0001946484420247886, + "loss": 0.5391, + "step": 2080 + }, + { + "epoch": 0.08069809645160045, + "grad_norm": 4.138098239898682, + "learning_rate": 0.00019462270100518682, + "loss": 0.3598, + "step": 2090 + }, + { + "epoch": 0.08108421174562724, + "grad_norm": 3.224778175354004, + "learning_rate": 0.00019459695998558504, + "loss": 0.2967, + "step": 2100 + }, + { + "epoch": 0.08147032703965404, + "grad_norm": 1.3951358795166016, + "learning_rate": 0.00019457121896598325, + "loss": 0.2698, + "step": 2110 + }, + { + "epoch": 0.08185644233368083, + "grad_norm": 5.956802845001221, + "learning_rate": 0.00019454547794638146, + "loss": 0.451, + "step": 2120 + }, + { + "epoch": 0.08224255762770763, + "grad_norm": 3.456360101699829, + "learning_rate": 0.00019451973692677968, + "loss": 0.3365, + "step": 2130 + }, + { + "epoch": 0.08262867292173443, + "grad_norm": 2.9433653354644775, + "learning_rate": 0.0001944939959071779, + "loss": 0.4424, + "step": 2140 + }, + { + "epoch": 0.08301478821576122, + "grad_norm": 3.136000871658325, + "learning_rate": 0.0001944682548875761, + "loss": 0.6224, + "step": 2150 + }, + { + "epoch": 0.08340090350978802, + "grad_norm": 2.669084310531616, + "learning_rate": 0.00019444251386797432, + "loss": 0.4435, + "step": 2160 + }, + { + "epoch": 0.08378701880381483, + "grad_norm": 4.573731899261475, + "learning_rate": 0.00019441677284837253, + "loss": 0.5555, + "step": 2170 + }, + { + "epoch": 0.08417313409784162, + "grad_norm": 6.354156017303467, + "learning_rate": 0.00019439103182877074, + "loss": 0.4232, + "step": 2180 + }, + { + "epoch": 0.08455924939186842, + "grad_norm": 2.993691921234131, + "learning_rate": 0.00019436529080916895, + "loss": 0.51, + "step": 2190 + }, + { + "epoch": 0.08494536468589521, + "grad_norm": 3.6496782302856445, + "learning_rate": 0.00019433954978956717, + "loss": 0.4031, + "step": 2200 + }, + { + "epoch": 0.085331479979922, + "grad_norm": 1.9039051532745361, + "learning_rate": 0.00019431380876996538, + "loss": 0.4407, + "step": 2210 + }, + { + "epoch": 0.0857175952739488, + "grad_norm": 2.3211915493011475, + "learning_rate": 0.00019428806775036362, + "loss": 0.4057, + "step": 2220 + }, + { + "epoch": 0.0861037105679756, + "grad_norm": 4.883905410766602, + "learning_rate": 0.0001942623267307618, + "loss": 0.6223, + "step": 2230 + }, + { + "epoch": 0.08648982586200239, + "grad_norm": 2.164484977722168, + "learning_rate": 0.00019423658571116005, + "loss": 0.3141, + "step": 2240 + }, + { + "epoch": 0.08687594115602919, + "grad_norm": 2.2078909873962402, + "learning_rate": 0.00019421084469155823, + "loss": 0.3523, + "step": 2250 + }, + { + "epoch": 
0.08726205645005598, + "grad_norm": 1.0987967252731323, + "learning_rate": 0.00019418510367195645, + "loss": 0.4013, + "step": 2260 + }, + { + "epoch": 0.08764817174408278, + "grad_norm": 2.418327569961548, + "learning_rate": 0.00019415936265235466, + "loss": 0.581, + "step": 2270 + }, + { + "epoch": 0.08803428703810957, + "grad_norm": 4.029652118682861, + "learning_rate": 0.00019413362163275287, + "loss": 0.5244, + "step": 2280 + }, + { + "epoch": 0.08842040233213637, + "grad_norm": 3.4661777019500732, + "learning_rate": 0.00019410788061315112, + "loss": 0.4531, + "step": 2290 + }, + { + "epoch": 0.08880651762616318, + "grad_norm": 1.0475856065750122, + "learning_rate": 0.0001940821395935493, + "loss": 0.4362, + "step": 2300 + }, + { + "epoch": 0.08919263292018997, + "grad_norm": 4.021854400634766, + "learning_rate": 0.00019405639857394754, + "loss": 0.4532, + "step": 2310 + }, + { + "epoch": 0.08957874821421677, + "grad_norm": 1.836438536643982, + "learning_rate": 0.00019403065755434573, + "loss": 0.6858, + "step": 2320 + }, + { + "epoch": 0.08996486350824356, + "grad_norm": 2.5251567363739014, + "learning_rate": 0.00019400491653474394, + "loss": 0.3619, + "step": 2330 + }, + { + "epoch": 0.09035097880227036, + "grad_norm": 3.067208766937256, + "learning_rate": 0.00019397917551514215, + "loss": 0.7376, + "step": 2340 + }, + { + "epoch": 0.09073709409629716, + "grad_norm": 0.9124518036842346, + "learning_rate": 0.00019395343449554037, + "loss": 0.4193, + "step": 2350 + }, + { + "epoch": 0.09112320939032395, + "grad_norm": 3.8170812129974365, + "learning_rate": 0.0001939276934759386, + "loss": 0.5393, + "step": 2360 + }, + { + "epoch": 0.09150932468435075, + "grad_norm": 8.19250202178955, + "learning_rate": 0.0001939019524563368, + "loss": 0.424, + "step": 2370 + }, + { + "epoch": 0.09189543997837754, + "grad_norm": 4.459112167358398, + "learning_rate": 0.00019387621143673503, + "loss": 0.3278, + "step": 2380 + }, + { + "epoch": 0.09228155527240434, + "grad_norm": 5.578339576721191, + "learning_rate": 0.00019385047041713322, + "loss": 0.5223, + "step": 2390 + }, + { + "epoch": 0.09266767056643113, + "grad_norm": 1.3707878589630127, + "learning_rate": 0.00019382472939753143, + "loss": 0.3004, + "step": 2400 + }, + { + "epoch": 0.09305378586045793, + "grad_norm": 5.0041184425354, + "learning_rate": 0.00019379898837792967, + "loss": 0.4378, + "step": 2410 + }, + { + "epoch": 0.09343990115448472, + "grad_norm": 5.668384552001953, + "learning_rate": 0.00019377324735832786, + "loss": 0.499, + "step": 2420 + }, + { + "epoch": 0.09382601644851153, + "grad_norm": 5.605838775634766, + "learning_rate": 0.0001937475063387261, + "loss": 0.5642, + "step": 2430 + }, + { + "epoch": 0.09421213174253833, + "grad_norm": 5.055904865264893, + "learning_rate": 0.0001937217653191243, + "loss": 0.6225, + "step": 2440 + }, + { + "epoch": 0.09459824703656512, + "grad_norm": 3.1779348850250244, + "learning_rate": 0.00019369602429952253, + "loss": 0.3673, + "step": 2450 + }, + { + "epoch": 0.09498436233059192, + "grad_norm": 2.540269136428833, + "learning_rate": 0.00019367028327992071, + "loss": 0.3499, + "step": 2460 + }, + { + "epoch": 0.09537047762461871, + "grad_norm": 2.284114122390747, + "learning_rate": 0.00019364454226031893, + "loss": 0.5405, + "step": 2470 + }, + { + "epoch": 0.09575659291864551, + "grad_norm": 6.752682685852051, + "learning_rate": 0.00019361880124071717, + "loss": 0.5594, + "step": 2480 + }, + { + "epoch": 0.0961427082126723, + "grad_norm": 2.5659310817718506, + "learning_rate": 
0.00019359306022111535, + "loss": 0.5872, + "step": 2490 + }, + { + "epoch": 0.0965288235066991, + "grad_norm": 4.503110885620117, + "learning_rate": 0.0001935673192015136, + "loss": 0.5171, + "step": 2500 + }, + { + "epoch": 0.0969149388007259, + "grad_norm": 1.7715080976486206, + "learning_rate": 0.00019354157818191178, + "loss": 0.6131, + "step": 2510 + }, + { + "epoch": 0.09730105409475269, + "grad_norm": 4.479000568389893, + "learning_rate": 0.00019351583716231002, + "loss": 0.4396, + "step": 2520 + }, + { + "epoch": 0.09768716938877949, + "grad_norm": 0.9424387812614441, + "learning_rate": 0.00019349009614270823, + "loss": 0.3166, + "step": 2530 + }, + { + "epoch": 0.09807328468280628, + "grad_norm": 3.792689800262451, + "learning_rate": 0.00019346435512310642, + "loss": 0.2591, + "step": 2540 + }, + { + "epoch": 0.09845939997683308, + "grad_norm": 2.9132003784179688, + "learning_rate": 0.00019343861410350466, + "loss": 0.3523, + "step": 2550 + }, + { + "epoch": 0.09884551527085988, + "grad_norm": 1.6421749591827393, + "learning_rate": 0.00019341287308390285, + "loss": 0.2805, + "step": 2560 + }, + { + "epoch": 0.09923163056488668, + "grad_norm": 1.9469813108444214, + "learning_rate": 0.0001933871320643011, + "loss": 0.3929, + "step": 2570 + }, + { + "epoch": 0.09961774585891348, + "grad_norm": 3.081062078475952, + "learning_rate": 0.00019336139104469927, + "loss": 0.569, + "step": 2580 + }, + { + "epoch": 0.10000386115294027, + "grad_norm": 4.728143215179443, + "learning_rate": 0.00019333565002509751, + "loss": 0.3443, + "step": 2590 + }, + { + "epoch": 0.10038997644696707, + "grad_norm": 2.4117422103881836, + "learning_rate": 0.00019330990900549573, + "loss": 0.4492, + "step": 2600 + }, + { + "epoch": 0.10077609174099386, + "grad_norm": 5.794168472290039, + "learning_rate": 0.00019328416798589391, + "loss": 0.5088, + "step": 2610 + }, + { + "epoch": 0.10116220703502066, + "grad_norm": 1.0662094354629517, + "learning_rate": 0.00019325842696629215, + "loss": 0.4683, + "step": 2620 + }, + { + "epoch": 0.10154832232904745, + "grad_norm": 2.13590931892395, + "learning_rate": 0.00019323268594669034, + "loss": 0.5833, + "step": 2630 + }, + { + "epoch": 0.10193443762307425, + "grad_norm": 7.52834415435791, + "learning_rate": 0.00019320694492708858, + "loss": 0.54, + "step": 2640 + }, + { + "epoch": 0.10232055291710104, + "grad_norm": 6.155847072601318, + "learning_rate": 0.00019318120390748677, + "loss": 0.6809, + "step": 2650 + }, + { + "epoch": 0.10270666821112784, + "grad_norm": 6.527890205383301, + "learning_rate": 0.000193155462887885, + "loss": 0.5239, + "step": 2660 + }, + { + "epoch": 0.10309278350515463, + "grad_norm": 3.3918986320495605, + "learning_rate": 0.00019312972186828322, + "loss": 0.4636, + "step": 2670 + }, + { + "epoch": 0.10347889879918143, + "grad_norm": 2.0933191776275635, + "learning_rate": 0.0001931039808486814, + "loss": 0.5288, + "step": 2680 + }, + { + "epoch": 0.10386501409320824, + "grad_norm": 2.4386465549468994, + "learning_rate": 0.00019307823982907965, + "loss": 0.5496, + "step": 2690 + }, + { + "epoch": 0.10425112938723503, + "grad_norm": 2.885315418243408, + "learning_rate": 0.00019305249880947783, + "loss": 0.5928, + "step": 2700 + }, + { + "epoch": 0.10463724468126183, + "grad_norm": 4.986598968505859, + "learning_rate": 0.00019302675778987607, + "loss": 0.3513, + "step": 2710 + }, + { + "epoch": 0.10502335997528862, + "grad_norm": 2.7999277114868164, + "learning_rate": 0.0001930010167702743, + "loss": 0.4484, + "step": 2720 + }, + { + "epoch": 
0.10540947526931542, + "grad_norm": 1.6467676162719727, + "learning_rate": 0.0001929752757506725, + "loss": 0.4729, + "step": 2730 + }, + { + "epoch": 0.10579559056334222, + "grad_norm": 2.168477773666382, + "learning_rate": 0.00019294953473107071, + "loss": 0.3579, + "step": 2740 + }, + { + "epoch": 0.10618170585736901, + "grad_norm": 1.5439807176589966, + "learning_rate": 0.00019292379371146893, + "loss": 0.5715, + "step": 2750 + }, + { + "epoch": 0.1065678211513958, + "grad_norm": 1.2175947427749634, + "learning_rate": 0.00019289805269186714, + "loss": 0.4059, + "step": 2760 + }, + { + "epoch": 0.1069539364454226, + "grad_norm": 2.649782419204712, + "learning_rate": 0.00019287231167226533, + "loss": 0.7864, + "step": 2770 + }, + { + "epoch": 0.1073400517394494, + "grad_norm": 3.698002815246582, + "learning_rate": 0.00019284657065266357, + "loss": 0.5107, + "step": 2780 + }, + { + "epoch": 0.10772616703347619, + "grad_norm": 1.7418729066848755, + "learning_rate": 0.00019282082963306178, + "loss": 0.332, + "step": 2790 + }, + { + "epoch": 0.10811228232750299, + "grad_norm": 2.2264151573181152, + "learning_rate": 0.00019279508861346, + "loss": 0.3944, + "step": 2800 + }, + { + "epoch": 0.10849839762152978, + "grad_norm": 1.3407092094421387, + "learning_rate": 0.0001927693475938582, + "loss": 0.4603, + "step": 2810 + }, + { + "epoch": 0.10888451291555659, + "grad_norm": 0.3719189763069153, + "learning_rate": 0.00019274360657425642, + "loss": 0.496, + "step": 2820 + }, + { + "epoch": 0.10927062820958339, + "grad_norm": 4.728814125061035, + "learning_rate": 0.00019271786555465463, + "loss": 0.4079, + "step": 2830 + }, + { + "epoch": 0.10965674350361018, + "grad_norm": 5.007620334625244, + "learning_rate": 0.00019269212453505285, + "loss": 0.5221, + "step": 2840 + }, + { + "epoch": 0.11004285879763698, + "grad_norm": 2.7476320266723633, + "learning_rate": 0.00019266638351545106, + "loss": 0.3705, + "step": 2850 + }, + { + "epoch": 0.11042897409166377, + "grad_norm": 2.2960126399993896, + "learning_rate": 0.00019264064249584927, + "loss": 0.6656, + "step": 2860 + }, + { + "epoch": 0.11081508938569057, + "grad_norm": 0.9589812755584717, + "learning_rate": 0.0001926149014762475, + "loss": 0.6977, + "step": 2870 + }, + { + "epoch": 0.11120120467971736, + "grad_norm": 2.274984121322632, + "learning_rate": 0.0001925891604566457, + "loss": 0.4237, + "step": 2880 + }, + { + "epoch": 0.11158731997374416, + "grad_norm": 1.8849111795425415, + "learning_rate": 0.00019256341943704391, + "loss": 0.2357, + "step": 2890 + }, + { + "epoch": 0.11197343526777095, + "grad_norm": 2.7264740467071533, + "learning_rate": 0.00019253767841744213, + "loss": 0.3422, + "step": 2900 + }, + { + "epoch": 0.11235955056179775, + "grad_norm": 3.832827568054199, + "learning_rate": 0.00019251193739784034, + "loss": 0.3861, + "step": 2910 + }, + { + "epoch": 0.11274566585582455, + "grad_norm": 2.3612313270568848, + "learning_rate": 0.00019248619637823855, + "loss": 0.3299, + "step": 2920 + }, + { + "epoch": 0.11313178114985134, + "grad_norm": 2.2509396076202393, + "learning_rate": 0.00019246045535863677, + "loss": 0.6027, + "step": 2930 + }, + { + "epoch": 0.11351789644387814, + "grad_norm": 2.7600464820861816, + "learning_rate": 0.00019243471433903498, + "loss": 0.3864, + "step": 2940 + }, + { + "epoch": 0.11390401173790494, + "grad_norm": 5.956289768218994, + "learning_rate": 0.0001924089733194332, + "loss": 0.4669, + "step": 2950 + }, + { + "epoch": 0.11429012703193174, + "grad_norm": 4.651761531829834, + "learning_rate": 
0.0001923832322998314, + "loss": 0.5774, + "step": 2960 + }, + { + "epoch": 0.11467624232595854, + "grad_norm": 1.1770590543746948, + "learning_rate": 0.00019235749128022962, + "loss": 0.3951, + "step": 2970 + }, + { + "epoch": 0.11506235761998533, + "grad_norm": 0.8117956519126892, + "learning_rate": 0.00019233175026062783, + "loss": 0.3919, + "step": 2980 + }, + { + "epoch": 0.11544847291401213, + "grad_norm": 1.318812608718872, + "learning_rate": 0.00019230600924102605, + "loss": 0.2905, + "step": 2990 + }, + { + "epoch": 0.11583458820803892, + "grad_norm": 1.981382966041565, + "learning_rate": 0.00019228026822142426, + "loss": 0.5312, + "step": 3000 + }, + { + "epoch": 0.11622070350206572, + "grad_norm": 1.9083003997802734, + "learning_rate": 0.00019225452720182247, + "loss": 0.3129, + "step": 3010 + }, + { + "epoch": 0.11660681879609251, + "grad_norm": 2.7581653594970703, + "learning_rate": 0.0001922287861822207, + "loss": 0.3954, + "step": 3020 + }, + { + "epoch": 0.11699293409011931, + "grad_norm": 1.065090537071228, + "learning_rate": 0.0001922030451626189, + "loss": 0.2722, + "step": 3030 + }, + { + "epoch": 0.1173790493841461, + "grad_norm": 0.600864827632904, + "learning_rate": 0.0001921773041430171, + "loss": 0.493, + "step": 3040 + }, + { + "epoch": 0.1177651646781729, + "grad_norm": 4.4449052810668945, + "learning_rate": 0.00019215156312341533, + "loss": 0.4982, + "step": 3050 + }, + { + "epoch": 0.1181512799721997, + "grad_norm": 3.34476637840271, + "learning_rate": 0.00019212582210381354, + "loss": 0.3204, + "step": 3060 + }, + { + "epoch": 0.11853739526622649, + "grad_norm": 1.7432445287704468, + "learning_rate": 0.00019210008108421175, + "loss": 0.6601, + "step": 3070 + }, + { + "epoch": 0.1189235105602533, + "grad_norm": 1.908324956893921, + "learning_rate": 0.00019207434006460997, + "loss": 0.5947, + "step": 3080 + }, + { + "epoch": 0.1193096258542801, + "grad_norm": 5.373056888580322, + "learning_rate": 0.00019204859904500818, + "loss": 0.5169, + "step": 3090 + }, + { + "epoch": 0.11969574114830689, + "grad_norm": 0.861535370349884, + "learning_rate": 0.0001920228580254064, + "loss": 0.3829, + "step": 3100 + }, + { + "epoch": 0.12008185644233368, + "grad_norm": 1.2700462341308594, + "learning_rate": 0.0001919971170058046, + "loss": 0.4475, + "step": 3110 + }, + { + "epoch": 0.12046797173636048, + "grad_norm": 2.9959444999694824, + "learning_rate": 0.00019197137598620282, + "loss": 0.4704, + "step": 3120 + }, + { + "epoch": 0.12085408703038727, + "grad_norm": 0.280109167098999, + "learning_rate": 0.00019194563496660103, + "loss": 0.3732, + "step": 3130 + }, + { + "epoch": 0.12124020232441407, + "grad_norm": 0.9746024012565613, + "learning_rate": 0.00019191989394699925, + "loss": 0.4693, + "step": 3140 + }, + { + "epoch": 0.12162631761844087, + "grad_norm": 1.7267721891403198, + "learning_rate": 0.00019189415292739746, + "loss": 0.4509, + "step": 3150 + }, + { + "epoch": 0.12201243291246766, + "grad_norm": 2.1759033203125, + "learning_rate": 0.00019186841190779567, + "loss": 0.428, + "step": 3160 + }, + { + "epoch": 0.12239854820649446, + "grad_norm": 1.270711064338684, + "learning_rate": 0.0001918426708881939, + "loss": 0.4262, + "step": 3170 + }, + { + "epoch": 0.12278466350052125, + "grad_norm": 3.7549123764038086, + "learning_rate": 0.0001918169298685921, + "loss": 0.4758, + "step": 3180 + }, + { + "epoch": 0.12317077879454805, + "grad_norm": 1.6550017595291138, + "learning_rate": 0.0001917911888489903, + "loss": 0.452, + "step": 3190 + }, + { + "epoch": 
0.12355689408857484, + "grad_norm": 3.7151713371276855, + "learning_rate": 0.00019176544782938853, + "loss": 0.4844, + "step": 3200 + }, + { + "epoch": 0.12394300938260165, + "grad_norm": 0.5354440808296204, + "learning_rate": 0.00019173970680978674, + "loss": 0.4432, + "step": 3210 + }, + { + "epoch": 0.12432912467662845, + "grad_norm": 3.2494261264801025, + "learning_rate": 0.00019171396579018498, + "loss": 0.587, + "step": 3220 + }, + { + "epoch": 0.12471523997065524, + "grad_norm": 1.2129877805709839, + "learning_rate": 0.00019168822477058317, + "loss": 0.4662, + "step": 3230 + }, + { + "epoch": 0.12510135526468202, + "grad_norm": 3.723402500152588, + "learning_rate": 0.00019166248375098138, + "loss": 0.5261, + "step": 3240 + }, + { + "epoch": 0.12548747055870882, + "grad_norm": 1.596259593963623, + "learning_rate": 0.0001916367427313796, + "loss": 0.2802, + "step": 3250 + }, + { + "epoch": 0.12587358585273561, + "grad_norm": 5.5710320472717285, + "learning_rate": 0.0001916110017117778, + "loss": 0.5246, + "step": 3260 + }, + { + "epoch": 0.1262597011467624, + "grad_norm": 4.490183353424072, + "learning_rate": 0.00019158526069217602, + "loss": 0.4929, + "step": 3270 + }, + { + "epoch": 0.12664581644078923, + "grad_norm": 2.482572555541992, + "learning_rate": 0.00019155951967257423, + "loss": 0.3677, + "step": 3280 + }, + { + "epoch": 0.12703193173481603, + "grad_norm": 3.348520517349243, + "learning_rate": 0.00019153377865297247, + "loss": 0.6471, + "step": 3290 + }, + { + "epoch": 0.12741804702884282, + "grad_norm": 7.735306262969971, + "learning_rate": 0.00019150803763337066, + "loss": 0.6057, + "step": 3300 + }, + { + "epoch": 0.12780416232286962, + "grad_norm": 2.120649576187134, + "learning_rate": 0.00019148229661376887, + "loss": 0.5408, + "step": 3310 + }, + { + "epoch": 0.12819027761689641, + "grad_norm": 10.259540557861328, + "learning_rate": 0.00019145655559416709, + "loss": 0.4753, + "step": 3320 + }, + { + "epoch": 0.1285763929109232, + "grad_norm": 4.094576358795166, + "learning_rate": 0.0001914308145745653, + "loss": 0.3832, + "step": 3330 + }, + { + "epoch": 0.12896250820495, + "grad_norm": 3.1248559951782227, + "learning_rate": 0.00019140507355496354, + "loss": 0.5631, + "step": 3340 + }, + { + "epoch": 0.1293486234989768, + "grad_norm": 1.2975168228149414, + "learning_rate": 0.00019137933253536173, + "loss": 0.5158, + "step": 3350 + }, + { + "epoch": 0.1297347387930036, + "grad_norm": 3.2515244483947754, + "learning_rate": 0.00019135359151575997, + "loss": 0.4176, + "step": 3360 + }, + { + "epoch": 0.1301208540870304, + "grad_norm": 2.287757396697998, + "learning_rate": 0.00019132785049615815, + "loss": 0.5316, + "step": 3370 + }, + { + "epoch": 0.1305069693810572, + "grad_norm": 8.668967247009277, + "learning_rate": 0.00019130210947655637, + "loss": 0.6653, + "step": 3380 + }, + { + "epoch": 0.13089308467508398, + "grad_norm": 4.751536846160889, + "learning_rate": 0.00019127636845695458, + "loss": 0.4508, + "step": 3390 + }, + { + "epoch": 0.13127919996911078, + "grad_norm": 3.240792751312256, + "learning_rate": 0.0001912506274373528, + "loss": 0.441, + "step": 3400 + }, + { + "epoch": 0.13166531526313757, + "grad_norm": 2.146261215209961, + "learning_rate": 0.00019122488641775103, + "loss": 0.3394, + "step": 3410 + }, + { + "epoch": 0.13205143055716437, + "grad_norm": 2.259693145751953, + "learning_rate": 0.00019119914539814922, + "loss": 0.4348, + "step": 3420 + }, + { + "epoch": 0.13243754585119116, + "grad_norm": 1.8136098384857178, + "learning_rate": 
0.00019117340437854746, + "loss": 0.4441, + "step": 3430 + }, + { + "epoch": 0.13282366114521796, + "grad_norm": 1.7324503660202026, + "learning_rate": 0.00019114766335894565, + "loss": 0.4725, + "step": 3440 + }, + { + "epoch": 0.13320977643924475, + "grad_norm": 4.709383487701416, + "learning_rate": 0.00019112192233934389, + "loss": 0.5383, + "step": 3450 + }, + { + "epoch": 0.13359589173327155, + "grad_norm": 0.3468118906021118, + "learning_rate": 0.00019109618131974207, + "loss": 0.3228, + "step": 3460 + }, + { + "epoch": 0.13398200702729834, + "grad_norm": 6.66448974609375, + "learning_rate": 0.00019107044030014029, + "loss": 0.4128, + "step": 3470 + }, + { + "epoch": 0.13436812232132514, + "grad_norm": 1.6971935033798218, + "learning_rate": 0.00019104469928053853, + "loss": 0.5061, + "step": 3480 + }, + { + "epoch": 0.13475423761535194, + "grad_norm": 0.9180198311805725, + "learning_rate": 0.0001910189582609367, + "loss": 0.4445, + "step": 3490 + }, + { + "epoch": 0.13514035290937873, + "grad_norm": 3.1244235038757324, + "learning_rate": 0.00019099321724133495, + "loss": 0.4099, + "step": 3500 + }, + { + "epoch": 0.13552646820340553, + "grad_norm": 2.7192864418029785, + "learning_rate": 0.00019096747622173314, + "loss": 0.5004, + "step": 3510 + }, + { + "epoch": 0.13591258349743232, + "grad_norm": 2.0905699729919434, + "learning_rate": 0.00019094173520213138, + "loss": 0.4277, + "step": 3520 + }, + { + "epoch": 0.13629869879145912, + "grad_norm": 3.3753092288970947, + "learning_rate": 0.0001909159941825296, + "loss": 0.4129, + "step": 3530 + }, + { + "epoch": 0.13668481408548594, + "grad_norm": 4.199211120605469, + "learning_rate": 0.00019089025316292778, + "loss": 0.5734, + "step": 3540 + }, + { + "epoch": 0.13707092937951273, + "grad_norm": 2.0411245822906494, + "learning_rate": 0.00019086451214332602, + "loss": 0.3018, + "step": 3550 + }, + { + "epoch": 0.13745704467353953, + "grad_norm": 17.236717224121094, + "learning_rate": 0.0001908387711237242, + "loss": 0.4527, + "step": 3560 + }, + { + "epoch": 0.13784315996756633, + "grad_norm": 1.4575644731521606, + "learning_rate": 0.00019081303010412245, + "loss": 0.3773, + "step": 3570 + }, + { + "epoch": 0.13822927526159312, + "grad_norm": 3.926090717315674, + "learning_rate": 0.00019078728908452063, + "loss": 0.5316, + "step": 3580 + }, + { + "epoch": 0.13861539055561992, + "grad_norm": 3.1841864585876465, + "learning_rate": 0.00019076154806491887, + "loss": 0.3705, + "step": 3590 + }, + { + "epoch": 0.1390015058496467, + "grad_norm": 4.08506441116333, + "learning_rate": 0.00019073580704531709, + "loss": 0.4941, + "step": 3600 + }, + { + "epoch": 0.1393876211436735, + "grad_norm": 3.063154458999634, + "learning_rate": 0.00019071006602571527, + "loss": 0.4435, + "step": 3610 + }, + { + "epoch": 0.1397737364377003, + "grad_norm": 6.122230529785156, + "learning_rate": 0.0001906843250061135, + "loss": 0.5067, + "step": 3620 + }, + { + "epoch": 0.1401598517317271, + "grad_norm": 3.3089540004730225, + "learning_rate": 0.0001906585839865117, + "loss": 0.4329, + "step": 3630 + }, + { + "epoch": 0.1405459670257539, + "grad_norm": 1.7245008945465088, + "learning_rate": 0.00019063284296690994, + "loss": 0.4502, + "step": 3640 + }, + { + "epoch": 0.1409320823197807, + "grad_norm": 1.7759568691253662, + "learning_rate": 0.00019060710194730813, + "loss": 0.2379, + "step": 3650 + }, + { + "epoch": 0.14131819761380748, + "grad_norm": 0.432452529668808, + "learning_rate": 0.00019058136092770637, + "loss": 0.4277, + "step": 3660 + }, + { + "epoch": 
0.14170431290783428, + "grad_norm": 3.311952829360962, + "learning_rate": 0.00019055561990810458, + "loss": 0.4558, + "step": 3670 + }, + { + "epoch": 0.14209042820186107, + "grad_norm": 1.9942964315414429, + "learning_rate": 0.00019052987888850277, + "loss": 0.3349, + "step": 3680 + }, + { + "epoch": 0.14247654349588787, + "grad_norm": 6.226424217224121, + "learning_rate": 0.000190504137868901, + "loss": 0.5809, + "step": 3690 + }, + { + "epoch": 0.14286265878991466, + "grad_norm": 6.223634719848633, + "learning_rate": 0.0001904783968492992, + "loss": 0.5788, + "step": 3700 + }, + { + "epoch": 0.14324877408394146, + "grad_norm": 0.7370914220809937, + "learning_rate": 0.00019045265582969743, + "loss": 0.4834, + "step": 3710 + }, + { + "epoch": 0.14363488937796826, + "grad_norm": 1.745880365371704, + "learning_rate": 0.00019042691481009565, + "loss": 0.6995, + "step": 3720 + }, + { + "epoch": 0.14402100467199505, + "grad_norm": 0.8839595913887024, + "learning_rate": 0.00019040117379049386, + "loss": 0.3526, + "step": 3730 + }, + { + "epoch": 0.14440711996602185, + "grad_norm": 1.1224008798599243, + "learning_rate": 0.00019037543277089207, + "loss": 0.3558, + "step": 3740 + }, + { + "epoch": 0.14479323526004864, + "grad_norm": 1.0473041534423828, + "learning_rate": 0.00019034969175129026, + "loss": 0.2465, + "step": 3750 + }, + { + "epoch": 0.14517935055407544, + "grad_norm": 3.83192777633667, + "learning_rate": 0.0001903239507316885, + "loss": 0.4832, + "step": 3760 + }, + { + "epoch": 0.14556546584810223, + "grad_norm": 3.323885440826416, + "learning_rate": 0.00019029820971208669, + "loss": 0.4924, + "step": 3770 + }, + { + "epoch": 0.14595158114212903, + "grad_norm": 3.2334187030792236, + "learning_rate": 0.00019027246869248493, + "loss": 0.5053, + "step": 3780 + }, + { + "epoch": 0.14633769643615582, + "grad_norm": 2.280498743057251, + "learning_rate": 0.00019024672767288314, + "loss": 0.554, + "step": 3790 + }, + { + "epoch": 0.14672381173018265, + "grad_norm": 4.546648979187012, + "learning_rate": 0.00019022098665328135, + "loss": 0.3999, + "step": 3800 + }, + { + "epoch": 0.14710992702420944, + "grad_norm": 0.6303244829177856, + "learning_rate": 0.00019019524563367957, + "loss": 0.4481, + "step": 3810 + }, + { + "epoch": 0.14749604231823624, + "grad_norm": 2.605196475982666, + "learning_rate": 0.00019016950461407775, + "loss": 0.3561, + "step": 3820 + }, + { + "epoch": 0.14788215761226303, + "grad_norm": 3.0562639236450195, + "learning_rate": 0.000190143763594476, + "loss": 0.5903, + "step": 3830 + }, + { + "epoch": 0.14826827290628983, + "grad_norm": 11.164155006408691, + "learning_rate": 0.0001901180225748742, + "loss": 0.4299, + "step": 3840 + }, + { + "epoch": 0.14865438820031662, + "grad_norm": 4.996811866760254, + "learning_rate": 0.00019009228155527242, + "loss": 0.4423, + "step": 3850 + }, + { + "epoch": 0.14904050349434342, + "grad_norm": 2.627272844314575, + "learning_rate": 0.00019006654053567063, + "loss": 0.4875, + "step": 3860 + }, + { + "epoch": 0.1494266187883702, + "grad_norm": 2.6532809734344482, + "learning_rate": 0.00019004079951606885, + "loss": 0.5221, + "step": 3870 + }, + { + "epoch": 0.149812734082397, + "grad_norm": 5.821976661682129, + "learning_rate": 0.00019001505849646706, + "loss": 0.4793, + "step": 3880 + }, + { + "epoch": 0.1501988493764238, + "grad_norm": 2.888029098510742, + "learning_rate": 0.00018998931747686524, + "loss": 0.5784, + "step": 3890 + }, + { + "epoch": 0.1505849646704506, + "grad_norm": 0.9147624969482422, + "learning_rate": 
0.00018996357645726349, + "loss": 0.5533, + "step": 3900 + }, + { + "epoch": 0.1509710799644774, + "grad_norm": 2.6088199615478516, + "learning_rate": 0.0001899378354376617, + "loss": 0.5028, + "step": 3910 + }, + { + "epoch": 0.1513571952585042, + "grad_norm": 3.8208296298980713, + "learning_rate": 0.0001899120944180599, + "loss": 0.4934, + "step": 3920 + }, + { + "epoch": 0.15174331055253099, + "grad_norm": 2.8711328506469727, + "learning_rate": 0.00018988635339845813, + "loss": 0.4417, + "step": 3930 + }, + { + "epoch": 0.15212942584655778, + "grad_norm": 2.922855854034424, + "learning_rate": 0.00018986061237885634, + "loss": 0.5303, + "step": 3940 + }, + { + "epoch": 0.15251554114058458, + "grad_norm": 2.52575945854187, + "learning_rate": 0.00018983487135925455, + "loss": 0.397, + "step": 3950 + }, + { + "epoch": 0.15290165643461137, + "grad_norm": 3.3369996547698975, + "learning_rate": 0.00018980913033965277, + "loss": 0.4172, + "step": 3960 + }, + { + "epoch": 0.15328777172863817, + "grad_norm": 1.7678214311599731, + "learning_rate": 0.00018978338932005098, + "loss": 0.3122, + "step": 3970 + }, + { + "epoch": 0.15367388702266496, + "grad_norm": 3.3293211460113525, + "learning_rate": 0.0001897576483004492, + "loss": 0.6864, + "step": 3980 + }, + { + "epoch": 0.15406000231669176, + "grad_norm": 1.4911530017852783, + "learning_rate": 0.0001897319072808474, + "loss": 0.3888, + "step": 3990 + }, + { + "epoch": 0.15444611761071855, + "grad_norm": 1.4884055852890015, + "learning_rate": 0.00018970616626124562, + "loss": 0.3952, + "step": 4000 + }, + { + "epoch": 0.15483223290474535, + "grad_norm": 1.2745383977890015, + "learning_rate": 0.00018968042524164383, + "loss": 0.3647, + "step": 4010 + }, + { + "epoch": 0.15521834819877214, + "grad_norm": 7.799386024475098, + "learning_rate": 0.00018965468422204205, + "loss": 0.5554, + "step": 4020 + }, + { + "epoch": 0.15560446349279894, + "grad_norm": 2.4778294563293457, + "learning_rate": 0.00018962894320244026, + "loss": 0.662, + "step": 4030 + }, + { + "epoch": 0.15599057878682573, + "grad_norm": 0.8415629267692566, + "learning_rate": 0.00018960320218283847, + "loss": 0.4317, + "step": 4040 + }, + { + "epoch": 0.15637669408085253, + "grad_norm": 4.507715702056885, + "learning_rate": 0.00018957746116323669, + "loss": 0.4512, + "step": 4050 + }, + { + "epoch": 0.15676280937487935, + "grad_norm": 3.5790421962738037, + "learning_rate": 0.0001895517201436349, + "loss": 0.4022, + "step": 4060 + }, + { + "epoch": 0.15714892466890615, + "grad_norm": 3.7266156673431396, + "learning_rate": 0.0001895259791240331, + "loss": 0.3945, + "step": 4070 + }, + { + "epoch": 0.15753503996293294, + "grad_norm": 7.909580230712891, + "learning_rate": 0.00018950023810443133, + "loss": 0.3726, + "step": 4080 + }, + { + "epoch": 0.15792115525695974, + "grad_norm": 2.2439534664154053, + "learning_rate": 0.00018947449708482954, + "loss": 0.4157, + "step": 4090 + }, + { + "epoch": 0.15830727055098653, + "grad_norm": 1.6076972484588623, + "learning_rate": 0.00018944875606522775, + "loss": 0.2363, + "step": 4100 + }, + { + "epoch": 0.15869338584501333, + "grad_norm": 3.7495157718658447, + "learning_rate": 0.00018942301504562596, + "loss": 0.4908, + "step": 4110 + }, + { + "epoch": 0.15907950113904012, + "grad_norm": 0.2942291796207428, + "learning_rate": 0.00018939727402602418, + "loss": 0.4915, + "step": 4120 + }, + { + "epoch": 0.15946561643306692, + "grad_norm": 1.3951829671859741, + "learning_rate": 0.0001893715330064224, + "loss": 0.4585, + "step": 4130 + }, + { + 
"epoch": 0.15985173172709372, + "grad_norm": 0.4405671954154968, + "learning_rate": 0.0001893457919868206, + "loss": 0.2839, + "step": 4140 + }, + { + "epoch": 0.1602378470211205, + "grad_norm": 1.0917588472366333, + "learning_rate": 0.00018932005096721882, + "loss": 0.401, + "step": 4150 + }, + { + "epoch": 0.1606239623151473, + "grad_norm": 1.6183397769927979, + "learning_rate": 0.00018929430994761703, + "loss": 0.5555, + "step": 4160 + }, + { + "epoch": 0.1610100776091741, + "grad_norm": 2.0909583568573, + "learning_rate": 0.00018926856892801524, + "loss": 0.52, + "step": 4170 + }, + { + "epoch": 0.1613961929032009, + "grad_norm": 2.901456356048584, + "learning_rate": 0.00018924282790841346, + "loss": 0.603, + "step": 4180 + }, + { + "epoch": 0.1617823081972277, + "grad_norm": 7.230431079864502, + "learning_rate": 0.00018921708688881167, + "loss": 0.6189, + "step": 4190 + }, + { + "epoch": 0.1621684234912545, + "grad_norm": 6.773900508880615, + "learning_rate": 0.00018919134586920988, + "loss": 0.2494, + "step": 4200 + }, + { + "epoch": 0.16255453878528128, + "grad_norm": 0.8557988405227661, + "learning_rate": 0.0001891656048496081, + "loss": 0.2617, + "step": 4210 + }, + { + "epoch": 0.16294065407930808, + "grad_norm": 1.3747268915176392, + "learning_rate": 0.00018913986383000634, + "loss": 0.4189, + "step": 4220 + }, + { + "epoch": 0.16332676937333487, + "grad_norm": 4.072261810302734, + "learning_rate": 0.00018911412281040452, + "loss": 0.5473, + "step": 4230 + }, + { + "epoch": 0.16371288466736167, + "grad_norm": 2.7210185527801514, + "learning_rate": 0.00018908838179080274, + "loss": 0.3501, + "step": 4240 + }, + { + "epoch": 0.16409899996138846, + "grad_norm": 2.276454448699951, + "learning_rate": 0.00018906264077120095, + "loss": 0.3078, + "step": 4250 + }, + { + "epoch": 0.16448511525541526, + "grad_norm": 3.586536169052124, + "learning_rate": 0.00018903689975159916, + "loss": 0.3856, + "step": 4260 + }, + { + "epoch": 0.16487123054944205, + "grad_norm": 2.199673891067505, + "learning_rate": 0.00018901115873199738, + "loss": 0.3677, + "step": 4270 + }, + { + "epoch": 0.16525734584346885, + "grad_norm": 2.8410561084747314, + "learning_rate": 0.0001889854177123956, + "loss": 0.6101, + "step": 4280 + }, + { + "epoch": 0.16564346113749565, + "grad_norm": 3.9638853073120117, + "learning_rate": 0.00018895967669279383, + "loss": 0.5066, + "step": 4290 + }, + { + "epoch": 0.16602957643152244, + "grad_norm": 1.2070738077163696, + "learning_rate": 0.00018893393567319202, + "loss": 0.385, + "step": 4300 + }, + { + "epoch": 0.16641569172554924, + "grad_norm": 1.0531187057495117, + "learning_rate": 0.00018890819465359023, + "loss": 0.3608, + "step": 4310 + }, + { + "epoch": 0.16680180701957603, + "grad_norm": 1.1998246908187866, + "learning_rate": 0.00018888245363398844, + "loss": 0.4624, + "step": 4320 + }, + { + "epoch": 0.16718792231360285, + "grad_norm": 2.126063346862793, + "learning_rate": 0.00018885671261438666, + "loss": 0.6076, + "step": 4330 + }, + { + "epoch": 0.16757403760762965, + "grad_norm": 1.5854765176773071, + "learning_rate": 0.0001888309715947849, + "loss": 0.4817, + "step": 4340 + }, + { + "epoch": 0.16796015290165645, + "grad_norm": 6.630712509155273, + "learning_rate": 0.00018880523057518308, + "loss": 0.4098, + "step": 4350 + }, + { + "epoch": 0.16834626819568324, + "grad_norm": 2.060789108276367, + "learning_rate": 0.00018877948955558132, + "loss": 0.3523, + "step": 4360 + }, + { + "epoch": 0.16873238348971004, + "grad_norm": 2.2551252841949463, + "learning_rate": 
0.0001887537485359795, + "loss": 0.3095, + "step": 4370 + }, + { + "epoch": 0.16911849878373683, + "grad_norm": 3.736640453338623, + "learning_rate": 0.00018872800751637772, + "loss": 0.3812, + "step": 4380 + }, + { + "epoch": 0.16950461407776363, + "grad_norm": 1.9971100091934204, + "learning_rate": 0.00018870226649677594, + "loss": 0.3422, + "step": 4390 + }, + { + "epoch": 0.16989072937179042, + "grad_norm": 3.6577255725860596, + "learning_rate": 0.00018867652547717415, + "loss": 0.7857, + "step": 4400 + }, + { + "epoch": 0.17027684466581722, + "grad_norm": 2.166538715362549, + "learning_rate": 0.0001886507844575724, + "loss": 0.5596, + "step": 4410 + }, + { + "epoch": 0.170662959959844, + "grad_norm": 2.0177736282348633, + "learning_rate": 0.00018862504343797058, + "loss": 0.3197, + "step": 4420 + }, + { + "epoch": 0.1710490752538708, + "grad_norm": 0.29447808861732483, + "learning_rate": 0.00018859930241836882, + "loss": 0.5284, + "step": 4430 + }, + { + "epoch": 0.1714351905478976, + "grad_norm": 2.17985200881958, + "learning_rate": 0.000188573561398767, + "loss": 0.5188, + "step": 4440 + }, + { + "epoch": 0.1718213058419244, + "grad_norm": 2.87449049949646, + "learning_rate": 0.00018854782037916522, + "loss": 0.554, + "step": 4450 + }, + { + "epoch": 0.1722074211359512, + "grad_norm": 1.8865265846252441, + "learning_rate": 0.00018852207935956343, + "loss": 0.4338, + "step": 4460 + }, + { + "epoch": 0.172593536429978, + "grad_norm": 2.042337417602539, + "learning_rate": 0.00018849633833996164, + "loss": 0.3924, + "step": 4470 + }, + { + "epoch": 0.17297965172400478, + "grad_norm": 1.4254354238510132, + "learning_rate": 0.00018847059732035988, + "loss": 0.2607, + "step": 4480 + }, + { + "epoch": 0.17336576701803158, + "grad_norm": 2.611560344696045, + "learning_rate": 0.00018844485630075807, + "loss": 0.4967, + "step": 4490 + }, + { + "epoch": 0.17375188231205838, + "grad_norm": 1.1008936166763306, + "learning_rate": 0.0001884191152811563, + "loss": 0.4109, + "step": 4500 + }, + { + "epoch": 0.17413799760608517, + "grad_norm": 0.8280178308486938, + "learning_rate": 0.0001883933742615545, + "loss": 0.6632, + "step": 4510 + }, + { + "epoch": 0.17452411290011197, + "grad_norm": 2.226020336151123, + "learning_rate": 0.0001883676332419527, + "loss": 0.4777, + "step": 4520 + }, + { + "epoch": 0.17491022819413876, + "grad_norm": 1.6062042713165283, + "learning_rate": 0.00018834189222235095, + "loss": 0.4671, + "step": 4530 + }, + { + "epoch": 0.17529634348816556, + "grad_norm": 3.9853012561798096, + "learning_rate": 0.00018831615120274914, + "loss": 0.4843, + "step": 4540 + }, + { + "epoch": 0.17568245878219235, + "grad_norm": 0.30268657207489014, + "learning_rate": 0.00018829041018314738, + "loss": 0.3922, + "step": 4550 + }, + { + "epoch": 0.17606857407621915, + "grad_norm": 6.283960342407227, + "learning_rate": 0.00018826466916354556, + "loss": 0.6106, + "step": 4560 + }, + { + "epoch": 0.17645468937024594, + "grad_norm": 1.4164658784866333, + "learning_rate": 0.0001882389281439438, + "loss": 0.3014, + "step": 4570 + }, + { + "epoch": 0.17684080466427274, + "grad_norm": 4.847668170928955, + "learning_rate": 0.000188213187124342, + "loss": 0.5216, + "step": 4580 + }, + { + "epoch": 0.17722691995829956, + "grad_norm": 3.683180332183838, + "learning_rate": 0.0001881874461047402, + "loss": 0.3268, + "step": 4590 + }, + { + "epoch": 0.17761303525232636, + "grad_norm": 1.053144097328186, + "learning_rate": 0.00018816170508513844, + "loss": 0.5229, + "step": 4600 + }, + { + "epoch": 
0.17799915054635315, + "grad_norm": 0.29438719153404236, + "learning_rate": 0.00018813596406553663, + "loss": 0.4523, + "step": 4610 + }, + { + "epoch": 0.17838526584037995, + "grad_norm": 1.5682024955749512, + "learning_rate": 0.00018811022304593487, + "loss": 0.4367, + "step": 4620 + }, + { + "epoch": 0.17877138113440674, + "grad_norm": 1.462189793586731, + "learning_rate": 0.00018808448202633306, + "loss": 0.5086, + "step": 4630 + }, + { + "epoch": 0.17915749642843354, + "grad_norm": 0.7927210927009583, + "learning_rate": 0.0001880587410067313, + "loss": 0.4654, + "step": 4640 + }, + { + "epoch": 0.17954361172246033, + "grad_norm": 1.4543548822402954, + "learning_rate": 0.0001880329999871295, + "loss": 0.5005, + "step": 4650 + }, + { + "epoch": 0.17992972701648713, + "grad_norm": 1.5814868211746216, + "learning_rate": 0.00018800725896752772, + "loss": 0.4127, + "step": 4660 + }, + { + "epoch": 0.18031584231051392, + "grad_norm": 1.9244798421859741, + "learning_rate": 0.00018798151794792594, + "loss": 0.3796, + "step": 4670 + }, + { + "epoch": 0.18070195760454072, + "grad_norm": 1.8725996017456055, + "learning_rate": 0.00018795577692832412, + "loss": 0.4112, + "step": 4680 + }, + { + "epoch": 0.18108807289856751, + "grad_norm": 2.8138442039489746, + "learning_rate": 0.00018793003590872236, + "loss": 0.6117, + "step": 4690 + }, + { + "epoch": 0.1814741881925943, + "grad_norm": 3.4465060234069824, + "learning_rate": 0.00018790429488912055, + "loss": 0.4223, + "step": 4700 + }, + { + "epoch": 0.1818603034866211, + "grad_norm": 4.431785583496094, + "learning_rate": 0.0001878785538695188, + "loss": 0.54, + "step": 4710 + }, + { + "epoch": 0.1822464187806479, + "grad_norm": 6.951846599578857, + "learning_rate": 0.000187852812849917, + "loss": 0.3702, + "step": 4720 + }, + { + "epoch": 0.1826325340746747, + "grad_norm": 1.0188024044036865, + "learning_rate": 0.00018782707183031522, + "loss": 0.2715, + "step": 4730 + }, + { + "epoch": 0.1830186493687015, + "grad_norm": 0.3875834047794342, + "learning_rate": 0.00018780133081071343, + "loss": 0.4208, + "step": 4740 + }, + { + "epoch": 0.1834047646627283, + "grad_norm": 2.7475740909576416, + "learning_rate": 0.00018777558979111162, + "loss": 0.3613, + "step": 4750 + }, + { + "epoch": 0.18379087995675508, + "grad_norm": 2.553227186203003, + "learning_rate": 0.00018774984877150986, + "loss": 0.4781, + "step": 4760 + }, + { + "epoch": 0.18417699525078188, + "grad_norm": 2.005154609680176, + "learning_rate": 0.00018772410775190804, + "loss": 0.3805, + "step": 4770 + }, + { + "epoch": 0.18456311054480867, + "grad_norm": 0.7380127310752869, + "learning_rate": 0.00018769836673230628, + "loss": 0.3679, + "step": 4780 + }, + { + "epoch": 0.18494922583883547, + "grad_norm": 3.6547505855560303, + "learning_rate": 0.0001876726257127045, + "loss": 0.4502, + "step": 4790 + }, + { + "epoch": 0.18533534113286226, + "grad_norm": 2.232980728149414, + "learning_rate": 0.0001876468846931027, + "loss": 0.4628, + "step": 4800 + }, + { + "epoch": 0.18572145642688906, + "grad_norm": 6.521275043487549, + "learning_rate": 0.00018762114367350092, + "loss": 0.4765, + "step": 4810 + }, + { + "epoch": 0.18610757172091585, + "grad_norm": 1.6310979127883911, + "learning_rate": 0.0001875954026538991, + "loss": 0.4039, + "step": 4820 + }, + { + "epoch": 0.18649368701494265, + "grad_norm": 1.1469775438308716, + "learning_rate": 0.00018756966163429735, + "loss": 0.4195, + "step": 4830 + }, + { + "epoch": 0.18687980230896944, + "grad_norm": 0.7688332200050354, + "learning_rate": 
0.00018754392061469556, + "loss": 0.264, + "step": 4840 + }, + { + "epoch": 0.18726591760299627, + "grad_norm": 3.3422155380249023, + "learning_rate": 0.00018751817959509378, + "loss": 0.5275, + "step": 4850 + }, + { + "epoch": 0.18765203289702306, + "grad_norm": 1.517876386642456, + "learning_rate": 0.000187492438575492, + "loss": 0.4567, + "step": 4860 + }, + { + "epoch": 0.18803814819104986, + "grad_norm": 1.2196050882339478, + "learning_rate": 0.0001874666975558902, + "loss": 0.4231, + "step": 4870 + }, + { + "epoch": 0.18842426348507665, + "grad_norm": 1.3325402736663818, + "learning_rate": 0.00018744095653628842, + "loss": 0.6325, + "step": 4880 + }, + { + "epoch": 0.18881037877910345, + "grad_norm": 6.098769664764404, + "learning_rate": 0.0001874152155166866, + "loss": 0.576, + "step": 4890 + }, + { + "epoch": 0.18919649407313024, + "grad_norm": 2.602363348007202, + "learning_rate": 0.00018738947449708484, + "loss": 0.3237, + "step": 4900 + }, + { + "epoch": 0.18958260936715704, + "grad_norm": 0.970106303691864, + "learning_rate": 0.00018736373347748306, + "loss": 0.409, + "step": 4910 + }, + { + "epoch": 0.18996872466118384, + "grad_norm": 3.2592012882232666, + "learning_rate": 0.00018733799245788127, + "loss": 0.408, + "step": 4920 + }, + { + "epoch": 0.19035483995521063, + "grad_norm": 0.31132128834724426, + "learning_rate": 0.00018731225143827948, + "loss": 0.2446, + "step": 4930 + }, + { + "epoch": 0.19074095524923743, + "grad_norm": 5.321741104125977, + "learning_rate": 0.0001872865104186777, + "loss": 0.4604, + "step": 4940 + }, + { + "epoch": 0.19112707054326422, + "grad_norm": 1.1165122985839844, + "learning_rate": 0.0001872607693990759, + "loss": 0.3605, + "step": 4950 + }, + { + "epoch": 0.19151318583729102, + "grad_norm": 0.8274110555648804, + "learning_rate": 0.0001872350283794741, + "loss": 0.2669, + "step": 4960 + }, + { + "epoch": 0.1918993011313178, + "grad_norm": 2.8668346405029297, + "learning_rate": 0.00018720928735987234, + "loss": 0.4055, + "step": 4970 + }, + { + "epoch": 0.1922854164253446, + "grad_norm": 3.411841630935669, + "learning_rate": 0.00018718354634027055, + "loss": 0.5989, + "step": 4980 + }, + { + "epoch": 0.1926715317193714, + "grad_norm": 0.18740829825401306, + "learning_rate": 0.00018715780532066876, + "loss": 0.3805, + "step": 4990 + }, + { + "epoch": 0.1930576470133982, + "grad_norm": 1.0823473930358887, + "learning_rate": 0.00018713206430106698, + "loss": 0.2854, + "step": 5000 + }, + { + "epoch": 0.193443762307425, + "grad_norm": 1.9816405773162842, + "learning_rate": 0.0001871063232814652, + "loss": 0.3771, + "step": 5010 + }, + { + "epoch": 0.1938298776014518, + "grad_norm": 5.267081260681152, + "learning_rate": 0.0001870805822618634, + "loss": 0.3085, + "step": 5020 + }, + { + "epoch": 0.19421599289547858, + "grad_norm": 5.706038475036621, + "learning_rate": 0.00018705484124226162, + "loss": 0.484, + "step": 5030 + }, + { + "epoch": 0.19460210818950538, + "grad_norm": 1.3357723951339722, + "learning_rate": 0.00018702910022265983, + "loss": 0.2161, + "step": 5040 + }, + { + "epoch": 0.19498822348353217, + "grad_norm": 1.0626447200775146, + "learning_rate": 0.00018700335920305804, + "loss": 0.3491, + "step": 5050 + }, + { + "epoch": 0.19537433877755897, + "grad_norm": 2.441228151321411, + "learning_rate": 0.00018697761818345626, + "loss": 0.3975, + "step": 5060 + }, + { + "epoch": 0.19576045407158577, + "grad_norm": 2.6739327907562256, + "learning_rate": 0.00018695187716385447, + "loss": 0.3418, + "step": 5070 + }, + { + "epoch": 
0.19614656936561256, + "grad_norm": 2.3216919898986816, + "learning_rate": 0.00018692613614425268, + "loss": 0.6265, + "step": 5080 + }, + { + "epoch": 0.19653268465963936, + "grad_norm": 3.9119021892547607, + "learning_rate": 0.0001869003951246509, + "loss": 0.2982, + "step": 5090 + }, + { + "epoch": 0.19691879995366615, + "grad_norm": 5.744061470031738, + "learning_rate": 0.0001868746541050491, + "loss": 0.4048, + "step": 5100 + }, + { + "epoch": 0.19730491524769297, + "grad_norm": 8.512910842895508, + "learning_rate": 0.00018684891308544732, + "loss": 0.3598, + "step": 5110 + }, + { + "epoch": 0.19769103054171977, + "grad_norm": 1.6382296085357666, + "learning_rate": 0.00018682317206584554, + "loss": 0.2121, + "step": 5120 + }, + { + "epoch": 0.19807714583574657, + "grad_norm": 2.1593070030212402, + "learning_rate": 0.00018679743104624375, + "loss": 0.4914, + "step": 5130 + }, + { + "epoch": 0.19846326112977336, + "grad_norm": 3.067112445831299, + "learning_rate": 0.00018677169002664196, + "loss": 0.4171, + "step": 5140 + }, + { + "epoch": 0.19884937642380016, + "grad_norm": 1.9954415559768677, + "learning_rate": 0.00018674594900704018, + "loss": 0.5161, + "step": 5150 + }, + { + "epoch": 0.19923549171782695, + "grad_norm": 2.793346643447876, + "learning_rate": 0.0001867202079874384, + "loss": 0.4159, + "step": 5160 + }, + { + "epoch": 0.19962160701185375, + "grad_norm": 4.087403774261475, + "learning_rate": 0.0001866944669678366, + "loss": 0.3339, + "step": 5170 + }, + { + "epoch": 0.20000772230588054, + "grad_norm": 2.10153865814209, + "learning_rate": 0.00018666872594823482, + "loss": 0.4352, + "step": 5180 + }, + { + "epoch": 0.20039383759990734, + "grad_norm": 2.947117805480957, + "learning_rate": 0.00018664298492863303, + "loss": 0.305, + "step": 5190 + }, + { + "epoch": 0.20077995289393413, + "grad_norm": 1.2496302127838135, + "learning_rate": 0.00018661724390903124, + "loss": 0.4578, + "step": 5200 + }, + { + "epoch": 0.20116606818796093, + "grad_norm": 0.5246118903160095, + "learning_rate": 0.00018659150288942946, + "loss": 0.7531, + "step": 5210 + }, + { + "epoch": 0.20155218348198772, + "grad_norm": 4.099668502807617, + "learning_rate": 0.00018656576186982767, + "loss": 0.3809, + "step": 5220 + }, + { + "epoch": 0.20193829877601452, + "grad_norm": 4.237419128417969, + "learning_rate": 0.00018654002085022588, + "loss": 0.3169, + "step": 5230 + }, + { + "epoch": 0.20232441407004131, + "grad_norm": 1.6228466033935547, + "learning_rate": 0.0001865142798306241, + "loss": 0.5832, + "step": 5240 + }, + { + "epoch": 0.2027105293640681, + "grad_norm": 4.567386627197266, + "learning_rate": 0.0001864885388110223, + "loss": 0.2177, + "step": 5250 + }, + { + "epoch": 0.2030966446580949, + "grad_norm": 1.4991040229797363, + "learning_rate": 0.00018646279779142052, + "loss": 0.3851, + "step": 5260 + }, + { + "epoch": 0.2034827599521217, + "grad_norm": 2.127082586288452, + "learning_rate": 0.00018643705677181874, + "loss": 0.4721, + "step": 5270 + }, + { + "epoch": 0.2038688752461485, + "grad_norm": 2.9149303436279297, + "learning_rate": 0.00018641131575221695, + "loss": 0.2556, + "step": 5280 + }, + { + "epoch": 0.2042549905401753, + "grad_norm": 0.06375914812088013, + "learning_rate": 0.00018638557473261516, + "loss": 0.3599, + "step": 5290 + }, + { + "epoch": 0.20464110583420209, + "grad_norm": 3.338331699371338, + "learning_rate": 0.00018635983371301338, + "loss": 0.4062, + "step": 5300 + }, + { + "epoch": 0.20502722112822888, + "grad_norm": 4.006681442260742, + "learning_rate": 
0.0001863340926934116, + "loss": 0.4538, + "step": 5310 + }, + { + "epoch": 0.20541333642225568, + "grad_norm": 1.1406009197235107, + "learning_rate": 0.0001863083516738098, + "loss": 0.8432, + "step": 5320 + }, + { + "epoch": 0.20579945171628247, + "grad_norm": 9.281437873840332, + "learning_rate": 0.00018628261065420802, + "loss": 0.4538, + "step": 5330 + }, + { + "epoch": 0.20618556701030927, + "grad_norm": 3.1884214878082275, + "learning_rate": 0.00018625686963460626, + "loss": 0.3361, + "step": 5340 + }, + { + "epoch": 0.20657168230433606, + "grad_norm": 1.4311977624893188, + "learning_rate": 0.00018623112861500444, + "loss": 0.5519, + "step": 5350 + }, + { + "epoch": 0.20695779759836286, + "grad_norm": 3.574361801147461, + "learning_rate": 0.00018620538759540266, + "loss": 0.518, + "step": 5360 + }, + { + "epoch": 0.20734391289238968, + "grad_norm": 3.0186073780059814, + "learning_rate": 0.00018617964657580087, + "loss": 0.4204, + "step": 5370 + }, + { + "epoch": 0.20773002818641648, + "grad_norm": 2.832859754562378, + "learning_rate": 0.00018615390555619908, + "loss": 0.5736, + "step": 5380 + }, + { + "epoch": 0.20811614348044327, + "grad_norm": 2.2258200645446777, + "learning_rate": 0.0001861281645365973, + "loss": 0.8194, + "step": 5390 + }, + { + "epoch": 0.20850225877447007, + "grad_norm": 1.0975148677825928, + "learning_rate": 0.0001861024235169955, + "loss": 0.5235, + "step": 5400 + }, + { + "epoch": 0.20888837406849686, + "grad_norm": 2.597329616546631, + "learning_rate": 0.00018607668249739375, + "loss": 0.2798, + "step": 5410 + }, + { + "epoch": 0.20927448936252366, + "grad_norm": 1.3780876398086548, + "learning_rate": 0.00018605094147779194, + "loss": 0.4046, + "step": 5420 + }, + { + "epoch": 0.20966060465655045, + "grad_norm": 2.409886598587036, + "learning_rate": 0.00018602520045819018, + "loss": 0.3243, + "step": 5430 + }, + { + "epoch": 0.21004671995057725, + "grad_norm": 1.0368077754974365, + "learning_rate": 0.00018599945943858836, + "loss": 0.4469, + "step": 5440 + }, + { + "epoch": 0.21043283524460404, + "grad_norm": 2.961658000946045, + "learning_rate": 0.00018597371841898658, + "loss": 0.5104, + "step": 5450 + }, + { + "epoch": 0.21081895053863084, + "grad_norm": 1.1599836349487305, + "learning_rate": 0.00018594797739938482, + "loss": 0.3422, + "step": 5460 + }, + { + "epoch": 0.21120506583265763, + "grad_norm": 3.293682336807251, + "learning_rate": 0.000185922236379783, + "loss": 0.3556, + "step": 5470 + }, + { + "epoch": 0.21159118112668443, + "grad_norm": 1.6923863887786865, + "learning_rate": 0.00018589649536018124, + "loss": 0.3084, + "step": 5480 + }, + { + "epoch": 0.21197729642071123, + "grad_norm": 3.7289531230926514, + "learning_rate": 0.00018587075434057943, + "loss": 0.4668, + "step": 5490 + }, + { + "epoch": 0.21236341171473802, + "grad_norm": 1.3744993209838867, + "learning_rate": 0.00018584501332097767, + "loss": 0.2984, + "step": 5500 + }, + { + "epoch": 0.21274952700876482, + "grad_norm": 1.4377775192260742, + "learning_rate": 0.00018581927230137586, + "loss": 0.2622, + "step": 5510 + }, + { + "epoch": 0.2131356423027916, + "grad_norm": 4.957859992980957, + "learning_rate": 0.00018579353128177407, + "loss": 0.5561, + "step": 5520 + }, + { + "epoch": 0.2135217575968184, + "grad_norm": 3.2645647525787354, + "learning_rate": 0.0001857677902621723, + "loss": 0.59, + "step": 5530 + }, + { + "epoch": 0.2139078728908452, + "grad_norm": 1.1365091800689697, + "learning_rate": 0.0001857420492425705, + "loss": 0.443, + "step": 5540 + }, + { + "epoch": 
0.214293988184872, + "grad_norm": 3.187476396560669, + "learning_rate": 0.00018571630822296874, + "loss": 0.2612, + "step": 5550 + }, + { + "epoch": 0.2146801034788988, + "grad_norm": 2.6851940155029297, + "learning_rate": 0.00018569056720336692, + "loss": 0.4543, + "step": 5560 + }, + { + "epoch": 0.2150662187729256, + "grad_norm": 2.2613587379455566, + "learning_rate": 0.00018566482618376516, + "loss": 0.3185, + "step": 5570 + }, + { + "epoch": 0.21545233406695238, + "grad_norm": 1.292475700378418, + "learning_rate": 0.00018563908516416335, + "loss": 0.2794, + "step": 5580 + }, + { + "epoch": 0.21583844936097918, + "grad_norm": 2.0878446102142334, + "learning_rate": 0.00018561334414456156, + "loss": 0.3908, + "step": 5590 + }, + { + "epoch": 0.21622456465500597, + "grad_norm": 8.058819770812988, + "learning_rate": 0.0001855876031249598, + "loss": 0.6282, + "step": 5600 + }, + { + "epoch": 0.21661067994903277, + "grad_norm": 1.8231629133224487, + "learning_rate": 0.000185561862105358, + "loss": 0.4973, + "step": 5610 + }, + { + "epoch": 0.21699679524305956, + "grad_norm": 3.947242259979248, + "learning_rate": 0.00018553612108575623, + "loss": 0.4598, + "step": 5620 + }, + { + "epoch": 0.21738291053708636, + "grad_norm": 3.3258073329925537, + "learning_rate": 0.00018551038006615442, + "loss": 0.5266, + "step": 5630 + }, + { + "epoch": 0.21776902583111318, + "grad_norm": 2.301485300064087, + "learning_rate": 0.00018548463904655266, + "loss": 0.4339, + "step": 5640 + }, + { + "epoch": 0.21815514112513998, + "grad_norm": 4.4706878662109375, + "learning_rate": 0.00018545889802695087, + "loss": 0.5233, + "step": 5650 + }, + { + "epoch": 0.21854125641916677, + "grad_norm": 1.1203399896621704, + "learning_rate": 0.00018543315700734906, + "loss": 0.4547, + "step": 5660 + }, + { + "epoch": 0.21892737171319357, + "grad_norm": 0.3744584918022156, + "learning_rate": 0.0001854074159877473, + "loss": 0.2524, + "step": 5670 + }, + { + "epoch": 0.21931348700722036, + "grad_norm": 2.7888870239257812, + "learning_rate": 0.00018538167496814548, + "loss": 0.411, + "step": 5680 + }, + { + "epoch": 0.21969960230124716, + "grad_norm": 4.9972429275512695, + "learning_rate": 0.00018535593394854372, + "loss": 0.6359, + "step": 5690 + }, + { + "epoch": 0.22008571759527396, + "grad_norm": 1.1321420669555664, + "learning_rate": 0.0001853301929289419, + "loss": 0.4068, + "step": 5700 + }, + { + "epoch": 0.22047183288930075, + "grad_norm": 1.9291785955429077, + "learning_rate": 0.00018530445190934015, + "loss": 0.5428, + "step": 5710 + }, + { + "epoch": 0.22085794818332755, + "grad_norm": 0.8663263916969299, + "learning_rate": 0.00018527871088973836, + "loss": 0.4662, + "step": 5720 + }, + { + "epoch": 0.22124406347735434, + "grad_norm": 3.039782762527466, + "learning_rate": 0.00018525296987013655, + "loss": 0.3045, + "step": 5730 + }, + { + "epoch": 0.22163017877138114, + "grad_norm": 1.3552179336547852, + "learning_rate": 0.0001852272288505348, + "loss": 0.3411, + "step": 5740 + }, + { + "epoch": 0.22201629406540793, + "grad_norm": 1.4136948585510254, + "learning_rate": 0.00018520148783093298, + "loss": 0.5517, + "step": 5750 + }, + { + "epoch": 0.22240240935943473, + "grad_norm": 2.463942766189575, + "learning_rate": 0.00018517574681133122, + "loss": 0.4681, + "step": 5760 + }, + { + "epoch": 0.22278852465346152, + "grad_norm": 0.9063917994499207, + "learning_rate": 0.0001851500057917294, + "loss": 0.4537, + "step": 5770 + }, + { + "epoch": 0.22317463994748832, + "grad_norm": 2.352678060531616, + "learning_rate": 
0.00018512426477212764, + "loss": 0.4245, + "step": 5780 + }, + { + "epoch": 0.2235607552415151, + "grad_norm": 2.0424869060516357, + "learning_rate": 0.00018509852375252586, + "loss": 0.2892, + "step": 5790 + }, + { + "epoch": 0.2239468705355419, + "grad_norm": 2.7604904174804688, + "learning_rate": 0.00018507278273292404, + "loss": 0.3606, + "step": 5800 + }, + { + "epoch": 0.2243329858295687, + "grad_norm": 2.827798366546631, + "learning_rate": 0.00018504704171332228, + "loss": 0.3212, + "step": 5810 + }, + { + "epoch": 0.2247191011235955, + "grad_norm": 3.1988680362701416, + "learning_rate": 0.00018502130069372047, + "loss": 0.5649, + "step": 5820 + }, + { + "epoch": 0.2251052164176223, + "grad_norm": 1.8216092586517334, + "learning_rate": 0.0001849955596741187, + "loss": 0.2871, + "step": 5830 + }, + { + "epoch": 0.2254913317116491, + "grad_norm": 2.7595627307891846, + "learning_rate": 0.00018496981865451692, + "loss": 0.665, + "step": 5840 + }, + { + "epoch": 0.22587744700567589, + "grad_norm": 1.2395098209381104, + "learning_rate": 0.00018494407763491514, + "loss": 0.2504, + "step": 5850 + }, + { + "epoch": 0.22626356229970268, + "grad_norm": 0.6991098523139954, + "learning_rate": 0.00018491833661531335, + "loss": 0.2263, + "step": 5860 + }, + { + "epoch": 0.22664967759372948, + "grad_norm": 11.053647994995117, + "learning_rate": 0.00018489259559571156, + "loss": 0.5919, + "step": 5870 + }, + { + "epoch": 0.22703579288775627, + "grad_norm": 2.8663880825042725, + "learning_rate": 0.00018486685457610978, + "loss": 0.3399, + "step": 5880 + }, + { + "epoch": 0.22742190818178307, + "grad_norm": 1.4995262622833252, + "learning_rate": 0.00018484111355650796, + "loss": 0.4474, + "step": 5890 + }, + { + "epoch": 0.2278080234758099, + "grad_norm": 3.275681972503662, + "learning_rate": 0.0001848153725369062, + "loss": 0.4347, + "step": 5900 + }, + { + "epoch": 0.22819413876983669, + "grad_norm": 14.772253036499023, + "learning_rate": 0.00018478963151730442, + "loss": 0.3705, + "step": 5910 + }, + { + "epoch": 0.22858025406386348, + "grad_norm": 3.184976816177368, + "learning_rate": 0.00018476389049770263, + "loss": 0.3866, + "step": 5920 + }, + { + "epoch": 0.22896636935789028, + "grad_norm": 2.310765504837036, + "learning_rate": 0.00018473814947810084, + "loss": 0.2717, + "step": 5930 + }, + { + "epoch": 0.22935248465191707, + "grad_norm": 2.061189889907837, + "learning_rate": 0.00018471240845849906, + "loss": 0.2054, + "step": 5940 + }, + { + "epoch": 0.22973859994594387, + "grad_norm": 10.815469741821289, + "learning_rate": 0.00018468666743889727, + "loss": 0.5868, + "step": 5950 + }, + { + "epoch": 0.23012471523997066, + "grad_norm": 1.7080497741699219, + "learning_rate": 0.00018466092641929548, + "loss": 0.236, + "step": 5960 + }, + { + "epoch": 0.23051083053399746, + "grad_norm": 7.389080047607422, + "learning_rate": 0.0001846351853996937, + "loss": 0.2752, + "step": 5970 + }, + { + "epoch": 0.23089694582802425, + "grad_norm": 2.9860422611236572, + "learning_rate": 0.0001846094443800919, + "loss": 0.3436, + "step": 5980 + }, + { + "epoch": 0.23128306112205105, + "grad_norm": 13.12328815460205, + "learning_rate": 0.00018458370336049012, + "loss": 0.3952, + "step": 5990 + }, + { + "epoch": 0.23166917641607784, + "grad_norm": 3.7130823135375977, + "learning_rate": 0.00018455796234088834, + "loss": 0.3658, + "step": 6000 + }, + { + "epoch": 0.23205529171010464, + "grad_norm": 1.8329843282699585, + "learning_rate": 0.00018453222132128655, + "loss": 0.4172, + "step": 6010 + }, + { + "epoch": 
0.23244140700413143, + "grad_norm": 1.3583799600601196, + "learning_rate": 0.00018450648030168476, + "loss": 0.4005, + "step": 6020 + }, + { + "epoch": 0.23282752229815823, + "grad_norm": 3.1711816787719727, + "learning_rate": 0.00018448073928208297, + "loss": 0.3674, + "step": 6030 + }, + { + "epoch": 0.23321363759218502, + "grad_norm": 1.576937198638916, + "learning_rate": 0.0001844549982624812, + "loss": 0.3444, + "step": 6040 + }, + { + "epoch": 0.23359975288621182, + "grad_norm": 3.922267436981201, + "learning_rate": 0.0001844292572428794, + "loss": 0.5939, + "step": 6050 + }, + { + "epoch": 0.23398586818023862, + "grad_norm": 2.9851067066192627, + "learning_rate": 0.00018440351622327761, + "loss": 0.2387, + "step": 6060 + }, + { + "epoch": 0.2343719834742654, + "grad_norm": 2.1216888427734375, + "learning_rate": 0.00018437777520367583, + "loss": 0.3836, + "step": 6070 + }, + { + "epoch": 0.2347580987682922, + "grad_norm": 2.9788095951080322, + "learning_rate": 0.00018435203418407404, + "loss": 0.474, + "step": 6080 + }, + { + "epoch": 0.235144214062319, + "grad_norm": 1.0204919576644897, + "learning_rate": 0.00018432629316447225, + "loss": 0.2837, + "step": 6090 + }, + { + "epoch": 0.2355303293563458, + "grad_norm": 0.9091696739196777, + "learning_rate": 0.00018430055214487047, + "loss": 0.6203, + "step": 6100 + }, + { + "epoch": 0.2359164446503726, + "grad_norm": 0.25899162888526917, + "learning_rate": 0.00018427481112526868, + "loss": 0.4759, + "step": 6110 + }, + { + "epoch": 0.2363025599443994, + "grad_norm": 1.8625538349151611, + "learning_rate": 0.0001842490701056669, + "loss": 0.2992, + "step": 6120 + }, + { + "epoch": 0.23668867523842618, + "grad_norm": 1.586521863937378, + "learning_rate": 0.0001842233290860651, + "loss": 0.6122, + "step": 6130 + }, + { + "epoch": 0.23707479053245298, + "grad_norm": 2.387650966644287, + "learning_rate": 0.00018419758806646332, + "loss": 0.3276, + "step": 6140 + }, + { + "epoch": 0.23746090582647977, + "grad_norm": 4.840515613555908, + "learning_rate": 0.00018417184704686153, + "loss": 0.6295, + "step": 6150 + }, + { + "epoch": 0.2378470211205066, + "grad_norm": 1.70024836063385, + "learning_rate": 0.00018414610602725975, + "loss": 0.2047, + "step": 6160 + }, + { + "epoch": 0.2382331364145334, + "grad_norm": 2.791619062423706, + "learning_rate": 0.00018412036500765796, + "loss": 0.4364, + "step": 6170 + }, + { + "epoch": 0.2386192517085602, + "grad_norm": 3.710066318511963, + "learning_rate": 0.00018409462398805617, + "loss": 0.4564, + "step": 6180 + }, + { + "epoch": 0.23900536700258698, + "grad_norm": 2.564347982406616, + "learning_rate": 0.0001840688829684544, + "loss": 0.3156, + "step": 6190 + }, + { + "epoch": 0.23939148229661378, + "grad_norm": 2.3921267986297607, + "learning_rate": 0.0001840431419488526, + "loss": 0.3483, + "step": 6200 + }, + { + "epoch": 0.23977759759064057, + "grad_norm": 1.4785810708999634, + "learning_rate": 0.00018401740092925081, + "loss": 0.4338, + "step": 6210 + }, + { + "epoch": 0.24016371288466737, + "grad_norm": 3.624790906906128, + "learning_rate": 0.00018399165990964903, + "loss": 0.7156, + "step": 6220 + }, + { + "epoch": 0.24054982817869416, + "grad_norm": 3.942161798477173, + "learning_rate": 0.00018396591889004724, + "loss": 0.3932, + "step": 6230 + }, + { + "epoch": 0.24093594347272096, + "grad_norm": 3.2236740589141846, + "learning_rate": 0.00018394017787044545, + "loss": 0.3933, + "step": 6240 + }, + { + "epoch": 0.24132205876674775, + "grad_norm": 2.5040500164031982, + "learning_rate": 
0.00018391443685084367, + "loss": 0.5711, + "step": 6250 + }, + { + "epoch": 0.24170817406077455, + "grad_norm": 1.9934203624725342, + "learning_rate": 0.00018388869583124188, + "loss": 0.3074, + "step": 6260 + }, + { + "epoch": 0.24209428935480135, + "grad_norm": 3.702509641647339, + "learning_rate": 0.0001838629548116401, + "loss": 0.3454, + "step": 6270 + }, + { + "epoch": 0.24248040464882814, + "grad_norm": 2.076802968978882, + "learning_rate": 0.0001838372137920383, + "loss": 0.3044, + "step": 6280 + }, + { + "epoch": 0.24286651994285494, + "grad_norm": 5.798679351806641, + "learning_rate": 0.00018381147277243652, + "loss": 0.3396, + "step": 6290 + }, + { + "epoch": 0.24325263523688173, + "grad_norm": 4.698869705200195, + "learning_rate": 0.00018378573175283473, + "loss": 0.3735, + "step": 6300 + }, + { + "epoch": 0.24363875053090853, + "grad_norm": 3.029979705810547, + "learning_rate": 0.00018375999073323295, + "loss": 0.3891, + "step": 6310 + }, + { + "epoch": 0.24402486582493532, + "grad_norm": 2.5507185459136963, + "learning_rate": 0.00018373424971363116, + "loss": 0.4854, + "step": 6320 + }, + { + "epoch": 0.24441098111896212, + "grad_norm": 3.2052571773529053, + "learning_rate": 0.00018370850869402937, + "loss": 0.6789, + "step": 6330 + }, + { + "epoch": 0.2447970964129889, + "grad_norm": 1.9265435934066772, + "learning_rate": 0.00018368276767442761, + "loss": 0.4505, + "step": 6340 + }, + { + "epoch": 0.2451832117070157, + "grad_norm": 0.8391959071159363, + "learning_rate": 0.0001836570266548258, + "loss": 0.3432, + "step": 6350 + }, + { + "epoch": 0.2455693270010425, + "grad_norm": 3.4653851985931396, + "learning_rate": 0.00018363128563522401, + "loss": 0.3571, + "step": 6360 + }, + { + "epoch": 0.2459554422950693, + "grad_norm": 2.3033368587493896, + "learning_rate": 0.00018360554461562223, + "loss": 0.3625, + "step": 6370 + }, + { + "epoch": 0.2463415575890961, + "grad_norm": 1.659408450126648, + "learning_rate": 0.00018357980359602044, + "loss": 0.5311, + "step": 6380 + }, + { + "epoch": 0.2467276728831229, + "grad_norm": 1.1839714050292969, + "learning_rate": 0.00018355406257641865, + "loss": 0.3905, + "step": 6390 + }, + { + "epoch": 0.24711378817714968, + "grad_norm": 0.49230822920799255, + "learning_rate": 0.00018352832155681687, + "loss": 0.4021, + "step": 6400 + }, + { + "epoch": 0.24749990347117648, + "grad_norm": 4.451594829559326, + "learning_rate": 0.0001835025805372151, + "loss": 0.4504, + "step": 6410 + }, + { + "epoch": 0.2478860187652033, + "grad_norm": 1.0058324337005615, + "learning_rate": 0.0001834768395176133, + "loss": 0.2636, + "step": 6420 + }, + { + "epoch": 0.2482721340592301, + "grad_norm": 2.7853894233703613, + "learning_rate": 0.0001834510984980115, + "loss": 0.47, + "step": 6430 + }, + { + "epoch": 0.2486582493532569, + "grad_norm": 2.730095148086548, + "learning_rate": 0.00018342535747840972, + "loss": 0.3941, + "step": 6440 + }, + { + "epoch": 0.2490443646472837, + "grad_norm": 2.4993178844451904, + "learning_rate": 0.00018339961645880793, + "loss": 0.5777, + "step": 6450 + }, + { + "epoch": 0.24943047994131048, + "grad_norm": 2.361525297164917, + "learning_rate": 0.00018337387543920617, + "loss": 0.3798, + "step": 6460 + }, + { + "epoch": 0.24981659523533728, + "grad_norm": 2.5558526515960693, + "learning_rate": 0.00018334813441960436, + "loss": 0.3113, + "step": 6470 + }, + { + "epoch": 0.25020271052936405, + "grad_norm": 0.8033503890037537, + "learning_rate": 0.0001833223934000026, + "loss": 0.5254, + "step": 6480 + }, + { + "epoch": 
0.25058882582339087, + "grad_norm": 2.721090078353882, + "learning_rate": 0.0001832966523804008, + "loss": 0.393, + "step": 6490 + }, + { + "epoch": 0.25097494111741764, + "grad_norm": 1.7147916555404663, + "learning_rate": 0.000183270911360799, + "loss": 0.3225, + "step": 6500 + }, + { + "epoch": 0.25136105641144446, + "grad_norm": 2.388347864151001, + "learning_rate": 0.00018324517034119721, + "loss": 0.3519, + "step": 6510 + }, + { + "epoch": 0.25174717170547123, + "grad_norm": 2.470891237258911, + "learning_rate": 0.00018321942932159543, + "loss": 0.4384, + "step": 6520 + }, + { + "epoch": 0.25213328699949805, + "grad_norm": 1.4743351936340332, + "learning_rate": 0.00018319368830199367, + "loss": 0.2464, + "step": 6530 + }, + { + "epoch": 0.2525194022935248, + "grad_norm": 1.5889122486114502, + "learning_rate": 0.00018316794728239185, + "loss": 0.3149, + "step": 6540 + }, + { + "epoch": 0.25290551758755164, + "grad_norm": 4.900819778442383, + "learning_rate": 0.0001831422062627901, + "loss": 0.3978, + "step": 6550 + }, + { + "epoch": 0.25329163288157847, + "grad_norm": 5.22566556930542, + "learning_rate": 0.00018311646524318828, + "loss": 0.4473, + "step": 6560 + }, + { + "epoch": 0.25367774817560523, + "grad_norm": 4.7480363845825195, + "learning_rate": 0.0001830907242235865, + "loss": 0.3976, + "step": 6570 + }, + { + "epoch": 0.25406386346963206, + "grad_norm": 1.4711374044418335, + "learning_rate": 0.0001830649832039847, + "loss": 0.5183, + "step": 6580 + }, + { + "epoch": 0.2544499787636588, + "grad_norm": 2.237309217453003, + "learning_rate": 0.00018303924218438292, + "loss": 0.2171, + "step": 6590 + }, + { + "epoch": 0.25483609405768565, + "grad_norm": 4.107303619384766, + "learning_rate": 0.00018301350116478116, + "loss": 0.3918, + "step": 6600 + }, + { + "epoch": 0.2552222093517124, + "grad_norm": 4.7285003662109375, + "learning_rate": 0.00018298776014517935, + "loss": 0.2042, + "step": 6610 + }, + { + "epoch": 0.25560832464573924, + "grad_norm": 2.1333792209625244, + "learning_rate": 0.0001829620191255776, + "loss": 0.3502, + "step": 6620 + }, + { + "epoch": 0.255994439939766, + "grad_norm": 3.062173843383789, + "learning_rate": 0.00018293627810597577, + "loss": 0.3949, + "step": 6630 + }, + { + "epoch": 0.25638055523379283, + "grad_norm": 1.538854956626892, + "learning_rate": 0.00018291053708637401, + "loss": 0.4613, + "step": 6640 + }, + { + "epoch": 0.2567666705278196, + "grad_norm": 2.546586751937866, + "learning_rate": 0.00018288479606677223, + "loss": 0.5868, + "step": 6650 + }, + { + "epoch": 0.2571527858218464, + "grad_norm": 2.7282049655914307, + "learning_rate": 0.00018285905504717041, + "loss": 0.4186, + "step": 6660 + }, + { + "epoch": 0.2575389011158732, + "grad_norm": 3.204634189605713, + "learning_rate": 0.00018283331402756865, + "loss": 0.4072, + "step": 6670 + }, + { + "epoch": 0.2579250164099, + "grad_norm": 2.421846866607666, + "learning_rate": 0.00018280757300796684, + "loss": 0.306, + "step": 6680 + }, + { + "epoch": 0.2583111317039268, + "grad_norm": 4.243416786193848, + "learning_rate": 0.00018278183198836508, + "loss": 0.2631, + "step": 6690 + }, + { + "epoch": 0.2586972469979536, + "grad_norm": 1.0495362281799316, + "learning_rate": 0.00018275609096876327, + "loss": 0.3488, + "step": 6700 + }, + { + "epoch": 0.25908336229198037, + "grad_norm": 1.915279746055603, + "learning_rate": 0.0001827303499491615, + "loss": 0.2589, + "step": 6710 + }, + { + "epoch": 0.2594694775860072, + "grad_norm": 3.724299192428589, + "learning_rate": 0.00018270460892955972, 
+ "loss": 0.5118, + "step": 6720 + }, + { + "epoch": 0.25985559288003396, + "grad_norm": 2.832204580307007, + "learning_rate": 0.0001826788679099579, + "loss": 0.2508, + "step": 6730 + }, + { + "epoch": 0.2602417081740608, + "grad_norm": 1.1942508220672607, + "learning_rate": 0.00018265312689035615, + "loss": 0.4328, + "step": 6740 + }, + { + "epoch": 0.26062782346808755, + "grad_norm": 1.0741711854934692, + "learning_rate": 0.00018262738587075433, + "loss": 0.3514, + "step": 6750 + }, + { + "epoch": 0.2610139387621144, + "grad_norm": 2.9918277263641357, + "learning_rate": 0.00018260164485115257, + "loss": 0.3528, + "step": 6760 + }, + { + "epoch": 0.26140005405614114, + "grad_norm": 1.3773655891418457, + "learning_rate": 0.0001825759038315508, + "loss": 0.365, + "step": 6770 + }, + { + "epoch": 0.26178616935016796, + "grad_norm": 3.5288615226745605, + "learning_rate": 0.000182550162811949, + "loss": 0.3645, + "step": 6780 + }, + { + "epoch": 0.26217228464419473, + "grad_norm": 1.2178785800933838, + "learning_rate": 0.00018252442179234721, + "loss": 0.3742, + "step": 6790 + }, + { + "epoch": 0.26255839993822155, + "grad_norm": 2.7981081008911133, + "learning_rate": 0.0001824986807727454, + "loss": 0.6174, + "step": 6800 + }, + { + "epoch": 0.2629445152322484, + "grad_norm": 1.6766215562820435, + "learning_rate": 0.00018247293975314364, + "loss": 0.3028, + "step": 6810 + }, + { + "epoch": 0.26333063052627514, + "grad_norm": 3.7797629833221436, + "learning_rate": 0.00018244719873354183, + "loss": 0.2633, + "step": 6820 + }, + { + "epoch": 0.26371674582030197, + "grad_norm": 7.794743537902832, + "learning_rate": 0.00018242145771394007, + "loss": 0.3586, + "step": 6830 + }, + { + "epoch": 0.26410286111432874, + "grad_norm": 0.5704814195632935, + "learning_rate": 0.00018239571669433828, + "loss": 0.3506, + "step": 6840 + }, + { + "epoch": 0.26448897640835556, + "grad_norm": 5.771059513092041, + "learning_rate": 0.0001823699756747365, + "loss": 0.3881, + "step": 6850 + }, + { + "epoch": 0.2648750917023823, + "grad_norm": 2.723592519760132, + "learning_rate": 0.0001823442346551347, + "loss": 0.3955, + "step": 6860 + }, + { + "epoch": 0.26526120699640915, + "grad_norm": 1.5448215007781982, + "learning_rate": 0.0001823184936355329, + "loss": 0.495, + "step": 6870 + }, + { + "epoch": 0.2656473222904359, + "grad_norm": 2.2980363368988037, + "learning_rate": 0.00018229275261593113, + "loss": 0.2695, + "step": 6880 + }, + { + "epoch": 0.26603343758446274, + "grad_norm": 1.959811806678772, + "learning_rate": 0.00018226701159632932, + "loss": 0.383, + "step": 6890 + }, + { + "epoch": 0.2664195528784895, + "grad_norm": 2.1491482257843018, + "learning_rate": 0.00018224127057672756, + "loss": 0.5655, + "step": 6900 + }, + { + "epoch": 0.26680566817251633, + "grad_norm": 6.472841262817383, + "learning_rate": 0.00018221552955712577, + "loss": 0.4757, + "step": 6910 + }, + { + "epoch": 0.2671917834665431, + "grad_norm": 7.878561496734619, + "learning_rate": 0.000182189788537524, + "loss": 0.3944, + "step": 6920 + }, + { + "epoch": 0.2675778987605699, + "grad_norm": 0.052701435983181, + "learning_rate": 0.0001821640475179222, + "loss": 0.382, + "step": 6930 + }, + { + "epoch": 0.2679640140545967, + "grad_norm": 2.294677972793579, + "learning_rate": 0.00018213830649832039, + "loss": 0.2932, + "step": 6940 + }, + { + "epoch": 0.2683501293486235, + "grad_norm": 1.6058757305145264, + "learning_rate": 0.00018211256547871863, + "loss": 0.4438, + "step": 6950 + }, + { + "epoch": 0.2687362446426503, + "grad_norm": 
4.003495693206787, + "learning_rate": 0.00018208682445911684, + "loss": 0.5945, + "step": 6960 + }, + { + "epoch": 0.2691223599366771, + "grad_norm": 1.423017144203186, + "learning_rate": 0.00018206108343951505, + "loss": 0.4356, + "step": 6970 + }, + { + "epoch": 0.26950847523070387, + "grad_norm": 2.206341028213501, + "learning_rate": 0.00018203534241991327, + "loss": 0.344, + "step": 6980 + }, + { + "epoch": 0.2698945905247307, + "grad_norm": 0.6644784212112427, + "learning_rate": 0.00018200960140031148, + "loss": 0.4988, + "step": 6990 + }, + { + "epoch": 0.27028070581875746, + "grad_norm": 2.4569833278656006, + "learning_rate": 0.0001819838603807097, + "loss": 0.3689, + "step": 7000 + }, + { + "epoch": 0.2706668211127843, + "grad_norm": 1.554567575454712, + "learning_rate": 0.00018195811936110788, + "loss": 0.4684, + "step": 7010 + }, + { + "epoch": 0.27105293640681105, + "grad_norm": 3.2556328773498535, + "learning_rate": 0.00018193237834150612, + "loss": 0.611, + "step": 7020 + }, + { + "epoch": 0.2714390517008379, + "grad_norm": 2.9123427867889404, + "learning_rate": 0.00018190663732190433, + "loss": 0.4278, + "step": 7030 + }, + { + "epoch": 0.27182516699486464, + "grad_norm": 2.159273862838745, + "learning_rate": 0.00018188089630230255, + "loss": 0.2384, + "step": 7040 + }, + { + "epoch": 0.27221128228889147, + "grad_norm": 3.4977822303771973, + "learning_rate": 0.00018185515528270076, + "loss": 0.5459, + "step": 7050 + }, + { + "epoch": 0.27259739758291823, + "grad_norm": 1.1822031736373901, + "learning_rate": 0.00018182941426309897, + "loss": 0.4364, + "step": 7060 + }, + { + "epoch": 0.27298351287694506, + "grad_norm": 2.4467339515686035, + "learning_rate": 0.00018180367324349719, + "loss": 0.5198, + "step": 7070 + }, + { + "epoch": 0.2733696281709719, + "grad_norm": 1.0406467914581299, + "learning_rate": 0.0001817779322238954, + "loss": 0.2797, + "step": 7080 + }, + { + "epoch": 0.27375574346499865, + "grad_norm": 1.925830602645874, + "learning_rate": 0.0001817521912042936, + "loss": 0.4898, + "step": 7090 + }, + { + "epoch": 0.27414185875902547, + "grad_norm": 3.0385682582855225, + "learning_rate": 0.00018172645018469183, + "loss": 0.3867, + "step": 7100 + }, + { + "epoch": 0.27452797405305224, + "grad_norm": 1.5285695791244507, + "learning_rate": 0.00018170070916509004, + "loss": 0.4233, + "step": 7110 + }, + { + "epoch": 0.27491408934707906, + "grad_norm": 1.266693115234375, + "learning_rate": 0.00018167496814548825, + "loss": 0.4724, + "step": 7120 + }, + { + "epoch": 0.27530020464110583, + "grad_norm": 3.371323585510254, + "learning_rate": 0.00018164922712588647, + "loss": 0.533, + "step": 7130 + }, + { + "epoch": 0.27568631993513265, + "grad_norm": 2.662691116333008, + "learning_rate": 0.00018162348610628468, + "loss": 0.3134, + "step": 7140 + }, + { + "epoch": 0.2760724352291594, + "grad_norm": 1.8977057933807373, + "learning_rate": 0.0001815977450866829, + "loss": 0.3038, + "step": 7150 + }, + { + "epoch": 0.27645855052318624, + "grad_norm": 3.1027894020080566, + "learning_rate": 0.0001815720040670811, + "loss": 0.5074, + "step": 7160 + }, + { + "epoch": 0.276844665817213, + "grad_norm": 1.2112785577774048, + "learning_rate": 0.00018154626304747932, + "loss": 0.324, + "step": 7170 + }, + { + "epoch": 0.27723078111123983, + "grad_norm": 1.6500996351242065, + "learning_rate": 0.00018152052202787753, + "loss": 0.2856, + "step": 7180 + }, + { + "epoch": 0.2776168964052666, + "grad_norm": 3.215747833251953, + "learning_rate": 0.00018149478100827575, + "loss": 0.4522, + 
"step": 7190 + }, + { + "epoch": 0.2780030116992934, + "grad_norm": 4.8541059494018555, + "learning_rate": 0.00018146903998867396, + "loss": 0.4106, + "step": 7200 + }, + { + "epoch": 0.2783891269933202, + "grad_norm": 2.3697152137756348, + "learning_rate": 0.00018144329896907217, + "loss": 0.2673, + "step": 7210 + }, + { + "epoch": 0.278775242287347, + "grad_norm": 2.9693639278411865, + "learning_rate": 0.00018141755794947039, + "loss": 0.3949, + "step": 7220 + }, + { + "epoch": 0.2791613575813738, + "grad_norm": 2.691817283630371, + "learning_rate": 0.0001813918169298686, + "loss": 0.3427, + "step": 7230 + }, + { + "epoch": 0.2795474728754006, + "grad_norm": 5.197331428527832, + "learning_rate": 0.0001813660759102668, + "loss": 0.4331, + "step": 7240 + }, + { + "epoch": 0.27993358816942737, + "grad_norm": 1.5799933671951294, + "learning_rate": 0.00018134033489066503, + "loss": 0.3543, + "step": 7250 + }, + { + "epoch": 0.2803197034634542, + "grad_norm": 1.3614271879196167, + "learning_rate": 0.00018131459387106324, + "loss": 0.5289, + "step": 7260 + }, + { + "epoch": 0.28070581875748096, + "grad_norm": 2.2942802906036377, + "learning_rate": 0.00018128885285146145, + "loss": 0.4318, + "step": 7270 + }, + { + "epoch": 0.2810919340515078, + "grad_norm": 1.1805604696273804, + "learning_rate": 0.00018126311183185967, + "loss": 0.4754, + "step": 7280 + }, + { + "epoch": 0.28147804934553455, + "grad_norm": 0.5108867883682251, + "learning_rate": 0.00018123737081225788, + "loss": 0.4517, + "step": 7290 + }, + { + "epoch": 0.2818641646395614, + "grad_norm": 1.1736596822738647, + "learning_rate": 0.0001812116297926561, + "loss": 0.4538, + "step": 7300 + }, + { + "epoch": 0.28225027993358814, + "grad_norm": 5.497414588928223, + "learning_rate": 0.0001811858887730543, + "loss": 0.5116, + "step": 7310 + }, + { + "epoch": 0.28263639522761497, + "grad_norm": 1.1347368955612183, + "learning_rate": 0.00018116014775345252, + "loss": 0.3848, + "step": 7320 + }, + { + "epoch": 0.28302251052164173, + "grad_norm": 2.740715742111206, + "learning_rate": 0.00018113440673385073, + "loss": 0.3456, + "step": 7330 + }, + { + "epoch": 0.28340862581566856, + "grad_norm": 1.3853389024734497, + "learning_rate": 0.00018110866571424897, + "loss": 0.3398, + "step": 7340 + }, + { + "epoch": 0.2837947411096954, + "grad_norm": 7.493706703186035, + "learning_rate": 0.00018108292469464716, + "loss": 0.2726, + "step": 7350 + }, + { + "epoch": 0.28418085640372215, + "grad_norm": 1.81704843044281, + "learning_rate": 0.00018105718367504537, + "loss": 0.3818, + "step": 7360 + }, + { + "epoch": 0.28456697169774897, + "grad_norm": 2.4877755641937256, + "learning_rate": 0.00018103144265544359, + "loss": 0.3499, + "step": 7370 + }, + { + "epoch": 0.28495308699177574, + "grad_norm": 1.3704471588134766, + "learning_rate": 0.0001810057016358418, + "loss": 0.2346, + "step": 7380 + }, + { + "epoch": 0.28533920228580256, + "grad_norm": 2.664745569229126, + "learning_rate": 0.00018097996061624, + "loss": 0.4041, + "step": 7390 + }, + { + "epoch": 0.28572531757982933, + "grad_norm": 3.6539089679718018, + "learning_rate": 0.00018095421959663823, + "loss": 0.2885, + "step": 7400 + }, + { + "epoch": 0.28611143287385615, + "grad_norm": 0.8653857707977295, + "learning_rate": 0.00018092847857703647, + "loss": 0.3849, + "step": 7410 + }, + { + "epoch": 0.2864975481678829, + "grad_norm": 2.6319446563720703, + "learning_rate": 0.00018090273755743465, + "loss": 0.2728, + "step": 7420 + }, + { + "epoch": 0.28688366346190974, + "grad_norm": 
2.3457818031311035, + "learning_rate": 0.00018087699653783287, + "loss": 0.446, + "step": 7430 + }, + { + "epoch": 0.2872697787559365, + "grad_norm": 0.8546158671379089, + "learning_rate": 0.00018085125551823108, + "loss": 0.2898, + "step": 7440 + }, + { + "epoch": 0.28765589404996333, + "grad_norm": 0.45937278866767883, + "learning_rate": 0.0001808255144986293, + "loss": 0.583, + "step": 7450 + }, + { + "epoch": 0.2880420093439901, + "grad_norm": 1.7129520177841187, + "learning_rate": 0.00018079977347902753, + "loss": 0.4908, + "step": 7460 + }, + { + "epoch": 0.2884281246380169, + "grad_norm": 4.106715679168701, + "learning_rate": 0.00018077403245942572, + "loss": 0.3373, + "step": 7470 + }, + { + "epoch": 0.2888142399320437, + "grad_norm": 3.8112800121307373, + "learning_rate": 0.00018074829143982396, + "loss": 0.392, + "step": 7480 + }, + { + "epoch": 0.2892003552260705, + "grad_norm": 0.5382593274116516, + "learning_rate": 0.00018072255042022215, + "loss": 0.2929, + "step": 7490 + }, + { + "epoch": 0.2895864705200973, + "grad_norm": 2.50888991355896, + "learning_rate": 0.00018069680940062036, + "loss": 0.3361, + "step": 7500 + }, + { + "epoch": 0.2899725858141241, + "grad_norm": 3.3544275760650635, + "learning_rate": 0.00018067106838101857, + "loss": 0.388, + "step": 7510 + }, + { + "epoch": 0.2903587011081509, + "grad_norm": 1.192386507987976, + "learning_rate": 0.00018064532736141679, + "loss": 0.4427, + "step": 7520 + }, + { + "epoch": 0.2907448164021777, + "grad_norm": 1.5527079105377197, + "learning_rate": 0.00018061958634181503, + "loss": 0.4023, + "step": 7530 + }, + { + "epoch": 0.29113093169620446, + "grad_norm": 0.67446368932724, + "learning_rate": 0.0001805938453222132, + "loss": 0.4949, + "step": 7540 + }, + { + "epoch": 0.2915170469902313, + "grad_norm": 1.6349838972091675, + "learning_rate": 0.00018056810430261145, + "loss": 0.3811, + "step": 7550 + }, + { + "epoch": 0.29190316228425806, + "grad_norm": 1.4848904609680176, + "learning_rate": 0.00018054236328300964, + "loss": 0.3851, + "step": 7560 + }, + { + "epoch": 0.2922892775782849, + "grad_norm": 0.9933151006698608, + "learning_rate": 0.00018051662226340785, + "loss": 0.4699, + "step": 7570 + }, + { + "epoch": 0.29267539287231165, + "grad_norm": 1.1026233434677124, + "learning_rate": 0.00018049088124380607, + "loss": 0.3287, + "step": 7580 + }, + { + "epoch": 0.29306150816633847, + "grad_norm": 1.232954740524292, + "learning_rate": 0.00018046514022420428, + "loss": 0.3722, + "step": 7590 + }, + { + "epoch": 0.2934476234603653, + "grad_norm": 3.8303146362304688, + "learning_rate": 0.00018043939920460252, + "loss": 0.2985, + "step": 7600 + }, + { + "epoch": 0.29383373875439206, + "grad_norm": 1.9358845949172974, + "learning_rate": 0.0001804136581850007, + "loss": 0.4361, + "step": 7610 + }, + { + "epoch": 0.2942198540484189, + "grad_norm": 1.8905962705612183, + "learning_rate": 0.00018038791716539895, + "loss": 0.2835, + "step": 7620 + }, + { + "epoch": 0.29460596934244565, + "grad_norm": 1.9965651035308838, + "learning_rate": 0.00018036217614579713, + "loss": 0.5387, + "step": 7630 + }, + { + "epoch": 0.2949920846364725, + "grad_norm": 4.204270839691162, + "learning_rate": 0.00018033643512619535, + "loss": 0.3498, + "step": 7640 + }, + { + "epoch": 0.29537819993049924, + "grad_norm": 1.4732340574264526, + "learning_rate": 0.00018031069410659359, + "loss": 0.315, + "step": 7650 + }, + { + "epoch": 0.29576431522452606, + "grad_norm": 1.0233594179153442, + "learning_rate": 0.00018028495308699177, + "loss": 0.1536, + 
"step": 7660 + }, + { + "epoch": 0.29615043051855283, + "grad_norm": 3.1531457901000977, + "learning_rate": 0.00018025921206739, + "loss": 0.3793, + "step": 7670 + }, + { + "epoch": 0.29653654581257965, + "grad_norm": 0.8080945014953613, + "learning_rate": 0.0001802334710477882, + "loss": 0.5589, + "step": 7680 + }, + { + "epoch": 0.2969226611066064, + "grad_norm": 3.1202728748321533, + "learning_rate": 0.00018020773002818644, + "loss": 0.4652, + "step": 7690 + }, + { + "epoch": 0.29730877640063325, + "grad_norm": 2.5934784412384033, + "learning_rate": 0.00018018198900858463, + "loss": 0.4921, + "step": 7700 + }, + { + "epoch": 0.29769489169466, + "grad_norm": 2.858642101287842, + "learning_rate": 0.00018015624798898284, + "loss": 0.2732, + "step": 7710 + }, + { + "epoch": 0.29808100698868684, + "grad_norm": 3.621229887008667, + "learning_rate": 0.00018013050696938108, + "loss": 0.5639, + "step": 7720 + }, + { + "epoch": 0.2984671222827136, + "grad_norm": 3.7943220138549805, + "learning_rate": 0.00018010476594977926, + "loss": 0.3177, + "step": 7730 + }, + { + "epoch": 0.2988532375767404, + "grad_norm": 1.6371623277664185, + "learning_rate": 0.0001800790249301775, + "loss": 0.4211, + "step": 7740 + }, + { + "epoch": 0.2992393528707672, + "grad_norm": 1.9557713270187378, + "learning_rate": 0.0001800532839105757, + "loss": 0.4351, + "step": 7750 + }, + { + "epoch": 0.299625468164794, + "grad_norm": 2.684964895248413, + "learning_rate": 0.00018002754289097393, + "loss": 0.39, + "step": 7760 + }, + { + "epoch": 0.3000115834588208, + "grad_norm": 1.7401316165924072, + "learning_rate": 0.00018000180187137215, + "loss": 0.2844, + "step": 7770 + }, + { + "epoch": 0.3003976987528476, + "grad_norm": 0.6305844187736511, + "learning_rate": 0.00017997606085177033, + "loss": 0.2472, + "step": 7780 + }, + { + "epoch": 0.3007838140468744, + "grad_norm": 2.2880289554595947, + "learning_rate": 0.00017995031983216857, + "loss": 0.3952, + "step": 7790 + }, + { + "epoch": 0.3011699293409012, + "grad_norm": 3.423980951309204, + "learning_rate": 0.00017992457881256676, + "loss": 0.4459, + "step": 7800 + }, + { + "epoch": 0.30155604463492797, + "grad_norm": 0.6920475363731384, + "learning_rate": 0.000179898837792965, + "loss": 0.2909, + "step": 7810 + }, + { + "epoch": 0.3019421599289548, + "grad_norm": 0.8905349373817444, + "learning_rate": 0.00017987309677336318, + "loss": 0.346, + "step": 7820 + }, + { + "epoch": 0.30232827522298156, + "grad_norm": 1.8836702108383179, + "learning_rate": 0.00017984735575376143, + "loss": 0.4038, + "step": 7830 + }, + { + "epoch": 0.3027143905170084, + "grad_norm": 2.6712753772735596, + "learning_rate": 0.00017982161473415964, + "loss": 0.3452, + "step": 7840 + }, + { + "epoch": 0.30310050581103515, + "grad_norm": 2.344122886657715, + "learning_rate": 0.00017979587371455785, + "loss": 0.5091, + "step": 7850 + }, + { + "epoch": 0.30348662110506197, + "grad_norm": 3.734415054321289, + "learning_rate": 0.00017977013269495607, + "loss": 0.3893, + "step": 7860 + }, + { + "epoch": 0.3038727363990888, + "grad_norm": 1.70572829246521, + "learning_rate": 0.00017974439167535425, + "loss": 0.4829, + "step": 7870 + }, + { + "epoch": 0.30425885169311556, + "grad_norm": 1.779189109802246, + "learning_rate": 0.0001797186506557525, + "loss": 0.5361, + "step": 7880 + }, + { + "epoch": 0.3046449669871424, + "grad_norm": 2.888803482055664, + "learning_rate": 0.00017969290963615068, + "loss": 0.4305, + "step": 7890 + }, + { + "epoch": 0.30503108228116915, + "grad_norm": 1.2247655391693115, + 
"learning_rate": 0.00017966716861654892, + "loss": 0.3817, + "step": 7900 + }, + { + "epoch": 0.305417197575196, + "grad_norm": 2.995152473449707, + "learning_rate": 0.00017964142759694713, + "loss": 0.4669, + "step": 7910 + }, + { + "epoch": 0.30580331286922274, + "grad_norm": 8.049060821533203, + "learning_rate": 0.00017961568657734535, + "loss": 0.6706, + "step": 7920 + }, + { + "epoch": 0.30618942816324957, + "grad_norm": 2.1181435585021973, + "learning_rate": 0.00017958994555774356, + "loss": 0.4353, + "step": 7930 + }, + { + "epoch": 0.30657554345727633, + "grad_norm": 8.394509315490723, + "learning_rate": 0.00017956420453814174, + "loss": 0.3497, + "step": 7940 + }, + { + "epoch": 0.30696165875130316, + "grad_norm": 2.5140750408172607, + "learning_rate": 0.00017953846351853998, + "loss": 0.5774, + "step": 7950 + }, + { + "epoch": 0.3073477740453299, + "grad_norm": 2.720942974090576, + "learning_rate": 0.0001795127224989382, + "loss": 0.4457, + "step": 7960 + }, + { + "epoch": 0.30773388933935675, + "grad_norm": 1.8155667781829834, + "learning_rate": 0.0001794869814793364, + "loss": 0.4155, + "step": 7970 + }, + { + "epoch": 0.3081200046333835, + "grad_norm": 1.9989752769470215, + "learning_rate": 0.00017946124045973462, + "loss": 0.3233, + "step": 7980 + }, + { + "epoch": 0.30850611992741034, + "grad_norm": 0.7483557462692261, + "learning_rate": 0.00017943549944013284, + "loss": 0.2932, + "step": 7990 + }, + { + "epoch": 0.3088922352214371, + "grad_norm": 0.5750642418861389, + "learning_rate": 0.00017940975842053105, + "loss": 0.401, + "step": 8000 + }, + { + "epoch": 0.30927835051546393, + "grad_norm": 1.2084500789642334, + "learning_rate": 0.00017938401740092924, + "loss": 0.3705, + "step": 8010 + }, + { + "epoch": 0.3096644658094907, + "grad_norm": 1.833434820175171, + "learning_rate": 0.00017935827638132748, + "loss": 0.3507, + "step": 8020 + }, + { + "epoch": 0.3100505811035175, + "grad_norm": 3.147508382797241, + "learning_rate": 0.0001793325353617257, + "loss": 0.3255, + "step": 8030 + }, + { + "epoch": 0.3104366963975443, + "grad_norm": 2.150932788848877, + "learning_rate": 0.0001793067943421239, + "loss": 0.3401, + "step": 8040 + }, + { + "epoch": 0.3108228116915711, + "grad_norm": 3.3340635299682617, + "learning_rate": 0.00017928105332252212, + "loss": 0.3606, + "step": 8050 + }, + { + "epoch": 0.3112089269855979, + "grad_norm": 5.173205375671387, + "learning_rate": 0.00017925531230292033, + "loss": 0.1695, + "step": 8060 + }, + { + "epoch": 0.3115950422796247, + "grad_norm": 1.0863877534866333, + "learning_rate": 0.00017922957128331854, + "loss": 0.3038, + "step": 8070 + }, + { + "epoch": 0.31198115757365147, + "grad_norm": 1.5977118015289307, + "learning_rate": 0.00017920383026371676, + "loss": 0.2291, + "step": 8080 + }, + { + "epoch": 0.3123672728676783, + "grad_norm": 4.040243625640869, + "learning_rate": 0.00017917808924411497, + "loss": 0.8538, + "step": 8090 + }, + { + "epoch": 0.31275338816170506, + "grad_norm": 1.5926854610443115, + "learning_rate": 0.00017915234822451318, + "loss": 0.4733, + "step": 8100 + }, + { + "epoch": 0.3131395034557319, + "grad_norm": 1.0959421396255493, + "learning_rate": 0.0001791266072049114, + "loss": 0.6076, + "step": 8110 + }, + { + "epoch": 0.3135256187497587, + "grad_norm": 2.786085367202759, + "learning_rate": 0.0001791008661853096, + "loss": 0.3229, + "step": 8120 + }, + { + "epoch": 0.3139117340437855, + "grad_norm": 2.2573914527893066, + "learning_rate": 0.00017907512516570782, + "loss": 0.3676, + "step": 8130 + }, + { + 
"epoch": 0.3142978493378123, + "grad_norm": 2.271852493286133, + "learning_rate": 0.00017904938414610604, + "loss": 0.6275, + "step": 8140 + }, + { + "epoch": 0.31468396463183906, + "grad_norm": 1.9762821197509766, + "learning_rate": 0.00017902364312650425, + "loss": 0.2232, + "step": 8150 + }, + { + "epoch": 0.3150700799258659, + "grad_norm": 2.9960873126983643, + "learning_rate": 0.00017899790210690246, + "loss": 0.4739, + "step": 8160 + }, + { + "epoch": 0.31545619521989265, + "grad_norm": 1.142216682434082, + "learning_rate": 0.00017897216108730068, + "loss": 0.5983, + "step": 8170 + }, + { + "epoch": 0.3158423105139195, + "grad_norm": 1.7127768993377686, + "learning_rate": 0.0001789464200676989, + "loss": 0.4131, + "step": 8180 + }, + { + "epoch": 0.31622842580794625, + "grad_norm": 1.579793095588684, + "learning_rate": 0.0001789206790480971, + "loss": 0.3119, + "step": 8190 + }, + { + "epoch": 0.31661454110197307, + "grad_norm": 0.9647886157035828, + "learning_rate": 0.00017889493802849532, + "loss": 0.398, + "step": 8200 + }, + { + "epoch": 0.31700065639599984, + "grad_norm": 3.435312032699585, + "learning_rate": 0.00017886919700889353, + "loss": 0.405, + "step": 8210 + }, + { + "epoch": 0.31738677169002666, + "grad_norm": 2.1500205993652344, + "learning_rate": 0.00017884345598929174, + "loss": 0.2519, + "step": 8220 + }, + { + "epoch": 0.3177728869840534, + "grad_norm": 1.3107216358184814, + "learning_rate": 0.00017881771496968996, + "loss": 0.2846, + "step": 8230 + }, + { + "epoch": 0.31815900227808025, + "grad_norm": 0.1899029165506363, + "learning_rate": 0.00017879197395008817, + "loss": 0.4597, + "step": 8240 + }, + { + "epoch": 0.318545117572107, + "grad_norm": 2.329299211502075, + "learning_rate": 0.00017876623293048638, + "loss": 0.6523, + "step": 8250 + }, + { + "epoch": 0.31893123286613384, + "grad_norm": 0.41523978114128113, + "learning_rate": 0.0001787404919108846, + "loss": 0.3266, + "step": 8260 + }, + { + "epoch": 0.3193173481601606, + "grad_norm": 0.7914639711380005, + "learning_rate": 0.0001787147508912828, + "loss": 0.4029, + "step": 8270 + }, + { + "epoch": 0.31970346345418743, + "grad_norm": 0.6159287691116333, + "learning_rate": 0.00017868900987168102, + "loss": 0.4426, + "step": 8280 + }, + { + "epoch": 0.3200895787482142, + "grad_norm": 1.3690640926361084, + "learning_rate": 0.00017866326885207924, + "loss": 0.2974, + "step": 8290 + }, + { + "epoch": 0.320475694042241, + "grad_norm": 0.8592869639396667, + "learning_rate": 0.00017863752783247745, + "loss": 0.232, + "step": 8300 + }, + { + "epoch": 0.3208618093362678, + "grad_norm": 0.43169018626213074, + "learning_rate": 0.00017861178681287566, + "loss": 0.4033, + "step": 8310 + }, + { + "epoch": 0.3212479246302946, + "grad_norm": 0.8405828475952148, + "learning_rate": 0.00017858604579327388, + "loss": 0.3339, + "step": 8320 + }, + { + "epoch": 0.3216340399243214, + "grad_norm": 2.3412604331970215, + "learning_rate": 0.0001785603047736721, + "loss": 0.2781, + "step": 8330 + }, + { + "epoch": 0.3220201552183482, + "grad_norm": 2.412045478820801, + "learning_rate": 0.0001785345637540703, + "loss": 0.4346, + "step": 8340 + }, + { + "epoch": 0.32240627051237497, + "grad_norm": 3.626305341720581, + "learning_rate": 0.00017850882273446852, + "loss": 0.327, + "step": 8350 + }, + { + "epoch": 0.3227923858064018, + "grad_norm": 0.5645825266838074, + "learning_rate": 0.00017848308171486673, + "loss": 0.234, + "step": 8360 + }, + { + "epoch": 0.32317850110042856, + "grad_norm": 4.27307653427124, + "learning_rate": 
0.00017845734069526494, + "loss": 0.5493, + "step": 8370 + }, + { + "epoch": 0.3235646163944554, + "grad_norm": 0.4511154890060425, + "learning_rate": 0.00017843159967566316, + "loss": 0.3501, + "step": 8380 + }, + { + "epoch": 0.3239507316884822, + "grad_norm": 0.314996600151062, + "learning_rate": 0.00017840585865606137, + "loss": 0.3544, + "step": 8390 + }, + { + "epoch": 0.324336846982509, + "grad_norm": 1.6546530723571777, + "learning_rate": 0.00017838011763645958, + "loss": 0.2455, + "step": 8400 + }, + { + "epoch": 0.3247229622765358, + "grad_norm": 3.2812252044677734, + "learning_rate": 0.0001783543766168578, + "loss": 0.3333, + "step": 8410 + }, + { + "epoch": 0.32510907757056257, + "grad_norm": 3.5717616081237793, + "learning_rate": 0.000178328635597256, + "loss": 0.4679, + "step": 8420 + }, + { + "epoch": 0.3254951928645894, + "grad_norm": 1.12017023563385, + "learning_rate": 0.00017830289457765422, + "loss": 0.3481, + "step": 8430 + }, + { + "epoch": 0.32588130815861616, + "grad_norm": 1.869462490081787, + "learning_rate": 0.00017827715355805244, + "loss": 0.4566, + "step": 8440 + }, + { + "epoch": 0.326267423452643, + "grad_norm": 1.4613149166107178, + "learning_rate": 0.00017825141253845065, + "loss": 0.5456, + "step": 8450 + }, + { + "epoch": 0.32665353874666975, + "grad_norm": 0.6842670440673828, + "learning_rate": 0.0001782256715188489, + "loss": 0.2776, + "step": 8460 + }, + { + "epoch": 0.32703965404069657, + "grad_norm": 2.9485504627227783, + "learning_rate": 0.00017819993049924708, + "loss": 0.3204, + "step": 8470 + }, + { + "epoch": 0.32742576933472334, + "grad_norm": 3.2084853649139404, + "learning_rate": 0.0001781741894796453, + "loss": 0.3391, + "step": 8480 + }, + { + "epoch": 0.32781188462875016, + "grad_norm": 1.9732774496078491, + "learning_rate": 0.0001781484484600435, + "loss": 0.3283, + "step": 8490 + }, + { + "epoch": 0.32819799992277693, + "grad_norm": 0.6378610134124756, + "learning_rate": 0.00017812270744044172, + "loss": 0.4519, + "step": 8500 + }, + { + "epoch": 0.32858411521680375, + "grad_norm": 4.108947277069092, + "learning_rate": 0.00017809696642083993, + "loss": 0.4933, + "step": 8510 + }, + { + "epoch": 0.3289702305108305, + "grad_norm": 2.7623212337493896, + "learning_rate": 0.00017807122540123814, + "loss": 0.4197, + "step": 8520 + }, + { + "epoch": 0.32935634580485734, + "grad_norm": 1.8904645442962646, + "learning_rate": 0.00017804548438163638, + "loss": 0.325, + "step": 8530 + }, + { + "epoch": 0.3297424610988841, + "grad_norm": 0.5131659507751465, + "learning_rate": 0.00017801974336203457, + "loss": 0.2403, + "step": 8540 + }, + { + "epoch": 0.33012857639291093, + "grad_norm": 2.965916633605957, + "learning_rate": 0.0001779940023424328, + "loss": 0.4597, + "step": 8550 + }, + { + "epoch": 0.3305146916869377, + "grad_norm": 1.5409698486328125, + "learning_rate": 0.000177968261322831, + "loss": 0.4594, + "step": 8560 + }, + { + "epoch": 0.3309008069809645, + "grad_norm": 1.1746805906295776, + "learning_rate": 0.0001779425203032292, + "loss": 0.4581, + "step": 8570 + }, + { + "epoch": 0.3312869222749913, + "grad_norm": 4.493356227874756, + "learning_rate": 0.00017791677928362745, + "loss": 0.4699, + "step": 8580 + }, + { + "epoch": 0.3316730375690181, + "grad_norm": 3.506526470184326, + "learning_rate": 0.00017789103826402564, + "loss": 0.3974, + "step": 8590 + }, + { + "epoch": 0.3320591528630449, + "grad_norm": 2.3893234729766846, + "learning_rate": 0.00017786529724442388, + "loss": 0.2823, + "step": 8600 + }, + { + "epoch": 
0.3324452681570717, + "grad_norm": 1.6228163242340088, + "learning_rate": 0.00017783955622482206, + "loss": 0.4199, + "step": 8610 + }, + { + "epoch": 0.3328313834510985, + "grad_norm": 3.2869131565093994, + "learning_rate": 0.0001778138152052203, + "loss": 0.3173, + "step": 8620 + }, + { + "epoch": 0.3332174987451253, + "grad_norm": 5.547116279602051, + "learning_rate": 0.0001777880741856185, + "loss": 0.4584, + "step": 8630 + }, + { + "epoch": 0.33360361403915206, + "grad_norm": 1.3338594436645508, + "learning_rate": 0.0001777623331660167, + "loss": 0.4235, + "step": 8640 + }, + { + "epoch": 0.3339897293331789, + "grad_norm": 1.9165093898773193, + "learning_rate": 0.00017773659214641494, + "loss": 0.2989, + "step": 8650 + }, + { + "epoch": 0.3343758446272057, + "grad_norm": 1.968935251235962, + "learning_rate": 0.00017771085112681313, + "loss": 0.4194, + "step": 8660 + }, + { + "epoch": 0.3347619599212325, + "grad_norm": 9.66997241973877, + "learning_rate": 0.00017768511010721137, + "loss": 0.5818, + "step": 8670 + }, + { + "epoch": 0.3351480752152593, + "grad_norm": 2.3636281490325928, + "learning_rate": 0.00017765936908760956, + "loss": 0.3317, + "step": 8680 + }, + { + "epoch": 0.33553419050928607, + "grad_norm": 3.3569977283477783, + "learning_rate": 0.0001776336280680078, + "loss": 0.4388, + "step": 8690 + }, + { + "epoch": 0.3359203058033129, + "grad_norm": 1.2452306747436523, + "learning_rate": 0.00017760788704840598, + "loss": 0.1368, + "step": 8700 + }, + { + "epoch": 0.33630642109733966, + "grad_norm": 0.0380173958837986, + "learning_rate": 0.0001775821460288042, + "loss": 0.3264, + "step": 8710 + }, + { + "epoch": 0.3366925363913665, + "grad_norm": 1.5271002054214478, + "learning_rate": 0.00017755640500920244, + "loss": 0.2943, + "step": 8720 + }, + { + "epoch": 0.33707865168539325, + "grad_norm": 0.9701687693595886, + "learning_rate": 0.00017753066398960062, + "loss": 0.353, + "step": 8730 + }, + { + "epoch": 0.33746476697942007, + "grad_norm": 1.9296154975891113, + "learning_rate": 0.00017750492296999886, + "loss": 0.3776, + "step": 8740 + }, + { + "epoch": 0.33785088227344684, + "grad_norm": 1.2136276960372925, + "learning_rate": 0.00017747918195039705, + "loss": 0.5126, + "step": 8750 + }, + { + "epoch": 0.33823699756747366, + "grad_norm": 1.7323212623596191, + "learning_rate": 0.0001774534409307953, + "loss": 0.3477, + "step": 8760 + }, + { + "epoch": 0.33862311286150043, + "grad_norm": 1.164534091949463, + "learning_rate": 0.0001774276999111935, + "loss": 0.4053, + "step": 8770 + }, + { + "epoch": 0.33900922815552725, + "grad_norm": 0.42989471554756165, + "learning_rate": 0.0001774019588915917, + "loss": 0.3026, + "step": 8780 + }, + { + "epoch": 0.339395343449554, + "grad_norm": 2.357590436935425, + "learning_rate": 0.00017737621787198993, + "loss": 0.3869, + "step": 8790 + }, + { + "epoch": 0.33978145874358084, + "grad_norm": 1.9374550580978394, + "learning_rate": 0.00017735047685238812, + "loss": 0.2975, + "step": 8800 + }, + { + "epoch": 0.3401675740376076, + "grad_norm": 4.8107428550720215, + "learning_rate": 0.00017732473583278636, + "loss": 0.3959, + "step": 8810 + }, + { + "epoch": 0.34055368933163443, + "grad_norm": 1.938700556755066, + "learning_rate": 0.00017729899481318454, + "loss": 0.3726, + "step": 8820 + }, + { + "epoch": 0.3409398046256612, + "grad_norm": 3.147167682647705, + "learning_rate": 0.00017727325379358278, + "loss": 0.1828, + "step": 8830 + }, + { + "epoch": 0.341325919919688, + "grad_norm": 1.8921313285827637, + "learning_rate": 
0.000177247512773981, + "loss": 0.2038, + "step": 8840 + }, + { + "epoch": 0.3417120352137148, + "grad_norm": 0.9098349213600159, + "learning_rate": 0.00017722177175437918, + "loss": 0.2853, + "step": 8850 + }, + { + "epoch": 0.3420981505077416, + "grad_norm": 2.4006853103637695, + "learning_rate": 0.00017719603073477742, + "loss": 0.6054, + "step": 8860 + }, + { + "epoch": 0.3424842658017684, + "grad_norm": 1.9303867816925049, + "learning_rate": 0.0001771702897151756, + "loss": 0.3507, + "step": 8870 + }, + { + "epoch": 0.3428703810957952, + "grad_norm": 0.49361029267311096, + "learning_rate": 0.00017714454869557385, + "loss": 0.4661, + "step": 8880 + }, + { + "epoch": 0.343256496389822, + "grad_norm": 2.542618751525879, + "learning_rate": 0.00017711880767597204, + "loss": 0.6924, + "step": 8890 + }, + { + "epoch": 0.3436426116838488, + "grad_norm": 0.5868918895721436, + "learning_rate": 0.00017709306665637028, + "loss": 0.4507, + "step": 8900 + }, + { + "epoch": 0.3440287269778756, + "grad_norm": 2.4685137271881104, + "learning_rate": 0.0001770673256367685, + "loss": 0.4538, + "step": 8910 + }, + { + "epoch": 0.3444148422719024, + "grad_norm": 2.6662702560424805, + "learning_rate": 0.00017704158461716668, + "loss": 0.6181, + "step": 8920 + }, + { + "epoch": 0.3448009575659292, + "grad_norm": 1.705103874206543, + "learning_rate": 0.00017701584359756492, + "loss": 0.481, + "step": 8930 + }, + { + "epoch": 0.345187072859956, + "grad_norm": 2.0710952281951904, + "learning_rate": 0.0001769901025779631, + "loss": 0.4357, + "step": 8940 + }, + { + "epoch": 0.3455731881539828, + "grad_norm": 3.487117290496826, + "learning_rate": 0.00017696436155836134, + "loss": 0.4572, + "step": 8950 + }, + { + "epoch": 0.34595930344800957, + "grad_norm": 3.03472900390625, + "learning_rate": 0.00017693862053875956, + "loss": 0.4437, + "step": 8960 + }, + { + "epoch": 0.3463454187420364, + "grad_norm": 1.310692548751831, + "learning_rate": 0.00017691287951915777, + "loss": 0.4218, + "step": 8970 + }, + { + "epoch": 0.34673153403606316, + "grad_norm": 4.131219387054443, + "learning_rate": 0.00017688713849955598, + "loss": 0.445, + "step": 8980 + }, + { + "epoch": 0.34711764933009, + "grad_norm": 1.4199285507202148, + "learning_rate": 0.00017686139747995417, + "loss": 0.3093, + "step": 8990 + }, + { + "epoch": 0.34750376462411675, + "grad_norm": 1.9338914155960083, + "learning_rate": 0.0001768356564603524, + "loss": 0.275, + "step": 9000 + }, + { + "epoch": 0.3478898799181436, + "grad_norm": 3.425877332687378, + "learning_rate": 0.0001768099154407506, + "loss": 0.4621, + "step": 9010 + }, + { + "epoch": 0.34827599521217034, + "grad_norm": 3.6350486278533936, + "learning_rate": 0.00017678417442114884, + "loss": 0.3313, + "step": 9020 + }, + { + "epoch": 0.34866211050619716, + "grad_norm": 3.339202880859375, + "learning_rate": 0.00017675843340154705, + "loss": 0.5958, + "step": 9030 + }, + { + "epoch": 0.34904822580022393, + "grad_norm": 4.148682117462158, + "learning_rate": 0.00017673269238194526, + "loss": 0.2384, + "step": 9040 + }, + { + "epoch": 0.34943434109425076, + "grad_norm": 0.9697182178497314, + "learning_rate": 0.00017670695136234348, + "loss": 0.3119, + "step": 9050 + }, + { + "epoch": 0.3498204563882775, + "grad_norm": 0.53201824426651, + "learning_rate": 0.0001766812103427417, + "loss": 0.4339, + "step": 9060 + }, + { + "epoch": 0.35020657168230435, + "grad_norm": 0.9727185368537903, + "learning_rate": 0.0001766554693231399, + "loss": 0.3289, + "step": 9070 + }, + { + "epoch": 0.3505926869763311, + 
"grad_norm": 4.32904052734375, + "learning_rate": 0.00017662972830353812, + "loss": 0.4673, + "step": 9080 + }, + { + "epoch": 0.35097880227035794, + "grad_norm": 2.511558771133423, + "learning_rate": 0.00017660398728393633, + "loss": 0.2257, + "step": 9090 + }, + { + "epoch": 0.3513649175643847, + "grad_norm": 1.8378714323043823, + "learning_rate": 0.00017657824626433454, + "loss": 0.3977, + "step": 9100 + }, + { + "epoch": 0.3517510328584115, + "grad_norm": 1.3297137022018433, + "learning_rate": 0.00017655250524473276, + "loss": 0.3541, + "step": 9110 + }, + { + "epoch": 0.3521371481524383, + "grad_norm": 3.253089666366577, + "learning_rate": 0.00017652676422513097, + "loss": 0.6326, + "step": 9120 + }, + { + "epoch": 0.3525232634464651, + "grad_norm": 0.9691923260688782, + "learning_rate": 0.00017650102320552918, + "loss": 0.2206, + "step": 9130 + }, + { + "epoch": 0.3529093787404919, + "grad_norm": 1.570204496383667, + "learning_rate": 0.0001764752821859274, + "loss": 0.2769, + "step": 9140 + }, + { + "epoch": 0.3532954940345187, + "grad_norm": 1.9307161569595337, + "learning_rate": 0.0001764495411663256, + "loss": 0.3149, + "step": 9150 + }, + { + "epoch": 0.3536816093285455, + "grad_norm": 2.783297300338745, + "learning_rate": 0.00017642380014672382, + "loss": 0.3912, + "step": 9160 + }, + { + "epoch": 0.3540677246225723, + "grad_norm": 2.193371057510376, + "learning_rate": 0.00017639805912712204, + "loss": 0.3782, + "step": 9170 + }, + { + "epoch": 0.3544538399165991, + "grad_norm": 2.3460335731506348, + "learning_rate": 0.00017637231810752025, + "loss": 0.5051, + "step": 9180 + }, + { + "epoch": 0.3548399552106259, + "grad_norm": 2.4668326377868652, + "learning_rate": 0.00017634657708791846, + "loss": 0.2899, + "step": 9190 + }, + { + "epoch": 0.3552260705046527, + "grad_norm": 2.004683017730713, + "learning_rate": 0.00017632083606831668, + "loss": 0.3137, + "step": 9200 + }, + { + "epoch": 0.3556121857986795, + "grad_norm": 6.333971977233887, + "learning_rate": 0.0001762950950487149, + "loss": 0.5027, + "step": 9210 + }, + { + "epoch": 0.3559983010927063, + "grad_norm": 1.7840352058410645, + "learning_rate": 0.0001762693540291131, + "loss": 0.3988, + "step": 9220 + }, + { + "epoch": 0.35638441638673307, + "grad_norm": 0.9257024526596069, + "learning_rate": 0.00017624361300951132, + "loss": 0.3662, + "step": 9230 + }, + { + "epoch": 0.3567705316807599, + "grad_norm": 2.582887887954712, + "learning_rate": 0.00017621787198990953, + "loss": 0.2863, + "step": 9240 + }, + { + "epoch": 0.35715664697478666, + "grad_norm": 3.119943380355835, + "learning_rate": 0.00017619213097030774, + "loss": 0.4041, + "step": 9250 + }, + { + "epoch": 0.3575427622688135, + "grad_norm": 2.2561371326446533, + "learning_rate": 0.00017616638995070596, + "loss": 0.3969, + "step": 9260 + }, + { + "epoch": 0.35792887756284025, + "grad_norm": 2.104891538619995, + "learning_rate": 0.00017614064893110417, + "loss": 0.3216, + "step": 9270 + }, + { + "epoch": 0.3583149928568671, + "grad_norm": 1.6922805309295654, + "learning_rate": 0.00017611490791150238, + "loss": 0.3828, + "step": 9280 + }, + { + "epoch": 0.35870110815089384, + "grad_norm": 1.0928469896316528, + "learning_rate": 0.0001760891668919006, + "loss": 0.3225, + "step": 9290 + }, + { + "epoch": 0.35908722344492067, + "grad_norm": 2.4089863300323486, + "learning_rate": 0.0001760634258722988, + "loss": 0.4143, + "step": 9300 + }, + { + "epoch": 0.35947333873894743, + "grad_norm": 0.5562119483947754, + "learning_rate": 0.00017603768485269702, + "loss": 
0.4597, + "step": 9310 + }, + { + "epoch": 0.35985945403297426, + "grad_norm": 1.3904486894607544, + "learning_rate": 0.00017601194383309524, + "loss": 0.4462, + "step": 9320 + }, + { + "epoch": 0.360245569327001, + "grad_norm": 2.1393306255340576, + "learning_rate": 0.00017598620281349345, + "loss": 0.2613, + "step": 9330 + }, + { + "epoch": 0.36063168462102785, + "grad_norm": 1.3657029867172241, + "learning_rate": 0.00017596046179389166, + "loss": 0.4968, + "step": 9340 + }, + { + "epoch": 0.3610177999150546, + "grad_norm": 2.424880027770996, + "learning_rate": 0.00017593472077428988, + "loss": 0.5982, + "step": 9350 + }, + { + "epoch": 0.36140391520908144, + "grad_norm": 6.178807735443115, + "learning_rate": 0.0001759089797546881, + "loss": 0.5355, + "step": 9360 + }, + { + "epoch": 0.3617900305031082, + "grad_norm": 1.5572419166564941, + "learning_rate": 0.0001758832387350863, + "loss": 0.4435, + "step": 9370 + }, + { + "epoch": 0.36217614579713503, + "grad_norm": 0.46649104356765747, + "learning_rate": 0.00017585749771548452, + "loss": 0.352, + "step": 9380 + }, + { + "epoch": 0.3625622610911618, + "grad_norm": 1.9611142873764038, + "learning_rate": 0.00017583175669588276, + "loss": 0.2684, + "step": 9390 + }, + { + "epoch": 0.3629483763851886, + "grad_norm": 1.7648595571517944, + "learning_rate": 0.00017580601567628094, + "loss": 0.3186, + "step": 9400 + }, + { + "epoch": 0.3633344916792154, + "grad_norm": 1.7970843315124512, + "learning_rate": 0.00017578027465667916, + "loss": 0.5339, + "step": 9410 + }, + { + "epoch": 0.3637206069732422, + "grad_norm": 3.084897994995117, + "learning_rate": 0.00017575453363707737, + "loss": 0.5143, + "step": 9420 + }, + { + "epoch": 0.36410672226726903, + "grad_norm": 1.440626621246338, + "learning_rate": 0.00017572879261747558, + "loss": 0.4067, + "step": 9430 + }, + { + "epoch": 0.3644928375612958, + "grad_norm": 0.44918450713157654, + "learning_rate": 0.0001757030515978738, + "loss": 0.2306, + "step": 9440 + }, + { + "epoch": 0.3648789528553226, + "grad_norm": 2.617272138595581, + "learning_rate": 0.000175677310578272, + "loss": 0.3166, + "step": 9450 + }, + { + "epoch": 0.3652650681493494, + "grad_norm": 2.575073719024658, + "learning_rate": 0.00017565156955867025, + "loss": 0.6645, + "step": 9460 + }, + { + "epoch": 0.3656511834433762, + "grad_norm": 0.9430664777755737, + "learning_rate": 0.00017562582853906844, + "loss": 0.2753, + "step": 9470 + }, + { + "epoch": 0.366037298737403, + "grad_norm": 1.9400445222854614, + "learning_rate": 0.00017560008751946665, + "loss": 0.4689, + "step": 9480 + }, + { + "epoch": 0.3664234140314298, + "grad_norm": 4.0443220138549805, + "learning_rate": 0.00017557434649986486, + "loss": 0.5373, + "step": 9490 + }, + { + "epoch": 0.3668095293254566, + "grad_norm": 3.4999184608459473, + "learning_rate": 0.00017554860548026308, + "loss": 0.3412, + "step": 9500 + }, + { + "epoch": 0.3671956446194834, + "grad_norm": 2.2023515701293945, + "learning_rate": 0.0001755228644606613, + "loss": 0.3385, + "step": 9510 + }, + { + "epoch": 0.36758175991351016, + "grad_norm": 1.213641881942749, + "learning_rate": 0.0001754971234410595, + "loss": 0.4785, + "step": 9520 + }, + { + "epoch": 0.367967875207537, + "grad_norm": 0.4178420603275299, + "learning_rate": 0.00017547138242145774, + "loss": 0.2605, + "step": 9530 + }, + { + "epoch": 0.36835399050156376, + "grad_norm": 2.676564931869507, + "learning_rate": 0.00017544564140185593, + "loss": 0.5297, + "step": 9540 + }, + { + "epoch": 0.3687401057955906, + "grad_norm": 
0.8604353070259094, + "learning_rate": 0.00017541990038225414, + "loss": 0.3983, + "step": 9550 + }, + { + "epoch": 0.36912622108961735, + "grad_norm": 1.298893690109253, + "learning_rate": 0.00017539415936265236, + "loss": 0.3229, + "step": 9560 + }, + { + "epoch": 0.36951233638364417, + "grad_norm": 4.109025478363037, + "learning_rate": 0.00017536841834305057, + "loss": 0.519, + "step": 9570 + }, + { + "epoch": 0.36989845167767094, + "grad_norm": 3.440915584564209, + "learning_rate": 0.0001753426773234488, + "loss": 0.4061, + "step": 9580 + }, + { + "epoch": 0.37028456697169776, + "grad_norm": 0.1484186202287674, + "learning_rate": 0.000175316936303847, + "loss": 0.3443, + "step": 9590 + }, + { + "epoch": 0.3706706822657245, + "grad_norm": 3.114328145980835, + "learning_rate": 0.00017529119528424524, + "loss": 0.2451, + "step": 9600 + }, + { + "epoch": 0.37105679755975135, + "grad_norm": 1.8218796253204346, + "learning_rate": 0.00017526545426464342, + "loss": 0.2511, + "step": 9610 + }, + { + "epoch": 0.3714429128537781, + "grad_norm": 1.0732795000076294, + "learning_rate": 0.00017523971324504164, + "loss": 0.1581, + "step": 9620 + }, + { + "epoch": 0.37182902814780494, + "grad_norm": 1.0567959547042847, + "learning_rate": 0.00017521397222543985, + "loss": 0.1924, + "step": 9630 + }, + { + "epoch": 0.3722151434418317, + "grad_norm": 0.3467637896537781, + "learning_rate": 0.00017518823120583806, + "loss": 0.3571, + "step": 9640 + }, + { + "epoch": 0.37260125873585853, + "grad_norm": 2.6293838024139404, + "learning_rate": 0.0001751624901862363, + "loss": 0.3282, + "step": 9650 + }, + { + "epoch": 0.3729873740298853, + "grad_norm": 1.159696102142334, + "learning_rate": 0.0001751367491666345, + "loss": 0.2636, + "step": 9660 + }, + { + "epoch": 0.3733734893239121, + "grad_norm": 0.6884826421737671, + "learning_rate": 0.00017511100814703273, + "loss": 0.2842, + "step": 9670 + }, + { + "epoch": 0.3737596046179389, + "grad_norm": 3.789825201034546, + "learning_rate": 0.00017508526712743091, + "loss": 0.599, + "step": 9680 + }, + { + "epoch": 0.3741457199119657, + "grad_norm": 1.0705493688583374, + "learning_rate": 0.00017505952610782913, + "loss": 0.1746, + "step": 9690 + }, + { + "epoch": 0.37453183520599254, + "grad_norm": 1.8735803365707397, + "learning_rate": 0.00017503378508822734, + "loss": 0.3259, + "step": 9700 + }, + { + "epoch": 0.3749179505000193, + "grad_norm": 1.2987112998962402, + "learning_rate": 0.00017500804406862555, + "loss": 0.5738, + "step": 9710 + }, + { + "epoch": 0.3753040657940461, + "grad_norm": 1.5362507104873657, + "learning_rate": 0.0001749823030490238, + "loss": 0.3815, + "step": 9720 + }, + { + "epoch": 0.3756901810880729, + "grad_norm": 0.1640123724937439, + "learning_rate": 0.00017495656202942198, + "loss": 0.3672, + "step": 9730 + }, + { + "epoch": 0.3760762963820997, + "grad_norm": 0.6714594960212708, + "learning_rate": 0.00017493082100982022, + "loss": 0.2849, + "step": 9740 + }, + { + "epoch": 0.3764624116761265, + "grad_norm": 4.330246448516846, + "learning_rate": 0.0001749050799902184, + "loss": 0.4023, + "step": 9750 + }, + { + "epoch": 0.3768485269701533, + "grad_norm": 0.8616659641265869, + "learning_rate": 0.00017487933897061665, + "loss": 0.4434, + "step": 9760 + }, + { + "epoch": 0.3772346422641801, + "grad_norm": 2.6581578254699707, + "learning_rate": 0.00017485359795101486, + "loss": 0.4854, + "step": 9770 + }, + { + "epoch": 0.3776207575582069, + "grad_norm": 1.8269850015640259, + "learning_rate": 0.00017482785693141305, + "loss": 0.6033, + 
"step": 9780 + }, + { + "epoch": 0.37800687285223367, + "grad_norm": 2.256073236465454, + "learning_rate": 0.0001748021159118113, + "loss": 0.5317, + "step": 9790 + }, + { + "epoch": 0.3783929881462605, + "grad_norm": 0.8793076872825623, + "learning_rate": 0.00017477637489220947, + "loss": 0.3883, + "step": 9800 + }, + { + "epoch": 0.37877910344028726, + "grad_norm": 1.71831214427948, + "learning_rate": 0.00017475063387260772, + "loss": 0.2473, + "step": 9810 + }, + { + "epoch": 0.3791652187343141, + "grad_norm": 3.4802069664001465, + "learning_rate": 0.0001747248928530059, + "loss": 0.4847, + "step": 9820 + }, + { + "epoch": 0.37955133402834085, + "grad_norm": 5.419053077697754, + "learning_rate": 0.00017469915183340414, + "loss": 0.3668, + "step": 9830 + }, + { + "epoch": 0.37993744932236767, + "grad_norm": 1.567060112953186, + "learning_rate": 0.00017467341081380236, + "loss": 0.3342, + "step": 9840 + }, + { + "epoch": 0.38032356461639444, + "grad_norm": 3.0100274085998535, + "learning_rate": 0.00017464766979420054, + "loss": 0.476, + "step": 9850 + }, + { + "epoch": 0.38070967991042126, + "grad_norm": 0.7659344673156738, + "learning_rate": 0.00017462192877459878, + "loss": 0.2608, + "step": 9860 + }, + { + "epoch": 0.38109579520444803, + "grad_norm": 3.9540984630584717, + "learning_rate": 0.00017459618775499697, + "loss": 0.763, + "step": 9870 + }, + { + "epoch": 0.38148191049847485, + "grad_norm": 0.8768689036369324, + "learning_rate": 0.0001745704467353952, + "loss": 0.3365, + "step": 9880 + }, + { + "epoch": 0.3818680257925016, + "grad_norm": 0.9985928535461426, + "learning_rate": 0.00017454470571579342, + "loss": 0.3116, + "step": 9890 + }, + { + "epoch": 0.38225414108652844, + "grad_norm": 2.0326671600341797, + "learning_rate": 0.00017451896469619163, + "loss": 0.289, + "step": 9900 + }, + { + "epoch": 0.3826402563805552, + "grad_norm": 3.2696290016174316, + "learning_rate": 0.00017449322367658985, + "loss": 0.4097, + "step": 9910 + }, + { + "epoch": 0.38302637167458203, + "grad_norm": 3.048860788345337, + "learning_rate": 0.00017446748265698803, + "loss": 0.5181, + "step": 9920 + }, + { + "epoch": 0.3834124869686088, + "grad_norm": 1.7899913787841797, + "learning_rate": 0.00017444174163738627, + "loss": 0.2166, + "step": 9930 + }, + { + "epoch": 0.3837986022626356, + "grad_norm": 3.6762959957122803, + "learning_rate": 0.00017441600061778446, + "loss": 0.4971, + "step": 9940 + }, + { + "epoch": 0.3841847175566624, + "grad_norm": 0.9108519554138184, + "learning_rate": 0.0001743902595981827, + "loss": 0.4974, + "step": 9950 + }, + { + "epoch": 0.3845708328506892, + "grad_norm": 4.062527656555176, + "learning_rate": 0.00017436451857858091, + "loss": 0.4448, + "step": 9960 + }, + { + "epoch": 0.38495694814471604, + "grad_norm": 3.230902671813965, + "learning_rate": 0.00017433877755897913, + "loss": 0.2977, + "step": 9970 + }, + { + "epoch": 0.3853430634387428, + "grad_norm": 3.8190758228302, + "learning_rate": 0.00017431303653937734, + "loss": 0.4887, + "step": 9980 + }, + { + "epoch": 0.38572917873276963, + "grad_norm": 0.9079695343971252, + "learning_rate": 0.00017428729551977553, + "loss": 0.271, + "step": 9990 + }, + { + "epoch": 0.3861152940267964, + "grad_norm": 3.3730807304382324, + "learning_rate": 0.00017426155450017377, + "loss": 0.3782, + "step": 10000 + }, + { + "epoch": 0.3865014093208232, + "grad_norm": 1.07533860206604, + "learning_rate": 0.00017423581348057195, + "loss": 0.3905, + "step": 10010 + }, + { + "epoch": 0.38688752461485, + "grad_norm": 1.3856415748596191, + 
"learning_rate": 0.0001742100724609702, + "loss": 0.3757, + "step": 10020 + }, + { + "epoch": 0.3872736399088768, + "grad_norm": 5.751671314239502, + "learning_rate": 0.0001741843314413684, + "loss": 0.6657, + "step": 10030 + }, + { + "epoch": 0.3876597552029036, + "grad_norm": 0.6837680339813232, + "learning_rate": 0.00017415859042176662, + "loss": 0.2318, + "step": 10040 + }, + { + "epoch": 0.3880458704969304, + "grad_norm": 2.770787239074707, + "learning_rate": 0.00017413284940216483, + "loss": 0.3706, + "step": 10050 + }, + { + "epoch": 0.38843198579095717, + "grad_norm": 2.3058855533599854, + "learning_rate": 0.00017410710838256302, + "loss": 0.1641, + "step": 10060 + }, + { + "epoch": 0.388818101084984, + "grad_norm": 1.894718885421753, + "learning_rate": 0.00017408136736296126, + "loss": 0.4752, + "step": 10070 + }, + { + "epoch": 0.38920421637901076, + "grad_norm": 1.8346868753433228, + "learning_rate": 0.00017405562634335947, + "loss": 0.5007, + "step": 10080 + }, + { + "epoch": 0.3895903316730376, + "grad_norm": 5.277680397033691, + "learning_rate": 0.0001740298853237577, + "loss": 0.4399, + "step": 10090 + }, + { + "epoch": 0.38997644696706435, + "grad_norm": 1.306093692779541, + "learning_rate": 0.0001740041443041559, + "loss": 0.371, + "step": 10100 + }, + { + "epoch": 0.3903625622610912, + "grad_norm": 3.0306456089019775, + "learning_rate": 0.00017397840328455411, + "loss": 0.2515, + "step": 10110 + }, + { + "epoch": 0.39074867755511794, + "grad_norm": 0.7951543927192688, + "learning_rate": 0.00017395266226495233, + "loss": 0.3775, + "step": 10120 + }, + { + "epoch": 0.39113479284914476, + "grad_norm": 5.185150146484375, + "learning_rate": 0.00017392692124535051, + "loss": 0.3591, + "step": 10130 + }, + { + "epoch": 0.39152090814317153, + "grad_norm": 1.1718593835830688, + "learning_rate": 0.00017390118022574875, + "loss": 0.5484, + "step": 10140 + }, + { + "epoch": 0.39190702343719835, + "grad_norm": 1.6352128982543945, + "learning_rate": 0.00017387543920614697, + "loss": 0.2817, + "step": 10150 + }, + { + "epoch": 0.3922931387312251, + "grad_norm": 2.4863786697387695, + "learning_rate": 0.00017384969818654518, + "loss": 0.4027, + "step": 10160 + }, + { + "epoch": 0.39267925402525194, + "grad_norm": 2.069805383682251, + "learning_rate": 0.0001738239571669434, + "loss": 0.3559, + "step": 10170 + }, + { + "epoch": 0.3930653693192787, + "grad_norm": 1.671980619430542, + "learning_rate": 0.0001737982161473416, + "loss": 0.4405, + "step": 10180 + }, + { + "epoch": 0.39345148461330554, + "grad_norm": 4.298947334289551, + "learning_rate": 0.00017377247512773982, + "loss": 0.3005, + "step": 10190 + }, + { + "epoch": 0.3938375999073323, + "grad_norm": 0.4142851233482361, + "learning_rate": 0.000173746734108138, + "loss": 0.4248, + "step": 10200 + }, + { + "epoch": 0.3942237152013591, + "grad_norm": 3.5962865352630615, + "learning_rate": 0.00017372099308853625, + "loss": 0.27, + "step": 10210 + }, + { + "epoch": 0.39460983049538595, + "grad_norm": 2.20154070854187, + "learning_rate": 0.00017369525206893446, + "loss": 0.2858, + "step": 10220 + }, + { + "epoch": 0.3949959457894127, + "grad_norm": 0.2400553673505783, + "learning_rate": 0.00017366951104933267, + "loss": 0.2806, + "step": 10230 + }, + { + "epoch": 0.39538206108343954, + "grad_norm": 1.817741870880127, + "learning_rate": 0.0001736437700297309, + "loss": 0.3647, + "step": 10240 + }, + { + "epoch": 0.3957681763774663, + "grad_norm": 4.890044689178467, + "learning_rate": 0.0001736180290101291, + "loss": 0.4435, + "step": 10250 
+ }, + { + "epoch": 0.39615429167149313, + "grad_norm": 0.3407624065876007, + "learning_rate": 0.00017359228799052731, + "loss": 0.4857, + "step": 10260 + }, + { + "epoch": 0.3965404069655199, + "grad_norm": 2.4883463382720947, + "learning_rate": 0.00017356654697092553, + "loss": 0.2667, + "step": 10270 + }, + { + "epoch": 0.3969265222595467, + "grad_norm": 2.343823194503784, + "learning_rate": 0.00017354080595132374, + "loss": 0.3711, + "step": 10280 + }, + { + "epoch": 0.3973126375535735, + "grad_norm": 0.2056214064359665, + "learning_rate": 0.00017351506493172195, + "loss": 0.2695, + "step": 10290 + }, + { + "epoch": 0.3976987528476003, + "grad_norm": 0.20321065187454224, + "learning_rate": 0.00017348932391212017, + "loss": 0.3079, + "step": 10300 + }, + { + "epoch": 0.3980848681416271, + "grad_norm": 0.7993821501731873, + "learning_rate": 0.00017346358289251838, + "loss": 0.3599, + "step": 10310 + }, + { + "epoch": 0.3984709834356539, + "grad_norm": 2.0987348556518555, + "learning_rate": 0.0001734378418729166, + "loss": 0.3259, + "step": 10320 + }, + { + "epoch": 0.39885709872968067, + "grad_norm": 2.474246025085449, + "learning_rate": 0.0001734121008533148, + "loss": 0.3398, + "step": 10330 + }, + { + "epoch": 0.3992432140237075, + "grad_norm": 2.341064214706421, + "learning_rate": 0.00017338635983371302, + "loss": 0.5264, + "step": 10340 + }, + { + "epoch": 0.39962932931773426, + "grad_norm": 1.587437629699707, + "learning_rate": 0.00017336061881411123, + "loss": 0.4228, + "step": 10350 + }, + { + "epoch": 0.4000154446117611, + "grad_norm": 0.6692029237747192, + "learning_rate": 0.00017333487779450945, + "loss": 0.3576, + "step": 10360 + }, + { + "epoch": 0.40040155990578785, + "grad_norm": 2.088212251663208, + "learning_rate": 0.00017330913677490766, + "loss": 0.3096, + "step": 10370 + }, + { + "epoch": 0.4007876751998147, + "grad_norm": 1.5051954984664917, + "learning_rate": 0.00017328339575530587, + "loss": 0.3753, + "step": 10380 + }, + { + "epoch": 0.40117379049384144, + "grad_norm": 2.02595591545105, + "learning_rate": 0.0001732576547357041, + "loss": 0.3339, + "step": 10390 + }, + { + "epoch": 0.40155990578786827, + "grad_norm": 1.3062909841537476, + "learning_rate": 0.0001732319137161023, + "loss": 0.4301, + "step": 10400 + }, + { + "epoch": 0.40194602108189503, + "grad_norm": 2.5890421867370605, + "learning_rate": 0.00017320617269650051, + "loss": 0.3047, + "step": 10410 + }, + { + "epoch": 0.40233213637592186, + "grad_norm": 1.5994844436645508, + "learning_rate": 0.00017318043167689873, + "loss": 0.4158, + "step": 10420 + }, + { + "epoch": 0.4027182516699486, + "grad_norm": 0.5470211505889893, + "learning_rate": 0.00017315469065729694, + "loss": 0.4513, + "step": 10430 + }, + { + "epoch": 0.40310436696397545, + "grad_norm": 2.216935634613037, + "learning_rate": 0.00017312894963769515, + "loss": 0.5123, + "step": 10440 + }, + { + "epoch": 0.4034904822580022, + "grad_norm": 2.354724645614624, + "learning_rate": 0.00017310320861809337, + "loss": 0.2804, + "step": 10450 + }, + { + "epoch": 0.40387659755202904, + "grad_norm": 4.514159202575684, + "learning_rate": 0.00017307746759849158, + "loss": 0.3317, + "step": 10460 + }, + { + "epoch": 0.4042627128460558, + "grad_norm": 0.9874318242073059, + "learning_rate": 0.0001730517265788898, + "loss": 0.1948, + "step": 10470 + }, + { + "epoch": 0.40464882814008263, + "grad_norm": 2.0725696086883545, + "learning_rate": 0.000173025985559288, + "loss": 0.3627, + "step": 10480 + }, + { + "epoch": 0.40503494343410945, + "grad_norm": 
2.4061577320098877, + "learning_rate": 0.00017300024453968622, + "loss": 0.3074, + "step": 10490 + }, + { + "epoch": 0.4054210587281362, + "grad_norm": 1.3369660377502441, + "learning_rate": 0.00017297450352008443, + "loss": 0.533, + "step": 10500 + }, + { + "epoch": 0.40580717402216304, + "grad_norm": 1.2730306386947632, + "learning_rate": 0.00017294876250048265, + "loss": 0.4688, + "step": 10510 + }, + { + "epoch": 0.4061932893161898, + "grad_norm": 0.6753021478652954, + "learning_rate": 0.00017292302148088086, + "loss": 0.4427, + "step": 10520 + }, + { + "epoch": 0.40657940461021663, + "grad_norm": 1.7279945611953735, + "learning_rate": 0.0001728972804612791, + "loss": 0.4921, + "step": 10530 + }, + { + "epoch": 0.4069655199042434, + "grad_norm": 0.9288708567619324, + "learning_rate": 0.0001728715394416773, + "loss": 0.363, + "step": 10540 + }, + { + "epoch": 0.4073516351982702, + "grad_norm": 0.5325084924697876, + "learning_rate": 0.0001728457984220755, + "loss": 0.4095, + "step": 10550 + }, + { + "epoch": 0.407737750492297, + "grad_norm": 1.2030489444732666, + "learning_rate": 0.0001728200574024737, + "loss": 0.3499, + "step": 10560 + }, + { + "epoch": 0.4081238657863238, + "grad_norm": 3.8157269954681396, + "learning_rate": 0.00017279431638287193, + "loss": 0.1622, + "step": 10570 + }, + { + "epoch": 0.4085099810803506, + "grad_norm": 0.6373336911201477, + "learning_rate": 0.00017276857536327017, + "loss": 0.4657, + "step": 10580 + }, + { + "epoch": 0.4088960963743774, + "grad_norm": 2.2850074768066406, + "learning_rate": 0.00017274283434366835, + "loss": 0.3585, + "step": 10590 + }, + { + "epoch": 0.40928221166840417, + "grad_norm": 0.8831659555435181, + "learning_rate": 0.0001727170933240666, + "loss": 0.293, + "step": 10600 + }, + { + "epoch": 0.409668326962431, + "grad_norm": 5.1165995597839355, + "learning_rate": 0.00017269135230446478, + "loss": 0.6539, + "step": 10610 + }, + { + "epoch": 0.41005444225645776, + "grad_norm": 4.901204586029053, + "learning_rate": 0.000172665611284863, + "loss": 0.4628, + "step": 10620 + }, + { + "epoch": 0.4104405575504846, + "grad_norm": 2.1492419242858887, + "learning_rate": 0.0001726398702652612, + "loss": 0.277, + "step": 10630 + }, + { + "epoch": 0.41082667284451135, + "grad_norm": 3.56510853767395, + "learning_rate": 0.00017261412924565942, + "loss": 0.4696, + "step": 10640 + }, + { + "epoch": 0.4112127881385382, + "grad_norm": 2.054769992828369, + "learning_rate": 0.00017258838822605766, + "loss": 0.4093, + "step": 10650 + }, + { + "epoch": 0.41159890343256494, + "grad_norm": 2.133474826812744, + "learning_rate": 0.00017256264720645585, + "loss": 0.3604, + "step": 10660 + }, + { + "epoch": 0.41198501872659177, + "grad_norm": 2.5062367916107178, + "learning_rate": 0.0001725369061868541, + "loss": 0.3916, + "step": 10670 + }, + { + "epoch": 0.41237113402061853, + "grad_norm": 0.431570827960968, + "learning_rate": 0.00017251116516725227, + "loss": 0.4048, + "step": 10680 + }, + { + "epoch": 0.41275724931464536, + "grad_norm": 1.2092580795288086, + "learning_rate": 0.0001724854241476505, + "loss": 0.602, + "step": 10690 + }, + { + "epoch": 0.4131433646086721, + "grad_norm": 2.712398052215576, + "learning_rate": 0.00017245968312804873, + "loss": 0.4172, + "step": 10700 + }, + { + "epoch": 0.41352947990269895, + "grad_norm": 3.914670467376709, + "learning_rate": 0.0001724339421084469, + "loss": 0.3843, + "step": 10710 + }, + { + "epoch": 0.4139155951967257, + "grad_norm": 1.7062132358551025, + "learning_rate": 0.00017240820108884515, + "loss": 
0.343, + "step": 10720 + }, + { + "epoch": 0.41430171049075254, + "grad_norm": 0.5837095379829407, + "learning_rate": 0.00017238246006924334, + "loss": 0.3872, + "step": 10730 + }, + { + "epoch": 0.41468782578477936, + "grad_norm": 1.098900556564331, + "learning_rate": 0.00017235671904964158, + "loss": 0.2062, + "step": 10740 + }, + { + "epoch": 0.41507394107880613, + "grad_norm": 1.2533438205718994, + "learning_rate": 0.00017233097803003977, + "loss": 0.141, + "step": 10750 + }, + { + "epoch": 0.41546005637283295, + "grad_norm": 0.8688085079193115, + "learning_rate": 0.00017230523701043798, + "loss": 0.3686, + "step": 10760 + }, + { + "epoch": 0.4158461716668597, + "grad_norm": 1.868402361869812, + "learning_rate": 0.00017227949599083622, + "loss": 0.449, + "step": 10770 + }, + { + "epoch": 0.41623228696088654, + "grad_norm": 0.7168850898742676, + "learning_rate": 0.0001722537549712344, + "loss": 0.2317, + "step": 10780 + }, + { + "epoch": 0.4166184022549133, + "grad_norm": 3.1062309741973877, + "learning_rate": 0.00017222801395163265, + "loss": 0.4655, + "step": 10790 + }, + { + "epoch": 0.41700451754894013, + "grad_norm": 2.7296605110168457, + "learning_rate": 0.00017220227293203083, + "loss": 0.3934, + "step": 10800 + }, + { + "epoch": 0.4173906328429669, + "grad_norm": 2.3148224353790283, + "learning_rate": 0.00017217653191242907, + "loss": 0.2367, + "step": 10810 + }, + { + "epoch": 0.4177767481369937, + "grad_norm": 0.7049677968025208, + "learning_rate": 0.00017215079089282726, + "loss": 0.3157, + "step": 10820 + }, + { + "epoch": 0.4181628634310205, + "grad_norm": 3.3960344791412354, + "learning_rate": 0.00017212504987322547, + "loss": 0.4945, + "step": 10830 + }, + { + "epoch": 0.4185489787250473, + "grad_norm": 2.606316566467285, + "learning_rate": 0.0001720993088536237, + "loss": 0.4056, + "step": 10840 + }, + { + "epoch": 0.4189350940190741, + "grad_norm": 1.7469319105148315, + "learning_rate": 0.0001720735678340219, + "loss": 0.4176, + "step": 10850 + }, + { + "epoch": 0.4193212093131009, + "grad_norm": 0.8538552522659302, + "learning_rate": 0.00017204782681442014, + "loss": 0.3025, + "step": 10860 + }, + { + "epoch": 0.4197073246071277, + "grad_norm": 1.9576159715652466, + "learning_rate": 0.00017202208579481833, + "loss": 0.5626, + "step": 10870 + }, + { + "epoch": 0.4200934399011545, + "grad_norm": 0.8435356616973877, + "learning_rate": 0.00017199634477521657, + "loss": 0.2397, + "step": 10880 + }, + { + "epoch": 0.42047955519518126, + "grad_norm": 1.3026552200317383, + "learning_rate": 0.00017197060375561478, + "loss": 0.4793, + "step": 10890 + }, + { + "epoch": 0.4208656704892081, + "grad_norm": 1.8935116529464722, + "learning_rate": 0.00017194486273601297, + "loss": 0.2459, + "step": 10900 + }, + { + "epoch": 0.42125178578323486, + "grad_norm": 0.7297415137290955, + "learning_rate": 0.0001719191217164112, + "loss": 0.4115, + "step": 10910 + }, + { + "epoch": 0.4216379010772617, + "grad_norm": 2.730445146560669, + "learning_rate": 0.0001718933806968094, + "loss": 0.3467, + "step": 10920 + }, + { + "epoch": 0.42202401637128845, + "grad_norm": 1.5462249517440796, + "learning_rate": 0.00017186763967720763, + "loss": 0.2319, + "step": 10930 + }, + { + "epoch": 0.42241013166531527, + "grad_norm": 2.173388957977295, + "learning_rate": 0.00017184189865760582, + "loss": 0.3664, + "step": 10940 + }, + { + "epoch": 0.42279624695934204, + "grad_norm": 0.9086957573890686, + "learning_rate": 0.00017181615763800406, + "loss": 0.3928, + "step": 10950 + }, + { + "epoch": 
0.42318236225336886, + "grad_norm": 1.6344754695892334, + "learning_rate": 0.00017179041661840227, + "loss": 0.32, + "step": 10960 + }, + { + "epoch": 0.4235684775473956, + "grad_norm": 3.7620887756347656, + "learning_rate": 0.00017176467559880049, + "loss": 0.3998, + "step": 10970 + }, + { + "epoch": 0.42395459284142245, + "grad_norm": 2.3914058208465576, + "learning_rate": 0.0001717389345791987, + "loss": 0.3003, + "step": 10980 + }, + { + "epoch": 0.4243407081354492, + "grad_norm": 1.1183325052261353, + "learning_rate": 0.00017171319355959689, + "loss": 0.2408, + "step": 10990 + }, + { + "epoch": 0.42472682342947604, + "grad_norm": 1.5570834875106812, + "learning_rate": 0.00017168745253999513, + "loss": 0.2638, + "step": 11000 + }, + { + "epoch": 0.42511293872350286, + "grad_norm": 1.4825866222381592, + "learning_rate": 0.0001716617115203933, + "loss": 0.2774, + "step": 11010 + }, + { + "epoch": 0.42549905401752963, + "grad_norm": 1.5424071550369263, + "learning_rate": 0.00017163597050079155, + "loss": 0.3636, + "step": 11020 + }, + { + "epoch": 0.42588516931155646, + "grad_norm": 3.9182989597320557, + "learning_rate": 0.00017161022948118977, + "loss": 0.5319, + "step": 11030 + }, + { + "epoch": 0.4262712846055832, + "grad_norm": 3.7870359420776367, + "learning_rate": 0.00017158448846158798, + "loss": 0.316, + "step": 11040 + }, + { + "epoch": 0.42665739989961005, + "grad_norm": 3.6943869590759277, + "learning_rate": 0.0001715587474419862, + "loss": 0.4336, + "step": 11050 + }, + { + "epoch": 0.4270435151936368, + "grad_norm": 1.1681898832321167, + "learning_rate": 0.00017153300642238438, + "loss": 0.328, + "step": 11060 + }, + { + "epoch": 0.42742963048766364, + "grad_norm": 3.6428277492523193, + "learning_rate": 0.00017150726540278262, + "loss": 0.2609, + "step": 11070 + }, + { + "epoch": 0.4278157457816904, + "grad_norm": 0.8946434259414673, + "learning_rate": 0.00017148152438318083, + "loss": 0.5445, + "step": 11080 + }, + { + "epoch": 0.4282018610757172, + "grad_norm": 1.8038333654403687, + "learning_rate": 0.00017145578336357905, + "loss": 0.3548, + "step": 11090 + }, + { + "epoch": 0.428587976369744, + "grad_norm": 2.3430778980255127, + "learning_rate": 0.00017143004234397726, + "loss": 0.4831, + "step": 11100 + }, + { + "epoch": 0.4289740916637708, + "grad_norm": 1.3243132829666138, + "learning_rate": 0.00017140430132437547, + "loss": 0.5759, + "step": 11110 + }, + { + "epoch": 0.4293602069577976, + "grad_norm": 1.2575668096542358, + "learning_rate": 0.00017137856030477369, + "loss": 0.3289, + "step": 11120 + }, + { + "epoch": 0.4297463222518244, + "grad_norm": 1.5581884384155273, + "learning_rate": 0.00017135281928517187, + "loss": 0.309, + "step": 11130 + }, + { + "epoch": 0.4301324375458512, + "grad_norm": 2.736063241958618, + "learning_rate": 0.0001713270782655701, + "loss": 0.3964, + "step": 11140 + }, + { + "epoch": 0.430518552839878, + "grad_norm": 1.3930561542510986, + "learning_rate": 0.00017130133724596833, + "loss": 0.2277, + "step": 11150 + }, + { + "epoch": 0.43090466813390477, + "grad_norm": 1.7932826280593872, + "learning_rate": 0.00017127559622636654, + "loss": 0.2907, + "step": 11160 + }, + { + "epoch": 0.4312907834279316, + "grad_norm": 1.7017295360565186, + "learning_rate": 0.00017124985520676475, + "loss": 0.2986, + "step": 11170 + }, + { + "epoch": 0.43167689872195836, + "grad_norm": 1.3543587923049927, + "learning_rate": 0.00017122411418716297, + "loss": 0.4872, + "step": 11180 + }, + { + "epoch": 0.4320630140159852, + "grad_norm": 2.3927829265594482, + 
"learning_rate": 0.00017119837316756118, + "loss": 0.5281, + "step": 11190 + }, + { + "epoch": 0.43244912931001195, + "grad_norm": 2.916257619857788, + "learning_rate": 0.0001711726321479594, + "loss": 0.5364, + "step": 11200 + }, + { + "epoch": 0.43283524460403877, + "grad_norm": 4.717250823974609, + "learning_rate": 0.0001711468911283576, + "loss": 0.3744, + "step": 11210 + }, + { + "epoch": 0.43322135989806554, + "grad_norm": 1.806577444076538, + "learning_rate": 0.00017112115010875582, + "loss": 0.3377, + "step": 11220 + }, + { + "epoch": 0.43360747519209236, + "grad_norm": 3.186603546142578, + "learning_rate": 0.00017109540908915403, + "loss": 0.3421, + "step": 11230 + }, + { + "epoch": 0.43399359048611913, + "grad_norm": 5.388319969177246, + "learning_rate": 0.00017106966806955225, + "loss": 0.3589, + "step": 11240 + }, + { + "epoch": 0.43437970578014595, + "grad_norm": 1.368312954902649, + "learning_rate": 0.00017104392704995046, + "loss": 0.2677, + "step": 11250 + }, + { + "epoch": 0.4347658210741727, + "grad_norm": 0.9010117053985596, + "learning_rate": 0.00017101818603034867, + "loss": 0.3411, + "step": 11260 + }, + { + "epoch": 0.43515193636819954, + "grad_norm": 0.46370139718055725, + "learning_rate": 0.00017099244501074689, + "loss": 0.3531, + "step": 11270 + }, + { + "epoch": 0.43553805166222637, + "grad_norm": 2.778857469558716, + "learning_rate": 0.0001709667039911451, + "loss": 0.3953, + "step": 11280 + }, + { + "epoch": 0.43592416695625313, + "grad_norm": 0.45829036831855774, + "learning_rate": 0.0001709409629715433, + "loss": 0.3117, + "step": 11290 + }, + { + "epoch": 0.43631028225027996, + "grad_norm": 2.2053589820861816, + "learning_rate": 0.00017091522195194153, + "loss": 0.5104, + "step": 11300 + }, + { + "epoch": 0.4366963975443067, + "grad_norm": 5.166933059692383, + "learning_rate": 0.00017088948093233974, + "loss": 0.6913, + "step": 11310 + }, + { + "epoch": 0.43708251283833355, + "grad_norm": 1.5593189001083374, + "learning_rate": 0.00017086373991273795, + "loss": 0.305, + "step": 11320 + }, + { + "epoch": 0.4374686281323603, + "grad_norm": 2.01481556892395, + "learning_rate": 0.00017083799889313617, + "loss": 0.4576, + "step": 11330 + }, + { + "epoch": 0.43785474342638714, + "grad_norm": 2.200463056564331, + "learning_rate": 0.00017081225787353438, + "loss": 0.6491, + "step": 11340 + }, + { + "epoch": 0.4382408587204139, + "grad_norm": 0.8904009461402893, + "learning_rate": 0.0001707865168539326, + "loss": 0.134, + "step": 11350 + }, + { + "epoch": 0.43862697401444073, + "grad_norm": 0.7481307983398438, + "learning_rate": 0.0001707607758343308, + "loss": 0.5552, + "step": 11360 + }, + { + "epoch": 0.4390130893084675, + "grad_norm": 1.0893138647079468, + "learning_rate": 0.00017073503481472902, + "loss": 0.2369, + "step": 11370 + }, + { + "epoch": 0.4393992046024943, + "grad_norm": 3.3567726612091064, + "learning_rate": 0.00017070929379512723, + "loss": 0.1916, + "step": 11380 + }, + { + "epoch": 0.4397853198965211, + "grad_norm": 0.5970168709754944, + "learning_rate": 0.00017068355277552545, + "loss": 0.6096, + "step": 11390 + }, + { + "epoch": 0.4401714351905479, + "grad_norm": 2.880949020385742, + "learning_rate": 0.00017065781175592366, + "loss": 0.3566, + "step": 11400 + }, + { + "epoch": 0.4405575504845747, + "grad_norm": 0.5541375279426575, + "learning_rate": 0.00017063207073632187, + "loss": 0.4147, + "step": 11410 + }, + { + "epoch": 0.4409436657786015, + "grad_norm": 4.519477844238281, + "learning_rate": 0.00017060632971672009, + "loss": 0.4912, + 
"step": 11420 + }, + { + "epoch": 0.44132978107262827, + "grad_norm": 1.9959009885787964, + "learning_rate": 0.0001705805886971183, + "loss": 0.295, + "step": 11430 + }, + { + "epoch": 0.4417158963666551, + "grad_norm": 3.843033790588379, + "learning_rate": 0.0001705548476775165, + "loss": 0.2451, + "step": 11440 + }, + { + "epoch": 0.44210201166068186, + "grad_norm": 1.0480101108551025, + "learning_rate": 0.00017052910665791473, + "loss": 0.3113, + "step": 11450 + }, + { + "epoch": 0.4424881269547087, + "grad_norm": 2.0960068702697754, + "learning_rate": 0.00017050336563831294, + "loss": 0.5959, + "step": 11460 + }, + { + "epoch": 0.44287424224873545, + "grad_norm": 1.3062267303466797, + "learning_rate": 0.00017047762461871115, + "loss": 0.3812, + "step": 11470 + }, + { + "epoch": 0.4432603575427623, + "grad_norm": 2.2563138008117676, + "learning_rate": 0.00017045188359910937, + "loss": 0.303, + "step": 11480 + }, + { + "epoch": 0.44364647283678904, + "grad_norm": 1.518556833267212, + "learning_rate": 0.00017042614257950758, + "loss": 0.4349, + "step": 11490 + }, + { + "epoch": 0.44403258813081586, + "grad_norm": 5.704294681549072, + "learning_rate": 0.0001704004015599058, + "loss": 0.3162, + "step": 11500 + }, + { + "epoch": 0.44441870342484263, + "grad_norm": 3.3146274089813232, + "learning_rate": 0.000170374660540304, + "loss": 0.644, + "step": 11510 + }, + { + "epoch": 0.44480481871886945, + "grad_norm": 2.285374879837036, + "learning_rate": 0.00017034891952070222, + "loss": 0.3718, + "step": 11520 + }, + { + "epoch": 0.4451909340128963, + "grad_norm": 0.07299748063087463, + "learning_rate": 0.00017032317850110043, + "loss": 0.3093, + "step": 11530 + }, + { + "epoch": 0.44557704930692305, + "grad_norm": 4.159457683563232, + "learning_rate": 0.00017029743748149865, + "loss": 0.4074, + "step": 11540 + }, + { + "epoch": 0.44596316460094987, + "grad_norm": 2.2241604328155518, + "learning_rate": 0.00017027169646189686, + "loss": 0.2908, + "step": 11550 + }, + { + "epoch": 0.44634927989497664, + "grad_norm": 2.2086968421936035, + "learning_rate": 0.00017024595544229507, + "loss": 0.3352, + "step": 11560 + }, + { + "epoch": 0.44673539518900346, + "grad_norm": 0.2843379080295563, + "learning_rate": 0.00017022021442269329, + "loss": 0.2548, + "step": 11570 + }, + { + "epoch": 0.4471215104830302, + "grad_norm": 0.9805948734283447, + "learning_rate": 0.00017019447340309153, + "loss": 0.4134, + "step": 11580 + }, + { + "epoch": 0.44750762577705705, + "grad_norm": 2.3669955730438232, + "learning_rate": 0.0001701687323834897, + "loss": 0.3803, + "step": 11590 + }, + { + "epoch": 0.4478937410710838, + "grad_norm": 2.4062774181365967, + "learning_rate": 0.00017014299136388792, + "loss": 0.4391, + "step": 11600 + }, + { + "epoch": 0.44827985636511064, + "grad_norm": 0.4205828607082367, + "learning_rate": 0.00017011725034428614, + "loss": 0.2793, + "step": 11610 + }, + { + "epoch": 0.4486659716591374, + "grad_norm": 1.3258132934570312, + "learning_rate": 0.00017009150932468435, + "loss": 0.4219, + "step": 11620 + }, + { + "epoch": 0.44905208695316423, + "grad_norm": 3.250332832336426, + "learning_rate": 0.00017006576830508256, + "loss": 0.3638, + "step": 11630 + }, + { + "epoch": 0.449438202247191, + "grad_norm": 1.9546891450881958, + "learning_rate": 0.00017004002728548078, + "loss": 0.5473, + "step": 11640 + }, + { + "epoch": 0.4498243175412178, + "grad_norm": 0.6501532793045044, + "learning_rate": 0.00017001428626587902, + "loss": 0.197, + "step": 11650 + }, + { + "epoch": 0.4502104328352446, + 
"grad_norm": 0.7169322967529297, + "learning_rate": 0.0001699885452462772, + "loss": 0.2846, + "step": 11660 + }, + { + "epoch": 0.4505965481292714, + "grad_norm": 1.4476008415222168, + "learning_rate": 0.00016996280422667545, + "loss": 0.3601, + "step": 11670 + }, + { + "epoch": 0.4509826634232982, + "grad_norm": 1.562485933303833, + "learning_rate": 0.00016993706320707363, + "loss": 0.2876, + "step": 11680 + }, + { + "epoch": 0.451368778717325, + "grad_norm": 4.490612983703613, + "learning_rate": 0.00016991132218747184, + "loss": 0.5445, + "step": 11690 + }, + { + "epoch": 0.45175489401135177, + "grad_norm": 0.9045882225036621, + "learning_rate": 0.00016988558116787009, + "loss": 0.149, + "step": 11700 + }, + { + "epoch": 0.4521410093053786, + "grad_norm": 2.2920546531677246, + "learning_rate": 0.00016985984014826827, + "loss": 0.3922, + "step": 11710 + }, + { + "epoch": 0.45252712459940536, + "grad_norm": 4.459114074707031, + "learning_rate": 0.0001698340991286665, + "loss": 0.3386, + "step": 11720 + }, + { + "epoch": 0.4529132398934322, + "grad_norm": 3.0614171028137207, + "learning_rate": 0.0001698083581090647, + "loss": 0.3618, + "step": 11730 + }, + { + "epoch": 0.45329935518745895, + "grad_norm": 1.0597162246704102, + "learning_rate": 0.00016978261708946294, + "loss": 0.2012, + "step": 11740 + }, + { + "epoch": 0.4536854704814858, + "grad_norm": 0.9142243266105652, + "learning_rate": 0.00016975687606986112, + "loss": 0.3716, + "step": 11750 + }, + { + "epoch": 0.45407158577551254, + "grad_norm": 1.7402280569076538, + "learning_rate": 0.00016973113505025934, + "loss": 0.3046, + "step": 11760 + }, + { + "epoch": 0.45445770106953937, + "grad_norm": 0.7389079928398132, + "learning_rate": 0.00016970539403065758, + "loss": 0.287, + "step": 11770 + }, + { + "epoch": 0.45484381636356613, + "grad_norm": 2.0596721172332764, + "learning_rate": 0.00016967965301105576, + "loss": 0.5124, + "step": 11780 + }, + { + "epoch": 0.45522993165759296, + "grad_norm": 0.26743578910827637, + "learning_rate": 0.000169653911991454, + "loss": 0.6725, + "step": 11790 + }, + { + "epoch": 0.4556160469516198, + "grad_norm": 2.812441825866699, + "learning_rate": 0.0001696281709718522, + "loss": 0.4545, + "step": 11800 + }, + { + "epoch": 0.45600216224564655, + "grad_norm": 2.9857542514801025, + "learning_rate": 0.00016960242995225043, + "loss": 0.2865, + "step": 11810 + }, + { + "epoch": 0.45638827753967337, + "grad_norm": 0.4560181200504303, + "learning_rate": 0.00016957668893264862, + "loss": 0.1623, + "step": 11820 + }, + { + "epoch": 0.45677439283370014, + "grad_norm": 2.2492222785949707, + "learning_rate": 0.00016955094791304683, + "loss": 0.2813, + "step": 11830 + }, + { + "epoch": 0.45716050812772696, + "grad_norm": 1.0125524997711182, + "learning_rate": 0.00016952520689344507, + "loss": 0.2224, + "step": 11840 + }, + { + "epoch": 0.45754662342175373, + "grad_norm": 0.4849410057067871, + "learning_rate": 0.00016949946587384326, + "loss": 0.4838, + "step": 11850 + }, + { + "epoch": 0.45793273871578055, + "grad_norm": 2.313490390777588, + "learning_rate": 0.0001694737248542415, + "loss": 0.3855, + "step": 11860 + }, + { + "epoch": 0.4583188540098073, + "grad_norm": 2.3526558876037598, + "learning_rate": 0.00016944798383463968, + "loss": 0.3081, + "step": 11870 + }, + { + "epoch": 0.45870496930383414, + "grad_norm": 0.5723626613616943, + "learning_rate": 0.00016942224281503792, + "loss": 0.3126, + "step": 11880 + }, + { + "epoch": 0.4590910845978609, + "grad_norm": 4.130553722381592, + "learning_rate": 
0.00016939650179543614, + "loss": 0.2295, + "step": 11890 + }, + { + "epoch": 0.45947719989188773, + "grad_norm": 1.3537687063217163, + "learning_rate": 0.00016937076077583432, + "loss": 0.3488, + "step": 11900 + }, + { + "epoch": 0.4598633151859145, + "grad_norm": 1.843441128730774, + "learning_rate": 0.00016934501975623256, + "loss": 0.6298, + "step": 11910 + }, + { + "epoch": 0.4602494304799413, + "grad_norm": 4.685494422912598, + "learning_rate": 0.00016931927873663075, + "loss": 0.4929, + "step": 11920 + }, + { + "epoch": 0.4606355457739681, + "grad_norm": 1.5470925569534302, + "learning_rate": 0.000169293537717029, + "loss": 0.3214, + "step": 11930 + }, + { + "epoch": 0.4610216610679949, + "grad_norm": 2.0928761959075928, + "learning_rate": 0.00016926779669742718, + "loss": 0.317, + "step": 11940 + }, + { + "epoch": 0.4614077763620217, + "grad_norm": 1.0828526020050049, + "learning_rate": 0.00016924205567782542, + "loss": 0.4204, + "step": 11950 + }, + { + "epoch": 0.4617938916560485, + "grad_norm": 2.2898383140563965, + "learning_rate": 0.00016921631465822363, + "loss": 0.4149, + "step": 11960 + }, + { + "epoch": 0.4621800069500753, + "grad_norm": 1.1590880155563354, + "learning_rate": 0.00016919057363862182, + "loss": 0.2372, + "step": 11970 + }, + { + "epoch": 0.4625661222441021, + "grad_norm": 0.6889861226081848, + "learning_rate": 0.00016916483261902006, + "loss": 0.4905, + "step": 11980 + }, + { + "epoch": 0.46295223753812886, + "grad_norm": 0.7974410653114319, + "learning_rate": 0.00016913909159941824, + "loss": 0.3451, + "step": 11990 + }, + { + "epoch": 0.4633383528321557, + "grad_norm": 3.254754066467285, + "learning_rate": 0.00016911335057981648, + "loss": 0.4018, + "step": 12000 + }, + { + "epoch": 0.46372446812618245, + "grad_norm": 2.660983085632324, + "learning_rate": 0.0001690876095602147, + "loss": 0.2112, + "step": 12010 + }, + { + "epoch": 0.4641105834202093, + "grad_norm": 1.1055381298065186, + "learning_rate": 0.0001690618685406129, + "loss": 0.2994, + "step": 12020 + }, + { + "epoch": 0.46449669871423604, + "grad_norm": 0.26787269115448, + "learning_rate": 0.00016903612752101112, + "loss": 0.2744, + "step": 12030 + }, + { + "epoch": 0.46488281400826287, + "grad_norm": 0.8832791447639465, + "learning_rate": 0.0001690103865014093, + "loss": 0.4357, + "step": 12040 + }, + { + "epoch": 0.4652689293022897, + "grad_norm": 1.0007046461105347, + "learning_rate": 0.00016898464548180755, + "loss": 0.2873, + "step": 12050 + }, + { + "epoch": 0.46565504459631646, + "grad_norm": 2.0366733074188232, + "learning_rate": 0.00016895890446220574, + "loss": 0.3016, + "step": 12060 + }, + { + "epoch": 0.4660411598903433, + "grad_norm": 2.557812213897705, + "learning_rate": 0.00016893316344260398, + "loss": 0.6375, + "step": 12070 + }, + { + "epoch": 0.46642727518437005, + "grad_norm": 1.9911783933639526, + "learning_rate": 0.0001689074224230022, + "loss": 0.5568, + "step": 12080 + }, + { + "epoch": 0.4668133904783969, + "grad_norm": 0.6905809044837952, + "learning_rate": 0.0001688816814034004, + "loss": 0.3237, + "step": 12090 + }, + { + "epoch": 0.46719950577242364, + "grad_norm": 2.3139610290527344, + "learning_rate": 0.00016885594038379862, + "loss": 0.3656, + "step": 12100 + }, + { + "epoch": 0.46758562106645046, + "grad_norm": 1.580295443534851, + "learning_rate": 0.0001688301993641968, + "loss": 0.3134, + "step": 12110 + }, + { + "epoch": 0.46797173636047723, + "grad_norm": 1.1115995645523071, + "learning_rate": 0.00016880445834459504, + "loss": 0.6167, + "step": 12120 + }, + 
{ + "epoch": 0.46835785165450405, + "grad_norm": 3.0928075313568115, + "learning_rate": 0.00016877871732499323, + "loss": 0.3922, + "step": 12130 + }, + { + "epoch": 0.4687439669485308, + "grad_norm": 2.277163028717041, + "learning_rate": 0.00016875297630539147, + "loss": 0.2395, + "step": 12140 + }, + { + "epoch": 0.46913008224255764, + "grad_norm": 1.6578807830810547, + "learning_rate": 0.00016872723528578968, + "loss": 0.3502, + "step": 12150 + }, + { + "epoch": 0.4695161975365844, + "grad_norm": 2.9669971466064453, + "learning_rate": 0.0001687014942661879, + "loss": 0.3278, + "step": 12160 + }, + { + "epoch": 0.46990231283061124, + "grad_norm": 3.344914674758911, + "learning_rate": 0.0001686757532465861, + "loss": 0.4856, + "step": 12170 + }, + { + "epoch": 0.470288428124638, + "grad_norm": 1.512459635734558, + "learning_rate": 0.00016865001222698432, + "loss": 0.3113, + "step": 12180 + }, + { + "epoch": 0.4706745434186648, + "grad_norm": 1.8372429609298706, + "learning_rate": 0.00016862427120738254, + "loss": 0.3534, + "step": 12190 + }, + { + "epoch": 0.4710606587126916, + "grad_norm": 2.5016984939575195, + "learning_rate": 0.00016859853018778075, + "loss": 0.2129, + "step": 12200 + }, + { + "epoch": 0.4714467740067184, + "grad_norm": 2.083526134490967, + "learning_rate": 0.00016857278916817896, + "loss": 0.3011, + "step": 12210 + }, + { + "epoch": 0.4718328893007452, + "grad_norm": 3.3518013954162598, + "learning_rate": 0.00016854704814857718, + "loss": 0.2652, + "step": 12220 + }, + { + "epoch": 0.472219004594772, + "grad_norm": 0.34668633341789246, + "learning_rate": 0.0001685213071289754, + "loss": 0.3655, + "step": 12230 + }, + { + "epoch": 0.4726051198887988, + "grad_norm": 1.0674203634262085, + "learning_rate": 0.0001684955661093736, + "loss": 0.3217, + "step": 12240 + }, + { + "epoch": 0.4729912351828256, + "grad_norm": 2.9859087467193604, + "learning_rate": 0.00016846982508977182, + "loss": 0.447, + "step": 12250 + }, + { + "epoch": 0.47337735047685237, + "grad_norm": 0.33996835350990295, + "learning_rate": 0.00016844408407017003, + "loss": 0.1915, + "step": 12260 + }, + { + "epoch": 0.4737634657708792, + "grad_norm": 0.10328155755996704, + "learning_rate": 0.00016841834305056824, + "loss": 0.2588, + "step": 12270 + }, + { + "epoch": 0.47414958106490596, + "grad_norm": 2.0321199893951416, + "learning_rate": 0.00016839260203096646, + "loss": 0.4788, + "step": 12280 + }, + { + "epoch": 0.4745356963589328, + "grad_norm": 1.5690089464187622, + "learning_rate": 0.00016836686101136467, + "loss": 0.5198, + "step": 12290 + }, + { + "epoch": 0.47492181165295955, + "grad_norm": 1.989465355873108, + "learning_rate": 0.00016834111999176288, + "loss": 0.2554, + "step": 12300 + }, + { + "epoch": 0.47530792694698637, + "grad_norm": 1.574174404144287, + "learning_rate": 0.0001683153789721611, + "loss": 0.3703, + "step": 12310 + }, + { + "epoch": 0.4756940422410132, + "grad_norm": 2.726776599884033, + "learning_rate": 0.0001682896379525593, + "loss": 0.7426, + "step": 12320 + }, + { + "epoch": 0.47608015753503996, + "grad_norm": 0.3101334571838379, + "learning_rate": 0.00016826389693295752, + "loss": 0.1861, + "step": 12330 + }, + { + "epoch": 0.4764662728290668, + "grad_norm": 5.30327033996582, + "learning_rate": 0.00016823815591335574, + "loss": 0.4261, + "step": 12340 + }, + { + "epoch": 0.47685238812309355, + "grad_norm": 0.5553661584854126, + "learning_rate": 0.00016821241489375395, + "loss": 0.7326, + "step": 12350 + }, + { + "epoch": 0.4772385034171204, + "grad_norm": 
2.2244138717651367, + "learning_rate": 0.00016818667387415216, + "loss": 0.5406, + "step": 12360 + }, + { + "epoch": 0.47762461871114714, + "grad_norm": 0.5314281582832336, + "learning_rate": 0.00016816093285455038, + "loss": 0.3765, + "step": 12370 + }, + { + "epoch": 0.47801073400517397, + "grad_norm": 3.644477128982544, + "learning_rate": 0.0001681351918349486, + "loss": 0.5519, + "step": 12380 + }, + { + "epoch": 0.47839684929920073, + "grad_norm": 1.0556128025054932, + "learning_rate": 0.0001681094508153468, + "loss": 0.2128, + "step": 12390 + }, + { + "epoch": 0.47878296459322756, + "grad_norm": 2.7807135581970215, + "learning_rate": 0.00016808370979574502, + "loss": 0.1564, + "step": 12400 + }, + { + "epoch": 0.4791690798872543, + "grad_norm": 0.7862847447395325, + "learning_rate": 0.00016805796877614323, + "loss": 0.35, + "step": 12410 + }, + { + "epoch": 0.47955519518128115, + "grad_norm": 2.7445156574249268, + "learning_rate": 0.00016803222775654144, + "loss": 0.4167, + "step": 12420 + }, + { + "epoch": 0.4799413104753079, + "grad_norm": 0.6173526644706726, + "learning_rate": 0.00016800648673693966, + "loss": 0.3669, + "step": 12430 + }, + { + "epoch": 0.48032742576933474, + "grad_norm": 0.3762228488922119, + "learning_rate": 0.00016798074571733787, + "loss": 0.179, + "step": 12440 + }, + { + "epoch": 0.4807135410633615, + "grad_norm": 1.2806499004364014, + "learning_rate": 0.00016795500469773608, + "loss": 0.3926, + "step": 12450 + }, + { + "epoch": 0.48109965635738833, + "grad_norm": 1.0579105615615845, + "learning_rate": 0.0001679292636781343, + "loss": 0.3329, + "step": 12460 + }, + { + "epoch": 0.4814857716514151, + "grad_norm": 1.6502699851989746, + "learning_rate": 0.0001679035226585325, + "loss": 0.3149, + "step": 12470 + }, + { + "epoch": 0.4818718869454419, + "grad_norm": 0.8876189589500427, + "learning_rate": 0.00016787778163893072, + "loss": 0.3262, + "step": 12480 + }, + { + "epoch": 0.4822580022394687, + "grad_norm": 2.4398770332336426, + "learning_rate": 0.00016785204061932894, + "loss": 0.3834, + "step": 12490 + }, + { + "epoch": 0.4826441175334955, + "grad_norm": 1.275148868560791, + "learning_rate": 0.00016782629959972715, + "loss": 0.3749, + "step": 12500 + }, + { + "epoch": 0.4830302328275223, + "grad_norm": 2.1397783756256104, + "learning_rate": 0.0001678005585801254, + "loss": 0.4726, + "step": 12510 + }, + { + "epoch": 0.4834163481215491, + "grad_norm": 1.6431002616882324, + "learning_rate": 0.00016777481756052358, + "loss": 0.3537, + "step": 12520 + }, + { + "epoch": 0.48380246341557587, + "grad_norm": 1.1193108558654785, + "learning_rate": 0.0001677490765409218, + "loss": 0.3182, + "step": 12530 + }, + { + "epoch": 0.4841885787096027, + "grad_norm": 1.365897297859192, + "learning_rate": 0.00016772333552132, + "loss": 0.3455, + "step": 12540 + }, + { + "epoch": 0.48457469400362946, + "grad_norm": 0.9035172462463379, + "learning_rate": 0.00016769759450171822, + "loss": 0.2268, + "step": 12550 + }, + { + "epoch": 0.4849608092976563, + "grad_norm": 0.15492293238639832, + "learning_rate": 0.00016767185348211643, + "loss": 0.319, + "step": 12560 + }, + { + "epoch": 0.4853469245916831, + "grad_norm": 0.19394727051258087, + "learning_rate": 0.00016764611246251464, + "loss": 0.2073, + "step": 12570 + }, + { + "epoch": 0.48573303988570987, + "grad_norm": 1.4406816959381104, + "learning_rate": 0.00016762037144291288, + "loss": 0.4538, + "step": 12580 + }, + { + "epoch": 0.4861191551797367, + "grad_norm": 2.741548538208008, + "learning_rate": 0.00016759463042331107, 
+ "loss": 0.536, + "step": 12590 + }, + { + "epoch": 0.48650527047376346, + "grad_norm": 0.5203917622566223, + "learning_rate": 0.00016756888940370928, + "loss": 0.2484, + "step": 12600 + }, + { + "epoch": 0.4868913857677903, + "grad_norm": 0.9666195511817932, + "learning_rate": 0.0001675431483841075, + "loss": 0.3834, + "step": 12610 + }, + { + "epoch": 0.48727750106181705, + "grad_norm": 1.7210304737091064, + "learning_rate": 0.0001675174073645057, + "loss": 0.3223, + "step": 12620 + }, + { + "epoch": 0.4876636163558439, + "grad_norm": 0.6125622987747192, + "learning_rate": 0.00016749166634490392, + "loss": 0.394, + "step": 12630 + }, + { + "epoch": 0.48804973164987064, + "grad_norm": 1.9612951278686523, + "learning_rate": 0.00016746592532530214, + "loss": 0.3367, + "step": 12640 + }, + { + "epoch": 0.48843584694389747, + "grad_norm": 1.5395468473434448, + "learning_rate": 0.00016744018430570038, + "loss": 0.5441, + "step": 12650 + }, + { + "epoch": 0.48882196223792423, + "grad_norm": 0.8706358671188354, + "learning_rate": 0.00016741444328609856, + "loss": 0.422, + "step": 12660 + }, + { + "epoch": 0.48920807753195106, + "grad_norm": 4.338102340698242, + "learning_rate": 0.00016738870226649678, + "loss": 0.3932, + "step": 12670 + }, + { + "epoch": 0.4895941928259778, + "grad_norm": 0.9501354694366455, + "learning_rate": 0.000167362961246895, + "loss": 0.563, + "step": 12680 + }, + { + "epoch": 0.48998030812000465, + "grad_norm": 1.3146884441375732, + "learning_rate": 0.0001673372202272932, + "loss": 0.5116, + "step": 12690 + }, + { + "epoch": 0.4903664234140314, + "grad_norm": 2.144622564315796, + "learning_rate": 0.00016731147920769144, + "loss": 0.4792, + "step": 12700 + }, + { + "epoch": 0.49075253870805824, + "grad_norm": 0.9871418476104736, + "learning_rate": 0.00016728573818808963, + "loss": 0.3802, + "step": 12710 + }, + { + "epoch": 0.491138654002085, + "grad_norm": 1.710766315460205, + "learning_rate": 0.00016725999716848787, + "loss": 0.2859, + "step": 12720 + }, + { + "epoch": 0.49152476929611183, + "grad_norm": 2.528146505355835, + "learning_rate": 0.00016723425614888606, + "loss": 0.4565, + "step": 12730 + }, + { + "epoch": 0.4919108845901386, + "grad_norm": 0.8471786379814148, + "learning_rate": 0.00016720851512928427, + "loss": 0.3455, + "step": 12740 + }, + { + "epoch": 0.4922969998841654, + "grad_norm": 2.6623692512512207, + "learning_rate": 0.00016718277410968248, + "loss": 0.3687, + "step": 12750 + }, + { + "epoch": 0.4926831151781922, + "grad_norm": 2.9324758052825928, + "learning_rate": 0.0001671570330900807, + "loss": 0.2652, + "step": 12760 + }, + { + "epoch": 0.493069230472219, + "grad_norm": 1.7955294847488403, + "learning_rate": 0.00016713129207047894, + "loss": 0.3156, + "step": 12770 + }, + { + "epoch": 0.4934553457662458, + "grad_norm": 0.9923033118247986, + "learning_rate": 0.00016710555105087712, + "loss": 0.3042, + "step": 12780 + }, + { + "epoch": 0.4938414610602726, + "grad_norm": 0.9309022426605225, + "learning_rate": 0.00016707981003127536, + "loss": 0.2742, + "step": 12790 + }, + { + "epoch": 0.49422757635429937, + "grad_norm": 4.864802360534668, + "learning_rate": 0.00016705406901167355, + "loss": 0.6145, + "step": 12800 + }, + { + "epoch": 0.4946136916483262, + "grad_norm": 2.4508230686187744, + "learning_rate": 0.00016702832799207176, + "loss": 0.4218, + "step": 12810 + }, + { + "epoch": 0.49499980694235296, + "grad_norm": 2.0317444801330566, + "learning_rate": 0.00016700258697246998, + "loss": 0.5682, + "step": 12820 + }, + { + "epoch": 
0.4953859222363798, + "grad_norm": 4.89669942855835, + "learning_rate": 0.0001669768459528682, + "loss": 0.5654, + "step": 12830 + }, + { + "epoch": 0.4957720375304066, + "grad_norm": 0.602165162563324, + "learning_rate": 0.00016695110493326643, + "loss": 0.2719, + "step": 12840 + }, + { + "epoch": 0.4961581528244334, + "grad_norm": 1.1574476957321167, + "learning_rate": 0.00016692536391366462, + "loss": 0.3872, + "step": 12850 + }, + { + "epoch": 0.4965442681184602, + "grad_norm": 0.4792019724845886, + "learning_rate": 0.00016689962289406286, + "loss": 0.2671, + "step": 12860 + }, + { + "epoch": 0.49693038341248696, + "grad_norm": 1.4611676931381226, + "learning_rate": 0.00016687388187446104, + "loss": 0.3378, + "step": 12870 + }, + { + "epoch": 0.4973164987065138, + "grad_norm": 1.183975338935852, + "learning_rate": 0.00016684814085485928, + "loss": 0.2645, + "step": 12880 + }, + { + "epoch": 0.49770261400054056, + "grad_norm": 2.1447482109069824, + "learning_rate": 0.0001668223998352575, + "loss": 0.1678, + "step": 12890 + }, + { + "epoch": 0.4980887292945674, + "grad_norm": 1.5829964876174927, + "learning_rate": 0.00016679665881565568, + "loss": 0.4113, + "step": 12900 + }, + { + "epoch": 0.49847484458859415, + "grad_norm": 1.329871654510498, + "learning_rate": 0.00016677091779605392, + "loss": 0.2639, + "step": 12910 + }, + { + "epoch": 0.49886095988262097, + "grad_norm": 4.797327518463135, + "learning_rate": 0.0001667451767764521, + "loss": 0.3396, + "step": 12920 + }, + { + "epoch": 0.49924707517664774, + "grad_norm": 2.5864250659942627, + "learning_rate": 0.00016671943575685035, + "loss": 0.4143, + "step": 12930 + }, + { + "epoch": 0.49963319047067456, + "grad_norm": 0.05073557794094086, + "learning_rate": 0.00016669369473724854, + "loss": 0.231, + "step": 12940 + }, + { + "epoch": 0.5000193057647013, + "grad_norm": 0.9530317783355713, + "learning_rate": 0.00016666795371764678, + "loss": 0.3702, + "step": 12950 + }, + { + "epoch": 0.5004054210587281, + "grad_norm": 1.3573989868164062, + "learning_rate": 0.000166642212698045, + "loss": 0.2095, + "step": 12960 + }, + { + "epoch": 0.500791536352755, + "grad_norm": 3.2758514881134033, + "learning_rate": 0.00016661647167844318, + "loss": 0.3901, + "step": 12970 + }, + { + "epoch": 0.5011776516467817, + "grad_norm": 2.359602212905884, + "learning_rate": 0.00016659073065884142, + "loss": 0.6232, + "step": 12980 + }, + { + "epoch": 0.5015637669408085, + "grad_norm": 0.5743809938430786, + "learning_rate": 0.0001665649896392396, + "loss": 0.431, + "step": 12990 + }, + { + "epoch": 0.5019498822348353, + "grad_norm": 2.229215621948242, + "learning_rate": 0.00016653924861963784, + "loss": 0.33, + "step": 13000 + }, + { + "epoch": 0.5023359975288622, + "grad_norm": 3.1233408451080322, + "learning_rate": 0.00016651350760003606, + "loss": 0.3527, + "step": 13010 + }, + { + "epoch": 0.5027221128228889, + "grad_norm": 0.4846508204936981, + "learning_rate": 0.00016648776658043427, + "loss": 0.4514, + "step": 13020 + }, + { + "epoch": 0.5031082281169157, + "grad_norm": 0.807447612285614, + "learning_rate": 0.00016646202556083248, + "loss": 0.3643, + "step": 13030 + }, + { + "epoch": 0.5034943434109425, + "grad_norm": 3.226194143295288, + "learning_rate": 0.00016643628454123067, + "loss": 0.4803, + "step": 13040 + }, + { + "epoch": 0.5038804587049693, + "grad_norm": 6.127805233001709, + "learning_rate": 0.0001664105435216289, + "loss": 0.3501, + "step": 13050 + }, + { + "epoch": 0.5042665739989961, + "grad_norm": 1.3367782831192017, + "learning_rate": 
0.0001663848025020271, + "loss": 0.2259, + "step": 13060 + }, + { + "epoch": 0.5046526892930229, + "grad_norm": 1.112602710723877, + "learning_rate": 0.00016635906148242534, + "loss": 0.3868, + "step": 13070 + }, + { + "epoch": 0.5050388045870496, + "grad_norm": 3.188282012939453, + "learning_rate": 0.00016633332046282355, + "loss": 0.2783, + "step": 13080 + }, + { + "epoch": 0.5054249198810765, + "grad_norm": 1.5972063541412354, + "learning_rate": 0.00016630757944322176, + "loss": 0.4661, + "step": 13090 + }, + { + "epoch": 0.5058110351751033, + "grad_norm": 0.8210055232048035, + "learning_rate": 0.00016628183842361998, + "loss": 0.2536, + "step": 13100 + }, + { + "epoch": 0.50619715046913, + "grad_norm": 2.279244899749756, + "learning_rate": 0.00016625609740401816, + "loss": 0.3158, + "step": 13110 + }, + { + "epoch": 0.5065832657631569, + "grad_norm": 2.9017488956451416, + "learning_rate": 0.0001662303563844164, + "loss": 0.4209, + "step": 13120 + }, + { + "epoch": 0.5069693810571837, + "grad_norm": 0.9567920565605164, + "learning_rate": 0.0001662046153648146, + "loss": 0.3869, + "step": 13130 + }, + { + "epoch": 0.5073554963512105, + "grad_norm": 1.3605408668518066, + "learning_rate": 0.00016617887434521283, + "loss": 0.3529, + "step": 13140 + }, + { + "epoch": 0.5077416116452372, + "grad_norm": 0.502921998500824, + "learning_rate": 0.00016615313332561104, + "loss": 0.2584, + "step": 13150 + }, + { + "epoch": 0.5081277269392641, + "grad_norm": 2.9092366695404053, + "learning_rate": 0.00016612739230600926, + "loss": 0.3494, + "step": 13160 + }, + { + "epoch": 0.5085138422332909, + "grad_norm": 0.28300145268440247, + "learning_rate": 0.00016610165128640747, + "loss": 0.3514, + "step": 13170 + }, + { + "epoch": 0.5088999575273176, + "grad_norm": 1.326134204864502, + "learning_rate": 0.00016607591026680566, + "loss": 0.1593, + "step": 13180 + }, + { + "epoch": 0.5092860728213444, + "grad_norm": 2.1484436988830566, + "learning_rate": 0.0001660501692472039, + "loss": 0.4468, + "step": 13190 + }, + { + "epoch": 0.5096721881153713, + "grad_norm": 1.9255646467208862, + "learning_rate": 0.0001660244282276021, + "loss": 0.3937, + "step": 13200 + }, + { + "epoch": 0.5100583034093981, + "grad_norm": 0.01497764140367508, + "learning_rate": 0.00016599868720800032, + "loss": 0.2795, + "step": 13210 + }, + { + "epoch": 0.5104444187034248, + "grad_norm": 0.992023766040802, + "learning_rate": 0.00016597294618839854, + "loss": 0.2389, + "step": 13220 + }, + { + "epoch": 0.5108305339974516, + "grad_norm": 1.517337441444397, + "learning_rate": 0.00016594720516879675, + "loss": 0.2821, + "step": 13230 + }, + { + "epoch": 0.5112166492914785, + "grad_norm": 2.355637311935425, + "learning_rate": 0.00016592146414919496, + "loss": 0.5727, + "step": 13240 + }, + { + "epoch": 0.5116027645855052, + "grad_norm": 2.450536012649536, + "learning_rate": 0.00016589572312959315, + "loss": 0.4887, + "step": 13250 + }, + { + "epoch": 0.511988879879532, + "grad_norm": 1.2966598272323608, + "learning_rate": 0.0001658699821099914, + "loss": 0.3605, + "step": 13260 + }, + { + "epoch": 0.5123749951735588, + "grad_norm": 1.3981765508651733, + "learning_rate": 0.0001658442410903896, + "loss": 0.5329, + "step": 13270 + }, + { + "epoch": 0.5127611104675857, + "grad_norm": 0.7260739803314209, + "learning_rate": 0.00016581850007078782, + "loss": 0.3447, + "step": 13280 + }, + { + "epoch": 0.5131472257616124, + "grad_norm": 1.1348093748092651, + "learning_rate": 0.00016579275905118603, + "loss": 0.3086, + "step": 13290 + }, + { + 
"epoch": 0.5135333410556392, + "grad_norm": 1.3015291690826416, + "learning_rate": 0.00016576701803158424, + "loss": 0.3411, + "step": 13300 + }, + { + "epoch": 0.513919456349666, + "grad_norm": 2.352766990661621, + "learning_rate": 0.00016574127701198246, + "loss": 0.1617, + "step": 13310 + }, + { + "epoch": 0.5143055716436928, + "grad_norm": 2.0096113681793213, + "learning_rate": 0.00016571553599238067, + "loss": 0.3168, + "step": 13320 + }, + { + "epoch": 0.5146916869377196, + "grad_norm": 0.8163488507270813, + "learning_rate": 0.00016568979497277888, + "loss": 0.3318, + "step": 13330 + }, + { + "epoch": 0.5150778022317464, + "grad_norm": 0.8218249082565308, + "learning_rate": 0.0001656640539531771, + "loss": 0.2303, + "step": 13340 + }, + { + "epoch": 0.5154639175257731, + "grad_norm": 3.5414462089538574, + "learning_rate": 0.0001656383129335753, + "loss": 0.2295, + "step": 13350 + }, + { + "epoch": 0.5158500328198, + "grad_norm": 2.176178216934204, + "learning_rate": 0.00016561257191397352, + "loss": 0.5767, + "step": 13360 + }, + { + "epoch": 0.5162361481138268, + "grad_norm": 1.4649319648742676, + "learning_rate": 0.00016558683089437174, + "loss": 0.174, + "step": 13370 + }, + { + "epoch": 0.5166222634078536, + "grad_norm": 2.107895851135254, + "learning_rate": 0.00016556108987476995, + "loss": 0.3444, + "step": 13380 + }, + { + "epoch": 0.5170083787018804, + "grad_norm": 1.3116638660430908, + "learning_rate": 0.00016553534885516816, + "loss": 0.2462, + "step": 13390 + }, + { + "epoch": 0.5173944939959072, + "grad_norm": 2.867553949356079, + "learning_rate": 0.00016550960783556638, + "loss": 0.3106, + "step": 13400 + }, + { + "epoch": 0.517780609289934, + "grad_norm": 3.4331533908843994, + "learning_rate": 0.0001654838668159646, + "loss": 0.5359, + "step": 13410 + }, + { + "epoch": 0.5181667245839607, + "grad_norm": 1.8306528329849243, + "learning_rate": 0.0001654581257963628, + "loss": 0.4225, + "step": 13420 + }, + { + "epoch": 0.5185528398779876, + "grad_norm": 1.242026448249817, + "learning_rate": 0.00016543238477676102, + "loss": 0.2679, + "step": 13430 + }, + { + "epoch": 0.5189389551720144, + "grad_norm": 0.5788571834564209, + "learning_rate": 0.00016540664375715923, + "loss": 0.369, + "step": 13440 + }, + { + "epoch": 0.5193250704660411, + "grad_norm": 0.22553350031375885, + "learning_rate": 0.00016538090273755744, + "loss": 0.422, + "step": 13450 + }, + { + "epoch": 0.5197111857600679, + "grad_norm": 1.6932384967803955, + "learning_rate": 0.00016535516171795566, + "loss": 0.3127, + "step": 13460 + }, + { + "epoch": 0.5200973010540948, + "grad_norm": 1.3747683763504028, + "learning_rate": 0.00016532942069835387, + "loss": 0.2751, + "step": 13470 + }, + { + "epoch": 0.5204834163481216, + "grad_norm": 2.0508596897125244, + "learning_rate": 0.00016530367967875208, + "loss": 0.4951, + "step": 13480 + }, + { + "epoch": 0.5208695316421483, + "grad_norm": 1.044503092765808, + "learning_rate": 0.0001652779386591503, + "loss": 0.2329, + "step": 13490 + }, + { + "epoch": 0.5212556469361751, + "grad_norm": 1.6400004625320435, + "learning_rate": 0.0001652521976395485, + "loss": 0.1846, + "step": 13500 + }, + { + "epoch": 0.521641762230202, + "grad_norm": 2.234170913696289, + "learning_rate": 0.00016522645661994672, + "loss": 0.5005, + "step": 13510 + }, + { + "epoch": 0.5220278775242287, + "grad_norm": 1.8167870044708252, + "learning_rate": 0.00016520071560034493, + "loss": 0.4629, + "step": 13520 + }, + { + "epoch": 0.5224139928182555, + "grad_norm": 1.7200794219970703, + 
"learning_rate": 0.00016517497458074315, + "loss": 0.4546, + "step": 13530 + }, + { + "epoch": 0.5228001081122823, + "grad_norm": 2.845244884490967, + "learning_rate": 0.00016514923356114136, + "loss": 0.2211, + "step": 13540 + }, + { + "epoch": 0.5231862234063092, + "grad_norm": 5.3078389167785645, + "learning_rate": 0.00016512349254153957, + "loss": 0.4903, + "step": 13550 + }, + { + "epoch": 0.5235723387003359, + "grad_norm": 8.109561920166016, + "learning_rate": 0.0001650977515219378, + "loss": 0.3624, + "step": 13560 + }, + { + "epoch": 0.5239584539943627, + "grad_norm": 0.537749171257019, + "learning_rate": 0.000165072010502336, + "loss": 0.2262, + "step": 13570 + }, + { + "epoch": 0.5243445692883895, + "grad_norm": 2.8182802200317383, + "learning_rate": 0.00016504626948273421, + "loss": 0.4173, + "step": 13580 + }, + { + "epoch": 0.5247306845824163, + "grad_norm": 0.6623479127883911, + "learning_rate": 0.00016502052846313243, + "loss": 0.2955, + "step": 13590 + }, + { + "epoch": 0.5251167998764431, + "grad_norm": 3.027707099914551, + "learning_rate": 0.00016499478744353064, + "loss": 0.5272, + "step": 13600 + }, + { + "epoch": 0.5255029151704699, + "grad_norm": 1.6830018758773804, + "learning_rate": 0.00016496904642392885, + "loss": 0.2169, + "step": 13610 + }, + { + "epoch": 0.5258890304644968, + "grad_norm": 3.0182113647460938, + "learning_rate": 0.00016494330540432707, + "loss": 0.5747, + "step": 13620 + }, + { + "epoch": 0.5262751457585235, + "grad_norm": 1.5487585067749023, + "learning_rate": 0.00016491756438472528, + "loss": 0.2585, + "step": 13630 + }, + { + "epoch": 0.5266612610525503, + "grad_norm": 1.2525122165679932, + "learning_rate": 0.0001648918233651235, + "loss": 0.3622, + "step": 13640 + }, + { + "epoch": 0.5270473763465771, + "grad_norm": 1.9910658597946167, + "learning_rate": 0.00016486608234552174, + "loss": 0.2049, + "step": 13650 + }, + { + "epoch": 0.5274334916406039, + "grad_norm": 0.7254251837730408, + "learning_rate": 0.00016484034132591992, + "loss": 0.3101, + "step": 13660 + }, + { + "epoch": 0.5278196069346307, + "grad_norm": 0.9839001893997192, + "learning_rate": 0.00016481460030631813, + "loss": 0.4723, + "step": 13670 + }, + { + "epoch": 0.5282057222286575, + "grad_norm": 2.176529884338379, + "learning_rate": 0.00016478885928671635, + "loss": 0.3669, + "step": 13680 + }, + { + "epoch": 0.5285918375226842, + "grad_norm": 2.534996509552002, + "learning_rate": 0.00016476311826711456, + "loss": 0.551, + "step": 13690 + }, + { + "epoch": 0.5289779528167111, + "grad_norm": 2.0100669860839844, + "learning_rate": 0.0001647373772475128, + "loss": 0.445, + "step": 13700 + }, + { + "epoch": 0.5293640681107379, + "grad_norm": 0.7759265899658203, + "learning_rate": 0.000164711636227911, + "loss": 0.2839, + "step": 13710 + }, + { + "epoch": 0.5297501834047647, + "grad_norm": 3.398287057876587, + "learning_rate": 0.00016468589520830923, + "loss": 0.3497, + "step": 13720 + }, + { + "epoch": 0.5301362986987914, + "grad_norm": 2.6792221069335938, + "learning_rate": 0.00016466015418870741, + "loss": 0.3435, + "step": 13730 + }, + { + "epoch": 0.5305224139928183, + "grad_norm": 0.7382081747055054, + "learning_rate": 0.00016463441316910563, + "loss": 0.367, + "step": 13740 + }, + { + "epoch": 0.5309085292868451, + "grad_norm": 0.9496407508850098, + "learning_rate": 0.00016460867214950384, + "loss": 0.305, + "step": 13750 + }, + { + "epoch": 0.5312946445808718, + "grad_norm": 1.2950342893600464, + "learning_rate": 0.00016458293112990205, + "loss": 0.2769, + "step": 13760 + 
}, + { + "epoch": 0.5316807598748986, + "grad_norm": 1.1744359731674194, + "learning_rate": 0.0001645571901103003, + "loss": 0.4776, + "step": 13770 + }, + { + "epoch": 0.5320668751689255, + "grad_norm": 1.1507617235183716, + "learning_rate": 0.00016453144909069848, + "loss": 0.4486, + "step": 13780 + }, + { + "epoch": 0.5324529904629522, + "grad_norm": 3.200432300567627, + "learning_rate": 0.00016450570807109672, + "loss": 0.4144, + "step": 13790 + }, + { + "epoch": 0.532839105756979, + "grad_norm": 0.991581916809082, + "learning_rate": 0.0001644799670514949, + "loss": 0.2314, + "step": 13800 + }, + { + "epoch": 0.5332252210510058, + "grad_norm": 4.800248622894287, + "learning_rate": 0.00016445422603189312, + "loss": 0.4601, + "step": 13810 + }, + { + "epoch": 0.5336113363450327, + "grad_norm": 1.2141329050064087, + "learning_rate": 0.00016442848501229136, + "loss": 0.257, + "step": 13820 + }, + { + "epoch": 0.5339974516390594, + "grad_norm": 0.8803738951683044, + "learning_rate": 0.00016440274399268955, + "loss": 0.4645, + "step": 13830 + }, + { + "epoch": 0.5343835669330862, + "grad_norm": 1.2020646333694458, + "learning_rate": 0.0001643770029730878, + "loss": 0.3751, + "step": 13840 + }, + { + "epoch": 0.534769682227113, + "grad_norm": 0.9887505173683167, + "learning_rate": 0.00016435126195348597, + "loss": 0.2266, + "step": 13850 + }, + { + "epoch": 0.5351557975211398, + "grad_norm": 0.38067731261253357, + "learning_rate": 0.00016432552093388421, + "loss": 0.3482, + "step": 13860 + }, + { + "epoch": 0.5355419128151666, + "grad_norm": 5.429462909698486, + "learning_rate": 0.0001642997799142824, + "loss": 0.4055, + "step": 13870 + }, + { + "epoch": 0.5359280281091934, + "grad_norm": 2.197861909866333, + "learning_rate": 0.00016427403889468061, + "loss": 0.1701, + "step": 13880 + }, + { + "epoch": 0.5363141434032203, + "grad_norm": 1.467132568359375, + "learning_rate": 0.00016424829787507885, + "loss": 0.3536, + "step": 13890 + }, + { + "epoch": 0.536700258697247, + "grad_norm": 2.0702550411224365, + "learning_rate": 0.00016422255685547704, + "loss": 0.5644, + "step": 13900 + }, + { + "epoch": 0.5370863739912738, + "grad_norm": 1.4855132102966309, + "learning_rate": 0.00016419681583587528, + "loss": 0.4068, + "step": 13910 + }, + { + "epoch": 0.5374724892853006, + "grad_norm": 0.6586676239967346, + "learning_rate": 0.00016417107481627347, + "loss": 0.3709, + "step": 13920 + }, + { + "epoch": 0.5378586045793274, + "grad_norm": 0.026774466037750244, + "learning_rate": 0.0001641453337966717, + "loss": 0.2635, + "step": 13930 + }, + { + "epoch": 0.5382447198733542, + "grad_norm": 0.9848103523254395, + "learning_rate": 0.0001641195927770699, + "loss": 0.249, + "step": 13940 + }, + { + "epoch": 0.538630835167381, + "grad_norm": 3.77512526512146, + "learning_rate": 0.0001640938517574681, + "loss": 0.5356, + "step": 13950 + }, + { + "epoch": 0.5390169504614077, + "grad_norm": 5.543573379516602, + "learning_rate": 0.00016406811073786635, + "loss": 0.6471, + "step": 13960 + }, + { + "epoch": 0.5394030657554346, + "grad_norm": 0.8161652684211731, + "learning_rate": 0.00016404236971826453, + "loss": 0.5691, + "step": 13970 + }, + { + "epoch": 0.5397891810494614, + "grad_norm": 1.5539859533309937, + "learning_rate": 0.00016401662869866277, + "loss": 0.4406, + "step": 13980 + }, + { + "epoch": 0.5401752963434882, + "grad_norm": 1.3035658597946167, + "learning_rate": 0.00016399088767906096, + "loss": 0.3086, + "step": 13990 + }, + { + "epoch": 0.5405614116375149, + "grad_norm": 0.9168418645858765, + 
"learning_rate": 0.0001639651466594592, + "loss": 0.1581, + "step": 14000 + }, + { + "epoch": 0.5409475269315418, + "grad_norm": 1.0382287502288818, + "learning_rate": 0.00016393940563985741, + "loss": 0.4723, + "step": 14010 + }, + { + "epoch": 0.5413336422255686, + "grad_norm": 2.896981716156006, + "learning_rate": 0.0001639136646202556, + "loss": 0.2999, + "step": 14020 + }, + { + "epoch": 0.5417197575195953, + "grad_norm": 0.7354179620742798, + "learning_rate": 0.00016388792360065384, + "loss": 0.4853, + "step": 14030 + }, + { + "epoch": 0.5421058728136221, + "grad_norm": 3.221067190170288, + "learning_rate": 0.00016386218258105203, + "loss": 0.3622, + "step": 14040 + }, + { + "epoch": 0.542491988107649, + "grad_norm": 6.591146469116211, + "learning_rate": 0.00016383644156145027, + "loss": 0.5803, + "step": 14050 + }, + { + "epoch": 0.5428781034016757, + "grad_norm": 3.1521377563476562, + "learning_rate": 0.00016381070054184845, + "loss": 0.3267, + "step": 14060 + }, + { + "epoch": 0.5432642186957025, + "grad_norm": 1.7890762090682983, + "learning_rate": 0.0001637849595222467, + "loss": 0.4584, + "step": 14070 + }, + { + "epoch": 0.5436503339897293, + "grad_norm": 1.6599558591842651, + "learning_rate": 0.0001637592185026449, + "loss": 0.298, + "step": 14080 + }, + { + "epoch": 0.5440364492837562, + "grad_norm": 3.521927833557129, + "learning_rate": 0.00016373347748304312, + "loss": 0.3743, + "step": 14090 + }, + { + "epoch": 0.5444225645777829, + "grad_norm": 3.8942599296569824, + "learning_rate": 0.00016370773646344133, + "loss": 0.3254, + "step": 14100 + }, + { + "epoch": 0.5448086798718097, + "grad_norm": 2.8547496795654297, + "learning_rate": 0.00016368199544383952, + "loss": 0.4073, + "step": 14110 + }, + { + "epoch": 0.5451947951658365, + "grad_norm": 1.0060430765151978, + "learning_rate": 0.00016365625442423776, + "loss": 0.1631, + "step": 14120 + }, + { + "epoch": 0.5455809104598633, + "grad_norm": 2.2001001834869385, + "learning_rate": 0.00016363051340463595, + "loss": 0.2854, + "step": 14130 + }, + { + "epoch": 0.5459670257538901, + "grad_norm": 1.3699944019317627, + "learning_rate": 0.0001636047723850342, + "loss": 0.456, + "step": 14140 + }, + { + "epoch": 0.5463531410479169, + "grad_norm": 2.1481733322143555, + "learning_rate": 0.0001635790313654324, + "loss": 0.4085, + "step": 14150 + }, + { + "epoch": 0.5467392563419438, + "grad_norm": 0.40439683198928833, + "learning_rate": 0.00016355329034583061, + "loss": 0.3932, + "step": 14160 + }, + { + "epoch": 0.5471253716359705, + "grad_norm": 2.2773404121398926, + "learning_rate": 0.00016352754932622883, + "loss": 0.2926, + "step": 14170 + }, + { + "epoch": 0.5475114869299973, + "grad_norm": 2.2974839210510254, + "learning_rate": 0.000163501808306627, + "loss": 0.2419, + "step": 14180 + }, + { + "epoch": 0.5478976022240241, + "grad_norm": 1.0429989099502563, + "learning_rate": 0.00016347606728702525, + "loss": 0.2559, + "step": 14190 + }, + { + "epoch": 0.5482837175180509, + "grad_norm": 0.8988879919052124, + "learning_rate": 0.00016345032626742347, + "loss": 0.1677, + "step": 14200 + }, + { + "epoch": 0.5486698328120777, + "grad_norm": 1.2740018367767334, + "learning_rate": 0.00016342458524782168, + "loss": 0.2452, + "step": 14210 + }, + { + "epoch": 0.5490559481061045, + "grad_norm": 1.789467692375183, + "learning_rate": 0.0001633988442282199, + "loss": 0.2272, + "step": 14220 + }, + { + "epoch": 0.5494420634001312, + "grad_norm": 3.2136781215667725, + "learning_rate": 0.0001633731032086181, + "loss": 0.4026, + "step": 14230 
+ }, + { + "epoch": 0.5498281786941581, + "grad_norm": 2.4747092723846436, + "learning_rate": 0.00016334736218901632, + "loss": 0.2371, + "step": 14240 + }, + { + "epoch": 0.5502142939881849, + "grad_norm": 1.5639567375183105, + "learning_rate": 0.0001633216211694145, + "loss": 0.2801, + "step": 14250 + }, + { + "epoch": 0.5506004092822117, + "grad_norm": 3.9598312377929688, + "learning_rate": 0.00016329588014981275, + "loss": 0.2583, + "step": 14260 + }, + { + "epoch": 0.5509865245762384, + "grad_norm": 1.5873563289642334, + "learning_rate": 0.00016327013913021096, + "loss": 0.2729, + "step": 14270 + }, + { + "epoch": 0.5513726398702653, + "grad_norm": 2.2313668727874756, + "learning_rate": 0.00016324439811060917, + "loss": 0.191, + "step": 14280 + }, + { + "epoch": 0.5517587551642921, + "grad_norm": 1.6087117195129395, + "learning_rate": 0.0001632186570910074, + "loss": 0.2698, + "step": 14290 + }, + { + "epoch": 0.5521448704583188, + "grad_norm": 6.5459675788879395, + "learning_rate": 0.0001631929160714056, + "loss": 0.3632, + "step": 14300 + }, + { + "epoch": 0.5525309857523456, + "grad_norm": 1.2121779918670654, + "learning_rate": 0.00016316717505180381, + "loss": 0.4541, + "step": 14310 + }, + { + "epoch": 0.5529171010463725, + "grad_norm": 2.7277257442474365, + "learning_rate": 0.00016314143403220203, + "loss": 0.1489, + "step": 14320 + }, + { + "epoch": 0.5533032163403993, + "grad_norm": 2.2566685676574707, + "learning_rate": 0.00016311569301260024, + "loss": 0.1838, + "step": 14330 + }, + { + "epoch": 0.553689331634426, + "grad_norm": 0.44783294200897217, + "learning_rate": 0.00016308995199299845, + "loss": 0.4745, + "step": 14340 + }, + { + "epoch": 0.5540754469284528, + "grad_norm": 1.0200363397598267, + "learning_rate": 0.00016306421097339667, + "loss": 0.1251, + "step": 14350 + }, + { + "epoch": 0.5544615622224797, + "grad_norm": 1.1761879920959473, + "learning_rate": 0.00016303846995379488, + "loss": 0.6837, + "step": 14360 + }, + { + "epoch": 0.5548476775165064, + "grad_norm": 1.8275704383850098, + "learning_rate": 0.0001630127289341931, + "loss": 0.3968, + "step": 14370 + }, + { + "epoch": 0.5552337928105332, + "grad_norm": 0.7219232320785522, + "learning_rate": 0.0001629869879145913, + "loss": 0.3278, + "step": 14380 + }, + { + "epoch": 0.55561990810456, + "grad_norm": 1.9161540269851685, + "learning_rate": 0.00016296124689498952, + "loss": 0.5071, + "step": 14390 + }, + { + "epoch": 0.5560060233985868, + "grad_norm": 2.4773502349853516, + "learning_rate": 0.00016293550587538773, + "loss": 0.3268, + "step": 14400 + }, + { + "epoch": 0.5563921386926136, + "grad_norm": 1.526877760887146, + "learning_rate": 0.00016290976485578595, + "loss": 0.284, + "step": 14410 + }, + { + "epoch": 0.5567782539866404, + "grad_norm": 2.082036018371582, + "learning_rate": 0.00016288402383618416, + "loss": 0.4194, + "step": 14420 + }, + { + "epoch": 0.5571643692806673, + "grad_norm": 4.1033477783203125, + "learning_rate": 0.00016285828281658237, + "loss": 0.329, + "step": 14430 + }, + { + "epoch": 0.557550484574694, + "grad_norm": 3.344879388809204, + "learning_rate": 0.0001628325417969806, + "loss": 0.3599, + "step": 14440 + }, + { + "epoch": 0.5579365998687208, + "grad_norm": 2.6200602054595947, + "learning_rate": 0.0001628068007773788, + "loss": 0.5152, + "step": 14450 + }, + { + "epoch": 0.5583227151627476, + "grad_norm": 1.05362868309021, + "learning_rate": 0.000162781059757777, + "loss": 0.4454, + "step": 14460 + }, + { + "epoch": 0.5587088304567744, + "grad_norm": 2.557406187057495, + 
"learning_rate": 0.00016275531873817523, + "loss": 0.3779, + "step": 14470 + }, + { + "epoch": 0.5590949457508012, + "grad_norm": 0.8478209376335144, + "learning_rate": 0.00016272957771857344, + "loss": 0.4289, + "step": 14480 + }, + { + "epoch": 0.559481061044828, + "grad_norm": 3.543574094772339, + "learning_rate": 0.00016270383669897165, + "loss": 0.37, + "step": 14490 + }, + { + "epoch": 0.5598671763388547, + "grad_norm": 0.21068768203258514, + "learning_rate": 0.00016267809567936987, + "loss": 0.2602, + "step": 14500 + }, + { + "epoch": 0.5602532916328816, + "grad_norm": 1.1703628301620483, + "learning_rate": 0.00016265235465976808, + "loss": 0.3684, + "step": 14510 + }, + { + "epoch": 0.5606394069269084, + "grad_norm": 1.4498575925827026, + "learning_rate": 0.0001626266136401663, + "loss": 0.4089, + "step": 14520 + }, + { + "epoch": 0.5610255222209352, + "grad_norm": 1.617297887802124, + "learning_rate": 0.0001626008726205645, + "loss": 0.2759, + "step": 14530 + }, + { + "epoch": 0.5614116375149619, + "grad_norm": 0.8708978891372681, + "learning_rate": 0.00016257513160096272, + "loss": 0.2523, + "step": 14540 + }, + { + "epoch": 0.5617977528089888, + "grad_norm": 1.098026156425476, + "learning_rate": 0.00016254939058136093, + "loss": 0.3507, + "step": 14550 + }, + { + "epoch": 0.5621838681030156, + "grad_norm": 3.0867936611175537, + "learning_rate": 0.00016252364956175915, + "loss": 0.3563, + "step": 14560 + }, + { + "epoch": 0.5625699833970423, + "grad_norm": 1.4829964637756348, + "learning_rate": 0.00016249790854215736, + "loss": 0.3586, + "step": 14570 + }, + { + "epoch": 0.5629560986910691, + "grad_norm": 4.029405117034912, + "learning_rate": 0.00016247216752255557, + "loss": 0.5198, + "step": 14580 + }, + { + "epoch": 0.563342213985096, + "grad_norm": 2.5473573207855225, + "learning_rate": 0.00016244642650295379, + "loss": 0.3818, + "step": 14590 + }, + { + "epoch": 0.5637283292791228, + "grad_norm": 1.387529730796814, + "learning_rate": 0.000162420685483352, + "loss": 0.3453, + "step": 14600 + }, + { + "epoch": 0.5641144445731495, + "grad_norm": 3.2525246143341064, + "learning_rate": 0.0001623949444637502, + "loss": 0.6096, + "step": 14610 + }, + { + "epoch": 0.5645005598671763, + "grad_norm": 1.23868989944458, + "learning_rate": 0.00016236920344414843, + "loss": 0.2785, + "step": 14620 + }, + { + "epoch": 0.5648866751612032, + "grad_norm": 1.763416051864624, + "learning_rate": 0.00016234346242454667, + "loss": 0.2313, + "step": 14630 + }, + { + "epoch": 0.5652727904552299, + "grad_norm": 2.6030027866363525, + "learning_rate": 0.00016231772140494485, + "loss": 0.5002, + "step": 14640 + }, + { + "epoch": 0.5656589057492567, + "grad_norm": 4.345195770263672, + "learning_rate": 0.00016229198038534307, + "loss": 0.3433, + "step": 14650 + }, + { + "epoch": 0.5660450210432835, + "grad_norm": 1.8660558462142944, + "learning_rate": 0.00016226623936574128, + "loss": 0.325, + "step": 14660 + }, + { + "epoch": 0.5664311363373103, + "grad_norm": 2.504354953765869, + "learning_rate": 0.0001622404983461395, + "loss": 0.3013, + "step": 14670 + }, + { + "epoch": 0.5668172516313371, + "grad_norm": 1.715135097503662, + "learning_rate": 0.0001622147573265377, + "loss": 0.2396, + "step": 14680 + }, + { + "epoch": 0.5672033669253639, + "grad_norm": 0.5195931792259216, + "learning_rate": 0.00016218901630693592, + "loss": 0.4992, + "step": 14690 + }, + { + "epoch": 0.5675894822193908, + "grad_norm": 1.076095461845398, + "learning_rate": 0.00016216327528733416, + "loss": 0.4488, + "step": 14700 + }, + 
{ + "epoch": 0.5679755975134175, + "grad_norm": 0.42350637912750244, + "learning_rate": 0.00016213753426773235, + "loss": 0.2798, + "step": 14710 + }, + { + "epoch": 0.5683617128074443, + "grad_norm": 2.8514647483825684, + "learning_rate": 0.00016211179324813056, + "loss": 0.3108, + "step": 14720 + }, + { + "epoch": 0.5687478281014711, + "grad_norm": 1.4496532678604126, + "learning_rate": 0.00016208605222852877, + "loss": 0.4197, + "step": 14730 + }, + { + "epoch": 0.5691339433954979, + "grad_norm": 2.513998031616211, + "learning_rate": 0.00016206031120892699, + "loss": 0.4931, + "step": 14740 + }, + { + "epoch": 0.5695200586895247, + "grad_norm": 1.5905802249908447, + "learning_rate": 0.0001620345701893252, + "loss": 0.2175, + "step": 14750 + }, + { + "epoch": 0.5699061739835515, + "grad_norm": 0.4657856523990631, + "learning_rate": 0.0001620088291697234, + "loss": 0.4753, + "step": 14760 + }, + { + "epoch": 0.5702922892775782, + "grad_norm": 1.4188594818115234, + "learning_rate": 0.00016198308815012165, + "loss": 0.4849, + "step": 14770 + }, + { + "epoch": 0.5706784045716051, + "grad_norm": 0.6742203235626221, + "learning_rate": 0.00016195734713051984, + "loss": 0.3451, + "step": 14780 + }, + { + "epoch": 0.5710645198656319, + "grad_norm": 1.521262526512146, + "learning_rate": 0.00016193160611091805, + "loss": 0.6253, + "step": 14790 + }, + { + "epoch": 0.5714506351596587, + "grad_norm": 0.8657771348953247, + "learning_rate": 0.00016190586509131627, + "loss": 0.3664, + "step": 14800 + }, + { + "epoch": 0.5718367504536854, + "grad_norm": 1.0131505727767944, + "learning_rate": 0.00016188012407171448, + "loss": 0.3395, + "step": 14810 + }, + { + "epoch": 0.5722228657477123, + "grad_norm": 0.8506319522857666, + "learning_rate": 0.00016185438305211272, + "loss": 0.2769, + "step": 14820 + }, + { + "epoch": 0.5726089810417391, + "grad_norm": 3.1080141067504883, + "learning_rate": 0.0001618286420325109, + "loss": 0.3185, + "step": 14830 + }, + { + "epoch": 0.5729950963357658, + "grad_norm": 0.8805003762245178, + "learning_rate": 0.00016180290101290915, + "loss": 0.3089, + "step": 14840 + }, + { + "epoch": 0.5733812116297926, + "grad_norm": 3.9470136165618896, + "learning_rate": 0.00016177715999330733, + "loss": 0.4552, + "step": 14850 + }, + { + "epoch": 0.5737673269238195, + "grad_norm": 1.10677969455719, + "learning_rate": 0.00016175141897370557, + "loss": 0.4624, + "step": 14860 + }, + { + "epoch": 0.5741534422178463, + "grad_norm": 1.185539960861206, + "learning_rate": 0.00016172567795410376, + "loss": 0.3144, + "step": 14870 + }, + { + "epoch": 0.574539557511873, + "grad_norm": 2.484386920928955, + "learning_rate": 0.00016169993693450197, + "loss": 0.3409, + "step": 14880 + }, + { + "epoch": 0.5749256728058998, + "grad_norm": 2.1621437072753906, + "learning_rate": 0.0001616741959149002, + "loss": 0.4421, + "step": 14890 + }, + { + "epoch": 0.5753117880999267, + "grad_norm": 1.3359025716781616, + "learning_rate": 0.0001616484548952984, + "loss": 0.2826, + "step": 14900 + }, + { + "epoch": 0.5756979033939534, + "grad_norm": 2.828157901763916, + "learning_rate": 0.00016162271387569664, + "loss": 0.4615, + "step": 14910 + }, + { + "epoch": 0.5760840186879802, + "grad_norm": 2.0543019771575928, + "learning_rate": 0.00016159697285609483, + "loss": 0.4307, + "step": 14920 + }, + { + "epoch": 0.5764701339820071, + "grad_norm": 0.3085225522518158, + "learning_rate": 0.00016157123183649307, + "loss": 0.317, + "step": 14930 + }, + { + "epoch": 0.5768562492760339, + "grad_norm": 1.459349274635315, + 
"learning_rate": 0.00016154549081689125, + "loss": 0.3928, + "step": 14940 + }, + { + "epoch": 0.5772423645700606, + "grad_norm": 0.6684612035751343, + "learning_rate": 0.00016151974979728947, + "loss": 0.4129, + "step": 14950 + }, + { + "epoch": 0.5776284798640874, + "grad_norm": 2.428311586380005, + "learning_rate": 0.0001614940087776877, + "loss": 0.4163, + "step": 14960 + }, + { + "epoch": 0.5780145951581143, + "grad_norm": 1.8885403871536255, + "learning_rate": 0.0001614682677580859, + "loss": 0.4311, + "step": 14970 + }, + { + "epoch": 0.578400710452141, + "grad_norm": 3.9598031044006348, + "learning_rate": 0.00016144252673848413, + "loss": 0.3103, + "step": 14980 + }, + { + "epoch": 0.5787868257461678, + "grad_norm": 1.872383713722229, + "learning_rate": 0.00016141678571888232, + "loss": 0.3592, + "step": 14990 + }, + { + "epoch": 0.5791729410401946, + "grad_norm": 1.023526668548584, + "learning_rate": 0.00016139104469928056, + "loss": 0.4185, + "step": 15000 + }, + { + "epoch": 0.5795590563342214, + "grad_norm": 1.5721429586410522, + "learning_rate": 0.00016136530367967877, + "loss": 0.4017, + "step": 15010 + }, + { + "epoch": 0.5799451716282482, + "grad_norm": 3.502350091934204, + "learning_rate": 0.00016133956266007696, + "loss": 0.397, + "step": 15020 + }, + { + "epoch": 0.580331286922275, + "grad_norm": 2.415985345840454, + "learning_rate": 0.0001613138216404752, + "loss": 0.4041, + "step": 15030 + }, + { + "epoch": 0.5807174022163017, + "grad_norm": 0.5441868901252747, + "learning_rate": 0.00016128808062087339, + "loss": 0.2395, + "step": 15040 + }, + { + "epoch": 0.5811035175103286, + "grad_norm": 2.453216552734375, + "learning_rate": 0.00016126233960127163, + "loss": 0.1586, + "step": 15050 + }, + { + "epoch": 0.5814896328043554, + "grad_norm": 3.108646869659424, + "learning_rate": 0.0001612365985816698, + "loss": 0.3996, + "step": 15060 + }, + { + "epoch": 0.5818757480983822, + "grad_norm": 0.7707905173301697, + "learning_rate": 0.00016121085756206805, + "loss": 0.1756, + "step": 15070 + }, + { + "epoch": 0.5822618633924089, + "grad_norm": 0.42857447266578674, + "learning_rate": 0.00016118511654246627, + "loss": 0.258, + "step": 15080 + }, + { + "epoch": 0.5826479786864358, + "grad_norm": 0.7148373126983643, + "learning_rate": 0.00016115937552286445, + "loss": 0.3755, + "step": 15090 + }, + { + "epoch": 0.5830340939804626, + "grad_norm": 0.04789021611213684, + "learning_rate": 0.0001611336345032627, + "loss": 0.2087, + "step": 15100 + }, + { + "epoch": 0.5834202092744893, + "grad_norm": 5.012516975402832, + "learning_rate": 0.00016110789348366088, + "loss": 0.5406, + "step": 15110 + }, + { + "epoch": 0.5838063245685161, + "grad_norm": 1.4139299392700195, + "learning_rate": 0.00016108215246405912, + "loss": 0.407, + "step": 15120 + }, + { + "epoch": 0.584192439862543, + "grad_norm": 0.8637074828147888, + "learning_rate": 0.00016105641144445733, + "loss": 0.2987, + "step": 15130 + }, + { + "epoch": 0.5845785551565698, + "grad_norm": 0.9556403160095215, + "learning_rate": 0.00016103067042485555, + "loss": 0.4319, + "step": 15140 + }, + { + "epoch": 0.5849646704505965, + "grad_norm": 2.071455955505371, + "learning_rate": 0.00016100492940525376, + "loss": 0.4824, + "step": 15150 + }, + { + "epoch": 0.5853507857446233, + "grad_norm": 3.8130764961242676, + "learning_rate": 0.00016097918838565195, + "loss": 0.4749, + "step": 15160 + }, + { + "epoch": 0.5857369010386502, + "grad_norm": 1.290738582611084, + "learning_rate": 0.00016095344736605019, + "loss": 0.4486, + "step": 15170 + 
}, + { + "epoch": 0.5861230163326769, + "grad_norm": 0.9964671730995178, + "learning_rate": 0.00016092770634644837, + "loss": 0.1514, + "step": 15180 + }, + { + "epoch": 0.5865091316267037, + "grad_norm": 0.5267524123191833, + "learning_rate": 0.0001609019653268466, + "loss": 0.2298, + "step": 15190 + }, + { + "epoch": 0.5868952469207306, + "grad_norm": 2.028960704803467, + "learning_rate": 0.00016087622430724483, + "loss": 0.2925, + "step": 15200 + }, + { + "epoch": 0.5872813622147574, + "grad_norm": 0.8423904776573181, + "learning_rate": 0.00016085048328764304, + "loss": 0.4221, + "step": 15210 + }, + { + "epoch": 0.5876674775087841, + "grad_norm": 1.9663047790527344, + "learning_rate": 0.00016082474226804125, + "loss": 0.3595, + "step": 15220 + }, + { + "epoch": 0.5880535928028109, + "grad_norm": 1.2221906185150146, + "learning_rate": 0.00016079900124843944, + "loss": 0.3174, + "step": 15230 + }, + { + "epoch": 0.5884397080968378, + "grad_norm": 2.138437032699585, + "learning_rate": 0.00016077326022883768, + "loss": 0.2993, + "step": 15240 + }, + { + "epoch": 0.5888258233908645, + "grad_norm": 1.8036224842071533, + "learning_rate": 0.00016074751920923586, + "loss": 0.3897, + "step": 15250 + }, + { + "epoch": 0.5892119386848913, + "grad_norm": 2.3102879524230957, + "learning_rate": 0.0001607217781896341, + "loss": 0.3713, + "step": 15260 + }, + { + "epoch": 0.5895980539789181, + "grad_norm": 1.40048348903656, + "learning_rate": 0.00016069603717003232, + "loss": 0.2701, + "step": 15270 + }, + { + "epoch": 0.589984169272945, + "grad_norm": 1.0581787824630737, + "learning_rate": 0.00016067029615043053, + "loss": 0.2529, + "step": 15280 + }, + { + "epoch": 0.5903702845669717, + "grad_norm": 0.668211042881012, + "learning_rate": 0.00016064455513082875, + "loss": 0.221, + "step": 15290 + }, + { + "epoch": 0.5907563998609985, + "grad_norm": 0.7950372099876404, + "learning_rate": 0.00016061881411122696, + "loss": 0.2405, + "step": 15300 + }, + { + "epoch": 0.5911425151550252, + "grad_norm": 1.8531723022460938, + "learning_rate": 0.00016059307309162517, + "loss": 0.3423, + "step": 15310 + }, + { + "epoch": 0.5915286304490521, + "grad_norm": 0.2071121335029602, + "learning_rate": 0.00016056733207202339, + "loss": 0.2923, + "step": 15320 + }, + { + "epoch": 0.5919147457430789, + "grad_norm": 2.4298369884490967, + "learning_rate": 0.0001605415910524216, + "loss": 0.531, + "step": 15330 + }, + { + "epoch": 0.5923008610371057, + "grad_norm": 3.2297933101654053, + "learning_rate": 0.0001605158500328198, + "loss": 0.2563, + "step": 15340 + }, + { + "epoch": 0.5926869763311324, + "grad_norm": 1.533225178718567, + "learning_rate": 0.00016049010901321803, + "loss": 0.2712, + "step": 15350 + }, + { + "epoch": 0.5930730916251593, + "grad_norm": 3.6168954372406006, + "learning_rate": 0.00016046436799361624, + "loss": 0.6428, + "step": 15360 + }, + { + "epoch": 0.5934592069191861, + "grad_norm": 0.8912101984024048, + "learning_rate": 0.00016043862697401445, + "loss": 0.2882, + "step": 15370 + }, + { + "epoch": 0.5938453222132128, + "grad_norm": 0.6782923936843872, + "learning_rate": 0.00016041288595441267, + "loss": 0.3451, + "step": 15380 + }, + { + "epoch": 0.5942314375072396, + "grad_norm": 2.7575254440307617, + "learning_rate": 0.00016038714493481088, + "loss": 0.272, + "step": 15390 + }, + { + "epoch": 0.5946175528012665, + "grad_norm": 1.8348017930984497, + "learning_rate": 0.0001603614039152091, + "loss": 0.286, + "step": 15400 + }, + { + "epoch": 0.5950036680952933, + "grad_norm": 3.1459157466888428, + 
"learning_rate": 0.0001603356628956073, + "loss": 0.2986, + "step": 15410 + }, + { + "epoch": 0.59538978338932, + "grad_norm": 2.0769810676574707, + "learning_rate": 0.00016030992187600552, + "loss": 0.5512, + "step": 15420 + }, + { + "epoch": 0.5957758986833468, + "grad_norm": 0.5503840446472168, + "learning_rate": 0.00016028418085640373, + "loss": 0.4117, + "step": 15430 + }, + { + "epoch": 0.5961620139773737, + "grad_norm": 1.9759409427642822, + "learning_rate": 0.00016025843983680194, + "loss": 0.3619, + "step": 15440 + }, + { + "epoch": 0.5965481292714004, + "grad_norm": 2.2451424598693848, + "learning_rate": 0.00016023269881720016, + "loss": 0.2798, + "step": 15450 + }, + { + "epoch": 0.5969342445654272, + "grad_norm": 1.8537431955337524, + "learning_rate": 0.00016020695779759837, + "loss": 0.3739, + "step": 15460 + }, + { + "epoch": 0.5973203598594541, + "grad_norm": 1.7667044401168823, + "learning_rate": 0.00016018121677799658, + "loss": 0.3694, + "step": 15470 + }, + { + "epoch": 0.5977064751534809, + "grad_norm": 0.8955661654472351, + "learning_rate": 0.0001601554757583948, + "loss": 0.2036, + "step": 15480 + }, + { + "epoch": 0.5980925904475076, + "grad_norm": 0.9526143074035645, + "learning_rate": 0.000160129734738793, + "loss": 0.3728, + "step": 15490 + }, + { + "epoch": 0.5984787057415344, + "grad_norm": 0.5816594958305359, + "learning_rate": 0.00016010399371919122, + "loss": 0.3263, + "step": 15500 + }, + { + "epoch": 0.5988648210355613, + "grad_norm": 0.6841669678688049, + "learning_rate": 0.00016007825269958944, + "loss": 0.3252, + "step": 15510 + }, + { + "epoch": 0.599250936329588, + "grad_norm": 0.9375134706497192, + "learning_rate": 0.00016005251167998765, + "loss": 0.2207, + "step": 15520 + }, + { + "epoch": 0.5996370516236148, + "grad_norm": 0.39535248279571533, + "learning_rate": 0.00016002677066038586, + "loss": 0.3566, + "step": 15530 + }, + { + "epoch": 0.6000231669176416, + "grad_norm": 0.5440202951431274, + "learning_rate": 0.00016000102964078408, + "loss": 0.2886, + "step": 15540 + }, + { + "epoch": 0.6004092822116684, + "grad_norm": 0.45111024379730225, + "learning_rate": 0.0001599752886211823, + "loss": 0.2697, + "step": 15550 + }, + { + "epoch": 0.6007953975056952, + "grad_norm": 2.372063398361206, + "learning_rate": 0.0001599495476015805, + "loss": 0.4716, + "step": 15560 + }, + { + "epoch": 0.601181512799722, + "grad_norm": 0.5841318368911743, + "learning_rate": 0.00015992380658197872, + "loss": 0.5611, + "step": 15570 + }, + { + "epoch": 0.6015676280937488, + "grad_norm": 0.323010116815567, + "learning_rate": 0.00015989806556237693, + "loss": 0.2733, + "step": 15580 + }, + { + "epoch": 0.6019537433877756, + "grad_norm": 1.4498323202133179, + "learning_rate": 0.00015987232454277514, + "loss": 0.4063, + "step": 15590 + }, + { + "epoch": 0.6023398586818024, + "grad_norm": 0.47180086374282837, + "learning_rate": 0.00015984658352317336, + "loss": 0.3322, + "step": 15600 + }, + { + "epoch": 0.6027259739758292, + "grad_norm": 1.2303547859191895, + "learning_rate": 0.00015982084250357157, + "loss": 0.2985, + "step": 15610 + }, + { + "epoch": 0.6031120892698559, + "grad_norm": 2.5056209564208984, + "learning_rate": 0.00015979510148396978, + "loss": 0.4903, + "step": 15620 + }, + { + "epoch": 0.6034982045638828, + "grad_norm": 1.344814419746399, + "learning_rate": 0.00015976936046436802, + "loss": 0.3806, + "step": 15630 + }, + { + "epoch": 0.6038843198579096, + "grad_norm": 3.2931411266326904, + "learning_rate": 0.0001597436194447662, + "loss": 0.3291, + "step": 
15640 + }, + { + "epoch": 0.6042704351519363, + "grad_norm": 0.3108818829059601, + "learning_rate": 0.00015971787842516442, + "loss": 0.2866, + "step": 15650 + }, + { + "epoch": 0.6046565504459631, + "grad_norm": 2.6437489986419678, + "learning_rate": 0.00015969213740556264, + "loss": 0.2853, + "step": 15660 + }, + { + "epoch": 0.60504266573999, + "grad_norm": 0.6996239423751831, + "learning_rate": 0.00015966639638596085, + "loss": 0.2947, + "step": 15670 + }, + { + "epoch": 0.6054287810340168, + "grad_norm": 0.9377492070198059, + "learning_rate": 0.00015964065536635906, + "loss": 0.5502, + "step": 15680 + }, + { + "epoch": 0.6058148963280435, + "grad_norm": 0.3067781627178192, + "learning_rate": 0.00015961491434675728, + "loss": 0.2916, + "step": 15690 + }, + { + "epoch": 0.6062010116220703, + "grad_norm": 1.6191383600234985, + "learning_rate": 0.00015958917332715552, + "loss": 0.2536, + "step": 15700 + }, + { + "epoch": 0.6065871269160972, + "grad_norm": 0.5139639973640442, + "learning_rate": 0.0001595634323075537, + "loss": 0.2809, + "step": 15710 + }, + { + "epoch": 0.6069732422101239, + "grad_norm": 1.6476198434829712, + "learning_rate": 0.00015953769128795192, + "loss": 0.2859, + "step": 15720 + }, + { + "epoch": 0.6073593575041507, + "grad_norm": 3.895970106124878, + "learning_rate": 0.00015951195026835013, + "loss": 0.5254, + "step": 15730 + }, + { + "epoch": 0.6077454727981776, + "grad_norm": 1.1022089719772339, + "learning_rate": 0.00015948620924874834, + "loss": 0.3254, + "step": 15740 + }, + { + "epoch": 0.6081315880922044, + "grad_norm": 1.3811163902282715, + "learning_rate": 0.00015946046822914656, + "loss": 0.3259, + "step": 15750 + }, + { + "epoch": 0.6085177033862311, + "grad_norm": 1.0810881853103638, + "learning_rate": 0.00015943472720954477, + "loss": 0.3689, + "step": 15760 + }, + { + "epoch": 0.6089038186802579, + "grad_norm": 2.1972954273223877, + "learning_rate": 0.000159408986189943, + "loss": 0.3255, + "step": 15770 + }, + { + "epoch": 0.6092899339742848, + "grad_norm": 3.945215940475464, + "learning_rate": 0.0001593832451703412, + "loss": 0.4317, + "step": 15780 + }, + { + "epoch": 0.6096760492683115, + "grad_norm": 0.5246737599372864, + "learning_rate": 0.0001593575041507394, + "loss": 0.5895, + "step": 15790 + }, + { + "epoch": 0.6100621645623383, + "grad_norm": 0.0722908228635788, + "learning_rate": 0.00015933176313113762, + "loss": 0.2078, + "step": 15800 + }, + { + "epoch": 0.6104482798563651, + "grad_norm": 1.888529658317566, + "learning_rate": 0.00015930602211153584, + "loss": 0.3629, + "step": 15810 + }, + { + "epoch": 0.610834395150392, + "grad_norm": 0.7190912365913391, + "learning_rate": 0.00015928028109193408, + "loss": 0.4715, + "step": 15820 + }, + { + "epoch": 0.6112205104444187, + "grad_norm": 1.1624583005905151, + "learning_rate": 0.00015925454007233226, + "loss": 0.3678, + "step": 15830 + }, + { + "epoch": 0.6116066257384455, + "grad_norm": 1.4570399522781372, + "learning_rate": 0.0001592287990527305, + "loss": 0.3008, + "step": 15840 + }, + { + "epoch": 0.6119927410324723, + "grad_norm": 0.45386505126953125, + "learning_rate": 0.0001592030580331287, + "loss": 0.2919, + "step": 15850 + }, + { + "epoch": 0.6123788563264991, + "grad_norm": 1.9031087160110474, + "learning_rate": 0.0001591773170135269, + "loss": 0.4336, + "step": 15860 + }, + { + "epoch": 0.6127649716205259, + "grad_norm": 0.9156181216239929, + "learning_rate": 0.00015915157599392512, + "loss": 0.2451, + "step": 15870 + }, + { + "epoch": 0.6131510869145527, + "grad_norm": 
1.8031158447265625, + "learning_rate": 0.00015912583497432333, + "loss": 0.259, + "step": 15880 + }, + { + "epoch": 0.6135372022085794, + "grad_norm": 1.4521692991256714, + "learning_rate": 0.00015910009395472157, + "loss": 0.279, + "step": 15890 + }, + { + "epoch": 0.6139233175026063, + "grad_norm": 1.5064165592193604, + "learning_rate": 0.00015907435293511976, + "loss": 0.34, + "step": 15900 + }, + { + "epoch": 0.6143094327966331, + "grad_norm": 0.588637113571167, + "learning_rate": 0.000159048611915518, + "loss": 0.4886, + "step": 15910 + }, + { + "epoch": 0.6146955480906598, + "grad_norm": 0.3159797191619873, + "learning_rate": 0.00015902287089591618, + "loss": 0.3265, + "step": 15920 + }, + { + "epoch": 0.6150816633846866, + "grad_norm": 3.3988165855407715, + "learning_rate": 0.0001589971298763144, + "loss": 0.4917, + "step": 15930 + }, + { + "epoch": 0.6154677786787135, + "grad_norm": 0.5139709711074829, + "learning_rate": 0.00015897138885671264, + "loss": 0.2175, + "step": 15940 + }, + { + "epoch": 0.6158538939727403, + "grad_norm": 3.6877944469451904, + "learning_rate": 0.00015894564783711082, + "loss": 0.4674, + "step": 15950 + }, + { + "epoch": 0.616240009266767, + "grad_norm": 1.6468040943145752, + "learning_rate": 0.00015891990681750906, + "loss": 0.4375, + "step": 15960 + }, + { + "epoch": 0.6166261245607938, + "grad_norm": 0.47542962431907654, + "learning_rate": 0.00015889416579790725, + "loss": 0.3593, + "step": 15970 + }, + { + "epoch": 0.6170122398548207, + "grad_norm": 2.210597038269043, + "learning_rate": 0.0001588684247783055, + "loss": 0.3356, + "step": 15980 + }, + { + "epoch": 0.6173983551488474, + "grad_norm": 2.0030908584594727, + "learning_rate": 0.00015884268375870368, + "loss": 0.3367, + "step": 15990 + }, + { + "epoch": 0.6177844704428742, + "grad_norm": 3.438887119293213, + "learning_rate": 0.0001588169427391019, + "loss": 0.4583, + "step": 16000 + }, + { + "epoch": 0.6181705857369011, + "grad_norm": 2.88147234916687, + "learning_rate": 0.00015879120171950013, + "loss": 0.3837, + "step": 16010 + }, + { + "epoch": 0.6185567010309279, + "grad_norm": 0.9327366948127747, + "learning_rate": 0.00015876546069989832, + "loss": 0.236, + "step": 16020 + }, + { + "epoch": 0.6189428163249546, + "grad_norm": 2.205355405807495, + "learning_rate": 0.00015873971968029656, + "loss": 0.2957, + "step": 16030 + }, + { + "epoch": 0.6193289316189814, + "grad_norm": 2.3861300945281982, + "learning_rate": 0.00015871397866069474, + "loss": 0.427, + "step": 16040 + }, + { + "epoch": 0.6197150469130083, + "grad_norm": 0.3276061415672302, + "learning_rate": 0.00015868823764109298, + "loss": 0.2301, + "step": 16050 + }, + { + "epoch": 0.620101162207035, + "grad_norm": 0.6325292587280273, + "learning_rate": 0.00015866249662149117, + "loss": 0.3915, + "step": 16060 + }, + { + "epoch": 0.6204872775010618, + "grad_norm": 1.1546003818511963, + "learning_rate": 0.0001586367556018894, + "loss": 0.4081, + "step": 16070 + }, + { + "epoch": 0.6208733927950886, + "grad_norm": 1.7260868549346924, + "learning_rate": 0.00015861101458228762, + "loss": 0.4642, + "step": 16080 + }, + { + "epoch": 0.6212595080891155, + "grad_norm": 0.30561816692352295, + "learning_rate": 0.0001585852735626858, + "loss": 0.2065, + "step": 16090 + }, + { + "epoch": 0.6216456233831422, + "grad_norm": 2.440007448196411, + "learning_rate": 0.00015855953254308405, + "loss": 0.3851, + "step": 16100 + }, + { + "epoch": 0.622031738677169, + "grad_norm": 4.06764554977417, + "learning_rate": 0.00015853379152348224, + "loss": 0.3796, 
+ "step": 16110 + }, + { + "epoch": 0.6224178539711958, + "grad_norm": 4.970936298370361, + "learning_rate": 0.00015850805050388048, + "loss": 0.3393, + "step": 16120 + }, + { + "epoch": 0.6228039692652226, + "grad_norm": 2.0721356868743896, + "learning_rate": 0.0001584823094842787, + "loss": 0.2991, + "step": 16130 + }, + { + "epoch": 0.6231900845592494, + "grad_norm": 1.8130602836608887, + "learning_rate": 0.0001584565684646769, + "loss": 0.3132, + "step": 16140 + }, + { + "epoch": 0.6235761998532762, + "grad_norm": 1.1093038320541382, + "learning_rate": 0.00015843082744507512, + "loss": 0.3144, + "step": 16150 + }, + { + "epoch": 0.6239623151473029, + "grad_norm": 2.227937936782837, + "learning_rate": 0.0001584050864254733, + "loss": 0.4955, + "step": 16160 + }, + { + "epoch": 0.6243484304413298, + "grad_norm": 1.2541782855987549, + "learning_rate": 0.00015837934540587154, + "loss": 0.225, + "step": 16170 + }, + { + "epoch": 0.6247345457353566, + "grad_norm": 1.8113441467285156, + "learning_rate": 0.00015835360438626973, + "loss": 0.5393, + "step": 16180 + }, + { + "epoch": 0.6251206610293834, + "grad_norm": 2.198061466217041, + "learning_rate": 0.00015832786336666797, + "loss": 0.2225, + "step": 16190 + }, + { + "epoch": 0.6255067763234101, + "grad_norm": 2.4241714477539062, + "learning_rate": 0.00015830212234706618, + "loss": 0.3294, + "step": 16200 + }, + { + "epoch": 0.625892891617437, + "grad_norm": 3.0632903575897217, + "learning_rate": 0.0001582763813274644, + "loss": 0.3776, + "step": 16210 + }, + { + "epoch": 0.6262790069114638, + "grad_norm": 1.9433149099349976, + "learning_rate": 0.0001582506403078626, + "loss": 0.2964, + "step": 16220 + }, + { + "epoch": 0.6266651222054905, + "grad_norm": 1.6328935623168945, + "learning_rate": 0.0001582248992882608, + "loss": 0.2169, + "step": 16230 + }, + { + "epoch": 0.6270512374995174, + "grad_norm": 0.926477313041687, + "learning_rate": 0.00015819915826865904, + "loss": 0.271, + "step": 16240 + }, + { + "epoch": 0.6274373527935442, + "grad_norm": 3.4526686668395996, + "learning_rate": 0.00015817341724905722, + "loss": 0.1836, + "step": 16250 + }, + { + "epoch": 0.627823468087571, + "grad_norm": 0.8149943947792053, + "learning_rate": 0.00015814767622945546, + "loss": 0.2494, + "step": 16260 + }, + { + "epoch": 0.6282095833815977, + "grad_norm": 0.8609616756439209, + "learning_rate": 0.00015812193520985368, + "loss": 0.279, + "step": 16270 + }, + { + "epoch": 0.6285956986756246, + "grad_norm": 0.9432594180107117, + "learning_rate": 0.0001580961941902519, + "loss": 0.3485, + "step": 16280 + }, + { + "epoch": 0.6289818139696514, + "grad_norm": 2.8844246864318848, + "learning_rate": 0.0001580704531706501, + "loss": 0.6163, + "step": 16290 + }, + { + "epoch": 0.6293679292636781, + "grad_norm": 3.4325804710388184, + "learning_rate": 0.0001580447121510483, + "loss": 0.3392, + "step": 16300 + }, + { + "epoch": 0.6297540445577049, + "grad_norm": 0.5320155620574951, + "learning_rate": 0.00015801897113144653, + "loss": 0.4664, + "step": 16310 + }, + { + "epoch": 0.6301401598517318, + "grad_norm": 1.075914740562439, + "learning_rate": 0.00015799323011184474, + "loss": 0.3023, + "step": 16320 + }, + { + "epoch": 0.6305262751457585, + "grad_norm": 1.3115136623382568, + "learning_rate": 0.00015796748909224296, + "loss": 0.1741, + "step": 16330 + }, + { + "epoch": 0.6309123904397853, + "grad_norm": 0.18413056433200836, + "learning_rate": 0.00015794174807264117, + "loss": 0.1966, + "step": 16340 + }, + { + "epoch": 0.6312985057338121, + "grad_norm": 
1.8707069158554077, + "learning_rate": 0.00015791600705303938, + "loss": 0.3175, + "step": 16350 + }, + { + "epoch": 0.631684621027839, + "grad_norm": 2.2436699867248535, + "learning_rate": 0.0001578902660334376, + "loss": 0.2619, + "step": 16360 + }, + { + "epoch": 0.6320707363218657, + "grad_norm": 2.6100945472717285, + "learning_rate": 0.00015786452501383578, + "loss": 0.2874, + "step": 16370 + }, + { + "epoch": 0.6324568516158925, + "grad_norm": 1.455538034439087, + "learning_rate": 0.00015783878399423402, + "loss": 0.3956, + "step": 16380 + }, + { + "epoch": 0.6328429669099193, + "grad_norm": 1.1950361728668213, + "learning_rate": 0.00015781304297463224, + "loss": 0.3406, + "step": 16390 + }, + { + "epoch": 0.6332290822039461, + "grad_norm": 0.6905789971351624, + "learning_rate": 0.00015778730195503045, + "loss": 0.2788, + "step": 16400 + }, + { + "epoch": 0.6336151974979729, + "grad_norm": 1.8803350925445557, + "learning_rate": 0.00015776156093542866, + "loss": 0.5509, + "step": 16410 + }, + { + "epoch": 0.6340013127919997, + "grad_norm": 4.088913440704346, + "learning_rate": 0.00015773581991582688, + "loss": 0.5238, + "step": 16420 + }, + { + "epoch": 0.6343874280860264, + "grad_norm": 2.9464988708496094, + "learning_rate": 0.0001577100788962251, + "loss": 0.4721, + "step": 16430 + }, + { + "epoch": 0.6347735433800533, + "grad_norm": 2.005481719970703, + "learning_rate": 0.0001576843378766233, + "loss": 0.323, + "step": 16440 + }, + { + "epoch": 0.6351596586740801, + "grad_norm": 0.1693512350320816, + "learning_rate": 0.00015765859685702152, + "loss": 0.3459, + "step": 16450 + }, + { + "epoch": 0.6355457739681069, + "grad_norm": 1.6552183628082275, + "learning_rate": 0.00015763285583741973, + "loss": 0.4299, + "step": 16460 + }, + { + "epoch": 0.6359318892621336, + "grad_norm": 0.8498923182487488, + "learning_rate": 0.00015760711481781794, + "loss": 0.3665, + "step": 16470 + }, + { + "epoch": 0.6363180045561605, + "grad_norm": 1.098840594291687, + "learning_rate": 0.00015758137379821616, + "loss": 0.318, + "step": 16480 + }, + { + "epoch": 0.6367041198501873, + "grad_norm": 2.69606876373291, + "learning_rate": 0.00015755563277861437, + "loss": 0.3566, + "step": 16490 + }, + { + "epoch": 0.637090235144214, + "grad_norm": 1.4099249839782715, + "learning_rate": 0.00015752989175901258, + "loss": 0.2658, + "step": 16500 + }, + { + "epoch": 0.6374763504382409, + "grad_norm": 0.10336513817310333, + "learning_rate": 0.0001575041507394108, + "loss": 0.4608, + "step": 16510 + }, + { + "epoch": 0.6378624657322677, + "grad_norm": 2.224609136581421, + "learning_rate": 0.000157478409719809, + "loss": 0.2875, + "step": 16520 + }, + { + "epoch": 0.6382485810262944, + "grad_norm": 1.3182893991470337, + "learning_rate": 0.00015745266870020722, + "loss": 0.2972, + "step": 16530 + }, + { + "epoch": 0.6386346963203212, + "grad_norm": 0.8028392195701599, + "learning_rate": 0.00015742692768060544, + "loss": 0.3553, + "step": 16540 + }, + { + "epoch": 0.6390208116143481, + "grad_norm": 2.6714046001434326, + "learning_rate": 0.00015740118666100365, + "loss": 0.3965, + "step": 16550 + }, + { + "epoch": 0.6394069269083749, + "grad_norm": 0.6173273921012878, + "learning_rate": 0.00015737544564140186, + "loss": 0.4278, + "step": 16560 + }, + { + "epoch": 0.6397930422024016, + "grad_norm": 0.9547831416130066, + "learning_rate": 0.00015734970462180008, + "loss": 0.3854, + "step": 16570 + }, + { + "epoch": 0.6401791574964284, + "grad_norm": 1.1336010694503784, + "learning_rate": 0.0001573239636021983, + "loss": 
0.3505, + "step": 16580 + }, + { + "epoch": 0.6405652727904553, + "grad_norm": 4.911902904510498, + "learning_rate": 0.0001572982225825965, + "loss": 0.6624, + "step": 16590 + }, + { + "epoch": 0.640951388084482, + "grad_norm": 2.472303628921509, + "learning_rate": 0.00015727248156299472, + "loss": 0.4479, + "step": 16600 + }, + { + "epoch": 0.6413375033785088, + "grad_norm": 1.7077019214630127, + "learning_rate": 0.00015724674054339293, + "loss": 0.3985, + "step": 16610 + }, + { + "epoch": 0.6417236186725356, + "grad_norm": 4.096541404724121, + "learning_rate": 0.00015722099952379114, + "loss": 0.4763, + "step": 16620 + }, + { + "epoch": 0.6421097339665625, + "grad_norm": 1.5275769233703613, + "learning_rate": 0.00015719525850418936, + "loss": 0.3792, + "step": 16630 + }, + { + "epoch": 0.6424958492605892, + "grad_norm": 0.33548179268836975, + "learning_rate": 0.00015716951748458757, + "loss": 0.3276, + "step": 16640 + }, + { + "epoch": 0.642881964554616, + "grad_norm": 4.142831325531006, + "learning_rate": 0.00015714377646498578, + "loss": 0.4906, + "step": 16650 + }, + { + "epoch": 0.6432680798486428, + "grad_norm": 1.5129270553588867, + "learning_rate": 0.000157118035445384, + "loss": 0.3077, + "step": 16660 + }, + { + "epoch": 0.6436541951426696, + "grad_norm": 2.0287647247314453, + "learning_rate": 0.0001570922944257822, + "loss": 0.43, + "step": 16670 + }, + { + "epoch": 0.6440403104366964, + "grad_norm": 2.1278579235076904, + "learning_rate": 0.00015706655340618042, + "loss": 0.3822, + "step": 16680 + }, + { + "epoch": 0.6444264257307232, + "grad_norm": 0.7621383666992188, + "learning_rate": 0.00015704081238657864, + "loss": 0.2556, + "step": 16690 + }, + { + "epoch": 0.6448125410247499, + "grad_norm": 1.888422966003418, + "learning_rate": 0.00015701507136697685, + "loss": 0.4687, + "step": 16700 + }, + { + "epoch": 0.6451986563187768, + "grad_norm": 2.09405517578125, + "learning_rate": 0.00015698933034737506, + "loss": 0.4447, + "step": 16710 + }, + { + "epoch": 0.6455847716128036, + "grad_norm": 0.8639073967933655, + "learning_rate": 0.00015696358932777328, + "loss": 0.224, + "step": 16720 + }, + { + "epoch": 0.6459708869068304, + "grad_norm": 1.1472411155700684, + "learning_rate": 0.0001569378483081715, + "loss": 0.315, + "step": 16730 + }, + { + "epoch": 0.6463570022008571, + "grad_norm": 2.4987192153930664, + "learning_rate": 0.0001569121072885697, + "loss": 0.3707, + "step": 16740 + }, + { + "epoch": 0.646743117494884, + "grad_norm": 1.661458134651184, + "learning_rate": 0.00015688636626896792, + "loss": 0.4027, + "step": 16750 + }, + { + "epoch": 0.6471292327889108, + "grad_norm": 1.5494720935821533, + "learning_rate": 0.00015686062524936613, + "loss": 0.3737, + "step": 16760 + }, + { + "epoch": 0.6475153480829375, + "grad_norm": 0.3376433253288269, + "learning_rate": 0.00015683488422976437, + "loss": 0.3468, + "step": 16770 + }, + { + "epoch": 0.6479014633769644, + "grad_norm": 0.6496528387069702, + "learning_rate": 0.00015680914321016256, + "loss": 0.2857, + "step": 16780 + }, + { + "epoch": 0.6482875786709912, + "grad_norm": 1.957741618156433, + "learning_rate": 0.00015678340219056077, + "loss": 0.5089, + "step": 16790 + }, + { + "epoch": 0.648673693965018, + "grad_norm": 3.980466842651367, + "learning_rate": 0.00015675766117095898, + "loss": 0.3558, + "step": 16800 + }, + { + "epoch": 0.6490598092590447, + "grad_norm": 3.2516696453094482, + "learning_rate": 0.0001567319201513572, + "loss": 0.6214, + "step": 16810 + }, + { + "epoch": 0.6494459245530716, + "grad_norm": 
0.6847260594367981, + "learning_rate": 0.00015670617913175544, + "loss": 0.3681, + "step": 16820 + }, + { + "epoch": 0.6498320398470984, + "grad_norm": 3.0918118953704834, + "learning_rate": 0.00015668043811215362, + "loss": 0.3608, + "step": 16830 + }, + { + "epoch": 0.6502181551411251, + "grad_norm": 1.1295204162597656, + "learning_rate": 0.00015665469709255186, + "loss": 0.3189, + "step": 16840 + }, + { + "epoch": 0.6506042704351519, + "grad_norm": 1.803222417831421, + "learning_rate": 0.00015662895607295005, + "loss": 0.3138, + "step": 16850 + }, + { + "epoch": 0.6509903857291788, + "grad_norm": 3.157122850418091, + "learning_rate": 0.00015660321505334826, + "loss": 0.4342, + "step": 16860 + }, + { + "epoch": 0.6513765010232055, + "grad_norm": 2.6584184169769287, + "learning_rate": 0.00015657747403374648, + "loss": 0.211, + "step": 16870 + }, + { + "epoch": 0.6517626163172323, + "grad_norm": 0.763903021812439, + "learning_rate": 0.0001565517330141447, + "loss": 0.2873, + "step": 16880 + }, + { + "epoch": 0.6521487316112591, + "grad_norm": 2.5033602714538574, + "learning_rate": 0.00015652599199454293, + "loss": 0.4475, + "step": 16890 + }, + { + "epoch": 0.652534846905286, + "grad_norm": 2.088690757751465, + "learning_rate": 0.00015650025097494112, + "loss": 0.3022, + "step": 16900 + }, + { + "epoch": 0.6529209621993127, + "grad_norm": 1.596064567565918, + "learning_rate": 0.00015647450995533936, + "loss": 0.3771, + "step": 16910 + }, + { + "epoch": 0.6533070774933395, + "grad_norm": 1.2658660411834717, + "learning_rate": 0.00015644876893573754, + "loss": 0.4793, + "step": 16920 + }, + { + "epoch": 0.6536931927873663, + "grad_norm": 1.5343844890594482, + "learning_rate": 0.00015642302791613576, + "loss": 0.5026, + "step": 16930 + }, + { + "epoch": 0.6540793080813931, + "grad_norm": 0.4736674129962921, + "learning_rate": 0.000156397286896534, + "loss": 0.2269, + "step": 16940 + }, + { + "epoch": 0.6544654233754199, + "grad_norm": 0.05510171130299568, + "learning_rate": 0.00015637154587693218, + "loss": 0.2398, + "step": 16950 + }, + { + "epoch": 0.6548515386694467, + "grad_norm": 0.641941249370575, + "learning_rate": 0.00015634580485733042, + "loss": 0.3862, + "step": 16960 + }, + { + "epoch": 0.6552376539634734, + "grad_norm": 1.5418890714645386, + "learning_rate": 0.0001563200638377286, + "loss": 0.2688, + "step": 16970 + }, + { + "epoch": 0.6556237692575003, + "grad_norm": 3.46284818649292, + "learning_rate": 0.00015629432281812685, + "loss": 0.5199, + "step": 16980 + }, + { + "epoch": 0.6560098845515271, + "grad_norm": 0.3225530683994293, + "learning_rate": 0.00015626858179852504, + "loss": 0.5035, + "step": 16990 + }, + { + "epoch": 0.6563959998455539, + "grad_norm": 0.9385218620300293, + "learning_rate": 0.00015624284077892325, + "loss": 0.2618, + "step": 17000 + }, + { + "epoch": 0.6567821151395806, + "grad_norm": 0.8849124312400818, + "learning_rate": 0.0001562170997593215, + "loss": 0.3711, + "step": 17010 + }, + { + "epoch": 0.6571682304336075, + "grad_norm": 2.2706375122070312, + "learning_rate": 0.00015619135873971968, + "loss": 0.2666, + "step": 17020 + }, + { + "epoch": 0.6575543457276343, + "grad_norm": 2.1923744678497314, + "learning_rate": 0.00015616561772011792, + "loss": 0.2038, + "step": 17030 + }, + { + "epoch": 0.657940461021661, + "grad_norm": 1.2356051206588745, + "learning_rate": 0.0001561398767005161, + "loss": 0.4103, + "step": 17040 + }, + { + "epoch": 0.6583265763156879, + "grad_norm": 1.583095669746399, + "learning_rate": 0.00015611413568091434, + "loss": 
0.4164, + "step": 17050 + }, + { + "epoch": 0.6587126916097147, + "grad_norm": 1.0823155641555786, + "learning_rate": 0.00015608839466131253, + "loss": 0.3898, + "step": 17060 + }, + { + "epoch": 0.6590988069037415, + "grad_norm": 1.9568531513214111, + "learning_rate": 0.00015606265364171074, + "loss": 0.3553, + "step": 17070 + }, + { + "epoch": 0.6594849221977682, + "grad_norm": 3.576362371444702, + "learning_rate": 0.00015603691262210898, + "loss": 0.2693, + "step": 17080 + }, + { + "epoch": 0.6598710374917951, + "grad_norm": 0.2432270646095276, + "learning_rate": 0.00015601117160250717, + "loss": 0.2993, + "step": 17090 + }, + { + "epoch": 0.6602571527858219, + "grad_norm": 1.5935213565826416, + "learning_rate": 0.0001559854305829054, + "loss": 0.3295, + "step": 17100 + }, + { + "epoch": 0.6606432680798486, + "grad_norm": 0.09780561178922653, + "learning_rate": 0.0001559596895633036, + "loss": 0.2891, + "step": 17110 + }, + { + "epoch": 0.6610293833738754, + "grad_norm": 0.5332283973693848, + "learning_rate": 0.00015593394854370184, + "loss": 0.372, + "step": 17120 + }, + { + "epoch": 0.6614154986679023, + "grad_norm": 1.1921123266220093, + "learning_rate": 0.00015590820752410005, + "loss": 0.3155, + "step": 17130 + }, + { + "epoch": 0.661801613961929, + "grad_norm": 0.35267120599746704, + "learning_rate": 0.00015588246650449823, + "loss": 0.3795, + "step": 17140 + }, + { + "epoch": 0.6621877292559558, + "grad_norm": 0.4876207411289215, + "learning_rate": 0.00015585672548489648, + "loss": 0.2717, + "step": 17150 + }, + { + "epoch": 0.6625738445499826, + "grad_norm": 0.9866208434104919, + "learning_rate": 0.00015583098446529466, + "loss": 0.4121, + "step": 17160 + }, + { + "epoch": 0.6629599598440095, + "grad_norm": 3.0264835357666016, + "learning_rate": 0.0001558052434456929, + "loss": 0.5356, + "step": 17170 + }, + { + "epoch": 0.6633460751380362, + "grad_norm": 2.4786953926086426, + "learning_rate": 0.0001557795024260911, + "loss": 0.2086, + "step": 17180 + }, + { + "epoch": 0.663732190432063, + "grad_norm": 2.3706555366516113, + "learning_rate": 0.00015575376140648933, + "loss": 0.5224, + "step": 17190 + }, + { + "epoch": 0.6641183057260898, + "grad_norm": 2.6375296115875244, + "learning_rate": 0.00015572802038688754, + "loss": 0.3625, + "step": 17200 + }, + { + "epoch": 0.6645044210201166, + "grad_norm": 0.5592703819274902, + "learning_rate": 0.00015570227936728573, + "loss": 0.3831, + "step": 17210 + }, + { + "epoch": 0.6648905363141434, + "grad_norm": 2.309683322906494, + "learning_rate": 0.00015567653834768397, + "loss": 0.4039, + "step": 17220 + }, + { + "epoch": 0.6652766516081702, + "grad_norm": 2.3134100437164307, + "learning_rate": 0.00015565079732808215, + "loss": 0.1904, + "step": 17230 + }, + { + "epoch": 0.665662766902197, + "grad_norm": 2.232910633087158, + "learning_rate": 0.0001556250563084804, + "loss": 0.34, + "step": 17240 + }, + { + "epoch": 0.6660488821962238, + "grad_norm": 0.4798373579978943, + "learning_rate": 0.0001555993152888786, + "loss": 0.3143, + "step": 17250 + }, + { + "epoch": 0.6664349974902506, + "grad_norm": 2.071753740310669, + "learning_rate": 0.00015557357426927682, + "loss": 0.2645, + "step": 17260 + }, + { + "epoch": 0.6668211127842774, + "grad_norm": 2.1930956840515137, + "learning_rate": 0.00015554783324967504, + "loss": 0.4144, + "step": 17270 + }, + { + "epoch": 0.6672072280783041, + "grad_norm": 1.7874137163162231, + "learning_rate": 0.00015552209223007325, + "loss": 0.273, + "step": 17280 + }, + { + "epoch": 0.667593343372331, + 
"grad_norm": 1.264596939086914, + "learning_rate": 0.00015549635121047146, + "loss": 0.4186, + "step": 17290 + }, + { + "epoch": 0.6679794586663578, + "grad_norm": 0.5612212419509888, + "learning_rate": 0.00015547061019086965, + "loss": 0.2802, + "step": 17300 + }, + { + "epoch": 0.6683655739603845, + "grad_norm": 1.3782585859298706, + "learning_rate": 0.0001554448691712679, + "loss": 0.3712, + "step": 17310 + }, + { + "epoch": 0.6687516892544114, + "grad_norm": 1.5178605318069458, + "learning_rate": 0.0001554191281516661, + "loss": 0.1694, + "step": 17320 + }, + { + "epoch": 0.6691378045484382, + "grad_norm": 2.1221604347229004, + "learning_rate": 0.00015539338713206432, + "loss": 0.4418, + "step": 17330 + }, + { + "epoch": 0.669523919842465, + "grad_norm": 1.570734977722168, + "learning_rate": 0.00015536764611246253, + "loss": 0.4037, + "step": 17340 + }, + { + "epoch": 0.6699100351364917, + "grad_norm": 0.6928157806396484, + "learning_rate": 0.00015534190509286074, + "loss": 0.5293, + "step": 17350 + }, + { + "epoch": 0.6702961504305186, + "grad_norm": 0.8526401519775391, + "learning_rate": 0.00015531616407325895, + "loss": 0.348, + "step": 17360 + }, + { + "epoch": 0.6706822657245454, + "grad_norm": 1.7482202053070068, + "learning_rate": 0.00015529042305365714, + "loss": 0.352, + "step": 17370 + }, + { + "epoch": 0.6710683810185721, + "grad_norm": 1.724870204925537, + "learning_rate": 0.00015526468203405538, + "loss": 0.3589, + "step": 17380 + }, + { + "epoch": 0.6714544963125989, + "grad_norm": 3.125180721282959, + "learning_rate": 0.0001552389410144536, + "loss": 0.3063, + "step": 17390 + }, + { + "epoch": 0.6718406116066258, + "grad_norm": 2.0817360877990723, + "learning_rate": 0.0001552131999948518, + "loss": 0.2217, + "step": 17400 + }, + { + "epoch": 0.6722267269006525, + "grad_norm": 0.12367000430822372, + "learning_rate": 0.00015518745897525002, + "loss": 0.1691, + "step": 17410 + }, + { + "epoch": 0.6726128421946793, + "grad_norm": 0.23093344271183014, + "learning_rate": 0.00015516171795564823, + "loss": 0.2496, + "step": 17420 + }, + { + "epoch": 0.6729989574887061, + "grad_norm": 3.1588997840881348, + "learning_rate": 0.00015513597693604645, + "loss": 0.2868, + "step": 17430 + }, + { + "epoch": 0.673385072782733, + "grad_norm": 1.471999168395996, + "learning_rate": 0.00015511023591644466, + "loss": 0.2785, + "step": 17440 + }, + { + "epoch": 0.6737711880767597, + "grad_norm": 0.4500691294670105, + "learning_rate": 0.00015508449489684287, + "loss": 0.3218, + "step": 17450 + }, + { + "epoch": 0.6741573033707865, + "grad_norm": 2.65533709526062, + "learning_rate": 0.0001550587538772411, + "loss": 0.3194, + "step": 17460 + }, + { + "epoch": 0.6745434186648133, + "grad_norm": 0.45398348569869995, + "learning_rate": 0.0001550330128576393, + "loss": 0.199, + "step": 17470 + }, + { + "epoch": 0.6749295339588401, + "grad_norm": 0.21518200635910034, + "learning_rate": 0.00015500727183803751, + "loss": 0.3043, + "step": 17480 + }, + { + "epoch": 0.6753156492528669, + "grad_norm": 0.13117246329784393, + "learning_rate": 0.00015498153081843573, + "loss": 0.1872, + "step": 17490 + }, + { + "epoch": 0.6757017645468937, + "grad_norm": 0.4857695996761322, + "learning_rate": 0.00015495578979883394, + "loss": 0.5992, + "step": 17500 + }, + { + "epoch": 0.6760878798409204, + "grad_norm": 2.4992752075195312, + "learning_rate": 0.00015493004877923215, + "loss": 0.5057, + "step": 17510 + }, + { + "epoch": 0.6764739951349473, + "grad_norm": 1.9614732265472412, + "learning_rate": 
0.00015490430775963037, + "loss": 0.3169, + "step": 17520 + }, + { + "epoch": 0.6768601104289741, + "grad_norm": 0.14168275892734528, + "learning_rate": 0.00015487856674002858, + "loss": 0.271, + "step": 17530 + }, + { + "epoch": 0.6772462257230009, + "grad_norm": 4.064804553985596, + "learning_rate": 0.0001548528257204268, + "loss": 0.3316, + "step": 17540 + }, + { + "epoch": 0.6776323410170277, + "grad_norm": 3.2959964275360107, + "learning_rate": 0.000154827084700825, + "loss": 0.5148, + "step": 17550 + }, + { + "epoch": 0.6780184563110545, + "grad_norm": 0.6234021186828613, + "learning_rate": 0.00015480134368122322, + "loss": 0.362, + "step": 17560 + }, + { + "epoch": 0.6784045716050813, + "grad_norm": 0.642573356628418, + "learning_rate": 0.00015477560266162143, + "loss": 0.2982, + "step": 17570 + }, + { + "epoch": 0.678790686899108, + "grad_norm": 1.098667025566101, + "learning_rate": 0.00015474986164201965, + "loss": 0.1875, + "step": 17580 + }, + { + "epoch": 0.6791768021931349, + "grad_norm": 2.378192186355591, + "learning_rate": 0.00015472412062241786, + "loss": 0.2533, + "step": 17590 + }, + { + "epoch": 0.6795629174871617, + "grad_norm": 1.1783161163330078, + "learning_rate": 0.00015469837960281607, + "loss": 0.402, + "step": 17600 + }, + { + "epoch": 0.6799490327811885, + "grad_norm": 0.1861846148967743, + "learning_rate": 0.0001546726385832143, + "loss": 0.2481, + "step": 17610 + }, + { + "epoch": 0.6803351480752152, + "grad_norm": 0.5785403847694397, + "learning_rate": 0.0001546468975636125, + "loss": 0.375, + "step": 17620 + }, + { + "epoch": 0.6807212633692421, + "grad_norm": 1.9201544523239136, + "learning_rate": 0.00015462115654401071, + "loss": 0.2757, + "step": 17630 + }, + { + "epoch": 0.6811073786632689, + "grad_norm": 2.461735963821411, + "learning_rate": 0.00015459541552440893, + "loss": 0.2193, + "step": 17640 + }, + { + "epoch": 0.6814934939572956, + "grad_norm": 2.007638454437256, + "learning_rate": 0.00015456967450480714, + "loss": 0.3561, + "step": 17650 + }, + { + "epoch": 0.6818796092513224, + "grad_norm": 1.3581938743591309, + "learning_rate": 0.00015454393348520535, + "loss": 0.3736, + "step": 17660 + }, + { + "epoch": 0.6822657245453493, + "grad_norm": 0.5637246966362, + "learning_rate": 0.00015451819246560357, + "loss": 0.3116, + "step": 17670 + }, + { + "epoch": 0.682651839839376, + "grad_norm": 1.409740924835205, + "learning_rate": 0.00015449245144600178, + "loss": 0.3281, + "step": 17680 + }, + { + "epoch": 0.6830379551334028, + "grad_norm": 3.2064149379730225, + "learning_rate": 0.0001544667104264, + "loss": 0.4427, + "step": 17690 + }, + { + "epoch": 0.6834240704274296, + "grad_norm": 0.9369992613792419, + "learning_rate": 0.0001544409694067982, + "loss": 0.3424, + "step": 17700 + }, + { + "epoch": 0.6838101857214565, + "grad_norm": 2.4149889945983887, + "learning_rate": 0.00015441522838719642, + "loss": 0.4074, + "step": 17710 + }, + { + "epoch": 0.6841963010154832, + "grad_norm": 0.688360869884491, + "learning_rate": 0.00015438948736759463, + "loss": 0.2205, + "step": 17720 + }, + { + "epoch": 0.68458241630951, + "grad_norm": 2.1444098949432373, + "learning_rate": 0.00015436374634799285, + "loss": 0.5942, + "step": 17730 + }, + { + "epoch": 0.6849685316035368, + "grad_norm": 1.8053444623947144, + "learning_rate": 0.00015433800532839106, + "loss": 0.1665, + "step": 17740 + }, + { + "epoch": 0.6853546468975636, + "grad_norm": 3.5637879371643066, + "learning_rate": 0.0001543122643087893, + "loss": 0.4972, + "step": 17750 + }, + { + "epoch": 
0.6857407621915904, + "grad_norm": 1.846845030784607, + "learning_rate": 0.0001542865232891875, + "loss": 0.4105, + "step": 17760 + }, + { + "epoch": 0.6861268774856172, + "grad_norm": 2.2459189891815186, + "learning_rate": 0.0001542607822695857, + "loss": 0.253, + "step": 17770 + }, + { + "epoch": 0.686512992779644, + "grad_norm": 2.3160414695739746, + "learning_rate": 0.00015423504124998391, + "loss": 0.1905, + "step": 17780 + }, + { + "epoch": 0.6868991080736708, + "grad_norm": 1.2804152965545654, + "learning_rate": 0.00015420930023038213, + "loss": 0.2283, + "step": 17790 + }, + { + "epoch": 0.6872852233676976, + "grad_norm": 1.7174758911132812, + "learning_rate": 0.00015418355921078034, + "loss": 0.49, + "step": 17800 + }, + { + "epoch": 0.6876713386617244, + "grad_norm": 3.057098627090454, + "learning_rate": 0.00015415781819117855, + "loss": 0.4398, + "step": 17810 + }, + { + "epoch": 0.6880574539557512, + "grad_norm": 0.9112808704376221, + "learning_rate": 0.0001541320771715768, + "loss": 0.203, + "step": 17820 + }, + { + "epoch": 0.688443569249778, + "grad_norm": 2.899599313735962, + "learning_rate": 0.00015410633615197498, + "loss": 0.4212, + "step": 17830 + }, + { + "epoch": 0.6888296845438048, + "grad_norm": 1.8084157705307007, + "learning_rate": 0.0001540805951323732, + "loss": 0.4948, + "step": 17840 + }, + { + "epoch": 0.6892157998378315, + "grad_norm": 1.2151083946228027, + "learning_rate": 0.0001540548541127714, + "loss": 0.4254, + "step": 17850 + }, + { + "epoch": 0.6896019151318584, + "grad_norm": 1.20271897315979, + "learning_rate": 0.00015402911309316962, + "loss": 0.1753, + "step": 17860 + }, + { + "epoch": 0.6899880304258852, + "grad_norm": 0.7688419222831726, + "learning_rate": 0.00015400337207356783, + "loss": 0.4066, + "step": 17870 + }, + { + "epoch": 0.690374145719912, + "grad_norm": 0.8648087978363037, + "learning_rate": 0.00015397763105396605, + "loss": 0.5405, + "step": 17880 + }, + { + "epoch": 0.6907602610139387, + "grad_norm": 1.5501036643981934, + "learning_rate": 0.0001539518900343643, + "loss": 0.3761, + "step": 17890 + }, + { + "epoch": 0.6911463763079656, + "grad_norm": 0.5476267337799072, + "learning_rate": 0.00015392614901476247, + "loss": 0.2256, + "step": 17900 + }, + { + "epoch": 0.6915324916019924, + "grad_norm": 2.0248584747314453, + "learning_rate": 0.0001539004079951607, + "loss": 0.5449, + "step": 17910 + }, + { + "epoch": 0.6919186068960191, + "grad_norm": 1.688596248626709, + "learning_rate": 0.0001538746669755589, + "loss": 0.262, + "step": 17920 + }, + { + "epoch": 0.6923047221900459, + "grad_norm": 0.42646175622940063, + "learning_rate": 0.00015384892595595711, + "loss": 0.3714, + "step": 17930 + }, + { + "epoch": 0.6926908374840728, + "grad_norm": 0.9620506167411804, + "learning_rate": 0.00015382318493635535, + "loss": 0.272, + "step": 17940 + }, + { + "epoch": 0.6930769527780996, + "grad_norm": 1.6859287023544312, + "learning_rate": 0.00015379744391675354, + "loss": 0.3123, + "step": 17950 + }, + { + "epoch": 0.6934630680721263, + "grad_norm": 0.6281775832176208, + "learning_rate": 0.00015377170289715178, + "loss": 0.2808, + "step": 17960 + }, + { + "epoch": 0.6938491833661531, + "grad_norm": 3.756242036819458, + "learning_rate": 0.00015374596187754997, + "loss": 0.4971, + "step": 17970 + }, + { + "epoch": 0.69423529866018, + "grad_norm": 0.6022955775260925, + "learning_rate": 0.0001537202208579482, + "loss": 0.2918, + "step": 17980 + }, + { + "epoch": 0.6946214139542067, + "grad_norm": 0.6843704581260681, + "learning_rate": 
0.0001536944798383464, + "loss": 0.476, + "step": 17990 + }, + { + "epoch": 0.6950075292482335, + "grad_norm": 1.0234850645065308, + "learning_rate": 0.0001536687388187446, + "loss": 0.2101, + "step": 18000 + }, + { + "epoch": 0.6953936445422603, + "grad_norm": 1.0228936672210693, + "learning_rate": 0.00015364299779914285, + "loss": 0.3958, + "step": 18010 + }, + { + "epoch": 0.6957797598362871, + "grad_norm": 1.1152328252792358, + "learning_rate": 0.00015361725677954103, + "loss": 0.2967, + "step": 18020 + }, + { + "epoch": 0.6961658751303139, + "grad_norm": 1.7190260887145996, + "learning_rate": 0.00015359151575993927, + "loss": 0.5281, + "step": 18030 + }, + { + "epoch": 0.6965519904243407, + "grad_norm": 0.6654171943664551, + "learning_rate": 0.00015356577474033746, + "loss": 0.3467, + "step": 18040 + }, + { + "epoch": 0.6969381057183675, + "grad_norm": 0.3305549621582031, + "learning_rate": 0.0001535400337207357, + "loss": 0.3576, + "step": 18050 + }, + { + "epoch": 0.6973242210123943, + "grad_norm": 0.3116997480392456, + "learning_rate": 0.0001535142927011339, + "loss": 0.5372, + "step": 18060 + }, + { + "epoch": 0.6977103363064211, + "grad_norm": 0.4224954843521118, + "learning_rate": 0.0001534885516815321, + "loss": 0.3866, + "step": 18070 + }, + { + "epoch": 0.6980964516004479, + "grad_norm": 4.249162197113037, + "learning_rate": 0.00015346281066193034, + "loss": 0.4236, + "step": 18080 + }, + { + "epoch": 0.6984825668944747, + "grad_norm": 1.109113335609436, + "learning_rate": 0.00015343706964232853, + "loss": 0.2882, + "step": 18090 + }, + { + "epoch": 0.6988686821885015, + "grad_norm": 1.3546028137207031, + "learning_rate": 0.00015341132862272677, + "loss": 0.3432, + "step": 18100 + }, + { + "epoch": 0.6992547974825283, + "grad_norm": 2.943016290664673, + "learning_rate": 0.00015338558760312495, + "loss": 0.3287, + "step": 18110 + }, + { + "epoch": 0.699640912776555, + "grad_norm": 1.1259021759033203, + "learning_rate": 0.0001533598465835232, + "loss": 0.4613, + "step": 18120 + }, + { + "epoch": 0.7000270280705819, + "grad_norm": 1.4867910146713257, + "learning_rate": 0.0001533341055639214, + "loss": 0.4309, + "step": 18130 + }, + { + "epoch": 0.7004131433646087, + "grad_norm": 2.6913414001464844, + "learning_rate": 0.0001533083645443196, + "loss": 0.2154, + "step": 18140 + }, + { + "epoch": 0.7007992586586355, + "grad_norm": 1.495466947555542, + "learning_rate": 0.00015328262352471783, + "loss": 0.3207, + "step": 18150 + }, + { + "epoch": 0.7011853739526622, + "grad_norm": 1.023193120956421, + "learning_rate": 0.00015325688250511602, + "loss": 0.2067, + "step": 18160 + }, + { + "epoch": 0.7015714892466891, + "grad_norm": 1.603235125541687, + "learning_rate": 0.00015323114148551426, + "loss": 0.4577, + "step": 18170 + }, + { + "epoch": 0.7019576045407159, + "grad_norm": 0.5976241230964661, + "learning_rate": 0.00015320540046591245, + "loss": 0.2282, + "step": 18180 + }, + { + "epoch": 0.7023437198347426, + "grad_norm": 2.561659574508667, + "learning_rate": 0.0001531796594463107, + "loss": 0.4045, + "step": 18190 + }, + { + "epoch": 0.7027298351287694, + "grad_norm": 1.3893495798110962, + "learning_rate": 0.0001531539184267089, + "loss": 0.2419, + "step": 18200 + }, + { + "epoch": 0.7031159504227963, + "grad_norm": 0.7786352038383484, + "learning_rate": 0.00015312817740710709, + "loss": 0.1653, + "step": 18210 + }, + { + "epoch": 0.703502065716823, + "grad_norm": 0.6525956988334656, + "learning_rate": 0.00015310243638750533, + "loss": 0.5418, + "step": 18220 + }, + { + "epoch": 
0.7038881810108498, + "grad_norm": 0.38933584094047546, + "learning_rate": 0.0001530766953679035, + "loss": 0.2952, + "step": 18230 + }, + { + "epoch": 0.7042742963048766, + "grad_norm": 2.0752692222595215, + "learning_rate": 0.00015305095434830175, + "loss": 0.211, + "step": 18240 + }, + { + "epoch": 0.7046604115989035, + "grad_norm": 0.9095730781555176, + "learning_rate": 0.00015302521332869997, + "loss": 0.2723, + "step": 18250 + }, + { + "epoch": 0.7050465268929302, + "grad_norm": 1.6840119361877441, + "learning_rate": 0.00015299947230909818, + "loss": 0.3362, + "step": 18260 + }, + { + "epoch": 0.705432642186957, + "grad_norm": 2.0353269577026367, + "learning_rate": 0.0001529737312894964, + "loss": 0.2407, + "step": 18270 + }, + { + "epoch": 0.7058187574809838, + "grad_norm": 3.0865590572357178, + "learning_rate": 0.00015294799026989458, + "loss": 0.3426, + "step": 18280 + }, + { + "epoch": 0.7062048727750107, + "grad_norm": 1.6488090753555298, + "learning_rate": 0.00015292224925029282, + "loss": 0.4275, + "step": 18290 + }, + { + "epoch": 0.7065909880690374, + "grad_norm": 0.5494143962860107, + "learning_rate": 0.000152896508230691, + "loss": 0.412, + "step": 18300 + }, + { + "epoch": 0.7069771033630642, + "grad_norm": 3.111301898956299, + "learning_rate": 0.00015287076721108925, + "loss": 0.4615, + "step": 18310 + }, + { + "epoch": 0.707363218657091, + "grad_norm": 1.74229097366333, + "learning_rate": 0.00015284502619148746, + "loss": 0.3194, + "step": 18320 + }, + { + "epoch": 0.7077493339511178, + "grad_norm": 1.8455474376678467, + "learning_rate": 0.00015281928517188567, + "loss": 0.2817, + "step": 18330 + }, + { + "epoch": 0.7081354492451446, + "grad_norm": 1.778723120689392, + "learning_rate": 0.0001527935441522839, + "loss": 0.441, + "step": 18340 + }, + { + "epoch": 0.7085215645391714, + "grad_norm": 0.7885593771934509, + "learning_rate": 0.00015276780313268207, + "loss": 0.3121, + "step": 18350 + }, + { + "epoch": 0.7089076798331982, + "grad_norm": 1.9262609481811523, + "learning_rate": 0.0001527420621130803, + "loss": 0.3271, + "step": 18360 + }, + { + "epoch": 0.709293795127225, + "grad_norm": 0.10278096795082092, + "learning_rate": 0.0001527163210934785, + "loss": 0.2602, + "step": 18370 + }, + { + "epoch": 0.7096799104212518, + "grad_norm": 1.2394765615463257, + "learning_rate": 0.00015269058007387674, + "loss": 0.2641, + "step": 18380 + }, + { + "epoch": 0.7100660257152785, + "grad_norm": 2.0335285663604736, + "learning_rate": 0.00015266483905427495, + "loss": 0.1926, + "step": 18390 + }, + { + "epoch": 0.7104521410093054, + "grad_norm": 7.205105781555176, + "learning_rate": 0.00015263909803467317, + "loss": 0.4772, + "step": 18400 + }, + { + "epoch": 0.7108382563033322, + "grad_norm": 1.1549599170684814, + "learning_rate": 0.00015261335701507138, + "loss": 0.2775, + "step": 18410 + }, + { + "epoch": 0.711224371597359, + "grad_norm": 2.8363780975341797, + "learning_rate": 0.00015258761599546957, + "loss": 0.3533, + "step": 18420 + }, + { + "epoch": 0.7116104868913857, + "grad_norm": 0.2606666684150696, + "learning_rate": 0.0001525618749758678, + "loss": 0.1421, + "step": 18430 + }, + { + "epoch": 0.7119966021854126, + "grad_norm": 1.2934225797653198, + "learning_rate": 0.00015253613395626602, + "loss": 0.3066, + "step": 18440 + }, + { + "epoch": 0.7123827174794394, + "grad_norm": 3.8246026039123535, + "learning_rate": 0.00015251039293666423, + "loss": 0.534, + "step": 18450 + }, + { + "epoch": 0.7127688327734661, + "grad_norm": 2.2535433769226074, + "learning_rate": 
0.00015248465191706245, + "loss": 0.4795, + "step": 18460 + }, + { + "epoch": 0.7131549480674929, + "grad_norm": 1.6749187707901, + "learning_rate": 0.00015245891089746066, + "loss": 0.4091, + "step": 18470 + }, + { + "epoch": 0.7135410633615198, + "grad_norm": 1.1795039176940918, + "learning_rate": 0.00015243316987785887, + "loss": 0.2612, + "step": 18480 + }, + { + "epoch": 0.7139271786555466, + "grad_norm": 1.9280221462249756, + "learning_rate": 0.00015240742885825709, + "loss": 0.2068, + "step": 18490 + }, + { + "epoch": 0.7143132939495733, + "grad_norm": 1.1188548803329468, + "learning_rate": 0.0001523816878386553, + "loss": 0.3193, + "step": 18500 + }, + { + "epoch": 0.7146994092436001, + "grad_norm": 0.2429720014333725, + "learning_rate": 0.0001523559468190535, + "loss": 0.3129, + "step": 18510 + }, + { + "epoch": 0.715085524537627, + "grad_norm": 4.09410285949707, + "learning_rate": 0.00015233020579945173, + "loss": 0.2437, + "step": 18520 + }, + { + "epoch": 0.7154716398316537, + "grad_norm": 2.3252813816070557, + "learning_rate": 0.00015230446477984994, + "loss": 0.4649, + "step": 18530 + }, + { + "epoch": 0.7158577551256805, + "grad_norm": 0.5725727677345276, + "learning_rate": 0.00015227872376024815, + "loss": 0.3291, + "step": 18540 + }, + { + "epoch": 0.7162438704197073, + "grad_norm": 0.9253637194633484, + "learning_rate": 0.00015225298274064637, + "loss": 0.3486, + "step": 18550 + }, + { + "epoch": 0.7166299857137342, + "grad_norm": 2.3353309631347656, + "learning_rate": 0.00015222724172104458, + "loss": 0.253, + "step": 18560 + }, + { + "epoch": 0.7170161010077609, + "grad_norm": 0.7312389016151428, + "learning_rate": 0.0001522015007014428, + "loss": 0.2817, + "step": 18570 + }, + { + "epoch": 0.7174022163017877, + "grad_norm": 0.6564128994941711, + "learning_rate": 0.000152175759681841, + "loss": 0.2896, + "step": 18580 + }, + { + "epoch": 0.7177883315958145, + "grad_norm": 3.4619979858398438, + "learning_rate": 0.00015215001866223922, + "loss": 0.5028, + "step": 18590 + }, + { + "epoch": 0.7181744468898413, + "grad_norm": 6.910060882568359, + "learning_rate": 0.00015212427764263743, + "loss": 0.2467, + "step": 18600 + }, + { + "epoch": 0.7185605621838681, + "grad_norm": 2.022186279296875, + "learning_rate": 0.00015209853662303565, + "loss": 0.3406, + "step": 18610 + }, + { + "epoch": 0.7189466774778949, + "grad_norm": 1.2240760326385498, + "learning_rate": 0.00015207279560343386, + "loss": 0.3391, + "step": 18620 + }, + { + "epoch": 0.7193327927719217, + "grad_norm": 0.7356148958206177, + "learning_rate": 0.00015204705458383207, + "loss": 0.2679, + "step": 18630 + }, + { + "epoch": 0.7197189080659485, + "grad_norm": 0.963387131690979, + "learning_rate": 0.00015202131356423029, + "loss": 0.4088, + "step": 18640 + }, + { + "epoch": 0.7201050233599753, + "grad_norm": 3.0437800884246826, + "learning_rate": 0.0001519955725446285, + "loss": 0.2591, + "step": 18650 + }, + { + "epoch": 0.720491138654002, + "grad_norm": 2.5874569416046143, + "learning_rate": 0.0001519698315250267, + "loss": 0.2377, + "step": 18660 + }, + { + "epoch": 0.7208772539480289, + "grad_norm": 2.3215808868408203, + "learning_rate": 0.00015194409050542493, + "loss": 0.5335, + "step": 18670 + }, + { + "epoch": 0.7212633692420557, + "grad_norm": 1.9501638412475586, + "learning_rate": 0.00015191834948582314, + "loss": 0.4657, + "step": 18680 + }, + { + "epoch": 0.7216494845360825, + "grad_norm": 1.8396021127700806, + "learning_rate": 0.00015189260846622135, + "loss": 0.2719, + "step": 18690 + }, + { + 
"epoch": 0.7220355998301092, + "grad_norm": 0.9337745904922485, + "learning_rate": 0.00015186686744661957, + "loss": 0.3672, + "step": 18700 + }, + { + "epoch": 0.7224217151241361, + "grad_norm": 1.892098069190979, + "learning_rate": 0.00015184112642701778, + "loss": 0.6085, + "step": 18710 + }, + { + "epoch": 0.7228078304181629, + "grad_norm": 1.051630973815918, + "learning_rate": 0.000151815385407416, + "loss": 0.2422, + "step": 18720 + }, + { + "epoch": 0.7231939457121896, + "grad_norm": 0.8714147210121155, + "learning_rate": 0.0001517896443878142, + "loss": 0.4046, + "step": 18730 + }, + { + "epoch": 0.7235800610062164, + "grad_norm": 0.5002617835998535, + "learning_rate": 0.00015176390336821242, + "loss": 0.3708, + "step": 18740 + }, + { + "epoch": 0.7239661763002433, + "grad_norm": 1.3960262537002563, + "learning_rate": 0.00015173816234861066, + "loss": 0.3206, + "step": 18750 + }, + { + "epoch": 0.7243522915942701, + "grad_norm": 0.7899012565612793, + "learning_rate": 0.00015171242132900885, + "loss": 0.2875, + "step": 18760 + }, + { + "epoch": 0.7247384068882968, + "grad_norm": 0.7216291427612305, + "learning_rate": 0.00015168668030940706, + "loss": 0.3885, + "step": 18770 + }, + { + "epoch": 0.7251245221823236, + "grad_norm": 0.36028966307640076, + "learning_rate": 0.00015166093928980527, + "loss": 0.3542, + "step": 18780 + }, + { + "epoch": 0.7255106374763505, + "grad_norm": 1.378724455833435, + "learning_rate": 0.00015163519827020349, + "loss": 0.2149, + "step": 18790 + }, + { + "epoch": 0.7258967527703772, + "grad_norm": 1.544819712638855, + "learning_rate": 0.0001516094572506017, + "loss": 0.278, + "step": 18800 + }, + { + "epoch": 0.726282868064404, + "grad_norm": 0.5819025039672852, + "learning_rate": 0.0001515837162309999, + "loss": 0.2161, + "step": 18810 + }, + { + "epoch": 0.7266689833584308, + "grad_norm": 0.8350955843925476, + "learning_rate": 0.00015155797521139815, + "loss": 0.4134, + "step": 18820 + }, + { + "epoch": 0.7270550986524577, + "grad_norm": 2.4110195636749268, + "learning_rate": 0.00015153223419179634, + "loss": 0.3928, + "step": 18830 + }, + { + "epoch": 0.7274412139464844, + "grad_norm": 0.7913835048675537, + "learning_rate": 0.00015150649317219455, + "loss": 0.2336, + "step": 18840 + }, + { + "epoch": 0.7278273292405112, + "grad_norm": 2.13431978225708, + "learning_rate": 0.00015148075215259277, + "loss": 0.4758, + "step": 18850 + }, + { + "epoch": 0.7282134445345381, + "grad_norm": 1.2756295204162598, + "learning_rate": 0.00015145501113299098, + "loss": 0.3139, + "step": 18860 + }, + { + "epoch": 0.7285995598285648, + "grad_norm": 0.3661370277404785, + "learning_rate": 0.0001514292701133892, + "loss": 0.3913, + "step": 18870 + }, + { + "epoch": 0.7289856751225916, + "grad_norm": 1.2238267660140991, + "learning_rate": 0.0001514035290937874, + "loss": 0.1893, + "step": 18880 + }, + { + "epoch": 0.7293717904166184, + "grad_norm": 2.8876595497131348, + "learning_rate": 0.00015137778807418565, + "loss": 0.2567, + "step": 18890 + }, + { + "epoch": 0.7297579057106452, + "grad_norm": 1.248967170715332, + "learning_rate": 0.00015135204705458383, + "loss": 0.2004, + "step": 18900 + }, + { + "epoch": 0.730144021004672, + "grad_norm": 0.9446873068809509, + "learning_rate": 0.00015132630603498205, + "loss": 0.386, + "step": 18910 + }, + { + "epoch": 0.7305301362986988, + "grad_norm": 4.592974662780762, + "learning_rate": 0.00015130056501538026, + "loss": 0.4549, + "step": 18920 + }, + { + "epoch": 0.7309162515927256, + "grad_norm": 0.8000105619430542, + 
"learning_rate": 0.00015127482399577847, + "loss": 0.2795, + "step": 18930 + }, + { + "epoch": 0.7313023668867524, + "grad_norm": 0.5600059628486633, + "learning_rate": 0.0001512490829761767, + "loss": 0.3682, + "step": 18940 + }, + { + "epoch": 0.7316884821807792, + "grad_norm": 0.4072086811065674, + "learning_rate": 0.0001512233419565749, + "loss": 0.3186, + "step": 18950 + }, + { + "epoch": 0.732074597474806, + "grad_norm": 3.387422561645508, + "learning_rate": 0.00015119760093697314, + "loss": 0.3195, + "step": 18960 + }, + { + "epoch": 0.7324607127688327, + "grad_norm": 0.5224191546440125, + "learning_rate": 0.00015117185991737133, + "loss": 0.1792, + "step": 18970 + }, + { + "epoch": 0.7328468280628596, + "grad_norm": 0.5431543588638306, + "learning_rate": 0.00015114611889776954, + "loss": 0.2775, + "step": 18980 + }, + { + "epoch": 0.7332329433568864, + "grad_norm": 0.13088488578796387, + "learning_rate": 0.00015112037787816775, + "loss": 0.3511, + "step": 18990 + }, + { + "epoch": 0.7336190586509131, + "grad_norm": 0.6414417624473572, + "learning_rate": 0.00015109463685856597, + "loss": 0.4042, + "step": 19000 + }, + { + "epoch": 0.7340051739449399, + "grad_norm": 0.2358855903148651, + "learning_rate": 0.0001510688958389642, + "loss": 0.2567, + "step": 19010 + }, + { + "epoch": 0.7343912892389668, + "grad_norm": 1.343703031539917, + "learning_rate": 0.0001510431548193624, + "loss": 0.3658, + "step": 19020 + }, + { + "epoch": 0.7347774045329936, + "grad_norm": 2.5982301235198975, + "learning_rate": 0.00015101741379976063, + "loss": 0.3664, + "step": 19030 + }, + { + "epoch": 0.7351635198270203, + "grad_norm": 1.3639850616455078, + "learning_rate": 0.00015099167278015882, + "loss": 0.1809, + "step": 19040 + }, + { + "epoch": 0.7355496351210471, + "grad_norm": 1.322572946548462, + "learning_rate": 0.00015096593176055703, + "loss": 0.213, + "step": 19050 + }, + { + "epoch": 0.735935750415074, + "grad_norm": 0.6858059763908386, + "learning_rate": 0.00015094019074095527, + "loss": 0.1494, + "step": 19060 + }, + { + "epoch": 0.7363218657091007, + "grad_norm": 3.9815866947174072, + "learning_rate": 0.00015091444972135346, + "loss": 0.4879, + "step": 19070 + }, + { + "epoch": 0.7367079810031275, + "grad_norm": 0.5155348777770996, + "learning_rate": 0.0001508887087017517, + "loss": 0.1951, + "step": 19080 + }, + { + "epoch": 0.7370940962971543, + "grad_norm": 1.1120082139968872, + "learning_rate": 0.00015086296768214988, + "loss": 0.3156, + "step": 19090 + }, + { + "epoch": 0.7374802115911812, + "grad_norm": 2.1396732330322266, + "learning_rate": 0.00015083722666254813, + "loss": 0.3528, + "step": 19100 + }, + { + "epoch": 0.7378663268852079, + "grad_norm": 1.8543074131011963, + "learning_rate": 0.0001508114856429463, + "loss": 0.2778, + "step": 19110 + }, + { + "epoch": 0.7382524421792347, + "grad_norm": 0.2955397963523865, + "learning_rate": 0.00015078574462334452, + "loss": 0.3267, + "step": 19120 + }, + { + "epoch": 0.7386385574732616, + "grad_norm": 0.6277685165405273, + "learning_rate": 0.00015076000360374277, + "loss": 0.3316, + "step": 19130 + }, + { + "epoch": 0.7390246727672883, + "grad_norm": 1.4339113235473633, + "learning_rate": 0.00015073426258414095, + "loss": 0.3227, + "step": 19140 + }, + { + "epoch": 0.7394107880613151, + "grad_norm": 2.74206805229187, + "learning_rate": 0.0001507085215645392, + "loss": 0.2086, + "step": 19150 + }, + { + "epoch": 0.7397969033553419, + "grad_norm": 1.0108954906463623, + "learning_rate": 0.00015068278054493738, + "loss": 0.2697, + "step": 
19160 + }, + { + "epoch": 0.7401830186493688, + "grad_norm": 1.726650357246399, + "learning_rate": 0.00015065703952533562, + "loss": 0.1312, + "step": 19170 + }, + { + "epoch": 0.7405691339433955, + "grad_norm": 1.8226735591888428, + "learning_rate": 0.0001506312985057338, + "loss": 0.4488, + "step": 19180 + }, + { + "epoch": 0.7409552492374223, + "grad_norm": 1.4812517166137695, + "learning_rate": 0.00015060555748613205, + "loss": 0.472, + "step": 19190 + }, + { + "epoch": 0.741341364531449, + "grad_norm": 1.184036374092102, + "learning_rate": 0.00015057981646653026, + "loss": 0.1342, + "step": 19200 + }, + { + "epoch": 0.7417274798254759, + "grad_norm": 2.925368309020996, + "learning_rate": 0.00015055407544692844, + "loss": 0.4114, + "step": 19210 + }, + { + "epoch": 0.7421135951195027, + "grad_norm": 1.4891862869262695, + "learning_rate": 0.00015052833442732669, + "loss": 0.3454, + "step": 19220 + }, + { + "epoch": 0.7424997104135295, + "grad_norm": 2.9221529960632324, + "learning_rate": 0.00015050259340772487, + "loss": 0.4622, + "step": 19230 + }, + { + "epoch": 0.7428858257075562, + "grad_norm": 1.3214635848999023, + "learning_rate": 0.0001504768523881231, + "loss": 0.2783, + "step": 19240 + }, + { + "epoch": 0.7432719410015831, + "grad_norm": 1.1919734477996826, + "learning_rate": 0.00015045111136852133, + "loss": 0.3667, + "step": 19250 + }, + { + "epoch": 0.7436580562956099, + "grad_norm": 1.7075424194335938, + "learning_rate": 0.00015042537034891954, + "loss": 0.1867, + "step": 19260 + }, + { + "epoch": 0.7440441715896366, + "grad_norm": 0.6810876727104187, + "learning_rate": 0.00015039962932931775, + "loss": 0.2231, + "step": 19270 + }, + { + "epoch": 0.7444302868836634, + "grad_norm": 1.3421598672866821, + "learning_rate": 0.00015037388830971594, + "loss": 0.2138, + "step": 19280 + }, + { + "epoch": 0.7448164021776903, + "grad_norm": 1.5983080863952637, + "learning_rate": 0.00015034814729011418, + "loss": 0.2799, + "step": 19290 + }, + { + "epoch": 0.7452025174717171, + "grad_norm": 2.0319182872772217, + "learning_rate": 0.00015032240627051236, + "loss": 0.213, + "step": 19300 + }, + { + "epoch": 0.7455886327657438, + "grad_norm": 0.6433222889900208, + "learning_rate": 0.0001502966652509106, + "loss": 0.3604, + "step": 19310 + }, + { + "epoch": 0.7459747480597706, + "grad_norm": 1.1373825073242188, + "learning_rate": 0.00015027092423130882, + "loss": 0.3007, + "step": 19320 + }, + { + "epoch": 0.7463608633537975, + "grad_norm": 1.7230875492095947, + "learning_rate": 0.00015024518321170703, + "loss": 0.2471, + "step": 19330 + }, + { + "epoch": 0.7467469786478242, + "grad_norm": 1.73224937915802, + "learning_rate": 0.00015021944219210524, + "loss": 0.4877, + "step": 19340 + }, + { + "epoch": 0.747133093941851, + "grad_norm": 0.9023095369338989, + "learning_rate": 0.00015019370117250343, + "loss": 0.352, + "step": 19350 + }, + { + "epoch": 0.7475192092358778, + "grad_norm": 1.898983120918274, + "learning_rate": 0.00015016796015290167, + "loss": 0.3981, + "step": 19360 + }, + { + "epoch": 0.7479053245299047, + "grad_norm": 1.8544923067092896, + "learning_rate": 0.00015014221913329986, + "loss": 0.3015, + "step": 19370 + }, + { + "epoch": 0.7482914398239314, + "grad_norm": 1.1380795240402222, + "learning_rate": 0.0001501164781136981, + "loss": 0.3173, + "step": 19380 + }, + { + "epoch": 0.7486775551179582, + "grad_norm": 0.5071800351142883, + "learning_rate": 0.0001500907370940963, + "loss": 0.46, + "step": 19390 + }, + { + "epoch": 0.7490636704119851, + "grad_norm": 
0.30739355087280273, + "learning_rate": 0.00015006499607449452, + "loss": 0.4942, + "step": 19400 + }, + { + "epoch": 0.7494497857060118, + "grad_norm": 1.1223585605621338, + "learning_rate": 0.00015003925505489274, + "loss": 0.3118, + "step": 19410 + }, + { + "epoch": 0.7498359010000386, + "grad_norm": 1.019545555114746, + "learning_rate": 0.00015001351403529092, + "loss": 0.3115, + "step": 19420 + }, + { + "epoch": 0.7502220162940654, + "grad_norm": 0.4567502439022064, + "learning_rate": 0.00014998777301568916, + "loss": 0.2932, + "step": 19430 + }, + { + "epoch": 0.7506081315880923, + "grad_norm": 1.669258952140808, + "learning_rate": 0.00014996203199608738, + "loss": 0.2889, + "step": 19440 + }, + { + "epoch": 0.750994246882119, + "grad_norm": 0.2787584364414215, + "learning_rate": 0.0001499362909764856, + "loss": 0.3482, + "step": 19450 + }, + { + "epoch": 0.7513803621761458, + "grad_norm": 1.6648303270339966, + "learning_rate": 0.0001499105499568838, + "loss": 0.3136, + "step": 19460 + }, + { + "epoch": 0.7517664774701726, + "grad_norm": 2.6357266902923584, + "learning_rate": 0.00014988480893728202, + "loss": 0.4643, + "step": 19470 + }, + { + "epoch": 0.7521525927641994, + "grad_norm": 1.8017394542694092, + "learning_rate": 0.00014985906791768023, + "loss": 0.469, + "step": 19480 + }, + { + "epoch": 0.7525387080582262, + "grad_norm": 1.8467847108840942, + "learning_rate": 0.00014983332689807842, + "loss": 0.1495, + "step": 19490 + }, + { + "epoch": 0.752924823352253, + "grad_norm": 2.9446980953216553, + "learning_rate": 0.00014980758587847666, + "loss": 0.3162, + "step": 19500 + }, + { + "epoch": 0.7533109386462797, + "grad_norm": 1.4076721668243408, + "learning_rate": 0.00014978184485887487, + "loss": 0.2356, + "step": 19510 + }, + { + "epoch": 0.7536970539403066, + "grad_norm": 1.2669463157653809, + "learning_rate": 0.00014975610383927308, + "loss": 0.3056, + "step": 19520 + }, + { + "epoch": 0.7540831692343334, + "grad_norm": 1.1390401124954224, + "learning_rate": 0.0001497303628196713, + "loss": 0.2785, + "step": 19530 + }, + { + "epoch": 0.7544692845283602, + "grad_norm": 3.095099925994873, + "learning_rate": 0.0001497046218000695, + "loss": 0.489, + "step": 19540 + }, + { + "epoch": 0.7548553998223869, + "grad_norm": 1.5737907886505127, + "learning_rate": 0.00014967888078046772, + "loss": 0.3981, + "step": 19550 + }, + { + "epoch": 0.7552415151164138, + "grad_norm": 1.4953045845031738, + "learning_rate": 0.00014965313976086594, + "loss": 0.3153, + "step": 19560 + }, + { + "epoch": 0.7556276304104406, + "grad_norm": 0.7709154486656189, + "learning_rate": 0.00014962739874126415, + "loss": 0.2904, + "step": 19570 + }, + { + "epoch": 0.7560137457044673, + "grad_norm": 2.118950366973877, + "learning_rate": 0.00014960165772166236, + "loss": 0.3736, + "step": 19580 + }, + { + "epoch": 0.7563998609984941, + "grad_norm": 2.9463138580322266, + "learning_rate": 0.00014957591670206058, + "loss": 0.3186, + "step": 19590 + }, + { + "epoch": 0.756785976292521, + "grad_norm": 1.3784689903259277, + "learning_rate": 0.0001495501756824588, + "loss": 0.3589, + "step": 19600 + }, + { + "epoch": 0.7571720915865477, + "grad_norm": 2.35467267036438, + "learning_rate": 0.000149524434662857, + "loss": 0.1282, + "step": 19610 + }, + { + "epoch": 0.7575582068805745, + "grad_norm": 0.7167999148368835, + "learning_rate": 0.00014949869364325522, + "loss": 0.3775, + "step": 19620 + }, + { + "epoch": 0.7579443221746013, + "grad_norm": 1.242785096168518, + "learning_rate": 0.00014947295262365343, + "loss": 
0.3814, + "step": 19630 + }, + { + "epoch": 0.7583304374686282, + "grad_norm": 2.7875797748565674, + "learning_rate": 0.00014944721160405164, + "loss": 0.4764, + "step": 19640 + }, + { + "epoch": 0.7587165527626549, + "grad_norm": 1.2169462442398071, + "learning_rate": 0.00014942147058444986, + "loss": 0.3132, + "step": 19650 + }, + { + "epoch": 0.7591026680566817, + "grad_norm": 3.146204948425293, + "learning_rate": 0.00014939572956484807, + "loss": 0.248, + "step": 19660 + }, + { + "epoch": 0.7594887833507086, + "grad_norm": 2.2177276611328125, + "learning_rate": 0.00014936998854524628, + "loss": 0.3755, + "step": 19670 + }, + { + "epoch": 0.7598748986447353, + "grad_norm": 0.8063843250274658, + "learning_rate": 0.0001493442475256445, + "loss": 0.6341, + "step": 19680 + }, + { + "epoch": 0.7602610139387621, + "grad_norm": 0.18064215779304504, + "learning_rate": 0.0001493185065060427, + "loss": 0.3565, + "step": 19690 + }, + { + "epoch": 0.7606471292327889, + "grad_norm": 1.4401954412460327, + "learning_rate": 0.00014929276548644092, + "loss": 0.1873, + "step": 19700 + }, + { + "epoch": 0.7610332445268158, + "grad_norm": 0.7269515991210938, + "learning_rate": 0.00014926702446683914, + "loss": 0.2765, + "step": 19710 + }, + { + "epoch": 0.7614193598208425, + "grad_norm": 1.2779995203018188, + "learning_rate": 0.00014924128344723735, + "loss": 0.3633, + "step": 19720 + }, + { + "epoch": 0.7618054751148693, + "grad_norm": 1.3330426216125488, + "learning_rate": 0.00014921554242763556, + "loss": 0.3736, + "step": 19730 + }, + { + "epoch": 0.7621915904088961, + "grad_norm": 1.4269347190856934, + "learning_rate": 0.00014918980140803378, + "loss": 0.2572, + "step": 19740 + }, + { + "epoch": 0.7625777057029229, + "grad_norm": 0.9075976610183716, + "learning_rate": 0.000149164060388432, + "loss": 0.3609, + "step": 19750 + }, + { + "epoch": 0.7629638209969497, + "grad_norm": 1.7566559314727783, + "learning_rate": 0.0001491383193688302, + "loss": 0.3134, + "step": 19760 + }, + { + "epoch": 0.7633499362909765, + "grad_norm": 2.4620914459228516, + "learning_rate": 0.00014911257834922842, + "loss": 0.3559, + "step": 19770 + }, + { + "epoch": 0.7637360515850032, + "grad_norm": 0.9474597573280334, + "learning_rate": 0.00014908683732962663, + "loss": 0.3821, + "step": 19780 + }, + { + "epoch": 0.7641221668790301, + "grad_norm": 0.6298363208770752, + "learning_rate": 0.00014906109631002484, + "loss": 0.2801, + "step": 19790 + }, + { + "epoch": 0.7645082821730569, + "grad_norm": 0.213288351893425, + "learning_rate": 0.00014903535529042306, + "loss": 0.368, + "step": 19800 + }, + { + "epoch": 0.7648943974670837, + "grad_norm": 1.4412375688552856, + "learning_rate": 0.00014900961427082127, + "loss": 0.3972, + "step": 19810 + }, + { + "epoch": 0.7652805127611104, + "grad_norm": 1.6808812618255615, + "learning_rate": 0.00014898387325121948, + "loss": 0.5302, + "step": 19820 + }, + { + "epoch": 0.7656666280551373, + "grad_norm": 0.922726571559906, + "learning_rate": 0.0001489581322316177, + "loss": 0.3299, + "step": 19830 + }, + { + "epoch": 0.7660527433491641, + "grad_norm": 0.5058152079582214, + "learning_rate": 0.0001489323912120159, + "loss": 0.4485, + "step": 19840 + }, + { + "epoch": 0.7664388586431908, + "grad_norm": 1.3025776147842407, + "learning_rate": 0.00014890665019241412, + "loss": 0.266, + "step": 19850 + }, + { + "epoch": 0.7668249739372176, + "grad_norm": 2.0282516479492188, + "learning_rate": 0.00014888090917281234, + "loss": 0.7752, + "step": 19860 + }, + { + "epoch": 0.7672110892312445, + 
"grad_norm": 0.6557582020759583, + "learning_rate": 0.00014885516815321058, + "loss": 0.256, + "step": 19870 + }, + { + "epoch": 0.7675972045252712, + "grad_norm": 0.13688494265079498, + "learning_rate": 0.00014882942713360876, + "loss": 0.263, + "step": 19880 + }, + { + "epoch": 0.767983319819298, + "grad_norm": 0.04091642051935196, + "learning_rate": 0.00014880368611400698, + "loss": 0.276, + "step": 19890 + }, + { + "epoch": 0.7683694351133248, + "grad_norm": 3.054969072341919, + "learning_rate": 0.0001487779450944052, + "loss": 0.5198, + "step": 19900 + }, + { + "epoch": 0.7687555504073517, + "grad_norm": 0.5707372426986694, + "learning_rate": 0.0001487522040748034, + "loss": 0.2715, + "step": 19910 + }, + { + "epoch": 0.7691416657013784, + "grad_norm": 0.477830708026886, + "learning_rate": 0.00014872646305520162, + "loss": 0.2517, + "step": 19920 + }, + { + "epoch": 0.7695277809954052, + "grad_norm": 1.0550785064697266, + "learning_rate": 0.00014870072203559983, + "loss": 0.2552, + "step": 19930 + }, + { + "epoch": 0.7699138962894321, + "grad_norm": 1.9678715467453003, + "learning_rate": 0.00014867498101599807, + "loss": 0.4487, + "step": 19940 + }, + { + "epoch": 0.7703000115834588, + "grad_norm": 0.4506283104419708, + "learning_rate": 0.00014864923999639626, + "loss": 0.2632, + "step": 19950 + }, + { + "epoch": 0.7706861268774856, + "grad_norm": 1.8445035219192505, + "learning_rate": 0.0001486234989767945, + "loss": 0.3315, + "step": 19960 + }, + { + "epoch": 0.7710722421715124, + "grad_norm": 2.6394076347351074, + "learning_rate": 0.00014859775795719268, + "loss": 0.3216, + "step": 19970 + }, + { + "epoch": 0.7714583574655393, + "grad_norm": 0.6717782020568848, + "learning_rate": 0.0001485720169375909, + "loss": 0.3531, + "step": 19980 + }, + { + "epoch": 0.771844472759566, + "grad_norm": 0.5959204435348511, + "learning_rate": 0.0001485462759179891, + "loss": 0.2095, + "step": 19990 + }, + { + "epoch": 0.7722305880535928, + "grad_norm": 1.874375581741333, + "learning_rate": 0.00014852053489838732, + "loss": 0.4845, + "step": 20000 + }, + { + "epoch": 0.7726167033476196, + "grad_norm": 2.6810474395751953, + "learning_rate": 0.00014849479387878556, + "loss": 0.471, + "step": 20010 + }, + { + "epoch": 0.7730028186416464, + "grad_norm": 0.5498594045639038, + "learning_rate": 0.00014846905285918375, + "loss": 0.2265, + "step": 20020 + }, + { + "epoch": 0.7733889339356732, + "grad_norm": 0.7658601999282837, + "learning_rate": 0.000148443311839582, + "loss": 0.4459, + "step": 20030 + }, + { + "epoch": 0.7737750492297, + "grad_norm": 0.5597706437110901, + "learning_rate": 0.00014841757081998018, + "loss": 0.4317, + "step": 20040 + }, + { + "epoch": 0.7741611645237267, + "grad_norm": 0.740342915058136, + "learning_rate": 0.0001483918298003784, + "loss": 0.4158, + "step": 20050 + }, + { + "epoch": 0.7745472798177536, + "grad_norm": 0.6069484949111938, + "learning_rate": 0.00014836608878077663, + "loss": 0.2363, + "step": 20060 + }, + { + "epoch": 0.7749333951117804, + "grad_norm": 0.9331381916999817, + "learning_rate": 0.00014834034776117482, + "loss": 0.2063, + "step": 20070 + }, + { + "epoch": 0.7753195104058072, + "grad_norm": 1.298399806022644, + "learning_rate": 0.00014831460674157306, + "loss": 0.319, + "step": 20080 + }, + { + "epoch": 0.7757056256998339, + "grad_norm": 1.887229323387146, + "learning_rate": 0.00014828886572197124, + "loss": 0.3805, + "step": 20090 + }, + { + "epoch": 0.7760917409938608, + "grad_norm": 0.5734463930130005, + "learning_rate": 0.00014826312470236948, + 
"loss": 0.392, + "step": 20100 + }, + { + "epoch": 0.7764778562878876, + "grad_norm": 1.10410475730896, + "learning_rate": 0.00014823738368276767, + "loss": 0.3222, + "step": 20110 + }, + { + "epoch": 0.7768639715819143, + "grad_norm": 3.1354923248291016, + "learning_rate": 0.00014821164266316588, + "loss": 0.3434, + "step": 20120 + }, + { + "epoch": 0.7772500868759411, + "grad_norm": 1.9921913146972656, + "learning_rate": 0.00014818590164356412, + "loss": 0.222, + "step": 20130 + }, + { + "epoch": 0.777636202169968, + "grad_norm": 0.38615912199020386, + "learning_rate": 0.0001481601606239623, + "loss": 0.1859, + "step": 20140 + }, + { + "epoch": 0.7780223174639948, + "grad_norm": 0.919252336025238, + "learning_rate": 0.00014813441960436055, + "loss": 0.3889, + "step": 20150 + }, + { + "epoch": 0.7784084327580215, + "grad_norm": 0.9474624991416931, + "learning_rate": 0.00014810867858475874, + "loss": 0.3086, + "step": 20160 + }, + { + "epoch": 0.7787945480520484, + "grad_norm": 1.1992007493972778, + "learning_rate": 0.00014808293756515698, + "loss": 0.3949, + "step": 20170 + }, + { + "epoch": 0.7791806633460752, + "grad_norm": 2.2405364513397217, + "learning_rate": 0.00014805719654555516, + "loss": 0.4963, + "step": 20180 + }, + { + "epoch": 0.7795667786401019, + "grad_norm": 0.9163286685943604, + "learning_rate": 0.00014803145552595338, + "loss": 0.4022, + "step": 20190 + }, + { + "epoch": 0.7799528939341287, + "grad_norm": 0.995309591293335, + "learning_rate": 0.00014800571450635162, + "loss": 0.244, + "step": 20200 + }, + { + "epoch": 0.7803390092281556, + "grad_norm": 3.0966508388519287, + "learning_rate": 0.0001479799734867498, + "loss": 0.4165, + "step": 20210 + }, + { + "epoch": 0.7807251245221823, + "grad_norm": 1.7679264545440674, + "learning_rate": 0.00014795423246714804, + "loss": 0.3264, + "step": 20220 + }, + { + "epoch": 0.7811112398162091, + "grad_norm": 3.405724048614502, + "learning_rate": 0.00014792849144754623, + "loss": 0.5292, + "step": 20230 + }, + { + "epoch": 0.7814973551102359, + "grad_norm": 0.5335774421691895, + "learning_rate": 0.00014790275042794447, + "loss": 0.2901, + "step": 20240 + }, + { + "epoch": 0.7818834704042628, + "grad_norm": 2.332176446914673, + "learning_rate": 0.00014787700940834268, + "loss": 0.3116, + "step": 20250 + }, + { + "epoch": 0.7822695856982895, + "grad_norm": 1.226844072341919, + "learning_rate": 0.00014785126838874087, + "loss": 0.332, + "step": 20260 + }, + { + "epoch": 0.7826557009923163, + "grad_norm": 0.8310544490814209, + "learning_rate": 0.0001478255273691391, + "loss": 0.3929, + "step": 20270 + }, + { + "epoch": 0.7830418162863431, + "grad_norm": 4.065925121307373, + "learning_rate": 0.0001477997863495373, + "loss": 0.3044, + "step": 20280 + }, + { + "epoch": 0.7834279315803699, + "grad_norm": 1.3661054372787476, + "learning_rate": 0.00014777404532993554, + "loss": 0.2396, + "step": 20290 + }, + { + "epoch": 0.7838140468743967, + "grad_norm": 2.047938346862793, + "learning_rate": 0.00014774830431033372, + "loss": 0.2954, + "step": 20300 + }, + { + "epoch": 0.7842001621684235, + "grad_norm": 2.099759817123413, + "learning_rate": 0.00014772256329073196, + "loss": 0.1987, + "step": 20310 + }, + { + "epoch": 0.7845862774624502, + "grad_norm": 4.439823627471924, + "learning_rate": 0.00014769682227113018, + "loss": 0.2449, + "step": 20320 + }, + { + "epoch": 0.7849723927564771, + "grad_norm": 1.894250750541687, + "learning_rate": 0.00014767108125152836, + "loss": 0.3335, + "step": 20330 + }, + { + "epoch": 0.7853585080505039, + 
"grad_norm": 2.2916924953460693, + "learning_rate": 0.0001476453402319266, + "loss": 0.3828, + "step": 20340 + }, + { + "epoch": 0.7857446233445307, + "grad_norm": 3.7847397327423096, + "learning_rate": 0.0001476195992123248, + "loss": 0.2142, + "step": 20350 + }, + { + "epoch": 0.7861307386385574, + "grad_norm": 2.7507593631744385, + "learning_rate": 0.00014759385819272303, + "loss": 0.39, + "step": 20360 + }, + { + "epoch": 0.7865168539325843, + "grad_norm": 2.500195264816284, + "learning_rate": 0.00014756811717312124, + "loss": 0.4163, + "step": 20370 + }, + { + "epoch": 0.7869029692266111, + "grad_norm": 1.3699760437011719, + "learning_rate": 0.00014754237615351946, + "loss": 0.2407, + "step": 20380 + }, + { + "epoch": 0.7872890845206378, + "grad_norm": 0.9736176133155823, + "learning_rate": 0.00014751663513391767, + "loss": 0.3644, + "step": 20390 + }, + { + "epoch": 0.7876751998146646, + "grad_norm": 0.9103217720985413, + "learning_rate": 0.00014749089411431588, + "loss": 0.1304, + "step": 20400 + }, + { + "epoch": 0.7880613151086915, + "grad_norm": 0.7736025452613831, + "learning_rate": 0.0001474651530947141, + "loss": 0.3848, + "step": 20410 + }, + { + "epoch": 0.7884474304027183, + "grad_norm": 1.6167576313018799, + "learning_rate": 0.00014743941207511228, + "loss": 0.3469, + "step": 20420 + }, + { + "epoch": 0.788833545696745, + "grad_norm": 0.1359117031097412, + "learning_rate": 0.00014741367105551052, + "loss": 0.28, + "step": 20430 + }, + { + "epoch": 0.7892196609907719, + "grad_norm": 0.20857305824756622, + "learning_rate": 0.00014738793003590874, + "loss": 0.2406, + "step": 20440 + }, + { + "epoch": 0.7896057762847987, + "grad_norm": 0.9618992805480957, + "learning_rate": 0.00014736218901630695, + "loss": 0.243, + "step": 20450 + }, + { + "epoch": 0.7899918915788254, + "grad_norm": 0.4663112163543701, + "learning_rate": 0.00014733644799670516, + "loss": 0.214, + "step": 20460 + }, + { + "epoch": 0.7903780068728522, + "grad_norm": 1.3922615051269531, + "learning_rate": 0.00014731070697710338, + "loss": 0.4601, + "step": 20470 + }, + { + "epoch": 0.7907641221668791, + "grad_norm": 1.5706521272659302, + "learning_rate": 0.0001472849659575016, + "loss": 0.2187, + "step": 20480 + }, + { + "epoch": 0.7911502374609058, + "grad_norm": 1.9315848350524902, + "learning_rate": 0.00014725922493789978, + "loss": 0.2691, + "step": 20490 + }, + { + "epoch": 0.7915363527549326, + "grad_norm": 2.6131045818328857, + "learning_rate": 0.00014723348391829802, + "loss": 0.335, + "step": 20500 + }, + { + "epoch": 0.7919224680489594, + "grad_norm": 2.6561343669891357, + "learning_rate": 0.00014720774289869623, + "loss": 0.2166, + "step": 20510 + }, + { + "epoch": 0.7923085833429863, + "grad_norm": 1.247574806213379, + "learning_rate": 0.00014718200187909444, + "loss": 0.4276, + "step": 20520 + }, + { + "epoch": 0.792694698637013, + "grad_norm": 0.7353020310401917, + "learning_rate": 0.00014715626085949266, + "loss": 0.3442, + "step": 20530 + }, + { + "epoch": 0.7930808139310398, + "grad_norm": 0.1100919172167778, + "learning_rate": 0.00014713051983989087, + "loss": 0.21, + "step": 20540 + }, + { + "epoch": 0.7934669292250666, + "grad_norm": 0.6608699560165405, + "learning_rate": 0.00014710477882028908, + "loss": 0.2593, + "step": 20550 + }, + { + "epoch": 0.7938530445190934, + "grad_norm": 1.0959700345993042, + "learning_rate": 0.0001470790378006873, + "loss": 0.1864, + "step": 20560 + }, + { + "epoch": 0.7942391598131202, + "grad_norm": 1.469408392906189, + "learning_rate": 0.0001470532967810855, + 
"loss": 0.251, + "step": 20570 + }, + { + "epoch": 0.794625275107147, + "grad_norm": 1.4239304065704346, + "learning_rate": 0.00014702755576148372, + "loss": 0.1782, + "step": 20580 + }, + { + "epoch": 0.7950113904011737, + "grad_norm": 0.5389681458473206, + "learning_rate": 0.00014700181474188194, + "loss": 0.3228, + "step": 20590 + }, + { + "epoch": 0.7953975056952006, + "grad_norm": 0.4744633436203003, + "learning_rate": 0.00014697607372228015, + "loss": 0.5448, + "step": 20600 + }, + { + "epoch": 0.7957836209892274, + "grad_norm": 1.1155211925506592, + "learning_rate": 0.00014695033270267836, + "loss": 0.3341, + "step": 20610 + }, + { + "epoch": 0.7961697362832542, + "grad_norm": 1.4301745891571045, + "learning_rate": 0.00014692459168307658, + "loss": 0.2376, + "step": 20620 + }, + { + "epoch": 0.7965558515772809, + "grad_norm": 2.0889878273010254, + "learning_rate": 0.0001468988506634748, + "loss": 0.252, + "step": 20630 + }, + { + "epoch": 0.7969419668713078, + "grad_norm": 1.420873761177063, + "learning_rate": 0.000146873109643873, + "loss": 0.4033, + "step": 20640 + }, + { + "epoch": 0.7973280821653346, + "grad_norm": 1.9909567832946777, + "learning_rate": 0.00014684736862427122, + "loss": 0.5206, + "step": 20650 + }, + { + "epoch": 0.7977141974593613, + "grad_norm": 1.3584216833114624, + "learning_rate": 0.00014682162760466943, + "loss": 0.2643, + "step": 20660 + }, + { + "epoch": 0.7981003127533881, + "grad_norm": 1.5488578081130981, + "learning_rate": 0.00014679588658506764, + "loss": 0.4098, + "step": 20670 + }, + { + "epoch": 0.798486428047415, + "grad_norm": 2.0128777027130127, + "learning_rate": 0.00014677014556546586, + "loss": 0.2498, + "step": 20680 + }, + { + "epoch": 0.7988725433414418, + "grad_norm": 0.6591671705245972, + "learning_rate": 0.00014674440454586407, + "loss": 0.3131, + "step": 20690 + }, + { + "epoch": 0.7992586586354685, + "grad_norm": 0.8321843147277832, + "learning_rate": 0.00014671866352626228, + "loss": 0.3849, + "step": 20700 + }, + { + "epoch": 0.7996447739294954, + "grad_norm": 1.4672768115997314, + "learning_rate": 0.0001466929225066605, + "loss": 0.3056, + "step": 20710 + }, + { + "epoch": 0.8000308892235222, + "grad_norm": 1.5972867012023926, + "learning_rate": 0.0001466671814870587, + "loss": 0.2699, + "step": 20720 + }, + { + "epoch": 0.8004170045175489, + "grad_norm": 0.685972273349762, + "learning_rate": 0.00014664144046745692, + "loss": 0.2598, + "step": 20730 + }, + { + "epoch": 0.8008031198115757, + "grad_norm": 1.2639611959457397, + "learning_rate": 0.00014661569944785514, + "loss": 0.2158, + "step": 20740 + }, + { + "epoch": 0.8011892351056026, + "grad_norm": 2.0752620697021484, + "learning_rate": 0.00014658995842825335, + "loss": 0.2782, + "step": 20750 + }, + { + "epoch": 0.8015753503996293, + "grad_norm": 4.334362983703613, + "learning_rate": 0.00014656421740865156, + "loss": 0.5083, + "step": 20760 + }, + { + "epoch": 0.8019614656936561, + "grad_norm": 2.233436107635498, + "learning_rate": 0.00014653847638904978, + "loss": 0.4378, + "step": 20770 + }, + { + "epoch": 0.8023475809876829, + "grad_norm": 0.5392621755599976, + "learning_rate": 0.000146512735369448, + "loss": 0.4467, + "step": 20780 + }, + { + "epoch": 0.8027336962817098, + "grad_norm": 0.3768058121204376, + "learning_rate": 0.0001464869943498462, + "loss": 0.3542, + "step": 20790 + }, + { + "epoch": 0.8031198115757365, + "grad_norm": 0.5712292194366455, + "learning_rate": 0.00014646125333024442, + "loss": 0.281, + "step": 20800 + }, + { + "epoch": 0.8035059268697633, + 
"grad_norm": 0.8674315214157104, + "learning_rate": 0.00014643551231064263, + "loss": 0.2945, + "step": 20810 + }, + { + "epoch": 0.8038920421637901, + "grad_norm": 1.6206015348434448, + "learning_rate": 0.00014640977129104084, + "loss": 0.2627, + "step": 20820 + }, + { + "epoch": 0.804278157457817, + "grad_norm": 1.8807138204574585, + "learning_rate": 0.00014638403027143906, + "loss": 0.3768, + "step": 20830 + }, + { + "epoch": 0.8046642727518437, + "grad_norm": 0.9721212387084961, + "learning_rate": 0.00014635828925183727, + "loss": 0.4511, + "step": 20840 + }, + { + "epoch": 0.8050503880458705, + "grad_norm": 0.569038987159729, + "learning_rate": 0.00014633254823223548, + "loss": 0.2235, + "step": 20850 + }, + { + "epoch": 0.8054365033398972, + "grad_norm": 0.5981199741363525, + "learning_rate": 0.0001463068072126337, + "loss": 0.1413, + "step": 20860 + }, + { + "epoch": 0.8058226186339241, + "grad_norm": 3.696936845779419, + "learning_rate": 0.00014628106619303194, + "loss": 0.3779, + "step": 20870 + }, + { + "epoch": 0.8062087339279509, + "grad_norm": 1.5039314031600952, + "learning_rate": 0.00014625532517343012, + "loss": 0.2117, + "step": 20880 + }, + { + "epoch": 0.8065948492219777, + "grad_norm": 1.7800476551055908, + "learning_rate": 0.00014622958415382834, + "loss": 0.2343, + "step": 20890 + }, + { + "epoch": 0.8069809645160044, + "grad_norm": 0.18587611615657806, + "learning_rate": 0.00014620384313422655, + "loss": 0.2229, + "step": 20900 + }, + { + "epoch": 0.8073670798100313, + "grad_norm": 3.5351600646972656, + "learning_rate": 0.00014617810211462476, + "loss": 0.3632, + "step": 20910 + }, + { + "epoch": 0.8077531951040581, + "grad_norm": 3.9036381244659424, + "learning_rate": 0.00014615236109502298, + "loss": 0.2941, + "step": 20920 + }, + { + "epoch": 0.8081393103980848, + "grad_norm": 1.9554537534713745, + "learning_rate": 0.0001461266200754212, + "loss": 0.2685, + "step": 20930 + }, + { + "epoch": 0.8085254256921116, + "grad_norm": 1.0424940586090088, + "learning_rate": 0.00014610087905581943, + "loss": 0.2617, + "step": 20940 + }, + { + "epoch": 0.8089115409861385, + "grad_norm": 6.593061923980713, + "learning_rate": 0.00014607513803621762, + "loss": 0.5034, + "step": 20950 + }, + { + "epoch": 0.8092976562801653, + "grad_norm": 11.373255729675293, + "learning_rate": 0.00014604939701661583, + "loss": 0.2694, + "step": 20960 + }, + { + "epoch": 0.809683771574192, + "grad_norm": 1.4843833446502686, + "learning_rate": 0.00014602365599701404, + "loss": 0.4526, + "step": 20970 + }, + { + "epoch": 0.8100698868682189, + "grad_norm": 3.6086366176605225, + "learning_rate": 0.00014599791497741225, + "loss": 0.4536, + "step": 20980 + }, + { + "epoch": 0.8104560021622457, + "grad_norm": 0.6381124258041382, + "learning_rate": 0.00014597217395781047, + "loss": 0.2918, + "step": 20990 + }, + { + "epoch": 0.8108421174562724, + "grad_norm": 1.9507087469100952, + "learning_rate": 0.00014594643293820868, + "loss": 0.1625, + "step": 21000 + }, + { + "epoch": 0.8112282327502992, + "grad_norm": 0.954914391040802, + "learning_rate": 0.00014592069191860692, + "loss": 0.4204, + "step": 21010 + }, + { + "epoch": 0.8116143480443261, + "grad_norm": 2.633601427078247, + "learning_rate": 0.0001458949508990051, + "loss": 0.4017, + "step": 21020 + }, + { + "epoch": 0.8120004633383529, + "grad_norm": 0.7492280602455139, + "learning_rate": 0.00014586920987940332, + "loss": 0.3209, + "step": 21030 + }, + { + "epoch": 0.8123865786323796, + "grad_norm": 1.1670303344726562, + "learning_rate": 
0.00014584346885980153, + "loss": 0.3361, + "step": 21040 + }, + { + "epoch": 0.8127726939264064, + "grad_norm": 0.8462283611297607, + "learning_rate": 0.00014581772784019975, + "loss": 0.3156, + "step": 21050 + }, + { + "epoch": 0.8131588092204333, + "grad_norm": 2.151671886444092, + "learning_rate": 0.000145791986820598, + "loss": 0.2081, + "step": 21060 + }, + { + "epoch": 0.81354492451446, + "grad_norm": 1.0742170810699463, + "learning_rate": 0.00014576624580099617, + "loss": 0.2247, + "step": 21070 + }, + { + "epoch": 0.8139310398084868, + "grad_norm": 1.2256931066513062, + "learning_rate": 0.00014574050478139442, + "loss": 0.3542, + "step": 21080 + }, + { + "epoch": 0.8143171551025136, + "grad_norm": 3.740055561065674, + "learning_rate": 0.0001457147637617926, + "loss": 0.1856, + "step": 21090 + }, + { + "epoch": 0.8147032703965404, + "grad_norm": 0.03290783613920212, + "learning_rate": 0.00014568902274219081, + "loss": 0.2616, + "step": 21100 + }, + { + "epoch": 0.8150893856905672, + "grad_norm": 0.13995541632175446, + "learning_rate": 0.00014566328172258903, + "loss": 0.2107, + "step": 21110 + }, + { + "epoch": 0.815475500984594, + "grad_norm": 0.44371533393859863, + "learning_rate": 0.00014563754070298724, + "loss": 0.3091, + "step": 21120 + }, + { + "epoch": 0.8158616162786207, + "grad_norm": 2.7269155979156494, + "learning_rate": 0.00014561179968338548, + "loss": 0.5674, + "step": 21130 + }, + { + "epoch": 0.8162477315726476, + "grad_norm": 0.7148515582084656, + "learning_rate": 0.00014558605866378367, + "loss": 0.4721, + "step": 21140 + }, + { + "epoch": 0.8166338468666744, + "grad_norm": 1.0939961671829224, + "learning_rate": 0.0001455603176441819, + "loss": 0.2666, + "step": 21150 + }, + { + "epoch": 0.8170199621607012, + "grad_norm": 1.7923939228057861, + "learning_rate": 0.0001455345766245801, + "loss": 0.2634, + "step": 21160 + }, + { + "epoch": 0.8174060774547279, + "grad_norm": 0.725130021572113, + "learning_rate": 0.00014550883560497834, + "loss": 0.1968, + "step": 21170 + }, + { + "epoch": 0.8177921927487548, + "grad_norm": 0.443892240524292, + "learning_rate": 0.00014548309458537655, + "loss": 0.556, + "step": 21180 + }, + { + "epoch": 0.8181783080427816, + "grad_norm": 1.3551362752914429, + "learning_rate": 0.00014545735356577473, + "loss": 0.4115, + "step": 21190 + }, + { + "epoch": 0.8185644233368083, + "grad_norm": 0.6360037922859192, + "learning_rate": 0.00014543161254617297, + "loss": 0.3176, + "step": 21200 + }, + { + "epoch": 0.8189505386308351, + "grad_norm": 2.634549140930176, + "learning_rate": 0.00014540587152657116, + "loss": 0.3662, + "step": 21210 + }, + { + "epoch": 0.819336653924862, + "grad_norm": 3.267479181289673, + "learning_rate": 0.0001453801305069694, + "loss": 0.2925, + "step": 21220 + }, + { + "epoch": 0.8197227692188888, + "grad_norm": 1.3607991933822632, + "learning_rate": 0.0001453543894873676, + "loss": 0.1795, + "step": 21230 + }, + { + "epoch": 0.8201088845129155, + "grad_norm": 0.6499636769294739, + "learning_rate": 0.00014532864846776583, + "loss": 0.4451, + "step": 21240 + }, + { + "epoch": 0.8204949998069424, + "grad_norm": 0.7486141920089722, + "learning_rate": 0.00014530290744816404, + "loss": 0.3982, + "step": 21250 + }, + { + "epoch": 0.8208811151009692, + "grad_norm": 0.6481244564056396, + "learning_rate": 0.00014527716642856223, + "loss": 0.3358, + "step": 21260 + }, + { + "epoch": 0.8212672303949959, + "grad_norm": 1.0736982822418213, + "learning_rate": 0.00014525142540896047, + "loss": 0.5264, + "step": 21270 + }, + { + 
"epoch": 0.8216533456890227, + "grad_norm": 2.0467801094055176, + "learning_rate": 0.00014522568438935865, + "loss": 0.3645, + "step": 21280 + }, + { + "epoch": 0.8220394609830496, + "grad_norm": 2.671499013900757, + "learning_rate": 0.0001451999433697569, + "loss": 0.4779, + "step": 21290 + }, + { + "epoch": 0.8224255762770764, + "grad_norm": 1.4449695348739624, + "learning_rate": 0.00014517420235015508, + "loss": 0.3555, + "step": 21300 + }, + { + "epoch": 0.8228116915711031, + "grad_norm": 1.7484570741653442, + "learning_rate": 0.00014514846133055332, + "loss": 0.2921, + "step": 21310 + }, + { + "epoch": 0.8231978068651299, + "grad_norm": 0.9985783100128174, + "learning_rate": 0.00014512272031095153, + "loss": 0.1861, + "step": 21320 + }, + { + "epoch": 0.8235839221591568, + "grad_norm": 2.0824766159057617, + "learning_rate": 0.00014509697929134972, + "loss": 0.3582, + "step": 21330 + }, + { + "epoch": 0.8239700374531835, + "grad_norm": 0.8448216915130615, + "learning_rate": 0.00014507123827174796, + "loss": 0.3674, + "step": 21340 + }, + { + "epoch": 0.8243561527472103, + "grad_norm": 2.027111053466797, + "learning_rate": 0.00014504549725214615, + "loss": 0.3297, + "step": 21350 + }, + { + "epoch": 0.8247422680412371, + "grad_norm": 1.560604214668274, + "learning_rate": 0.0001450197562325444, + "loss": 0.3303, + "step": 21360 + }, + { + "epoch": 0.825128383335264, + "grad_norm": 2.179563045501709, + "learning_rate": 0.0001449940152129426, + "loss": 0.1704, + "step": 21370 + }, + { + "epoch": 0.8255144986292907, + "grad_norm": 1.6268993616104126, + "learning_rate": 0.00014496827419334081, + "loss": 0.3316, + "step": 21380 + }, + { + "epoch": 0.8259006139233175, + "grad_norm": 0.8986232280731201, + "learning_rate": 0.00014494253317373903, + "loss": 0.3361, + "step": 21390 + }, + { + "epoch": 0.8262867292173443, + "grad_norm": 0.8017566204071045, + "learning_rate": 0.00014491679215413721, + "loss": 0.3992, + "step": 21400 + }, + { + "epoch": 0.8266728445113711, + "grad_norm": 0.879162073135376, + "learning_rate": 0.00014489105113453545, + "loss": 0.3747, + "step": 21410 + }, + { + "epoch": 0.8270589598053979, + "grad_norm": 1.948309302330017, + "learning_rate": 0.00014486531011493364, + "loss": 0.2323, + "step": 21420 + }, + { + "epoch": 0.8274450750994247, + "grad_norm": 1.34186851978302, + "learning_rate": 0.00014483956909533188, + "loss": 0.3856, + "step": 21430 + }, + { + "epoch": 0.8278311903934514, + "grad_norm": 1.3884105682373047, + "learning_rate": 0.0001448138280757301, + "loss": 0.3044, + "step": 21440 + }, + { + "epoch": 0.8282173056874783, + "grad_norm": 1.3283358812332153, + "learning_rate": 0.0001447880870561283, + "loss": 0.3624, + "step": 21450 + }, + { + "epoch": 0.8286034209815051, + "grad_norm": 13.829493522644043, + "learning_rate": 0.00014476234603652652, + "loss": 0.2217, + "step": 21460 + }, + { + "epoch": 0.8289895362755318, + "grad_norm": 3.4602255821228027, + "learning_rate": 0.0001447366050169247, + "loss": 0.4964, + "step": 21470 + }, + { + "epoch": 0.8293756515695587, + "grad_norm": 0.42417749762535095, + "learning_rate": 0.00014471086399732295, + "loss": 0.2262, + "step": 21480 + }, + { + "epoch": 0.8297617668635855, + "grad_norm": 3.1674726009368896, + "learning_rate": 0.00014468512297772113, + "loss": 0.3327, + "step": 21490 + }, + { + "epoch": 0.8301478821576123, + "grad_norm": 0.7226410508155823, + "learning_rate": 0.00014465938195811937, + "loss": 0.3152, + "step": 21500 + }, + { + "epoch": 0.830533997451639, + "grad_norm": 0.7477544546127319, + 
"learning_rate": 0.0001446336409385176, + "loss": 0.3676, + "step": 21510 + }, + { + "epoch": 0.8309201127456659, + "grad_norm": 1.6237748861312866, + "learning_rate": 0.0001446078999189158, + "loss": 0.336, + "step": 21520 + }, + { + "epoch": 0.8313062280396927, + "grad_norm": 2.8118655681610107, + "learning_rate": 0.00014458215889931401, + "loss": 0.3513, + "step": 21530 + }, + { + "epoch": 0.8316923433337194, + "grad_norm": 2.6571335792541504, + "learning_rate": 0.0001445564178797122, + "loss": 0.4008, + "step": 21540 + }, + { + "epoch": 0.8320784586277462, + "grad_norm": 0.7042214870452881, + "learning_rate": 0.00014453067686011044, + "loss": 0.3433, + "step": 21550 + }, + { + "epoch": 0.8324645739217731, + "grad_norm": 0.1551884561777115, + "learning_rate": 0.00014450493584050865, + "loss": 0.1748, + "step": 21560 + }, + { + "epoch": 0.8328506892157999, + "grad_norm": 1.2595586776733398, + "learning_rate": 0.00014447919482090687, + "loss": 0.2567, + "step": 21570 + }, + { + "epoch": 0.8332368045098266, + "grad_norm": 3.800837516784668, + "learning_rate": 0.00014445345380130508, + "loss": 0.31, + "step": 21580 + }, + { + "epoch": 0.8336229198038534, + "grad_norm": 2.2269585132598877, + "learning_rate": 0.0001444277127817033, + "loss": 0.5146, + "step": 21590 + }, + { + "epoch": 0.8340090350978803, + "grad_norm": 0.5263709425926208, + "learning_rate": 0.0001444019717621015, + "loss": 0.2369, + "step": 21600 + }, + { + "epoch": 0.834395150391907, + "grad_norm": 0.27564361691474915, + "learning_rate": 0.00014437623074249972, + "loss": 0.308, + "step": 21610 + }, + { + "epoch": 0.8347812656859338, + "grad_norm": 4.639162540435791, + "learning_rate": 0.00014435048972289793, + "loss": 0.3806, + "step": 21620 + }, + { + "epoch": 0.8351673809799606, + "grad_norm": 0.9607310891151428, + "learning_rate": 0.00014432474870329615, + "loss": 0.2245, + "step": 21630 + }, + { + "epoch": 0.8355534962739875, + "grad_norm": 4.01082706451416, + "learning_rate": 0.00014429900768369436, + "loss": 0.3934, + "step": 21640 + }, + { + "epoch": 0.8359396115680142, + "grad_norm": 0.9401382803916931, + "learning_rate": 0.00014427326666409257, + "loss": 0.4726, + "step": 21650 + }, + { + "epoch": 0.836325726862041, + "grad_norm": 2.1189887523651123, + "learning_rate": 0.0001442475256444908, + "loss": 0.5434, + "step": 21660 + }, + { + "epoch": 0.8367118421560678, + "grad_norm": 2.370849132537842, + "learning_rate": 0.000144221784624889, + "loss": 0.3885, + "step": 21670 + }, + { + "epoch": 0.8370979574500946, + "grad_norm": 0.595461368560791, + "learning_rate": 0.00014419604360528721, + "loss": 0.3811, + "step": 21680 + }, + { + "epoch": 0.8374840727441214, + "grad_norm": 0.9013121128082275, + "learning_rate": 0.00014417030258568543, + "loss": 0.2406, + "step": 21690 + }, + { + "epoch": 0.8378701880381482, + "grad_norm": 1.3803203105926514, + "learning_rate": 0.00014414456156608364, + "loss": 0.2197, + "step": 21700 + }, + { + "epoch": 0.8382563033321749, + "grad_norm": 1.6163750886917114, + "learning_rate": 0.00014411882054648185, + "loss": 0.2622, + "step": 21710 + }, + { + "epoch": 0.8386424186262018, + "grad_norm": 3.604384660720825, + "learning_rate": 0.00014409307952688007, + "loss": 0.3688, + "step": 21720 + }, + { + "epoch": 0.8390285339202286, + "grad_norm": 1.4415024518966675, + "learning_rate": 0.00014406733850727828, + "loss": 0.2375, + "step": 21730 + }, + { + "epoch": 0.8394146492142553, + "grad_norm": 1.4819844961166382, + "learning_rate": 0.0001440415974876765, + "loss": 0.4065, + "step": 21740 + }, 
+ { + "epoch": 0.8398007645082822, + "grad_norm": 1.3991562128067017, + "learning_rate": 0.0001440158564680747, + "loss": 0.2942, + "step": 21750 + }, + { + "epoch": 0.840186879802309, + "grad_norm": 2.022538185119629, + "learning_rate": 0.00014399011544847292, + "loss": 0.2699, + "step": 21760 + }, + { + "epoch": 0.8405729950963358, + "grad_norm": 2.418179512023926, + "learning_rate": 0.00014396437442887113, + "loss": 0.3481, + "step": 21770 + }, + { + "epoch": 0.8409591103903625, + "grad_norm": 0.930482029914856, + "learning_rate": 0.00014393863340926935, + "loss": 0.3257, + "step": 21780 + }, + { + "epoch": 0.8413452256843894, + "grad_norm": 3.616676092147827, + "learning_rate": 0.00014391289238966756, + "loss": 0.3844, + "step": 21790 + }, + { + "epoch": 0.8417313409784162, + "grad_norm": 1.7993167638778687, + "learning_rate": 0.00014388715137006577, + "loss": 0.4569, + "step": 21800 + }, + { + "epoch": 0.8421174562724429, + "grad_norm": 1.9243824481964111, + "learning_rate": 0.000143861410350464, + "loss": 0.282, + "step": 21810 + }, + { + "epoch": 0.8425035715664697, + "grad_norm": 1.6578466892242432, + "learning_rate": 0.0001438356693308622, + "loss": 0.314, + "step": 21820 + }, + { + "epoch": 0.8428896868604966, + "grad_norm": 1.4833110570907593, + "learning_rate": 0.00014380992831126041, + "loss": 0.2698, + "step": 21830 + }, + { + "epoch": 0.8432758021545234, + "grad_norm": 1.9081813097000122, + "learning_rate": 0.00014378418729165863, + "loss": 0.2392, + "step": 21840 + }, + { + "epoch": 0.8436619174485501, + "grad_norm": 1.8436548709869385, + "learning_rate": 0.00014375844627205684, + "loss": 0.1906, + "step": 21850 + }, + { + "epoch": 0.8440480327425769, + "grad_norm": 4.679655075073242, + "learning_rate": 0.00014373270525245505, + "loss": 0.6446, + "step": 21860 + }, + { + "epoch": 0.8444341480366038, + "grad_norm": 1.8216800689697266, + "learning_rate": 0.0001437069642328533, + "loss": 0.3272, + "step": 21870 + }, + { + "epoch": 0.8448202633306305, + "grad_norm": 1.0107386112213135, + "learning_rate": 0.00014368122321325148, + "loss": 0.3003, + "step": 21880 + }, + { + "epoch": 0.8452063786246573, + "grad_norm": 0.9573041796684265, + "learning_rate": 0.0001436554821936497, + "loss": 0.1757, + "step": 21890 + }, + { + "epoch": 0.8455924939186841, + "grad_norm": 0.9367936253547668, + "learning_rate": 0.0001436297411740479, + "loss": 0.2166, + "step": 21900 + }, + { + "epoch": 0.845978609212711, + "grad_norm": 3.1247951984405518, + "learning_rate": 0.00014360400015444612, + "loss": 0.3488, + "step": 21910 + }, + { + "epoch": 0.8463647245067377, + "grad_norm": 3.9438281059265137, + "learning_rate": 0.00014357825913484433, + "loss": 0.4498, + "step": 21920 + }, + { + "epoch": 0.8467508398007645, + "grad_norm": 0.909572958946228, + "learning_rate": 0.00014355251811524255, + "loss": 0.2698, + "step": 21930 + }, + { + "epoch": 0.8471369550947913, + "grad_norm": 2.6619715690612793, + "learning_rate": 0.0001435267770956408, + "loss": 0.4204, + "step": 21940 + }, + { + "epoch": 0.8475230703888181, + "grad_norm": 0.6143421530723572, + "learning_rate": 0.00014350103607603897, + "loss": 0.3573, + "step": 21950 + }, + { + "epoch": 0.8479091856828449, + "grad_norm": 0.3222682476043701, + "learning_rate": 0.0001434752950564372, + "loss": 0.2172, + "step": 21960 + }, + { + "epoch": 0.8482953009768717, + "grad_norm": 1.772538185119629, + "learning_rate": 0.0001434495540368354, + "loss": 0.4203, + "step": 21970 + }, + { + "epoch": 0.8486814162708984, + "grad_norm": 1.6327133178710938, + 
"learning_rate": 0.0001434238130172336, + "loss": 0.2153, + "step": 21980 + }, + { + "epoch": 0.8490675315649253, + "grad_norm": 1.0445518493652344, + "learning_rate": 0.00014339807199763183, + "loss": 0.3392, + "step": 21990 + }, + { + "epoch": 0.8494536468589521, + "grad_norm": 3.6096575260162354, + "learning_rate": 0.00014337233097803004, + "loss": 0.2691, + "step": 22000 + }, + { + "epoch": 0.8498397621529789, + "grad_norm": 1.4343204498291016, + "learning_rate": 0.00014334658995842828, + "loss": 0.3118, + "step": 22010 + }, + { + "epoch": 0.8502258774470057, + "grad_norm": 1.0348806381225586, + "learning_rate": 0.00014332084893882647, + "loss": 0.2886, + "step": 22020 + }, + { + "epoch": 0.8506119927410325, + "grad_norm": 0.5164201855659485, + "learning_rate": 0.00014329510791922468, + "loss": 0.2943, + "step": 22030 + }, + { + "epoch": 0.8509981080350593, + "grad_norm": 1.8109897375106812, + "learning_rate": 0.0001432693668996229, + "loss": 0.4021, + "step": 22040 + }, + { + "epoch": 0.851384223329086, + "grad_norm": 2.7065579891204834, + "learning_rate": 0.0001432436258800211, + "loss": 0.371, + "step": 22050 + }, + { + "epoch": 0.8517703386231129, + "grad_norm": 2.3028764724731445, + "learning_rate": 0.00014321788486041935, + "loss": 0.5026, + "step": 22060 + }, + { + "epoch": 0.8521564539171397, + "grad_norm": 1.3945609331130981, + "learning_rate": 0.00014319214384081753, + "loss": 0.4444, + "step": 22070 + }, + { + "epoch": 0.8525425692111664, + "grad_norm": 2.407951593399048, + "learning_rate": 0.00014316640282121577, + "loss": 0.4465, + "step": 22080 + }, + { + "epoch": 0.8529286845051932, + "grad_norm": 4.120944976806641, + "learning_rate": 0.00014314066180161396, + "loss": 0.3142, + "step": 22090 + }, + { + "epoch": 0.8533147997992201, + "grad_norm": 1.8841919898986816, + "learning_rate": 0.00014311492078201217, + "loss": 0.3609, + "step": 22100 + }, + { + "epoch": 0.8537009150932469, + "grad_norm": 5.1519951820373535, + "learning_rate": 0.00014308917976241039, + "loss": 0.3062, + "step": 22110 + }, + { + "epoch": 0.8540870303872736, + "grad_norm": 2.7280924320220947, + "learning_rate": 0.0001430634387428086, + "loss": 0.3678, + "step": 22120 + }, + { + "epoch": 0.8544731456813004, + "grad_norm": 0.23237809538841248, + "learning_rate": 0.00014303769772320684, + "loss": 0.2979, + "step": 22130 + }, + { + "epoch": 0.8548592609753273, + "grad_norm": 1.0587934255599976, + "learning_rate": 0.00014301195670360503, + "loss": 0.5672, + "step": 22140 + }, + { + "epoch": 0.855245376269354, + "grad_norm": 1.854447603225708, + "learning_rate": 0.00014298621568400327, + "loss": 0.3657, + "step": 22150 + }, + { + "epoch": 0.8556314915633808, + "grad_norm": 0.9766449332237244, + "learning_rate": 0.00014296047466440145, + "loss": 0.3219, + "step": 22160 + }, + { + "epoch": 0.8560176068574076, + "grad_norm": 1.7281047105789185, + "learning_rate": 0.00014293473364479967, + "loss": 0.3485, + "step": 22170 + }, + { + "epoch": 0.8564037221514345, + "grad_norm": 1.8366886377334595, + "learning_rate": 0.0001429089926251979, + "loss": 0.2929, + "step": 22180 + }, + { + "epoch": 0.8567898374454612, + "grad_norm": 0.8708136677742004, + "learning_rate": 0.0001428832516055961, + "loss": 0.3633, + "step": 22190 + }, + { + "epoch": 0.857175952739488, + "grad_norm": 1.5010342597961426, + "learning_rate": 0.00014285751058599433, + "loss": 0.3646, + "step": 22200 + }, + { + "epoch": 0.8575620680335148, + "grad_norm": 1.7438324689865112, + "learning_rate": 0.00014283176956639252, + "loss": 0.3093, + "step": 
22210 + }, + { + "epoch": 0.8579481833275416, + "grad_norm": 1.5954341888427734, + "learning_rate": 0.00014280602854679076, + "loss": 0.2435, + "step": 22220 + }, + { + "epoch": 0.8583342986215684, + "grad_norm": 2.5279555320739746, + "learning_rate": 0.00014278028752718895, + "loss": 0.2526, + "step": 22230 + }, + { + "epoch": 0.8587204139155952, + "grad_norm": 3.4773006439208984, + "learning_rate": 0.00014275454650758716, + "loss": 0.3763, + "step": 22240 + }, + { + "epoch": 0.8591065292096219, + "grad_norm": 0.25110548734664917, + "learning_rate": 0.0001427288054879854, + "loss": 0.2265, + "step": 22250 + }, + { + "epoch": 0.8594926445036488, + "grad_norm": 2.3060946464538574, + "learning_rate": 0.00014270306446838359, + "loss": 0.3756, + "step": 22260 + }, + { + "epoch": 0.8598787597976756, + "grad_norm": 2.206308364868164, + "learning_rate": 0.00014267732344878183, + "loss": 0.295, + "step": 22270 + }, + { + "epoch": 0.8602648750917024, + "grad_norm": 1.1059858798980713, + "learning_rate": 0.00014265158242918, + "loss": 0.2382, + "step": 22280 + }, + { + "epoch": 0.8606509903857292, + "grad_norm": 0.045407798141241074, + "learning_rate": 0.00014262584140957825, + "loss": 0.1725, + "step": 22290 + }, + { + "epoch": 0.861037105679756, + "grad_norm": 0.2532581686973572, + "learning_rate": 0.00014260010038997644, + "loss": 0.3089, + "step": 22300 + }, + { + "epoch": 0.8614232209737828, + "grad_norm": 0.8851459622383118, + "learning_rate": 0.00014257435937037468, + "loss": 0.2721, + "step": 22310 + }, + { + "epoch": 0.8618093362678095, + "grad_norm": 2.9988598823547363, + "learning_rate": 0.0001425486183507729, + "loss": 0.3854, + "step": 22320 + }, + { + "epoch": 0.8621954515618364, + "grad_norm": 1.888629674911499, + "learning_rate": 0.00014252287733117108, + "loss": 0.4472, + "step": 22330 + }, + { + "epoch": 0.8625815668558632, + "grad_norm": 0.9517232179641724, + "learning_rate": 0.00014249713631156932, + "loss": 0.3094, + "step": 22340 + }, + { + "epoch": 0.86296768214989, + "grad_norm": 1.4752097129821777, + "learning_rate": 0.0001424713952919675, + "loss": 0.2649, + "step": 22350 + }, + { + "epoch": 0.8633537974439167, + "grad_norm": 1.642285704612732, + "learning_rate": 0.00014244565427236575, + "loss": 0.4418, + "step": 22360 + }, + { + "epoch": 0.8637399127379436, + "grad_norm": 2.2177469730377197, + "learning_rate": 0.00014241991325276396, + "loss": 0.6865, + "step": 22370 + }, + { + "epoch": 0.8641260280319704, + "grad_norm": 0.9089158773422241, + "learning_rate": 0.00014239417223316217, + "loss": 0.1842, + "step": 22380 + }, + { + "epoch": 0.8645121433259971, + "grad_norm": 1.108091115951538, + "learning_rate": 0.00014236843121356039, + "loss": 0.3181, + "step": 22390 + }, + { + "epoch": 0.8648982586200239, + "grad_norm": 3.1317670345306396, + "learning_rate": 0.00014234269019395857, + "loss": 0.4798, + "step": 22400 + }, + { + "epoch": 0.8652843739140508, + "grad_norm": 1.0352108478546143, + "learning_rate": 0.0001423169491743568, + "loss": 0.3473, + "step": 22410 + }, + { + "epoch": 0.8656704892080775, + "grad_norm": 0.48546215891838074, + "learning_rate": 0.000142291208154755, + "loss": 0.2731, + "step": 22420 + }, + { + "epoch": 0.8660566045021043, + "grad_norm": 1.1608140468597412, + "learning_rate": 0.00014226546713515324, + "loss": 0.3202, + "step": 22430 + }, + { + "epoch": 0.8664427197961311, + "grad_norm": 0.19237665832042694, + "learning_rate": 0.00014223972611555145, + "loss": 0.4985, + "step": 22440 + }, + { + "epoch": 0.866828835090158, + "grad_norm": 
0.12056539207696915, + "learning_rate": 0.00014221398509594967, + "loss": 0.2071, + "step": 22450 + }, + { + "epoch": 0.8672149503841847, + "grad_norm": 1.416548252105713, + "learning_rate": 0.00014218824407634788, + "loss": 0.2572, + "step": 22460 + }, + { + "epoch": 0.8676010656782115, + "grad_norm": 0.816148042678833, + "learning_rate": 0.00014216250305674607, + "loss": 0.2368, + "step": 22470 + }, + { + "epoch": 0.8679871809722383, + "grad_norm": 3.2394118309020996, + "learning_rate": 0.0001421367620371443, + "loss": 0.3768, + "step": 22480 + }, + { + "epoch": 0.8683732962662651, + "grad_norm": 0.7187336087226868, + "learning_rate": 0.00014211102101754252, + "loss": 0.3297, + "step": 22490 + }, + { + "epoch": 0.8687594115602919, + "grad_norm": 0.5154927372932434, + "learning_rate": 0.00014208527999794073, + "loss": 0.3301, + "step": 22500 + }, + { + "epoch": 0.8691455268543187, + "grad_norm": 1.0461368560791016, + "learning_rate": 0.00014205953897833895, + "loss": 0.335, + "step": 22510 + }, + { + "epoch": 0.8695316421483454, + "grad_norm": 0.9720495343208313, + "learning_rate": 0.00014203379795873716, + "loss": 0.2405, + "step": 22520 + }, + { + "epoch": 0.8699177574423723, + "grad_norm": 2.147216558456421, + "learning_rate": 0.00014200805693913537, + "loss": 0.3291, + "step": 22530 + }, + { + "epoch": 0.8703038727363991, + "grad_norm": 1.162614345550537, + "learning_rate": 0.00014198231591953356, + "loss": 0.2705, + "step": 22540 + }, + { + "epoch": 0.8706899880304259, + "grad_norm": 0.5020268559455872, + "learning_rate": 0.0001419565748999318, + "loss": 0.4403, + "step": 22550 + }, + { + "epoch": 0.8710761033244527, + "grad_norm": 3.198425531387329, + "learning_rate": 0.00014193083388033, + "loss": 0.2244, + "step": 22560 + }, + { + "epoch": 0.8714622186184795, + "grad_norm": 2.4578161239624023, + "learning_rate": 0.00014190509286072823, + "loss": 0.3993, + "step": 22570 + }, + { + "epoch": 0.8718483339125063, + "grad_norm": 1.4559924602508545, + "learning_rate": 0.00014187935184112644, + "loss": 0.2631, + "step": 22580 + }, + { + "epoch": 0.872234449206533, + "grad_norm": 0.6834856271743774, + "learning_rate": 0.00014185361082152465, + "loss": 0.3183, + "step": 22590 + }, + { + "epoch": 0.8726205645005599, + "grad_norm": 3.9718177318573, + "learning_rate": 0.00014182786980192287, + "loss": 0.439, + "step": 22600 + }, + { + "epoch": 0.8730066797945867, + "grad_norm": 1.7797685861587524, + "learning_rate": 0.00014180212878232105, + "loss": 0.2938, + "step": 22610 + }, + { + "epoch": 0.8733927950886134, + "grad_norm": 2.137479543685913, + "learning_rate": 0.0001417763877627193, + "loss": 0.4733, + "step": 22620 + }, + { + "epoch": 0.8737789103826402, + "grad_norm": 0.7577596306800842, + "learning_rate": 0.0001417506467431175, + "loss": 0.3599, + "step": 22630 + }, + { + "epoch": 0.8741650256766671, + "grad_norm": 0.8944536447525024, + "learning_rate": 0.00014172490572351572, + "loss": 0.3393, + "step": 22640 + }, + { + "epoch": 0.8745511409706939, + "grad_norm": 1.59170663356781, + "learning_rate": 0.00014169916470391393, + "loss": 0.3578, + "step": 22650 + }, + { + "epoch": 0.8749372562647206, + "grad_norm": 0.9852517247200012, + "learning_rate": 0.00014167342368431215, + "loss": 0.4203, + "step": 22660 + }, + { + "epoch": 0.8753233715587474, + "grad_norm": 1.0319880247116089, + "learning_rate": 0.00014164768266471036, + "loss": 0.3332, + "step": 22670 + }, + { + "epoch": 0.8757094868527743, + "grad_norm": 3.713357925415039, + "learning_rate": 0.00014162194164510857, + "loss": 0.412, 
+ "step": 22680 + }, + { + "epoch": 0.876095602146801, + "grad_norm": 1.3197567462921143, + "learning_rate": 0.00014159620062550679, + "loss": 0.286, + "step": 22690 + }, + { + "epoch": 0.8764817174408278, + "grad_norm": 3.604928493499756, + "learning_rate": 0.000141570459605905, + "loss": 0.4206, + "step": 22700 + }, + { + "epoch": 0.8768678327348546, + "grad_norm": 3.1074795722961426, + "learning_rate": 0.0001415447185863032, + "loss": 0.2261, + "step": 22710 + }, + { + "epoch": 0.8772539480288815, + "grad_norm": 2.855581760406494, + "learning_rate": 0.00014151897756670143, + "loss": 0.6825, + "step": 22720 + }, + { + "epoch": 0.8776400633229082, + "grad_norm": 3.5756995677948, + "learning_rate": 0.00014149323654709964, + "loss": 0.4283, + "step": 22730 + }, + { + "epoch": 0.878026178616935, + "grad_norm": 1.4255709648132324, + "learning_rate": 0.00014146749552749785, + "loss": 0.3243, + "step": 22740 + }, + { + "epoch": 0.8784122939109618, + "grad_norm": 0.3095746338367462, + "learning_rate": 0.00014144175450789607, + "loss": 0.1052, + "step": 22750 + }, + { + "epoch": 0.8787984092049886, + "grad_norm": 1.4129611253738403, + "learning_rate": 0.00014141601348829428, + "loss": 0.2746, + "step": 22760 + }, + { + "epoch": 0.8791845244990154, + "grad_norm": 0.6448315382003784, + "learning_rate": 0.0001413902724686925, + "loss": 0.3312, + "step": 22770 + }, + { + "epoch": 0.8795706397930422, + "grad_norm": 1.6328849792480469, + "learning_rate": 0.0001413645314490907, + "loss": 0.2346, + "step": 22780 + }, + { + "epoch": 0.879956755087069, + "grad_norm": 1.0974128246307373, + "learning_rate": 0.00014133879042948892, + "loss": 0.2446, + "step": 22790 + }, + { + "epoch": 0.8803428703810958, + "grad_norm": 2.3657541275024414, + "learning_rate": 0.00014131304940988713, + "loss": 0.3439, + "step": 22800 + }, + { + "epoch": 0.8807289856751226, + "grad_norm": 0.8959445953369141, + "learning_rate": 0.00014128730839028535, + "loss": 0.2896, + "step": 22810 + }, + { + "epoch": 0.8811151009691494, + "grad_norm": 1.5202107429504395, + "learning_rate": 0.00014126156737068356, + "loss": 0.1951, + "step": 22820 + }, + { + "epoch": 0.8815012162631762, + "grad_norm": 1.3710687160491943, + "learning_rate": 0.00014123582635108177, + "loss": 0.3193, + "step": 22830 + }, + { + "epoch": 0.881887331557203, + "grad_norm": 2.18868088722229, + "learning_rate": 0.00014121008533147999, + "loss": 0.3535, + "step": 22840 + }, + { + "epoch": 0.8822734468512298, + "grad_norm": 0.8251023888587952, + "learning_rate": 0.0001411843443118782, + "loss": 0.2536, + "step": 22850 + }, + { + "epoch": 0.8826595621452565, + "grad_norm": 1.0674525499343872, + "learning_rate": 0.0001411586032922764, + "loss": 0.3482, + "step": 22860 + }, + { + "epoch": 0.8830456774392834, + "grad_norm": 2.1199145317077637, + "learning_rate": 0.00014113286227267463, + "loss": 0.201, + "step": 22870 + }, + { + "epoch": 0.8834317927333102, + "grad_norm": 1.1958723068237305, + "learning_rate": 0.00014110712125307284, + "loss": 0.2813, + "step": 22880 + }, + { + "epoch": 0.883817908027337, + "grad_norm": 1.7805982828140259, + "learning_rate": 0.00014108138023347105, + "loss": 0.5317, + "step": 22890 + }, + { + "epoch": 0.8842040233213637, + "grad_norm": 1.9648222923278809, + "learning_rate": 0.00014105563921386926, + "loss": 0.3022, + "step": 22900 + }, + { + "epoch": 0.8845901386153906, + "grad_norm": 0.9053369164466858, + "learning_rate": 0.00014102989819426748, + "loss": 0.2886, + "step": 22910 + }, + { + "epoch": 0.8849762539094174, + "grad_norm": 
1.251861333847046, + "learning_rate": 0.0001410041571746657, + "loss": 0.2632, + "step": 22920 + }, + { + "epoch": 0.8853623692034441, + "grad_norm": 3.8411691188812256, + "learning_rate": 0.0001409784161550639, + "loss": 0.3056, + "step": 22930 + }, + { + "epoch": 0.8857484844974709, + "grad_norm": 0.5969072580337524, + "learning_rate": 0.00014095267513546212, + "loss": 0.3806, + "step": 22940 + }, + { + "epoch": 0.8861345997914978, + "grad_norm": 4.688140392303467, + "learning_rate": 0.00014092693411586033, + "loss": 0.3318, + "step": 22950 + }, + { + "epoch": 0.8865207150855245, + "grad_norm": 4.2694993019104, + "learning_rate": 0.00014090119309625854, + "loss": 0.34, + "step": 22960 + }, + { + "epoch": 0.8869068303795513, + "grad_norm": 2.4169955253601074, + "learning_rate": 0.00014087545207665676, + "loss": 0.466, + "step": 22970 + }, + { + "epoch": 0.8872929456735781, + "grad_norm": 0.044384077191352844, + "learning_rate": 0.00014084971105705497, + "loss": 0.3196, + "step": 22980 + }, + { + "epoch": 0.887679060967605, + "grad_norm": 0.6931707262992859, + "learning_rate": 0.0001408239700374532, + "loss": 0.2166, + "step": 22990 + }, + { + "epoch": 0.8880651762616317, + "grad_norm": 3.195596933364868, + "learning_rate": 0.0001407982290178514, + "loss": 0.404, + "step": 23000 + }, + { + "epoch": 0.8884512915556585, + "grad_norm": 2.055058002471924, + "learning_rate": 0.0001407724879982496, + "loss": 0.3685, + "step": 23010 + }, + { + "epoch": 0.8888374068496853, + "grad_norm": 0.7302665114402771, + "learning_rate": 0.00014074674697864782, + "loss": 0.2247, + "step": 23020 + }, + { + "epoch": 0.8892235221437121, + "grad_norm": 1.1183364391326904, + "learning_rate": 0.00014072100595904604, + "loss": 0.3699, + "step": 23030 + }, + { + "epoch": 0.8896096374377389, + "grad_norm": 1.040553092956543, + "learning_rate": 0.00014069526493944425, + "loss": 0.1791, + "step": 23040 + }, + { + "epoch": 0.8899957527317657, + "grad_norm": 1.5321402549743652, + "learning_rate": 0.00014066952391984246, + "loss": 0.2533, + "step": 23050 + }, + { + "epoch": 0.8903818680257926, + "grad_norm": 0.298433780670166, + "learning_rate": 0.0001406437829002407, + "loss": 0.2291, + "step": 23060 + }, + { + "epoch": 0.8907679833198193, + "grad_norm": 2.563689947128296, + "learning_rate": 0.0001406180418806389, + "loss": 0.4175, + "step": 23070 + }, + { + "epoch": 0.8911540986138461, + "grad_norm": 3.0614495277404785, + "learning_rate": 0.00014059230086103713, + "loss": 0.2875, + "step": 23080 + }, + { + "epoch": 0.8915402139078729, + "grad_norm": 0.4387970268726349, + "learning_rate": 0.00014056655984143532, + "loss": 0.2982, + "step": 23090 + }, + { + "epoch": 0.8919263292018997, + "grad_norm": 2.5590367317199707, + "learning_rate": 0.00014054081882183353, + "loss": 0.2858, + "step": 23100 + }, + { + "epoch": 0.8923124444959265, + "grad_norm": 0.6369298696517944, + "learning_rate": 0.00014051507780223174, + "loss": 0.2889, + "step": 23110 + }, + { + "epoch": 0.8926985597899533, + "grad_norm": 0.5210187435150146, + "learning_rate": 0.00014048933678262996, + "loss": 0.2442, + "step": 23120 + }, + { + "epoch": 0.89308467508398, + "grad_norm": 3.5073516368865967, + "learning_rate": 0.0001404635957630282, + "loss": 0.1733, + "step": 23130 + }, + { + "epoch": 0.8934707903780069, + "grad_norm": 2.1915247440338135, + "learning_rate": 0.00014043785474342638, + "loss": 0.4282, + "step": 23140 + }, + { + "epoch": 0.8938569056720337, + "grad_norm": 0.8182128071784973, + "learning_rate": 0.00014041211372382462, + "loss": 0.5501, 
+ "step": 23150 + }, + { + "epoch": 0.8942430209660605, + "grad_norm": 0.4261817932128906, + "learning_rate": 0.0001403863727042228, + "loss": 0.2192, + "step": 23160 + }, + { + "epoch": 0.8946291362600872, + "grad_norm": 1.244523525238037, + "learning_rate": 0.00014036063168462102, + "loss": 0.2909, + "step": 23170 + }, + { + "epoch": 0.8950152515541141, + "grad_norm": 1.1659152507781982, + "learning_rate": 0.00014033489066501926, + "loss": 0.3119, + "step": 23180 + }, + { + "epoch": 0.8954013668481409, + "grad_norm": 0.4217310845851898, + "learning_rate": 0.00014030914964541745, + "loss": 0.2454, + "step": 23190 + }, + { + "epoch": 0.8957874821421676, + "grad_norm": 1.5685316324234009, + "learning_rate": 0.0001402834086258157, + "loss": 0.3609, + "step": 23200 + }, + { + "epoch": 0.8961735974361944, + "grad_norm": 2.6524040699005127, + "learning_rate": 0.00014025766760621388, + "loss": 0.2508, + "step": 23210 + }, + { + "epoch": 0.8965597127302213, + "grad_norm": 2.4932234287261963, + "learning_rate": 0.00014023192658661212, + "loss": 0.4047, + "step": 23220 + }, + { + "epoch": 0.896945828024248, + "grad_norm": 0.5093832015991211, + "learning_rate": 0.0001402061855670103, + "loss": 0.1639, + "step": 23230 + }, + { + "epoch": 0.8973319433182748, + "grad_norm": 1.1632994413375854, + "learning_rate": 0.00014018044454740852, + "loss": 0.2457, + "step": 23240 + }, + { + "epoch": 0.8977180586123016, + "grad_norm": 2.181727647781372, + "learning_rate": 0.00014015470352780676, + "loss": 0.3784, + "step": 23250 + }, + { + "epoch": 0.8981041739063285, + "grad_norm": 3.8110599517822266, + "learning_rate": 0.00014012896250820494, + "loss": 0.282, + "step": 23260 + }, + { + "epoch": 0.8984902892003552, + "grad_norm": 2.8994619846343994, + "learning_rate": 0.00014010322148860318, + "loss": 0.3801, + "step": 23270 + }, + { + "epoch": 0.898876404494382, + "grad_norm": 1.2624458074569702, + "learning_rate": 0.00014007748046900137, + "loss": 0.3718, + "step": 23280 + }, + { + "epoch": 0.8992625197884088, + "grad_norm": 1.5995053052902222, + "learning_rate": 0.0001400517394493996, + "loss": 0.1828, + "step": 23290 + }, + { + "epoch": 0.8996486350824356, + "grad_norm": 2.251941680908203, + "learning_rate": 0.0001400259984297978, + "loss": 0.4434, + "step": 23300 + }, + { + "epoch": 0.9000347503764624, + "grad_norm": 1.1319392919540405, + "learning_rate": 0.000140000257410196, + "loss": 0.2479, + "step": 23310 + }, + { + "epoch": 0.9004208656704892, + "grad_norm": 2.075227737426758, + "learning_rate": 0.00013997451639059425, + "loss": 0.3079, + "step": 23320 + }, + { + "epoch": 0.9008069809645161, + "grad_norm": 0.6504748463630676, + "learning_rate": 0.00013994877537099244, + "loss": 0.2899, + "step": 23330 + }, + { + "epoch": 0.9011930962585428, + "grad_norm": 2.1623177528381348, + "learning_rate": 0.00013992303435139068, + "loss": 0.2651, + "step": 23340 + }, + { + "epoch": 0.9015792115525696, + "grad_norm": 2.159290313720703, + "learning_rate": 0.00013989729333178886, + "loss": 0.2975, + "step": 23350 + }, + { + "epoch": 0.9019653268465964, + "grad_norm": 0.7650458216667175, + "learning_rate": 0.0001398715523121871, + "loss": 0.2699, + "step": 23360 + }, + { + "epoch": 0.9023514421406232, + "grad_norm": 4.838365077972412, + "learning_rate": 0.00013984581129258532, + "loss": 0.4633, + "step": 23370 + }, + { + "epoch": 0.90273755743465, + "grad_norm": 4.598055362701416, + "learning_rate": 0.0001398200702729835, + "loss": 0.5255, + "step": 23380 + }, + { + "epoch": 0.9031236727286768, + "grad_norm": 
0.9883280396461487, + "learning_rate": 0.00013979432925338174, + "loss": 0.5096, + "step": 23390 + }, + { + "epoch": 0.9035097880227035, + "grad_norm": 2.1574087142944336, + "learning_rate": 0.00013976858823377993, + "loss": 0.2999, + "step": 23400 + }, + { + "epoch": 0.9038959033167304, + "grad_norm": 1.7071588039398193, + "learning_rate": 0.00013974284721417817, + "loss": 0.4066, + "step": 23410 + }, + { + "epoch": 0.9042820186107572, + "grad_norm": 1.5206272602081299, + "learning_rate": 0.00013971710619457636, + "loss": 0.2841, + "step": 23420 + }, + { + "epoch": 0.904668133904784, + "grad_norm": 0.8649633526802063, + "learning_rate": 0.0001396913651749746, + "loss": 0.2263, + "step": 23430 + }, + { + "epoch": 0.9050542491988107, + "grad_norm": 0.35130754113197327, + "learning_rate": 0.0001396656241553728, + "loss": 0.3575, + "step": 23440 + }, + { + "epoch": 0.9054403644928376, + "grad_norm": 0.6659330725669861, + "learning_rate": 0.000139639883135771, + "loss": 0.2895, + "step": 23450 + }, + { + "epoch": 0.9058264797868644, + "grad_norm": 1.1387370824813843, + "learning_rate": 0.00013961414211616924, + "loss": 0.3574, + "step": 23460 + }, + { + "epoch": 0.9062125950808911, + "grad_norm": 1.8786828517913818, + "learning_rate": 0.00013958840109656742, + "loss": 0.5127, + "step": 23470 + }, + { + "epoch": 0.9065987103749179, + "grad_norm": 1.1299179792404175, + "learning_rate": 0.00013956266007696566, + "loss": 0.2215, + "step": 23480 + }, + { + "epoch": 0.9069848256689448, + "grad_norm": 1.1256846189498901, + "learning_rate": 0.00013953691905736388, + "loss": 0.302, + "step": 23490 + }, + { + "epoch": 0.9073709409629716, + "grad_norm": 0.8697860836982727, + "learning_rate": 0.0001395111780377621, + "loss": 0.2846, + "step": 23500 + }, + { + "epoch": 0.9077570562569983, + "grad_norm": 1.4939324855804443, + "learning_rate": 0.0001394854370181603, + "loss": 0.2376, + "step": 23510 + }, + { + "epoch": 0.9081431715510251, + "grad_norm": 0.5483170747756958, + "learning_rate": 0.00013945969599855852, + "loss": 0.2546, + "step": 23520 + }, + { + "epoch": 0.908529286845052, + "grad_norm": 1.0681931972503662, + "learning_rate": 0.00013943395497895673, + "loss": 0.2236, + "step": 23530 + }, + { + "epoch": 0.9089154021390787, + "grad_norm": 1.9246234893798828, + "learning_rate": 0.00013940821395935492, + "loss": 0.3332, + "step": 23540 + }, + { + "epoch": 0.9093015174331055, + "grad_norm": 6.114970684051514, + "learning_rate": 0.00013938247293975316, + "loss": 0.3265, + "step": 23550 + }, + { + "epoch": 0.9096876327271323, + "grad_norm": 2.369112968444824, + "learning_rate": 0.00013935673192015137, + "loss": 0.3105, + "step": 23560 + }, + { + "epoch": 0.9100737480211591, + "grad_norm": 4.402872562408447, + "learning_rate": 0.00013933099090054958, + "loss": 0.3496, + "step": 23570 + }, + { + "epoch": 0.9104598633151859, + "grad_norm": 0.6064890027046204, + "learning_rate": 0.0001393052498809478, + "loss": 0.1322, + "step": 23580 + }, + { + "epoch": 0.9108459786092127, + "grad_norm": 0.41702714562416077, + "learning_rate": 0.000139279508861346, + "loss": 0.0886, + "step": 23590 + }, + { + "epoch": 0.9112320939032396, + "grad_norm": 1.1597472429275513, + "learning_rate": 0.00013925376784174422, + "loss": 0.1967, + "step": 23600 + }, + { + "epoch": 0.9116182091972663, + "grad_norm": 1.1049001216888428, + "learning_rate": 0.0001392280268221424, + "loss": 0.2802, + "step": 23610 + }, + { + "epoch": 0.9120043244912931, + "grad_norm": 0.7986807227134705, + "learning_rate": 0.00013920228580254065, + "loss": 
0.1872, + "step": 23620 + }, + { + "epoch": 0.9123904397853199, + "grad_norm": 0.548693060874939, + "learning_rate": 0.00013917654478293886, + "loss": 0.3561, + "step": 23630 + }, + { + "epoch": 0.9127765550793467, + "grad_norm": 1.5944240093231201, + "learning_rate": 0.00013915080376333708, + "loss": 0.4619, + "step": 23640 + }, + { + "epoch": 0.9131626703733735, + "grad_norm": 1.9891632795333862, + "learning_rate": 0.0001391250627437353, + "loss": 0.3883, + "step": 23650 + }, + { + "epoch": 0.9135487856674003, + "grad_norm": 0.4564145803451538, + "learning_rate": 0.0001390993217241335, + "loss": 0.233, + "step": 23660 + }, + { + "epoch": 0.913934900961427, + "grad_norm": 1.1683684587478638, + "learning_rate": 0.00013907358070453172, + "loss": 0.4692, + "step": 23670 + }, + { + "epoch": 0.9143210162554539, + "grad_norm": 5.883500099182129, + "learning_rate": 0.00013904783968492993, + "loss": 0.2134, + "step": 23680 + }, + { + "epoch": 0.9147071315494807, + "grad_norm": 0.7426010370254517, + "learning_rate": 0.00013902209866532814, + "loss": 0.3608, + "step": 23690 + }, + { + "epoch": 0.9150932468435075, + "grad_norm": 1.5476068258285522, + "learning_rate": 0.00013899635764572636, + "loss": 0.2194, + "step": 23700 + }, + { + "epoch": 0.9154793621375342, + "grad_norm": 1.5702605247497559, + "learning_rate": 0.00013897061662612457, + "loss": 0.1731, + "step": 23710 + }, + { + "epoch": 0.9158654774315611, + "grad_norm": 2.336073637008667, + "learning_rate": 0.00013894487560652278, + "loss": 0.2703, + "step": 23720 + }, + { + "epoch": 0.9162515927255879, + "grad_norm": 0.4154629111289978, + "learning_rate": 0.000138919134586921, + "loss": 0.2601, + "step": 23730 + }, + { + "epoch": 0.9166377080196146, + "grad_norm": 1.9994091987609863, + "learning_rate": 0.0001388933935673192, + "loss": 0.4536, + "step": 23740 + }, + { + "epoch": 0.9170238233136414, + "grad_norm": 0.4610597491264343, + "learning_rate": 0.00013886765254771742, + "loss": 0.2726, + "step": 23750 + }, + { + "epoch": 0.9174099386076683, + "grad_norm": 2.19671893119812, + "learning_rate": 0.00013884191152811564, + "loss": 0.3418, + "step": 23760 + }, + { + "epoch": 0.917796053901695, + "grad_norm": 0.619023323059082, + "learning_rate": 0.00013881617050851385, + "loss": 0.2761, + "step": 23770 + }, + { + "epoch": 0.9181821691957218, + "grad_norm": 1.667083978652954, + "learning_rate": 0.00013879042948891206, + "loss": 0.2341, + "step": 23780 + }, + { + "epoch": 0.9185682844897486, + "grad_norm": 0.349020391702652, + "learning_rate": 0.00013876468846931028, + "loss": 0.244, + "step": 23790 + }, + { + "epoch": 0.9189543997837755, + "grad_norm": 3.2495415210723877, + "learning_rate": 0.0001387389474497085, + "loss": 0.2454, + "step": 23800 + }, + { + "epoch": 0.9193405150778022, + "grad_norm": 0.7900146842002869, + "learning_rate": 0.0001387132064301067, + "loss": 0.3209, + "step": 23810 + }, + { + "epoch": 0.919726630371829, + "grad_norm": 1.2435237169265747, + "learning_rate": 0.00013868746541050492, + "loss": 0.3719, + "step": 23820 + }, + { + "epoch": 0.9201127456658558, + "grad_norm": 0.7372536659240723, + "learning_rate": 0.00013866172439090313, + "loss": 0.2984, + "step": 23830 + }, + { + "epoch": 0.9204988609598826, + "grad_norm": 2.814180374145508, + "learning_rate": 0.00013863598337130134, + "loss": 0.5081, + "step": 23840 + }, + { + "epoch": 0.9208849762539094, + "grad_norm": 3.5411558151245117, + "learning_rate": 0.00013861024235169956, + "loss": 0.4475, + "step": 23850 + }, + { + "epoch": 0.9212710915479362, + "grad_norm": 
0.41628485918045044, + "learning_rate": 0.00013858450133209777, + "loss": 0.1679, + "step": 23860 + }, + { + "epoch": 0.9216572068419631, + "grad_norm": 0.7951272130012512, + "learning_rate": 0.00013855876031249598, + "loss": 0.4346, + "step": 23870 + }, + { + "epoch": 0.9220433221359898, + "grad_norm": 0.6857497692108154, + "learning_rate": 0.0001385330192928942, + "loss": 0.2262, + "step": 23880 + }, + { + "epoch": 0.9224294374300166, + "grad_norm": 2.732487678527832, + "learning_rate": 0.0001385072782732924, + "loss": 0.3198, + "step": 23890 + }, + { + "epoch": 0.9228155527240434, + "grad_norm": 0.18741728365421295, + "learning_rate": 0.00013848153725369062, + "loss": 0.2409, + "step": 23900 + }, + { + "epoch": 0.9232016680180702, + "grad_norm": 0.46343281865119934, + "learning_rate": 0.00013845579623408884, + "loss": 0.2379, + "step": 23910 + }, + { + "epoch": 0.923587783312097, + "grad_norm": 1.7090940475463867, + "learning_rate": 0.00013843005521448705, + "loss": 0.2274, + "step": 23920 + }, + { + "epoch": 0.9239738986061238, + "grad_norm": 0.2000303715467453, + "learning_rate": 0.00013840431419488526, + "loss": 0.5171, + "step": 23930 + }, + { + "epoch": 0.9243600139001505, + "grad_norm": 1.6152868270874023, + "learning_rate": 0.00013837857317528348, + "loss": 0.1348, + "step": 23940 + }, + { + "epoch": 0.9247461291941774, + "grad_norm": 1.5346245765686035, + "learning_rate": 0.0001383528321556817, + "loss": 0.3708, + "step": 23950 + }, + { + "epoch": 0.9251322444882042, + "grad_norm": 2.1073787212371826, + "learning_rate": 0.0001383270911360799, + "loss": 0.3694, + "step": 23960 + }, + { + "epoch": 0.925518359782231, + "grad_norm": 12.8298921585083, + "learning_rate": 0.00013830135011647812, + "loss": 0.269, + "step": 23970 + }, + { + "epoch": 0.9259044750762577, + "grad_norm": 0.43689021468162537, + "learning_rate": 0.00013827560909687633, + "loss": 0.3099, + "step": 23980 + }, + { + "epoch": 0.9262905903702846, + "grad_norm": 2.084096908569336, + "learning_rate": 0.00013824986807727457, + "loss": 0.4423, + "step": 23990 + }, + { + "epoch": 0.9266767056643114, + "grad_norm": 0.9367966651916504, + "learning_rate": 0.00013822412705767276, + "loss": 0.2202, + "step": 24000 + }, + { + "epoch": 0.9270628209583381, + "grad_norm": 0.14286178350448608, + "learning_rate": 0.00013819838603807097, + "loss": 0.2302, + "step": 24010 + }, + { + "epoch": 0.9274489362523649, + "grad_norm": 0.7110779285430908, + "learning_rate": 0.00013817264501846918, + "loss": 0.3598, + "step": 24020 + }, + { + "epoch": 0.9278350515463918, + "grad_norm": 2.352980136871338, + "learning_rate": 0.0001381469039988674, + "loss": 0.4493, + "step": 24030 + }, + { + "epoch": 0.9282211668404186, + "grad_norm": 2.2235450744628906, + "learning_rate": 0.0001381211629792656, + "loss": 0.547, + "step": 24040 + }, + { + "epoch": 0.9286072821344453, + "grad_norm": 2.4419260025024414, + "learning_rate": 0.00013809542195966382, + "loss": 0.4612, + "step": 24050 + }, + { + "epoch": 0.9289933974284721, + "grad_norm": 1.3784935474395752, + "learning_rate": 0.00013806968094006206, + "loss": 0.3996, + "step": 24060 + }, + { + "epoch": 0.929379512722499, + "grad_norm": 1.019810676574707, + "learning_rate": 0.00013804393992046025, + "loss": 0.5654, + "step": 24070 + }, + { + "epoch": 0.9297656280165257, + "grad_norm": 2.399096965789795, + "learning_rate": 0.00013801819890085846, + "loss": 0.3335, + "step": 24080 + }, + { + "epoch": 0.9301517433105525, + "grad_norm": 0.28834161162376404, + "learning_rate": 0.00013799245788125668, + 
"loss": 0.4946, + "step": 24090 + }, + { + "epoch": 0.9305378586045794, + "grad_norm": 0.10426662117242813, + "learning_rate": 0.0001379667168616549, + "loss": 0.3275, + "step": 24100 + }, + { + "epoch": 0.9309239738986061, + "grad_norm": 2.7809340953826904, + "learning_rate": 0.0001379409758420531, + "loss": 0.3549, + "step": 24110 + }, + { + "epoch": 0.9313100891926329, + "grad_norm": 1.8708065748214722, + "learning_rate": 0.00013791523482245132, + "loss": 0.1429, + "step": 24120 + }, + { + "epoch": 0.9316962044866597, + "grad_norm": 1.9992274045944214, + "learning_rate": 0.00013788949380284956, + "loss": 0.2818, + "step": 24130 + }, + { + "epoch": 0.9320823197806866, + "grad_norm": 1.3665111064910889, + "learning_rate": 0.00013786375278324774, + "loss": 0.3565, + "step": 24140 + }, + { + "epoch": 0.9324684350747133, + "grad_norm": 3.106234550476074, + "learning_rate": 0.00013783801176364596, + "loss": 0.3706, + "step": 24150 + }, + { + "epoch": 0.9328545503687401, + "grad_norm": 1.2186559438705444, + "learning_rate": 0.00013781227074404417, + "loss": 0.1616, + "step": 24160 + }, + { + "epoch": 0.9332406656627669, + "grad_norm": 0.41551148891448975, + "learning_rate": 0.00013778652972444238, + "loss": 0.3073, + "step": 24170 + }, + { + "epoch": 0.9336267809567937, + "grad_norm": 2.015069007873535, + "learning_rate": 0.00013776078870484062, + "loss": 0.3317, + "step": 24180 + }, + { + "epoch": 0.9340128962508205, + "grad_norm": 1.9990328550338745, + "learning_rate": 0.0001377350476852388, + "loss": 0.4937, + "step": 24190 + }, + { + "epoch": 0.9343990115448473, + "grad_norm": 2.351898670196533, + "learning_rate": 0.00013770930666563705, + "loss": 0.3994, + "step": 24200 + }, + { + "epoch": 0.934785126838874, + "grad_norm": 1.4670008420944214, + "learning_rate": 0.00013768356564603524, + "loss": 0.2905, + "step": 24210 + }, + { + "epoch": 0.9351712421329009, + "grad_norm": 0.9890618920326233, + "learning_rate": 0.00013765782462643345, + "loss": 0.2512, + "step": 24220 + }, + { + "epoch": 0.9355573574269277, + "grad_norm": 0.3020402491092682, + "learning_rate": 0.00013763208360683166, + "loss": 0.3701, + "step": 24230 + }, + { + "epoch": 0.9359434727209545, + "grad_norm": 0.42218661308288574, + "learning_rate": 0.00013760634258722988, + "loss": 0.3395, + "step": 24240 + }, + { + "epoch": 0.9363295880149812, + "grad_norm": 1.5767306089401245, + "learning_rate": 0.00013758060156762812, + "loss": 0.3941, + "step": 24250 + }, + { + "epoch": 0.9367157033090081, + "grad_norm": 0.5416197180747986, + "learning_rate": 0.0001375548605480263, + "loss": 0.2915, + "step": 24260 + }, + { + "epoch": 0.9371018186030349, + "grad_norm": 1.175347924232483, + "learning_rate": 0.00013752911952842454, + "loss": 0.1284, + "step": 24270 + }, + { + "epoch": 0.9374879338970616, + "grad_norm": 0.8719255924224854, + "learning_rate": 0.00013750337850882273, + "loss": 0.3144, + "step": 24280 + }, + { + "epoch": 0.9378740491910884, + "grad_norm": 0.2711631655693054, + "learning_rate": 0.00013747763748922097, + "loss": 0.1561, + "step": 24290 + }, + { + "epoch": 0.9382601644851153, + "grad_norm": 3.2228004932403564, + "learning_rate": 0.00013745189646961918, + "loss": 0.5494, + "step": 24300 + }, + { + "epoch": 0.9386462797791421, + "grad_norm": 0.05647150054574013, + "learning_rate": 0.00013742615545001737, + "loss": 0.3571, + "step": 24310 + }, + { + "epoch": 0.9390323950731688, + "grad_norm": 0.8459005951881409, + "learning_rate": 0.0001374004144304156, + "loss": 0.1948, + "step": 24320 + }, + { + "epoch": 
0.9394185103671956, + "grad_norm": 0.7044252157211304, + "learning_rate": 0.0001373746734108138, + "loss": 0.2511, + "step": 24330 + }, + { + "epoch": 0.9398046256612225, + "grad_norm": 0.979590654373169, + "learning_rate": 0.00013734893239121204, + "loss": 0.2158, + "step": 24340 + }, + { + "epoch": 0.9401907409552492, + "grad_norm": 1.0585628747940063, + "learning_rate": 0.00013732319137161022, + "loss": 0.1339, + "step": 24350 + }, + { + "epoch": 0.940576856249276, + "grad_norm": 0.6700488328933716, + "learning_rate": 0.00013729745035200846, + "loss": 0.2056, + "step": 24360 + }, + { + "epoch": 0.9409629715433029, + "grad_norm": 1.6656709909439087, + "learning_rate": 0.00013727170933240668, + "loss": 0.2957, + "step": 24370 + }, + { + "epoch": 0.9413490868373297, + "grad_norm": 2.963427782058716, + "learning_rate": 0.00013724596831280486, + "loss": 0.2673, + "step": 24380 + }, + { + "epoch": 0.9417352021313564, + "grad_norm": 0.8103615641593933, + "learning_rate": 0.0001372202272932031, + "loss": 0.372, + "step": 24390 + }, + { + "epoch": 0.9421213174253832, + "grad_norm": 2.3593873977661133, + "learning_rate": 0.0001371944862736013, + "loss": 0.431, + "step": 24400 + }, + { + "epoch": 0.9425074327194101, + "grad_norm": 2.8472931385040283, + "learning_rate": 0.00013716874525399953, + "loss": 0.4584, + "step": 24410 + }, + { + "epoch": 0.9428935480134368, + "grad_norm": 1.2765402793884277, + "learning_rate": 0.00013714300423439772, + "loss": 0.2543, + "step": 24420 + }, + { + "epoch": 0.9432796633074636, + "grad_norm": 1.4226797819137573, + "learning_rate": 0.00013711726321479596, + "loss": 0.2914, + "step": 24430 + }, + { + "epoch": 0.9436657786014904, + "grad_norm": 6.906572341918945, + "learning_rate": 0.00013709152219519417, + "loss": 0.4415, + "step": 24440 + }, + { + "epoch": 0.9440518938955172, + "grad_norm": 1.8387972116470337, + "learning_rate": 0.00013706578117559236, + "loss": 0.3018, + "step": 24450 + }, + { + "epoch": 0.944438009189544, + "grad_norm": 0.7259104251861572, + "learning_rate": 0.0001370400401559906, + "loss": 0.2356, + "step": 24460 + }, + { + "epoch": 0.9448241244835708, + "grad_norm": 0.6452949643135071, + "learning_rate": 0.00013701429913638878, + "loss": 0.2382, + "step": 24470 + }, + { + "epoch": 0.9452102397775975, + "grad_norm": 4.259208679199219, + "learning_rate": 0.00013698855811678702, + "loss": 0.4736, + "step": 24480 + }, + { + "epoch": 0.9455963550716244, + "grad_norm": 2.7305455207824707, + "learning_rate": 0.00013696281709718524, + "loss": 0.5034, + "step": 24490 + }, + { + "epoch": 0.9459824703656512, + "grad_norm": 0.6123724579811096, + "learning_rate": 0.00013693707607758345, + "loss": 0.3638, + "step": 24500 + }, + { + "epoch": 0.946368585659678, + "grad_norm": 0.9821889400482178, + "learning_rate": 0.00013691133505798166, + "loss": 0.3081, + "step": 24510 + }, + { + "epoch": 0.9467547009537047, + "grad_norm": 2.0870277881622314, + "learning_rate": 0.00013688559403837985, + "loss": 0.3116, + "step": 24520 + }, + { + "epoch": 0.9471408162477316, + "grad_norm": 2.495162010192871, + "learning_rate": 0.0001368598530187781, + "loss": 0.3879, + "step": 24530 + }, + { + "epoch": 0.9475269315417584, + "grad_norm": 1.4834142923355103, + "learning_rate": 0.00013683411199917628, + "loss": 0.1975, + "step": 24540 + }, + { + "epoch": 0.9479130468357851, + "grad_norm": 0.10831606388092041, + "learning_rate": 0.00013680837097957452, + "loss": 0.3504, + "step": 24550 + }, + { + "epoch": 0.9482991621298119, + "grad_norm": 1.6975635290145874, + 
"learning_rate": 0.00013678262995997273, + "loss": 0.3305, + "step": 24560 + }, + { + "epoch": 0.9486852774238388, + "grad_norm": 1.982422947883606, + "learning_rate": 0.00013675688894037094, + "loss": 0.3693, + "step": 24570 + }, + { + "epoch": 0.9490713927178656, + "grad_norm": 0.3487630784511566, + "learning_rate": 0.00013673114792076916, + "loss": 0.1912, + "step": 24580 + }, + { + "epoch": 0.9494575080118923, + "grad_norm": 0.4546245336532593, + "learning_rate": 0.00013670540690116734, + "loss": 0.2519, + "step": 24590 + }, + { + "epoch": 0.9498436233059191, + "grad_norm": 0.42953622341156006, + "learning_rate": 0.00013667966588156558, + "loss": 0.1967, + "step": 24600 + }, + { + "epoch": 0.950229738599946, + "grad_norm": 0.8197507858276367, + "learning_rate": 0.00013665392486196377, + "loss": 0.3057, + "step": 24610 + }, + { + "epoch": 0.9506158538939727, + "grad_norm": 0.8627083897590637, + "learning_rate": 0.000136628183842362, + "loss": 0.1382, + "step": 24620 + }, + { + "epoch": 0.9510019691879995, + "grad_norm": 1.0003200769424438, + "learning_rate": 0.00013660244282276022, + "loss": 0.1608, + "step": 24630 + }, + { + "epoch": 0.9513880844820264, + "grad_norm": 0.4473998546600342, + "learning_rate": 0.00013657670180315844, + "loss": 0.2226, + "step": 24640 + }, + { + "epoch": 0.9517741997760532, + "grad_norm": 1.9413338899612427, + "learning_rate": 0.00013655096078355665, + "loss": 0.2323, + "step": 24650 + }, + { + "epoch": 0.9521603150700799, + "grad_norm": 1.3250267505645752, + "learning_rate": 0.00013652521976395483, + "loss": 0.1617, + "step": 24660 + }, + { + "epoch": 0.9525464303641067, + "grad_norm": 1.756535530090332, + "learning_rate": 0.00013649947874435308, + "loss": 0.2727, + "step": 24670 + }, + { + "epoch": 0.9529325456581336, + "grad_norm": 1.1905356645584106, + "learning_rate": 0.0001364737377247513, + "loss": 0.3169, + "step": 24680 + }, + { + "epoch": 0.9533186609521603, + "grad_norm": 0.8116361498832703, + "learning_rate": 0.0001364479967051495, + "loss": 0.3999, + "step": 24690 + }, + { + "epoch": 0.9537047762461871, + "grad_norm": 0.8906353712081909, + "learning_rate": 0.00013642225568554772, + "loss": 0.3499, + "step": 24700 + }, + { + "epoch": 0.9540908915402139, + "grad_norm": 0.7422589659690857, + "learning_rate": 0.00013639651466594593, + "loss": 0.3544, + "step": 24710 + }, + { + "epoch": 0.9544770068342407, + "grad_norm": 0.21925519406795502, + "learning_rate": 0.00013637077364634414, + "loss": 0.1957, + "step": 24720 + }, + { + "epoch": 0.9548631221282675, + "grad_norm": 3.2993857860565186, + "learning_rate": 0.00013634503262674236, + "loss": 0.2625, + "step": 24730 + }, + { + "epoch": 0.9552492374222943, + "grad_norm": 1.4352943897247314, + "learning_rate": 0.00013631929160714057, + "loss": 0.1655, + "step": 24740 + }, + { + "epoch": 0.955635352716321, + "grad_norm": 1.4417182207107544, + "learning_rate": 0.00013629355058753878, + "loss": 0.2442, + "step": 24750 + }, + { + "epoch": 0.9560214680103479, + "grad_norm": 0.5024278163909912, + "learning_rate": 0.000136267809567937, + "loss": 0.2035, + "step": 24760 + }, + { + "epoch": 0.9564075833043747, + "grad_norm": 1.553472638130188, + "learning_rate": 0.0001362420685483352, + "loss": 0.22, + "step": 24770 + }, + { + "epoch": 0.9567936985984015, + "grad_norm": 1.655155897140503, + "learning_rate": 0.00013621632752873342, + "loss": 0.4051, + "step": 24780 + }, + { + "epoch": 0.9571798138924282, + "grad_norm": 0.4435586631298065, + "learning_rate": 0.00013619058650913164, + "loss": 0.3032, + "step": 
24790 + }, + { + "epoch": 0.9575659291864551, + "grad_norm": 0.38589465618133545, + "learning_rate": 0.00013616484548952985, + "loss": 0.2062, + "step": 24800 + }, + { + "epoch": 0.9579520444804819, + "grad_norm": 0.6986583471298218, + "learning_rate": 0.00013613910446992806, + "loss": 0.6371, + "step": 24810 + }, + { + "epoch": 0.9583381597745086, + "grad_norm": 1.69257652759552, + "learning_rate": 0.00013611336345032627, + "loss": 0.2425, + "step": 24820 + }, + { + "epoch": 0.9587242750685354, + "grad_norm": 0.6136781573295593, + "learning_rate": 0.0001360876224307245, + "loss": 0.313, + "step": 24830 + }, + { + "epoch": 0.9591103903625623, + "grad_norm": 0.9019533395767212, + "learning_rate": 0.0001360618814111227, + "loss": 0.3723, + "step": 24840 + }, + { + "epoch": 0.9594965056565891, + "grad_norm": 4.601387977600098, + "learning_rate": 0.00013603614039152091, + "loss": 0.4228, + "step": 24850 + }, + { + "epoch": 0.9598826209506158, + "grad_norm": 0.35048994421958923, + "learning_rate": 0.00013601039937191913, + "loss": 0.2423, + "step": 24860 + }, + { + "epoch": 0.9602687362446426, + "grad_norm": 1.7177300453186035, + "learning_rate": 0.00013598465835231734, + "loss": 0.3995, + "step": 24870 + }, + { + "epoch": 0.9606548515386695, + "grad_norm": 1.3341178894042969, + "learning_rate": 0.00013595891733271555, + "loss": 0.3422, + "step": 24880 + }, + { + "epoch": 0.9610409668326962, + "grad_norm": 1.1859056949615479, + "learning_rate": 0.00013593317631311377, + "loss": 0.4204, + "step": 24890 + }, + { + "epoch": 0.961427082126723, + "grad_norm": 1.9831022024154663, + "learning_rate": 0.00013590743529351198, + "loss": 0.3623, + "step": 24900 + }, + { + "epoch": 0.9618131974207499, + "grad_norm": 0.8704162240028381, + "learning_rate": 0.0001358816942739102, + "loss": 0.2664, + "step": 24910 + }, + { + "epoch": 0.9621993127147767, + "grad_norm": 2.2464160919189453, + "learning_rate": 0.0001358559532543084, + "loss": 0.3628, + "step": 24920 + }, + { + "epoch": 0.9625854280088034, + "grad_norm": 2.5081570148468018, + "learning_rate": 0.00013583021223470662, + "loss": 0.314, + "step": 24930 + }, + { + "epoch": 0.9629715433028302, + "grad_norm": 4.44802713394165, + "learning_rate": 0.00013580447121510483, + "loss": 0.4593, + "step": 24940 + }, + { + "epoch": 0.9633576585968571, + "grad_norm": 2.0449907779693604, + "learning_rate": 0.00013577873019550305, + "loss": 0.3941, + "step": 24950 + }, + { + "epoch": 0.9637437738908838, + "grad_norm": 2.090315818786621, + "learning_rate": 0.00013575298917590126, + "loss": 0.3713, + "step": 24960 + }, + { + "epoch": 0.9641298891849106, + "grad_norm": 1.0789872407913208, + "learning_rate": 0.00013572724815629947, + "loss": 0.2411, + "step": 24970 + }, + { + "epoch": 0.9645160044789374, + "grad_norm": 2.0463109016418457, + "learning_rate": 0.0001357015071366977, + "loss": 0.2935, + "step": 24980 + }, + { + "epoch": 0.9649021197729643, + "grad_norm": 1.331299901008606, + "learning_rate": 0.00013567576611709593, + "loss": 0.2162, + "step": 24990 + }, + { + "epoch": 0.965288235066991, + "grad_norm": 3.3949038982391357, + "learning_rate": 0.00013565002509749411, + "loss": 0.4593, + "step": 25000 + }, + { + "epoch": 0.9656743503610178, + "grad_norm": 2.4510934352874756, + "learning_rate": 0.00013562428407789233, + "loss": 0.2476, + "step": 25010 + }, + { + "epoch": 0.9660604656550446, + "grad_norm": 1.1333917379379272, + "learning_rate": 0.00013559854305829054, + "loss": 0.3035, + "step": 25020 + }, + { + "epoch": 0.9664465809490714, + "grad_norm": 
2.5525829792022705, + "learning_rate": 0.00013557280203868875, + "loss": 0.4498, + "step": 25030 + }, + { + "epoch": 0.9668326962430982, + "grad_norm": 1.4862838983535767, + "learning_rate": 0.00013554706101908697, + "loss": 0.3109, + "step": 25040 + }, + { + "epoch": 0.967218811537125, + "grad_norm": 1.0053727626800537, + "learning_rate": 0.00013552131999948518, + "loss": 0.3029, + "step": 25050 + }, + { + "epoch": 0.9676049268311517, + "grad_norm": 1.4033957719802856, + "learning_rate": 0.00013549557897988342, + "loss": 0.3096, + "step": 25060 + }, + { + "epoch": 0.9679910421251786, + "grad_norm": 2.1944756507873535, + "learning_rate": 0.0001354698379602816, + "loss": 0.3428, + "step": 25070 + }, + { + "epoch": 0.9683771574192054, + "grad_norm": 3.330143928527832, + "learning_rate": 0.00013544409694067982, + "loss": 0.4026, + "step": 25080 + }, + { + "epoch": 0.9687632727132321, + "grad_norm": 1.9337730407714844, + "learning_rate": 0.00013541835592107803, + "loss": 0.5343, + "step": 25090 + }, + { + "epoch": 0.9691493880072589, + "grad_norm": 4.203855514526367, + "learning_rate": 0.00013539261490147625, + "loss": 0.4461, + "step": 25100 + }, + { + "epoch": 0.9695355033012858, + "grad_norm": 0.6582885980606079, + "learning_rate": 0.0001353668738818745, + "loss": 0.2719, + "step": 25110 + }, + { + "epoch": 0.9699216185953126, + "grad_norm": 0.8788600564002991, + "learning_rate": 0.00013534113286227267, + "loss": 0.4335, + "step": 25120 + }, + { + "epoch": 0.9703077338893393, + "grad_norm": 1.3793160915374756, + "learning_rate": 0.00013531539184267091, + "loss": 0.3126, + "step": 25130 + }, + { + "epoch": 0.9706938491833662, + "grad_norm": 3.5996806621551514, + "learning_rate": 0.0001352896508230691, + "loss": 0.5171, + "step": 25140 + }, + { + "epoch": 0.971079964477393, + "grad_norm": 1.6220872402191162, + "learning_rate": 0.00013526390980346731, + "loss": 0.3992, + "step": 25150 + }, + { + "epoch": 0.9714660797714197, + "grad_norm": 1.8351634740829468, + "learning_rate": 0.00013523816878386553, + "loss": 0.3159, + "step": 25160 + }, + { + "epoch": 0.9718521950654465, + "grad_norm": 0.6400974988937378, + "learning_rate": 0.00013521242776426374, + "loss": 0.3187, + "step": 25170 + }, + { + "epoch": 0.9722383103594734, + "grad_norm": 1.3507485389709473, + "learning_rate": 0.00013518668674466198, + "loss": 0.3626, + "step": 25180 + }, + { + "epoch": 0.9726244256535002, + "grad_norm": 1.4778717756271362, + "learning_rate": 0.00013516094572506017, + "loss": 0.3467, + "step": 25190 + }, + { + "epoch": 0.9730105409475269, + "grad_norm": 0.4346179664134979, + "learning_rate": 0.0001351352047054584, + "loss": 0.1995, + "step": 25200 + }, + { + "epoch": 0.9733966562415537, + "grad_norm": 2.8404130935668945, + "learning_rate": 0.0001351094636858566, + "loss": 0.4765, + "step": 25210 + }, + { + "epoch": 0.9737827715355806, + "grad_norm": 0.044492240995168686, + "learning_rate": 0.0001350837226662548, + "loss": 0.3047, + "step": 25220 + }, + { + "epoch": 0.9741688868296073, + "grad_norm": 2.258355140686035, + "learning_rate": 0.00013505798164665302, + "loss": 0.234, + "step": 25230 + }, + { + "epoch": 0.9745550021236341, + "grad_norm": 0.23794110119342804, + "learning_rate": 0.00013503224062705123, + "loss": 0.5178, + "step": 25240 + }, + { + "epoch": 0.9749411174176609, + "grad_norm": 0.5849624872207642, + "learning_rate": 0.00013500649960744947, + "loss": 0.3419, + "step": 25250 + }, + { + "epoch": 0.9753272327116878, + "grad_norm": 1.535228967666626, + "learning_rate": 0.00013498075858784766, + 
"loss": 0.275, + "step": 25260 + }, + { + "epoch": 0.9757133480057145, + "grad_norm": 4.943759441375732, + "learning_rate": 0.0001349550175682459, + "loss": 0.241, + "step": 25270 + }, + { + "epoch": 0.9760994632997413, + "grad_norm": 1.3046916723251343, + "learning_rate": 0.0001349292765486441, + "loss": 0.2453, + "step": 25280 + }, + { + "epoch": 0.976485578593768, + "grad_norm": 0.20991156995296478, + "learning_rate": 0.0001349035355290423, + "loss": 0.392, + "step": 25290 + }, + { + "epoch": 0.9768716938877949, + "grad_norm": 1.2106267213821411, + "learning_rate": 0.00013487779450944054, + "loss": 0.284, + "step": 25300 + }, + { + "epoch": 0.9772578091818217, + "grad_norm": 0.28197771310806274, + "learning_rate": 0.00013485205348983873, + "loss": 0.2596, + "step": 25310 + }, + { + "epoch": 0.9776439244758485, + "grad_norm": 0.8538393378257751, + "learning_rate": 0.00013482631247023697, + "loss": 0.2519, + "step": 25320 + }, + { + "epoch": 0.9780300397698752, + "grad_norm": 1.9520586729049683, + "learning_rate": 0.00013480057145063515, + "loss": 0.2581, + "step": 25330 + }, + { + "epoch": 0.9784161550639021, + "grad_norm": 0.7613987326622009, + "learning_rate": 0.0001347748304310334, + "loss": 0.1549, + "step": 25340 + }, + { + "epoch": 0.9788022703579289, + "grad_norm": 0.2045626938343048, + "learning_rate": 0.00013474908941143158, + "loss": 0.2587, + "step": 25350 + }, + { + "epoch": 0.9791883856519557, + "grad_norm": 1.634488821029663, + "learning_rate": 0.0001347233483918298, + "loss": 0.2266, + "step": 25360 + }, + { + "epoch": 0.9795745009459824, + "grad_norm": 0.6475266814231873, + "learning_rate": 0.00013469760737222803, + "loss": 0.2548, + "step": 25370 + }, + { + "epoch": 0.9799606162400093, + "grad_norm": 0.25982800126075745, + "learning_rate": 0.00013467186635262622, + "loss": 0.3398, + "step": 25380 + }, + { + "epoch": 0.9803467315340361, + "grad_norm": 2.3229706287384033, + "learning_rate": 0.00013464612533302446, + "loss": 0.3468, + "step": 25390 + }, + { + "epoch": 0.9807328468280628, + "grad_norm": 0.6465128660202026, + "learning_rate": 0.00013462038431342265, + "loss": 0.2716, + "step": 25400 + }, + { + "epoch": 0.9811189621220897, + "grad_norm": 1.413368821144104, + "learning_rate": 0.0001345946432938209, + "loss": 0.2508, + "step": 25410 + }, + { + "epoch": 0.9815050774161165, + "grad_norm": 0.33577996492385864, + "learning_rate": 0.00013456890227421907, + "loss": 0.3323, + "step": 25420 + }, + { + "epoch": 0.9818911927101432, + "grad_norm": 0.7601230144500732, + "learning_rate": 0.0001345431612546173, + "loss": 0.2682, + "step": 25430 + }, + { + "epoch": 0.98227730800417, + "grad_norm": 4.296112060546875, + "learning_rate": 0.00013451742023501553, + "loss": 0.2886, + "step": 25440 + }, + { + "epoch": 0.9826634232981969, + "grad_norm": 1.2557302713394165, + "learning_rate": 0.00013449167921541371, + "loss": 0.2863, + "step": 25450 + }, + { + "epoch": 0.9830495385922237, + "grad_norm": 0.6168705821037292, + "learning_rate": 0.00013446593819581195, + "loss": 0.195, + "step": 25460 + }, + { + "epoch": 0.9834356538862504, + "grad_norm": 1.9064022302627563, + "learning_rate": 0.00013444019717621014, + "loss": 0.2898, + "step": 25470 + }, + { + "epoch": 0.9838217691802772, + "grad_norm": 0.3549353778362274, + "learning_rate": 0.00013441445615660838, + "loss": 0.1761, + "step": 25480 + }, + { + "epoch": 0.9842078844743041, + "grad_norm": 0.6308786869049072, + "learning_rate": 0.0001343887151370066, + "loss": 0.2169, + "step": 25490 + }, + { + "epoch": 0.9845939997683308, + 
"grad_norm": 0.9449920058250427, + "learning_rate": 0.0001343629741174048, + "loss": 0.2952, + "step": 25500 + }, + { + "epoch": 0.9849801150623576, + "grad_norm": 1.6993355751037598, + "learning_rate": 0.00013433723309780302, + "loss": 0.3745, + "step": 25510 + }, + { + "epoch": 0.9853662303563844, + "grad_norm": 0.8093920350074768, + "learning_rate": 0.0001343114920782012, + "loss": 0.1725, + "step": 25520 + }, + { + "epoch": 0.9857523456504113, + "grad_norm": 1.4968618154525757, + "learning_rate": 0.00013428575105859945, + "loss": 0.2843, + "step": 25530 + }, + { + "epoch": 0.986138460944438, + "grad_norm": 0.37341028451919556, + "learning_rate": 0.00013426001003899763, + "loss": 0.2462, + "step": 25540 + }, + { + "epoch": 0.9865245762384648, + "grad_norm": 2.0109541416168213, + "learning_rate": 0.00013423426901939587, + "loss": 0.4046, + "step": 25550 + }, + { + "epoch": 0.9869106915324916, + "grad_norm": 2.540151357650757, + "learning_rate": 0.0001342085279997941, + "loss": 0.4331, + "step": 25560 + }, + { + "epoch": 0.9872968068265184, + "grad_norm": 0.9178367257118225, + "learning_rate": 0.0001341827869801923, + "loss": 0.3849, + "step": 25570 + }, + { + "epoch": 0.9876829221205452, + "grad_norm": 1.4229514598846436, + "learning_rate": 0.00013415704596059051, + "loss": 0.326, + "step": 25580 + }, + { + "epoch": 0.988069037414572, + "grad_norm": 0.7699927091598511, + "learning_rate": 0.0001341313049409887, + "loss": 0.237, + "step": 25590 + }, + { + "epoch": 0.9884551527085987, + "grad_norm": 0.3460877239704132, + "learning_rate": 0.00013410556392138694, + "loss": 0.2679, + "step": 25600 + }, + { + "epoch": 0.9888412680026256, + "grad_norm": 0.25363796949386597, + "learning_rate": 0.00013407982290178515, + "loss": 0.1746, + "step": 25610 + }, + { + "epoch": 0.9892273832966524, + "grad_norm": 1.5607961416244507, + "learning_rate": 0.00013405408188218337, + "loss": 0.6328, + "step": 25620 + }, + { + "epoch": 0.9896134985906792, + "grad_norm": 2.3751626014709473, + "learning_rate": 0.00013402834086258158, + "loss": 0.3989, + "step": 25630 + }, + { + "epoch": 0.9899996138847059, + "grad_norm": 0.20423173904418945, + "learning_rate": 0.0001340025998429798, + "loss": 0.2836, + "step": 25640 + }, + { + "epoch": 0.9903857291787328, + "grad_norm": 1.4207524061203003, + "learning_rate": 0.000133976858823378, + "loss": 0.348, + "step": 25650 + }, + { + "epoch": 0.9907718444727596, + "grad_norm": 0.12217597663402557, + "learning_rate": 0.0001339511178037762, + "loss": 0.3515, + "step": 25660 + }, + { + "epoch": 0.9911579597667863, + "grad_norm": 0.9259626269340515, + "learning_rate": 0.00013392537678417443, + "loss": 0.3039, + "step": 25670 + }, + { + "epoch": 0.9915440750608132, + "grad_norm": 0.5700181722640991, + "learning_rate": 0.00013389963576457265, + "loss": 0.3266, + "step": 25680 + }, + { + "epoch": 0.99193019035484, + "grad_norm": 1.9731560945510864, + "learning_rate": 0.00013387389474497086, + "loss": 0.2534, + "step": 25690 + }, + { + "epoch": 0.9923163056488667, + "grad_norm": 1.3714967966079712, + "learning_rate": 0.00013384815372536907, + "loss": 0.298, + "step": 25700 + }, + { + "epoch": 0.9927024209428935, + "grad_norm": 1.2377171516418457, + "learning_rate": 0.0001338224127057673, + "loss": 0.2948, + "step": 25710 + }, + { + "epoch": 0.9930885362369204, + "grad_norm": 0.4711095690727234, + "learning_rate": 0.0001337966716861655, + "loss": 0.2295, + "step": 25720 + }, + { + "epoch": 0.9934746515309472, + "grad_norm": 0.46715909242630005, + "learning_rate": 
0.00013377093066656369, + "loss": 0.3331, + "step": 25730 + }, + { + "epoch": 0.9938607668249739, + "grad_norm": 1.1080710887908936, + "learning_rate": 0.00013374518964696193, + "loss": 0.4324, + "step": 25740 + }, + { + "epoch": 0.9942468821190007, + "grad_norm": 2.3581650257110596, + "learning_rate": 0.00013371944862736014, + "loss": 0.372, + "step": 25750 + }, + { + "epoch": 0.9946329974130276, + "grad_norm": 2.448678970336914, + "learning_rate": 0.00013369370760775835, + "loss": 0.3396, + "step": 25760 + }, + { + "epoch": 0.9950191127070543, + "grad_norm": 1.4270198345184326, + "learning_rate": 0.00013366796658815657, + "loss": 0.4511, + "step": 25770 + }, + { + "epoch": 0.9954052280010811, + "grad_norm": 0.6360304951667786, + "learning_rate": 0.00013364222556855478, + "loss": 0.2427, + "step": 25780 + }, + { + "epoch": 0.9957913432951079, + "grad_norm": 2.1653332710266113, + "learning_rate": 0.000133616484548953, + "loss": 0.3057, + "step": 25790 + }, + { + "epoch": 0.9961774585891348, + "grad_norm": 2.952923536300659, + "learning_rate": 0.0001335907435293512, + "loss": 0.3076, + "step": 25800 + }, + { + "epoch": 0.9965635738831615, + "grad_norm": 0.5913527607917786, + "learning_rate": 0.00013356500250974942, + "loss": 0.399, + "step": 25810 + }, + { + "epoch": 0.9969496891771883, + "grad_norm": 0.4653400480747223, + "learning_rate": 0.00013353926149014763, + "loss": 0.2452, + "step": 25820 + }, + { + "epoch": 0.9973358044712151, + "grad_norm": 6.321722984313965, + "learning_rate": 0.00013351352047054585, + "loss": 0.3644, + "step": 25830 + }, + { + "epoch": 0.9977219197652419, + "grad_norm": 1.9944865703582764, + "learning_rate": 0.00013348777945094406, + "loss": 0.2915, + "step": 25840 + }, + { + "epoch": 0.9981080350592687, + "grad_norm": 1.1486843824386597, + "learning_rate": 0.00013346203843134227, + "loss": 0.3787, + "step": 25850 + }, + { + "epoch": 0.9984941503532955, + "grad_norm": 0.11532440781593323, + "learning_rate": 0.0001334362974117405, + "loss": 0.3053, + "step": 25860 + }, + { + "epoch": 0.9988802656473222, + "grad_norm": 1.3178479671478271, + "learning_rate": 0.0001334105563921387, + "loss": 0.2917, + "step": 25870 + }, + { + "epoch": 0.9992663809413491, + "grad_norm": 0.3511134088039398, + "learning_rate": 0.0001333848153725369, + "loss": 0.2657, + "step": 25880 + }, + { + "epoch": 0.9996524962353759, + "grad_norm": 1.2761729955673218, + "learning_rate": 0.00013335907435293513, + "loss": 0.3913, + "step": 25890 + }, + { + "epoch": 1.0000386115294027, + "grad_norm": 2.540947437286377, + "learning_rate": 0.00013333333333333334, + "loss": 0.3016, + "step": 25900 + }, + { + "epoch": 1.0004247268234294, + "grad_norm": 0.5865538120269775, + "learning_rate": 0.00013330759231373155, + "loss": 0.4184, + "step": 25910 + }, + { + "epoch": 1.0008108421174562, + "grad_norm": 2.7305166721343994, + "learning_rate": 0.00013328185129412977, + "loss": 0.3529, + "step": 25920 + }, + { + "epoch": 1.0011969574114832, + "grad_norm": 1.4203829765319824, + "learning_rate": 0.00013325611027452798, + "loss": 0.3345, + "step": 25930 + }, + { + "epoch": 1.00158307270551, + "grad_norm": 0.7687380313873291, + "learning_rate": 0.0001332303692549262, + "loss": 0.2622, + "step": 25940 + }, + { + "epoch": 1.0019691879995367, + "grad_norm": 0.4958217144012451, + "learning_rate": 0.0001332046282353244, + "loss": 0.3935, + "step": 25950 + }, + { + "epoch": 1.0023553032935635, + "grad_norm": 0.27102500200271606, + "learning_rate": 0.00013317888721572262, + "loss": 0.2642, + "step": 25960 + }, + { + 
"epoch": 1.0027414185875902, + "grad_norm": 0.6760912537574768, + "learning_rate": 0.00013315314619612083, + "loss": 0.1937, + "step": 25970 + }, + { + "epoch": 1.003127533881617, + "grad_norm": 2.5647270679473877, + "learning_rate": 0.00013312740517651905, + "loss": 0.3344, + "step": 25980 + }, + { + "epoch": 1.0035136491756438, + "grad_norm": 0.4810403883457184, + "learning_rate": 0.00013310166415691726, + "loss": 0.1783, + "step": 25990 + }, + { + "epoch": 1.0038997644696706, + "grad_norm": 2.5404248237609863, + "learning_rate": 0.00013307592313731547, + "loss": 0.3979, + "step": 26000 + }, + { + "epoch": 1.0042858797636975, + "grad_norm": 1.1135408878326416, + "learning_rate": 0.00013305018211771369, + "loss": 0.3681, + "step": 26010 + }, + { + "epoch": 1.0046719950577243, + "grad_norm": 1.2810723781585693, + "learning_rate": 0.0001330244410981119, + "loss": 0.183, + "step": 26020 + }, + { + "epoch": 1.005058110351751, + "grad_norm": 3.3486454486846924, + "learning_rate": 0.0001329987000785101, + "loss": 0.2489, + "step": 26030 + }, + { + "epoch": 1.0054442256457778, + "grad_norm": 0.7915325164794922, + "learning_rate": 0.00013297295905890833, + "loss": 0.3577, + "step": 26040 + }, + { + "epoch": 1.0058303409398046, + "grad_norm": 0.969727098941803, + "learning_rate": 0.00013294721803930654, + "loss": 0.2243, + "step": 26050 + }, + { + "epoch": 1.0062164562338314, + "grad_norm": 1.8932983875274658, + "learning_rate": 0.00013292147701970475, + "loss": 0.245, + "step": 26060 + }, + { + "epoch": 1.0066025715278581, + "grad_norm": 1.2421804666519165, + "learning_rate": 0.00013289573600010297, + "loss": 0.2087, + "step": 26070 + }, + { + "epoch": 1.006988686821885, + "grad_norm": 1.316405177116394, + "learning_rate": 0.00013286999498050118, + "loss": 0.2864, + "step": 26080 + }, + { + "epoch": 1.007374802115912, + "grad_norm": 1.9196691513061523, + "learning_rate": 0.0001328442539608994, + "loss": 0.2098, + "step": 26090 + }, + { + "epoch": 1.0077609174099387, + "grad_norm": 0.7522671222686768, + "learning_rate": 0.0001328185129412976, + "loss": 0.3092, + "step": 26100 + }, + { + "epoch": 1.0081470327039654, + "grad_norm": 0.322963684797287, + "learning_rate": 0.00013279277192169585, + "loss": 0.2452, + "step": 26110 + }, + { + "epoch": 1.0085331479979922, + "grad_norm": 2.243734121322632, + "learning_rate": 0.00013276703090209403, + "loss": 0.4387, + "step": 26120 + }, + { + "epoch": 1.008919263292019, + "grad_norm": 1.0588726997375488, + "learning_rate": 0.00013274128988249225, + "loss": 0.3326, + "step": 26130 + }, + { + "epoch": 1.0093053785860457, + "grad_norm": 0.7018478512763977, + "learning_rate": 0.00013271554886289046, + "loss": 0.4248, + "step": 26140 + }, + { + "epoch": 1.0096914938800725, + "grad_norm": 3.692521810531616, + "learning_rate": 0.00013268980784328867, + "loss": 0.2452, + "step": 26150 + }, + { + "epoch": 1.0100776091740993, + "grad_norm": 2.21842885017395, + "learning_rate": 0.00013266406682368689, + "loss": 0.3315, + "step": 26160 + }, + { + "epoch": 1.0104637244681263, + "grad_norm": 1.2321841716766357, + "learning_rate": 0.0001326383258040851, + "loss": 0.1042, + "step": 26170 + }, + { + "epoch": 1.010849839762153, + "grad_norm": 1.6151124238967896, + "learning_rate": 0.00013261258478448334, + "loss": 0.1934, + "step": 26180 + }, + { + "epoch": 1.0112359550561798, + "grad_norm": 0.0949881374835968, + "learning_rate": 0.00013258684376488153, + "loss": 0.1628, + "step": 26190 + }, + { + "epoch": 1.0116220703502066, + "grad_norm": 1.7337597608566284, + 
"learning_rate": 0.00013256110274527977, + "loss": 0.3275, + "step": 26200 + }, + { + "epoch": 1.0120081856442333, + "grad_norm": 2.1338372230529785, + "learning_rate": 0.00013253536172567795, + "loss": 0.3677, + "step": 26210 + }, + { + "epoch": 1.01239430093826, + "grad_norm": 1.803187370300293, + "learning_rate": 0.00013250962070607617, + "loss": 0.2886, + "step": 26220 + }, + { + "epoch": 1.0127804162322869, + "grad_norm": 2.022825002670288, + "learning_rate": 0.00013248387968647438, + "loss": 0.2618, + "step": 26230 + }, + { + "epoch": 1.0131665315263139, + "grad_norm": 0.48369279503822327, + "learning_rate": 0.0001324581386668726, + "loss": 0.4638, + "step": 26240 + }, + { + "epoch": 1.0135526468203406, + "grad_norm": 3.2530572414398193, + "learning_rate": 0.00013243239764727083, + "loss": 0.3824, + "step": 26250 + }, + { + "epoch": 1.0139387621143674, + "grad_norm": 1.0877609252929688, + "learning_rate": 0.00013240665662766902, + "loss": 0.1732, + "step": 26260 + }, + { + "epoch": 1.0143248774083942, + "grad_norm": 1.5585906505584717, + "learning_rate": 0.00013238091560806726, + "loss": 0.2044, + "step": 26270 + }, + { + "epoch": 1.014710992702421, + "grad_norm": 1.5326491594314575, + "learning_rate": 0.00013235517458846545, + "loss": 0.1759, + "step": 26280 + }, + { + "epoch": 1.0150971079964477, + "grad_norm": 4.376593589782715, + "learning_rate": 0.00013232943356886366, + "loss": 0.6032, + "step": 26290 + }, + { + "epoch": 1.0154832232904745, + "grad_norm": 0.8953253030776978, + "learning_rate": 0.0001323036925492619, + "loss": 0.4057, + "step": 26300 + }, + { + "epoch": 1.0158693385845012, + "grad_norm": 1.271932601928711, + "learning_rate": 0.00013227795152966009, + "loss": 0.1802, + "step": 26310 + }, + { + "epoch": 1.0162554538785282, + "grad_norm": 2.311713457107544, + "learning_rate": 0.00013225221051005833, + "loss": 0.3368, + "step": 26320 + }, + { + "epoch": 1.016641569172555, + "grad_norm": 1.386100172996521, + "learning_rate": 0.0001322264694904565, + "loss": 0.3101, + "step": 26330 + }, + { + "epoch": 1.0170276844665818, + "grad_norm": 1.371382236480713, + "learning_rate": 0.00013220072847085475, + "loss": 0.3804, + "step": 26340 + }, + { + "epoch": 1.0174137997606085, + "grad_norm": 0.7098391652107239, + "learning_rate": 0.00013217498745125294, + "loss": 0.2513, + "step": 26350 + }, + { + "epoch": 1.0177999150546353, + "grad_norm": 3.5874531269073486, + "learning_rate": 0.00013214924643165115, + "loss": 0.2467, + "step": 26360 + }, + { + "epoch": 1.018186030348662, + "grad_norm": 1.9853413105010986, + "learning_rate": 0.0001321235054120494, + "loss": 0.2255, + "step": 26370 + }, + { + "epoch": 1.0185721456426888, + "grad_norm": 0.5550156831741333, + "learning_rate": 0.00013209776439244758, + "loss": 0.1558, + "step": 26380 + }, + { + "epoch": 1.0189582609367156, + "grad_norm": 1.92972731590271, + "learning_rate": 0.00013207202337284582, + "loss": 0.4843, + "step": 26390 + }, + { + "epoch": 1.0193443762307426, + "grad_norm": 2.331674814224243, + "learning_rate": 0.000132046282353244, + "loss": 0.2652, + "step": 26400 + }, + { + "epoch": 1.0197304915247694, + "grad_norm": 0.809916615486145, + "learning_rate": 0.00013202054133364225, + "loss": 0.1824, + "step": 26410 + }, + { + "epoch": 1.0201166068187961, + "grad_norm": 2.6432926654815674, + "learning_rate": 0.00013199480031404046, + "loss": 0.2712, + "step": 26420 + }, + { + "epoch": 1.020502722112823, + "grad_norm": 1.6016955375671387, + "learning_rate": 0.00013196905929443865, + "loss": 0.1629, + "step": 26430 + }, 
+ { + "epoch": 1.0208888374068497, + "grad_norm": 0.22881706058979034, + "learning_rate": 0.00013194331827483689, + "loss": 0.2977, + "step": 26440 + }, + { + "epoch": 1.0212749527008764, + "grad_norm": 1.3551994562149048, + "learning_rate": 0.00013191757725523507, + "loss": 0.1207, + "step": 26450 + }, + { + "epoch": 1.0216610679949032, + "grad_norm": 0.19062986969947815, + "learning_rate": 0.0001318918362356333, + "loss": 0.2201, + "step": 26460 + }, + { + "epoch": 1.0220471832889302, + "grad_norm": 2.014047145843506, + "learning_rate": 0.0001318660952160315, + "loss": 0.4991, + "step": 26470 + }, + { + "epoch": 1.022433298582957, + "grad_norm": 0.39503228664398193, + "learning_rate": 0.00013184035419642974, + "loss": 0.3509, + "step": 26480 + }, + { + "epoch": 1.0228194138769837, + "grad_norm": 4.807271957397461, + "learning_rate": 0.00013181461317682795, + "loss": 0.3324, + "step": 26490 + }, + { + "epoch": 1.0232055291710105, + "grad_norm": 1.4785593748092651, + "learning_rate": 0.00013178887215722614, + "loss": 0.269, + "step": 26500 + }, + { + "epoch": 1.0235916444650373, + "grad_norm": 1.851137399673462, + "learning_rate": 0.00013176313113762438, + "loss": 0.2739, + "step": 26510 + }, + { + "epoch": 1.023977759759064, + "grad_norm": 0.6200979351997375, + "learning_rate": 0.00013173739011802257, + "loss": 0.3135, + "step": 26520 + }, + { + "epoch": 1.0243638750530908, + "grad_norm": 2.411592960357666, + "learning_rate": 0.0001317116490984208, + "loss": 0.1675, + "step": 26530 + }, + { + "epoch": 1.0247499903471176, + "grad_norm": 0.6965230703353882, + "learning_rate": 0.000131685908078819, + "loss": 0.2762, + "step": 26540 + }, + { + "epoch": 1.0251361056411445, + "grad_norm": 1.3620637655258179, + "learning_rate": 0.00013166016705921723, + "loss": 0.319, + "step": 26550 + }, + { + "epoch": 1.0255222209351713, + "grad_norm": 1.4016404151916504, + "learning_rate": 0.00013163442603961545, + "loss": 0.1871, + "step": 26560 + }, + { + "epoch": 1.025908336229198, + "grad_norm": 1.234718680381775, + "learning_rate": 0.00013160868502001363, + "loss": 0.1301, + "step": 26570 + }, + { + "epoch": 1.0262944515232248, + "grad_norm": 1.4657102823257446, + "learning_rate": 0.00013158294400041187, + "loss": 0.3814, + "step": 26580 + }, + { + "epoch": 1.0266805668172516, + "grad_norm": 1.606948733329773, + "learning_rate": 0.00013155720298081006, + "loss": 0.2226, + "step": 26590 + }, + { + "epoch": 1.0270666821112784, + "grad_norm": 0.4280283749103546, + "learning_rate": 0.0001315314619612083, + "loss": 0.1506, + "step": 26600 + }, + { + "epoch": 1.0274527974053052, + "grad_norm": 0.141262486577034, + "learning_rate": 0.0001315057209416065, + "loss": 0.1476, + "step": 26610 + }, + { + "epoch": 1.027838912699332, + "grad_norm": 2.7536983489990234, + "learning_rate": 0.00013147997992200473, + "loss": 0.2558, + "step": 26620 + }, + { + "epoch": 1.028225027993359, + "grad_norm": 1.052965760231018, + "learning_rate": 0.00013145423890240294, + "loss": 0.3989, + "step": 26630 + }, + { + "epoch": 1.0286111432873857, + "grad_norm": 0.3157159388065338, + "learning_rate": 0.00013142849788280112, + "loss": 0.2656, + "step": 26640 + }, + { + "epoch": 1.0289972585814124, + "grad_norm": 2.080801248550415, + "learning_rate": 0.00013140275686319937, + "loss": 0.3183, + "step": 26650 + }, + { + "epoch": 1.0293833738754392, + "grad_norm": 1.255540370941162, + "learning_rate": 0.00013137701584359755, + "loss": 0.4555, + "step": 26660 + }, + { + "epoch": 1.029769489169466, + "grad_norm": 1.0504742860794067, + 
"learning_rate": 0.0001313512748239958, + "loss": 0.17, + "step": 26670 + }, + { + "epoch": 1.0301556044634927, + "grad_norm": 1.8295503854751587, + "learning_rate": 0.000131325533804394, + "loss": 0.2202, + "step": 26680 + }, + { + "epoch": 1.0305417197575195, + "grad_norm": 0.9758415222167969, + "learning_rate": 0.00013129979278479222, + "loss": 0.1187, + "step": 26690 + }, + { + "epoch": 1.0309278350515463, + "grad_norm": 0.6076366901397705, + "learning_rate": 0.00013127405176519043, + "loss": 0.0668, + "step": 26700 + }, + { + "epoch": 1.0313139503455733, + "grad_norm": 0.7663784027099609, + "learning_rate": 0.00013124831074558865, + "loss": 0.2115, + "step": 26710 + }, + { + "epoch": 1.0317000656396, + "grad_norm": 1.814332365989685, + "learning_rate": 0.00013122256972598686, + "loss": 0.0901, + "step": 26720 + }, + { + "epoch": 1.0320861809336268, + "grad_norm": 2.80830454826355, + "learning_rate": 0.00013119682870638504, + "loss": 0.213, + "step": 26730 + }, + { + "epoch": 1.0324722962276536, + "grad_norm": 1.324601411819458, + "learning_rate": 0.00013117108768678328, + "loss": 0.6125, + "step": 26740 + }, + { + "epoch": 1.0328584115216803, + "grad_norm": 1.3301643133163452, + "learning_rate": 0.0001311453466671815, + "loss": 0.1986, + "step": 26750 + }, + { + "epoch": 1.033244526815707, + "grad_norm": 5.361929893493652, + "learning_rate": 0.0001311196056475797, + "loss": 0.5023, + "step": 26760 + }, + { + "epoch": 1.0336306421097339, + "grad_norm": 0.7855739593505859, + "learning_rate": 0.00013109386462797792, + "loss": 0.2549, + "step": 26770 + }, + { + "epoch": 1.0340167574037609, + "grad_norm": 0.05219104886054993, + "learning_rate": 0.00013106812360837614, + "loss": 0.2942, + "step": 26780 + }, + { + "epoch": 1.0344028726977876, + "grad_norm": 0.7680227160453796, + "learning_rate": 0.00013104238258877435, + "loss": 0.2909, + "step": 26790 + }, + { + "epoch": 1.0347889879918144, + "grad_norm": 0.559930682182312, + "learning_rate": 0.00013101664156917256, + "loss": 0.2876, + "step": 26800 + }, + { + "epoch": 1.0351751032858412, + "grad_norm": 0.8780495524406433, + "learning_rate": 0.00013099090054957078, + "loss": 0.2837, + "step": 26810 + }, + { + "epoch": 1.035561218579868, + "grad_norm": 2.0212693214416504, + "learning_rate": 0.000130965159529969, + "loss": 0.3008, + "step": 26820 + }, + { + "epoch": 1.0359473338738947, + "grad_norm": 2.2967641353607178, + "learning_rate": 0.0001309394185103672, + "loss": 0.2797, + "step": 26830 + }, + { + "epoch": 1.0363334491679215, + "grad_norm": 0.33352091908454895, + "learning_rate": 0.00013091367749076542, + "loss": 0.4785, + "step": 26840 + }, + { + "epoch": 1.0367195644619482, + "grad_norm": 1.5413645505905151, + "learning_rate": 0.00013088793647116363, + "loss": 0.3248, + "step": 26850 + }, + { + "epoch": 1.0371056797559752, + "grad_norm": 0.36845988035202026, + "learning_rate": 0.00013086219545156184, + "loss": 0.1723, + "step": 26860 + }, + { + "epoch": 1.037491795050002, + "grad_norm": 1.196103811264038, + "learning_rate": 0.00013083645443196006, + "loss": 0.3623, + "step": 26870 + }, + { + "epoch": 1.0378779103440288, + "grad_norm": 2.4172215461730957, + "learning_rate": 0.00013081071341235827, + "loss": 0.3428, + "step": 26880 + }, + { + "epoch": 1.0382640256380555, + "grad_norm": 1.585368275642395, + "learning_rate": 0.00013078497239275648, + "loss": 0.2791, + "step": 26890 + }, + { + "epoch": 1.0386501409320823, + "grad_norm": 2.8708138465881348, + "learning_rate": 0.0001307592313731547, + "loss": 0.3386, + "step": 26900 + }, + 
{ + "epoch": 1.039036256226109, + "grad_norm": 1.1256765127182007, + "learning_rate": 0.0001307334903535529, + "loss": 0.2992, + "step": 26910 + }, + { + "epoch": 1.0394223715201358, + "grad_norm": 0.9846695065498352, + "learning_rate": 0.00013070774933395112, + "loss": 0.2659, + "step": 26920 + }, + { + "epoch": 1.0398084868141626, + "grad_norm": 1.5781525373458862, + "learning_rate": 0.00013068200831434934, + "loss": 0.2825, + "step": 26930 + }, + { + "epoch": 1.0401946021081896, + "grad_norm": 0.6615175008773804, + "learning_rate": 0.00013065626729474755, + "loss": 0.3608, + "step": 26940 + }, + { + "epoch": 1.0405807174022164, + "grad_norm": 1.2707170248031616, + "learning_rate": 0.00013063052627514576, + "loss": 0.3183, + "step": 26950 + }, + { + "epoch": 1.0409668326962431, + "grad_norm": 2.6396381855010986, + "learning_rate": 0.00013060478525554398, + "loss": 0.284, + "step": 26960 + }, + { + "epoch": 1.04135294799027, + "grad_norm": 1.5508041381835938, + "learning_rate": 0.0001305790442359422, + "loss": 0.2861, + "step": 26970 + }, + { + "epoch": 1.0417390632842967, + "grad_norm": 0.32720935344696045, + "learning_rate": 0.0001305533032163404, + "loss": 0.2784, + "step": 26980 + }, + { + "epoch": 1.0421251785783234, + "grad_norm": 2.3695240020751953, + "learning_rate": 0.00013052756219673862, + "loss": 0.3058, + "step": 26990 + }, + { + "epoch": 1.0425112938723502, + "grad_norm": 1.5404917001724243, + "learning_rate": 0.00013050182117713683, + "loss": 0.3636, + "step": 27000 + }, + { + "epoch": 1.0428974091663772, + "grad_norm": 1.3176417350769043, + "learning_rate": 0.00013047608015753504, + "loss": 0.2995, + "step": 27010 + }, + { + "epoch": 1.043283524460404, + "grad_norm": 1.5777894258499146, + "learning_rate": 0.00013045033913793326, + "loss": 0.3244, + "step": 27020 + }, + { + "epoch": 1.0436696397544307, + "grad_norm": 2.209652900695801, + "learning_rate": 0.00013042459811833147, + "loss": 0.2716, + "step": 27030 + }, + { + "epoch": 1.0440557550484575, + "grad_norm": 1.7991529703140259, + "learning_rate": 0.00013039885709872968, + "loss": 0.2022, + "step": 27040 + }, + { + "epoch": 1.0444418703424843, + "grad_norm": 0.3741607069969177, + "learning_rate": 0.0001303731160791279, + "loss": 0.3808, + "step": 27050 + }, + { + "epoch": 1.044827985636511, + "grad_norm": 0.023122821003198624, + "learning_rate": 0.0001303473750595261, + "loss": 0.3209, + "step": 27060 + }, + { + "epoch": 1.0452141009305378, + "grad_norm": 2.0373151302337646, + "learning_rate": 0.00013032163403992432, + "loss": 0.3105, + "step": 27070 + }, + { + "epoch": 1.0456002162245646, + "grad_norm": 0.5242247581481934, + "learning_rate": 0.00013029589302032254, + "loss": 0.2589, + "step": 27080 + }, + { + "epoch": 1.0459863315185915, + "grad_norm": 0.8598466515541077, + "learning_rate": 0.00013027015200072075, + "loss": 0.3025, + "step": 27090 + }, + { + "epoch": 1.0463724468126183, + "grad_norm": 0.19895502924919128, + "learning_rate": 0.00013024441098111896, + "loss": 0.3278, + "step": 27100 + }, + { + "epoch": 1.046758562106645, + "grad_norm": 1.9250338077545166, + "learning_rate": 0.0001302186699615172, + "loss": 0.3272, + "step": 27110 + }, + { + "epoch": 1.0471446774006719, + "grad_norm": 0.6927512288093567, + "learning_rate": 0.0001301929289419154, + "loss": 0.3256, + "step": 27120 + }, + { + "epoch": 1.0475307926946986, + "grad_norm": 0.5463778376579285, + "learning_rate": 0.0001301671879223136, + "loss": 0.2726, + "step": 27130 + }, + { + "epoch": 1.0479169079887254, + "grad_norm": 0.24690699577331543, + 
"learning_rate": 0.00013014144690271182, + "loss": 0.1247, + "step": 27140 + }, + { + "epoch": 1.0483030232827522, + "grad_norm": 7.875885009765625, + "learning_rate": 0.00013011570588311003, + "loss": 0.24, + "step": 27150 + }, + { + "epoch": 1.048689138576779, + "grad_norm": 2.0821642875671387, + "learning_rate": 0.00013008996486350824, + "loss": 0.2695, + "step": 27160 + }, + { + "epoch": 1.049075253870806, + "grad_norm": 1.0324410200119019, + "learning_rate": 0.00013006422384390646, + "loss": 0.3156, + "step": 27170 + }, + { + "epoch": 1.0494613691648327, + "grad_norm": 1.3675347566604614, + "learning_rate": 0.0001300384828243047, + "loss": 0.2366, + "step": 27180 + }, + { + "epoch": 1.0498474844588594, + "grad_norm": 0.4514729082584381, + "learning_rate": 0.00013001274180470288, + "loss": 0.1541, + "step": 27190 + }, + { + "epoch": 1.0502335997528862, + "grad_norm": 1.0098782777786255, + "learning_rate": 0.0001299870007851011, + "loss": 0.3059, + "step": 27200 + }, + { + "epoch": 1.050619715046913, + "grad_norm": 0.07944436371326447, + "learning_rate": 0.0001299612597654993, + "loss": 0.1882, + "step": 27210 + }, + { + "epoch": 1.0510058303409398, + "grad_norm": 0.8160178661346436, + "learning_rate": 0.00012993551874589752, + "loss": 0.2657, + "step": 27220 + }, + { + "epoch": 1.0513919456349665, + "grad_norm": 3.881469964981079, + "learning_rate": 0.00012990977772629574, + "loss": 0.4586, + "step": 27230 + }, + { + "epoch": 1.0517780609289935, + "grad_norm": 1.7851450443267822, + "learning_rate": 0.00012988403670669395, + "loss": 0.2945, + "step": 27240 + }, + { + "epoch": 1.0521641762230203, + "grad_norm": 4.176274299621582, + "learning_rate": 0.0001298582956870922, + "loss": 0.3819, + "step": 27250 + }, + { + "epoch": 1.052550291517047, + "grad_norm": 1.43479585647583, + "learning_rate": 0.00012983255466749038, + "loss": 0.3651, + "step": 27260 + }, + { + "epoch": 1.0529364068110738, + "grad_norm": 0.4839598536491394, + "learning_rate": 0.0001298068136478886, + "loss": 0.186, + "step": 27270 + }, + { + "epoch": 1.0533225221051006, + "grad_norm": 0.8487644791603088, + "learning_rate": 0.0001297810726282868, + "loss": 0.0851, + "step": 27280 + }, + { + "epoch": 1.0537086373991273, + "grad_norm": 3.5912392139434814, + "learning_rate": 0.00012975533160868502, + "loss": 0.3962, + "step": 27290 + }, + { + "epoch": 1.0540947526931541, + "grad_norm": 2.4018168449401855, + "learning_rate": 0.00012972959058908326, + "loss": 0.4, + "step": 27300 + }, + { + "epoch": 1.0544808679871809, + "grad_norm": 3.8987746238708496, + "learning_rate": 0.00012970384956948144, + "loss": 0.3081, + "step": 27310 + }, + { + "epoch": 1.0548669832812079, + "grad_norm": 0.4471427798271179, + "learning_rate": 0.00012967810854987968, + "loss": 0.2227, + "step": 27320 + }, + { + "epoch": 1.0552530985752346, + "grad_norm": 0.6207703351974487, + "learning_rate": 0.00012965236753027787, + "loss": 0.2066, + "step": 27330 + }, + { + "epoch": 1.0556392138692614, + "grad_norm": 1.4381637573242188, + "learning_rate": 0.00012962662651067608, + "loss": 0.2598, + "step": 27340 + }, + { + "epoch": 1.0560253291632882, + "grad_norm": 1.6057437658309937, + "learning_rate": 0.0001296008854910743, + "loss": 0.4003, + "step": 27350 + }, + { + "epoch": 1.056411444457315, + "grad_norm": 1.583121418952942, + "learning_rate": 0.0001295751444714725, + "loss": 0.2231, + "step": 27360 + }, + { + "epoch": 1.0567975597513417, + "grad_norm": 0.9260556101799011, + "learning_rate": 0.00012954940345187075, + "loss": 0.3066, + "step": 27370 + }, + 
{ + "epoch": 1.0571836750453685, + "grad_norm": 0.530800461769104, + "learning_rate": 0.00012952366243226894, + "loss": 0.2063, + "step": 27380 + }, + { + "epoch": 1.0575697903393952, + "grad_norm": 0.3256929814815521, + "learning_rate": 0.00012949792141266718, + "loss": 0.2279, + "step": 27390 + }, + { + "epoch": 1.0579559056334222, + "grad_norm": 1.2628639936447144, + "learning_rate": 0.00012947218039306536, + "loss": 0.179, + "step": 27400 + }, + { + "epoch": 1.058342020927449, + "grad_norm": 1.168662190437317, + "learning_rate": 0.0001294464393734636, + "loss": 0.2862, + "step": 27410 + }, + { + "epoch": 1.0587281362214758, + "grad_norm": 3.8283517360687256, + "learning_rate": 0.00012942069835386182, + "loss": 0.3933, + "step": 27420 + }, + { + "epoch": 1.0591142515155025, + "grad_norm": 0.10550712049007416, + "learning_rate": 0.00012939495733426, + "loss": 0.2015, + "step": 27430 + }, + { + "epoch": 1.0595003668095293, + "grad_norm": 0.7177254557609558, + "learning_rate": 0.00012936921631465824, + "loss": 0.2126, + "step": 27440 + }, + { + "epoch": 1.059886482103556, + "grad_norm": 3.3265602588653564, + "learning_rate": 0.00012934347529505643, + "loss": 0.3734, + "step": 27450 + }, + { + "epoch": 1.0602725973975828, + "grad_norm": 0.9886051416397095, + "learning_rate": 0.00012931773427545467, + "loss": 0.2643, + "step": 27460 + }, + { + "epoch": 1.0606587126916098, + "grad_norm": 0.8335347175598145, + "learning_rate": 0.00012929199325585286, + "loss": 0.2044, + "step": 27470 + }, + { + "epoch": 1.0610448279856366, + "grad_norm": 1.16574227809906, + "learning_rate": 0.0001292662522362511, + "loss": 0.2316, + "step": 27480 + }, + { + "epoch": 1.0614309432796634, + "grad_norm": 0.1635606586933136, + "learning_rate": 0.0001292405112166493, + "loss": 0.2477, + "step": 27490 + }, + { + "epoch": 1.0618170585736901, + "grad_norm": 0.6800632476806641, + "learning_rate": 0.0001292147701970475, + "loss": 0.2831, + "step": 27500 + }, + { + "epoch": 1.062203173867717, + "grad_norm": 0.5231989622116089, + "learning_rate": 0.00012918902917744574, + "loss": 0.1621, + "step": 27510 + }, + { + "epoch": 1.0625892891617437, + "grad_norm": 6.668003559112549, + "learning_rate": 0.00012916328815784392, + "loss": 0.2736, + "step": 27520 + }, + { + "epoch": 1.0629754044557704, + "grad_norm": 0.2980963885784149, + "learning_rate": 0.00012913754713824216, + "loss": 0.2563, + "step": 27530 + }, + { + "epoch": 1.0633615197497972, + "grad_norm": 0.8144646883010864, + "learning_rate": 0.00012911180611864035, + "loss": 0.3821, + "step": 27540 + }, + { + "epoch": 1.063747635043824, + "grad_norm": 0.9781578183174133, + "learning_rate": 0.0001290860650990386, + "loss": 0.3653, + "step": 27550 + }, + { + "epoch": 1.064133750337851, + "grad_norm": 1.5652499198913574, + "learning_rate": 0.0001290603240794368, + "loss": 0.325, + "step": 27560 + }, + { + "epoch": 1.0645198656318777, + "grad_norm": 2.707165002822876, + "learning_rate": 0.000129034583059835, + "loss": 0.3129, + "step": 27570 + }, + { + "epoch": 1.0649059809259045, + "grad_norm": 0.35952532291412354, + "learning_rate": 0.00012900884204023323, + "loss": 0.2093, + "step": 27580 + }, + { + "epoch": 1.0652920962199313, + "grad_norm": 1.2863729000091553, + "learning_rate": 0.00012898310102063142, + "loss": 0.1299, + "step": 27590 + }, + { + "epoch": 1.065678211513958, + "grad_norm": 0.8279618620872498, + "learning_rate": 0.00012895736000102966, + "loss": 0.2072, + "step": 27600 + }, + { + "epoch": 1.0660643268079848, + "grad_norm": 0.4006168246269226, + 
"learning_rate": 0.00012893161898142787, + "loss": 0.3613, + "step": 27610 + }, + { + "epoch": 1.0664504421020116, + "grad_norm": 1.2714260816574097, + "learning_rate": 0.00012890587796182608, + "loss": 0.2013, + "step": 27620 + }, + { + "epoch": 1.0668365573960386, + "grad_norm": 1.9552396535873413, + "learning_rate": 0.0001288801369422243, + "loss": 0.2837, + "step": 27630 + }, + { + "epoch": 1.0672226726900653, + "grad_norm": 2.179871082305908, + "learning_rate": 0.00012885439592262248, + "loss": 0.1521, + "step": 27640 + }, + { + "epoch": 1.067608787984092, + "grad_norm": 1.1471878290176392, + "learning_rate": 0.00012882865490302072, + "loss": 0.2532, + "step": 27650 + }, + { + "epoch": 1.0679949032781189, + "grad_norm": 3.0032637119293213, + "learning_rate": 0.0001288029138834189, + "loss": 0.467, + "step": 27660 + }, + { + "epoch": 1.0683810185721456, + "grad_norm": 2.480180025100708, + "learning_rate": 0.00012877717286381715, + "loss": 0.3601, + "step": 27670 + }, + { + "epoch": 1.0687671338661724, + "grad_norm": 1.86027991771698, + "learning_rate": 0.00012875143184421536, + "loss": 0.3689, + "step": 27680 + }, + { + "epoch": 1.0691532491601992, + "grad_norm": 0.38396087288856506, + "learning_rate": 0.00012872569082461358, + "loss": 0.1401, + "step": 27690 + }, + { + "epoch": 1.0695393644542261, + "grad_norm": 0.8882033824920654, + "learning_rate": 0.0001286999498050118, + "loss": 0.6004, + "step": 27700 + }, + { + "epoch": 1.069925479748253, + "grad_norm": 1.2111278772354126, + "learning_rate": 0.00012867420878540998, + "loss": 0.2371, + "step": 27710 + }, + { + "epoch": 1.0703115950422797, + "grad_norm": 1.7789413928985596, + "learning_rate": 0.00012864846776580822, + "loss": 0.3142, + "step": 27720 + }, + { + "epoch": 1.0706977103363065, + "grad_norm": 1.6160372495651245, + "learning_rate": 0.00012862272674620643, + "loss": 0.3444, + "step": 27730 + }, + { + "epoch": 1.0710838256303332, + "grad_norm": 3.336289167404175, + "learning_rate": 0.00012859698572660464, + "loss": 0.1214, + "step": 27740 + }, + { + "epoch": 1.07146994092436, + "grad_norm": 4.104520320892334, + "learning_rate": 0.00012857124470700286, + "loss": 0.1752, + "step": 27750 + }, + { + "epoch": 1.0718560562183868, + "grad_norm": 0.4429762363433838, + "learning_rate": 0.00012854550368740107, + "loss": 0.5001, + "step": 27760 + }, + { + "epoch": 1.0722421715124135, + "grad_norm": 0.557033121585846, + "learning_rate": 0.00012851976266779928, + "loss": 0.3267, + "step": 27770 + }, + { + "epoch": 1.0726282868064403, + "grad_norm": 1.6847301721572876, + "learning_rate": 0.00012849402164819747, + "loss": 0.2305, + "step": 27780 + }, + { + "epoch": 1.0730144021004673, + "grad_norm": 1.0504320859909058, + "learning_rate": 0.0001284682806285957, + "loss": 0.3443, + "step": 27790 + }, + { + "epoch": 1.073400517394494, + "grad_norm": 2.728804349899292, + "learning_rate": 0.00012844253960899392, + "loss": 0.4083, + "step": 27800 + }, + { + "epoch": 1.0737866326885208, + "grad_norm": 0.6164497137069702, + "learning_rate": 0.00012841679858939214, + "loss": 0.2049, + "step": 27810 + }, + { + "epoch": 1.0741727479825476, + "grad_norm": 2.1831917762756348, + "learning_rate": 0.00012839105756979035, + "loss": 0.1674, + "step": 27820 + }, + { + "epoch": 1.0745588632765743, + "grad_norm": 0.5254467129707336, + "learning_rate": 0.00012836531655018856, + "loss": 0.3456, + "step": 27830 + }, + { + "epoch": 1.0749449785706011, + "grad_norm": 2.920846700668335, + "learning_rate": 0.00012833957553058678, + "loss": 0.4071, + "step": 27840 + 
}, + { + "epoch": 1.0753310938646279, + "grad_norm": 0.6006580591201782, + "learning_rate": 0.00012831383451098496, + "loss": 0.16, + "step": 27850 + }, + { + "epoch": 1.0757172091586549, + "grad_norm": 1.7163684368133545, + "learning_rate": 0.0001282880934913832, + "loss": 0.1821, + "step": 27860 + }, + { + "epoch": 1.0761033244526816, + "grad_norm": 1.8286449909210205, + "learning_rate": 0.00012826235247178142, + "loss": 0.3079, + "step": 27870 + }, + { + "epoch": 1.0764894397467084, + "grad_norm": 2.5178558826446533, + "learning_rate": 0.00012823661145217963, + "loss": 0.2158, + "step": 27880 + }, + { + "epoch": 1.0768755550407352, + "grad_norm": 0.8985245227813721, + "learning_rate": 0.00012821087043257784, + "loss": 0.198, + "step": 27890 + }, + { + "epoch": 1.077261670334762, + "grad_norm": 0.11768722534179688, + "learning_rate": 0.00012818512941297606, + "loss": 0.1661, + "step": 27900 + }, + { + "epoch": 1.0776477856287887, + "grad_norm": 1.0070226192474365, + "learning_rate": 0.00012815938839337427, + "loss": 0.2843, + "step": 27910 + }, + { + "epoch": 1.0780339009228155, + "grad_norm": 1.6243773698806763, + "learning_rate": 0.00012813364737377248, + "loss": 0.3449, + "step": 27920 + }, + { + "epoch": 1.0784200162168422, + "grad_norm": 2.8661181926727295, + "learning_rate": 0.0001281079063541707, + "loss": 0.4921, + "step": 27930 + }, + { + "epoch": 1.0788061315108692, + "grad_norm": 1.5015594959259033, + "learning_rate": 0.0001280821653345689, + "loss": 0.3347, + "step": 27940 + }, + { + "epoch": 1.079192246804896, + "grad_norm": 1.7244246006011963, + "learning_rate": 0.00012805642431496712, + "loss": 0.3342, + "step": 27950 + }, + { + "epoch": 1.0795783620989228, + "grad_norm": 0.17871785163879395, + "learning_rate": 0.00012803068329536534, + "loss": 0.051, + "step": 27960 + }, + { + "epoch": 1.0799644773929495, + "grad_norm": 1.093429684638977, + "learning_rate": 0.00012800494227576355, + "loss": 0.2461, + "step": 27970 + }, + { + "epoch": 1.0803505926869763, + "grad_norm": 1.0775126218795776, + "learning_rate": 0.00012797920125616176, + "loss": 0.293, + "step": 27980 + }, + { + "epoch": 1.080736707981003, + "grad_norm": 2.0808680057525635, + "learning_rate": 0.00012795346023655998, + "loss": 0.2125, + "step": 27990 + }, + { + "epoch": 1.0811228232750298, + "grad_norm": 3.172473907470703, + "learning_rate": 0.0001279277192169582, + "loss": 0.3289, + "step": 28000 + }, + { + "epoch": 1.0815089385690566, + "grad_norm": 0.8227205872535706, + "learning_rate": 0.0001279019781973564, + "loss": 0.4457, + "step": 28010 + }, + { + "epoch": 1.0818950538630836, + "grad_norm": 0.4987971782684326, + "learning_rate": 0.00012787623717775462, + "loss": 0.2397, + "step": 28020 + }, + { + "epoch": 1.0822811691571104, + "grad_norm": 0.6923367381095886, + "learning_rate": 0.00012785049615815283, + "loss": 0.1833, + "step": 28030 + }, + { + "epoch": 1.0826672844511371, + "grad_norm": 0.3719552457332611, + "learning_rate": 0.00012782475513855104, + "loss": 0.4045, + "step": 28040 + }, + { + "epoch": 1.083053399745164, + "grad_norm": 0.798744261264801, + "learning_rate": 0.00012779901411894926, + "loss": 0.2218, + "step": 28050 + }, + { + "epoch": 1.0834395150391907, + "grad_norm": 0.6289515495300293, + "learning_rate": 0.00012777327309934747, + "loss": 0.2423, + "step": 28060 + }, + { + "epoch": 1.0838256303332174, + "grad_norm": 0.6853532195091248, + "learning_rate": 0.00012774753207974568, + "loss": 0.2759, + "step": 28070 + }, + { + "epoch": 1.0842117456272442, + "grad_norm": 1.3380333185195923, 
+ "learning_rate": 0.0001277217910601439, + "loss": 0.2457, + "step": 28080 + }, + { + "epoch": 1.0845978609212712, + "grad_norm": 1.4076060056686401, + "learning_rate": 0.0001276960500405421, + "loss": 0.3065, + "step": 28090 + }, + { + "epoch": 1.084983976215298, + "grad_norm": 1.790323257446289, + "learning_rate": 0.00012767030902094032, + "loss": 0.182, + "step": 28100 + }, + { + "epoch": 1.0853700915093247, + "grad_norm": 1.9291974306106567, + "learning_rate": 0.00012764456800133854, + "loss": 0.3798, + "step": 28110 + }, + { + "epoch": 1.0857562068033515, + "grad_norm": 1.36685049533844, + "learning_rate": 0.00012761882698173675, + "loss": 0.1893, + "step": 28120 + }, + { + "epoch": 1.0861423220973783, + "grad_norm": 2.52441668510437, + "learning_rate": 0.00012759308596213496, + "loss": 0.2389, + "step": 28130 + }, + { + "epoch": 1.086528437391405, + "grad_norm": 0.9578754901885986, + "learning_rate": 0.00012756734494253318, + "loss": 0.3133, + "step": 28140 + }, + { + "epoch": 1.0869145526854318, + "grad_norm": 0.17957572638988495, + "learning_rate": 0.0001275416039229314, + "loss": 0.1708, + "step": 28150 + }, + { + "epoch": 1.0873006679794586, + "grad_norm": 1.7213740348815918, + "learning_rate": 0.0001275158629033296, + "loss": 0.3831, + "step": 28160 + }, + { + "epoch": 1.0876867832734856, + "grad_norm": 1.2460767030715942, + "learning_rate": 0.00012749012188372782, + "loss": 0.266, + "step": 28170 + }, + { + "epoch": 1.0880728985675123, + "grad_norm": 0.33691835403442383, + "learning_rate": 0.00012746438086412606, + "loss": 0.216, + "step": 28180 + }, + { + "epoch": 1.088459013861539, + "grad_norm": 0.23887981474399567, + "learning_rate": 0.00012743863984452424, + "loss": 0.2331, + "step": 28190 + }, + { + "epoch": 1.0888451291555659, + "grad_norm": 4.753499984741211, + "learning_rate": 0.00012741289882492246, + "loss": 0.1845, + "step": 28200 + }, + { + "epoch": 1.0892312444495926, + "grad_norm": 1.055097222328186, + "learning_rate": 0.00012738715780532067, + "loss": 0.2978, + "step": 28210 + }, + { + "epoch": 1.0896173597436194, + "grad_norm": 2.3080852031707764, + "learning_rate": 0.00012736141678571888, + "loss": 0.1838, + "step": 28220 + }, + { + "epoch": 1.0900034750376462, + "grad_norm": 0.3733162581920624, + "learning_rate": 0.00012733567576611712, + "loss": 0.1941, + "step": 28230 + }, + { + "epoch": 1.090389590331673, + "grad_norm": 2.247748613357544, + "learning_rate": 0.0001273099347465153, + "loss": 0.2594, + "step": 28240 + }, + { + "epoch": 1.0907757056257, + "grad_norm": 1.644177794456482, + "learning_rate": 0.00012728419372691355, + "loss": 0.392, + "step": 28250 + }, + { + "epoch": 1.0911618209197267, + "grad_norm": 2.3522965908050537, + "learning_rate": 0.00012725845270731174, + "loss": 0.2198, + "step": 28260 + }, + { + "epoch": 1.0915479362137535, + "grad_norm": 0.2335210144519806, + "learning_rate": 0.00012723271168770995, + "loss": 0.3363, + "step": 28270 + }, + { + "epoch": 1.0919340515077802, + "grad_norm": 0.476607084274292, + "learning_rate": 0.00012720697066810816, + "loss": 0.1803, + "step": 28280 + }, + { + "epoch": 1.092320166801807, + "grad_norm": 2.1482882499694824, + "learning_rate": 0.00012718122964850638, + "loss": 0.2567, + "step": 28290 + }, + { + "epoch": 1.0927062820958338, + "grad_norm": 1.6457593441009521, + "learning_rate": 0.00012715548862890462, + "loss": 0.196, + "step": 28300 + }, + { + "epoch": 1.0930923973898605, + "grad_norm": 2.0742087364196777, + "learning_rate": 0.0001271297476093028, + "loss": 0.3955, + "step": 28310 + }, + 
{ + "epoch": 1.0934785126838875, + "grad_norm": 0.1588711142539978, + "learning_rate": 0.00012710400658970104, + "loss": 0.2113, + "step": 28320 + }, + { + "epoch": 1.0938646279779143, + "grad_norm": 4.303687572479248, + "learning_rate": 0.00012707826557009923, + "loss": 0.2941, + "step": 28330 + }, + { + "epoch": 1.094250743271941, + "grad_norm": 2.0096209049224854, + "learning_rate": 0.00012705252455049744, + "loss": 0.3092, + "step": 28340 + }, + { + "epoch": 1.0946368585659678, + "grad_norm": 0.49071142077445984, + "learning_rate": 0.00012702678353089566, + "loss": 0.2387, + "step": 28350 + }, + { + "epoch": 1.0950229738599946, + "grad_norm": 0.9084739089012146, + "learning_rate": 0.00012700104251129387, + "loss": 0.2199, + "step": 28360 + }, + { + "epoch": 1.0954090891540214, + "grad_norm": 2.076706647872925, + "learning_rate": 0.0001269753014916921, + "loss": 0.3652, + "step": 28370 + }, + { + "epoch": 1.0957952044480481, + "grad_norm": 0.13036206364631653, + "learning_rate": 0.0001269495604720903, + "loss": 0.2514, + "step": 28380 + }, + { + "epoch": 1.0961813197420749, + "grad_norm": 0.146321102976799, + "learning_rate": 0.00012692381945248854, + "loss": 0.2721, + "step": 28390 + }, + { + "epoch": 1.0965674350361019, + "grad_norm": 0.8172006607055664, + "learning_rate": 0.00012689807843288672, + "loss": 0.16, + "step": 28400 + }, + { + "epoch": 1.0969535503301286, + "grad_norm": 1.099068522453308, + "learning_rate": 0.00012687233741328494, + "loss": 0.2489, + "step": 28410 + }, + { + "epoch": 1.0973396656241554, + "grad_norm": 0.6757088899612427, + "learning_rate": 0.00012684659639368318, + "loss": 0.1449, + "step": 28420 + }, + { + "epoch": 1.0977257809181822, + "grad_norm": 0.11124458909034729, + "learning_rate": 0.00012682085537408136, + "loss": 0.2202, + "step": 28430 + }, + { + "epoch": 1.098111896212209, + "grad_norm": 2.357466220855713, + "learning_rate": 0.0001267951143544796, + "loss": 0.2669, + "step": 28440 + }, + { + "epoch": 1.0984980115062357, + "grad_norm": 4.569977760314941, + "learning_rate": 0.0001267693733348778, + "loss": 0.3054, + "step": 28450 + }, + { + "epoch": 1.0988841268002625, + "grad_norm": 1.150667667388916, + "learning_rate": 0.00012674363231527603, + "loss": 0.1519, + "step": 28460 + }, + { + "epoch": 1.0992702420942893, + "grad_norm": 2.016101360321045, + "learning_rate": 0.00012671789129567421, + "loss": 0.2307, + "step": 28470 + }, + { + "epoch": 1.0996563573883162, + "grad_norm": 1.2213127613067627, + "learning_rate": 0.00012669215027607243, + "loss": 0.2847, + "step": 28480 + }, + { + "epoch": 1.100042472682343, + "grad_norm": 2.8080902099609375, + "learning_rate": 0.00012666640925647067, + "loss": 0.2295, + "step": 28490 + }, + { + "epoch": 1.1004285879763698, + "grad_norm": 1.4878045320510864, + "learning_rate": 0.00012664066823686885, + "loss": 0.1497, + "step": 28500 + }, + { + "epoch": 1.1008147032703965, + "grad_norm": 0.7453703880310059, + "learning_rate": 0.0001266149272172671, + "loss": 0.2052, + "step": 28510 + }, + { + "epoch": 1.1012008185644233, + "grad_norm": 0.2775499224662781, + "learning_rate": 0.00012658918619766528, + "loss": 0.1576, + "step": 28520 + }, + { + "epoch": 1.10158693385845, + "grad_norm": 1.0527644157409668, + "learning_rate": 0.00012656344517806352, + "loss": 0.2957, + "step": 28530 + }, + { + "epoch": 1.1019730491524768, + "grad_norm": 0.6511454582214355, + "learning_rate": 0.0001265377041584617, + "loss": 0.2336, + "step": 28540 + }, + { + "epoch": 1.1023591644465038, + "grad_norm": 0.32867324352264404, + 
"learning_rate": 0.00012651196313885992, + "loss": 0.1745, + "step": 28550 + }, + { + "epoch": 1.1027452797405306, + "grad_norm": 2.4408578872680664, + "learning_rate": 0.00012648622211925816, + "loss": 0.2195, + "step": 28560 + }, + { + "epoch": 1.1031313950345574, + "grad_norm": 1.4876518249511719, + "learning_rate": 0.00012646048109965635, + "loss": 0.5123, + "step": 28570 + }, + { + "epoch": 1.1035175103285841, + "grad_norm": 1.9403778314590454, + "learning_rate": 0.0001264347400800546, + "loss": 0.2783, + "step": 28580 + }, + { + "epoch": 1.103903625622611, + "grad_norm": 1.215280532836914, + "learning_rate": 0.00012640899906045277, + "loss": 0.2661, + "step": 28590 + }, + { + "epoch": 1.1042897409166377, + "grad_norm": 0.7179967164993286, + "learning_rate": 0.00012638325804085102, + "loss": 0.1756, + "step": 28600 + }, + { + "epoch": 1.1046758562106644, + "grad_norm": 1.9051718711853027, + "learning_rate": 0.00012635751702124923, + "loss": 0.2724, + "step": 28610 + }, + { + "epoch": 1.1050619715046912, + "grad_norm": 1.5659642219543457, + "learning_rate": 0.00012633177600164744, + "loss": 0.3891, + "step": 28620 + }, + { + "epoch": 1.1054480867987182, + "grad_norm": 0.9210501313209534, + "learning_rate": 0.00012630603498204566, + "loss": 0.1369, + "step": 28630 + }, + { + "epoch": 1.105834202092745, + "grad_norm": 0.5428475141525269, + "learning_rate": 0.00012628029396244384, + "loss": 0.1173, + "step": 28640 + }, + { + "epoch": 1.1062203173867717, + "grad_norm": 1.7399749755859375, + "learning_rate": 0.00012625455294284208, + "loss": 0.3005, + "step": 28650 + }, + { + "epoch": 1.1066064326807985, + "grad_norm": 0.09703828394412994, + "learning_rate": 0.00012622881192324027, + "loss": 0.3346, + "step": 28660 + }, + { + "epoch": 1.1069925479748253, + "grad_norm": 0.5834600329399109, + "learning_rate": 0.0001262030709036385, + "loss": 0.2689, + "step": 28670 + }, + { + "epoch": 1.107378663268852, + "grad_norm": 1.0677303075790405, + "learning_rate": 0.00012617732988403672, + "loss": 0.5609, + "step": 28680 + }, + { + "epoch": 1.1077647785628788, + "grad_norm": 1.6462419033050537, + "learning_rate": 0.00012615158886443493, + "loss": 0.1991, + "step": 28690 + }, + { + "epoch": 1.1081508938569056, + "grad_norm": 0.933779776096344, + "learning_rate": 0.00012612584784483315, + "loss": 0.2757, + "step": 28700 + }, + { + "epoch": 1.1085370091509326, + "grad_norm": 1.3413206338882446, + "learning_rate": 0.00012610010682523133, + "loss": 0.1798, + "step": 28710 + }, + { + "epoch": 1.1089231244449593, + "grad_norm": 4.479143142700195, + "learning_rate": 0.00012607436580562957, + "loss": 0.1777, + "step": 28720 + }, + { + "epoch": 1.109309239738986, + "grad_norm": 1.5768260955810547, + "learning_rate": 0.0001260486247860278, + "loss": 0.1967, + "step": 28730 + }, + { + "epoch": 1.1096953550330129, + "grad_norm": 2.1206741333007812, + "learning_rate": 0.000126022883766426, + "loss": 0.2399, + "step": 28740 + }, + { + "epoch": 1.1100814703270396, + "grad_norm": 1.4531667232513428, + "learning_rate": 0.00012599714274682421, + "loss": 0.3464, + "step": 28750 + }, + { + "epoch": 1.1104675856210664, + "grad_norm": 1.7988258600234985, + "learning_rate": 0.00012597140172722243, + "loss": 0.285, + "step": 28760 + }, + { + "epoch": 1.1108537009150932, + "grad_norm": 1.094808578491211, + "learning_rate": 0.00012594566070762064, + "loss": 0.2194, + "step": 28770 + }, + { + "epoch": 1.1112398162091202, + "grad_norm": 1.3884358406066895, + "learning_rate": 0.00012591991968801883, + "loss": 0.4267, + "step": 
28780 + }, + { + "epoch": 1.111625931503147, + "grad_norm": 2.743480920791626, + "learning_rate": 0.00012589417866841707, + "loss": 0.3333, + "step": 28790 + }, + { + "epoch": 1.1120120467971737, + "grad_norm": 1.0373203754425049, + "learning_rate": 0.00012586843764881528, + "loss": 0.3941, + "step": 28800 + }, + { + "epoch": 1.1123981620912005, + "grad_norm": 2.018101692199707, + "learning_rate": 0.0001258426966292135, + "loss": 0.2928, + "step": 28810 + }, + { + "epoch": 1.1127842773852272, + "grad_norm": 2.567119836807251, + "learning_rate": 0.0001258169556096117, + "loss": 0.3597, + "step": 28820 + }, + { + "epoch": 1.113170392679254, + "grad_norm": 1.1235183477401733, + "learning_rate": 0.00012579121459000992, + "loss": 0.1807, + "step": 28830 + }, + { + "epoch": 1.1135565079732808, + "grad_norm": 1.3740451335906982, + "learning_rate": 0.00012576547357040813, + "loss": 0.2425, + "step": 28840 + }, + { + "epoch": 1.1139426232673075, + "grad_norm": 1.3751258850097656, + "learning_rate": 0.00012573973255080632, + "loss": 0.2116, + "step": 28850 + }, + { + "epoch": 1.1143287385613343, + "grad_norm": 0.9605401158332825, + "learning_rate": 0.00012571399153120456, + "loss": 0.2144, + "step": 28860 + }, + { + "epoch": 1.1147148538553613, + "grad_norm": 0.5127251148223877, + "learning_rate": 0.00012568825051160277, + "loss": 0.1902, + "step": 28870 + }, + { + "epoch": 1.115100969149388, + "grad_norm": 2.6720705032348633, + "learning_rate": 0.000125662509492001, + "loss": 0.3286, + "step": 28880 + }, + { + "epoch": 1.1154870844434148, + "grad_norm": 3.7196574211120605, + "learning_rate": 0.0001256367684723992, + "loss": 0.211, + "step": 28890 + }, + { + "epoch": 1.1158731997374416, + "grad_norm": 0.18796740472316742, + "learning_rate": 0.00012561102745279741, + "loss": 0.3351, + "step": 28900 + }, + { + "epoch": 1.1162593150314684, + "grad_norm": 1.3164410591125488, + "learning_rate": 0.00012558528643319563, + "loss": 0.1771, + "step": 28910 + }, + { + "epoch": 1.1166454303254951, + "grad_norm": 0.9552701115608215, + "learning_rate": 0.00012555954541359384, + "loss": 0.3302, + "step": 28920 + }, + { + "epoch": 1.117031545619522, + "grad_norm": 1.0072277784347534, + "learning_rate": 0.00012553380439399205, + "loss": 0.2936, + "step": 28930 + }, + { + "epoch": 1.1174176609135489, + "grad_norm": 0.24307872354984283, + "learning_rate": 0.00012550806337439027, + "loss": 0.2034, + "step": 28940 + }, + { + "epoch": 1.1178037762075757, + "grad_norm": 2.985166311264038, + "learning_rate": 0.00012548232235478848, + "loss": 0.2628, + "step": 28950 + }, + { + "epoch": 1.1181898915016024, + "grad_norm": 0.46555295586586, + "learning_rate": 0.0001254565813351867, + "loss": 0.1904, + "step": 28960 + }, + { + "epoch": 1.1185760067956292, + "grad_norm": 0.7813409566879272, + "learning_rate": 0.0001254308403155849, + "loss": 0.33, + "step": 28970 + }, + { + "epoch": 1.118962122089656, + "grad_norm": 2.4459455013275146, + "learning_rate": 0.00012540509929598312, + "loss": 0.3183, + "step": 28980 + }, + { + "epoch": 1.1193482373836827, + "grad_norm": 0.5164415240287781, + "learning_rate": 0.00012537935827638133, + "loss": 0.3959, + "step": 28990 + }, + { + "epoch": 1.1197343526777095, + "grad_norm": 0.3853105306625366, + "learning_rate": 0.00012535361725677955, + "loss": 0.1426, + "step": 29000 + }, + { + "epoch": 1.1201204679717365, + "grad_norm": 0.5817530751228333, + "learning_rate": 0.00012532787623717776, + "loss": 0.2218, + "step": 29010 + }, + { + "epoch": 1.1205065832657632, + "grad_norm": 
1.264248251914978, + "learning_rate": 0.00012530213521757597, + "loss": 0.3002, + "step": 29020 + }, + { + "epoch": 1.12089269855979, + "grad_norm": 1.999251127243042, + "learning_rate": 0.0001252763941979742, + "loss": 0.3573, + "step": 29030 + }, + { + "epoch": 1.1212788138538168, + "grad_norm": 1.760797381401062, + "learning_rate": 0.0001252506531783724, + "loss": 0.3829, + "step": 29040 + }, + { + "epoch": 1.1216649291478435, + "grad_norm": 1.4757565259933472, + "learning_rate": 0.00012522491215877061, + "loss": 0.3407, + "step": 29050 + }, + { + "epoch": 1.1220510444418703, + "grad_norm": 0.08838029205799103, + "learning_rate": 0.00012519917113916883, + "loss": 0.1899, + "step": 29060 + }, + { + "epoch": 1.122437159735897, + "grad_norm": 0.6416037678718567, + "learning_rate": 0.00012517343011956704, + "loss": 0.277, + "step": 29070 + }, + { + "epoch": 1.1228232750299239, + "grad_norm": 2.9282822608947754, + "learning_rate": 0.00012514768909996525, + "loss": 0.3001, + "step": 29080 + }, + { + "epoch": 1.1232093903239506, + "grad_norm": 2.5984582901000977, + "learning_rate": 0.00012512194808036347, + "loss": 0.2326, + "step": 29090 + }, + { + "epoch": 1.1235955056179776, + "grad_norm": 1.0622142553329468, + "learning_rate": 0.00012509620706076168, + "loss": 0.1494, + "step": 29100 + }, + { + "epoch": 1.1239816209120044, + "grad_norm": 1.5386018753051758, + "learning_rate": 0.0001250704660411599, + "loss": 0.3392, + "step": 29110 + }, + { + "epoch": 1.1243677362060311, + "grad_norm": 0.8901385068893433, + "learning_rate": 0.0001250447250215581, + "loss": 0.2651, + "step": 29120 + }, + { + "epoch": 1.124753851500058, + "grad_norm": 2.0237483978271484, + "learning_rate": 0.00012501898400195632, + "loss": 0.3764, + "step": 29130 + }, + { + "epoch": 1.1251399667940847, + "grad_norm": 1.8989384174346924, + "learning_rate": 0.00012499324298235453, + "loss": 0.2713, + "step": 29140 + }, + { + "epoch": 1.1255260820881114, + "grad_norm": 2.704643487930298, + "learning_rate": 0.00012496750196275275, + "loss": 0.1362, + "step": 29150 + }, + { + "epoch": 1.1259121973821382, + "grad_norm": 1.2598273754119873, + "learning_rate": 0.00012494176094315096, + "loss": 0.331, + "step": 29160 + }, + { + "epoch": 1.1262983126761652, + "grad_norm": 2.2073826789855957, + "learning_rate": 0.00012491601992354917, + "loss": 0.209, + "step": 29170 + }, + { + "epoch": 1.126684427970192, + "grad_norm": 0.8338522911071777, + "learning_rate": 0.0001248902789039474, + "loss": 0.2583, + "step": 29180 + }, + { + "epoch": 1.1270705432642187, + "grad_norm": 0.49807825684547424, + "learning_rate": 0.0001248645378843456, + "loss": 0.2702, + "step": 29190 + }, + { + "epoch": 1.1274566585582455, + "grad_norm": 2.2561802864074707, + "learning_rate": 0.00012483879686474381, + "loss": 0.2667, + "step": 29200 + }, + { + "epoch": 1.1278427738522723, + "grad_norm": 0.6450731754302979, + "learning_rate": 0.00012481305584514203, + "loss": 0.2592, + "step": 29210 + }, + { + "epoch": 1.128228889146299, + "grad_norm": 1.214436650276184, + "learning_rate": 0.00012478731482554024, + "loss": 0.2285, + "step": 29220 + }, + { + "epoch": 1.1286150044403258, + "grad_norm": 1.2677173614501953, + "learning_rate": 0.00012476157380593848, + "loss": 0.1732, + "step": 29230 + }, + { + "epoch": 1.1290011197343528, + "grad_norm": 2.034266471862793, + "learning_rate": 0.00012473583278633667, + "loss": 0.3679, + "step": 29240 + }, + { + "epoch": 1.1293872350283796, + "grad_norm": 2.005913496017456, + "learning_rate": 0.00012471009176673488, + "loss": 
0.2703, + "step": 29250 + }, + { + "epoch": 1.1297733503224063, + "grad_norm": 1.818928599357605, + "learning_rate": 0.0001246843507471331, + "loss": 0.3355, + "step": 29260 + }, + { + "epoch": 1.130159465616433, + "grad_norm": 0.5393241047859192, + "learning_rate": 0.0001246586097275313, + "loss": 0.1043, + "step": 29270 + }, + { + "epoch": 1.1305455809104599, + "grad_norm": 0.5508402585983276, + "learning_rate": 0.00012463286870792952, + "loss": 0.2538, + "step": 29280 + }, + { + "epoch": 1.1309316962044866, + "grad_norm": 1.1734035015106201, + "learning_rate": 0.00012460712768832773, + "loss": 0.3082, + "step": 29290 + }, + { + "epoch": 1.1313178114985134, + "grad_norm": 0.977611243724823, + "learning_rate": 0.00012458138666872597, + "loss": 0.1962, + "step": 29300 + }, + { + "epoch": 1.1317039267925402, + "grad_norm": 0.9720492362976074, + "learning_rate": 0.00012455564564912416, + "loss": 0.3462, + "step": 29310 + }, + { + "epoch": 1.132090042086567, + "grad_norm": 1.20888352394104, + "learning_rate": 0.00012452990462952237, + "loss": 0.1229, + "step": 29320 + }, + { + "epoch": 1.132476157380594, + "grad_norm": 0.7969954609870911, + "learning_rate": 0.0001245041636099206, + "loss": 0.4213, + "step": 29330 + }, + { + "epoch": 1.1328622726746207, + "grad_norm": 0.07595942914485931, + "learning_rate": 0.0001244784225903188, + "loss": 0.2148, + "step": 29340 + }, + { + "epoch": 1.1332483879686475, + "grad_norm": 0.15456156432628632, + "learning_rate": 0.000124452681570717, + "loss": 0.4713, + "step": 29350 + }, + { + "epoch": 1.1336345032626742, + "grad_norm": 1.232366681098938, + "learning_rate": 0.00012442694055111523, + "loss": 0.2446, + "step": 29360 + }, + { + "epoch": 1.134020618556701, + "grad_norm": 2.3669209480285645, + "learning_rate": 0.00012440119953151347, + "loss": 0.3025, + "step": 29370 + }, + { + "epoch": 1.1344067338507278, + "grad_norm": 4.639179229736328, + "learning_rate": 0.00012437545851191165, + "loss": 0.337, + "step": 29380 + }, + { + "epoch": 1.1347928491447545, + "grad_norm": 0.700533926486969, + "learning_rate": 0.0001243497174923099, + "loss": 0.1747, + "step": 29390 + }, + { + "epoch": 1.1351789644387815, + "grad_norm": 0.5738794803619385, + "learning_rate": 0.00012432397647270808, + "loss": 0.357, + "step": 29400 + }, + { + "epoch": 1.1355650797328083, + "grad_norm": 2.620095729827881, + "learning_rate": 0.0001242982354531063, + "loss": 0.2885, + "step": 29410 + }, + { + "epoch": 1.135951195026835, + "grad_norm": 1.5040203332901, + "learning_rate": 0.00012427249443350453, + "loss": 0.2481, + "step": 29420 + }, + { + "epoch": 1.1363373103208618, + "grad_norm": 0.7409051060676575, + "learning_rate": 0.00012424675341390272, + "loss": 0.3365, + "step": 29430 + }, + { + "epoch": 1.1367234256148886, + "grad_norm": 0.6730226874351501, + "learning_rate": 0.00012422101239430096, + "loss": 0.1508, + "step": 29440 + }, + { + "epoch": 1.1371095409089154, + "grad_norm": 2.1389102935791016, + "learning_rate": 0.00012419527137469915, + "loss": 0.3832, + "step": 29450 + }, + { + "epoch": 1.1374956562029421, + "grad_norm": 0.5423761606216431, + "learning_rate": 0.0001241695303550974, + "loss": 0.2942, + "step": 29460 + }, + { + "epoch": 1.1378817714969691, + "grad_norm": 2.6076724529266357, + "learning_rate": 0.00012414378933549557, + "loss": 0.2291, + "step": 29470 + }, + { + "epoch": 1.1382678867909959, + "grad_norm": 1.0197224617004395, + "learning_rate": 0.0001241180483158938, + "loss": 0.2309, + "step": 29480 + }, + { + "epoch": 1.1386540020850227, + "grad_norm": 
1.4430413246154785, + "learning_rate": 0.00012409230729629203, + "loss": 0.258, + "step": 29490 + }, + { + "epoch": 1.1390401173790494, + "grad_norm": 1.43483304977417, + "learning_rate": 0.0001240665662766902, + "loss": 0.1931, + "step": 29500 + }, + { + "epoch": 1.1394262326730762, + "grad_norm": 0.7181301116943359, + "learning_rate": 0.00012404082525708845, + "loss": 0.2675, + "step": 29510 + }, + { + "epoch": 1.139812347967103, + "grad_norm": 2.2020421028137207, + "learning_rate": 0.00012401508423748664, + "loss": 0.5094, + "step": 29520 + }, + { + "epoch": 1.1401984632611297, + "grad_norm": 0.35750746726989746, + "learning_rate": 0.00012398934321788488, + "loss": 0.3825, + "step": 29530 + }, + { + "epoch": 1.1405845785551565, + "grad_norm": 2.1792123317718506, + "learning_rate": 0.0001239636021982831, + "loss": 0.4259, + "step": 29540 + }, + { + "epoch": 1.1409706938491833, + "grad_norm": 1.2699453830718994, + "learning_rate": 0.00012393786117868128, + "loss": 0.2524, + "step": 29550 + }, + { + "epoch": 1.1413568091432102, + "grad_norm": 4.232237339019775, + "learning_rate": 0.00012391212015907952, + "loss": 0.4191, + "step": 29560 + }, + { + "epoch": 1.141742924437237, + "grad_norm": 1.5009098052978516, + "learning_rate": 0.0001238863791394777, + "loss": 0.5748, + "step": 29570 + }, + { + "epoch": 1.1421290397312638, + "grad_norm": 0.8117336630821228, + "learning_rate": 0.00012386063811987595, + "loss": 0.2309, + "step": 29580 + }, + { + "epoch": 1.1425151550252906, + "grad_norm": 0.6417378187179565, + "learning_rate": 0.00012383489710027413, + "loss": 0.3011, + "step": 29590 + }, + { + "epoch": 1.1429012703193173, + "grad_norm": 0.19958554208278656, + "learning_rate": 0.00012380915608067237, + "loss": 0.4943, + "step": 29600 + }, + { + "epoch": 1.143287385613344, + "grad_norm": 1.7980111837387085, + "learning_rate": 0.0001237834150610706, + "loss": 0.2498, + "step": 29610 + }, + { + "epoch": 1.1436735009073709, + "grad_norm": 0.7506774663925171, + "learning_rate": 0.00012375767404146877, + "loss": 0.1622, + "step": 29620 + }, + { + "epoch": 1.1440596162013978, + "grad_norm": 0.4346953332424164, + "learning_rate": 0.000123731933021867, + "loss": 0.2608, + "step": 29630 + }, + { + "epoch": 1.1444457314954246, + "grad_norm": 1.4491907358169556, + "learning_rate": 0.0001237061920022652, + "loss": 0.4674, + "step": 29640 + }, + { + "epoch": 1.1448318467894514, + "grad_norm": 0.26328304409980774, + "learning_rate": 0.00012368045098266344, + "loss": 0.4287, + "step": 29650 + }, + { + "epoch": 1.1452179620834781, + "grad_norm": 0.6601302027702332, + "learning_rate": 0.00012365470996306163, + "loss": 0.3235, + "step": 29660 + }, + { + "epoch": 1.145604077377505, + "grad_norm": 3.9714503288269043, + "learning_rate": 0.00012362896894345987, + "loss": 0.2969, + "step": 29670 + }, + { + "epoch": 1.1459901926715317, + "grad_norm": 2.6341910362243652, + "learning_rate": 0.00012360322792385808, + "loss": 0.2771, + "step": 29680 + }, + { + "epoch": 1.1463763079655584, + "grad_norm": 0.04610513150691986, + "learning_rate": 0.00012357748690425627, + "loss": 0.2286, + "step": 29690 + }, + { + "epoch": 1.1467624232595852, + "grad_norm": 0.18892113864421844, + "learning_rate": 0.0001235517458846545, + "loss": 0.1821, + "step": 29700 + }, + { + "epoch": 1.147148538553612, + "grad_norm": 2.186973810195923, + "learning_rate": 0.0001235260048650527, + "loss": 0.2582, + "step": 29710 + }, + { + "epoch": 1.147534653847639, + "grad_norm": 0.23074299097061157, + "learning_rate": 0.00012350026384545093, + "loss": 
0.3134, + "step": 29720 + }, + { + "epoch": 1.1479207691416657, + "grad_norm": 4.021410942077637, + "learning_rate": 0.00012347452282584915, + "loss": 0.3161, + "step": 29730 + }, + { + "epoch": 1.1483068844356925, + "grad_norm": 1.0262142419815063, + "learning_rate": 0.00012344878180624736, + "loss": 0.2034, + "step": 29740 + }, + { + "epoch": 1.1486929997297193, + "grad_norm": 1.4075149297714233, + "learning_rate": 0.00012342304078664557, + "loss": 0.2799, + "step": 29750 + }, + { + "epoch": 1.149079115023746, + "grad_norm": 0.9609633684158325, + "learning_rate": 0.00012339729976704376, + "loss": 0.1358, + "step": 29760 + }, + { + "epoch": 1.1494652303177728, + "grad_norm": 2.0923006534576416, + "learning_rate": 0.000123371558747442, + "loss": 0.224, + "step": 29770 + }, + { + "epoch": 1.1498513456117996, + "grad_norm": 0.4723201096057892, + "learning_rate": 0.00012334581772784019, + "loss": 0.1231, + "step": 29780 + }, + { + "epoch": 1.1502374609058266, + "grad_norm": 0.2070016860961914, + "learning_rate": 0.00012332007670823843, + "loss": 0.2998, + "step": 29790 + }, + { + "epoch": 1.1506235761998533, + "grad_norm": 0.3871285021305084, + "learning_rate": 0.00012329433568863664, + "loss": 0.2776, + "step": 29800 + }, + { + "epoch": 1.15100969149388, + "grad_norm": 0.6765030026435852, + "learning_rate": 0.00012326859466903485, + "loss": 0.6047, + "step": 29810 + }, + { + "epoch": 1.1513958067879069, + "grad_norm": 0.22907795011997223, + "learning_rate": 0.00012324285364943307, + "loss": 0.1573, + "step": 29820 + }, + { + "epoch": 1.1517819220819336, + "grad_norm": 0.3838706612586975, + "learning_rate": 0.00012321711262983128, + "loss": 0.1877, + "step": 29830 + }, + { + "epoch": 1.1521680373759604, + "grad_norm": 1.6967968940734863, + "learning_rate": 0.0001231913716102295, + "loss": 0.2314, + "step": 29840 + }, + { + "epoch": 1.1525541526699872, + "grad_norm": 0.6754477024078369, + "learning_rate": 0.00012316563059062768, + "loss": 0.1997, + "step": 29850 + }, + { + "epoch": 1.1529402679640142, + "grad_norm": 0.8980739712715149, + "learning_rate": 0.00012313988957102592, + "loss": 0.2729, + "step": 29860 + }, + { + "epoch": 1.153326383258041, + "grad_norm": 2.7968523502349854, + "learning_rate": 0.00012311414855142413, + "loss": 0.2805, + "step": 29870 + }, + { + "epoch": 1.1537124985520677, + "grad_norm": 1.5867468118667603, + "learning_rate": 0.00012308840753182235, + "loss": 0.3489, + "step": 29880 + }, + { + "epoch": 1.1540986138460945, + "grad_norm": 3.077193021774292, + "learning_rate": 0.00012306266651222056, + "loss": 0.3397, + "step": 29890 + }, + { + "epoch": 1.1544847291401212, + "grad_norm": 0.49896860122680664, + "learning_rate": 0.00012303692549261877, + "loss": 0.1215, + "step": 29900 + }, + { + "epoch": 1.154870844434148, + "grad_norm": 0.9598873257637024, + "learning_rate": 0.00012301118447301699, + "loss": 0.1618, + "step": 29910 + }, + { + "epoch": 1.1552569597281748, + "grad_norm": 1.2659916877746582, + "learning_rate": 0.0001229854434534152, + "loss": 0.1623, + "step": 29920 + }, + { + "epoch": 1.1556430750222015, + "grad_norm": 1.4009841680526733, + "learning_rate": 0.0001229597024338134, + "loss": 0.2205, + "step": 29930 + }, + { + "epoch": 1.1560291903162283, + "grad_norm": 0.5847800970077515, + "learning_rate": 0.00012293396141421163, + "loss": 0.3173, + "step": 29940 + }, + { + "epoch": 1.1564153056102553, + "grad_norm": 1.2930784225463867, + "learning_rate": 0.00012290822039460984, + "loss": 0.3052, + "step": 29950 + }, + { + "epoch": 1.156801420904282, + 
"grad_norm": 0.7307919859886169, + "learning_rate": 0.00012288247937500805, + "loss": 0.2293, + "step": 29960 + }, + { + "epoch": 1.1571875361983088, + "grad_norm": 0.2120385468006134, + "learning_rate": 0.00012285673835540627, + "loss": 0.2006, + "step": 29970 + }, + { + "epoch": 1.1575736514923356, + "grad_norm": 1.0932674407958984, + "learning_rate": 0.00012283099733580448, + "loss": 0.1716, + "step": 29980 + }, + { + "epoch": 1.1579597667863624, + "grad_norm": 0.7607210874557495, + "learning_rate": 0.0001228052563162027, + "loss": 0.346, + "step": 29990 + }, + { + "epoch": 1.1583458820803891, + "grad_norm": 4.455061435699463, + "learning_rate": 0.0001227795152966009, + "loss": 0.2588, + "step": 30000 + }, + { + "epoch": 1.158731997374416, + "grad_norm": 2.0812416076660156, + "learning_rate": 0.00012275377427699912, + "loss": 0.2958, + "step": 30010 + }, + { + "epoch": 1.159118112668443, + "grad_norm": 1.8883334398269653, + "learning_rate": 0.00012272803325739733, + "loss": 0.1687, + "step": 30020 + }, + { + "epoch": 1.1595042279624697, + "grad_norm": 1.3935341835021973, + "learning_rate": 0.00012270229223779555, + "loss": 0.2276, + "step": 30030 + }, + { + "epoch": 1.1598903432564964, + "grad_norm": 2.5193631649017334, + "learning_rate": 0.00012267655121819376, + "loss": 0.3598, + "step": 30040 + }, + { + "epoch": 1.1602764585505232, + "grad_norm": 1.9172290563583374, + "learning_rate": 0.00012265081019859197, + "loss": 0.3294, + "step": 30050 + }, + { + "epoch": 1.16066257384455, + "grad_norm": 1.2220836877822876, + "learning_rate": 0.00012262506917899019, + "loss": 0.2053, + "step": 30060 + }, + { + "epoch": 1.1610486891385767, + "grad_norm": 1.9804691076278687, + "learning_rate": 0.0001225993281593884, + "loss": 0.2091, + "step": 30070 + }, + { + "epoch": 1.1614348044326035, + "grad_norm": 1.2194398641586304, + "learning_rate": 0.0001225735871397866, + "loss": 0.2535, + "step": 30080 + }, + { + "epoch": 1.1618209197266305, + "grad_norm": 0.7828000783920288, + "learning_rate": 0.00012254784612018483, + "loss": 0.436, + "step": 30090 + }, + { + "epoch": 1.1622070350206573, + "grad_norm": 0.26130637526512146, + "learning_rate": 0.00012252210510058304, + "loss": 0.3351, + "step": 30100 + }, + { + "epoch": 1.162593150314684, + "grad_norm": 0.9175068736076355, + "learning_rate": 0.00012249636408098125, + "loss": 0.0682, + "step": 30110 + }, + { + "epoch": 1.1629792656087108, + "grad_norm": 0.35828933119773865, + "learning_rate": 0.00012247062306137947, + "loss": 0.3081, + "step": 30120 + }, + { + "epoch": 1.1633653809027376, + "grad_norm": 1.2477439641952515, + "learning_rate": 0.00012244488204177768, + "loss": 0.2522, + "step": 30130 + }, + { + "epoch": 1.1637514961967643, + "grad_norm": 1.0479830503463745, + "learning_rate": 0.0001224191410221759, + "loss": 0.3183, + "step": 30140 + }, + { + "epoch": 1.164137611490791, + "grad_norm": 0.9295257329940796, + "learning_rate": 0.0001223934000025741, + "loss": 0.149, + "step": 30150 + }, + { + "epoch": 1.1645237267848179, + "grad_norm": 1.4081065654754639, + "learning_rate": 0.00012236765898297232, + "loss": 0.2403, + "step": 30160 + }, + { + "epoch": 1.1649098420788446, + "grad_norm": 0.6170324087142944, + "learning_rate": 0.00012234191796337053, + "loss": 0.3526, + "step": 30170 + }, + { + "epoch": 1.1652959573728716, + "grad_norm": 3.095670461654663, + "learning_rate": 0.00012231617694376875, + "loss": 0.2873, + "step": 30180 + }, + { + "epoch": 1.1656820726668984, + "grad_norm": 1.453447937965393, + "learning_rate": 
0.00012229043592416696, + "loss": 0.2753, + "step": 30190 + }, + { + "epoch": 1.1660681879609252, + "grad_norm": 1.008033275604248, + "learning_rate": 0.00012226469490456517, + "loss": 0.2926, + "step": 30200 + }, + { + "epoch": 1.166454303254952, + "grad_norm": 2.122175693511963, + "learning_rate": 0.00012223895388496339, + "loss": 0.2432, + "step": 30210 + }, + { + "epoch": 1.1668404185489787, + "grad_norm": 1.4835058450698853, + "learning_rate": 0.0001222132128653616, + "loss": 0.2891, + "step": 30220 + }, + { + "epoch": 1.1672265338430055, + "grad_norm": 1.7386225461959839, + "learning_rate": 0.00012218747184575984, + "loss": 0.2371, + "step": 30230 + }, + { + "epoch": 1.1676126491370322, + "grad_norm": 2.1769731044769287, + "learning_rate": 0.00012216173082615803, + "loss": 0.2798, + "step": 30240 + }, + { + "epoch": 1.1679987644310592, + "grad_norm": 3.3941900730133057, + "learning_rate": 0.00012213598980655624, + "loss": 0.4021, + "step": 30250 + }, + { + "epoch": 1.168384879725086, + "grad_norm": 1.099238395690918, + "learning_rate": 0.00012211024878695445, + "loss": 0.2908, + "step": 30260 + }, + { + "epoch": 1.1687709950191127, + "grad_norm": 0.6718109250068665, + "learning_rate": 0.00012208450776735267, + "loss": 0.1791, + "step": 30270 + }, + { + "epoch": 1.1691571103131395, + "grad_norm": 0.25414201617240906, + "learning_rate": 0.00012205876674775088, + "loss": 0.1322, + "step": 30280 + }, + { + "epoch": 1.1695432256071663, + "grad_norm": 2.1115262508392334, + "learning_rate": 0.00012203302572814909, + "loss": 0.3538, + "step": 30290 + }, + { + "epoch": 1.169929340901193, + "grad_norm": 2.3652501106262207, + "learning_rate": 0.00012200728470854732, + "loss": 0.3229, + "step": 30300 + }, + { + "epoch": 1.1703154561952198, + "grad_norm": 1.4749270677566528, + "learning_rate": 0.00012198154368894552, + "loss": 0.1251, + "step": 30310 + }, + { + "epoch": 1.1707015714892468, + "grad_norm": 0.6566292643547058, + "learning_rate": 0.00012195580266934375, + "loss": 0.2527, + "step": 30320 + }, + { + "epoch": 1.1710876867832736, + "grad_norm": 1.9602152109146118, + "learning_rate": 0.00012193006164974195, + "loss": 0.1851, + "step": 30330 + }, + { + "epoch": 1.1714738020773003, + "grad_norm": 1.6631299257278442, + "learning_rate": 0.00012190432063014017, + "loss": 0.4715, + "step": 30340 + }, + { + "epoch": 1.171859917371327, + "grad_norm": 1.1554430723190308, + "learning_rate": 0.00012187857961053839, + "loss": 0.361, + "step": 30350 + }, + { + "epoch": 1.1722460326653539, + "grad_norm": 2.5738513469696045, + "learning_rate": 0.00012185283859093659, + "loss": 0.3661, + "step": 30360 + }, + { + "epoch": 1.1726321479593806, + "grad_norm": 0.5713154077529907, + "learning_rate": 0.00012182709757133481, + "loss": 0.2468, + "step": 30370 + }, + { + "epoch": 1.1730182632534074, + "grad_norm": 0.7371454834938049, + "learning_rate": 0.00012180135655173301, + "loss": 0.2949, + "step": 30380 + }, + { + "epoch": 1.1734043785474342, + "grad_norm": 2.5442118644714355, + "learning_rate": 0.00012177561553213124, + "loss": 0.2305, + "step": 30390 + }, + { + "epoch": 1.173790493841461, + "grad_norm": 1.684951663017273, + "learning_rate": 0.00012174987451252944, + "loss": 0.2535, + "step": 30400 + }, + { + "epoch": 1.174176609135488, + "grad_norm": 1.6874382495880127, + "learning_rate": 0.00012172413349292767, + "loss": 0.4069, + "step": 30410 + }, + { + "epoch": 1.1745627244295147, + "grad_norm": 0.46226370334625244, + "learning_rate": 0.00012169839247332588, + "loss": 0.221, + "step": 30420 + }, + { + 
"epoch": 1.1749488397235415, + "grad_norm": 3.5687646865844727, + "learning_rate": 0.00012167265145372408, + "loss": 0.2749, + "step": 30430 + }, + { + "epoch": 1.1753349550175682, + "grad_norm": 2.9427647590637207, + "learning_rate": 0.0001216469104341223, + "loss": 0.2621, + "step": 30440 + }, + { + "epoch": 1.175721070311595, + "grad_norm": 1.3569320440292358, + "learning_rate": 0.0001216211694145205, + "loss": 0.1711, + "step": 30450 + }, + { + "epoch": 1.1761071856056218, + "grad_norm": 0.5906672477722168, + "learning_rate": 0.00012159542839491873, + "loss": 0.3111, + "step": 30460 + }, + { + "epoch": 1.1764933008996485, + "grad_norm": 1.7809525728225708, + "learning_rate": 0.00012156968737531693, + "loss": 0.198, + "step": 30470 + }, + { + "epoch": 1.1768794161936755, + "grad_norm": 1.5865052938461304, + "learning_rate": 0.00012154394635571516, + "loss": 0.3589, + "step": 30480 + }, + { + "epoch": 1.1772655314877023, + "grad_norm": 0.4852294623851776, + "learning_rate": 0.00012151820533611337, + "loss": 0.203, + "step": 30490 + }, + { + "epoch": 1.177651646781729, + "grad_norm": 4.437458515167236, + "learning_rate": 0.00012149246431651158, + "loss": 0.2886, + "step": 30500 + }, + { + "epoch": 1.1780377620757558, + "grad_norm": 2.204751491546631, + "learning_rate": 0.0001214667232969098, + "loss": 0.313, + "step": 30510 + }, + { + "epoch": 1.1784238773697826, + "grad_norm": 0.9356504678726196, + "learning_rate": 0.000121440982277308, + "loss": 0.2813, + "step": 30520 + }, + { + "epoch": 1.1788099926638094, + "grad_norm": 0.06744952499866486, + "learning_rate": 0.00012141524125770622, + "loss": 0.2009, + "step": 30530 + }, + { + "epoch": 1.1791961079578361, + "grad_norm": 0.49779242277145386, + "learning_rate": 0.00012138950023810445, + "loss": 0.1822, + "step": 30540 + }, + { + "epoch": 1.1795822232518631, + "grad_norm": 1.1115593910217285, + "learning_rate": 0.00012136375921850265, + "loss": 0.4164, + "step": 30550 + }, + { + "epoch": 1.17996833854589, + "grad_norm": 0.2939944267272949, + "learning_rate": 0.00012133801819890086, + "loss": 0.1356, + "step": 30560 + }, + { + "epoch": 1.1803544538399167, + "grad_norm": 0.15905381739139557, + "learning_rate": 0.00012131227717929908, + "loss": 0.3479, + "step": 30570 + }, + { + "epoch": 1.1807405691339434, + "grad_norm": 3.146277666091919, + "learning_rate": 0.00012128653615969729, + "loss": 0.2722, + "step": 30580 + }, + { + "epoch": 1.1811266844279702, + "grad_norm": 0.60884690284729, + "learning_rate": 0.00012126079514009549, + "loss": 0.1778, + "step": 30590 + }, + { + "epoch": 1.181512799721997, + "grad_norm": 0.053204573690891266, + "learning_rate": 0.00012123505412049372, + "loss": 0.1397, + "step": 30600 + }, + { + "epoch": 1.1818989150160237, + "grad_norm": 1.2908227443695068, + "learning_rate": 0.00012120931310089194, + "loss": 0.1552, + "step": 30610 + }, + { + "epoch": 1.1822850303100505, + "grad_norm": 1.489575743675232, + "learning_rate": 0.00012118357208129014, + "loss": 0.2965, + "step": 30620 + }, + { + "epoch": 1.1826711456040773, + "grad_norm": 2.4103262424468994, + "learning_rate": 0.00012115783106168837, + "loss": 0.2947, + "step": 30630 + }, + { + "epoch": 1.1830572608981043, + "grad_norm": 0.3685878813266754, + "learning_rate": 0.00012113209004208657, + "loss": 0.2144, + "step": 30640 + }, + { + "epoch": 1.183443376192131, + "grad_norm": 0.29953858256340027, + "learning_rate": 0.00012110634902248478, + "loss": 0.2281, + "step": 30650 + }, + { + "epoch": 1.1838294914861578, + "grad_norm": 0.4792311191558838, + 
"learning_rate": 0.00012108060800288298, + "loss": 0.2727, + "step": 30660 + }, + { + "epoch": 1.1842156067801846, + "grad_norm": 0.17143972218036652, + "learning_rate": 0.00012105486698328121, + "loss": 0.2096, + "step": 30670 + }, + { + "epoch": 1.1846017220742113, + "grad_norm": 0.43678683042526245, + "learning_rate": 0.00012102912596367944, + "loss": 0.3818, + "step": 30680 + }, + { + "epoch": 1.184987837368238, + "grad_norm": 2.0908610820770264, + "learning_rate": 0.00012100338494407764, + "loss": 0.2496, + "step": 30690 + }, + { + "epoch": 1.1853739526622649, + "grad_norm": 1.5331153869628906, + "learning_rate": 0.00012097764392447586, + "loss": 0.2772, + "step": 30700 + }, + { + "epoch": 1.1857600679562919, + "grad_norm": 0.6948639154434204, + "learning_rate": 0.00012095190290487406, + "loss": 0.3896, + "step": 30710 + }, + { + "epoch": 1.1861461832503186, + "grad_norm": 2.3802030086517334, + "learning_rate": 0.00012092616188527228, + "loss": 0.1998, + "step": 30720 + }, + { + "epoch": 1.1865322985443454, + "grad_norm": 2.1358511447906494, + "learning_rate": 0.0001209004208656705, + "loss": 0.2865, + "step": 30730 + }, + { + "epoch": 1.1869184138383722, + "grad_norm": 0.8761110305786133, + "learning_rate": 0.0001208746798460687, + "loss": 0.2574, + "step": 30740 + }, + { + "epoch": 1.187304529132399, + "grad_norm": 0.6293401718139648, + "learning_rate": 0.00012084893882646693, + "loss": 0.4006, + "step": 30750 + }, + { + "epoch": 1.1876906444264257, + "grad_norm": 2.4534378051757812, + "learning_rate": 0.00012082319780686513, + "loss": 0.2281, + "step": 30760 + }, + { + "epoch": 1.1880767597204525, + "grad_norm": 0.38392508029937744, + "learning_rate": 0.00012079745678726336, + "loss": 0.3055, + "step": 30770 + }, + { + "epoch": 1.1884628750144794, + "grad_norm": 1.278108835220337, + "learning_rate": 0.00012077171576766156, + "loss": 0.2376, + "step": 30780 + }, + { + "epoch": 1.1888489903085062, + "grad_norm": 0.2575186789035797, + "learning_rate": 0.00012074597474805977, + "loss": 0.256, + "step": 30790 + }, + { + "epoch": 1.189235105602533, + "grad_norm": 4.043684959411621, + "learning_rate": 0.000120720233728458, + "loss": 0.3601, + "step": 30800 + }, + { + "epoch": 1.1896212208965598, + "grad_norm": 0.2561960220336914, + "learning_rate": 0.0001206944927088562, + "loss": 0.2698, + "step": 30810 + }, + { + "epoch": 1.1900073361905865, + "grad_norm": 0.3477579355239868, + "learning_rate": 0.00012066875168925442, + "loss": 0.3442, + "step": 30820 + }, + { + "epoch": 1.1903934514846133, + "grad_norm": 1.0357879400253296, + "learning_rate": 0.00012064301066965262, + "loss": 0.2561, + "step": 30830 + }, + { + "epoch": 1.19077956677864, + "grad_norm": 0.14653460681438446, + "learning_rate": 0.00012061726965005085, + "loss": 0.3072, + "step": 30840 + }, + { + "epoch": 1.1911656820726668, + "grad_norm": 1.2516767978668213, + "learning_rate": 0.00012059152863044906, + "loss": 0.4203, + "step": 30850 + }, + { + "epoch": 1.1915517973666936, + "grad_norm": 0.41542065143585205, + "learning_rate": 0.00012056578761084726, + "loss": 0.2018, + "step": 30860 + }, + { + "epoch": 1.1919379126607206, + "grad_norm": 1.7187318801879883, + "learning_rate": 0.00012054004659124549, + "loss": 0.097, + "step": 30870 + }, + { + "epoch": 1.1923240279547473, + "grad_norm": 0.3913117051124573, + "learning_rate": 0.00012051430557164369, + "loss": 0.1655, + "step": 30880 + }, + { + "epoch": 1.1927101432487741, + "grad_norm": 3.708519697189331, + "learning_rate": 0.00012048856455204192, + "loss": 0.2315, + "step": 
30890 + }, + { + "epoch": 1.1930962585428009, + "grad_norm": 1.506726861000061, + "learning_rate": 0.00012046282353244012, + "loss": 0.1712, + "step": 30900 + }, + { + "epoch": 1.1934823738368276, + "grad_norm": 1.5069276094436646, + "learning_rate": 0.00012043708251283834, + "loss": 0.399, + "step": 30910 + }, + { + "epoch": 1.1938684891308544, + "grad_norm": 0.09606973081827164, + "learning_rate": 0.00012041134149323656, + "loss": 0.1226, + "step": 30920 + }, + { + "epoch": 1.1942546044248812, + "grad_norm": 1.7927659749984741, + "learning_rate": 0.00012038560047363476, + "loss": 0.0975, + "step": 30930 + }, + { + "epoch": 1.1946407197189082, + "grad_norm": 2.2988364696502686, + "learning_rate": 0.00012035985945403298, + "loss": 0.2742, + "step": 30940 + }, + { + "epoch": 1.195026835012935, + "grad_norm": 0.4412599205970764, + "learning_rate": 0.00012033411843443118, + "loss": 0.2008, + "step": 30950 + }, + { + "epoch": 1.1954129503069617, + "grad_norm": 3.227698564529419, + "learning_rate": 0.00012030837741482941, + "loss": 0.2797, + "step": 30960 + }, + { + "epoch": 1.1957990656009885, + "grad_norm": 1.3904643058776855, + "learning_rate": 0.00012028263639522761, + "loss": 0.1636, + "step": 30970 + }, + { + "epoch": 1.1961851808950152, + "grad_norm": 1.6495708227157593, + "learning_rate": 0.00012025689537562584, + "loss": 0.2768, + "step": 30980 + }, + { + "epoch": 1.196571296189042, + "grad_norm": 0.3142000436782837, + "learning_rate": 0.00012023115435602405, + "loss": 0.1797, + "step": 30990 + }, + { + "epoch": 1.1969574114830688, + "grad_norm": 1.562090516090393, + "learning_rate": 0.00012020541333642225, + "loss": 0.3175, + "step": 31000 + }, + { + "epoch": 1.1973435267770955, + "grad_norm": 1.3837881088256836, + "learning_rate": 0.00012017967231682048, + "loss": 0.3137, + "step": 31010 + }, + { + "epoch": 1.1977296420711223, + "grad_norm": 1.1495468616485596, + "learning_rate": 0.00012015393129721868, + "loss": 0.232, + "step": 31020 + }, + { + "epoch": 1.1981157573651493, + "grad_norm": 1.562514305114746, + "learning_rate": 0.0001201281902776169, + "loss": 0.3455, + "step": 31030 + }, + { + "epoch": 1.198501872659176, + "grad_norm": 1.626610517501831, + "learning_rate": 0.00012010244925801513, + "loss": 0.2382, + "step": 31040 + }, + { + "epoch": 1.1988879879532028, + "grad_norm": 2.5863585472106934, + "learning_rate": 0.00012007670823841333, + "loss": 0.288, + "step": 31050 + }, + { + "epoch": 1.1992741032472296, + "grad_norm": 0.9751160740852356, + "learning_rate": 0.00012005096721881154, + "loss": 0.3236, + "step": 31060 + }, + { + "epoch": 1.1996602185412564, + "grad_norm": 1.7428686618804932, + "learning_rate": 0.00012002522619920974, + "loss": 0.3707, + "step": 31070 + }, + { + "epoch": 1.2000463338352831, + "grad_norm": 1.0963342189788818, + "learning_rate": 0.00011999948517960797, + "loss": 0.2991, + "step": 31080 + }, + { + "epoch": 1.20043244912931, + "grad_norm": 2.0942695140838623, + "learning_rate": 0.00011997374416000617, + "loss": 0.2611, + "step": 31090 + }, + { + "epoch": 1.200818564423337, + "grad_norm": 0.11989244073629379, + "learning_rate": 0.0001199480031404044, + "loss": 0.2071, + "step": 31100 + }, + { + "epoch": 1.2012046797173637, + "grad_norm": 0.44892773032188416, + "learning_rate": 0.00011992226212080262, + "loss": 0.2105, + "step": 31110 + }, + { + "epoch": 1.2015907950113904, + "grad_norm": 0.0861232578754425, + "learning_rate": 0.00011989652110120082, + "loss": 0.1906, + "step": 31120 + }, + { + "epoch": 1.2019769103054172, + "grad_norm": 
1.9162683486938477, + "learning_rate": 0.00011987078008159904, + "loss": 0.1929, + "step": 31130 + }, + { + "epoch": 1.202363025599444, + "grad_norm": 1.3750224113464355, + "learning_rate": 0.00011984503906199725, + "loss": 0.3072, + "step": 31140 + }, + { + "epoch": 1.2027491408934707, + "grad_norm": 0.49282577633857727, + "learning_rate": 0.00011981929804239546, + "loss": 0.3082, + "step": 31150 + }, + { + "epoch": 1.2031352561874975, + "grad_norm": 1.0578473806381226, + "learning_rate": 0.00011979355702279366, + "loss": 0.2498, + "step": 31160 + }, + { + "epoch": 1.2035213714815245, + "grad_norm": 0.6070756316184998, + "learning_rate": 0.00011976781600319189, + "loss": 0.2324, + "step": 31170 + }, + { + "epoch": 1.2039074867755513, + "grad_norm": 3.2918875217437744, + "learning_rate": 0.00011974207498359012, + "loss": 0.4218, + "step": 31180 + }, + { + "epoch": 1.204293602069578, + "grad_norm": 0.3284684419631958, + "learning_rate": 0.00011971633396398832, + "loss": 0.2713, + "step": 31190 + }, + { + "epoch": 1.2046797173636048, + "grad_norm": 0.8733110427856445, + "learning_rate": 0.00011969059294438653, + "loss": 0.328, + "step": 31200 + }, + { + "epoch": 1.2050658326576316, + "grad_norm": 1.787016749382019, + "learning_rate": 0.00011966485192478474, + "loss": 0.2115, + "step": 31210 + }, + { + "epoch": 1.2054519479516583, + "grad_norm": 0.7539357542991638, + "learning_rate": 0.00011963911090518296, + "loss": 0.3585, + "step": 31220 + }, + { + "epoch": 1.205838063245685, + "grad_norm": 0.32271450757980347, + "learning_rate": 0.00011961336988558118, + "loss": 0.2472, + "step": 31230 + }, + { + "epoch": 1.2062241785397119, + "grad_norm": 0.85898756980896, + "learning_rate": 0.00011958762886597938, + "loss": 0.2367, + "step": 31240 + }, + { + "epoch": 1.2066102938337386, + "grad_norm": 0.6451173424720764, + "learning_rate": 0.00011956188784637761, + "loss": 0.2309, + "step": 31250 + }, + { + "epoch": 1.2069964091277656, + "grad_norm": 2.1043314933776855, + "learning_rate": 0.00011953614682677581, + "loss": 0.316, + "step": 31260 + }, + { + "epoch": 1.2073825244217924, + "grad_norm": 2.9022254943847656, + "learning_rate": 0.00011951040580717404, + "loss": 0.3161, + "step": 31270 + }, + { + "epoch": 1.2077686397158192, + "grad_norm": 0.7839256525039673, + "learning_rate": 0.00011948466478757224, + "loss": 0.4014, + "step": 31280 + }, + { + "epoch": 1.208154755009846, + "grad_norm": 1.024190902709961, + "learning_rate": 0.00011945892376797045, + "loss": 0.2333, + "step": 31290 + }, + { + "epoch": 1.2085408703038727, + "grad_norm": 1.433605670928955, + "learning_rate": 0.00011943318274836868, + "loss": 0.2014, + "step": 31300 + }, + { + "epoch": 1.2089269855978995, + "grad_norm": 0.7208861112594604, + "learning_rate": 0.00011940744172876688, + "loss": 0.1786, + "step": 31310 + }, + { + "epoch": 1.2093131008919262, + "grad_norm": 2.5464839935302734, + "learning_rate": 0.0001193817007091651, + "loss": 0.3646, + "step": 31320 + }, + { + "epoch": 1.2096992161859532, + "grad_norm": 1.6425135135650635, + "learning_rate": 0.0001193559596895633, + "loss": 0.2603, + "step": 31330 + }, + { + "epoch": 1.21008533147998, + "grad_norm": 1.7573461532592773, + "learning_rate": 0.00011933021866996153, + "loss": 0.315, + "step": 31340 + }, + { + "epoch": 1.2104714467740068, + "grad_norm": 1.884445071220398, + "learning_rate": 0.00011930447765035974, + "loss": 0.2326, + "step": 31350 + }, + { + "epoch": 1.2108575620680335, + "grad_norm": 1.2781214714050293, + "learning_rate": 0.00011927873663075794, + "loss": 
0.2248, + "step": 31360 + }, + { + "epoch": 1.2112436773620603, + "grad_norm": 0.5210689902305603, + "learning_rate": 0.00011925299561115617, + "loss": 0.2517, + "step": 31370 + }, + { + "epoch": 1.211629792656087, + "grad_norm": 0.4265996515750885, + "learning_rate": 0.00011922725459155437, + "loss": 0.257, + "step": 31380 + }, + { + "epoch": 1.2120159079501138, + "grad_norm": 0.7833511233329773, + "learning_rate": 0.0001192015135719526, + "loss": 0.3117, + "step": 31390 + }, + { + "epoch": 1.2124020232441408, + "grad_norm": 5.922067165374756, + "learning_rate": 0.0001191757725523508, + "loss": 0.2949, + "step": 31400 + }, + { + "epoch": 1.2127881385381676, + "grad_norm": 3.2208797931671143, + "learning_rate": 0.00011915003153274902, + "loss": 0.3616, + "step": 31410 + }, + { + "epoch": 1.2131742538321943, + "grad_norm": 1.6283681392669678, + "learning_rate": 0.00011912429051314724, + "loss": 0.1951, + "step": 31420 + }, + { + "epoch": 1.2135603691262211, + "grad_norm": 1.2450298070907593, + "learning_rate": 0.00011909854949354544, + "loss": 0.2313, + "step": 31430 + }, + { + "epoch": 1.2139464844202479, + "grad_norm": 0.8394368290901184, + "learning_rate": 0.00011907280847394366, + "loss": 0.2993, + "step": 31440 + }, + { + "epoch": 1.2143325997142747, + "grad_norm": 0.6099762320518494, + "learning_rate": 0.00011904706745434186, + "loss": 0.1527, + "step": 31450 + }, + { + "epoch": 1.2147187150083014, + "grad_norm": 0.7743226885795593, + "learning_rate": 0.00011902132643474009, + "loss": 0.2608, + "step": 31460 + }, + { + "epoch": 1.2151048303023282, + "grad_norm": 3.343604326248169, + "learning_rate": 0.00011899558541513829, + "loss": 0.3031, + "step": 31470 + }, + { + "epoch": 1.215490945596355, + "grad_norm": 1.016830563545227, + "learning_rate": 0.00011896984439553652, + "loss": 0.2146, + "step": 31480 + }, + { + "epoch": 1.215877060890382, + "grad_norm": 1.0264688730239868, + "learning_rate": 0.00011894410337593473, + "loss": 0.2905, + "step": 31490 + }, + { + "epoch": 1.2162631761844087, + "grad_norm": 1.5718705654144287, + "learning_rate": 0.00011891836235633293, + "loss": 0.2928, + "step": 31500 + }, + { + "epoch": 1.2166492914784355, + "grad_norm": 0.9886181950569153, + "learning_rate": 0.00011889262133673116, + "loss": 0.2129, + "step": 31510 + }, + { + "epoch": 1.2170354067724622, + "grad_norm": 0.6496497392654419, + "learning_rate": 0.00011886688031712936, + "loss": 0.2388, + "step": 31520 + }, + { + "epoch": 1.217421522066489, + "grad_norm": 2.0419809818267822, + "learning_rate": 0.00011884113929752758, + "loss": 0.1835, + "step": 31530 + }, + { + "epoch": 1.2178076373605158, + "grad_norm": 0.41370299458503723, + "learning_rate": 0.00011881539827792581, + "loss": 0.3099, + "step": 31540 + }, + { + "epoch": 1.2181937526545425, + "grad_norm": 0.6608594655990601, + "learning_rate": 0.00011878965725832401, + "loss": 0.4235, + "step": 31550 + }, + { + "epoch": 1.2185798679485695, + "grad_norm": 0.4206163287162781, + "learning_rate": 0.00011876391623872222, + "loss": 0.2384, + "step": 31560 + }, + { + "epoch": 1.2189659832425963, + "grad_norm": 0.2976207137107849, + "learning_rate": 0.00011873817521912042, + "loss": 0.4792, + "step": 31570 + }, + { + "epoch": 1.219352098536623, + "grad_norm": 2.237607002258301, + "learning_rate": 0.00011871243419951865, + "loss": 0.4694, + "step": 31580 + }, + { + "epoch": 1.2197382138306498, + "grad_norm": 2.1085827350616455, + "learning_rate": 0.00011868669317991685, + "loss": 0.3475, + "step": 31590 + }, + { + "epoch": 1.2201243291246766, + 
"grad_norm": 1.5770317316055298, + "learning_rate": 0.00011866095216031508, + "loss": 0.3676, + "step": 31600 + }, + { + "epoch": 1.2205104444187034, + "grad_norm": 1.7337925434112549, + "learning_rate": 0.0001186352111407133, + "loss": 0.2663, + "step": 31610 + }, + { + "epoch": 1.2208965597127301, + "grad_norm": 0.23846319317817688, + "learning_rate": 0.0001186094701211115, + "loss": 0.1916, + "step": 31620 + }, + { + "epoch": 1.2212826750067571, + "grad_norm": 1.1316229104995728, + "learning_rate": 0.00011858372910150972, + "loss": 0.1924, + "step": 31630 + }, + { + "epoch": 1.221668790300784, + "grad_norm": 0.7642451524734497, + "learning_rate": 0.00011855798808190792, + "loss": 0.3786, + "step": 31640 + }, + { + "epoch": 1.2220549055948107, + "grad_norm": 1.975182056427002, + "learning_rate": 0.00011853224706230614, + "loss": 0.2646, + "step": 31650 + }, + { + "epoch": 1.2224410208888374, + "grad_norm": 1.5172406435012817, + "learning_rate": 0.00011850650604270437, + "loss": 0.2824, + "step": 31660 + }, + { + "epoch": 1.2228271361828642, + "grad_norm": 1.5113869905471802, + "learning_rate": 0.00011848076502310257, + "loss": 0.2959, + "step": 31670 + }, + { + "epoch": 1.223213251476891, + "grad_norm": 2.380364418029785, + "learning_rate": 0.0001184550240035008, + "loss": 0.2425, + "step": 31680 + }, + { + "epoch": 1.2235993667709177, + "grad_norm": 0.11588902771472931, + "learning_rate": 0.000118429282983899, + "loss": 0.1665, + "step": 31690 + }, + { + "epoch": 1.2239854820649445, + "grad_norm": 2.045466184616089, + "learning_rate": 0.00011840354196429721, + "loss": 0.2478, + "step": 31700 + }, + { + "epoch": 1.2243715973589713, + "grad_norm": 4.201779365539551, + "learning_rate": 0.00011837780094469542, + "loss": 0.3295, + "step": 31710 + }, + { + "epoch": 1.2247577126529983, + "grad_norm": 1.1080553531646729, + "learning_rate": 0.00011835205992509364, + "loss": 0.2102, + "step": 31720 + }, + { + "epoch": 1.225143827947025, + "grad_norm": 0.3263179659843445, + "learning_rate": 0.00011832631890549186, + "loss": 0.2822, + "step": 31730 + }, + { + "epoch": 1.2255299432410518, + "grad_norm": 0.7899855971336365, + "learning_rate": 0.00011830057788589006, + "loss": 0.1462, + "step": 31740 + }, + { + "epoch": 1.2259160585350786, + "grad_norm": 1.330854058265686, + "learning_rate": 0.00011827483686628829, + "loss": 0.3883, + "step": 31750 + }, + { + "epoch": 1.2263021738291053, + "grad_norm": 0.40446966886520386, + "learning_rate": 0.00011824909584668649, + "loss": 0.1592, + "step": 31760 + }, + { + "epoch": 1.226688289123132, + "grad_norm": 0.49187901616096497, + "learning_rate": 0.0001182233548270847, + "loss": 0.0698, + "step": 31770 + }, + { + "epoch": 1.2270744044171589, + "grad_norm": 4.227303504943848, + "learning_rate": 0.00011819761380748292, + "loss": 0.3945, + "step": 31780 + }, + { + "epoch": 1.2274605197111859, + "grad_norm": 2.520578622817993, + "learning_rate": 0.00011817187278788113, + "loss": 0.4552, + "step": 31790 + }, + { + "epoch": 1.2278466350052126, + "grad_norm": 0.9486772418022156, + "learning_rate": 0.00011814613176827936, + "loss": 0.232, + "step": 31800 + }, + { + "epoch": 1.2282327502992394, + "grad_norm": 0.8336694836616516, + "learning_rate": 0.00011812039074867756, + "loss": 0.183, + "step": 31810 + }, + { + "epoch": 1.2286188655932662, + "grad_norm": 1.576710820198059, + "learning_rate": 0.00011809464972907578, + "loss": 0.4116, + "step": 31820 + }, + { + "epoch": 1.229004980887293, + "grad_norm": 0.2388358861207962, + "learning_rate": 0.00011806890870947398, 
+ "loss": 0.3509, + "step": 31830 + }, + { + "epoch": 1.2293910961813197, + "grad_norm": 1.054880142211914, + "learning_rate": 0.00011804316768987221, + "loss": 0.1669, + "step": 31840 + }, + { + "epoch": 1.2297772114753465, + "grad_norm": 0.7959414720535278, + "learning_rate": 0.00011801742667027042, + "loss": 0.2073, + "step": 31850 + }, + { + "epoch": 1.2301633267693735, + "grad_norm": 2.480940103530884, + "learning_rate": 0.00011799168565066862, + "loss": 0.3589, + "step": 31860 + }, + { + "epoch": 1.2305494420634002, + "grad_norm": 0.9381177425384521, + "learning_rate": 0.00011796594463106685, + "loss": 0.2576, + "step": 31870 + }, + { + "epoch": 1.230935557357427, + "grad_norm": 9.548282623291016, + "learning_rate": 0.00011794020361146505, + "loss": 0.1755, + "step": 31880 + }, + { + "epoch": 1.2313216726514538, + "grad_norm": 0.4922927916049957, + "learning_rate": 0.00011791446259186328, + "loss": 0.2173, + "step": 31890 + }, + { + "epoch": 1.2317077879454805, + "grad_norm": 1.2846627235412598, + "learning_rate": 0.00011788872157226148, + "loss": 0.2039, + "step": 31900 + }, + { + "epoch": 1.2320939032395073, + "grad_norm": 0.9321876764297485, + "learning_rate": 0.0001178629805526597, + "loss": 0.2418, + "step": 31910 + }, + { + "epoch": 1.232480018533534, + "grad_norm": 1.373787760734558, + "learning_rate": 0.00011783723953305792, + "loss": 0.3065, + "step": 31920 + }, + { + "epoch": 1.2328661338275608, + "grad_norm": 1.915208339691162, + "learning_rate": 0.00011781149851345612, + "loss": 0.292, + "step": 31930 + }, + { + "epoch": 1.2332522491215876, + "grad_norm": 1.533010482788086, + "learning_rate": 0.00011778575749385434, + "loss": 0.2234, + "step": 31940 + }, + { + "epoch": 1.2336383644156146, + "grad_norm": 0.5740505456924438, + "learning_rate": 0.00011776001647425254, + "loss": 0.2513, + "step": 31950 + }, + { + "epoch": 1.2340244797096414, + "grad_norm": 3.164320230484009, + "learning_rate": 0.00011773427545465077, + "loss": 0.4233, + "step": 31960 + }, + { + "epoch": 1.2344105950036681, + "grad_norm": 0.8309218287467957, + "learning_rate": 0.00011770853443504897, + "loss": 0.4629, + "step": 31970 + }, + { + "epoch": 1.2347967102976949, + "grad_norm": 2.0942423343658447, + "learning_rate": 0.0001176827934154472, + "loss": 0.3381, + "step": 31980 + }, + { + "epoch": 1.2351828255917217, + "grad_norm": 0.4961155354976654, + "learning_rate": 0.00011765705239584541, + "loss": 0.1265, + "step": 31990 + }, + { + "epoch": 1.2355689408857484, + "grad_norm": 1.6777870655059814, + "learning_rate": 0.00011763131137624361, + "loss": 0.2503, + "step": 32000 + }, + { + "epoch": 1.2359550561797752, + "grad_norm": 0.6016990542411804, + "learning_rate": 0.00011760557035664184, + "loss": 0.3944, + "step": 32010 + }, + { + "epoch": 1.2363411714738022, + "grad_norm": 1.9123533964157104, + "learning_rate": 0.00011757982933704004, + "loss": 0.463, + "step": 32020 + }, + { + "epoch": 1.236727286767829, + "grad_norm": 1.5328630208969116, + "learning_rate": 0.00011755408831743826, + "loss": 0.3368, + "step": 32030 + }, + { + "epoch": 1.2371134020618557, + "grad_norm": 1.0984220504760742, + "learning_rate": 0.00011752834729783649, + "loss": 0.1987, + "step": 32040 + }, + { + "epoch": 1.2374995173558825, + "grad_norm": 3.1570327281951904, + "learning_rate": 0.00011750260627823469, + "loss": 0.454, + "step": 32050 + }, + { + "epoch": 1.2378856326499093, + "grad_norm": 3.7589874267578125, + "learning_rate": 0.0001174768652586329, + "loss": 0.2953, + "step": 32060 + }, + { + "epoch": 1.238271747943936, + 
"grad_norm": 1.3289847373962402, + "learning_rate": 0.0001174511242390311, + "loss": 0.3058, + "step": 32070 + }, + { + "epoch": 1.2386578632379628, + "grad_norm": 0.9872431755065918, + "learning_rate": 0.00011742538321942933, + "loss": 0.1312, + "step": 32080 + }, + { + "epoch": 1.2390439785319898, + "grad_norm": 1.799133062362671, + "learning_rate": 0.00011739964219982753, + "loss": 0.3271, + "step": 32090 + }, + { + "epoch": 1.2394300938260165, + "grad_norm": 0.13501858711242676, + "learning_rate": 0.00011737390118022576, + "loss": 0.4462, + "step": 32100 + }, + { + "epoch": 1.2398162091200433, + "grad_norm": 1.53009033203125, + "learning_rate": 0.00011734816016062398, + "loss": 0.1311, + "step": 32110 + }, + { + "epoch": 1.24020232441407, + "grad_norm": 0.5737781524658203, + "learning_rate": 0.00011732241914102218, + "loss": 0.1705, + "step": 32120 + }, + { + "epoch": 1.2405884397080968, + "grad_norm": 2.095947027206421, + "learning_rate": 0.0001172966781214204, + "loss": 0.2195, + "step": 32130 + }, + { + "epoch": 1.2409745550021236, + "grad_norm": 2.2158639430999756, + "learning_rate": 0.0001172709371018186, + "loss": 0.2964, + "step": 32140 + }, + { + "epoch": 1.2413606702961504, + "grad_norm": 0.3154670000076294, + "learning_rate": 0.00011724519608221682, + "loss": 0.245, + "step": 32150 + }, + { + "epoch": 1.2417467855901771, + "grad_norm": 1.4467942714691162, + "learning_rate": 0.00011721945506261505, + "loss": 0.3469, + "step": 32160 + }, + { + "epoch": 1.242132900884204, + "grad_norm": 0.24252896010875702, + "learning_rate": 0.00011719371404301325, + "loss": 0.2217, + "step": 32170 + }, + { + "epoch": 1.242519016178231, + "grad_norm": 2.4256465435028076, + "learning_rate": 0.00011716797302341148, + "loss": 0.4707, + "step": 32180 + }, + { + "epoch": 1.2429051314722577, + "grad_norm": 0.5917278528213501, + "learning_rate": 0.00011714223200380968, + "loss": 0.2284, + "step": 32190 + }, + { + "epoch": 1.2432912467662844, + "grad_norm": 1.2977256774902344, + "learning_rate": 0.00011711649098420789, + "loss": 0.3612, + "step": 32200 + }, + { + "epoch": 1.2436773620603112, + "grad_norm": 1.296558141708374, + "learning_rate": 0.00011709074996460609, + "loss": 0.2231, + "step": 32210 + }, + { + "epoch": 1.244063477354338, + "grad_norm": 1.6559596061706543, + "learning_rate": 0.00011706500894500432, + "loss": 0.2636, + "step": 32220 + }, + { + "epoch": 1.2444495926483647, + "grad_norm": 1.6342560052871704, + "learning_rate": 0.00011703926792540254, + "loss": 0.1276, + "step": 32230 + }, + { + "epoch": 1.2448357079423915, + "grad_norm": 1.1173146963119507, + "learning_rate": 0.00011701352690580074, + "loss": 0.1719, + "step": 32240 + }, + { + "epoch": 1.2452218232364185, + "grad_norm": 0.29269275069236755, + "learning_rate": 0.00011698778588619897, + "loss": 0.3323, + "step": 32250 + }, + { + "epoch": 1.2456079385304453, + "grad_norm": 2.518568515777588, + "learning_rate": 0.00011696204486659717, + "loss": 0.3302, + "step": 32260 + }, + { + "epoch": 1.245994053824472, + "grad_norm": 2.535940647125244, + "learning_rate": 0.00011693630384699538, + "loss": 0.1488, + "step": 32270 + }, + { + "epoch": 1.2463801691184988, + "grad_norm": 0.384199321269989, + "learning_rate": 0.00011691056282739358, + "loss": 0.0957, + "step": 32280 + }, + { + "epoch": 1.2467662844125256, + "grad_norm": 3.7460570335388184, + "learning_rate": 0.00011688482180779181, + "loss": 0.311, + "step": 32290 + }, + { + "epoch": 1.2471523997065523, + "grad_norm": 1.285152792930603, + "learning_rate": 0.00011685908078819004, 
+ "loss": 0.1872, + "step": 32300 + }, + { + "epoch": 1.247538515000579, + "grad_norm": 0.6118050217628479, + "learning_rate": 0.00011683333976858823, + "loss": 0.1635, + "step": 32310 + }, + { + "epoch": 1.2479246302946059, + "grad_norm": 2.9890856742858887, + "learning_rate": 0.00011680759874898646, + "loss": 0.2773, + "step": 32320 + }, + { + "epoch": 1.2483107455886326, + "grad_norm": 0.15415504574775696, + "learning_rate": 0.00011678185772938466, + "loss": 0.3014, + "step": 32330 + }, + { + "epoch": 1.2486968608826596, + "grad_norm": 1.2717432975769043, + "learning_rate": 0.00011675611670978287, + "loss": 0.2442, + "step": 32340 + }, + { + "epoch": 1.2490829761766864, + "grad_norm": 0.19885335862636566, + "learning_rate": 0.0001167303756901811, + "loss": 0.1894, + "step": 32350 + }, + { + "epoch": 1.2494690914707132, + "grad_norm": 1.425176978111267, + "learning_rate": 0.0001167046346705793, + "loss": 0.3008, + "step": 32360 + }, + { + "epoch": 1.24985520676474, + "grad_norm": 1.9033544063568115, + "learning_rate": 0.00011667889365097753, + "loss": 0.3072, + "step": 32370 + }, + { + "epoch": 1.2502413220587667, + "grad_norm": 0.6677396297454834, + "learning_rate": 0.00011665315263137573, + "loss": 0.2378, + "step": 32380 + }, + { + "epoch": 1.2506274373527935, + "grad_norm": 0.4491410255432129, + "learning_rate": 0.00011662741161177396, + "loss": 0.1071, + "step": 32390 + }, + { + "epoch": 1.2510135526468202, + "grad_norm": 2.7697031497955322, + "learning_rate": 0.00011660167059217215, + "loss": 0.3276, + "step": 32400 + }, + { + "epoch": 1.2513996679408472, + "grad_norm": 1.4374775886535645, + "learning_rate": 0.00011657592957257037, + "loss": 0.2459, + "step": 32410 + }, + { + "epoch": 1.251785783234874, + "grad_norm": 1.5245740413665771, + "learning_rate": 0.0001165501885529686, + "loss": 0.3841, + "step": 32420 + }, + { + "epoch": 1.2521718985289008, + "grad_norm": 0.5069687366485596, + "learning_rate": 0.0001165244475333668, + "loss": 0.1425, + "step": 32430 + }, + { + "epoch": 1.2525580138229275, + "grad_norm": 0.6181765198707581, + "learning_rate": 0.00011649870651376502, + "loss": 0.4184, + "step": 32440 + }, + { + "epoch": 1.2529441291169543, + "grad_norm": 2.007375717163086, + "learning_rate": 0.00011647296549416322, + "loss": 0.1754, + "step": 32450 + }, + { + "epoch": 1.253330244410981, + "grad_norm": 0.27261993288993835, + "learning_rate": 0.00011644722447456145, + "loss": 0.3608, + "step": 32460 + }, + { + "epoch": 1.2537163597050078, + "grad_norm": 0.3452116549015045, + "learning_rate": 0.00011642148345495965, + "loss": 0.1913, + "step": 32470 + }, + { + "epoch": 1.2541024749990348, + "grad_norm": 2.553738594055176, + "learning_rate": 0.00011639574243535787, + "loss": 0.2221, + "step": 32480 + }, + { + "epoch": 1.2544885902930616, + "grad_norm": 0.795706033706665, + "learning_rate": 0.00011637000141575609, + "loss": 0.2348, + "step": 32490 + }, + { + "epoch": 1.2548747055870884, + "grad_norm": 0.519059956073761, + "learning_rate": 0.00011634426039615429, + "loss": 0.2874, + "step": 32500 + }, + { + "epoch": 1.2552608208811151, + "grad_norm": 2.7050392627716064, + "learning_rate": 0.00011631851937655251, + "loss": 0.3821, + "step": 32510 + }, + { + "epoch": 1.255646936175142, + "grad_norm": 2.09036922454834, + "learning_rate": 0.00011629277835695071, + "loss": 0.2562, + "step": 32520 + }, + { + "epoch": 1.2560330514691687, + "grad_norm": 1.156534194946289, + "learning_rate": 0.00011626703733734894, + "loss": 0.1373, + "step": 32530 + }, + { + "epoch": 1.2564191667631954, 
+ "grad_norm": 2.5214719772338867, + "learning_rate": 0.00011624129631774715, + "loss": 0.2955, + "step": 32540 + }, + { + "epoch": 1.2568052820572224, + "grad_norm": 2.71368408203125, + "learning_rate": 0.00011621555529814537, + "loss": 0.2832, + "step": 32550 + }, + { + "epoch": 1.257191397351249, + "grad_norm": 1.488390564918518, + "learning_rate": 0.00011618981427854358, + "loss": 0.2369, + "step": 32560 + }, + { + "epoch": 1.257577512645276, + "grad_norm": 1.7705353498458862, + "learning_rate": 0.00011616407325894178, + "loss": 0.4351, + "step": 32570 + }, + { + "epoch": 1.2579636279393027, + "grad_norm": 4.3224406242370605, + "learning_rate": 0.00011613833223934001, + "loss": 0.3365, + "step": 32580 + }, + { + "epoch": 1.2583497432333295, + "grad_norm": 0.9157351851463318, + "learning_rate": 0.00011611259121973821, + "loss": 0.2882, + "step": 32590 + }, + { + "epoch": 1.2587358585273563, + "grad_norm": 2.9030823707580566, + "learning_rate": 0.00011608685020013643, + "loss": 0.343, + "step": 32600 + }, + { + "epoch": 1.259121973821383, + "grad_norm": 0.807424783706665, + "learning_rate": 0.00011606110918053466, + "loss": 0.2413, + "step": 32610 + }, + { + "epoch": 1.2595080891154098, + "grad_norm": 1.8564451932907104, + "learning_rate": 0.00011603536816093286, + "loss": 0.5403, + "step": 32620 + }, + { + "epoch": 1.2598942044094366, + "grad_norm": 0.8169263005256653, + "learning_rate": 0.00011600962714133107, + "loss": 0.3522, + "step": 32630 + }, + { + "epoch": 1.2602803197034635, + "grad_norm": 0.9638017416000366, + "learning_rate": 0.00011598388612172927, + "loss": 0.3295, + "step": 32640 + }, + { + "epoch": 1.2606664349974903, + "grad_norm": 1.8613898754119873, + "learning_rate": 0.0001159581451021275, + "loss": 0.3107, + "step": 32650 + }, + { + "epoch": 1.261052550291517, + "grad_norm": 2.582638740539551, + "learning_rate": 0.00011593240408252573, + "loss": 0.2767, + "step": 32660 + }, + { + "epoch": 1.2614386655855439, + "grad_norm": 1.8227603435516357, + "learning_rate": 0.00011590666306292393, + "loss": 0.2324, + "step": 32670 + }, + { + "epoch": 1.2618247808795706, + "grad_norm": 0.3827721178531647, + "learning_rate": 0.00011588092204332215, + "loss": 0.4926, + "step": 32680 + }, + { + "epoch": 1.2622108961735974, + "grad_norm": 1.5523405075073242, + "learning_rate": 0.00011585518102372035, + "loss": 0.4475, + "step": 32690 + }, + { + "epoch": 1.2625970114676242, + "grad_norm": 0.25254619121551514, + "learning_rate": 0.00011582944000411857, + "loss": 0.2333, + "step": 32700 + }, + { + "epoch": 1.2629831267616511, + "grad_norm": 0.5530809164047241, + "learning_rate": 0.00011580369898451677, + "loss": 0.2781, + "step": 32710 + }, + { + "epoch": 1.2633692420556777, + "grad_norm": 0.3202857971191406, + "learning_rate": 0.000115777957964915, + "loss": 0.2596, + "step": 32720 + }, + { + "epoch": 1.2637553573497047, + "grad_norm": 0.25663653016090393, + "learning_rate": 0.00011575221694531322, + "loss": 0.2096, + "step": 32730 + }, + { + "epoch": 1.2641414726437314, + "grad_norm": 2.423585891723633, + "learning_rate": 0.00011572647592571142, + "loss": 0.3064, + "step": 32740 + }, + { + "epoch": 1.2645275879377582, + "grad_norm": 0.2529740631580353, + "learning_rate": 0.00011570073490610965, + "loss": 0.1426, + "step": 32750 + }, + { + "epoch": 1.264913703231785, + "grad_norm": 0.5238136053085327, + "learning_rate": 0.00011567499388650785, + "loss": 0.1598, + "step": 32760 + }, + { + "epoch": 1.2652998185258117, + "grad_norm": 0.4495049715042114, + "learning_rate": 
0.00011564925286690606, + "loss": 0.1712, + "step": 32770 + }, + { + "epoch": 1.2656859338198387, + "grad_norm": 0.3863711953163147, + "learning_rate": 0.00011562351184730426, + "loss": 0.1015, + "step": 32780 + }, + { + "epoch": 1.2660720491138653, + "grad_norm": 1.3139948844909668, + "learning_rate": 0.00011559777082770249, + "loss": 0.3982, + "step": 32790 + }, + { + "epoch": 1.2664581644078923, + "grad_norm": 0.40500277280807495, + "learning_rate": 0.00011557202980810071, + "loss": 0.4211, + "step": 32800 + }, + { + "epoch": 1.266844279701919, + "grad_norm": 0.9749487042427063, + "learning_rate": 0.00011554628878849891, + "loss": 0.2773, + "step": 32810 + }, + { + "epoch": 1.2672303949959458, + "grad_norm": 1.1194144487380981, + "learning_rate": 0.00011552054776889714, + "loss": 0.2771, + "step": 32820 + }, + { + "epoch": 1.2676165102899726, + "grad_norm": 1.4098531007766724, + "learning_rate": 0.00011549480674929534, + "loss": 0.2044, + "step": 32830 + }, + { + "epoch": 1.2680026255839993, + "grad_norm": 1.3184692859649658, + "learning_rate": 0.00011546906572969355, + "loss": 0.2633, + "step": 32840 + }, + { + "epoch": 1.268388740878026, + "grad_norm": 0.9776495099067688, + "learning_rate": 0.00011544332471009178, + "loss": 0.3212, + "step": 32850 + }, + { + "epoch": 1.2687748561720529, + "grad_norm": 1.1577869653701782, + "learning_rate": 0.00011541758369048998, + "loss": 0.4271, + "step": 32860 + }, + { + "epoch": 1.2691609714660799, + "grad_norm": 2.2742063999176025, + "learning_rate": 0.00011539184267088821, + "loss": 0.2044, + "step": 32870 + }, + { + "epoch": 1.2695470867601066, + "grad_norm": 1.2156758308410645, + "learning_rate": 0.00011536610165128641, + "loss": 0.2394, + "step": 32880 + }, + { + "epoch": 1.2699332020541334, + "grad_norm": 1.9160290956497192, + "learning_rate": 0.00011534036063168463, + "loss": 0.3733, + "step": 32890 + }, + { + "epoch": 1.2703193173481602, + "grad_norm": 1.3484338521957397, + "learning_rate": 0.00011531461961208283, + "loss": 0.3946, + "step": 32900 + }, + { + "epoch": 1.270705432642187, + "grad_norm": 1.0480244159698486, + "learning_rate": 0.00011528887859248105, + "loss": 0.166, + "step": 32910 + }, + { + "epoch": 1.2710915479362137, + "grad_norm": 1.1980483531951904, + "learning_rate": 0.00011526313757287927, + "loss": 0.3134, + "step": 32920 + }, + { + "epoch": 1.2714776632302405, + "grad_norm": 1.2777379751205444, + "learning_rate": 0.00011523739655327747, + "loss": 0.2615, + "step": 32930 + }, + { + "epoch": 1.2718637785242675, + "grad_norm": 1.559792399406433, + "learning_rate": 0.0001152116555336757, + "loss": 0.2996, + "step": 32940 + }, + { + "epoch": 1.272249893818294, + "grad_norm": 1.9589471817016602, + "learning_rate": 0.0001151859145140739, + "loss": 0.2683, + "step": 32950 + }, + { + "epoch": 1.272636009112321, + "grad_norm": 0.23336051404476166, + "learning_rate": 0.00011516017349447213, + "loss": 0.3306, + "step": 32960 + }, + { + "epoch": 1.2730221244063478, + "grad_norm": 1.5898360013961792, + "learning_rate": 0.00011513443247487034, + "loss": 0.2891, + "step": 32970 + }, + { + "epoch": 1.2734082397003745, + "grad_norm": 0.5046207904815674, + "learning_rate": 0.00011510869145526854, + "loss": 0.1843, + "step": 32980 + }, + { + "epoch": 1.2737943549944013, + "grad_norm": 0.16588473320007324, + "learning_rate": 0.00011508295043566677, + "loss": 0.1065, + "step": 32990 + }, + { + "epoch": 1.274180470288428, + "grad_norm": 1.5499800443649292, + "learning_rate": 0.00011505720941606497, + "loss": 0.2795, + "step": 33000 + }, + 
{ + "epoch": 1.274566585582455, + "grad_norm": 0.5019121766090393, + "learning_rate": 0.0001150314683964632, + "loss": 0.1857, + "step": 33010 + }, + { + "epoch": 1.2749527008764816, + "grad_norm": 0.258952260017395, + "learning_rate": 0.0001150057273768614, + "loss": 0.11, + "step": 33020 + }, + { + "epoch": 1.2753388161705086, + "grad_norm": 0.8540348410606384, + "learning_rate": 0.00011497998635725962, + "loss": 0.1852, + "step": 33030 + }, + { + "epoch": 1.2757249314645354, + "grad_norm": 0.08689398318529129, + "learning_rate": 0.00011495424533765783, + "loss": 0.2225, + "step": 33040 + }, + { + "epoch": 1.2761110467585621, + "grad_norm": 0.42253822088241577, + "learning_rate": 0.00011492850431805605, + "loss": 0.0751, + "step": 33050 + }, + { + "epoch": 1.276497162052589, + "grad_norm": 1.2964017391204834, + "learning_rate": 0.00011490276329845426, + "loss": 0.2384, + "step": 33060 + }, + { + "epoch": 1.2768832773466157, + "grad_norm": 0.5337836146354675, + "learning_rate": 0.00011487702227885246, + "loss": 0.1415, + "step": 33070 + }, + { + "epoch": 1.2772693926406424, + "grad_norm": 2.7771682739257812, + "learning_rate": 0.00011485128125925069, + "loss": 0.378, + "step": 33080 + }, + { + "epoch": 1.2776555079346692, + "grad_norm": 1.5107232332229614, + "learning_rate": 0.00011482554023964889, + "loss": 0.2482, + "step": 33090 + }, + { + "epoch": 1.2780416232286962, + "grad_norm": 0.6885499358177185, + "learning_rate": 0.00011479979922004711, + "loss": 0.2122, + "step": 33100 + }, + { + "epoch": 1.278427738522723, + "grad_norm": 0.9016557335853577, + "learning_rate": 0.00011477405820044533, + "loss": 0.2841, + "step": 33110 + }, + { + "epoch": 1.2788138538167497, + "grad_norm": 1.9532525539398193, + "learning_rate": 0.00011474831718084354, + "loss": 0.2281, + "step": 33120 + }, + { + "epoch": 1.2791999691107765, + "grad_norm": 2.1078782081604004, + "learning_rate": 0.00011472257616124175, + "loss": 0.2836, + "step": 33130 + }, + { + "epoch": 1.2795860844048033, + "grad_norm": 0.19830390810966492, + "learning_rate": 0.00011469683514163995, + "loss": 0.388, + "step": 33140 + }, + { + "epoch": 1.27997219969883, + "grad_norm": 0.17538850009441376, + "learning_rate": 0.00011467109412203818, + "loss": 0.3274, + "step": 33150 + }, + { + "epoch": 1.2803583149928568, + "grad_norm": 0.7402139902114868, + "learning_rate": 0.00011464535310243641, + "loss": 0.1979, + "step": 33160 + }, + { + "epoch": 1.2807444302868838, + "grad_norm": 0.2097146362066269, + "learning_rate": 0.00011461961208283461, + "loss": 0.2464, + "step": 33170 + }, + { + "epoch": 1.2811305455809103, + "grad_norm": 1.2441083192825317, + "learning_rate": 0.00011459387106323283, + "loss": 0.266, + "step": 33180 + }, + { + "epoch": 1.2815166608749373, + "grad_norm": 2.518852710723877, + "learning_rate": 0.00011456813004363103, + "loss": 0.253, + "step": 33190 + }, + { + "epoch": 1.281902776168964, + "grad_norm": 0.8078998327255249, + "learning_rate": 0.00011454238902402925, + "loss": 0.2361, + "step": 33200 + }, + { + "epoch": 1.2822888914629909, + "grad_norm": 1.2297371625900269, + "learning_rate": 0.00011451664800442745, + "loss": 0.1974, + "step": 33210 + }, + { + "epoch": 1.2826750067570176, + "grad_norm": 0.4303855895996094, + "learning_rate": 0.00011449090698482567, + "loss": 0.3563, + "step": 33220 + }, + { + "epoch": 1.2830611220510444, + "grad_norm": 1.3215210437774658, + "learning_rate": 0.0001144651659652239, + "loss": 0.2818, + "step": 33230 + }, + { + "epoch": 1.2834472373450712, + "grad_norm": 1.546265959739685, + 
"learning_rate": 0.0001144394249456221, + "loss": 0.5778, + "step": 33240 + }, + { + "epoch": 1.283833352639098, + "grad_norm": 0.8895953297615051, + "learning_rate": 0.00011441368392602033, + "loss": 0.2485, + "step": 33250 + }, + { + "epoch": 1.284219467933125, + "grad_norm": 0.7534870505332947, + "learning_rate": 0.00011438794290641853, + "loss": 0.2825, + "step": 33260 + }, + { + "epoch": 1.2846055832271517, + "grad_norm": 0.052820973098278046, + "learning_rate": 0.00011436220188681674, + "loss": 0.2191, + "step": 33270 + }, + { + "epoch": 1.2849916985211784, + "grad_norm": 0.9264475107192993, + "learning_rate": 0.00011433646086721494, + "loss": 0.181, + "step": 33280 + }, + { + "epoch": 1.2853778138152052, + "grad_norm": 0.2128441333770752, + "learning_rate": 0.00011431071984761317, + "loss": 0.1819, + "step": 33290 + }, + { + "epoch": 1.285763929109232, + "grad_norm": 0.5400950312614441, + "learning_rate": 0.0001142849788280114, + "loss": 0.4316, + "step": 33300 + }, + { + "epoch": 1.2861500444032588, + "grad_norm": 0.8033271431922913, + "learning_rate": 0.00011425923780840959, + "loss": 0.2146, + "step": 33310 + }, + { + "epoch": 1.2865361596972855, + "grad_norm": 2.012575149536133, + "learning_rate": 0.00011423349678880782, + "loss": 0.4335, + "step": 33320 + }, + { + "epoch": 1.2869222749913125, + "grad_norm": 0.7352376580238342, + "learning_rate": 0.00011420775576920602, + "loss": 0.2124, + "step": 33330 + }, + { + "epoch": 1.2873083902853393, + "grad_norm": 0.769036591053009, + "learning_rate": 0.00011418201474960423, + "loss": 0.3602, + "step": 33340 + }, + { + "epoch": 1.287694505579366, + "grad_norm": 0.250592976808548, + "learning_rate": 0.00011415627373000246, + "loss": 0.1692, + "step": 33350 + }, + { + "epoch": 1.2880806208733928, + "grad_norm": 2.43820858001709, + "learning_rate": 0.00011413053271040066, + "loss": 0.2777, + "step": 33360 + }, + { + "epoch": 1.2884667361674196, + "grad_norm": 1.3179954290390015, + "learning_rate": 0.00011410479169079889, + "loss": 0.1794, + "step": 33370 + }, + { + "epoch": 1.2888528514614463, + "grad_norm": 1.0040466785430908, + "learning_rate": 0.00011407905067119709, + "loss": 0.3037, + "step": 33380 + }, + { + "epoch": 1.2892389667554731, + "grad_norm": 5.296288013458252, + "learning_rate": 0.00011405330965159531, + "loss": 0.2904, + "step": 33390 + }, + { + "epoch": 1.2896250820495, + "grad_norm": 0.4267273247241974, + "learning_rate": 0.00011402756863199351, + "loss": 0.2263, + "step": 33400 + }, + { + "epoch": 1.2900111973435266, + "grad_norm": 0.8817713260650635, + "learning_rate": 0.00011400182761239173, + "loss": 0.2715, + "step": 33410 + }, + { + "epoch": 1.2903973126375536, + "grad_norm": 2.7891275882720947, + "learning_rate": 0.00011397608659278995, + "loss": 0.2781, + "step": 33420 + }, + { + "epoch": 1.2907834279315804, + "grad_norm": 0.3013952374458313, + "learning_rate": 0.00011395034557318815, + "loss": 0.2563, + "step": 33430 + }, + { + "epoch": 1.2911695432256072, + "grad_norm": 1.766413927078247, + "learning_rate": 0.00011392460455358638, + "loss": 0.1854, + "step": 33440 + }, + { + "epoch": 1.291555658519634, + "grad_norm": 0.25331103801727295, + "learning_rate": 0.00011389886353398458, + "loss": 0.1206, + "step": 33450 + }, + { + "epoch": 1.2919417738136607, + "grad_norm": 0.036400288343429565, + "learning_rate": 0.0001138731225143828, + "loss": 0.4707, + "step": 33460 + }, + { + "epoch": 1.2923278891076875, + "grad_norm": 1.5620888471603394, + "learning_rate": 0.00011384738149478102, + "loss": 0.3312, + "step": 
33470 + }, + { + "epoch": 1.2927140044017142, + "grad_norm": 0.6670392155647278, + "learning_rate": 0.00011382164047517922, + "loss": 0.2341, + "step": 33480 + }, + { + "epoch": 1.2931001196957412, + "grad_norm": 2.3108737468719482, + "learning_rate": 0.00011379589945557745, + "loss": 0.3843, + "step": 33490 + }, + { + "epoch": 1.293486234989768, + "grad_norm": 0.8025147318840027, + "learning_rate": 0.00011377015843597565, + "loss": 0.1982, + "step": 33500 + }, + { + "epoch": 1.2938723502837948, + "grad_norm": 1.7835719585418701, + "learning_rate": 0.00011374441741637387, + "loss": 0.3285, + "step": 33510 + }, + { + "epoch": 1.2942584655778215, + "grad_norm": 2.041508913040161, + "learning_rate": 0.00011371867639677207, + "loss": 0.2044, + "step": 33520 + }, + { + "epoch": 1.2946445808718483, + "grad_norm": 1.103378415107727, + "learning_rate": 0.0001136929353771703, + "loss": 0.1682, + "step": 33530 + }, + { + "epoch": 1.295030696165875, + "grad_norm": 0.057376351207494736, + "learning_rate": 0.00011366719435756851, + "loss": 0.1642, + "step": 33540 + }, + { + "epoch": 1.2954168114599018, + "grad_norm": 0.6539410948753357, + "learning_rate": 0.00011364145333796671, + "loss": 0.1549, + "step": 33550 + }, + { + "epoch": 1.2958029267539288, + "grad_norm": 1.250543236732483, + "learning_rate": 0.00011361571231836494, + "loss": 0.3764, + "step": 33560 + }, + { + "epoch": 1.2961890420479556, + "grad_norm": 0.23697887361049652, + "learning_rate": 0.00011358997129876314, + "loss": 0.3999, + "step": 33570 + }, + { + "epoch": 1.2965751573419824, + "grad_norm": 0.9318505525588989, + "learning_rate": 0.00011356423027916137, + "loss": 0.4156, + "step": 33580 + }, + { + "epoch": 1.2969612726360091, + "grad_norm": 1.3910777568817139, + "learning_rate": 0.00011353848925955957, + "loss": 0.3455, + "step": 33590 + }, + { + "epoch": 1.297347387930036, + "grad_norm": 1.6764451265335083, + "learning_rate": 0.00011351274823995779, + "loss": 0.1884, + "step": 33600 + }, + { + "epoch": 1.2977335032240627, + "grad_norm": 0.9300051927566528, + "learning_rate": 0.000113487007220356, + "loss": 0.119, + "step": 33610 + }, + { + "epoch": 1.2981196185180894, + "grad_norm": 2.447462558746338, + "learning_rate": 0.00011346126620075422, + "loss": 0.4403, + "step": 33620 + }, + { + "epoch": 1.2985057338121164, + "grad_norm": 1.216407060623169, + "learning_rate": 0.00011343552518115243, + "loss": 0.2415, + "step": 33630 + }, + { + "epoch": 1.298891849106143, + "grad_norm": 2.968648910522461, + "learning_rate": 0.00011340978416155063, + "loss": 0.2899, + "step": 33640 + }, + { + "epoch": 1.29927796440017, + "grad_norm": 0.6649970412254333, + "learning_rate": 0.00011338404314194886, + "loss": 0.3809, + "step": 33650 + }, + { + "epoch": 1.2996640796941967, + "grad_norm": 1.7277917861938477, + "learning_rate": 0.00011335830212234709, + "loss": 0.3308, + "step": 33660 + }, + { + "epoch": 1.3000501949882235, + "grad_norm": 1.3269709348678589, + "learning_rate": 0.00011333256110274529, + "loss": 0.3682, + "step": 33670 + }, + { + "epoch": 1.3004363102822503, + "grad_norm": 0.20609407126903534, + "learning_rate": 0.0001133068200831435, + "loss": 0.1379, + "step": 33680 + }, + { + "epoch": 1.300822425576277, + "grad_norm": 0.6592215299606323, + "learning_rate": 0.00011328107906354171, + "loss": 0.2746, + "step": 33690 + }, + { + "epoch": 1.3012085408703038, + "grad_norm": 1.903635859489441, + "learning_rate": 0.00011325533804393993, + "loss": 0.4729, + "step": 33700 + }, + { + "epoch": 1.3015946561643306, + "grad_norm": 
0.8432504534721375, + "learning_rate": 0.00011322959702433813, + "loss": 0.2835, + "step": 33710 + }, + { + "epoch": 1.3019807714583576, + "grad_norm": 0.9862542152404785, + "learning_rate": 0.00011320385600473635, + "loss": 0.1823, + "step": 33720 + }, + { + "epoch": 1.3023668867523843, + "grad_norm": 3.845738649368286, + "learning_rate": 0.00011317811498513458, + "loss": 0.2542, + "step": 33730 + }, + { + "epoch": 1.302753002046411, + "grad_norm": 0.6317747235298157, + "learning_rate": 0.00011315237396553278, + "loss": 0.22, + "step": 33740 + }, + { + "epoch": 1.3031391173404379, + "grad_norm": 2.5221354961395264, + "learning_rate": 0.000113126632945931, + "loss": 0.2253, + "step": 33750 + }, + { + "epoch": 1.3035252326344646, + "grad_norm": 1.3326247930526733, + "learning_rate": 0.0001131008919263292, + "loss": 0.2021, + "step": 33760 + }, + { + "epoch": 1.3039113479284914, + "grad_norm": 1.148047685623169, + "learning_rate": 0.00011307515090672742, + "loss": 0.3987, + "step": 33770 + }, + { + "epoch": 1.3042974632225182, + "grad_norm": 0.19721268117427826, + "learning_rate": 0.00011304940988712562, + "loss": 0.2642, + "step": 33780 + }, + { + "epoch": 1.3046835785165452, + "grad_norm": 1.4060617685317993, + "learning_rate": 0.00011302366886752385, + "loss": 0.2736, + "step": 33790 + }, + { + "epoch": 1.305069693810572, + "grad_norm": 1.0736548900604248, + "learning_rate": 0.00011299792784792207, + "loss": 0.2448, + "step": 33800 + }, + { + "epoch": 1.3054558091045987, + "grad_norm": 4.352476119995117, + "learning_rate": 0.00011297218682832027, + "loss": 0.383, + "step": 33810 + }, + { + "epoch": 1.3058419243986255, + "grad_norm": 0.2249228060245514, + "learning_rate": 0.0001129464458087185, + "loss": 0.14, + "step": 33820 + }, + { + "epoch": 1.3062280396926522, + "grad_norm": 0.4820781946182251, + "learning_rate": 0.0001129207047891167, + "loss": 0.248, + "step": 33830 + }, + { + "epoch": 1.306614154986679, + "grad_norm": 2.2983391284942627, + "learning_rate": 0.00011289496376951491, + "loss": 0.2608, + "step": 33840 + }, + { + "epoch": 1.3070002702807058, + "grad_norm": 1.3315671682357788, + "learning_rate": 0.00011286922274991314, + "loss": 0.1727, + "step": 33850 + }, + { + "epoch": 1.3073863855747327, + "grad_norm": 2.060299873352051, + "learning_rate": 0.00011284348173031134, + "loss": 0.3097, + "step": 33860 + }, + { + "epoch": 1.3077725008687593, + "grad_norm": 2.096285581588745, + "learning_rate": 0.00011281774071070957, + "loss": 0.2305, + "step": 33870 + }, + { + "epoch": 1.3081586161627863, + "grad_norm": 0.4997636675834656, + "learning_rate": 0.00011279199969110777, + "loss": 0.1993, + "step": 33880 + }, + { + "epoch": 1.308544731456813, + "grad_norm": 0.58636474609375, + "learning_rate": 0.00011276625867150599, + "loss": 0.1873, + "step": 33890 + }, + { + "epoch": 1.3089308467508398, + "grad_norm": 0.9128592610359192, + "learning_rate": 0.00011274051765190419, + "loss": 0.1885, + "step": 33900 + }, + { + "epoch": 1.3093169620448666, + "grad_norm": 2.228043794631958, + "learning_rate": 0.0001127147766323024, + "loss": 0.3649, + "step": 33910 + }, + { + "epoch": 1.3097030773388934, + "grad_norm": 1.069002389907837, + "learning_rate": 0.00011268903561270063, + "loss": 0.5454, + "step": 33920 + }, + { + "epoch": 1.3100891926329201, + "grad_norm": 0.6207597851753235, + "learning_rate": 0.00011266329459309883, + "loss": 0.2329, + "step": 33930 + }, + { + "epoch": 1.3104753079269469, + "grad_norm": 1.262247920036316, + "learning_rate": 0.00011263755357349706, + "loss": 0.3437, + 
"step": 33940 + }, + { + "epoch": 1.3108614232209739, + "grad_norm": 1.7429994344711304, + "learning_rate": 0.00011261181255389526, + "loss": 0.228, + "step": 33950 + }, + { + "epoch": 1.3112475385150006, + "grad_norm": 0.646900475025177, + "learning_rate": 0.00011258607153429349, + "loss": 0.3739, + "step": 33960 + }, + { + "epoch": 1.3116336538090274, + "grad_norm": 1.8228782415390015, + "learning_rate": 0.0001125603305146917, + "loss": 0.2325, + "step": 33970 + }, + { + "epoch": 1.3120197691030542, + "grad_norm": 3.539228916168213, + "learning_rate": 0.0001125345894950899, + "loss": 0.194, + "step": 33980 + }, + { + "epoch": 1.312405884397081, + "grad_norm": 1.2801135778427124, + "learning_rate": 0.00011250884847548813, + "loss": 0.3069, + "step": 33990 + }, + { + "epoch": 1.3127919996911077, + "grad_norm": 3.6265695095062256, + "learning_rate": 0.00011248310745588633, + "loss": 0.3113, + "step": 34000 + }, + { + "epoch": 1.3131781149851345, + "grad_norm": 0.07370063662528992, + "learning_rate": 0.00011245736643628455, + "loss": 0.1449, + "step": 34010 + }, + { + "epoch": 1.3135642302791615, + "grad_norm": 1.0295637845993042, + "learning_rate": 0.00011243162541668275, + "loss": 0.22, + "step": 34020 + }, + { + "epoch": 1.313950345573188, + "grad_norm": 0.8803662061691284, + "learning_rate": 0.00011240588439708098, + "loss": 0.1368, + "step": 34030 + }, + { + "epoch": 1.314336460867215, + "grad_norm": 1.6597707271575928, + "learning_rate": 0.00011238014337747919, + "loss": 0.3038, + "step": 34040 + }, + { + "epoch": 1.3147225761612418, + "grad_norm": 2.115492343902588, + "learning_rate": 0.00011235440235787739, + "loss": 0.1754, + "step": 34050 + }, + { + "epoch": 1.3151086914552685, + "grad_norm": 0.8143919706344604, + "learning_rate": 0.00011232866133827562, + "loss": 0.3764, + "step": 34060 + }, + { + "epoch": 1.3154948067492953, + "grad_norm": 0.14369767904281616, + "learning_rate": 0.00011230292031867382, + "loss": 0.1742, + "step": 34070 + }, + { + "epoch": 1.315880922043322, + "grad_norm": 1.0129845142364502, + "learning_rate": 0.00011227717929907205, + "loss": 0.1458, + "step": 34080 + }, + { + "epoch": 1.316267037337349, + "grad_norm": 2.7300291061401367, + "learning_rate": 0.00011225143827947025, + "loss": 0.3939, + "step": 34090 + }, + { + "epoch": 1.3166531526313756, + "grad_norm": 0.20205609500408173, + "learning_rate": 0.00011222569725986847, + "loss": 0.29, + "step": 34100 + }, + { + "epoch": 1.3170392679254026, + "grad_norm": 1.8928464651107788, + "learning_rate": 0.00011219995624026669, + "loss": 0.1742, + "step": 34110 + }, + { + "epoch": 1.3174253832194294, + "grad_norm": 0.2639687955379486, + "learning_rate": 0.00011217421522066488, + "loss": 0.1745, + "step": 34120 + }, + { + "epoch": 1.3178114985134561, + "grad_norm": 0.5906389355659485, + "learning_rate": 0.00011214847420106311, + "loss": 0.2134, + "step": 34130 + }, + { + "epoch": 1.318197613807483, + "grad_norm": 0.9190629720687866, + "learning_rate": 0.00011212273318146131, + "loss": 0.2547, + "step": 34140 + }, + { + "epoch": 1.3185837291015097, + "grad_norm": 0.5749151110649109, + "learning_rate": 0.00011209699216185954, + "loss": 0.1688, + "step": 34150 + }, + { + "epoch": 1.3189698443955364, + "grad_norm": 0.82295823097229, + "learning_rate": 0.00011207125114225777, + "loss": 0.2884, + "step": 34160 + }, + { + "epoch": 1.3193559596895632, + "grad_norm": 0.07816460728645325, + "learning_rate": 0.00011204551012265597, + "loss": 0.2418, + "step": 34170 + }, + { + "epoch": 1.3197420749835902, + "grad_norm": 
0.6417407393455505, + "learning_rate": 0.00011201976910305418, + "loss": 0.2557, + "step": 34180 + }, + { + "epoch": 1.320128190277617, + "grad_norm": 6.093267440795898, + "learning_rate": 0.00011199402808345238, + "loss": 0.3088, + "step": 34190 + }, + { + "epoch": 1.3205143055716437, + "grad_norm": 1.8861887454986572, + "learning_rate": 0.0001119682870638506, + "loss": 0.2204, + "step": 34200 + }, + { + "epoch": 1.3209004208656705, + "grad_norm": 2.3272714614868164, + "learning_rate": 0.0001119425460442488, + "loss": 0.2236, + "step": 34210 + }, + { + "epoch": 1.3212865361596973, + "grad_norm": 0.9608810544013977, + "learning_rate": 0.00011191680502464703, + "loss": 0.1897, + "step": 34220 + }, + { + "epoch": 1.321672651453724, + "grad_norm": 1.2157350778579712, + "learning_rate": 0.00011189106400504526, + "loss": 0.1526, + "step": 34230 + }, + { + "epoch": 1.3220587667477508, + "grad_norm": 1.6684671640396118, + "learning_rate": 0.00011186532298544346, + "loss": 0.3394, + "step": 34240 + }, + { + "epoch": 1.3224448820417778, + "grad_norm": 2.0432374477386475, + "learning_rate": 0.00011183958196584167, + "loss": 0.2183, + "step": 34250 + }, + { + "epoch": 1.3228309973358043, + "grad_norm": 0.9436892867088318, + "learning_rate": 0.00011181384094623988, + "loss": 0.2947, + "step": 34260 + }, + { + "epoch": 1.3232171126298313, + "grad_norm": 0.23260092735290527, + "learning_rate": 0.0001117880999266381, + "loss": 0.114, + "step": 34270 + }, + { + "epoch": 1.323603227923858, + "grad_norm": 1.2291594743728638, + "learning_rate": 0.0001117623589070363, + "loss": 0.3145, + "step": 34280 + }, + { + "epoch": 1.3239893432178849, + "grad_norm": 0.41411107778549194, + "learning_rate": 0.00011173661788743452, + "loss": 0.2937, + "step": 34290 + }, + { + "epoch": 1.3243754585119116, + "grad_norm": 2.354405164718628, + "learning_rate": 0.00011171087686783275, + "loss": 0.3933, + "step": 34300 + }, + { + "epoch": 1.3247615738059384, + "grad_norm": 2.6997978687286377, + "learning_rate": 0.00011168513584823095, + "loss": 0.1494, + "step": 34310 + }, + { + "epoch": 1.3251476890999654, + "grad_norm": 2.8430919647216797, + "learning_rate": 0.00011165939482862916, + "loss": 0.2869, + "step": 34320 + }, + { + "epoch": 1.325533804393992, + "grad_norm": 1.1737356185913086, + "learning_rate": 0.00011163365380902738, + "loss": 0.2792, + "step": 34330 + }, + { + "epoch": 1.325919919688019, + "grad_norm": 4.123973846435547, + "learning_rate": 0.00011160791278942559, + "loss": 0.5211, + "step": 34340 + }, + { + "epoch": 1.3263060349820457, + "grad_norm": 0.8862038850784302, + "learning_rate": 0.00011158217176982382, + "loss": 0.2976, + "step": 34350 + }, + { + "epoch": 1.3266921502760725, + "grad_norm": 1.8690590858459473, + "learning_rate": 0.00011155643075022202, + "loss": 0.2485, + "step": 34360 + }, + { + "epoch": 1.3270782655700992, + "grad_norm": 2.885589599609375, + "learning_rate": 0.00011153068973062024, + "loss": 0.3084, + "step": 34370 + }, + { + "epoch": 1.327464380864126, + "grad_norm": 0.9898788928985596, + "learning_rate": 0.00011150494871101844, + "loss": 0.261, + "step": 34380 + }, + { + "epoch": 1.3278504961581528, + "grad_norm": 0.6879653930664062, + "learning_rate": 0.00011147920769141667, + "loss": 0.2082, + "step": 34390 + }, + { + "epoch": 1.3282366114521795, + "grad_norm": 1.2619003057479858, + "learning_rate": 0.00011145346667181487, + "loss": 0.2402, + "step": 34400 + }, + { + "epoch": 1.3286227267462065, + "grad_norm": 1.1212007999420166, + "learning_rate": 0.00011142772565221308, + "loss": 
0.3525, + "step": 34410 + }, + { + "epoch": 1.3290088420402333, + "grad_norm": 1.8431956768035889, + "learning_rate": 0.00011140198463261131, + "loss": 0.2212, + "step": 34420 + }, + { + "epoch": 1.32939495733426, + "grad_norm": 0.6185423731803894, + "learning_rate": 0.00011137624361300951, + "loss": 0.2455, + "step": 34430 + }, + { + "epoch": 1.3297810726282868, + "grad_norm": 2.3791301250457764, + "learning_rate": 0.00011135050259340774, + "loss": 0.1763, + "step": 34440 + }, + { + "epoch": 1.3301671879223136, + "grad_norm": 0.4928603768348694, + "learning_rate": 0.00011132476157380594, + "loss": 0.2381, + "step": 34450 + }, + { + "epoch": 1.3305533032163404, + "grad_norm": 1.5636029243469238, + "learning_rate": 0.00011129902055420416, + "loss": 0.1368, + "step": 34460 + }, + { + "epoch": 1.3309394185103671, + "grad_norm": 0.9425283074378967, + "learning_rate": 0.00011127327953460238, + "loss": 0.2825, + "step": 34470 + }, + { + "epoch": 1.3313255338043941, + "grad_norm": 1.2257115840911865, + "learning_rate": 0.00011124753851500058, + "loss": 0.2547, + "step": 34480 + }, + { + "epoch": 1.3317116490984207, + "grad_norm": 0.9416170716285706, + "learning_rate": 0.0001112217974953988, + "loss": 0.2766, + "step": 34490 + }, + { + "epoch": 1.3320977643924476, + "grad_norm": 0.5123847126960754, + "learning_rate": 0.000111196056475797, + "loss": 0.4733, + "step": 34500 + }, + { + "epoch": 1.3324838796864744, + "grad_norm": 1.5581384897232056, + "learning_rate": 0.00011117031545619523, + "loss": 0.1597, + "step": 34510 + }, + { + "epoch": 1.3328699949805012, + "grad_norm": 2.377333879470825, + "learning_rate": 0.00011114457443659343, + "loss": 0.209, + "step": 34520 + }, + { + "epoch": 1.333256110274528, + "grad_norm": 1.7840913534164429, + "learning_rate": 0.00011111883341699166, + "loss": 0.1759, + "step": 34530 + }, + { + "epoch": 1.3336422255685547, + "grad_norm": 1.1825993061065674, + "learning_rate": 0.00011109309239738987, + "loss": 0.2464, + "step": 34540 + }, + { + "epoch": 1.3340283408625815, + "grad_norm": 1.8859659433364868, + "learning_rate": 0.00011106735137778807, + "loss": 0.3539, + "step": 34550 + }, + { + "epoch": 1.3344144561566083, + "grad_norm": 1.9698175191879272, + "learning_rate": 0.0001110416103581863, + "loss": 0.3301, + "step": 34560 + }, + { + "epoch": 1.3348005714506352, + "grad_norm": 0.7649385333061218, + "learning_rate": 0.0001110158693385845, + "loss": 0.232, + "step": 34570 + }, + { + "epoch": 1.335186686744662, + "grad_norm": 0.56386399269104, + "learning_rate": 0.00011099012831898272, + "loss": 0.3425, + "step": 34580 + }, + { + "epoch": 1.3355728020386888, + "grad_norm": 2.956003189086914, + "learning_rate": 0.00011096438729938092, + "loss": 0.1518, + "step": 34590 + }, + { + "epoch": 1.3359589173327155, + "grad_norm": 2.612029552459717, + "learning_rate": 0.00011093864627977915, + "loss": 0.2765, + "step": 34600 + }, + { + "epoch": 1.3363450326267423, + "grad_norm": 0.9674397706985474, + "learning_rate": 0.00011091290526017736, + "loss": 0.303, + "step": 34610 + }, + { + "epoch": 1.336731147920769, + "grad_norm": 0.9578921794891357, + "learning_rate": 0.00011088716424057556, + "loss": 0.1405, + "step": 34620 + }, + { + "epoch": 1.3371172632147958, + "grad_norm": 2.168065071105957, + "learning_rate": 0.00011086142322097379, + "loss": 0.1914, + "step": 34630 + }, + { + "epoch": 1.3375033785088228, + "grad_norm": 1.3166526556015015, + "learning_rate": 0.00011083568220137199, + "loss": 0.4134, + "step": 34640 + }, + { + "epoch": 1.3378894938028496, + 
"grad_norm": 0.9082283973693848, + "learning_rate": 0.00011080994118177022, + "loss": 0.2693, + "step": 34650 + }, + { + "epoch": 1.3382756090968764, + "grad_norm": 2.203007698059082, + "learning_rate": 0.00011078420016216844, + "loss": 0.1847, + "step": 34660 + }, + { + "epoch": 1.3386617243909031, + "grad_norm": 0.8101674914360046, + "learning_rate": 0.00011075845914256664, + "loss": 0.3111, + "step": 34670 + }, + { + "epoch": 1.33904783968493, + "grad_norm": 1.9545695781707764, + "learning_rate": 0.00011073271812296486, + "loss": 0.3761, + "step": 34680 + }, + { + "epoch": 1.3394339549789567, + "grad_norm": 1.547581672668457, + "learning_rate": 0.00011070697710336306, + "loss": 0.2374, + "step": 34690 + }, + { + "epoch": 1.3398200702729834, + "grad_norm": 3.3519034385681152, + "learning_rate": 0.00011068123608376128, + "loss": 0.1957, + "step": 34700 + }, + { + "epoch": 1.3402061855670104, + "grad_norm": 1.5508599281311035, + "learning_rate": 0.00011065549506415948, + "loss": 0.4171, + "step": 34710 + }, + { + "epoch": 1.340592300861037, + "grad_norm": 1.8547546863555908, + "learning_rate": 0.00011062975404455771, + "loss": 0.1872, + "step": 34720 + }, + { + "epoch": 1.340978416155064, + "grad_norm": 1.4600756168365479, + "learning_rate": 0.00011060401302495594, + "loss": 0.3515, + "step": 34730 + }, + { + "epoch": 1.3413645314490907, + "grad_norm": 0.05774044618010521, + "learning_rate": 0.00011057827200535414, + "loss": 0.1604, + "step": 34740 + }, + { + "epoch": 1.3417506467431175, + "grad_norm": 2.8793342113494873, + "learning_rate": 0.00011055253098575235, + "loss": 0.3095, + "step": 34750 + }, + { + "epoch": 1.3421367620371443, + "grad_norm": 2.241042375564575, + "learning_rate": 0.00011052678996615055, + "loss": 0.2511, + "step": 34760 + }, + { + "epoch": 1.342522877331171, + "grad_norm": 1.9320632219314575, + "learning_rate": 0.00011050104894654878, + "loss": 0.4493, + "step": 34770 + }, + { + "epoch": 1.3429089926251978, + "grad_norm": 1.6483882665634155, + "learning_rate": 0.000110475307926947, + "loss": 0.217, + "step": 34780 + }, + { + "epoch": 1.3432951079192246, + "grad_norm": 0.9635765552520752, + "learning_rate": 0.0001104495669073452, + "loss": 0.5458, + "step": 34790 + }, + { + "epoch": 1.3436812232132516, + "grad_norm": 1.2436567544937134, + "learning_rate": 0.00011042382588774343, + "loss": 0.2857, + "step": 34800 + }, + { + "epoch": 1.3440673385072783, + "grad_norm": 2.8082425594329834, + "learning_rate": 0.00011039808486814163, + "loss": 0.3439, + "step": 34810 + }, + { + "epoch": 1.344453453801305, + "grad_norm": 1.0430901050567627, + "learning_rate": 0.00011037234384853984, + "loss": 0.1404, + "step": 34820 + }, + { + "epoch": 1.3448395690953319, + "grad_norm": 1.7387149333953857, + "learning_rate": 0.00011034660282893806, + "loss": 0.395, + "step": 34830 + }, + { + "epoch": 1.3452256843893586, + "grad_norm": 1.2713748216629028, + "learning_rate": 0.00011032086180933627, + "loss": 0.29, + "step": 34840 + }, + { + "epoch": 1.3456117996833854, + "grad_norm": 0.26068204641342163, + "learning_rate": 0.0001102951207897345, + "loss": 0.1814, + "step": 34850 + }, + { + "epoch": 1.3459979149774122, + "grad_norm": 2.4163243770599365, + "learning_rate": 0.0001102693797701327, + "loss": 0.2445, + "step": 34860 + }, + { + "epoch": 1.3463840302714392, + "grad_norm": 2.2439687252044678, + "learning_rate": 0.00011024363875053092, + "loss": 0.3338, + "step": 34870 + }, + { + "epoch": 1.346770145565466, + "grad_norm": 0.2822403609752655, + "learning_rate": 0.00011021789773092912, 
+ "loss": 0.1648, + "step": 34880 + }, + { + "epoch": 1.3471562608594927, + "grad_norm": 0.07319017499685287, + "learning_rate": 0.00011019215671132734, + "loss": 0.107, + "step": 34890 + }, + { + "epoch": 1.3475423761535195, + "grad_norm": 0.9809044003486633, + "learning_rate": 0.00011016641569172555, + "loss": 0.256, + "step": 34900 + }, + { + "epoch": 1.3479284914475462, + "grad_norm": 0.5016226768493652, + "learning_rate": 0.00011014067467212376, + "loss": 0.3229, + "step": 34910 + }, + { + "epoch": 1.348314606741573, + "grad_norm": 1.3026005029678345, + "learning_rate": 0.00011011493365252199, + "loss": 0.2145, + "step": 34920 + }, + { + "epoch": 1.3487007220355998, + "grad_norm": 1.0752215385437012, + "learning_rate": 0.00011008919263292019, + "loss": 0.2355, + "step": 34930 + }, + { + "epoch": 1.3490868373296268, + "grad_norm": 2.2703003883361816, + "learning_rate": 0.00011006345161331842, + "loss": 0.2079, + "step": 34940 + }, + { + "epoch": 1.3494729526236533, + "grad_norm": 1.1323810815811157, + "learning_rate": 0.00011003771059371662, + "loss": 0.1015, + "step": 34950 + }, + { + "epoch": 1.3498590679176803, + "grad_norm": 0.10813555121421814, + "learning_rate": 0.00011001196957411484, + "loss": 0.4214, + "step": 34960 + }, + { + "epoch": 1.350245183211707, + "grad_norm": 0.07815568149089813, + "learning_rate": 0.00010998622855451306, + "loss": 0.1211, + "step": 34970 + }, + { + "epoch": 1.3506312985057338, + "grad_norm": 0.6748234629631042, + "learning_rate": 0.00010996048753491126, + "loss": 0.3508, + "step": 34980 + }, + { + "epoch": 1.3510174137997606, + "grad_norm": 1.8556997776031494, + "learning_rate": 0.00010993474651530948, + "loss": 0.2268, + "step": 34990 + }, + { + "epoch": 1.3514035290937874, + "grad_norm": 0.8696061372756958, + "learning_rate": 0.00010990900549570768, + "loss": 0.4321, + "step": 35000 + }, + { + "epoch": 1.3517896443878141, + "grad_norm": 0.42442765831947327, + "learning_rate": 0.00010988326447610591, + "loss": 0.1944, + "step": 35010 + }, + { + "epoch": 1.352175759681841, + "grad_norm": 1.0474554300308228, + "learning_rate": 0.00010985752345650411, + "loss": 0.1342, + "step": 35020 + }, + { + "epoch": 1.3525618749758679, + "grad_norm": 0.607037365436554, + "learning_rate": 0.00010983178243690234, + "loss": 0.2965, + "step": 35030 + }, + { + "epoch": 1.3529479902698947, + "grad_norm": 1.8160990476608276, + "learning_rate": 0.00010980604141730055, + "loss": 0.3192, + "step": 35040 + }, + { + "epoch": 1.3533341055639214, + "grad_norm": 2.0026509761810303, + "learning_rate": 0.00010978030039769875, + "loss": 0.3054, + "step": 35050 + }, + { + "epoch": 1.3537202208579482, + "grad_norm": 0.9203600883483887, + "learning_rate": 0.00010975455937809698, + "loss": 0.253, + "step": 35060 + }, + { + "epoch": 1.354106336151975, + "grad_norm": 0.33198195695877075, + "learning_rate": 0.00010972881835849518, + "loss": 0.3885, + "step": 35070 + }, + { + "epoch": 1.3544924514460017, + "grad_norm": 0.3201223611831665, + "learning_rate": 0.0001097030773388934, + "loss": 0.3029, + "step": 35080 + }, + { + "epoch": 1.3548785667400285, + "grad_norm": 1.2589943408966064, + "learning_rate": 0.0001096773363192916, + "loss": 0.4243, + "step": 35090 + }, + { + "epoch": 1.3552646820340555, + "grad_norm": 1.5106219053268433, + "learning_rate": 0.00010965159529968983, + "loss": 0.2585, + "step": 35100 + }, + { + "epoch": 1.3556507973280822, + "grad_norm": 1.429799199104309, + "learning_rate": 0.00010962585428008804, + "loss": 0.1961, + "step": 35110 + }, + { + "epoch": 
1.356036912622109, + "grad_norm": 2.1211297512054443, + "learning_rate": 0.00010960011326048624, + "loss": 0.4057, + "step": 35120 + }, + { + "epoch": 1.3564230279161358, + "grad_norm": 2.5154731273651123, + "learning_rate": 0.00010957437224088447, + "loss": 0.3787, + "step": 35130 + }, + { + "epoch": 1.3568091432101625, + "grad_norm": 0.4914834201335907, + "learning_rate": 0.00010954863122128267, + "loss": 0.234, + "step": 35140 + }, + { + "epoch": 1.3571952585041893, + "grad_norm": 0.26685893535614014, + "learning_rate": 0.0001095228902016809, + "loss": 0.2841, + "step": 35150 + }, + { + "epoch": 1.357581373798216, + "grad_norm": 0.15462155640125275, + "learning_rate": 0.00010949714918207912, + "loss": 0.2269, + "step": 35160 + }, + { + "epoch": 1.357967489092243, + "grad_norm": 1.3887063264846802, + "learning_rate": 0.00010947140816247732, + "loss": 0.3455, + "step": 35170 + }, + { + "epoch": 1.3583536043862696, + "grad_norm": 0.786374032497406, + "learning_rate": 0.00010944566714287554, + "loss": 0.2897, + "step": 35180 + }, + { + "epoch": 1.3587397196802966, + "grad_norm": 1.100475549697876, + "learning_rate": 0.00010941992612327374, + "loss": 0.2892, + "step": 35190 + }, + { + "epoch": 1.3591258349743234, + "grad_norm": 0.7676102519035339, + "learning_rate": 0.00010939418510367196, + "loss": 0.1942, + "step": 35200 + }, + { + "epoch": 1.3595119502683501, + "grad_norm": 0.33462053537368774, + "learning_rate": 0.00010936844408407016, + "loss": 0.2872, + "step": 35210 + }, + { + "epoch": 1.359898065562377, + "grad_norm": 0.9294387698173523, + "learning_rate": 0.00010934270306446839, + "loss": 0.2617, + "step": 35220 + }, + { + "epoch": 1.3602841808564037, + "grad_norm": 0.3169979453086853, + "learning_rate": 0.00010931696204486662, + "loss": 0.2942, + "step": 35230 + }, + { + "epoch": 1.3606702961504304, + "grad_norm": 2.1339616775512695, + "learning_rate": 0.00010929122102526482, + "loss": 0.4448, + "step": 35240 + }, + { + "epoch": 1.3610564114444572, + "grad_norm": 0.9430062770843506, + "learning_rate": 0.00010926548000566303, + "loss": 0.2051, + "step": 35250 + }, + { + "epoch": 1.3614425267384842, + "grad_norm": 3.1187360286712646, + "learning_rate": 0.00010923973898606123, + "loss": 0.2274, + "step": 35260 + }, + { + "epoch": 1.361828642032511, + "grad_norm": 1.4727579355239868, + "learning_rate": 0.00010921399796645946, + "loss": 0.3757, + "step": 35270 + }, + { + "epoch": 1.3622147573265377, + "grad_norm": 2.157560348510742, + "learning_rate": 0.00010918825694685768, + "loss": 0.3096, + "step": 35280 + }, + { + "epoch": 1.3626008726205645, + "grad_norm": 0.33457377552986145, + "learning_rate": 0.00010916251592725588, + "loss": 0.1489, + "step": 35290 + }, + { + "epoch": 1.3629869879145913, + "grad_norm": 0.9005904197692871, + "learning_rate": 0.00010913677490765411, + "loss": 0.1826, + "step": 35300 + }, + { + "epoch": 1.363373103208618, + "grad_norm": 2.1222829818725586, + "learning_rate": 0.00010911103388805231, + "loss": 0.1965, + "step": 35310 + }, + { + "epoch": 1.3637592185026448, + "grad_norm": 1.3881357908248901, + "learning_rate": 0.00010908529286845052, + "loss": 0.1791, + "step": 35320 + }, + { + "epoch": 1.3641453337966718, + "grad_norm": 1.7574503421783447, + "learning_rate": 0.00010905955184884872, + "loss": 0.3316, + "step": 35330 + }, + { + "epoch": 1.3645314490906983, + "grad_norm": 0.1967727392911911, + "learning_rate": 0.00010903381082924695, + "loss": 0.2331, + "step": 35340 + }, + { + "epoch": 1.3649175643847253, + "grad_norm": 0.8974360823631287, + 
"learning_rate": 0.00010900806980964518, + "loss": 0.2589, + "step": 35350 + }, + { + "epoch": 1.365303679678752, + "grad_norm": 2.0996744632720947, + "learning_rate": 0.00010898232879004338, + "loss": 0.3663, + "step": 35360 + }, + { + "epoch": 1.3656897949727789, + "grad_norm": 0.5678316354751587, + "learning_rate": 0.0001089565877704416, + "loss": 0.1729, + "step": 35370 + }, + { + "epoch": 1.3660759102668056, + "grad_norm": 2.3381874561309814, + "learning_rate": 0.0001089308467508398, + "loss": 0.1615, + "step": 35380 + }, + { + "epoch": 1.3664620255608324, + "grad_norm": 1.0276836156845093, + "learning_rate": 0.00010890510573123802, + "loss": 0.3359, + "step": 35390 + }, + { + "epoch": 1.3668481408548594, + "grad_norm": 2.4374940395355225, + "learning_rate": 0.00010887936471163622, + "loss": 0.2435, + "step": 35400 + }, + { + "epoch": 1.367234256148886, + "grad_norm": 0.45221665501594543, + "learning_rate": 0.00010885362369203444, + "loss": 0.2555, + "step": 35410 + }, + { + "epoch": 1.367620371442913, + "grad_norm": 2.608090400695801, + "learning_rate": 0.00010882788267243267, + "loss": 0.2465, + "step": 35420 + }, + { + "epoch": 1.3680064867369397, + "grad_norm": 1.4186642169952393, + "learning_rate": 0.00010880214165283087, + "loss": 0.1674, + "step": 35430 + }, + { + "epoch": 1.3683926020309665, + "grad_norm": 0.659479022026062, + "learning_rate": 0.0001087764006332291, + "loss": 0.2926, + "step": 35440 + }, + { + "epoch": 1.3687787173249932, + "grad_norm": 0.9219567179679871, + "learning_rate": 0.0001087506596136273, + "loss": 0.2001, + "step": 35450 + }, + { + "epoch": 1.36916483261902, + "grad_norm": 0.8070804476737976, + "learning_rate": 0.00010872491859402551, + "loss": 0.2178, + "step": 35460 + }, + { + "epoch": 1.3695509479130468, + "grad_norm": 2.9981069564819336, + "learning_rate": 0.00010869917757442374, + "loss": 0.3079, + "step": 35470 + }, + { + "epoch": 1.3699370632070735, + "grad_norm": 0.7891242504119873, + "learning_rate": 0.00010867343655482194, + "loss": 0.2765, + "step": 35480 + }, + { + "epoch": 1.3703231785011005, + "grad_norm": 1.448637843132019, + "learning_rate": 0.00010864769553522016, + "loss": 0.3521, + "step": 35490 + }, + { + "epoch": 1.3707092937951273, + "grad_norm": 0.07628043740987778, + "learning_rate": 0.00010862195451561836, + "loss": 0.2083, + "step": 35500 + }, + { + "epoch": 1.371095409089154, + "grad_norm": 0.7549735307693481, + "learning_rate": 0.00010859621349601659, + "loss": 0.2536, + "step": 35510 + }, + { + "epoch": 1.3714815243831808, + "grad_norm": 1.3548041582107544, + "learning_rate": 0.00010857047247641479, + "loss": 0.251, + "step": 35520 + }, + { + "epoch": 1.3718676396772076, + "grad_norm": 0.530010998249054, + "learning_rate": 0.000108544731456813, + "loss": 0.1917, + "step": 35530 + }, + { + "epoch": 1.3722537549712344, + "grad_norm": 0.4148992896080017, + "learning_rate": 0.00010851899043721123, + "loss": 0.335, + "step": 35540 + }, + { + "epoch": 1.3726398702652611, + "grad_norm": 1.5118776559829712, + "learning_rate": 0.00010849324941760943, + "loss": 0.2159, + "step": 35550 + }, + { + "epoch": 1.3730259855592881, + "grad_norm": 1.036889910697937, + "learning_rate": 0.00010846750839800766, + "loss": 0.2975, + "step": 35560 + }, + { + "epoch": 1.3734121008533147, + "grad_norm": 1.724263072013855, + "learning_rate": 0.00010844176737840586, + "loss": 0.1476, + "step": 35570 + }, + { + "epoch": 1.3737982161473417, + "grad_norm": 1.599007487297058, + "learning_rate": 0.00010841602635880408, + "loss": 0.2539, + "step": 35580 + 
}, + { + "epoch": 1.3741843314413684, + "grad_norm": 2.9119279384613037, + "learning_rate": 0.00010839028533920228, + "loss": 0.2688, + "step": 35590 + }, + { + "epoch": 1.3745704467353952, + "grad_norm": 1.8647874593734741, + "learning_rate": 0.00010836454431960051, + "loss": 0.4158, + "step": 35600 + }, + { + "epoch": 1.374956562029422, + "grad_norm": 3.925290822982788, + "learning_rate": 0.00010833880329999872, + "loss": 0.3333, + "step": 35610 + }, + { + "epoch": 1.3753426773234487, + "grad_norm": 0.7124634385108948, + "learning_rate": 0.00010831306228039692, + "loss": 0.1069, + "step": 35620 + }, + { + "epoch": 1.3757287926174757, + "grad_norm": 1.303579330444336, + "learning_rate": 0.00010828732126079515, + "loss": 0.2898, + "step": 35630 + }, + { + "epoch": 1.3761149079115023, + "grad_norm": 3.921804189682007, + "learning_rate": 0.00010826158024119335, + "loss": 0.4212, + "step": 35640 + }, + { + "epoch": 1.3765010232055293, + "grad_norm": 1.3194564580917358, + "learning_rate": 0.00010823583922159158, + "loss": 0.2771, + "step": 35650 + }, + { + "epoch": 1.376887138499556, + "grad_norm": 1.4237637519836426, + "learning_rate": 0.00010821009820198979, + "loss": 0.2463, + "step": 35660 + }, + { + "epoch": 1.3772732537935828, + "grad_norm": 1.8165888786315918, + "learning_rate": 0.000108184357182388, + "loss": 0.291, + "step": 35670 + }, + { + "epoch": 1.3776593690876096, + "grad_norm": 1.1056426763534546, + "learning_rate": 0.00010815861616278622, + "loss": 0.2525, + "step": 35680 + }, + { + "epoch": 1.3780454843816363, + "grad_norm": 1.483189582824707, + "learning_rate": 0.00010813287514318442, + "loss": 0.1569, + "step": 35690 + }, + { + "epoch": 1.378431599675663, + "grad_norm": 1.0666841268539429, + "learning_rate": 0.00010810713412358264, + "loss": 0.235, + "step": 35700 + }, + { + "epoch": 1.3788177149696899, + "grad_norm": 1.0299845933914185, + "learning_rate": 0.00010808139310398084, + "loss": 0.3892, + "step": 35710 + }, + { + "epoch": 1.3792038302637168, + "grad_norm": 2.3474409580230713, + "learning_rate": 0.00010805565208437907, + "loss": 0.3417, + "step": 35720 + }, + { + "epoch": 1.3795899455577436, + "grad_norm": 1.7456315755844116, + "learning_rate": 0.0001080299110647773, + "loss": 0.2538, + "step": 35730 + }, + { + "epoch": 1.3799760608517704, + "grad_norm": 2.866103410720825, + "learning_rate": 0.0001080041700451755, + "loss": 0.1619, + "step": 35740 + }, + { + "epoch": 1.3803621761457971, + "grad_norm": 0.29136407375335693, + "learning_rate": 0.00010797842902557371, + "loss": 0.2692, + "step": 35750 + }, + { + "epoch": 1.380748291439824, + "grad_norm": 0.8046161532402039, + "learning_rate": 0.00010795268800597191, + "loss": 0.1575, + "step": 35760 + }, + { + "epoch": 1.3811344067338507, + "grad_norm": 0.6451787352561951, + "learning_rate": 0.00010792694698637014, + "loss": 0.4914, + "step": 35770 + }, + { + "epoch": 1.3815205220278775, + "grad_norm": 0.7289161086082458, + "learning_rate": 0.00010790120596676836, + "loss": 0.1895, + "step": 35780 + }, + { + "epoch": 1.3819066373219044, + "grad_norm": 0.8300430178642273, + "learning_rate": 0.00010787546494716656, + "loss": 0.3663, + "step": 35790 + }, + { + "epoch": 1.382292752615931, + "grad_norm": 0.17713364958763123, + "learning_rate": 0.00010784972392756479, + "loss": 0.3189, + "step": 35800 + }, + { + "epoch": 1.382678867909958, + "grad_norm": 0.903222918510437, + "learning_rate": 0.00010782398290796299, + "loss": 0.1577, + "step": 35810 + }, + { + "epoch": 1.3830649832039847, + "grad_norm": 0.08617932349443436, 
+ "learning_rate": 0.0001077982418883612, + "loss": 0.2872, + "step": 35820 + }, + { + "epoch": 1.3834510984980115, + "grad_norm": 1.9590895175933838, + "learning_rate": 0.0001077725008687594, + "loss": 0.2907, + "step": 35830 + }, + { + "epoch": 1.3838372137920383, + "grad_norm": 1.2515161037445068, + "learning_rate": 0.00010774675984915763, + "loss": 0.177, + "step": 35840 + }, + { + "epoch": 1.384223329086065, + "grad_norm": 1.6171292066574097, + "learning_rate": 0.00010772101882955586, + "loss": 0.2321, + "step": 35850 + }, + { + "epoch": 1.3846094443800918, + "grad_norm": 0.13681405782699585, + "learning_rate": 0.00010769527780995406, + "loss": 0.247, + "step": 35860 + }, + { + "epoch": 1.3849955596741186, + "grad_norm": 1.1949968338012695, + "learning_rate": 0.00010766953679035228, + "loss": 0.2978, + "step": 35870 + }, + { + "epoch": 1.3853816749681456, + "grad_norm": 0.17001692950725555, + "learning_rate": 0.00010764379577075048, + "loss": 0.3241, + "step": 35880 + }, + { + "epoch": 1.3857677902621723, + "grad_norm": 0.8227952122688293, + "learning_rate": 0.0001076180547511487, + "loss": 0.3499, + "step": 35890 + }, + { + "epoch": 1.386153905556199, + "grad_norm": 1.4185482263565063, + "learning_rate": 0.0001075923137315469, + "loss": 0.3109, + "step": 35900 + }, + { + "epoch": 1.3865400208502259, + "grad_norm": 0.9533351063728333, + "learning_rate": 0.00010756657271194512, + "loss": 0.24, + "step": 35910 + }, + { + "epoch": 1.3869261361442526, + "grad_norm": 1.077789306640625, + "learning_rate": 0.00010754083169234335, + "loss": 0.2662, + "step": 35920 + }, + { + "epoch": 1.3873122514382794, + "grad_norm": 1.3528363704681396, + "learning_rate": 0.00010751509067274155, + "loss": 0.1623, + "step": 35930 + }, + { + "epoch": 1.3876983667323062, + "grad_norm": 0.25122806429862976, + "learning_rate": 0.00010748934965313978, + "loss": 0.1639, + "step": 35940 + }, + { + "epoch": 1.3880844820263332, + "grad_norm": 0.9446159601211548, + "learning_rate": 0.00010746360863353798, + "loss": 0.2035, + "step": 35950 + }, + { + "epoch": 1.38847059732036, + "grad_norm": 1.2258719205856323, + "learning_rate": 0.00010743786761393619, + "loss": 0.4247, + "step": 35960 + }, + { + "epoch": 1.3888567126143867, + "grad_norm": 3.117729663848877, + "learning_rate": 0.00010741212659433442, + "loss": 0.2569, + "step": 35970 + }, + { + "epoch": 1.3892428279084135, + "grad_norm": 0.6333123445510864, + "learning_rate": 0.00010738638557473262, + "loss": 0.1843, + "step": 35980 + }, + { + "epoch": 1.3896289432024402, + "grad_norm": 1.49360191822052, + "learning_rate": 0.00010736064455513084, + "loss": 0.2318, + "step": 35990 + }, + { + "epoch": 1.390015058496467, + "grad_norm": 3.9082753658294678, + "learning_rate": 0.00010733490353552904, + "loss": 0.2037, + "step": 36000 + }, + { + "epoch": 1.3904011737904938, + "grad_norm": 0.5687323808670044, + "learning_rate": 0.00010730916251592727, + "loss": 0.0871, + "step": 36010 + }, + { + "epoch": 1.3907872890845208, + "grad_norm": 0.3480868339538574, + "learning_rate": 0.00010728342149632547, + "loss": 0.4009, + "step": 36020 + }, + { + "epoch": 1.3911734043785473, + "grad_norm": 1.325042486190796, + "learning_rate": 0.00010725768047672368, + "loss": 0.5143, + "step": 36030 + }, + { + "epoch": 1.3915595196725743, + "grad_norm": 2.114786386489868, + "learning_rate": 0.00010723193945712191, + "loss": 0.2944, + "step": 36040 + }, + { + "epoch": 1.391945634966601, + "grad_norm": 1.716272234916687, + "learning_rate": 0.00010720619843752011, + "loss": 0.3793, + "step": 36050 
+ }, + { + "epoch": 1.3923317502606278, + "grad_norm": 2.057535171508789, + "learning_rate": 0.00010718045741791834, + "loss": 0.199, + "step": 36060 + }, + { + "epoch": 1.3927178655546546, + "grad_norm": 2.371248483657837, + "learning_rate": 0.00010715471639831653, + "loss": 0.3276, + "step": 36070 + }, + { + "epoch": 1.3931039808486814, + "grad_norm": 0.1784186065196991, + "learning_rate": 0.00010712897537871476, + "loss": 0.0922, + "step": 36080 + }, + { + "epoch": 1.3934900961427081, + "grad_norm": 0.7923040390014648, + "learning_rate": 0.00010710323435911298, + "loss": 0.1599, + "step": 36090 + }, + { + "epoch": 1.393876211436735, + "grad_norm": 1.9013831615447998, + "learning_rate": 0.00010707749333951117, + "loss": 0.4997, + "step": 36100 + }, + { + "epoch": 1.394262326730762, + "grad_norm": 1.8659415245056152, + "learning_rate": 0.0001070517523199094, + "loss": 0.1851, + "step": 36110 + }, + { + "epoch": 1.3946484420247887, + "grad_norm": 2.5775375366210938, + "learning_rate": 0.0001070260113003076, + "loss": 0.2384, + "step": 36120 + }, + { + "epoch": 1.3950345573188154, + "grad_norm": 0.21943879127502441, + "learning_rate": 0.00010700027028070583, + "loss": 0.3996, + "step": 36130 + }, + { + "epoch": 1.3954206726128422, + "grad_norm": 1.1734743118286133, + "learning_rate": 0.00010697452926110403, + "loss": 0.1737, + "step": 36140 + }, + { + "epoch": 1.395806787906869, + "grad_norm": 0.697695791721344, + "learning_rate": 0.00010694878824150225, + "loss": 0.2778, + "step": 36150 + }, + { + "epoch": 1.3961929032008957, + "grad_norm": 3.2881579399108887, + "learning_rate": 0.00010692304722190047, + "loss": 0.3198, + "step": 36160 + }, + { + "epoch": 1.3965790184949225, + "grad_norm": 0.1592467725276947, + "learning_rate": 0.00010689730620229868, + "loss": 0.2591, + "step": 36170 + }, + { + "epoch": 1.3969651337889495, + "grad_norm": 0.4579029083251953, + "learning_rate": 0.0001068715651826969, + "loss": 0.2984, + "step": 36180 + }, + { + "epoch": 1.3973512490829763, + "grad_norm": 0.4286015033721924, + "learning_rate": 0.0001068458241630951, + "loss": 0.4098, + "step": 36190 + }, + { + "epoch": 1.397737364377003, + "grad_norm": 1.7824127674102783, + "learning_rate": 0.00010682008314349332, + "loss": 0.2446, + "step": 36200 + }, + { + "epoch": 1.3981234796710298, + "grad_norm": 0.8584449887275696, + "learning_rate": 0.00010679434212389152, + "loss": 0.1415, + "step": 36210 + }, + { + "epoch": 1.3985095949650566, + "grad_norm": 1.1699339151382446, + "learning_rate": 0.00010676860110428975, + "loss": 0.1706, + "step": 36220 + }, + { + "epoch": 1.3988957102590833, + "grad_norm": 2.615877389907837, + "learning_rate": 0.00010674286008468796, + "loss": 0.2605, + "step": 36230 + }, + { + "epoch": 1.39928182555311, + "grad_norm": 2.182037591934204, + "learning_rate": 0.00010671711906508617, + "loss": 0.2067, + "step": 36240 + }, + { + "epoch": 1.399667940847137, + "grad_norm": 2.183263063430786, + "learning_rate": 0.00010669137804548439, + "loss": 0.4704, + "step": 36250 + }, + { + "epoch": 1.4000540561411636, + "grad_norm": 3.505791187286377, + "learning_rate": 0.00010666563702588259, + "loss": 0.3385, + "step": 36260 + }, + { + "epoch": 1.4004401714351906, + "grad_norm": 1.2262030839920044, + "learning_rate": 0.00010663989600628081, + "loss": 0.3999, + "step": 36270 + }, + { + "epoch": 1.4008262867292174, + "grad_norm": 2.4024577140808105, + "learning_rate": 0.00010661415498667904, + "loss": 0.1984, + "step": 36280 + }, + { + "epoch": 1.4012124020232442, + "grad_norm": 0.4166090786457062, + 
"learning_rate": 0.00010658841396707724, + "loss": 0.2448, + "step": 36290 + }, + { + "epoch": 1.401598517317271, + "grad_norm": 0.422590047121048, + "learning_rate": 0.00010656267294747547, + "loss": 0.2464, + "step": 36300 + }, + { + "epoch": 1.4019846326112977, + "grad_norm": 2.287503480911255, + "learning_rate": 0.00010653693192787367, + "loss": 0.1621, + "step": 36310 + }, + { + "epoch": 1.4023707479053245, + "grad_norm": 0.8126110434532166, + "learning_rate": 0.00010651119090827188, + "loss": 0.2097, + "step": 36320 + }, + { + "epoch": 1.4027568631993512, + "grad_norm": 0.683016836643219, + "learning_rate": 0.00010648544988867008, + "loss": 0.1512, + "step": 36330 + }, + { + "epoch": 1.4031429784933782, + "grad_norm": 1.5477893352508545, + "learning_rate": 0.00010645970886906831, + "loss": 0.2169, + "step": 36340 + }, + { + "epoch": 1.403529093787405, + "grad_norm": 2.183166265487671, + "learning_rate": 0.00010643396784946653, + "loss": 0.4307, + "step": 36350 + }, + { + "epoch": 1.4039152090814317, + "grad_norm": 1.5782747268676758, + "learning_rate": 0.00010640822682986473, + "loss": 0.291, + "step": 36360 + }, + { + "epoch": 1.4043013243754585, + "grad_norm": 1.2823392152786255, + "learning_rate": 0.00010638248581026296, + "loss": 0.315, + "step": 36370 + }, + { + "epoch": 1.4046874396694853, + "grad_norm": 2.0126500129699707, + "learning_rate": 0.00010635674479066116, + "loss": 0.2565, + "step": 36380 + }, + { + "epoch": 1.405073554963512, + "grad_norm": 2.490217447280884, + "learning_rate": 0.00010633100377105937, + "loss": 0.2665, + "step": 36390 + }, + { + "epoch": 1.4054596702575388, + "grad_norm": 1.2054855823516846, + "learning_rate": 0.00010630526275145757, + "loss": 0.5455, + "step": 36400 + }, + { + "epoch": 1.4058457855515658, + "grad_norm": 0.7968757748603821, + "learning_rate": 0.0001062795217318558, + "loss": 0.333, + "step": 36410 + }, + { + "epoch": 1.4062319008455926, + "grad_norm": 1.1027718782424927, + "learning_rate": 0.00010625378071225403, + "loss": 0.2019, + "step": 36420 + }, + { + "epoch": 1.4066180161396193, + "grad_norm": 1.987302541732788, + "learning_rate": 0.00010622803969265223, + "loss": 0.3159, + "step": 36430 + }, + { + "epoch": 1.407004131433646, + "grad_norm": 1.5426512956619263, + "learning_rate": 0.00010620229867305045, + "loss": 0.3759, + "step": 36440 + }, + { + "epoch": 1.4073902467276729, + "grad_norm": 0.5228156447410583, + "learning_rate": 0.00010617655765344865, + "loss": 0.2465, + "step": 36450 + }, + { + "epoch": 1.4077763620216996, + "grad_norm": 0.46890121698379517, + "learning_rate": 0.00010615081663384687, + "loss": 0.302, + "step": 36460 + }, + { + "epoch": 1.4081624773157264, + "grad_norm": 2.1506495475769043, + "learning_rate": 0.0001061250756142451, + "loss": 0.2569, + "step": 36470 + }, + { + "epoch": 1.4085485926097534, + "grad_norm": 2.307468891143799, + "learning_rate": 0.0001060993345946433, + "loss": 0.2009, + "step": 36480 + }, + { + "epoch": 1.40893470790378, + "grad_norm": 0.07033026963472366, + "learning_rate": 0.00010607359357504152, + "loss": 0.1728, + "step": 36490 + }, + { + "epoch": 1.409320823197807, + "grad_norm": 0.3262972831726074, + "learning_rate": 0.00010604785255543972, + "loss": 0.2905, + "step": 36500 + }, + { + "epoch": 1.4097069384918337, + "grad_norm": 0.755646824836731, + "learning_rate": 0.00010602211153583795, + "loss": 0.3287, + "step": 36510 + }, + { + "epoch": 1.4100930537858605, + "grad_norm": 1.1180161237716675, + "learning_rate": 0.00010599637051623615, + "loss": 0.2743, + "step": 36520 + 
}, + { + "epoch": 1.4104791690798872, + "grad_norm": 1.2358392477035522, + "learning_rate": 0.00010597062949663436, + "loss": 0.2672, + "step": 36530 + }, + { + "epoch": 1.410865284373914, + "grad_norm": 1.4188988208770752, + "learning_rate": 0.00010594488847703259, + "loss": 0.2552, + "step": 36540 + }, + { + "epoch": 1.4112513996679408, + "grad_norm": 1.4727978706359863, + "learning_rate": 0.00010591914745743079, + "loss": 0.2256, + "step": 36550 + }, + { + "epoch": 1.4116375149619675, + "grad_norm": 0.08973213285207748, + "learning_rate": 0.00010589340643782901, + "loss": 0.224, + "step": 36560 + }, + { + "epoch": 1.4120236302559945, + "grad_norm": 0.9915102124214172, + "learning_rate": 0.00010586766541822721, + "loss": 0.195, + "step": 36570 + }, + { + "epoch": 1.4124097455500213, + "grad_norm": 0.8524800539016724, + "learning_rate": 0.00010584192439862544, + "loss": 0.1492, + "step": 36580 + }, + { + "epoch": 1.412795860844048, + "grad_norm": 1.8414466381072998, + "learning_rate": 0.00010581618337902365, + "loss": 0.3412, + "step": 36590 + }, + { + "epoch": 1.4131819761380748, + "grad_norm": 2.596547842025757, + "learning_rate": 0.00010579044235942185, + "loss": 0.3469, + "step": 36600 + }, + { + "epoch": 1.4135680914321016, + "grad_norm": 0.5367813110351562, + "learning_rate": 0.00010576470133982008, + "loss": 0.2274, + "step": 36610 + }, + { + "epoch": 1.4139542067261284, + "grad_norm": 2.70858097076416, + "learning_rate": 0.00010573896032021828, + "loss": 0.171, + "step": 36620 + }, + { + "epoch": 1.4143403220201551, + "grad_norm": 1.7077667713165283, + "learning_rate": 0.00010571321930061651, + "loss": 0.2638, + "step": 36630 + }, + { + "epoch": 1.4147264373141821, + "grad_norm": 0.9189953804016113, + "learning_rate": 0.00010568747828101471, + "loss": 0.2283, + "step": 36640 + }, + { + "epoch": 1.4151125526082087, + "grad_norm": 2.0240087509155273, + "learning_rate": 0.00010566173726141293, + "loss": 0.3166, + "step": 36650 + }, + { + "epoch": 1.4154986679022357, + "grad_norm": 3.3304507732391357, + "learning_rate": 0.00010563599624181115, + "loss": 0.3046, + "step": 36660 + }, + { + "epoch": 1.4158847831962624, + "grad_norm": 1.955929160118103, + "learning_rate": 0.00010561025522220935, + "loss": 0.2653, + "step": 36670 + }, + { + "epoch": 1.4162708984902892, + "grad_norm": 2.2465381622314453, + "learning_rate": 0.00010558451420260757, + "loss": 0.3119, + "step": 36680 + }, + { + "epoch": 1.416657013784316, + "grad_norm": 2.108614921569824, + "learning_rate": 0.00010555877318300577, + "loss": 0.2872, + "step": 36690 + }, + { + "epoch": 1.4170431290783427, + "grad_norm": 1.35493004322052, + "learning_rate": 0.000105533032163404, + "loss": 0.1491, + "step": 36700 + }, + { + "epoch": 1.4174292443723697, + "grad_norm": 0.9102393984794617, + "learning_rate": 0.0001055072911438022, + "loss": 0.1565, + "step": 36710 + }, + { + "epoch": 1.4178153596663963, + "grad_norm": 1.7878345251083374, + "learning_rate": 0.00010548155012420043, + "loss": 0.4864, + "step": 36720 + }, + { + "epoch": 1.4182014749604233, + "grad_norm": 1.4333637952804565, + "learning_rate": 0.00010545580910459864, + "loss": 0.3246, + "step": 36730 + }, + { + "epoch": 1.41858759025445, + "grad_norm": 0.0966360941529274, + "learning_rate": 0.00010543006808499684, + "loss": 0.1758, + "step": 36740 + }, + { + "epoch": 1.4189737055484768, + "grad_norm": 0.30043545365333557, + "learning_rate": 0.00010540432706539507, + "loss": 0.1811, + "step": 36750 + }, + { + "epoch": 1.4193598208425036, + "grad_norm": 1.7705951929092407, + 
"learning_rate": 0.00010537858604579327, + "loss": 0.1824, + "step": 36760 + }, + { + "epoch": 1.4197459361365303, + "grad_norm": 1.2025195360183716, + "learning_rate": 0.0001053528450261915, + "loss": 0.1463, + "step": 36770 + }, + { + "epoch": 1.420132051430557, + "grad_norm": 0.3154304325580597, + "learning_rate": 0.00010532710400658972, + "loss": 0.1817, + "step": 36780 + }, + { + "epoch": 1.4205181667245839, + "grad_norm": 3.392331838607788, + "learning_rate": 0.00010530136298698792, + "loss": 0.2938, + "step": 36790 + }, + { + "epoch": 1.4209042820186109, + "grad_norm": 0.7256132364273071, + "learning_rate": 0.00010527562196738613, + "loss": 0.28, + "step": 36800 + }, + { + "epoch": 1.4212903973126376, + "grad_norm": 1.9007991552352905, + "learning_rate": 0.00010524988094778435, + "loss": 0.2306, + "step": 36810 + }, + { + "epoch": 1.4216765126066644, + "grad_norm": 3.21189546585083, + "learning_rate": 0.00010522413992818256, + "loss": 0.2945, + "step": 36820 + }, + { + "epoch": 1.4220626279006912, + "grad_norm": 0.20476600527763367, + "learning_rate": 0.00010519839890858076, + "loss": 0.3129, + "step": 36830 + }, + { + "epoch": 1.422448743194718, + "grad_norm": 1.4225107431411743, + "learning_rate": 0.00010517265788897899, + "loss": 0.2262, + "step": 36840 + }, + { + "epoch": 1.4228348584887447, + "grad_norm": 1.203728437423706, + "learning_rate": 0.00010514691686937721, + "loss": 0.176, + "step": 36850 + }, + { + "epoch": 1.4232209737827715, + "grad_norm": 0.3001759648323059, + "learning_rate": 0.00010512117584977541, + "loss": 0.218, + "step": 36860 + }, + { + "epoch": 1.4236070890767984, + "grad_norm": 2.3143389225006104, + "learning_rate": 0.00010509543483017363, + "loss": 0.1562, + "step": 36870 + }, + { + "epoch": 1.423993204370825, + "grad_norm": 0.5087364912033081, + "learning_rate": 0.00010506969381057184, + "loss": 0.1207, + "step": 36880 + }, + { + "epoch": 1.424379319664852, + "grad_norm": 1.6521960496902466, + "learning_rate": 0.00010504395279097005, + "loss": 0.4756, + "step": 36890 + }, + { + "epoch": 1.4247654349588788, + "grad_norm": 0.09236706793308258, + "learning_rate": 0.00010501821177136825, + "loss": 0.1755, + "step": 36900 + }, + { + "epoch": 1.4251515502529055, + "grad_norm": 0.2143094390630722, + "learning_rate": 0.00010499247075176648, + "loss": 0.2126, + "step": 36910 + }, + { + "epoch": 1.4255376655469323, + "grad_norm": 1.210170865058899, + "learning_rate": 0.00010496672973216471, + "loss": 0.1391, + "step": 36920 + }, + { + "epoch": 1.425923780840959, + "grad_norm": 0.6248244047164917, + "learning_rate": 0.00010494098871256291, + "loss": 0.2898, + "step": 36930 + }, + { + "epoch": 1.426309896134986, + "grad_norm": 2.1100337505340576, + "learning_rate": 0.00010491524769296113, + "loss": 0.1198, + "step": 36940 + }, + { + "epoch": 1.4266960114290126, + "grad_norm": 2.5673348903656006, + "learning_rate": 0.00010488950667335933, + "loss": 0.2284, + "step": 36950 + }, + { + "epoch": 1.4270821267230396, + "grad_norm": 1.868195652961731, + "learning_rate": 0.00010486376565375755, + "loss": 0.3738, + "step": 36960 + }, + { + "epoch": 1.4274682420170663, + "grad_norm": 1.0951671600341797, + "learning_rate": 0.00010483802463415577, + "loss": 0.1424, + "step": 36970 + }, + { + "epoch": 1.4278543573110931, + "grad_norm": 0.4791143536567688, + "learning_rate": 0.00010481228361455397, + "loss": 0.1445, + "step": 36980 + }, + { + "epoch": 1.4282404726051199, + "grad_norm": 3.2477540969848633, + "learning_rate": 0.0001047865425949522, + "loss": 0.2676, + "step": 36990 
+ }, + { + "epoch": 1.4286265878991466, + "grad_norm": 0.8082342147827148, + "learning_rate": 0.0001047608015753504, + "loss": 0.2295, + "step": 37000 + }, + { + "epoch": 1.4290127031931734, + "grad_norm": 2.3048954010009766, + "learning_rate": 0.00010473506055574863, + "loss": 0.1996, + "step": 37010 + }, + { + "epoch": 1.4293988184872002, + "grad_norm": 0.42648783326148987, + "learning_rate": 0.00010470931953614683, + "loss": 0.2549, + "step": 37020 + }, + { + "epoch": 1.4297849337812272, + "grad_norm": 1.8794362545013428, + "learning_rate": 0.00010468357851654504, + "loss": 0.1929, + "step": 37030 + }, + { + "epoch": 1.430171049075254, + "grad_norm": 0.9679039120674133, + "learning_rate": 0.00010465783749694327, + "loss": 0.1544, + "step": 37040 + }, + { + "epoch": 1.4305571643692807, + "grad_norm": 0.7789367437362671, + "learning_rate": 0.00010463209647734147, + "loss": 0.2251, + "step": 37050 + }, + { + "epoch": 1.4309432796633075, + "grad_norm": 1.1705437898635864, + "learning_rate": 0.0001046063554577397, + "loss": 0.1567, + "step": 37060 + }, + { + "epoch": 1.4313293949573342, + "grad_norm": 0.13087430596351624, + "learning_rate": 0.00010458061443813789, + "loss": 0.3237, + "step": 37070 + }, + { + "epoch": 1.431715510251361, + "grad_norm": 1.0658761262893677, + "learning_rate": 0.00010455487341853612, + "loss": 0.24, + "step": 37080 + }, + { + "epoch": 1.4321016255453878, + "grad_norm": 1.686922311782837, + "learning_rate": 0.00010452913239893433, + "loss": 0.1911, + "step": 37090 + }, + { + "epoch": 1.4324877408394148, + "grad_norm": 2.010221481323242, + "learning_rate": 0.00010450339137933253, + "loss": 0.4569, + "step": 37100 + }, + { + "epoch": 1.4328738561334413, + "grad_norm": 0.8007562160491943, + "learning_rate": 0.00010447765035973076, + "loss": 0.1198, + "step": 37110 + }, + { + "epoch": 1.4332599714274683, + "grad_norm": 0.5455211997032166, + "learning_rate": 0.00010445190934012896, + "loss": 0.2179, + "step": 37120 + }, + { + "epoch": 1.433646086721495, + "grad_norm": 1.4133542776107788, + "learning_rate": 0.00010442616832052719, + "loss": 0.45, + "step": 37130 + }, + { + "epoch": 1.4340322020155218, + "grad_norm": 1.6218222379684448, + "learning_rate": 0.00010440042730092539, + "loss": 0.19, + "step": 37140 + }, + { + "epoch": 1.4344183173095486, + "grad_norm": 0.6425970196723938, + "learning_rate": 0.00010437468628132361, + "loss": 0.2345, + "step": 37150 + }, + { + "epoch": 1.4348044326035754, + "grad_norm": 0.4344918131828308, + "learning_rate": 0.00010434894526172183, + "loss": 0.2532, + "step": 37160 + }, + { + "epoch": 1.4351905478976021, + "grad_norm": 0.6628998517990112, + "learning_rate": 0.00010432320424212003, + "loss": 0.2281, + "step": 37170 + }, + { + "epoch": 1.435576663191629, + "grad_norm": 0.8695842027664185, + "learning_rate": 0.00010429746322251825, + "loss": 0.2796, + "step": 37180 + }, + { + "epoch": 1.435962778485656, + "grad_norm": 0.16481854021549225, + "learning_rate": 0.00010427172220291645, + "loss": 0.2333, + "step": 37190 + }, + { + "epoch": 1.4363488937796827, + "grad_norm": 0.7194454073905945, + "learning_rate": 0.00010424598118331468, + "loss": 0.1413, + "step": 37200 + }, + { + "epoch": 1.4367350090737094, + "grad_norm": 4.845508575439453, + "learning_rate": 0.00010422024016371288, + "loss": 0.3944, + "step": 37210 + }, + { + "epoch": 1.4371211243677362, + "grad_norm": 2.6386618614196777, + "learning_rate": 0.0001041944991441111, + "loss": 0.3037, + "step": 37220 + }, + { + "epoch": 1.437507239661763, + "grad_norm": 
0.4089922308921814, + "learning_rate": 0.00010416875812450932, + "loss": 0.2985, + "step": 37230 + }, + { + "epoch": 1.4378933549557897, + "grad_norm": 1.456944465637207, + "learning_rate": 0.00010414301710490752, + "loss": 0.3031, + "step": 37240 + }, + { + "epoch": 1.4382794702498165, + "grad_norm": 1.301829218864441, + "learning_rate": 0.00010411727608530575, + "loss": 0.2578, + "step": 37250 + }, + { + "epoch": 1.4386655855438435, + "grad_norm": 1.2072703838348389, + "learning_rate": 0.00010409153506570395, + "loss": 0.391, + "step": 37260 + }, + { + "epoch": 1.4390517008378703, + "grad_norm": 0.5538531541824341, + "learning_rate": 0.00010406579404610217, + "loss": 0.1867, + "step": 37270 + }, + { + "epoch": 1.439437816131897, + "grad_norm": 1.0898678302764893, + "learning_rate": 0.0001040400530265004, + "loss": 0.2112, + "step": 37280 + }, + { + "epoch": 1.4398239314259238, + "grad_norm": 1.5769239664077759, + "learning_rate": 0.0001040143120068986, + "loss": 0.3121, + "step": 37290 + }, + { + "epoch": 1.4402100467199506, + "grad_norm": 0.3964422345161438, + "learning_rate": 0.00010398857098729681, + "loss": 0.196, + "step": 37300 + }, + { + "epoch": 1.4405961620139773, + "grad_norm": 1.0268182754516602, + "learning_rate": 0.00010396282996769501, + "loss": 0.1785, + "step": 37310 + }, + { + "epoch": 1.440982277308004, + "grad_norm": 1.750826358795166, + "learning_rate": 0.00010393708894809324, + "loss": 0.3654, + "step": 37320 + }, + { + "epoch": 1.441368392602031, + "grad_norm": 1.1231745481491089, + "learning_rate": 0.00010391134792849144, + "loss": 0.2594, + "step": 37330 + }, + { + "epoch": 1.4417545078960576, + "grad_norm": 0.2897786498069763, + "learning_rate": 0.00010388560690888967, + "loss": 0.3483, + "step": 37340 + }, + { + "epoch": 1.4421406231900846, + "grad_norm": 0.07170752435922623, + "learning_rate": 0.00010385986588928789, + "loss": 0.1462, + "step": 37350 + }, + { + "epoch": 1.4425267384841114, + "grad_norm": 3.326099395751953, + "learning_rate": 0.00010383412486968609, + "loss": 0.3785, + "step": 37360 + }, + { + "epoch": 1.4429128537781382, + "grad_norm": 0.5063263773918152, + "learning_rate": 0.0001038083838500843, + "loss": 0.2974, + "step": 37370 + }, + { + "epoch": 1.443298969072165, + "grad_norm": 1.160088062286377, + "learning_rate": 0.00010378264283048252, + "loss": 0.3003, + "step": 37380 + }, + { + "epoch": 1.4436850843661917, + "grad_norm": 0.7043284177780151, + "learning_rate": 0.00010375690181088073, + "loss": 0.3193, + "step": 37390 + }, + { + "epoch": 1.4440711996602185, + "grad_norm": 0.2916620373725891, + "learning_rate": 0.00010373116079127896, + "loss": 0.235, + "step": 37400 + }, + { + "epoch": 1.4444573149542452, + "grad_norm": 2.1940219402313232, + "learning_rate": 0.00010370541977167716, + "loss": 0.2657, + "step": 37410 + }, + { + "epoch": 1.4448434302482722, + "grad_norm": 1.849794626235962, + "learning_rate": 0.00010367967875207539, + "loss": 0.3121, + "step": 37420 + }, + { + "epoch": 1.445229545542299, + "grad_norm": 1.1139589548110962, + "learning_rate": 0.00010365393773247359, + "loss": 0.2179, + "step": 37430 + }, + { + "epoch": 1.4456156608363258, + "grad_norm": 0.917142927646637, + "learning_rate": 0.0001036281967128718, + "loss": 0.2406, + "step": 37440 + }, + { + "epoch": 1.4460017761303525, + "grad_norm": 1.6973673105239868, + "learning_rate": 0.00010360245569327001, + "loss": 0.3236, + "step": 37450 + }, + { + "epoch": 1.4463878914243793, + "grad_norm": 0.3979933559894562, + "learning_rate": 0.00010357671467366823, + "loss": 
0.1477, + "step": 37460 + }, + { + "epoch": 1.446774006718406, + "grad_norm": 0.8938451409339905, + "learning_rate": 0.00010355097365406645, + "loss": 0.116, + "step": 37470 + }, + { + "epoch": 1.4471601220124328, + "grad_norm": 1.068787693977356, + "learning_rate": 0.00010352523263446465, + "loss": 0.2342, + "step": 37480 + }, + { + "epoch": 1.4475462373064598, + "grad_norm": 1.1985591650009155, + "learning_rate": 0.00010349949161486288, + "loss": 0.2423, + "step": 37490 + }, + { + "epoch": 1.4479323526004866, + "grad_norm": 1.0383973121643066, + "learning_rate": 0.00010347375059526108, + "loss": 0.3059, + "step": 37500 + }, + { + "epoch": 1.4483184678945134, + "grad_norm": 0.26652297377586365, + "learning_rate": 0.0001034480095756593, + "loss": 0.262, + "step": 37510 + }, + { + "epoch": 1.4487045831885401, + "grad_norm": 1.8498083353042603, + "learning_rate": 0.0001034222685560575, + "loss": 0.2122, + "step": 37520 + }, + { + "epoch": 1.4490906984825669, + "grad_norm": 1.2896068096160889, + "learning_rate": 0.00010339652753645572, + "loss": 0.2939, + "step": 37530 + }, + { + "epoch": 1.4494768137765937, + "grad_norm": 1.423343300819397, + "learning_rate": 0.00010337078651685395, + "loss": 0.3375, + "step": 37540 + }, + { + "epoch": 1.4498629290706204, + "grad_norm": 1.8248246908187866, + "learning_rate": 0.00010334504549725215, + "loss": 0.3534, + "step": 37550 + }, + { + "epoch": 1.4502490443646474, + "grad_norm": 2.3713393211364746, + "learning_rate": 0.00010331930447765037, + "loss": 0.2795, + "step": 37560 + }, + { + "epoch": 1.450635159658674, + "grad_norm": 2.1431849002838135, + "learning_rate": 0.00010329356345804857, + "loss": 0.2503, + "step": 37570 + }, + { + "epoch": 1.451021274952701, + "grad_norm": 1.6521297693252563, + "learning_rate": 0.0001032678224384468, + "loss": 0.1789, + "step": 37580 + }, + { + "epoch": 1.4514073902467277, + "grad_norm": 1.5589754581451416, + "learning_rate": 0.00010324208141884501, + "loss": 0.3201, + "step": 37590 + }, + { + "epoch": 1.4517935055407545, + "grad_norm": 0.4339803159236908, + "learning_rate": 0.00010321634039924321, + "loss": 0.2964, + "step": 37600 + }, + { + "epoch": 1.4521796208347812, + "grad_norm": 0.30054792761802673, + "learning_rate": 0.00010319059937964144, + "loss": 0.3477, + "step": 37610 + }, + { + "epoch": 1.452565736128808, + "grad_norm": 2.2864038944244385, + "learning_rate": 0.00010316485836003964, + "loss": 0.2849, + "step": 37620 + }, + { + "epoch": 1.4529518514228348, + "grad_norm": 1.8392651081085205, + "learning_rate": 0.00010313911734043787, + "loss": 0.2089, + "step": 37630 + }, + { + "epoch": 1.4533379667168616, + "grad_norm": 1.0444347858428955, + "learning_rate": 0.00010311337632083607, + "loss": 0.2306, + "step": 37640 + }, + { + "epoch": 1.4537240820108885, + "grad_norm": 1.265647053718567, + "learning_rate": 0.00010308763530123429, + "loss": 0.2192, + "step": 37650 + }, + { + "epoch": 1.4541101973049153, + "grad_norm": 1.0464913845062256, + "learning_rate": 0.0001030618942816325, + "loss": 0.2857, + "step": 37660 + }, + { + "epoch": 1.454496312598942, + "grad_norm": 0.5931240320205688, + "learning_rate": 0.0001030361532620307, + "loss": 0.1751, + "step": 37670 + }, + { + "epoch": 1.4548824278929688, + "grad_norm": 2.0155346393585205, + "learning_rate": 0.00010301041224242893, + "loss": 0.4124, + "step": 37680 + }, + { + "epoch": 1.4552685431869956, + "grad_norm": 2.843345880508423, + "learning_rate": 0.00010298467122282713, + "loss": 0.2119, + "step": 37690 + }, + { + "epoch": 1.4556546584810224, + 
"grad_norm": 1.977612018585205, + "learning_rate": 0.00010295893020322536, + "loss": 0.271, + "step": 37700 + }, + { + "epoch": 1.4560407737750491, + "grad_norm": 1.2862237691879272, + "learning_rate": 0.00010293318918362356, + "loss": 0.4147, + "step": 37710 + }, + { + "epoch": 1.4564268890690761, + "grad_norm": 0.8235340118408203, + "learning_rate": 0.00010290744816402179, + "loss": 0.1918, + "step": 37720 + }, + { + "epoch": 1.456813004363103, + "grad_norm": 1.0351753234863281, + "learning_rate": 0.00010288170714442, + "loss": 0.2343, + "step": 37730 + }, + { + "epoch": 1.4571991196571297, + "grad_norm": 0.2567160725593567, + "learning_rate": 0.0001028559661248182, + "loss": 0.2664, + "step": 37740 + }, + { + "epoch": 1.4575852349511564, + "grad_norm": 2.7498669624328613, + "learning_rate": 0.00010283022510521643, + "loss": 0.2839, + "step": 37750 + }, + { + "epoch": 1.4579713502451832, + "grad_norm": 0.7125422358512878, + "learning_rate": 0.00010280448408561463, + "loss": 0.2411, + "step": 37760 + }, + { + "epoch": 1.45835746553921, + "grad_norm": 0.720761239528656, + "learning_rate": 0.00010277874306601285, + "loss": 0.315, + "step": 37770 + }, + { + "epoch": 1.4587435808332367, + "grad_norm": 3.188563346862793, + "learning_rate": 0.00010275300204641108, + "loss": 0.3945, + "step": 37780 + }, + { + "epoch": 1.4591296961272637, + "grad_norm": 1.23422110080719, + "learning_rate": 0.00010272726102680928, + "loss": 0.1828, + "step": 37790 + }, + { + "epoch": 1.4595158114212903, + "grad_norm": 1.1572456359863281, + "learning_rate": 0.00010270152000720749, + "loss": 0.1687, + "step": 37800 + }, + { + "epoch": 1.4599019267153173, + "grad_norm": 0.6565262079238892, + "learning_rate": 0.00010267577898760569, + "loss": 0.3145, + "step": 37810 + }, + { + "epoch": 1.460288042009344, + "grad_norm": 0.6239646673202515, + "learning_rate": 0.00010265003796800392, + "loss": 0.2381, + "step": 37820 + }, + { + "epoch": 1.4606741573033708, + "grad_norm": 0.7578912377357483, + "learning_rate": 0.00010262429694840212, + "loss": 0.2019, + "step": 37830 + }, + { + "epoch": 1.4610602725973976, + "grad_norm": 1.0978549718856812, + "learning_rate": 0.00010259855592880035, + "loss": 0.2477, + "step": 37840 + }, + { + "epoch": 1.4614463878914243, + "grad_norm": 0.3615519404411316, + "learning_rate": 0.00010257281490919857, + "loss": 0.4241, + "step": 37850 + }, + { + "epoch": 1.461832503185451, + "grad_norm": 0.10355047881603241, + "learning_rate": 0.00010254707388959677, + "loss": 0.1181, + "step": 37860 + }, + { + "epoch": 1.4622186184794779, + "grad_norm": 0.23957425355911255, + "learning_rate": 0.00010252133286999499, + "loss": 0.1675, + "step": 37870 + }, + { + "epoch": 1.4626047337735049, + "grad_norm": 1.7581062316894531, + "learning_rate": 0.00010249559185039318, + "loss": 0.241, + "step": 37880 + }, + { + "epoch": 1.4629908490675316, + "grad_norm": 1.660989761352539, + "learning_rate": 0.00010246985083079141, + "loss": 0.2099, + "step": 37890 + }, + { + "epoch": 1.4633769643615584, + "grad_norm": 1.1359142065048218, + "learning_rate": 0.00010244410981118964, + "loss": 0.3686, + "step": 37900 + }, + { + "epoch": 1.4637630796555852, + "grad_norm": 0.9489149451255798, + "learning_rate": 0.00010241836879158784, + "loss": 0.3616, + "step": 37910 + }, + { + "epoch": 1.464149194949612, + "grad_norm": 1.5209956169128418, + "learning_rate": 0.00010239262777198607, + "loss": 0.3159, + "step": 37920 + }, + { + "epoch": 1.4645353102436387, + "grad_norm": 0.848943293094635, + "learning_rate": 0.00010236688675238427, 
+ "loss": 0.3228, + "step": 37930 + }, + { + "epoch": 1.4649214255376655, + "grad_norm": 1.1476777791976929, + "learning_rate": 0.00010234114573278248, + "loss": 0.1873, + "step": 37940 + }, + { + "epoch": 1.4653075408316925, + "grad_norm": 1.4093862771987915, + "learning_rate": 0.00010231540471318068, + "loss": 0.5554, + "step": 37950 + }, + { + "epoch": 1.465693656125719, + "grad_norm": 0.17349161207675934, + "learning_rate": 0.0001022896636935789, + "loss": 0.2992, + "step": 37960 + }, + { + "epoch": 1.466079771419746, + "grad_norm": 0.28885993361473083, + "learning_rate": 0.00010226392267397713, + "loss": 0.2618, + "step": 37970 + }, + { + "epoch": 1.4664658867137728, + "grad_norm": 1.1087830066680908, + "learning_rate": 0.00010223818165437533, + "loss": 0.2647, + "step": 37980 + }, + { + "epoch": 1.4668520020077995, + "grad_norm": 0.08338876068592072, + "learning_rate": 0.00010221244063477356, + "loss": 0.3057, + "step": 37990 + }, + { + "epoch": 1.4672381173018263, + "grad_norm": 2.159362316131592, + "learning_rate": 0.00010218669961517176, + "loss": 0.1499, + "step": 38000 + }, + { + "epoch": 1.467624232595853, + "grad_norm": 0.8207988142967224, + "learning_rate": 0.00010216095859556997, + "loss": 0.1482, + "step": 38010 + }, + { + "epoch": 1.46801034788988, + "grad_norm": 0.5458611845970154, + "learning_rate": 0.00010213521757596818, + "loss": 0.2794, + "step": 38020 + }, + { + "epoch": 1.4683964631839066, + "grad_norm": 1.6955047845840454, + "learning_rate": 0.0001021094765563664, + "loss": 0.2627, + "step": 38030 + }, + { + "epoch": 1.4687825784779336, + "grad_norm": 0.9796440601348877, + "learning_rate": 0.00010208373553676463, + "loss": 0.1924, + "step": 38040 + }, + { + "epoch": 1.4691686937719604, + "grad_norm": 0.9906508326530457, + "learning_rate": 0.00010205799451716282, + "loss": 0.1597, + "step": 38050 + }, + { + "epoch": 1.4695548090659871, + "grad_norm": 1.8590656518936157, + "learning_rate": 0.00010203225349756105, + "loss": 0.2874, + "step": 38060 + }, + { + "epoch": 1.469940924360014, + "grad_norm": 1.6403672695159912, + "learning_rate": 0.00010200651247795925, + "loss": 0.2926, + "step": 38070 + }, + { + "epoch": 1.4703270396540407, + "grad_norm": 0.4410895109176636, + "learning_rate": 0.00010198077145835746, + "loss": 0.1368, + "step": 38080 + }, + { + "epoch": 1.4707131549480674, + "grad_norm": 2.4955286979675293, + "learning_rate": 0.00010195503043875569, + "loss": 0.3195, + "step": 38090 + }, + { + "epoch": 1.4710992702420942, + "grad_norm": 1.1799029111862183, + "learning_rate": 0.00010192928941915389, + "loss": 0.1867, + "step": 38100 + }, + { + "epoch": 1.4714853855361212, + "grad_norm": 0.7959389090538025, + "learning_rate": 0.00010190354839955212, + "loss": 0.2988, + "step": 38110 + }, + { + "epoch": 1.471871500830148, + "grad_norm": 2.7750720977783203, + "learning_rate": 0.00010187780737995032, + "loss": 0.1886, + "step": 38120 + }, + { + "epoch": 1.4722576161241747, + "grad_norm": 1.5834373235702515, + "learning_rate": 0.00010185206636034854, + "loss": 0.3542, + "step": 38130 + }, + { + "epoch": 1.4726437314182015, + "grad_norm": 1.9757747650146484, + "learning_rate": 0.00010182632534074674, + "loss": 0.302, + "step": 38140 + }, + { + "epoch": 1.4730298467122283, + "grad_norm": 1.1752204895019531, + "learning_rate": 0.00010180058432114497, + "loss": 0.3535, + "step": 38150 + }, + { + "epoch": 1.473415962006255, + "grad_norm": 0.3877789378166199, + "learning_rate": 0.00010177484330154318, + "loss": 0.2508, + "step": 38160 + }, + { + "epoch": 
1.4738020773002818, + "grad_norm": 0.13749545812606812, + "learning_rate": 0.00010174910228194138, + "loss": 0.2141, + "step": 38170 + }, + { + "epoch": 1.4741881925943088, + "grad_norm": 1.3663641214370728, + "learning_rate": 0.00010172336126233961, + "loss": 0.3231, + "step": 38180 + }, + { + "epoch": 1.4745743078883353, + "grad_norm": 1.6267393827438354, + "learning_rate": 0.00010169762024273781, + "loss": 0.3233, + "step": 38190 + }, + { + "epoch": 1.4749604231823623, + "grad_norm": 0.2993789315223694, + "learning_rate": 0.00010167187922313604, + "loss": 0.28, + "step": 38200 + }, + { + "epoch": 1.475346538476389, + "grad_norm": 0.16693222522735596, + "learning_rate": 0.00010164613820353424, + "loss": 0.188, + "step": 38210 + }, + { + "epoch": 1.4757326537704158, + "grad_norm": 0.6939979791641235, + "learning_rate": 0.00010162039718393246, + "loss": 0.263, + "step": 38220 + }, + { + "epoch": 1.4761187690644426, + "grad_norm": 0.37910985946655273, + "learning_rate": 0.00010159465616433068, + "loss": 0.1963, + "step": 38230 + }, + { + "epoch": 1.4765048843584694, + "grad_norm": 1.782188892364502, + "learning_rate": 0.00010156891514472888, + "loss": 0.3814, + "step": 38240 + }, + { + "epoch": 1.4768909996524964, + "grad_norm": 1.159278392791748, + "learning_rate": 0.0001015431741251271, + "loss": 0.2043, + "step": 38250 + }, + { + "epoch": 1.477277114946523, + "grad_norm": 1.09486985206604, + "learning_rate": 0.0001015174331055253, + "loss": 0.2128, + "step": 38260 + }, + { + "epoch": 1.47766323024055, + "grad_norm": 0.36655205488204956, + "learning_rate": 0.00010149169208592353, + "loss": 0.298, + "step": 38270 + }, + { + "epoch": 1.4780493455345767, + "grad_norm": 0.8908851742744446, + "learning_rate": 0.00010146595106632176, + "loss": 0.3707, + "step": 38280 + }, + { + "epoch": 1.4784354608286034, + "grad_norm": 0.251338928937912, + "learning_rate": 0.00010144021004671996, + "loss": 0.2495, + "step": 38290 + }, + { + "epoch": 1.4788215761226302, + "grad_norm": 1.0613712072372437, + "learning_rate": 0.00010141446902711817, + "loss": 0.2112, + "step": 38300 + }, + { + "epoch": 1.479207691416657, + "grad_norm": 1.459799885749817, + "learning_rate": 0.00010138872800751637, + "loss": 0.2595, + "step": 38310 + }, + { + "epoch": 1.4795938067106837, + "grad_norm": 2.6898603439331055, + "learning_rate": 0.0001013629869879146, + "loss": 0.2758, + "step": 38320 + }, + { + "epoch": 1.4799799220047105, + "grad_norm": 0.19628773629665375, + "learning_rate": 0.0001013372459683128, + "loss": 0.1843, + "step": 38330 + }, + { + "epoch": 1.4803660372987375, + "grad_norm": 2.0871078968048096, + "learning_rate": 0.00010131150494871102, + "loss": 0.1661, + "step": 38340 + }, + { + "epoch": 1.4807521525927643, + "grad_norm": 0.7689336538314819, + "learning_rate": 0.00010128576392910925, + "loss": 0.157, + "step": 38350 + }, + { + "epoch": 1.481138267886791, + "grad_norm": 1.4471644163131714, + "learning_rate": 0.00010126002290950745, + "loss": 0.2159, + "step": 38360 + }, + { + "epoch": 1.4815243831808178, + "grad_norm": 2.198559522628784, + "learning_rate": 0.00010123428188990566, + "loss": 0.5609, + "step": 38370 + }, + { + "epoch": 1.4819104984748446, + "grad_norm": 0.16012130677700043, + "learning_rate": 0.00010120854087030386, + "loss": 0.1979, + "step": 38380 + }, + { + "epoch": 1.4822966137688713, + "grad_norm": 1.0222225189208984, + "learning_rate": 0.00010118279985070209, + "loss": 0.1547, + "step": 38390 + }, + { + "epoch": 1.482682729062898, + "grad_norm": 2.7192416191101074, + "learning_rate": 
0.00010115705883110032, + "loss": 0.4942, + "step": 38400 + }, + { + "epoch": 1.483068844356925, + "grad_norm": 1.887128472328186, + "learning_rate": 0.00010113131781149852, + "loss": 0.177, + "step": 38410 + }, + { + "epoch": 1.4834549596509516, + "grad_norm": 2.7628560066223145, + "learning_rate": 0.00010110557679189674, + "loss": 0.2931, + "step": 38420 + }, + { + "epoch": 1.4838410749449786, + "grad_norm": 0.3852572739124298, + "learning_rate": 0.00010107983577229494, + "loss": 0.3392, + "step": 38430 + }, + { + "epoch": 1.4842271902390054, + "grad_norm": 1.047448992729187, + "learning_rate": 0.00010105409475269316, + "loss": 0.3741, + "step": 38440 + }, + { + "epoch": 1.4846133055330322, + "grad_norm": 1.4930602312088013, + "learning_rate": 0.00010102835373309136, + "loss": 0.2564, + "step": 38450 + }, + { + "epoch": 1.484999420827059, + "grad_norm": 1.3012608289718628, + "learning_rate": 0.00010100261271348958, + "loss": 0.3376, + "step": 38460 + }, + { + "epoch": 1.4853855361210857, + "grad_norm": 2.163942337036133, + "learning_rate": 0.00010097687169388781, + "loss": 0.3548, + "step": 38470 + }, + { + "epoch": 1.4857716514151125, + "grad_norm": 1.864189624786377, + "learning_rate": 0.00010095113067428601, + "loss": 0.165, + "step": 38480 + }, + { + "epoch": 1.4861577667091392, + "grad_norm": 0.5661312341690063, + "learning_rate": 0.00010092538965468424, + "loss": 0.1764, + "step": 38490 + }, + { + "epoch": 1.4865438820031662, + "grad_norm": 0.13517481088638306, + "learning_rate": 0.00010089964863508244, + "loss": 0.5223, + "step": 38500 + }, + { + "epoch": 1.486929997297193, + "grad_norm": 0.665143609046936, + "learning_rate": 0.00010087390761548065, + "loss": 0.1943, + "step": 38510 + }, + { + "epoch": 1.4873161125912198, + "grad_norm": 1.2759610414505005, + "learning_rate": 0.00010084816659587885, + "loss": 0.3023, + "step": 38520 + }, + { + "epoch": 1.4877022278852465, + "grad_norm": 1.3209573030471802, + "learning_rate": 0.00010082242557627708, + "loss": 0.1101, + "step": 38530 + }, + { + "epoch": 1.4880883431792733, + "grad_norm": 1.2501552104949951, + "learning_rate": 0.0001007966845566753, + "loss": 0.3931, + "step": 38540 + }, + { + "epoch": 1.4884744584733, + "grad_norm": 0.6862074732780457, + "learning_rate": 0.0001007709435370735, + "loss": 0.4093, + "step": 38550 + }, + { + "epoch": 1.4888605737673268, + "grad_norm": 1.90501070022583, + "learning_rate": 0.00010074520251747173, + "loss": 0.2553, + "step": 38560 + }, + { + "epoch": 1.4892466890613538, + "grad_norm": 1.6547000408172607, + "learning_rate": 0.00010071946149786993, + "loss": 0.1558, + "step": 38570 + }, + { + "epoch": 1.4896328043553806, + "grad_norm": 0.35097751021385193, + "learning_rate": 0.00010069372047826814, + "loss": 0.2253, + "step": 38580 + }, + { + "epoch": 1.4900189196494074, + "grad_norm": 0.15141837298870087, + "learning_rate": 0.00010066797945866637, + "loss": 0.2124, + "step": 38590 + }, + { + "epoch": 1.4904050349434341, + "grad_norm": 1.7070786952972412, + "learning_rate": 0.00010064223843906457, + "loss": 0.3181, + "step": 38600 + }, + { + "epoch": 1.490791150237461, + "grad_norm": 0.5400305390357971, + "learning_rate": 0.0001006164974194628, + "loss": 0.3203, + "step": 38610 + }, + { + "epoch": 1.4911772655314877, + "grad_norm": 1.6475050449371338, + "learning_rate": 0.000100590756399861, + "loss": 0.2965, + "step": 38620 + }, + { + "epoch": 1.4915633808255144, + "grad_norm": 0.21372176706790924, + "learning_rate": 0.00010056501538025922, + "loss": 0.1456, + "step": 38630 + }, + { + 
"epoch": 1.4919494961195414, + "grad_norm": 0.24179309606552124, + "learning_rate": 0.00010053927436065742, + "loss": 0.2077, + "step": 38640 + }, + { + "epoch": 1.492335611413568, + "grad_norm": 1.2079945802688599, + "learning_rate": 0.00010051353334105564, + "loss": 0.1554, + "step": 38650 + }, + { + "epoch": 1.492721726707595, + "grad_norm": 1.8915836811065674, + "learning_rate": 0.00010048779232145386, + "loss": 0.3815, + "step": 38660 + }, + { + "epoch": 1.4931078420016217, + "grad_norm": 1.8128750324249268, + "learning_rate": 0.00010046205130185206, + "loss": 0.1944, + "step": 38670 + }, + { + "epoch": 1.4934939572956485, + "grad_norm": 2.0955018997192383, + "learning_rate": 0.00010043631028225029, + "loss": 0.2468, + "step": 38680 + }, + { + "epoch": 1.4938800725896753, + "grad_norm": 3.0578064918518066, + "learning_rate": 0.00010041056926264849, + "loss": 0.2186, + "step": 38690 + }, + { + "epoch": 1.494266187883702, + "grad_norm": 0.7817699909210205, + "learning_rate": 0.00010038482824304672, + "loss": 0.1154, + "step": 38700 + }, + { + "epoch": 1.4946523031777288, + "grad_norm": 6.556485176086426, + "learning_rate": 0.00010035908722344493, + "loss": 0.4752, + "step": 38710 + }, + { + "epoch": 1.4950384184717556, + "grad_norm": 2.1970055103302, + "learning_rate": 0.00010033334620384314, + "loss": 0.3234, + "step": 38720 + }, + { + "epoch": 1.4954245337657825, + "grad_norm": 1.8929531574249268, + "learning_rate": 0.00010030760518424136, + "loss": 0.1605, + "step": 38730 + }, + { + "epoch": 1.4958106490598093, + "grad_norm": 0.3992670178413391, + "learning_rate": 0.00010028186416463956, + "loss": 0.2417, + "step": 38740 + }, + { + "epoch": 1.496196764353836, + "grad_norm": 1.7508872747421265, + "learning_rate": 0.00010025612314503778, + "loss": 0.329, + "step": 38750 + }, + { + "epoch": 1.4965828796478629, + "grad_norm": 0.24950659275054932, + "learning_rate": 0.00010023038212543598, + "loss": 0.1159, + "step": 38760 + }, + { + "epoch": 1.4969689949418896, + "grad_norm": 1.5187748670578003, + "learning_rate": 0.00010020464110583421, + "loss": 0.1827, + "step": 38770 + }, + { + "epoch": 1.4973551102359164, + "grad_norm": 1.1223959922790527, + "learning_rate": 0.00010017890008623242, + "loss": 0.1935, + "step": 38780 + }, + { + "epoch": 1.4977412255299432, + "grad_norm": 0.9355156421661377, + "learning_rate": 0.00010015315906663064, + "loss": 0.2225, + "step": 38790 + }, + { + "epoch": 1.4981273408239701, + "grad_norm": 0.4834296405315399, + "learning_rate": 0.00010012741804702885, + "loss": 0.3077, + "step": 38800 + }, + { + "epoch": 1.498513456117997, + "grad_norm": 1.0983386039733887, + "learning_rate": 0.00010010167702742705, + "loss": 0.183, + "step": 38810 + }, + { + "epoch": 1.4988995714120237, + "grad_norm": 0.8350847959518433, + "learning_rate": 0.00010007593600782528, + "loss": 0.3972, + "step": 38820 + }, + { + "epoch": 1.4992856867060504, + "grad_norm": 0.8200152516365051, + "learning_rate": 0.00010005019498822348, + "loss": 0.2043, + "step": 38830 + }, + { + "epoch": 1.4996718020000772, + "grad_norm": 0.9136185050010681, + "learning_rate": 0.0001000244539686217, + "loss": 0.1727, + "step": 38840 + }, + { + "epoch": 1.500057917294104, + "grad_norm": 0.8466988205909729, + "learning_rate": 9.999871294901992e-05, + "loss": 0.2119, + "step": 38850 + }, + { + "epoch": 1.5004440325881307, + "grad_norm": 0.4185144305229187, + "learning_rate": 9.997297192941813e-05, + "loss": 0.4046, + "step": 38860 + }, + { + "epoch": 1.5008301478821577, + "grad_norm": 2.232264518737793, + 
"learning_rate": 9.994723090981634e-05, + "loss": 0.304, + "step": 38870 + }, + { + "epoch": 1.5012162631761843, + "grad_norm": 0.13785889744758606, + "learning_rate": 9.992148989021456e-05, + "loss": 0.1045, + "step": 38880 + }, + { + "epoch": 1.5016023784702113, + "grad_norm": 1.6270711421966553, + "learning_rate": 9.989574887061277e-05, + "loss": 0.2791, + "step": 38890 + }, + { + "epoch": 1.501988493764238, + "grad_norm": 0.08486157655715942, + "learning_rate": 9.987000785101098e-05, + "loss": 0.178, + "step": 38900 + }, + { + "epoch": 1.5023746090582648, + "grad_norm": 1.862197995185852, + "learning_rate": 9.98442668314092e-05, + "loss": 0.2461, + "step": 38910 + }, + { + "epoch": 1.5027607243522916, + "grad_norm": 2.752070903778076, + "learning_rate": 9.981852581180741e-05, + "loss": 0.4299, + "step": 38920 + }, + { + "epoch": 1.5031468396463183, + "grad_norm": 2.08542537689209, + "learning_rate": 9.979278479220562e-05, + "loss": 0.2898, + "step": 38930 + }, + { + "epoch": 1.5035329549403453, + "grad_norm": 0.8629382848739624, + "learning_rate": 9.976704377260384e-05, + "loss": 0.226, + "step": 38940 + }, + { + "epoch": 1.5039190702343719, + "grad_norm": 0.5178211331367493, + "learning_rate": 9.974130275300205e-05, + "loss": 0.2444, + "step": 38950 + }, + { + "epoch": 1.5043051855283989, + "grad_norm": 0.25908491015434265, + "learning_rate": 9.971556173340026e-05, + "loss": 0.1643, + "step": 38960 + }, + { + "epoch": 1.5046913008224254, + "grad_norm": 1.1818209886550903, + "learning_rate": 9.968982071379848e-05, + "loss": 0.3187, + "step": 38970 + }, + { + "epoch": 1.5050774161164524, + "grad_norm": 0.13186976313591003, + "learning_rate": 9.966407969419669e-05, + "loss": 0.1982, + "step": 38980 + }, + { + "epoch": 1.5054635314104792, + "grad_norm": 0.18049825727939606, + "learning_rate": 9.963833867459492e-05, + "loss": 0.1288, + "step": 38990 + }, + { + "epoch": 1.505849646704506, + "grad_norm": 0.30261853337287903, + "learning_rate": 9.961259765499312e-05, + "loss": 0.1704, + "step": 39000 + }, + { + "epoch": 1.5062357619985327, + "grad_norm": 2.1437973976135254, + "learning_rate": 9.958685663539133e-05, + "loss": 0.1272, + "step": 39010 + }, + { + "epoch": 1.5066218772925595, + "grad_norm": 2.2844271659851074, + "learning_rate": 9.956111561578954e-05, + "loss": 0.1314, + "step": 39020 + }, + { + "epoch": 1.5070079925865865, + "grad_norm": 1.5845297574996948, + "learning_rate": 9.953537459618776e-05, + "loss": 0.2023, + "step": 39030 + }, + { + "epoch": 1.507394107880613, + "grad_norm": 0.7256748676300049, + "learning_rate": 9.950963357658597e-05, + "loss": 0.4165, + "step": 39040 + }, + { + "epoch": 1.50778022317464, + "grad_norm": 1.7597005367279053, + "learning_rate": 9.948389255698418e-05, + "loss": 0.4209, + "step": 39050 + }, + { + "epoch": 1.5081663384686668, + "grad_norm": 1.5487171411514282, + "learning_rate": 9.945815153738241e-05, + "loss": 0.1918, + "step": 39060 + }, + { + "epoch": 1.5085524537626935, + "grad_norm": 0.1656871736049652, + "learning_rate": 9.943241051778062e-05, + "loss": 0.3513, + "step": 39070 + }, + { + "epoch": 1.5089385690567203, + "grad_norm": 0.1451992392539978, + "learning_rate": 9.940666949817882e-05, + "loss": 0.1922, + "step": 39080 + }, + { + "epoch": 1.509324684350747, + "grad_norm": 1.1572967767715454, + "learning_rate": 9.938092847857704e-05, + "loss": 0.4263, + "step": 39090 + }, + { + "epoch": 1.509710799644774, + "grad_norm": 0.8721522092819214, + "learning_rate": 9.935518745897525e-05, + "loss": 0.3499, + "step": 39100 + }, + { + 
"epoch": 1.5100969149388006, + "grad_norm": 2.8486688137054443, + "learning_rate": 9.932944643937346e-05, + "loss": 0.3567, + "step": 39110 + }, + { + "epoch": 1.5104830302328276, + "grad_norm": 1.1864535808563232, + "learning_rate": 9.930370541977169e-05, + "loss": 0.2546, + "step": 39120 + }, + { + "epoch": 1.5108691455268544, + "grad_norm": 0.4636247158050537, + "learning_rate": 9.92779644001699e-05, + "loss": 0.2761, + "step": 39130 + }, + { + "epoch": 1.5112552608208811, + "grad_norm": 0.6326389908790588, + "learning_rate": 9.925222338056812e-05, + "loss": 0.252, + "step": 39140 + }, + { + "epoch": 1.511641376114908, + "grad_norm": 2.1110761165618896, + "learning_rate": 9.922648236096632e-05, + "loss": 0.5427, + "step": 39150 + }, + { + "epoch": 1.5120274914089347, + "grad_norm": 1.0927456617355347, + "learning_rate": 9.920074134136453e-05, + "loss": 0.3336, + "step": 39160 + }, + { + "epoch": 1.5124136067029617, + "grad_norm": 3.6224989891052246, + "learning_rate": 9.917500032176274e-05, + "loss": 0.3689, + "step": 39170 + }, + { + "epoch": 1.5127997219969882, + "grad_norm": 2.422492265701294, + "learning_rate": 9.914925930216097e-05, + "loss": 0.3924, + "step": 39180 + }, + { + "epoch": 1.5131858372910152, + "grad_norm": 0.22901678085327148, + "learning_rate": 9.912351828255918e-05, + "loss": 0.3082, + "step": 39190 + }, + { + "epoch": 1.5135719525850417, + "grad_norm": 0.581598162651062, + "learning_rate": 9.90977772629574e-05, + "loss": 0.3007, + "step": 39200 + }, + { + "epoch": 1.5139580678790687, + "grad_norm": 0.7196664214134216, + "learning_rate": 9.907203624335561e-05, + "loss": 0.2425, + "step": 39210 + }, + { + "epoch": 1.5143441831730955, + "grad_norm": 2.5246760845184326, + "learning_rate": 9.904629522375381e-05, + "loss": 0.3085, + "step": 39220 + }, + { + "epoch": 1.5147302984671223, + "grad_norm": 1.8397210836410522, + "learning_rate": 9.902055420415202e-05, + "loss": 0.2388, + "step": 39230 + }, + { + "epoch": 1.515116413761149, + "grad_norm": 0.5940410494804382, + "learning_rate": 9.899481318455025e-05, + "loss": 0.1184, + "step": 39240 + }, + { + "epoch": 1.5155025290551758, + "grad_norm": 1.5379250049591064, + "learning_rate": 9.896907216494846e-05, + "loss": 0.3253, + "step": 39250 + }, + { + "epoch": 1.5158886443492028, + "grad_norm": 0.8007088303565979, + "learning_rate": 9.894333114534668e-05, + "loss": 0.3057, + "step": 39260 + }, + { + "epoch": 1.5162747596432293, + "grad_norm": 0.9321600198745728, + "learning_rate": 9.891759012574489e-05, + "loss": 0.3249, + "step": 39270 + }, + { + "epoch": 1.5166608749372563, + "grad_norm": 1.5519977807998657, + "learning_rate": 9.88918491061431e-05, + "loss": 0.3368, + "step": 39280 + }, + { + "epoch": 1.517046990231283, + "grad_norm": 0.37695613503456116, + "learning_rate": 9.88661080865413e-05, + "loss": 0.1305, + "step": 39290 + }, + { + "epoch": 1.5174331055253099, + "grad_norm": 1.9956984519958496, + "learning_rate": 9.884036706693952e-05, + "loss": 0.4854, + "step": 39300 + }, + { + "epoch": 1.5178192208193366, + "grad_norm": 1.6110823154449463, + "learning_rate": 9.881462604733774e-05, + "loss": 0.2504, + "step": 39310 + }, + { + "epoch": 1.5182053361133634, + "grad_norm": 0.41702982783317566, + "learning_rate": 9.878888502773596e-05, + "loss": 0.1532, + "step": 39320 + }, + { + "epoch": 1.5185914514073904, + "grad_norm": 2.3595950603485107, + "learning_rate": 9.876314400813417e-05, + "loss": 0.309, + "step": 39330 + }, + { + "epoch": 1.518977566701417, + "grad_norm": 1.1045889854431152, + "learning_rate": 
9.873740298853238e-05, + "loss": 0.2858, + "step": 39340 + }, + { + "epoch": 1.519363681995444, + "grad_norm": 1.4641762971878052, + "learning_rate": 9.87116619689306e-05, + "loss": 0.3159, + "step": 39350 + }, + { + "epoch": 1.5197497972894707, + "grad_norm": 1.0977380275726318, + "learning_rate": 9.868592094932881e-05, + "loss": 0.229, + "step": 39360 + }, + { + "epoch": 1.5201359125834975, + "grad_norm": 0.5620018839836121, + "learning_rate": 9.866017992972702e-05, + "loss": 0.2642, + "step": 39370 + }, + { + "epoch": 1.5205220278775242, + "grad_norm": 0.36996108293533325, + "learning_rate": 9.863443891012524e-05, + "loss": 0.2314, + "step": 39380 + }, + { + "epoch": 1.520908143171551, + "grad_norm": 0.9804339408874512, + "learning_rate": 9.860869789052345e-05, + "loss": 0.2399, + "step": 39390 + }, + { + "epoch": 1.521294258465578, + "grad_norm": 0.4157778024673462, + "learning_rate": 9.858295687092166e-05, + "loss": 0.3006, + "step": 39400 + }, + { + "epoch": 1.5216803737596045, + "grad_norm": 0.5548539161682129, + "learning_rate": 9.855721585131988e-05, + "loss": 0.1816, + "step": 39410 + }, + { + "epoch": 1.5220664890536315, + "grad_norm": 0.9476989507675171, + "learning_rate": 9.853147483171809e-05, + "loss": 0.3943, + "step": 39420 + }, + { + "epoch": 1.522452604347658, + "grad_norm": 0.5183500647544861, + "learning_rate": 9.85057338121163e-05, + "loss": 0.1629, + "step": 39430 + }, + { + "epoch": 1.522838719641685, + "grad_norm": 1.4146567583084106, + "learning_rate": 9.847999279251452e-05, + "loss": 0.3829, + "step": 39440 + }, + { + "epoch": 1.5232248349357118, + "grad_norm": 2.4880552291870117, + "learning_rate": 9.845425177291273e-05, + "loss": 0.3052, + "step": 39450 + }, + { + "epoch": 1.5236109502297386, + "grad_norm": 0.43657195568084717, + "learning_rate": 9.842851075331094e-05, + "loss": 0.1691, + "step": 39460 + }, + { + "epoch": 1.5239970655237653, + "grad_norm": 0.13798825442790985, + "learning_rate": 9.840276973370916e-05, + "loss": 0.3217, + "step": 39470 + }, + { + "epoch": 1.5243831808177921, + "grad_norm": 0.8712138533592224, + "learning_rate": 9.837702871410737e-05, + "loss": 0.2702, + "step": 39480 + }, + { + "epoch": 1.524769296111819, + "grad_norm": 1.155957579612732, + "learning_rate": 9.83512876945056e-05, + "loss": 0.2693, + "step": 39490 + }, + { + "epoch": 1.5251554114058457, + "grad_norm": 1.194615364074707, + "learning_rate": 9.83255466749038e-05, + "loss": 0.1541, + "step": 39500 + }, + { + "epoch": 1.5255415266998726, + "grad_norm": 1.8287533521652222, + "learning_rate": 9.829980565530201e-05, + "loss": 0.222, + "step": 39510 + }, + { + "epoch": 1.5259276419938994, + "grad_norm": 0.5741322636604309, + "learning_rate": 9.827406463570022e-05, + "loss": 0.2348, + "step": 39520 + }, + { + "epoch": 1.5263137572879262, + "grad_norm": 0.21659214794635773, + "learning_rate": 9.824832361609844e-05, + "loss": 0.2698, + "step": 39530 + }, + { + "epoch": 1.526699872581953, + "grad_norm": 0.953101396560669, + "learning_rate": 9.822258259649665e-05, + "loss": 0.1222, + "step": 39540 + }, + { + "epoch": 1.5270859878759797, + "grad_norm": 2.764655113220215, + "learning_rate": 9.819684157689486e-05, + "loss": 0.4735, + "step": 39550 + }, + { + "epoch": 1.5274721031700067, + "grad_norm": 0.8794540166854858, + "learning_rate": 9.817110055729309e-05, + "loss": 0.1588, + "step": 39560 + }, + { + "epoch": 1.5278582184640332, + "grad_norm": 1.0971317291259766, + "learning_rate": 9.814535953769129e-05, + "loss": 0.302, + "step": 39570 + }, + { + "epoch": 
1.5282443337580602, + "grad_norm": 0.6677056550979614, + "learning_rate": 9.81196185180895e-05, + "loss": 0.3318, + "step": 39580 + }, + { + "epoch": 1.528630449052087, + "grad_norm": 1.276684045791626, + "learning_rate": 9.809387749848772e-05, + "loss": 0.2329, + "step": 39590 + }, + { + "epoch": 1.5290165643461138, + "grad_norm": 0.9192230701446533, + "learning_rate": 9.806813647888593e-05, + "loss": 0.3657, + "step": 39600 + }, + { + "epoch": 1.5294026796401405, + "grad_norm": 1.15361750125885, + "learning_rate": 9.804239545928414e-05, + "loss": 0.2462, + "step": 39610 + }, + { + "epoch": 1.5297887949341673, + "grad_norm": 1.0608477592468262, + "learning_rate": 9.801665443968237e-05, + "loss": 0.1601, + "step": 39620 + }, + { + "epoch": 1.5301749102281943, + "grad_norm": 0.07210031896829605, + "learning_rate": 9.799091342008058e-05, + "loss": 0.2454, + "step": 39630 + }, + { + "epoch": 1.5305610255222208, + "grad_norm": 0.9941250681877136, + "learning_rate": 9.796517240047878e-05, + "loss": 0.2783, + "step": 39640 + }, + { + "epoch": 1.5309471408162478, + "grad_norm": 1.3414831161499023, + "learning_rate": 9.7939431380877e-05, + "loss": 0.2342, + "step": 39650 + }, + { + "epoch": 1.5313332561102744, + "grad_norm": 1.5854885578155518, + "learning_rate": 9.791369036127521e-05, + "loss": 0.3444, + "step": 39660 + }, + { + "epoch": 1.5317193714043014, + "grad_norm": 1.068955659866333, + "learning_rate": 9.788794934167342e-05, + "loss": 0.2587, + "step": 39670 + }, + { + "epoch": 1.5321054866983281, + "grad_norm": 4.630382537841797, + "learning_rate": 9.786220832207165e-05, + "loss": 0.2821, + "step": 39680 + }, + { + "epoch": 1.532491601992355, + "grad_norm": 1.2920769453048706, + "learning_rate": 9.783646730246986e-05, + "loss": 0.2169, + "step": 39690 + }, + { + "epoch": 1.5328777172863817, + "grad_norm": 1.6936739683151245, + "learning_rate": 9.781072628286808e-05, + "loss": 0.2538, + "step": 39700 + }, + { + "epoch": 1.5332638325804084, + "grad_norm": 4.246237277984619, + "learning_rate": 9.778498526326629e-05, + "loss": 0.305, + "step": 39710 + }, + { + "epoch": 1.5336499478744354, + "grad_norm": 2.638601779937744, + "learning_rate": 9.775924424366449e-05, + "loss": 0.2168, + "step": 39720 + }, + { + "epoch": 1.534036063168462, + "grad_norm": 1.4180443286895752, + "learning_rate": 9.77335032240627e-05, + "loss": 0.2139, + "step": 39730 + }, + { + "epoch": 1.534422178462489, + "grad_norm": 1.251378059387207, + "learning_rate": 9.770776220446093e-05, + "loss": 0.2714, + "step": 39740 + }, + { + "epoch": 1.5348082937565157, + "grad_norm": 0.5204187035560608, + "learning_rate": 9.768202118485914e-05, + "loss": 0.2375, + "step": 39750 + }, + { + "epoch": 1.5351944090505425, + "grad_norm": 0.4135078489780426, + "learning_rate": 9.765628016525736e-05, + "loss": 0.442, + "step": 39760 + }, + { + "epoch": 1.5355805243445693, + "grad_norm": 1.9822927713394165, + "learning_rate": 9.763053914565557e-05, + "loss": 0.2238, + "step": 39770 + }, + { + "epoch": 1.535966639638596, + "grad_norm": 1.6851792335510254, + "learning_rate": 9.760479812605378e-05, + "loss": 0.4886, + "step": 39780 + }, + { + "epoch": 1.536352754932623, + "grad_norm": 1.9185349941253662, + "learning_rate": 9.757905710645198e-05, + "loss": 0.4167, + "step": 39790 + }, + { + "epoch": 1.5367388702266496, + "grad_norm": 0.2348870486021042, + "learning_rate": 9.75533160868502e-05, + "loss": 0.1849, + "step": 39800 + }, + { + "epoch": 1.5371249855206766, + "grad_norm": 0.2750287652015686, + "learning_rate": 9.752757506724842e-05, + 
"loss": 0.4298, + "step": 39810 + }, + { + "epoch": 1.5375111008147033, + "grad_norm": 0.28703558444976807, + "learning_rate": 9.750183404764664e-05, + "loss": 0.1431, + "step": 39820 + }, + { + "epoch": 1.53789721610873, + "grad_norm": 0.743290364742279, + "learning_rate": 9.747609302804485e-05, + "loss": 0.1993, + "step": 39830 + }, + { + "epoch": 1.5382833314027569, + "grad_norm": 0.8678677082061768, + "learning_rate": 9.745035200844306e-05, + "loss": 0.1695, + "step": 39840 + }, + { + "epoch": 1.5386694466967836, + "grad_norm": 0.18160143494606018, + "learning_rate": 9.742461098884128e-05, + "loss": 0.2615, + "step": 39850 + }, + { + "epoch": 1.5390555619908106, + "grad_norm": 0.08404600620269775, + "learning_rate": 9.739886996923947e-05, + "loss": 0.2523, + "step": 39860 + }, + { + "epoch": 1.5394416772848372, + "grad_norm": 3.393118381500244, + "learning_rate": 9.73731289496377e-05, + "loss": 0.3292, + "step": 39870 + }, + { + "epoch": 1.5398277925788642, + "grad_norm": 0.3148004412651062, + "learning_rate": 9.734738793003591e-05, + "loss": 0.0808, + "step": 39880 + }, + { + "epoch": 1.5402139078728907, + "grad_norm": 0.8423801064491272, + "learning_rate": 9.732164691043413e-05, + "loss": 0.1088, + "step": 39890 + }, + { + "epoch": 1.5406000231669177, + "grad_norm": 2.407147169113159, + "learning_rate": 9.729590589083234e-05, + "loss": 0.286, + "step": 39900 + }, + { + "epoch": 1.5409861384609445, + "grad_norm": 0.8560749292373657, + "learning_rate": 9.727016487123055e-05, + "loss": 0.1674, + "step": 39910 + }, + { + "epoch": 1.5413722537549712, + "grad_norm": 0.9724945425987244, + "learning_rate": 9.724442385162877e-05, + "loss": 0.232, + "step": 39920 + }, + { + "epoch": 1.541758369048998, + "grad_norm": 1.6911234855651855, + "learning_rate": 9.721868283202698e-05, + "loss": 0.3182, + "step": 39930 + }, + { + "epoch": 1.5421444843430248, + "grad_norm": 0.8703460693359375, + "learning_rate": 9.71929418124252e-05, + "loss": 0.3565, + "step": 39940 + }, + { + "epoch": 1.5425305996370517, + "grad_norm": 1.4052613973617554, + "learning_rate": 9.716720079282341e-05, + "loss": 0.2037, + "step": 39950 + }, + { + "epoch": 1.5429167149310783, + "grad_norm": 0.2802957594394684, + "learning_rate": 9.714145977322162e-05, + "loss": 0.2596, + "step": 39960 + }, + { + "epoch": 1.5433028302251053, + "grad_norm": 0.22114449739456177, + "learning_rate": 9.711571875361983e-05, + "loss": 0.2302, + "step": 39970 + }, + { + "epoch": 1.543688945519132, + "grad_norm": 0.8095982074737549, + "learning_rate": 9.708997773401805e-05, + "loss": 0.2428, + "step": 39980 + }, + { + "epoch": 1.5440750608131588, + "grad_norm": 1.0990866422653198, + "learning_rate": 9.706423671441626e-05, + "loss": 0.3669, + "step": 39990 + }, + { + "epoch": 1.5444611761071856, + "grad_norm": 0.8309730291366577, + "learning_rate": 9.703849569481447e-05, + "loss": 0.2038, + "step": 40000 + }, + { + "epoch": 1.5448472914012124, + "grad_norm": 2.09492826461792, + "learning_rate": 9.701275467521269e-05, + "loss": 0.2934, + "step": 40010 + }, + { + "epoch": 1.5452334066952393, + "grad_norm": 0.33550217747688293, + "learning_rate": 9.69870136556109e-05, + "loss": 0.1582, + "step": 40020 + }, + { + "epoch": 1.5456195219892659, + "grad_norm": 1.6839581727981567, + "learning_rate": 9.696127263600911e-05, + "loss": 0.2084, + "step": 40030 + }, + { + "epoch": 1.5460056372832929, + "grad_norm": 1.293013095855713, + "learning_rate": 9.693553161640733e-05, + "loss": 0.1871, + "step": 40040 + }, + { + "epoch": 1.5463917525773194, + "grad_norm": 
0.08080088347196579, + "learning_rate": 9.690979059680554e-05, + "loss": 0.4942, + "step": 40050 + }, + { + "epoch": 1.5467778678713464, + "grad_norm": 1.7121747732162476, + "learning_rate": 9.688404957720377e-05, + "loss": 0.4819, + "step": 40060 + }, + { + "epoch": 1.5471639831653732, + "grad_norm": 0.6426690816879272, + "learning_rate": 9.685830855760197e-05, + "loss": 0.2393, + "step": 40070 + }, + { + "epoch": 1.5475500984594, + "grad_norm": 1.5436782836914062, + "learning_rate": 9.683256753800018e-05, + "loss": 0.2666, + "step": 40080 + }, + { + "epoch": 1.547936213753427, + "grad_norm": 1.9316864013671875, + "learning_rate": 9.68068265183984e-05, + "loss": 0.1629, + "step": 40090 + }, + { + "epoch": 1.5483223290474535, + "grad_norm": 0.7503604292869568, + "learning_rate": 9.678108549879661e-05, + "loss": 0.2038, + "step": 40100 + }, + { + "epoch": 1.5487084443414805, + "grad_norm": 0.9826010465621948, + "learning_rate": 9.675534447919482e-05, + "loss": 0.1219, + "step": 40110 + }, + { + "epoch": 1.549094559635507, + "grad_norm": 1.6828583478927612, + "learning_rate": 9.672960345959305e-05, + "loss": 0.2283, + "step": 40120 + }, + { + "epoch": 1.549480674929534, + "grad_norm": 1.688306212425232, + "learning_rate": 9.670386243999126e-05, + "loss": 0.1785, + "step": 40130 + }, + { + "epoch": 1.5498667902235608, + "grad_norm": 1.6113176345825195, + "learning_rate": 9.667812142038946e-05, + "loss": 0.4171, + "step": 40140 + }, + { + "epoch": 1.5502529055175875, + "grad_norm": 0.33999955654144287, + "learning_rate": 9.665238040078767e-05, + "loss": 0.1528, + "step": 40150 + }, + { + "epoch": 1.5506390208116143, + "grad_norm": 0.7666870355606079, + "learning_rate": 9.662663938118589e-05, + "loss": 0.2258, + "step": 40160 + }, + { + "epoch": 1.551025136105641, + "grad_norm": 1.123090386390686, + "learning_rate": 9.66008983615841e-05, + "loss": 0.207, + "step": 40170 + }, + { + "epoch": 1.551411251399668, + "grad_norm": 1.4472588300704956, + "learning_rate": 9.657515734198233e-05, + "loss": 0.2083, + "step": 40180 + }, + { + "epoch": 1.5517973666936946, + "grad_norm": 0.6818589568138123, + "learning_rate": 9.654941632238054e-05, + "loss": 0.1821, + "step": 40190 + }, + { + "epoch": 1.5521834819877216, + "grad_norm": 2.31847882270813, + "learning_rate": 9.652367530277875e-05, + "loss": 0.5128, + "step": 40200 + }, + { + "epoch": 1.5525695972817484, + "grad_norm": 2.492560386657715, + "learning_rate": 9.649793428317695e-05, + "loss": 0.2461, + "step": 40210 + }, + { + "epoch": 1.5529557125757751, + "grad_norm": 0.6917121410369873, + "learning_rate": 9.647219326357517e-05, + "loss": 0.2829, + "step": 40220 + }, + { + "epoch": 1.553341827869802, + "grad_norm": 1.1944900751113892, + "learning_rate": 9.644645224397338e-05, + "loss": 0.2324, + "step": 40230 + }, + { + "epoch": 1.5537279431638287, + "grad_norm": 0.12343896180391312, + "learning_rate": 9.642071122437161e-05, + "loss": 0.1258, + "step": 40240 + }, + { + "epoch": 1.5541140584578557, + "grad_norm": 1.8493744134902954, + "learning_rate": 9.639497020476982e-05, + "loss": 0.2707, + "step": 40250 + }, + { + "epoch": 1.5545001737518822, + "grad_norm": 1.4696533679962158, + "learning_rate": 9.636922918516803e-05, + "loss": 0.199, + "step": 40260 + }, + { + "epoch": 1.5548862890459092, + "grad_norm": 0.7716092467308044, + "learning_rate": 9.634348816556625e-05, + "loss": 0.2869, + "step": 40270 + }, + { + "epoch": 1.5552724043399357, + "grad_norm": 1.3153057098388672, + "learning_rate": 9.631774714596446e-05, + "loss": 0.2164, + "step": 40280 
+ }, + { + "epoch": 1.5556585196339627, + "grad_norm": 1.2615609169006348, + "learning_rate": 9.629200612636266e-05, + "loss": 0.3418, + "step": 40290 + }, + { + "epoch": 1.5560446349279895, + "grad_norm": 1.7999435663223267, + "learning_rate": 9.626626510676089e-05, + "loss": 0.3062, + "step": 40300 + }, + { + "epoch": 1.5564307502220163, + "grad_norm": 1.5992902517318726, + "learning_rate": 9.62405240871591e-05, + "loss": 0.3037, + "step": 40310 + }, + { + "epoch": 1.556816865516043, + "grad_norm": 0.7515442967414856, + "learning_rate": 9.621478306755731e-05, + "loss": 0.2071, + "step": 40320 + }, + { + "epoch": 1.5572029808100698, + "grad_norm": 2.5413167476654053, + "learning_rate": 9.618904204795553e-05, + "loss": 0.569, + "step": 40330 + }, + { + "epoch": 1.5575890961040968, + "grad_norm": 2.303255319595337, + "learning_rate": 9.616330102835374e-05, + "loss": 0.201, + "step": 40340 + }, + { + "epoch": 1.5579752113981233, + "grad_norm": 1.0700573921203613, + "learning_rate": 9.613756000875195e-05, + "loss": 0.1675, + "step": 40350 + }, + { + "epoch": 1.5583613266921503, + "grad_norm": 1.3028898239135742, + "learning_rate": 9.611181898915015e-05, + "loss": 0.3423, + "step": 40360 + }, + { + "epoch": 1.558747441986177, + "grad_norm": 2.0213449001312256, + "learning_rate": 9.608607796954838e-05, + "loss": 0.161, + "step": 40370 + }, + { + "epoch": 1.5591335572802039, + "grad_norm": 0.15942837297916412, + "learning_rate": 9.60603369499466e-05, + "loss": 0.2067, + "step": 40380 + }, + { + "epoch": 1.5595196725742306, + "grad_norm": 0.8567324280738831, + "learning_rate": 9.603459593034481e-05, + "loss": 0.1156, + "step": 40390 + }, + { + "epoch": 1.5599057878682574, + "grad_norm": 0.598947286605835, + "learning_rate": 9.600885491074302e-05, + "loss": 0.2741, + "step": 40400 + }, + { + "epoch": 1.5602919031622844, + "grad_norm": 0.41864535212516785, + "learning_rate": 9.598311389114123e-05, + "loss": 0.0979, + "step": 40410 + }, + { + "epoch": 1.560678018456311, + "grad_norm": 1.304883599281311, + "learning_rate": 9.595737287153945e-05, + "loss": 0.1525, + "step": 40420 + }, + { + "epoch": 1.561064133750338, + "grad_norm": 1.666935682296753, + "learning_rate": 9.593163185193766e-05, + "loss": 0.3392, + "step": 40430 + }, + { + "epoch": 1.5614502490443647, + "grad_norm": 0.44640687108039856, + "learning_rate": 9.590589083233587e-05, + "loss": 0.1946, + "step": 40440 + }, + { + "epoch": 1.5618363643383915, + "grad_norm": 0.8123475313186646, + "learning_rate": 9.588014981273409e-05, + "loss": 0.2985, + "step": 40450 + }, + { + "epoch": 1.5622224796324182, + "grad_norm": 1.7682442665100098, + "learning_rate": 9.58544087931323e-05, + "loss": 0.215, + "step": 40460 + }, + { + "epoch": 1.562608594926445, + "grad_norm": 0.8545176982879639, + "learning_rate": 9.582866777353051e-05, + "loss": 0.2513, + "step": 40470 + }, + { + "epoch": 1.562994710220472, + "grad_norm": 0.10042224079370499, + "learning_rate": 9.580292675392873e-05, + "loss": 0.1792, + "step": 40480 + }, + { + "epoch": 1.5633808255144985, + "grad_norm": 1.0059372186660767, + "learning_rate": 9.577718573432694e-05, + "loss": 0.2236, + "step": 40490 + }, + { + "epoch": 1.5637669408085255, + "grad_norm": 1.2795478105545044, + "learning_rate": 9.575144471472515e-05, + "loss": 0.2728, + "step": 40500 + }, + { + "epoch": 1.564153056102552, + "grad_norm": 1.099183201789856, + "learning_rate": 9.572570369512337e-05, + "loss": 0.2855, + "step": 40510 + }, + { + "epoch": 1.564539171396579, + "grad_norm": 1.0743390321731567, + "learning_rate": 
9.569996267552158e-05, + "loss": 0.2439, + "step": 40520 + }, + { + "epoch": 1.5649252866906058, + "grad_norm": 1.1840991973876953, + "learning_rate": 9.56742216559198e-05, + "loss": 0.1416, + "step": 40530 + }, + { + "epoch": 1.5653114019846326, + "grad_norm": 0.4638634920120239, + "learning_rate": 9.564848063631801e-05, + "loss": 0.254, + "step": 40540 + }, + { + "epoch": 1.5656975172786594, + "grad_norm": 3.084916830062866, + "learning_rate": 9.562273961671623e-05, + "loss": 0.2098, + "step": 40550 + }, + { + "epoch": 1.5660836325726861, + "grad_norm": 0.666347324848175, + "learning_rate": 9.559699859711443e-05, + "loss": 0.3649, + "step": 40560 + }, + { + "epoch": 1.5664697478667131, + "grad_norm": 1.1770634651184082, + "learning_rate": 9.557125757751265e-05, + "loss": 0.2069, + "step": 40570 + }, + { + "epoch": 1.5668558631607397, + "grad_norm": 0.9030371308326721, + "learning_rate": 9.554551655791086e-05, + "loss": 0.1174, + "step": 40580 + }, + { + "epoch": 1.5672419784547666, + "grad_norm": 0.053270868957042694, + "learning_rate": 9.551977553830907e-05, + "loss": 0.1304, + "step": 40590 + }, + { + "epoch": 1.5676280937487934, + "grad_norm": 2.4098777770996094, + "learning_rate": 9.549403451870729e-05, + "loss": 0.2654, + "step": 40600 + }, + { + "epoch": 1.5680142090428202, + "grad_norm": 0.7078404426574707, + "learning_rate": 9.54682934991055e-05, + "loss": 0.2203, + "step": 40610 + }, + { + "epoch": 1.568400324336847, + "grad_norm": 1.1814978122711182, + "learning_rate": 9.544255247950373e-05, + "loss": 0.2986, + "step": 40620 + }, + { + "epoch": 1.5687864396308737, + "grad_norm": 2.29439377784729, + "learning_rate": 9.541681145990193e-05, + "loss": 0.3143, + "step": 40630 + }, + { + "epoch": 1.5691725549249007, + "grad_norm": 0.07921203970909119, + "learning_rate": 9.539107044030014e-05, + "loss": 0.151, + "step": 40640 + }, + { + "epoch": 1.5695586702189273, + "grad_norm": 0.2058558166027069, + "learning_rate": 9.536532942069835e-05, + "loss": 0.2169, + "step": 40650 + }, + { + "epoch": 1.5699447855129542, + "grad_norm": 0.4650769531726837, + "learning_rate": 9.533958840109657e-05, + "loss": 0.136, + "step": 40660 + }, + { + "epoch": 1.570330900806981, + "grad_norm": 1.0603867769241333, + "learning_rate": 9.531384738149478e-05, + "loss": 0.2396, + "step": 40670 + }, + { + "epoch": 1.5707170161010078, + "grad_norm": 2.279155731201172, + "learning_rate": 9.528810636189301e-05, + "loss": 0.2679, + "step": 40680 + }, + { + "epoch": 1.5711031313950345, + "grad_norm": 1.8186841011047363, + "learning_rate": 9.526236534229122e-05, + "loss": 0.2592, + "step": 40690 + }, + { + "epoch": 1.5714892466890613, + "grad_norm": 1.9804840087890625, + "learning_rate": 9.523662432268943e-05, + "loss": 0.3184, + "step": 40700 + }, + { + "epoch": 1.5718753619830883, + "grad_norm": 2.3474323749542236, + "learning_rate": 9.521088330308763e-05, + "loss": 0.1892, + "step": 40710 + }, + { + "epoch": 1.5722614772771148, + "grad_norm": 0.5251644253730774, + "learning_rate": 9.518514228348585e-05, + "loss": 0.1306, + "step": 40720 + }, + { + "epoch": 1.5726475925711418, + "grad_norm": 0.9444900155067444, + "learning_rate": 9.515940126388406e-05, + "loss": 0.1888, + "step": 40730 + }, + { + "epoch": 1.5730337078651684, + "grad_norm": 1.1808644533157349, + "learning_rate": 9.513366024428229e-05, + "loss": 0.1249, + "step": 40740 + }, + { + "epoch": 1.5734198231591954, + "grad_norm": 2.716831684112549, + "learning_rate": 9.51079192246805e-05, + "loss": 0.2544, + "step": 40750 + }, + { + "epoch": 
1.5738059384532221, + "grad_norm": 1.257308006286621, + "learning_rate": 9.508217820507871e-05, + "loss": 0.1292, + "step": 40760 + }, + { + "epoch": 1.574192053747249, + "grad_norm": 2.080162525177002, + "learning_rate": 9.505643718547693e-05, + "loss": 0.3814, + "step": 40770 + }, + { + "epoch": 1.5745781690412757, + "grad_norm": 1.107872486114502, + "learning_rate": 9.503069616587513e-05, + "loss": 0.2074, + "step": 40780 + }, + { + "epoch": 1.5749642843353024, + "grad_norm": 1.0006227493286133, + "learning_rate": 9.500495514627334e-05, + "loss": 0.2384, + "step": 40790 + }, + { + "epoch": 1.5753503996293294, + "grad_norm": 0.3345160484313965, + "learning_rate": 9.497921412667157e-05, + "loss": 0.181, + "step": 40800 + }, + { + "epoch": 1.575736514923356, + "grad_norm": 1.2941926717758179, + "learning_rate": 9.495347310706978e-05, + "loss": 0.2312, + "step": 40810 + }, + { + "epoch": 1.576122630217383, + "grad_norm": 1.3869580030441284, + "learning_rate": 9.4927732087468e-05, + "loss": 0.2107, + "step": 40820 + }, + { + "epoch": 1.5765087455114097, + "grad_norm": 1.9948787689208984, + "learning_rate": 9.49019910678662e-05, + "loss": 0.2736, + "step": 40830 + }, + { + "epoch": 1.5768948608054365, + "grad_norm": 0.22069145739078522, + "learning_rate": 9.487625004826442e-05, + "loss": 0.3173, + "step": 40840 + }, + { + "epoch": 1.5772809760994633, + "grad_norm": 0.61549311876297, + "learning_rate": 9.485050902866262e-05, + "loss": 0.1437, + "step": 40850 + }, + { + "epoch": 1.57766709139349, + "grad_norm": 0.20520836114883423, + "learning_rate": 9.482476800906083e-05, + "loss": 0.1247, + "step": 40860 + }, + { + "epoch": 1.578053206687517, + "grad_norm": 2.1745588779449463, + "learning_rate": 9.479902698945906e-05, + "loss": 0.366, + "step": 40870 + }, + { + "epoch": 1.5784393219815436, + "grad_norm": 1.1127387285232544, + "learning_rate": 9.477328596985727e-05, + "loss": 0.1202, + "step": 40880 + }, + { + "epoch": 1.5788254372755706, + "grad_norm": 0.598258376121521, + "learning_rate": 9.474754495025549e-05, + "loss": 0.3917, + "step": 40890 + }, + { + "epoch": 1.5792115525695973, + "grad_norm": 1.0376511812210083, + "learning_rate": 9.47218039306537e-05, + "loss": 0.1372, + "step": 40900 + }, + { + "epoch": 1.579597667863624, + "grad_norm": 1.7164186239242554, + "learning_rate": 9.469606291105191e-05, + "loss": 0.3143, + "step": 40910 + }, + { + "epoch": 1.5799837831576509, + "grad_norm": 1.7929835319519043, + "learning_rate": 9.467032189145013e-05, + "loss": 0.2864, + "step": 40920 + }, + { + "epoch": 1.5803698984516776, + "grad_norm": 0.551222026348114, + "learning_rate": 9.464458087184834e-05, + "loss": 0.1173, + "step": 40930 + }, + { + "epoch": 1.5807560137457046, + "grad_norm": 1.5236003398895264, + "learning_rate": 9.461883985224655e-05, + "loss": 0.2031, + "step": 40940 + }, + { + "epoch": 1.5811421290397312, + "grad_norm": 0.690719723701477, + "learning_rate": 9.459309883264477e-05, + "loss": 0.1913, + "step": 40950 + }, + { + "epoch": 1.5815282443337582, + "grad_norm": 0.4199884533882141, + "learning_rate": 9.456735781304298e-05, + "loss": 0.279, + "step": 40960 + }, + { + "epoch": 1.5819143596277847, + "grad_norm": 0.7393648028373718, + "learning_rate": 9.454161679344119e-05, + "loss": 0.2315, + "step": 40970 + }, + { + "epoch": 1.5823004749218117, + "grad_norm": 0.09372472018003464, + "learning_rate": 9.45158757738394e-05, + "loss": 0.2585, + "step": 40980 + }, + { + "epoch": 1.5826865902158385, + "grad_norm": 0.37245264649391174, + "learning_rate": 9.449013475423762e-05, + 
"loss": 0.239, + "step": 40990 + }, + { + "epoch": 1.5830727055098652, + "grad_norm": 1.8006244897842407, + "learning_rate": 9.446439373463583e-05, + "loss": 0.3497, + "step": 41000 + }, + { + "epoch": 1.583458820803892, + "grad_norm": 1.4421888589859009, + "learning_rate": 9.443865271503405e-05, + "loss": 0.2014, + "step": 41010 + }, + { + "epoch": 1.5838449360979188, + "grad_norm": 1.8858803510665894, + "learning_rate": 9.441291169543226e-05, + "loss": 0.1816, + "step": 41020 + }, + { + "epoch": 1.5842310513919458, + "grad_norm": 1.108573317527771, + "learning_rate": 9.438717067583047e-05, + "loss": 0.1869, + "step": 41030 + }, + { + "epoch": 1.5846171666859723, + "grad_norm": 0.34841394424438477, + "learning_rate": 9.436142965622869e-05, + "loss": 0.3034, + "step": 41040 + }, + { + "epoch": 1.5850032819799993, + "grad_norm": 7.2845964431762695, + "learning_rate": 9.433568863662691e-05, + "loss": 0.2659, + "step": 41050 + }, + { + "epoch": 1.585389397274026, + "grad_norm": 0.4753032922744751, + "learning_rate": 9.430994761702511e-05, + "loss": 0.2539, + "step": 41060 + }, + { + "epoch": 1.5857755125680528, + "grad_norm": 1.590050458908081, + "learning_rate": 9.428420659742333e-05, + "loss": 0.2373, + "step": 41070 + }, + { + "epoch": 1.5861616278620796, + "grad_norm": 0.6979599595069885, + "learning_rate": 9.425846557782154e-05, + "loss": 0.1931, + "step": 41080 + }, + { + "epoch": 1.5865477431561064, + "grad_norm": 0.6384108066558838, + "learning_rate": 9.423272455821975e-05, + "loss": 0.2922, + "step": 41090 + }, + { + "epoch": 1.5869338584501334, + "grad_norm": 0.4752826392650604, + "learning_rate": 9.420698353861797e-05, + "loss": 0.2616, + "step": 41100 + }, + { + "epoch": 1.58731997374416, + "grad_norm": 0.7333683371543884, + "learning_rate": 9.418124251901618e-05, + "loss": 0.1568, + "step": 41110 + }, + { + "epoch": 1.5877060890381869, + "grad_norm": 0.09763200581073761, + "learning_rate": 9.41555014994144e-05, + "loss": 0.1936, + "step": 41120 + }, + { + "epoch": 1.5880922043322137, + "grad_norm": 2.4087512493133545, + "learning_rate": 9.41297604798126e-05, + "loss": 0.2408, + "step": 41130 + }, + { + "epoch": 1.5884783196262404, + "grad_norm": 2.014822244644165, + "learning_rate": 9.410401946021082e-05, + "loss": 0.2883, + "step": 41140 + }, + { + "epoch": 1.5888644349202672, + "grad_norm": 1.1764864921569824, + "learning_rate": 9.407827844060903e-05, + "loss": 0.3198, + "step": 41150 + }, + { + "epoch": 1.589250550214294, + "grad_norm": 0.807905912399292, + "learning_rate": 9.405253742100725e-05, + "loss": 0.3724, + "step": 41160 + }, + { + "epoch": 1.589636665508321, + "grad_norm": 0.9179816842079163, + "learning_rate": 9.402679640140546e-05, + "loss": 0.1429, + "step": 41170 + }, + { + "epoch": 1.5900227808023475, + "grad_norm": 1.3134746551513672, + "learning_rate": 9.400105538180369e-05, + "loss": 0.2973, + "step": 41180 + }, + { + "epoch": 1.5904088960963745, + "grad_norm": 0.08131751418113708, + "learning_rate": 9.39753143622019e-05, + "loss": 0.1627, + "step": 41190 + }, + { + "epoch": 1.590795011390401, + "grad_norm": 0.23568874597549438, + "learning_rate": 9.39495733426001e-05, + "loss": 0.3159, + "step": 41200 + }, + { + "epoch": 1.591181126684428, + "grad_norm": 1.1452207565307617, + "learning_rate": 9.392383232299831e-05, + "loss": 0.1761, + "step": 41210 + }, + { + "epoch": 1.5915672419784548, + "grad_norm": 1.5560107231140137, + "learning_rate": 9.389809130339653e-05, + "loss": 0.2173, + "step": 41220 + }, + { + "epoch": 1.5919533572724816, + "grad_norm": 
1.7568162679672241, + "learning_rate": 9.387235028379474e-05, + "loss": 0.1771, + "step": 41230 + }, + { + "epoch": 1.5923394725665083, + "grad_norm": 6.235257148742676, + "learning_rate": 9.384660926419297e-05, + "loss": 0.3207, + "step": 41240 + }, + { + "epoch": 1.592725587860535, + "grad_norm": 1.2336914539337158, + "learning_rate": 9.382086824459118e-05, + "loss": 0.1691, + "step": 41250 + }, + { + "epoch": 1.593111703154562, + "grad_norm": 0.06781507283449173, + "learning_rate": 9.379512722498939e-05, + "loss": 0.1485, + "step": 41260 + }, + { + "epoch": 1.5934978184485886, + "grad_norm": 0.44770199060440063, + "learning_rate": 9.37693862053876e-05, + "loss": 0.133, + "step": 41270 + }, + { + "epoch": 1.5938839337426156, + "grad_norm": 0.3812965750694275, + "learning_rate": 9.37436451857858e-05, + "loss": 0.2305, + "step": 41280 + }, + { + "epoch": 1.5942700490366424, + "grad_norm": 1.7562031745910645, + "learning_rate": 9.371790416618402e-05, + "loss": 0.265, + "step": 41290 + }, + { + "epoch": 1.5946561643306691, + "grad_norm": 0.3089204728603363, + "learning_rate": 9.369216314658225e-05, + "loss": 0.2583, + "step": 41300 + }, + { + "epoch": 1.595042279624696, + "grad_norm": 1.5295588970184326, + "learning_rate": 9.366642212698046e-05, + "loss": 0.1201, + "step": 41310 + }, + { + "epoch": 1.5954283949187227, + "grad_norm": 0.7704429626464844, + "learning_rate": 9.364068110737867e-05, + "loss": 0.1471, + "step": 41320 + }, + { + "epoch": 1.5958145102127497, + "grad_norm": 1.825547218322754, + "learning_rate": 9.361494008777689e-05, + "loss": 0.2964, + "step": 41330 + }, + { + "epoch": 1.5962006255067762, + "grad_norm": 2.4156250953674316, + "learning_rate": 9.35891990681751e-05, + "loss": 0.4332, + "step": 41340 + }, + { + "epoch": 1.5965867408008032, + "grad_norm": 0.8711603879928589, + "learning_rate": 9.35634580485733e-05, + "loss": 0.2037, + "step": 41350 + }, + { + "epoch": 1.5969728560948298, + "grad_norm": 0.5013506412506104, + "learning_rate": 9.353771702897151e-05, + "loss": 0.1852, + "step": 41360 + }, + { + "epoch": 1.5973589713888567, + "grad_norm": 1.9529963731765747, + "learning_rate": 9.351197600936974e-05, + "loss": 0.2809, + "step": 41370 + }, + { + "epoch": 1.5977450866828835, + "grad_norm": 1.7923181056976318, + "learning_rate": 9.348623498976795e-05, + "loss": 0.3918, + "step": 41380 + }, + { + "epoch": 1.5981312019769103, + "grad_norm": 0.45643335580825806, + "learning_rate": 9.346049397016617e-05, + "loss": 0.4161, + "step": 41390 + }, + { + "epoch": 1.5985173172709373, + "grad_norm": 0.4477383494377136, + "learning_rate": 9.343475295056438e-05, + "loss": 0.202, + "step": 41400 + }, + { + "epoch": 1.5989034325649638, + "grad_norm": 1.1428645849227905, + "learning_rate": 9.340901193096259e-05, + "loss": 0.2271, + "step": 41410 + }, + { + "epoch": 1.5992895478589908, + "grad_norm": 1.8324707746505737, + "learning_rate": 9.338327091136079e-05, + "loss": 0.2261, + "step": 41420 + }, + { + "epoch": 1.5996756631530173, + "grad_norm": 1.3735613822937012, + "learning_rate": 9.335752989175902e-05, + "loss": 0.2394, + "step": 41430 + }, + { + "epoch": 1.6000617784470443, + "grad_norm": 4.176051139831543, + "learning_rate": 9.333178887215723e-05, + "loss": 0.3454, + "step": 41440 + }, + { + "epoch": 1.600447893741071, + "grad_norm": 0.1931024044752121, + "learning_rate": 9.330604785255545e-05, + "loss": 0.3191, + "step": 41450 + }, + { + "epoch": 1.6008340090350979, + "grad_norm": 0.867579996585846, + "learning_rate": 9.328030683295366e-05, + "loss": 0.1951, + "step": 
41460 + }, + { + "epoch": 1.6012201243291246, + "grad_norm": 0.4601798355579376, + "learning_rate": 9.325456581335187e-05, + "loss": 0.2682, + "step": 41470 + }, + { + "epoch": 1.6016062396231514, + "grad_norm": 0.5605349540710449, + "learning_rate": 9.322882479375009e-05, + "loss": 0.1963, + "step": 41480 + }, + { + "epoch": 1.6019923549171784, + "grad_norm": 0.4736683964729309, + "learning_rate": 9.32030837741483e-05, + "loss": 0.2347, + "step": 41490 + }, + { + "epoch": 1.602378470211205, + "grad_norm": 2.145426034927368, + "learning_rate": 9.317734275454651e-05, + "loss": 0.2782, + "step": 41500 + }, + { + "epoch": 1.602764585505232, + "grad_norm": 0.34660840034484863, + "learning_rate": 9.315160173494473e-05, + "loss": 0.1966, + "step": 41510 + }, + { + "epoch": 1.6031507007992587, + "grad_norm": 1.3674156665802002, + "learning_rate": 9.312586071534294e-05, + "loss": 0.426, + "step": 41520 + }, + { + "epoch": 1.6035368160932855, + "grad_norm": 1.078046441078186, + "learning_rate": 9.310011969574115e-05, + "loss": 0.2345, + "step": 41530 + }, + { + "epoch": 1.6039229313873122, + "grad_norm": 2.233793258666992, + "learning_rate": 9.307437867613937e-05, + "loss": 0.2605, + "step": 41540 + }, + { + "epoch": 1.604309046681339, + "grad_norm": 0.9344108700752258, + "learning_rate": 9.304863765653758e-05, + "loss": 0.114, + "step": 41550 + }, + { + "epoch": 1.604695161975366, + "grad_norm": 0.29096049070358276, + "learning_rate": 9.302289663693579e-05, + "loss": 0.3319, + "step": 41560 + }, + { + "epoch": 1.6050812772693925, + "grad_norm": 0.41333675384521484, + "learning_rate": 9.2997155617334e-05, + "loss": 0.2392, + "step": 41570 + }, + { + "epoch": 1.6054673925634195, + "grad_norm": 1.3718006610870361, + "learning_rate": 9.297141459773222e-05, + "loss": 0.2304, + "step": 41580 + }, + { + "epoch": 1.605853507857446, + "grad_norm": 1.6650983095169067, + "learning_rate": 9.294567357813043e-05, + "loss": 0.3319, + "step": 41590 + }, + { + "epoch": 1.606239623151473, + "grad_norm": 3.4465317726135254, + "learning_rate": 9.291993255852865e-05, + "loss": 0.6168, + "step": 41600 + }, + { + "epoch": 1.6066257384454998, + "grad_norm": 1.2659804821014404, + "learning_rate": 9.289419153892687e-05, + "loss": 0.2291, + "step": 41610 + }, + { + "epoch": 1.6070118537395266, + "grad_norm": 0.4997158348560333, + "learning_rate": 9.286845051932509e-05, + "loss": 0.1619, + "step": 41620 + }, + { + "epoch": 1.6073979690335534, + "grad_norm": 0.87920743227005, + "learning_rate": 9.284270949972329e-05, + "loss": 0.1894, + "step": 41630 + }, + { + "epoch": 1.6077840843275801, + "grad_norm": 1.124824047088623, + "learning_rate": 9.28169684801215e-05, + "loss": 0.4755, + "step": 41640 + }, + { + "epoch": 1.6081701996216071, + "grad_norm": 0.2584467828273773, + "learning_rate": 9.279122746051971e-05, + "loss": 0.2281, + "step": 41650 + }, + { + "epoch": 1.6085563149156337, + "grad_norm": 0.6686498522758484, + "learning_rate": 9.276548644091793e-05, + "loss": 0.3244, + "step": 41660 + }, + { + "epoch": 1.6089424302096607, + "grad_norm": 1.991140365600586, + "learning_rate": 9.273974542131614e-05, + "loss": 0.2776, + "step": 41670 + }, + { + "epoch": 1.6093285455036874, + "grad_norm": 0.40963074564933777, + "learning_rate": 9.271400440171437e-05, + "loss": 0.1072, + "step": 41680 + }, + { + "epoch": 1.6097146607977142, + "grad_norm": 1.117253065109253, + "learning_rate": 9.268826338211258e-05, + "loss": 0.2386, + "step": 41690 + }, + { + "epoch": 1.610100776091741, + "grad_norm": 1.2031314373016357, + 
"learning_rate": 9.266252236251078e-05, + "loss": 0.1967, + "step": 41700 + }, + { + "epoch": 1.6104868913857677, + "grad_norm": 1.4460607767105103, + "learning_rate": 9.263678134290899e-05, + "loss": 0.2471, + "step": 41710 + }, + { + "epoch": 1.6108730066797947, + "grad_norm": 1.5548468828201294, + "learning_rate": 9.26110403233072e-05, + "loss": 0.1846, + "step": 41720 + }, + { + "epoch": 1.6112591219738213, + "grad_norm": 2.042555809020996, + "learning_rate": 9.258529930370542e-05, + "loss": 0.2791, + "step": 41730 + }, + { + "epoch": 1.6116452372678483, + "grad_norm": 0.19565679132938385, + "learning_rate": 9.255955828410365e-05, + "loss": 0.2138, + "step": 41740 + }, + { + "epoch": 1.612031352561875, + "grad_norm": 0.13099287450313568, + "learning_rate": 9.253381726450186e-05, + "loss": 0.239, + "step": 41750 + }, + { + "epoch": 1.6124174678559018, + "grad_norm": 1.7031620740890503, + "learning_rate": 9.250807624490007e-05, + "loss": 0.4618, + "step": 41760 + }, + { + "epoch": 1.6128035831499286, + "grad_norm": 1.3827983140945435, + "learning_rate": 9.248233522529827e-05, + "loss": 0.3837, + "step": 41770 + }, + { + "epoch": 1.6131896984439553, + "grad_norm": 0.29199469089508057, + "learning_rate": 9.245659420569648e-05, + "loss": 0.2054, + "step": 41780 + }, + { + "epoch": 1.6135758137379823, + "grad_norm": 0.8447692394256592, + "learning_rate": 9.24308531860947e-05, + "loss": 0.2001, + "step": 41790 + }, + { + "epoch": 1.6139619290320089, + "grad_norm": 3.2848644256591797, + "learning_rate": 9.240511216649292e-05, + "loss": 0.2177, + "step": 41800 + }, + { + "epoch": 1.6143480443260358, + "grad_norm": 0.7044230699539185, + "learning_rate": 9.237937114689114e-05, + "loss": 0.2592, + "step": 41810 + }, + { + "epoch": 1.6147341596200624, + "grad_norm": 0.6980583667755127, + "learning_rate": 9.235363012728935e-05, + "loss": 0.2133, + "step": 41820 + }, + { + "epoch": 1.6151202749140894, + "grad_norm": 0.6349130868911743, + "learning_rate": 9.232788910768756e-05, + "loss": 0.1238, + "step": 41830 + }, + { + "epoch": 1.6155063902081161, + "grad_norm": 3.3420283794403076, + "learning_rate": 9.230214808808576e-05, + "loss": 0.4264, + "step": 41840 + }, + { + "epoch": 1.615892505502143, + "grad_norm": 1.1701698303222656, + "learning_rate": 9.227640706848398e-05, + "loss": 0.2886, + "step": 41850 + }, + { + "epoch": 1.6162786207961697, + "grad_norm": 1.7839452028274536, + "learning_rate": 9.22506660488822e-05, + "loss": 0.2431, + "step": 41860 + }, + { + "epoch": 1.6166647360901965, + "grad_norm": 0.5988792181015015, + "learning_rate": 9.222492502928042e-05, + "loss": 0.2162, + "step": 41870 + }, + { + "epoch": 1.6170508513842234, + "grad_norm": 2.511929988861084, + "learning_rate": 9.219918400967863e-05, + "loss": 0.2862, + "step": 41880 + }, + { + "epoch": 1.61743696667825, + "grad_norm": 0.524932861328125, + "learning_rate": 9.217344299007684e-05, + "loss": 0.1984, + "step": 41890 + }, + { + "epoch": 1.617823081972277, + "grad_norm": 0.42112675309181213, + "learning_rate": 9.214770197047506e-05, + "loss": 0.3873, + "step": 41900 + }, + { + "epoch": 1.6182091972663037, + "grad_norm": 2.114396810531616, + "learning_rate": 9.212196095087327e-05, + "loss": 0.209, + "step": 41910 + }, + { + "epoch": 1.6185953125603305, + "grad_norm": 1.023512601852417, + "learning_rate": 9.209621993127147e-05, + "loss": 0.1849, + "step": 41920 + }, + { + "epoch": 1.6189814278543573, + "grad_norm": 0.4894075393676758, + "learning_rate": 9.20704789116697e-05, + "loss": 0.2624, + "step": 41930 + }, + { + "epoch": 
1.619367543148384, + "grad_norm": 2.8259129524230957, + "learning_rate": 9.204473789206791e-05, + "loss": 0.1016, + "step": 41940 + }, + { + "epoch": 1.619753658442411, + "grad_norm": 0.30800989270210266, + "learning_rate": 9.201899687246612e-05, + "loss": 0.2056, + "step": 41950 + }, + { + "epoch": 1.6201397737364376, + "grad_norm": 0.9112114906311035, + "learning_rate": 9.199325585286434e-05, + "loss": 0.2935, + "step": 41960 + }, + { + "epoch": 1.6205258890304646, + "grad_norm": 0.9642273783683777, + "learning_rate": 9.196751483326255e-05, + "loss": 0.2782, + "step": 41970 + }, + { + "epoch": 1.6209120043244913, + "grad_norm": 1.4163227081298828, + "learning_rate": 9.194177381366076e-05, + "loss": 0.2259, + "step": 41980 + }, + { + "epoch": 1.621298119618518, + "grad_norm": 1.3390878438949585, + "learning_rate": 9.191603279405898e-05, + "loss": 0.2118, + "step": 41990 + }, + { + "epoch": 1.6216842349125449, + "grad_norm": 0.7193337082862854, + "learning_rate": 9.189029177445719e-05, + "loss": 0.1972, + "step": 42000 + }, + { + "epoch": 1.6220703502065716, + "grad_norm": 0.9605100154876709, + "learning_rate": 9.18645507548554e-05, + "loss": 0.2109, + "step": 42010 + }, + { + "epoch": 1.6224564655005986, + "grad_norm": 1.1802191734313965, + "learning_rate": 9.183880973525362e-05, + "loss": 0.204, + "step": 42020 + }, + { + "epoch": 1.6228425807946252, + "grad_norm": 0.9422736167907715, + "learning_rate": 9.181306871565183e-05, + "loss": 0.2316, + "step": 42030 + }, + { + "epoch": 1.6232286960886522, + "grad_norm": 2.4047484397888184, + "learning_rate": 9.178732769605004e-05, + "loss": 0.3395, + "step": 42040 + }, + { + "epoch": 1.6236148113826787, + "grad_norm": 0.625637948513031, + "learning_rate": 9.176158667644826e-05, + "loss": 0.2339, + "step": 42050 + }, + { + "epoch": 1.6240009266767057, + "grad_norm": 1.628997564315796, + "learning_rate": 9.173584565684647e-05, + "loss": 0.2034, + "step": 42060 + }, + { + "epoch": 1.6243870419707325, + "grad_norm": 0.46910417079925537, + "learning_rate": 9.171010463724468e-05, + "loss": 0.2874, + "step": 42070 + }, + { + "epoch": 1.6247731572647592, + "grad_norm": 1.7646992206573486, + "learning_rate": 9.16843636176429e-05, + "loss": 0.2998, + "step": 42080 + }, + { + "epoch": 1.625159272558786, + "grad_norm": 1.0245219469070435, + "learning_rate": 9.165862259804111e-05, + "loss": 0.3796, + "step": 42090 + }, + { + "epoch": 1.6255453878528128, + "grad_norm": 1.175984501838684, + "learning_rate": 9.163288157843932e-05, + "loss": 0.2683, + "step": 42100 + }, + { + "epoch": 1.6259315031468398, + "grad_norm": 0.5364359617233276, + "learning_rate": 9.160714055883755e-05, + "loss": 0.185, + "step": 42110 + }, + { + "epoch": 1.6263176184408663, + "grad_norm": 2.991917610168457, + "learning_rate": 9.158139953923575e-05, + "loss": 0.4202, + "step": 42120 + }, + { + "epoch": 1.6267037337348933, + "grad_norm": 0.6997040510177612, + "learning_rate": 9.155565851963396e-05, + "loss": 0.2833, + "step": 42130 + }, + { + "epoch": 1.62708984902892, + "grad_norm": 1.2331346273422241, + "learning_rate": 9.152991750003218e-05, + "loss": 0.2001, + "step": 42140 + }, + { + "epoch": 1.6274759643229468, + "grad_norm": 0.5714563131332397, + "learning_rate": 9.150417648043039e-05, + "loss": 0.1649, + "step": 42150 + }, + { + "epoch": 1.6278620796169736, + "grad_norm": 0.18129512667655945, + "learning_rate": 9.14784354608286e-05, + "loss": 0.2042, + "step": 42160 + }, + { + "epoch": 1.6282481949110004, + "grad_norm": 1.0198371410369873, + "learning_rate": 
9.145269444122682e-05, + "loss": 0.089, + "step": 42170 + }, + { + "epoch": 1.6286343102050274, + "grad_norm": 1.7465068101882935, + "learning_rate": 9.142695342162504e-05, + "loss": 0.2767, + "step": 42180 + }, + { + "epoch": 1.629020425499054, + "grad_norm": 1.5238598585128784, + "learning_rate": 9.140121240202324e-05, + "loss": 0.2682, + "step": 42190 + }, + { + "epoch": 1.629406540793081, + "grad_norm": 0.21305501461029053, + "learning_rate": 9.137547138242146e-05, + "loss": 0.1656, + "step": 42200 + }, + { + "epoch": 1.6297926560871077, + "grad_norm": 2.7441041469573975, + "learning_rate": 9.134973036281967e-05, + "loss": 0.1734, + "step": 42210 + }, + { + "epoch": 1.6301787713811344, + "grad_norm": 1.0751773118972778, + "learning_rate": 9.132398934321788e-05, + "loss": 0.3272, + "step": 42220 + }, + { + "epoch": 1.6305648866751612, + "grad_norm": 2.0907068252563477, + "learning_rate": 9.12982483236161e-05, + "loss": 0.2322, + "step": 42230 + }, + { + "epoch": 1.630951001969188, + "grad_norm": 0.8943297266960144, + "learning_rate": 9.127250730401432e-05, + "loss": 0.1908, + "step": 42240 + }, + { + "epoch": 1.631337117263215, + "grad_norm": 0.30939817428588867, + "learning_rate": 9.124676628441254e-05, + "loss": 0.2398, + "step": 42250 + }, + { + "epoch": 1.6317232325572415, + "grad_norm": 0.07228074967861176, + "learning_rate": 9.122102526481075e-05, + "loss": 0.2146, + "step": 42260 + }, + { + "epoch": 1.6321093478512685, + "grad_norm": 1.6129286289215088, + "learning_rate": 9.119528424520895e-05, + "loss": 0.2989, + "step": 42270 + }, + { + "epoch": 1.632495463145295, + "grad_norm": 3.1282570362091064, + "learning_rate": 9.116954322560716e-05, + "loss": 0.2296, + "step": 42280 + }, + { + "epoch": 1.632881578439322, + "grad_norm": 1.9693125486373901, + "learning_rate": 9.114380220600538e-05, + "loss": 0.3122, + "step": 42290 + }, + { + "epoch": 1.6332676937333488, + "grad_norm": 0.7935388088226318, + "learning_rate": 9.11180611864036e-05, + "loss": 0.2738, + "step": 42300 + }, + { + "epoch": 1.6336538090273756, + "grad_norm": 0.6630697250366211, + "learning_rate": 9.109232016680182e-05, + "loss": 0.1349, + "step": 42310 + }, + { + "epoch": 1.6340399243214023, + "grad_norm": 0.6689760684967041, + "learning_rate": 9.106657914720003e-05, + "loss": 0.2881, + "step": 42320 + }, + { + "epoch": 1.634426039615429, + "grad_norm": 1.3075367212295532, + "learning_rate": 9.104083812759824e-05, + "loss": 0.3599, + "step": 42330 + }, + { + "epoch": 1.634812154909456, + "grad_norm": 0.6957948803901672, + "learning_rate": 9.101509710799644e-05, + "loss": 0.2562, + "step": 42340 + }, + { + "epoch": 1.6351982702034826, + "grad_norm": 0.6405411958694458, + "learning_rate": 9.098935608839466e-05, + "loss": 0.3024, + "step": 42350 + }, + { + "epoch": 1.6355843854975096, + "grad_norm": 1.4570382833480835, + "learning_rate": 9.096361506879288e-05, + "loss": 0.2415, + "step": 42360 + }, + { + "epoch": 1.6359705007915364, + "grad_norm": 0.28187355399131775, + "learning_rate": 9.09378740491911e-05, + "loss": 0.3213, + "step": 42370 + }, + { + "epoch": 1.6363566160855632, + "grad_norm": 0.5807194113731384, + "learning_rate": 9.091213302958931e-05, + "loss": 0.2513, + "step": 42380 + }, + { + "epoch": 1.63674273137959, + "grad_norm": 2.750338554382324, + "learning_rate": 9.088639200998752e-05, + "loss": 0.2667, + "step": 42390 + }, + { + "epoch": 1.6371288466736167, + "grad_norm": 2.9717495441436768, + "learning_rate": 9.086065099038574e-05, + "loss": 0.2719, + "step": 42400 + }, + { + "epoch": 
1.6375149619676437, + "grad_norm": 0.5903140306472778, + "learning_rate": 9.083490997078394e-05, + "loss": 0.2861, + "step": 42410 + }, + { + "epoch": 1.6379010772616702, + "grad_norm": 0.5791400671005249, + "learning_rate": 9.080916895118215e-05, + "loss": 0.1999, + "step": 42420 + }, + { + "epoch": 1.6382871925556972, + "grad_norm": 0.5550700426101685, + "learning_rate": 9.078342793158038e-05, + "loss": 0.2856, + "step": 42430 + }, + { + "epoch": 1.638673307849724, + "grad_norm": 0.1384456902742386, + "learning_rate": 9.075768691197859e-05, + "loss": 0.1379, + "step": 42440 + }, + { + "epoch": 1.6390594231437507, + "grad_norm": 0.4201198220252991, + "learning_rate": 9.07319458923768e-05, + "loss": 0.2401, + "step": 42450 + }, + { + "epoch": 1.6394455384377775, + "grad_norm": 0.4227651059627533, + "learning_rate": 9.070620487277502e-05, + "loss": 0.1251, + "step": 42460 + }, + { + "epoch": 1.6398316537318043, + "grad_norm": 0.5025180578231812, + "learning_rate": 9.068046385317323e-05, + "loss": 0.1911, + "step": 42470 + }, + { + "epoch": 1.6402177690258313, + "grad_norm": 1.5064163208007812, + "learning_rate": 9.065472283357144e-05, + "loss": 0.2291, + "step": 42480 + }, + { + "epoch": 1.6406038843198578, + "grad_norm": 1.0776429176330566, + "learning_rate": 9.062898181396966e-05, + "loss": 0.1429, + "step": 42490 + }, + { + "epoch": 1.6409899996138848, + "grad_norm": 1.409090280532837, + "learning_rate": 9.060324079436787e-05, + "loss": 0.1768, + "step": 42500 + }, + { + "epoch": 1.6413761149079114, + "grad_norm": 1.4606170654296875, + "learning_rate": 9.057749977476608e-05, + "loss": 0.1657, + "step": 42510 + }, + { + "epoch": 1.6417622302019383, + "grad_norm": 0.1410249024629593, + "learning_rate": 9.05517587551643e-05, + "loss": 0.1768, + "step": 42520 + }, + { + "epoch": 1.6421483454959651, + "grad_norm": 1.3056964874267578, + "learning_rate": 9.052601773556251e-05, + "loss": 0.2001, + "step": 42530 + }, + { + "epoch": 1.6425344607899919, + "grad_norm": 0.7286831140518188, + "learning_rate": 9.050027671596072e-05, + "loss": 0.2181, + "step": 42540 + }, + { + "epoch": 1.6429205760840186, + "grad_norm": 0.7450721263885498, + "learning_rate": 9.047453569635894e-05, + "loss": 0.1961, + "step": 42550 + }, + { + "epoch": 1.6433066913780454, + "grad_norm": 0.3484252691268921, + "learning_rate": 9.044879467675715e-05, + "loss": 0.1435, + "step": 42560 + }, + { + "epoch": 1.6436928066720724, + "grad_norm": 1.4743714332580566, + "learning_rate": 9.042305365715536e-05, + "loss": 0.218, + "step": 42570 + }, + { + "epoch": 1.644078921966099, + "grad_norm": 1.9126007556915283, + "learning_rate": 9.039731263755358e-05, + "loss": 0.3114, + "step": 42580 + }, + { + "epoch": 1.644465037260126, + "grad_norm": 0.1249854639172554, + "learning_rate": 9.037157161795179e-05, + "loss": 0.1675, + "step": 42590 + }, + { + "epoch": 1.6448511525541527, + "grad_norm": 0.948403537273407, + "learning_rate": 9.034583059835e-05, + "loss": 0.1956, + "step": 42600 + }, + { + "epoch": 1.6452372678481795, + "grad_norm": 1.6507424116134644, + "learning_rate": 9.032008957874823e-05, + "loss": 0.2754, + "step": 42610 + }, + { + "epoch": 1.6456233831422062, + "grad_norm": 2.7686564922332764, + "learning_rate": 9.029434855914643e-05, + "loss": 0.3409, + "step": 42620 + }, + { + "epoch": 1.646009498436233, + "grad_norm": 2.7112228870391846, + "learning_rate": 9.026860753954464e-05, + "loss": 0.3626, + "step": 42630 + }, + { + "epoch": 1.64639561373026, + "grad_norm": 1.400976538658142, + "learning_rate": 9.024286651994286e-05, 
+ "loss": 0.199, + "step": 42640 + }, + { + "epoch": 1.6467817290242865, + "grad_norm": 1.4591180086135864, + "learning_rate": 9.021712550034107e-05, + "loss": 0.4385, + "step": 42650 + }, + { + "epoch": 1.6471678443183135, + "grad_norm": 1.386496901512146, + "learning_rate": 9.019138448073928e-05, + "loss": 0.1433, + "step": 42660 + }, + { + "epoch": 1.64755395961234, + "grad_norm": 2.054412841796875, + "learning_rate": 9.01656434611375e-05, + "loss": 0.1602, + "step": 42670 + }, + { + "epoch": 1.647940074906367, + "grad_norm": 1.5373504161834717, + "learning_rate": 9.013990244153572e-05, + "loss": 0.1329, + "step": 42680 + }, + { + "epoch": 1.6483261902003938, + "grad_norm": 2.131410598754883, + "learning_rate": 9.011416142193392e-05, + "loss": 0.2858, + "step": 42690 + }, + { + "epoch": 1.6487123054944206, + "grad_norm": 1.5870622396469116, + "learning_rate": 9.008842040233214e-05, + "loss": 0.414, + "step": 42700 + }, + { + "epoch": 1.6490984207884476, + "grad_norm": 0.46856650710105896, + "learning_rate": 9.006267938273035e-05, + "loss": 0.1262, + "step": 42710 + }, + { + "epoch": 1.6494845360824741, + "grad_norm": 1.368762731552124, + "learning_rate": 9.003693836312856e-05, + "loss": 0.2341, + "step": 42720 + }, + { + "epoch": 1.6498706513765011, + "grad_norm": 2.3937511444091797, + "learning_rate": 9.001119734352678e-05, + "loss": 0.215, + "step": 42730 + }, + { + "epoch": 1.6502567666705277, + "grad_norm": 0.9359129071235657, + "learning_rate": 8.9985456323925e-05, + "loss": 0.2173, + "step": 42740 + }, + { + "epoch": 1.6506428819645547, + "grad_norm": 1.381408452987671, + "learning_rate": 8.995971530432322e-05, + "loss": 0.1671, + "step": 42750 + }, + { + "epoch": 1.6510289972585814, + "grad_norm": 0.5645018815994263, + "learning_rate": 8.993397428472142e-05, + "loss": 0.1943, + "step": 42760 + }, + { + "epoch": 1.6514151125526082, + "grad_norm": 1.1989009380340576, + "learning_rate": 8.990823326511963e-05, + "loss": 0.1845, + "step": 42770 + }, + { + "epoch": 1.651801227846635, + "grad_norm": 2.2969398498535156, + "learning_rate": 8.988249224551784e-05, + "loss": 0.3469, + "step": 42780 + }, + { + "epoch": 1.6521873431406617, + "grad_norm": 1.931502342224121, + "learning_rate": 8.985675122591606e-05, + "loss": 0.3857, + "step": 42790 + }, + { + "epoch": 1.6525734584346887, + "grad_norm": 1.2291436195373535, + "learning_rate": 8.983101020631428e-05, + "loss": 0.1948, + "step": 42800 + }, + { + "epoch": 1.6529595737287153, + "grad_norm": 0.3344796597957611, + "learning_rate": 8.98052691867125e-05, + "loss": 0.2707, + "step": 42810 + }, + { + "epoch": 1.6533456890227423, + "grad_norm": 1.9174389839172363, + "learning_rate": 8.977952816711071e-05, + "loss": 0.2318, + "step": 42820 + }, + { + "epoch": 1.653731804316769, + "grad_norm": 0.49493780732154846, + "learning_rate": 8.975378714750892e-05, + "loss": 0.2615, + "step": 42830 + }, + { + "epoch": 1.6541179196107958, + "grad_norm": 0.8160524964332581, + "learning_rate": 8.972804612790712e-05, + "loss": 0.1837, + "step": 42840 + }, + { + "epoch": 1.6545040349048226, + "grad_norm": 0.571782112121582, + "learning_rate": 8.970230510830534e-05, + "loss": 0.1451, + "step": 42850 + }, + { + "epoch": 1.6548901501988493, + "grad_norm": 2.636889934539795, + "learning_rate": 8.967656408870356e-05, + "loss": 0.348, + "step": 42860 + }, + { + "epoch": 1.6552762654928763, + "grad_norm": 0.4561298191547394, + "learning_rate": 8.965082306910178e-05, + "loss": 0.3301, + "step": 42870 + }, + { + "epoch": 1.6556623807869029, + "grad_norm": 
0.7691421508789062, + "learning_rate": 8.962508204949999e-05, + "loss": 0.231, + "step": 42880 + }, + { + "epoch": 1.6560484960809299, + "grad_norm": 0.2073112428188324, + "learning_rate": 8.95993410298982e-05, + "loss": 0.1358, + "step": 42890 + }, + { + "epoch": 1.6564346113749564, + "grad_norm": 0.35970741510391235, + "learning_rate": 8.957360001029642e-05, + "loss": 0.3412, + "step": 42900 + }, + { + "epoch": 1.6568207266689834, + "grad_norm": 0.6349666118621826, + "learning_rate": 8.954785899069462e-05, + "loss": 0.3279, + "step": 42910 + }, + { + "epoch": 1.6572068419630102, + "grad_norm": 1.6498395204544067, + "learning_rate": 8.952211797109284e-05, + "loss": 0.3195, + "step": 42920 + }, + { + "epoch": 1.657592957257037, + "grad_norm": 1.200462818145752, + "learning_rate": 8.949637695149106e-05, + "loss": 0.1533, + "step": 42930 + }, + { + "epoch": 1.6579790725510637, + "grad_norm": 3.063417673110962, + "learning_rate": 8.947063593188927e-05, + "loss": 0.1553, + "step": 42940 + }, + { + "epoch": 1.6583651878450905, + "grad_norm": 2.632843494415283, + "learning_rate": 8.944489491228748e-05, + "loss": 0.1819, + "step": 42950 + }, + { + "epoch": 1.6587513031391175, + "grad_norm": 1.2199878692626953, + "learning_rate": 8.94191538926857e-05, + "loss": 0.3631, + "step": 42960 + }, + { + "epoch": 1.659137418433144, + "grad_norm": 4.311095237731934, + "learning_rate": 8.939341287308391e-05, + "loss": 0.2747, + "step": 42970 + }, + { + "epoch": 1.659523533727171, + "grad_norm": 1.0289263725280762, + "learning_rate": 8.936767185348211e-05, + "loss": 0.225, + "step": 42980 + }, + { + "epoch": 1.6599096490211978, + "grad_norm": 0.17542269825935364, + "learning_rate": 8.934193083388034e-05, + "loss": 0.3562, + "step": 42990 + }, + { + "epoch": 1.6602957643152245, + "grad_norm": 1.6451623439788818, + "learning_rate": 8.931618981427855e-05, + "loss": 0.3318, + "step": 43000 + }, + { + "epoch": 1.6606818796092513, + "grad_norm": 0.6164776682853699, + "learning_rate": 8.929044879467676e-05, + "loss": 0.272, + "step": 43010 + }, + { + "epoch": 1.661067994903278, + "grad_norm": 0.8627731800079346, + "learning_rate": 8.926470777507498e-05, + "loss": 0.2037, + "step": 43020 + }, + { + "epoch": 1.661454110197305, + "grad_norm": 0.9663155674934387, + "learning_rate": 8.923896675547319e-05, + "loss": 0.2291, + "step": 43030 + }, + { + "epoch": 1.6618402254913316, + "grad_norm": 1.909785509109497, + "learning_rate": 8.92132257358714e-05, + "loss": 0.357, + "step": 43040 + }, + { + "epoch": 1.6622263407853586, + "grad_norm": 1.4348317384719849, + "learning_rate": 8.918748471626962e-05, + "loss": 0.2494, + "step": 43050 + }, + { + "epoch": 1.6626124560793853, + "grad_norm": 1.8088570833206177, + "learning_rate": 8.916174369666783e-05, + "loss": 0.1663, + "step": 43060 + }, + { + "epoch": 1.6629985713734121, + "grad_norm": 0.4020337462425232, + "learning_rate": 8.913600267706604e-05, + "loss": 0.251, + "step": 43070 + }, + { + "epoch": 1.6633846866674389, + "grad_norm": 1.4719258546829224, + "learning_rate": 8.911026165746426e-05, + "loss": 0.3267, + "step": 43080 + }, + { + "epoch": 1.6637708019614657, + "grad_norm": 0.2420385479927063, + "learning_rate": 8.908452063786247e-05, + "loss": 0.2289, + "step": 43090 + }, + { + "epoch": 1.6641569172554926, + "grad_norm": 0.4697989225387573, + "learning_rate": 8.905877961826068e-05, + "loss": 0.1597, + "step": 43100 + }, + { + "epoch": 1.6645430325495192, + "grad_norm": 0.4061245918273926, + "learning_rate": 8.90330385986589e-05, + "loss": 0.2299, + "step": 43110 + 
}, + { + "epoch": 1.6649291478435462, + "grad_norm": 1.4116615056991577, + "learning_rate": 8.900729757905711e-05, + "loss": 0.251, + "step": 43120 + }, + { + "epoch": 1.6653152631375727, + "grad_norm": 0.2808239161968231, + "learning_rate": 8.898155655945532e-05, + "loss": 0.2204, + "step": 43130 + }, + { + "epoch": 1.6657013784315997, + "grad_norm": 0.09532297402620316, + "learning_rate": 8.895581553985354e-05, + "loss": 0.2076, + "step": 43140 + }, + { + "epoch": 1.6660874937256265, + "grad_norm": 1.6641709804534912, + "learning_rate": 8.893007452025175e-05, + "loss": 0.1697, + "step": 43150 + }, + { + "epoch": 1.6664736090196532, + "grad_norm": 0.9248488545417786, + "learning_rate": 8.890433350064996e-05, + "loss": 0.4659, + "step": 43160 + }, + { + "epoch": 1.66685972431368, + "grad_norm": 2.1415629386901855, + "learning_rate": 8.887859248104819e-05, + "loss": 0.3056, + "step": 43170 + }, + { + "epoch": 1.6672458396077068, + "grad_norm": 0.39359986782073975, + "learning_rate": 8.885285146144639e-05, + "loss": 0.2799, + "step": 43180 + }, + { + "epoch": 1.6676319549017338, + "grad_norm": 0.09023096412420273, + "learning_rate": 8.88271104418446e-05, + "loss": 0.2777, + "step": 43190 + }, + { + "epoch": 1.6680180701957603, + "grad_norm": 1.8555763959884644, + "learning_rate": 8.880136942224282e-05, + "loss": 0.3152, + "step": 43200 + }, + { + "epoch": 1.6684041854897873, + "grad_norm": 0.18823792040348053, + "learning_rate": 8.877562840264103e-05, + "loss": 0.1625, + "step": 43210 + }, + { + "epoch": 1.668790300783814, + "grad_norm": 1.5633597373962402, + "learning_rate": 8.874988738303924e-05, + "loss": 0.3041, + "step": 43220 + }, + { + "epoch": 1.6691764160778408, + "grad_norm": 1.6646497249603271, + "learning_rate": 8.872414636343746e-05, + "loss": 0.3719, + "step": 43230 + }, + { + "epoch": 1.6695625313718676, + "grad_norm": 1.569008469581604, + "learning_rate": 8.869840534383568e-05, + "loss": 0.1797, + "step": 43240 + }, + { + "epoch": 1.6699486466658944, + "grad_norm": 0.44778671860694885, + "learning_rate": 8.86726643242339e-05, + "loss": 0.1192, + "step": 43250 + }, + { + "epoch": 1.6703347619599214, + "grad_norm": 1.936880350112915, + "learning_rate": 8.86469233046321e-05, + "loss": 0.262, + "step": 43260 + }, + { + "epoch": 1.670720877253948, + "grad_norm": 2.0454766750335693, + "learning_rate": 8.862118228503031e-05, + "loss": 0.125, + "step": 43270 + }, + { + "epoch": 1.671106992547975, + "grad_norm": 2.3878109455108643, + "learning_rate": 8.859544126542852e-05, + "loss": 0.2755, + "step": 43280 + }, + { + "epoch": 1.6714931078420017, + "grad_norm": 2.1281206607818604, + "learning_rate": 8.856970024582674e-05, + "loss": 0.3056, + "step": 43290 + }, + { + "epoch": 1.6718792231360284, + "grad_norm": 1.1602097749710083, + "learning_rate": 8.854395922622496e-05, + "loss": 0.3131, + "step": 43300 + }, + { + "epoch": 1.6722653384300552, + "grad_norm": 1.444730520248413, + "learning_rate": 8.851821820662318e-05, + "loss": 0.2922, + "step": 43310 + }, + { + "epoch": 1.672651453724082, + "grad_norm": 1.0081762075424194, + "learning_rate": 8.849247718702139e-05, + "loss": 0.2689, + "step": 43320 + }, + { + "epoch": 1.673037569018109, + "grad_norm": 2.532080888748169, + "learning_rate": 8.846673616741959e-05, + "loss": 0.4207, + "step": 43330 + }, + { + "epoch": 1.6734236843121355, + "grad_norm": 1.1733953952789307, + "learning_rate": 8.84409951478178e-05, + "loss": 0.3253, + "step": 43340 + }, + { + "epoch": 1.6738097996061625, + "grad_norm": 0.5781744122505188, + "learning_rate": 
8.841525412821602e-05, + "loss": 0.165, + "step": 43350 + }, + { + "epoch": 1.674195914900189, + "grad_norm": 2.3320493698120117, + "learning_rate": 8.838951310861424e-05, + "loss": 0.1466, + "step": 43360 + }, + { + "epoch": 1.674582030194216, + "grad_norm": 0.6514772176742554, + "learning_rate": 8.836377208901246e-05, + "loss": 0.2392, + "step": 43370 + }, + { + "epoch": 1.6749681454882428, + "grad_norm": 2.446646213531494, + "learning_rate": 8.833803106941067e-05, + "loss": 0.2237, + "step": 43380 + }, + { + "epoch": 1.6753542607822696, + "grad_norm": 1.0567893981933594, + "learning_rate": 8.831229004980888e-05, + "loss": 0.4682, + "step": 43390 + }, + { + "epoch": 1.6757403760762963, + "grad_norm": 1.559910774230957, + "learning_rate": 8.828654903020708e-05, + "loss": 0.2283, + "step": 43400 + }, + { + "epoch": 1.676126491370323, + "grad_norm": 2.8934245109558105, + "learning_rate": 8.82608080106053e-05, + "loss": 0.255, + "step": 43410 + }, + { + "epoch": 1.67651260666435, + "grad_norm": 1.592612862586975, + "learning_rate": 8.823506699100352e-05, + "loss": 0.3115, + "step": 43420 + }, + { + "epoch": 1.6768987219583766, + "grad_norm": 0.19438475370407104, + "learning_rate": 8.820932597140174e-05, + "loss": 0.2549, + "step": 43430 + }, + { + "epoch": 1.6772848372524036, + "grad_norm": 1.3219093084335327, + "learning_rate": 8.818358495179995e-05, + "loss": 0.3156, + "step": 43440 + }, + { + "epoch": 1.6776709525464304, + "grad_norm": 0.23484057188034058, + "learning_rate": 8.815784393219816e-05, + "loss": 0.1866, + "step": 43450 + }, + { + "epoch": 1.6780570678404572, + "grad_norm": 1.284556269645691, + "learning_rate": 8.813210291259638e-05, + "loss": 0.2019, + "step": 43460 + }, + { + "epoch": 1.678443183134484, + "grad_norm": 1.4797706604003906, + "learning_rate": 8.810636189299459e-05, + "loss": 0.2423, + "step": 43470 + }, + { + "epoch": 1.6788292984285107, + "grad_norm": 1.6002583503723145, + "learning_rate": 8.808062087339279e-05, + "loss": 0.182, + "step": 43480 + }, + { + "epoch": 1.6792154137225377, + "grad_norm": 1.0503334999084473, + "learning_rate": 8.805487985379102e-05, + "loss": 0.2062, + "step": 43490 + }, + { + "epoch": 1.6796015290165642, + "grad_norm": 1.0708048343658447, + "learning_rate": 8.802913883418923e-05, + "loss": 0.1004, + "step": 43500 + }, + { + "epoch": 1.6799876443105912, + "grad_norm": 1.6162430047988892, + "learning_rate": 8.800339781458744e-05, + "loss": 0.1858, + "step": 43510 + }, + { + "epoch": 1.680373759604618, + "grad_norm": 3.5026352405548096, + "learning_rate": 8.797765679498566e-05, + "loss": 0.3431, + "step": 43520 + }, + { + "epoch": 1.6807598748986448, + "grad_norm": 1.1792393922805786, + "learning_rate": 8.795191577538387e-05, + "loss": 0.3389, + "step": 43530 + }, + { + "epoch": 1.6811459901926715, + "grad_norm": 1.0513951778411865, + "learning_rate": 8.792617475578208e-05, + "loss": 0.4476, + "step": 43540 + }, + { + "epoch": 1.6815321054866983, + "grad_norm": 1.3215738534927368, + "learning_rate": 8.79004337361803e-05, + "loss": 0.1624, + "step": 43550 + }, + { + "epoch": 1.6819182207807253, + "grad_norm": 0.4807497262954712, + "learning_rate": 8.787469271657851e-05, + "loss": 0.2777, + "step": 43560 + }, + { + "epoch": 1.6823043360747518, + "grad_norm": 1.1114505529403687, + "learning_rate": 8.784895169697672e-05, + "loss": 0.1405, + "step": 43570 + }, + { + "epoch": 1.6826904513687788, + "grad_norm": 0.8023913502693176, + "learning_rate": 8.782321067737494e-05, + "loss": 0.1744, + "step": 43580 + }, + { + "epoch": 
1.6830765666628054, + "grad_norm": 1.1723856925964355, + "learning_rate": 8.779746965777315e-05, + "loss": 0.214, + "step": 43590 + }, + { + "epoch": 1.6834626819568324, + "grad_norm": 2.1813528537750244, + "learning_rate": 8.777172863817136e-05, + "loss": 0.2897, + "step": 43600 + }, + { + "epoch": 1.6838487972508591, + "grad_norm": 0.2930634319782257, + "learning_rate": 8.774598761856958e-05, + "loss": 0.121, + "step": 43610 + }, + { + "epoch": 1.6842349125448859, + "grad_norm": 0.8081830739974976, + "learning_rate": 8.772024659896779e-05, + "loss": 0.2847, + "step": 43620 + }, + { + "epoch": 1.6846210278389127, + "grad_norm": 0.8421902060508728, + "learning_rate": 8.7694505579366e-05, + "loss": 0.3151, + "step": 43630 + }, + { + "epoch": 1.6850071431329394, + "grad_norm": 1.6624525785446167, + "learning_rate": 8.766876455976421e-05, + "loss": 0.1725, + "step": 43640 + }, + { + "epoch": 1.6853932584269664, + "grad_norm": 0.6831340789794922, + "learning_rate": 8.764302354016243e-05, + "loss": 0.432, + "step": 43650 + }, + { + "epoch": 1.685779373720993, + "grad_norm": 1.0642820596694946, + "learning_rate": 8.761728252056064e-05, + "loss": 0.2035, + "step": 43660 + }, + { + "epoch": 1.68616548901502, + "grad_norm": 0.6445997357368469, + "learning_rate": 8.759154150095887e-05, + "loss": 0.2407, + "step": 43670 + }, + { + "epoch": 1.6865516043090467, + "grad_norm": 0.1020059660077095, + "learning_rate": 8.756580048135707e-05, + "loss": 0.1744, + "step": 43680 + }, + { + "epoch": 1.6869377196030735, + "grad_norm": 0.3709975481033325, + "learning_rate": 8.754005946175528e-05, + "loss": 0.3713, + "step": 43690 + }, + { + "epoch": 1.6873238348971002, + "grad_norm": 0.5910777449607849, + "learning_rate": 8.75143184421535e-05, + "loss": 0.4291, + "step": 43700 + }, + { + "epoch": 1.687709950191127, + "grad_norm": 2.377362012863159, + "learning_rate": 8.748857742255171e-05, + "loss": 0.2871, + "step": 43710 + }, + { + "epoch": 1.688096065485154, + "grad_norm": 0.46812891960144043, + "learning_rate": 8.746283640294992e-05, + "loss": 0.1998, + "step": 43720 + }, + { + "epoch": 1.6884821807791806, + "grad_norm": 1.391098141670227, + "learning_rate": 8.743709538334813e-05, + "loss": 0.3189, + "step": 43730 + }, + { + "epoch": 1.6888682960732075, + "grad_norm": 0.11116664111614227, + "learning_rate": 8.741135436374636e-05, + "loss": 0.2538, + "step": 43740 + }, + { + "epoch": 1.6892544113672343, + "grad_norm": 0.2116546928882599, + "learning_rate": 8.738561334414456e-05, + "loss": 0.2014, + "step": 43750 + }, + { + "epoch": 1.689640526661261, + "grad_norm": 2.3815419673919678, + "learning_rate": 8.735987232454277e-05, + "loss": 0.2395, + "step": 43760 + }, + { + "epoch": 1.6900266419552878, + "grad_norm": 0.11581381410360336, + "learning_rate": 8.733413130494099e-05, + "loss": 0.2699, + "step": 43770 + }, + { + "epoch": 1.6904127572493146, + "grad_norm": 0.25713813304901123, + "learning_rate": 8.73083902853392e-05, + "loss": 0.1651, + "step": 43780 + }, + { + "epoch": 1.6907988725433416, + "grad_norm": 0.3496115803718567, + "learning_rate": 8.728264926573741e-05, + "loss": 0.3175, + "step": 43790 + }, + { + "epoch": 1.6911849878373681, + "grad_norm": 0.4966486394405365, + "learning_rate": 8.725690824613564e-05, + "loss": 0.258, + "step": 43800 + }, + { + "epoch": 1.6915711031313951, + "grad_norm": 1.2760952711105347, + "learning_rate": 8.723116722653385e-05, + "loss": 0.0882, + "step": 43810 + }, + { + "epoch": 1.6919572184254217, + "grad_norm": 1.0095762014389038, + "learning_rate": 
8.720542620693207e-05, + "loss": 0.2171, + "step": 43820 + }, + { + "epoch": 1.6923433337194487, + "grad_norm": 3.596276044845581, + "learning_rate": 8.717968518733027e-05, + "loss": 0.238, + "step": 43830 + }, + { + "epoch": 1.6927294490134754, + "grad_norm": 0.3974495530128479, + "learning_rate": 8.715394416772848e-05, + "loss": 0.1459, + "step": 43840 + }, + { + "epoch": 1.6931155643075022, + "grad_norm": 1.1426684856414795, + "learning_rate": 8.71282031481267e-05, + "loss": 0.297, + "step": 43850 + }, + { + "epoch": 1.693501679601529, + "grad_norm": 0.887981116771698, + "learning_rate": 8.710246212852492e-05, + "loss": 0.1999, + "step": 43860 + }, + { + "epoch": 1.6938877948955557, + "grad_norm": 2.5697977542877197, + "learning_rate": 8.707672110892313e-05, + "loss": 0.3186, + "step": 43870 + }, + { + "epoch": 1.6942739101895827, + "grad_norm": 1.0131279230117798, + "learning_rate": 8.705098008932135e-05, + "loss": 0.182, + "step": 43880 + }, + { + "epoch": 1.6946600254836093, + "grad_norm": 0.8319138288497925, + "learning_rate": 8.702523906971956e-05, + "loss": 0.2148, + "step": 43890 + }, + { + "epoch": 1.6950461407776363, + "grad_norm": 0.7270296216011047, + "learning_rate": 8.699949805011776e-05, + "loss": 0.1471, + "step": 43900 + }, + { + "epoch": 1.695432256071663, + "grad_norm": 1.2435482740402222, + "learning_rate": 8.697375703051597e-05, + "loss": 0.1446, + "step": 43910 + }, + { + "epoch": 1.6958183713656898, + "grad_norm": 0.5655059218406677, + "learning_rate": 8.69480160109142e-05, + "loss": 0.2765, + "step": 43920 + }, + { + "epoch": 1.6962044866597166, + "grad_norm": 1.7254856824874878, + "learning_rate": 8.692227499131241e-05, + "loss": 0.3449, + "step": 43930 + }, + { + "epoch": 1.6965906019537433, + "grad_norm": 0.5344254374504089, + "learning_rate": 8.689653397171063e-05, + "loss": 0.3847, + "step": 43940 + }, + { + "epoch": 1.6969767172477703, + "grad_norm": 0.7520522475242615, + "learning_rate": 8.687079295210884e-05, + "loss": 0.2785, + "step": 43950 + }, + { + "epoch": 1.6973628325417969, + "grad_norm": 2.708897352218628, + "learning_rate": 8.684505193250705e-05, + "loss": 0.3298, + "step": 43960 + }, + { + "epoch": 1.6977489478358239, + "grad_norm": 0.06953504681587219, + "learning_rate": 8.681931091290525e-05, + "loss": 0.094, + "step": 43970 + }, + { + "epoch": 1.6981350631298504, + "grad_norm": 0.586360514163971, + "learning_rate": 8.679356989330347e-05, + "loss": 0.2969, + "step": 43980 + }, + { + "epoch": 1.6985211784238774, + "grad_norm": 0.5543690919876099, + "learning_rate": 8.67678288737017e-05, + "loss": 0.1414, + "step": 43990 + }, + { + "epoch": 1.6989072937179042, + "grad_norm": 1.7767741680145264, + "learning_rate": 8.674208785409991e-05, + "loss": 0.3374, + "step": 44000 + }, + { + "epoch": 1.699293409011931, + "grad_norm": 1.463431477546692, + "learning_rate": 8.671634683449812e-05, + "loss": 0.2547, + "step": 44010 + }, + { + "epoch": 1.699679524305958, + "grad_norm": 0.3490108251571655, + "learning_rate": 8.669060581489633e-05, + "loss": 0.2969, + "step": 44020 + }, + { + "epoch": 1.7000656395999845, + "grad_norm": 0.4525464177131653, + "learning_rate": 8.666486479529455e-05, + "loss": 0.2982, + "step": 44030 + }, + { + "epoch": 1.7004517548940115, + "grad_norm": 0.3723921775817871, + "learning_rate": 8.663912377569276e-05, + "loss": 0.2818, + "step": 44040 + }, + { + "epoch": 1.700837870188038, + "grad_norm": 0.5301186442375183, + "learning_rate": 8.661338275609097e-05, + "loss": 0.1871, + "step": 44050 + }, + { + "epoch": 1.701223985482065, + 
"grad_norm": 2.890627861022949, + "learning_rate": 8.658764173648919e-05, + "loss": 0.3608, + "step": 44060 + }, + { + "epoch": 1.7016101007760918, + "grad_norm": 1.4061124324798584, + "learning_rate": 8.65619007168874e-05, + "loss": 0.203, + "step": 44070 + }, + { + "epoch": 1.7019962160701185, + "grad_norm": 2.882411241531372, + "learning_rate": 8.653615969728561e-05, + "loss": 0.2203, + "step": 44080 + }, + { + "epoch": 1.7023823313641453, + "grad_norm": 1.3079861402511597, + "learning_rate": 8.651041867768383e-05, + "loss": 0.1199, + "step": 44090 + }, + { + "epoch": 1.702768446658172, + "grad_norm": 4.284554481506348, + "learning_rate": 8.648467765808204e-05, + "loss": 0.2187, + "step": 44100 + }, + { + "epoch": 1.703154561952199, + "grad_norm": 0.14506328105926514, + "learning_rate": 8.645893663848025e-05, + "loss": 0.2822, + "step": 44110 + }, + { + "epoch": 1.7035406772462256, + "grad_norm": 0.48547646403312683, + "learning_rate": 8.643319561887847e-05, + "loss": 0.2239, + "step": 44120 + }, + { + "epoch": 1.7039267925402526, + "grad_norm": 1.2987794876098633, + "learning_rate": 8.640745459927668e-05, + "loss": 0.1554, + "step": 44130 + }, + { + "epoch": 1.7043129078342794, + "grad_norm": 0.25391751527786255, + "learning_rate": 8.63817135796749e-05, + "loss": 0.2635, + "step": 44140 + }, + { + "epoch": 1.7046990231283061, + "grad_norm": 1.7552175521850586, + "learning_rate": 8.635597256007311e-05, + "loss": 0.2762, + "step": 44150 + }, + { + "epoch": 1.705085138422333, + "grad_norm": 0.8608360290527344, + "learning_rate": 8.633023154047132e-05, + "loss": 0.2384, + "step": 44160 + }, + { + "epoch": 1.7054712537163597, + "grad_norm": 2.4618284702301025, + "learning_rate": 8.630449052086955e-05, + "loss": 0.3383, + "step": 44170 + }, + { + "epoch": 1.7058573690103866, + "grad_norm": 0.9636523127555847, + "learning_rate": 8.627874950126775e-05, + "loss": 0.2036, + "step": 44180 + }, + { + "epoch": 1.7062434843044132, + "grad_norm": 0.3806803226470947, + "learning_rate": 8.625300848166596e-05, + "loss": 0.1378, + "step": 44190 + }, + { + "epoch": 1.7066295995984402, + "grad_norm": 0.31704220175743103, + "learning_rate": 8.622726746206417e-05, + "loss": 0.2171, + "step": 44200 + }, + { + "epoch": 1.7070157148924667, + "grad_norm": 2.3355181217193604, + "learning_rate": 8.620152644246239e-05, + "loss": 0.2983, + "step": 44210 + }, + { + "epoch": 1.7074018301864937, + "grad_norm": 1.3819530010223389, + "learning_rate": 8.61757854228606e-05, + "loss": 0.3944, + "step": 44220 + }, + { + "epoch": 1.7077879454805205, + "grad_norm": 0.5096393823623657, + "learning_rate": 8.615004440325883e-05, + "loss": 0.3142, + "step": 44230 + }, + { + "epoch": 1.7081740607745473, + "grad_norm": 0.7113396525382996, + "learning_rate": 8.612430338365704e-05, + "loss": 0.1644, + "step": 44240 + }, + { + "epoch": 1.708560176068574, + "grad_norm": 0.3259173333644867, + "learning_rate": 8.609856236405524e-05, + "loss": 0.208, + "step": 44250 + }, + { + "epoch": 1.7089462913626008, + "grad_norm": 0.1341869980096817, + "learning_rate": 8.607282134445345e-05, + "loss": 0.1642, + "step": 44260 + }, + { + "epoch": 1.7093324066566278, + "grad_norm": 1.0947731733322144, + "learning_rate": 8.604708032485167e-05, + "loss": 0.3079, + "step": 44270 + }, + { + "epoch": 1.7097185219506543, + "grad_norm": 1.065469741821289, + "learning_rate": 8.602133930524988e-05, + "loss": 0.2845, + "step": 44280 + }, + { + "epoch": 1.7101046372446813, + "grad_norm": 0.23346304893493652, + "learning_rate": 8.59955982856481e-05, + "loss": 
0.1428, + "step": 44290 + }, + { + "epoch": 1.710490752538708, + "grad_norm": 0.9997304677963257, + "learning_rate": 8.596985726604632e-05, + "loss": 0.2313, + "step": 44300 + }, + { + "epoch": 1.7108768678327348, + "grad_norm": 0.2540823519229889, + "learning_rate": 8.594411624644453e-05, + "loss": 0.131, + "step": 44310 + }, + { + "epoch": 1.7112629831267616, + "grad_norm": 0.35752159357070923, + "learning_rate": 8.591837522684273e-05, + "loss": 0.2414, + "step": 44320 + }, + { + "epoch": 1.7116490984207884, + "grad_norm": 0.9997861385345459, + "learning_rate": 8.589263420724095e-05, + "loss": 0.2726, + "step": 44330 + }, + { + "epoch": 1.7120352137148154, + "grad_norm": 0.1516636461019516, + "learning_rate": 8.586689318763916e-05, + "loss": 0.0939, + "step": 44340 + }, + { + "epoch": 1.712421329008842, + "grad_norm": 1.5101124048233032, + "learning_rate": 8.584115216803737e-05, + "loss": 0.1331, + "step": 44350 + }, + { + "epoch": 1.712807444302869, + "grad_norm": 1.8988938331604004, + "learning_rate": 8.58154111484356e-05, + "loss": 0.104, + "step": 44360 + }, + { + "epoch": 1.7131935595968957, + "grad_norm": 0.50355464220047, + "learning_rate": 8.578967012883381e-05, + "loss": 0.2283, + "step": 44370 + }, + { + "epoch": 1.7135796748909224, + "grad_norm": 1.3266645669937134, + "learning_rate": 8.576392910923203e-05, + "loss": 0.2639, + "step": 44380 + }, + { + "epoch": 1.7139657901849492, + "grad_norm": 0.5714776515960693, + "learning_rate": 8.573818808963023e-05, + "loss": 0.3018, + "step": 44390 + }, + { + "epoch": 1.714351905478976, + "grad_norm": 2.2676408290863037, + "learning_rate": 8.571244707002844e-05, + "loss": 0.1851, + "step": 44400 + }, + { + "epoch": 1.714738020773003, + "grad_norm": 1.338866114616394, + "learning_rate": 8.568670605042665e-05, + "loss": 0.198, + "step": 44410 + }, + { + "epoch": 1.7151241360670295, + "grad_norm": 0.7841195464134216, + "learning_rate": 8.566096503082488e-05, + "loss": 0.1689, + "step": 44420 + }, + { + "epoch": 1.7155102513610565, + "grad_norm": 2.0101919174194336, + "learning_rate": 8.56352240112231e-05, + "loss": 0.278, + "step": 44430 + }, + { + "epoch": 1.715896366655083, + "grad_norm": 0.3661075830459595, + "learning_rate": 8.560948299162131e-05, + "loss": 0.19, + "step": 44440 + }, + { + "epoch": 1.71628248194911, + "grad_norm": 1.3221279382705688, + "learning_rate": 8.558374197201952e-05, + "loss": 0.2621, + "step": 44450 + }, + { + "epoch": 1.7166685972431368, + "grad_norm": 1.1006388664245605, + "learning_rate": 8.555800095241773e-05, + "loss": 0.1661, + "step": 44460 + }, + { + "epoch": 1.7170547125371636, + "grad_norm": 0.6990749835968018, + "learning_rate": 8.553225993281593e-05, + "loss": 0.1234, + "step": 44470 + }, + { + "epoch": 1.7174408278311903, + "grad_norm": 1.1236909627914429, + "learning_rate": 8.550651891321416e-05, + "loss": 0.3528, + "step": 44480 + }, + { + "epoch": 1.717826943125217, + "grad_norm": 1.3502060174942017, + "learning_rate": 8.548077789361237e-05, + "loss": 0.1035, + "step": 44490 + }, + { + "epoch": 1.718213058419244, + "grad_norm": 0.6443360447883606, + "learning_rate": 8.545503687401059e-05, + "loss": 0.257, + "step": 44500 + }, + { + "epoch": 1.7185991737132706, + "grad_norm": 1.864953875541687, + "learning_rate": 8.54292958544088e-05, + "loss": 0.1791, + "step": 44510 + }, + { + "epoch": 1.7189852890072976, + "grad_norm": 0.8403190970420837, + "learning_rate": 8.540355483480701e-05, + "loss": 0.1907, + "step": 44520 + }, + { + "epoch": 1.7193714043013244, + "grad_norm": 2.308983325958252, + 
"learning_rate": 8.537781381520523e-05, + "loss": 0.3784, + "step": 44530 + }, + { + "epoch": 1.7197575195953512, + "grad_norm": 0.3229024112224579, + "learning_rate": 8.535207279560343e-05, + "loss": 0.2405, + "step": 44540 + }, + { + "epoch": 1.720143634889378, + "grad_norm": 0.5669896602630615, + "learning_rate": 8.532633177600165e-05, + "loss": 0.2103, + "step": 44550 + }, + { + "epoch": 1.7205297501834047, + "grad_norm": 0.9105948805809021, + "learning_rate": 8.530059075639987e-05, + "loss": 0.1816, + "step": 44560 + }, + { + "epoch": 1.7209158654774317, + "grad_norm": 0.4213886260986328, + "learning_rate": 8.527484973679808e-05, + "loss": 0.1735, + "step": 44570 + }, + { + "epoch": 1.7213019807714582, + "grad_norm": 2.9891350269317627, + "learning_rate": 8.52491087171963e-05, + "loss": 0.3105, + "step": 44580 + }, + { + "epoch": 1.7216880960654852, + "grad_norm": 2.2383408546447754, + "learning_rate": 8.52233676975945e-05, + "loss": 0.2677, + "step": 44590 + }, + { + "epoch": 1.722074211359512, + "grad_norm": 0.128885418176651, + "learning_rate": 8.519762667799272e-05, + "loss": 0.2, + "step": 44600 + }, + { + "epoch": 1.7224603266535388, + "grad_norm": 1.021690845489502, + "learning_rate": 8.517188565839093e-05, + "loss": 0.3636, + "step": 44610 + }, + { + "epoch": 1.7228464419475655, + "grad_norm": 3.629401445388794, + "learning_rate": 8.514614463878915e-05, + "loss": 0.3223, + "step": 44620 + }, + { + "epoch": 1.7232325572415923, + "grad_norm": 2.4377548694610596, + "learning_rate": 8.512040361918736e-05, + "loss": 0.2448, + "step": 44630 + }, + { + "epoch": 1.7236186725356193, + "grad_norm": 0.6085236668586731, + "learning_rate": 8.509466259958557e-05, + "loss": 0.2202, + "step": 44640 + }, + { + "epoch": 1.7240047878296458, + "grad_norm": 1.4205585718154907, + "learning_rate": 8.506892157998379e-05, + "loss": 0.1697, + "step": 44650 + }, + { + "epoch": 1.7243909031236728, + "grad_norm": 2.6096320152282715, + "learning_rate": 8.5043180560382e-05, + "loss": 0.1657, + "step": 44660 + }, + { + "epoch": 1.7247770184176994, + "grad_norm": 0.155767560005188, + "learning_rate": 8.501743954078021e-05, + "loss": 0.162, + "step": 44670 + }, + { + "epoch": 1.7251631337117264, + "grad_norm": 0.30843400955200195, + "learning_rate": 8.499169852117843e-05, + "loss": 0.2844, + "step": 44680 + }, + { + "epoch": 1.7255492490057531, + "grad_norm": 0.7207142114639282, + "learning_rate": 8.496595750157664e-05, + "loss": 0.3617, + "step": 44690 + }, + { + "epoch": 1.72593536429978, + "grad_norm": 0.6151508092880249, + "learning_rate": 8.494021648197485e-05, + "loss": 0.1202, + "step": 44700 + }, + { + "epoch": 1.7263214795938067, + "grad_norm": 1.712505578994751, + "learning_rate": 8.491447546237307e-05, + "loss": 0.3624, + "step": 44710 + }, + { + "epoch": 1.7267075948878334, + "grad_norm": 2.3895373344421387, + "learning_rate": 8.488873444277128e-05, + "loss": 0.3347, + "step": 44720 + }, + { + "epoch": 1.7270937101818604, + "grad_norm": 0.3279499411582947, + "learning_rate": 8.48629934231695e-05, + "loss": 0.1821, + "step": 44730 + }, + { + "epoch": 1.727479825475887, + "grad_norm": 0.9812091588973999, + "learning_rate": 8.48372524035677e-05, + "loss": 0.3173, + "step": 44740 + }, + { + "epoch": 1.727865940769914, + "grad_norm": 0.36781829595565796, + "learning_rate": 8.481151138396592e-05, + "loss": 0.2766, + "step": 44750 + }, + { + "epoch": 1.7282520560639407, + "grad_norm": 2.1118052005767822, + "learning_rate": 8.478577036436413e-05, + "loss": 0.4954, + "step": 44760 + }, + { + "epoch": 
1.7286381713579675, + "grad_norm": 0.7159673571586609, + "learning_rate": 8.476002934476235e-05, + "loss": 0.3877, + "step": 44770 + }, + { + "epoch": 1.7290242866519943, + "grad_norm": 0.16327527165412903, + "learning_rate": 8.473428832516056e-05, + "loss": 0.2975, + "step": 44780 + }, + { + "epoch": 1.729410401946021, + "grad_norm": 1.2890655994415283, + "learning_rate": 8.470854730555877e-05, + "loss": 0.2056, + "step": 44790 + }, + { + "epoch": 1.729796517240048, + "grad_norm": 0.7911620140075684, + "learning_rate": 8.4682806285957e-05, + "loss": 0.2162, + "step": 44800 + }, + { + "epoch": 1.7301826325340746, + "grad_norm": 0.1271449327468872, + "learning_rate": 8.465706526635521e-05, + "loss": 0.1821, + "step": 44810 + }, + { + "epoch": 1.7305687478281016, + "grad_norm": 1.1765756607055664, + "learning_rate": 8.463132424675341e-05, + "loss": 0.1826, + "step": 44820 + }, + { + "epoch": 1.7309548631221283, + "grad_norm": 1.5291017293930054, + "learning_rate": 8.460558322715163e-05, + "loss": 0.2125, + "step": 44830 + }, + { + "epoch": 1.731340978416155, + "grad_norm": 1.1815464496612549, + "learning_rate": 8.457984220754984e-05, + "loss": 0.1488, + "step": 44840 + }, + { + "epoch": 1.7317270937101819, + "grad_norm": 2.127589464187622, + "learning_rate": 8.455410118794805e-05, + "loss": 0.3758, + "step": 44850 + }, + { + "epoch": 1.7321132090042086, + "grad_norm": 0.5369740724563599, + "learning_rate": 8.452836016834628e-05, + "loss": 0.2725, + "step": 44860 + }, + { + "epoch": 1.7324993242982356, + "grad_norm": 1.038955807685852, + "learning_rate": 8.450261914874449e-05, + "loss": 0.233, + "step": 44870 + }, + { + "epoch": 1.7328854395922622, + "grad_norm": 1.5458402633666992, + "learning_rate": 8.44768781291427e-05, + "loss": 0.3846, + "step": 44880 + }, + { + "epoch": 1.7332715548862891, + "grad_norm": 0.8488010168075562, + "learning_rate": 8.44511371095409e-05, + "loss": 0.2022, + "step": 44890 + }, + { + "epoch": 1.7336576701803157, + "grad_norm": 2.4855663776397705, + "learning_rate": 8.442539608993912e-05, + "loss": 0.2071, + "step": 44900 + }, + { + "epoch": 1.7340437854743427, + "grad_norm": 5.918638229370117, + "learning_rate": 8.439965507033733e-05, + "loss": 0.1962, + "step": 44910 + }, + { + "epoch": 1.7344299007683694, + "grad_norm": 1.9365994930267334, + "learning_rate": 8.437391405073556e-05, + "loss": 0.1313, + "step": 44920 + }, + { + "epoch": 1.7348160160623962, + "grad_norm": 1.1711100339889526, + "learning_rate": 8.434817303113377e-05, + "loss": 0.2938, + "step": 44930 + }, + { + "epoch": 1.735202131356423, + "grad_norm": 1.0350059270858765, + "learning_rate": 8.432243201153199e-05, + "loss": 0.1813, + "step": 44940 + }, + { + "epoch": 1.7355882466504498, + "grad_norm": 0.9831900596618652, + "learning_rate": 8.42966909919302e-05, + "loss": 0.1572, + "step": 44950 + }, + { + "epoch": 1.7359743619444767, + "grad_norm": 2.4830219745635986, + "learning_rate": 8.42709499723284e-05, + "loss": 0.2408, + "step": 44960 + }, + { + "epoch": 1.7363604772385033, + "grad_norm": 2.513667345046997, + "learning_rate": 8.424520895272661e-05, + "loss": 0.3392, + "step": 44970 + }, + { + "epoch": 1.7367465925325303, + "grad_norm": 1.1947827339172363, + "learning_rate": 8.421946793312484e-05, + "loss": 0.1314, + "step": 44980 + }, + { + "epoch": 1.737132707826557, + "grad_norm": 0.9299411773681641, + "learning_rate": 8.419372691352305e-05, + "loss": 0.1691, + "step": 44990 + }, + { + "epoch": 1.7375188231205838, + "grad_norm": 2.9388837814331055, + "learning_rate": 8.416798589392127e-05, 
+ "loss": 0.389, + "step": 45000 + }, + { + "epoch": 1.7379049384146106, + "grad_norm": 0.20148181915283203, + "learning_rate": 8.414224487431948e-05, + "loss": 0.2589, + "step": 45010 + }, + { + "epoch": 1.7382910537086373, + "grad_norm": 1.1276886463165283, + "learning_rate": 8.411650385471769e-05, + "loss": 0.2772, + "step": 45020 + }, + { + "epoch": 1.7386771690026643, + "grad_norm": 0.14743736386299133, + "learning_rate": 8.40907628351159e-05, + "loss": 0.2235, + "step": 45030 + }, + { + "epoch": 1.7390632842966909, + "grad_norm": 1.7028512954711914, + "learning_rate": 8.40650218155141e-05, + "loss": 0.262, + "step": 45040 + }, + { + "epoch": 1.7394493995907179, + "grad_norm": 0.27506566047668457, + "learning_rate": 8.403928079591233e-05, + "loss": 0.1546, + "step": 45050 + }, + { + "epoch": 1.7398355148847446, + "grad_norm": 0.11555200815200806, + "learning_rate": 8.401353977631055e-05, + "loss": 0.2591, + "step": 45060 + }, + { + "epoch": 1.7402216301787714, + "grad_norm": 2.141800880432129, + "learning_rate": 8.398779875670876e-05, + "loss": 0.1974, + "step": 45070 + }, + { + "epoch": 1.7406077454727982, + "grad_norm": 0.8879682421684265, + "learning_rate": 8.396205773710697e-05, + "loss": 0.2192, + "step": 45080 + }, + { + "epoch": 1.740993860766825, + "grad_norm": 0.5697862505912781, + "learning_rate": 8.393631671750519e-05, + "loss": 0.1556, + "step": 45090 + }, + { + "epoch": 1.741379976060852, + "grad_norm": 1.5055205821990967, + "learning_rate": 8.39105756979034e-05, + "loss": 0.2199, + "step": 45100 + }, + { + "epoch": 1.7417660913548785, + "grad_norm": 1.4144301414489746, + "learning_rate": 8.388483467830161e-05, + "loss": 0.1873, + "step": 45110 + }, + { + "epoch": 1.7421522066489055, + "grad_norm": 2.419147253036499, + "learning_rate": 8.385909365869983e-05, + "loss": 0.3444, + "step": 45120 + }, + { + "epoch": 1.742538321942932, + "grad_norm": 1.1189093589782715, + "learning_rate": 8.383335263909804e-05, + "loss": 0.2641, + "step": 45130 + }, + { + "epoch": 1.742924437236959, + "grad_norm": 0.44919779896736145, + "learning_rate": 8.380761161949625e-05, + "loss": 0.1945, + "step": 45140 + }, + { + "epoch": 1.7433105525309858, + "grad_norm": 3.4231624603271484, + "learning_rate": 8.378187059989447e-05, + "loss": 0.372, + "step": 45150 + }, + { + "epoch": 1.7436966678250125, + "grad_norm": 1.382497787475586, + "learning_rate": 8.375612958029268e-05, + "loss": 0.1532, + "step": 45160 + }, + { + "epoch": 1.7440827831190393, + "grad_norm": 1.9219565391540527, + "learning_rate": 8.373038856069089e-05, + "loss": 0.3255, + "step": 45170 + }, + { + "epoch": 1.744468898413066, + "grad_norm": 1.2347924709320068, + "learning_rate": 8.37046475410891e-05, + "loss": 0.2786, + "step": 45180 + }, + { + "epoch": 1.744855013707093, + "grad_norm": 0.1424872726202011, + "learning_rate": 8.367890652148732e-05, + "loss": 0.2561, + "step": 45190 + }, + { + "epoch": 1.7452411290011196, + "grad_norm": 0.1864137351512909, + "learning_rate": 8.365316550188553e-05, + "loss": 0.4099, + "step": 45200 + }, + { + "epoch": 1.7456272442951466, + "grad_norm": 0.13210314512252808, + "learning_rate": 8.362742448228375e-05, + "loss": 0.2684, + "step": 45210 + }, + { + "epoch": 1.7460133595891734, + "grad_norm": 0.14317531883716583, + "learning_rate": 8.360168346268196e-05, + "loss": 0.1777, + "step": 45220 + }, + { + "epoch": 1.7463994748832001, + "grad_norm": 0.9194528460502625, + "learning_rate": 8.357594244308019e-05, + "loss": 0.2047, + "step": 45230 + }, + { + "epoch": 1.746785590177227, + "grad_norm": 
0.13146016001701355, + "learning_rate": 8.355020142347839e-05, + "loss": 0.1659, + "step": 45240 + }, + { + "epoch": 1.7471717054712537, + "grad_norm": 0.804329514503479, + "learning_rate": 8.35244604038766e-05, + "loss": 0.1685, + "step": 45250 + }, + { + "epoch": 1.7475578207652807, + "grad_norm": 2.3637967109680176, + "learning_rate": 8.349871938427481e-05, + "loss": 0.234, + "step": 45260 + }, + { + "epoch": 1.7479439360593072, + "grad_norm": 0.550857663154602, + "learning_rate": 8.347297836467303e-05, + "loss": 0.2283, + "step": 45270 + }, + { + "epoch": 1.7483300513533342, + "grad_norm": 0.5919561982154846, + "learning_rate": 8.344723734507124e-05, + "loss": 0.229, + "step": 45280 + }, + { + "epoch": 1.7487161666473607, + "grad_norm": 2.1318750381469727, + "learning_rate": 8.342149632546945e-05, + "loss": 0.2161, + "step": 45290 + }, + { + "epoch": 1.7491022819413877, + "grad_norm": 2.0451972484588623, + "learning_rate": 8.339575530586768e-05, + "loss": 0.1845, + "step": 45300 + }, + { + "epoch": 1.7494883972354145, + "grad_norm": 0.1041000485420227, + "learning_rate": 8.337001428626588e-05, + "loss": 0.1473, + "step": 45310 + }, + { + "epoch": 1.7498745125294413, + "grad_norm": 1.8641316890716553, + "learning_rate": 8.334427326666409e-05, + "loss": 0.2287, + "step": 45320 + }, + { + "epoch": 1.7502606278234683, + "grad_norm": 0.5701905488967896, + "learning_rate": 8.33185322470623e-05, + "loss": 0.1356, + "step": 45330 + }, + { + "epoch": 1.7506467431174948, + "grad_norm": 0.78929603099823, + "learning_rate": 8.329279122746052e-05, + "loss": 0.1407, + "step": 45340 + }, + { + "epoch": 1.7510328584115218, + "grad_norm": 1.4745780229568481, + "learning_rate": 8.326705020785873e-05, + "loss": 0.2731, + "step": 45350 + }, + { + "epoch": 1.7514189737055483, + "grad_norm": 0.5546283721923828, + "learning_rate": 8.324130918825696e-05, + "loss": 0.252, + "step": 45360 + }, + { + "epoch": 1.7518050889995753, + "grad_norm": 0.6111850142478943, + "learning_rate": 8.321556816865517e-05, + "loss": 0.3657, + "step": 45370 + }, + { + "epoch": 1.752191204293602, + "grad_norm": 0.9489399790763855, + "learning_rate": 8.318982714905339e-05, + "loss": 0.2214, + "step": 45380 + }, + { + "epoch": 1.7525773195876289, + "grad_norm": 0.2951168119907379, + "learning_rate": 8.316408612945159e-05, + "loss": 0.2425, + "step": 45390 + }, + { + "epoch": 1.7529634348816556, + "grad_norm": 0.13124701380729675, + "learning_rate": 8.31383451098498e-05, + "loss": 0.1943, + "step": 45400 + }, + { + "epoch": 1.7533495501756824, + "grad_norm": 0.787786066532135, + "learning_rate": 8.311260409024801e-05, + "loss": 0.2009, + "step": 45410 + }, + { + "epoch": 1.7537356654697094, + "grad_norm": 0.30316632986068726, + "learning_rate": 8.308686307064624e-05, + "loss": 0.2426, + "step": 45420 + }, + { + "epoch": 1.754121780763736, + "grad_norm": 1.0703738927841187, + "learning_rate": 8.306112205104445e-05, + "loss": 0.2247, + "step": 45430 + }, + { + "epoch": 1.754507896057763, + "grad_norm": 0.2669302821159363, + "learning_rate": 8.303538103144267e-05, + "loss": 0.2076, + "step": 45440 + }, + { + "epoch": 1.7548940113517897, + "grad_norm": 2.0138015747070312, + "learning_rate": 8.300964001184088e-05, + "loss": 0.3064, + "step": 45450 + }, + { + "epoch": 1.7552801266458165, + "grad_norm": 4.026096820831299, + "learning_rate": 8.298389899223908e-05, + "loss": 0.2664, + "step": 45460 + }, + { + "epoch": 1.7556662419398432, + "grad_norm": 0.7756350636482239, + "learning_rate": 8.295815797263729e-05, + "loss": 0.224, + "step": 
45470 + }, + { + "epoch": 1.75605235723387, + "grad_norm": 0.8312183618545532, + "learning_rate": 8.293241695303552e-05, + "loss": 0.393, + "step": 45480 + }, + { + "epoch": 1.756438472527897, + "grad_norm": 1.6933070421218872, + "learning_rate": 8.290667593343373e-05, + "loss": 0.2006, + "step": 45490 + }, + { + "epoch": 1.7568245878219235, + "grad_norm": 0.7471343278884888, + "learning_rate": 8.288093491383195e-05, + "loss": 0.1356, + "step": 45500 + }, + { + "epoch": 1.7572107031159505, + "grad_norm": 2.5663211345672607, + "learning_rate": 8.285519389423016e-05, + "loss": 0.2545, + "step": 45510 + }, + { + "epoch": 1.757596818409977, + "grad_norm": 2.9056997299194336, + "learning_rate": 8.282945287462837e-05, + "loss": 0.2321, + "step": 45520 + }, + { + "epoch": 1.757982933704004, + "grad_norm": 0.6775566935539246, + "learning_rate": 8.280371185502657e-05, + "loss": 0.1703, + "step": 45530 + }, + { + "epoch": 1.7583690489980308, + "grad_norm": 1.0130302906036377, + "learning_rate": 8.27779708354248e-05, + "loss": 0.3204, + "step": 45540 + }, + { + "epoch": 1.7587551642920576, + "grad_norm": 0.8566673398017883, + "learning_rate": 8.275222981582301e-05, + "loss": 0.2069, + "step": 45550 + }, + { + "epoch": 1.7591412795860843, + "grad_norm": 1.0929473638534546, + "learning_rate": 8.272648879622122e-05, + "loss": 0.2631, + "step": 45560 + }, + { + "epoch": 1.7595273948801111, + "grad_norm": 1.7695512771606445, + "learning_rate": 8.270074777661944e-05, + "loss": 0.2028, + "step": 45570 + }, + { + "epoch": 1.759913510174138, + "grad_norm": 1.5217735767364502, + "learning_rate": 8.267500675701765e-05, + "loss": 0.1925, + "step": 45580 + }, + { + "epoch": 1.7602996254681647, + "grad_norm": 0.639868438243866, + "learning_rate": 8.264926573741586e-05, + "loss": 0.3473, + "step": 45590 + }, + { + "epoch": 1.7606857407621916, + "grad_norm": 0.3286214768886566, + "learning_rate": 8.262352471781408e-05, + "loss": 0.1136, + "step": 45600 + }, + { + "epoch": 1.7610718560562184, + "grad_norm": 1.0162252187728882, + "learning_rate": 8.259778369821229e-05, + "loss": 0.1108, + "step": 45610 + }, + { + "epoch": 1.7614579713502452, + "grad_norm": 0.8821909427642822, + "learning_rate": 8.25720426786105e-05, + "loss": 0.2852, + "step": 45620 + }, + { + "epoch": 1.761844086644272, + "grad_norm": 0.7297468781471252, + "learning_rate": 8.254630165900872e-05, + "loss": 0.187, + "step": 45630 + }, + { + "epoch": 1.7622302019382987, + "grad_norm": 2.8545424938201904, + "learning_rate": 8.252056063940693e-05, + "loss": 0.2441, + "step": 45640 + }, + { + "epoch": 1.7626163172323257, + "grad_norm": 0.5221932530403137, + "learning_rate": 8.249481961980514e-05, + "loss": 0.1934, + "step": 45650 + }, + { + "epoch": 1.7630024325263522, + "grad_norm": 1.1896415948867798, + "learning_rate": 8.246907860020336e-05, + "loss": 0.2149, + "step": 45660 + }, + { + "epoch": 1.7633885478203792, + "grad_norm": 0.28229987621307373, + "learning_rate": 8.244333758060157e-05, + "loss": 0.2514, + "step": 45670 + }, + { + "epoch": 1.763774663114406, + "grad_norm": 1.2415190935134888, + "learning_rate": 8.241759656099978e-05, + "loss": 0.4076, + "step": 45680 + }, + { + "epoch": 1.7641607784084328, + "grad_norm": 2.535752296447754, + "learning_rate": 8.2391855541398e-05, + "loss": 0.1628, + "step": 45690 + }, + { + "epoch": 1.7645468937024595, + "grad_norm": 1.8700125217437744, + "learning_rate": 8.236611452179621e-05, + "loss": 0.2549, + "step": 45700 + }, + { + "epoch": 1.7649330089964863, + "grad_norm": 0.5365914702415466, + 
"learning_rate": 8.234037350219442e-05, + "loss": 0.0801, + "step": 45710 + }, + { + "epoch": 1.7653191242905133, + "grad_norm": 3.5979111194610596, + "learning_rate": 8.231463248259264e-05, + "loss": 0.3129, + "step": 45720 + }, + { + "epoch": 1.7657052395845398, + "grad_norm": 2.5577943325042725, + "learning_rate": 8.228889146299086e-05, + "loss": 0.2171, + "step": 45730 + }, + { + "epoch": 1.7660913548785668, + "grad_norm": 2.128424882888794, + "learning_rate": 8.226315044338906e-05, + "loss": 0.1687, + "step": 45740 + }, + { + "epoch": 1.7664774701725934, + "grad_norm": 0.41651803255081177, + "learning_rate": 8.223740942378728e-05, + "loss": 0.2342, + "step": 45750 + }, + { + "epoch": 1.7668635854666204, + "grad_norm": 0.8309571146965027, + "learning_rate": 8.221166840418549e-05, + "loss": 0.2406, + "step": 45760 + }, + { + "epoch": 1.7672497007606471, + "grad_norm": 1.7219020128250122, + "learning_rate": 8.21859273845837e-05, + "loss": 0.2764, + "step": 45770 + }, + { + "epoch": 1.767635816054674, + "grad_norm": 1.5574841499328613, + "learning_rate": 8.216018636498192e-05, + "loss": 0.3413, + "step": 45780 + }, + { + "epoch": 1.7680219313487007, + "grad_norm": 0.48963573575019836, + "learning_rate": 8.213444534538014e-05, + "loss": 0.2381, + "step": 45790 + }, + { + "epoch": 1.7684080466427274, + "grad_norm": 2.597996711730957, + "learning_rate": 8.210870432577836e-05, + "loss": 0.2625, + "step": 45800 + }, + { + "epoch": 1.7687941619367544, + "grad_norm": 1.0208812952041626, + "learning_rate": 8.208296330617656e-05, + "loss": 0.3519, + "step": 45810 + }, + { + "epoch": 1.769180277230781, + "grad_norm": 0.6976078748703003, + "learning_rate": 8.205722228657477e-05, + "loss": 0.2075, + "step": 45820 + }, + { + "epoch": 1.769566392524808, + "grad_norm": 1.534698247909546, + "learning_rate": 8.203148126697298e-05, + "loss": 0.1831, + "step": 45830 + }, + { + "epoch": 1.7699525078188347, + "grad_norm": 1.5414319038391113, + "learning_rate": 8.20057402473712e-05, + "loss": 0.2466, + "step": 45840 + }, + { + "epoch": 1.7703386231128615, + "grad_norm": 0.6082043051719666, + "learning_rate": 8.197999922776941e-05, + "loss": 0.2598, + "step": 45850 + }, + { + "epoch": 1.7707247384068883, + "grad_norm": 1.3977060317993164, + "learning_rate": 8.195425820816764e-05, + "loss": 0.2733, + "step": 45860 + }, + { + "epoch": 1.771110853700915, + "grad_norm": 1.2991368770599365, + "learning_rate": 8.192851718856585e-05, + "loss": 0.3078, + "step": 45870 + }, + { + "epoch": 1.771496968994942, + "grad_norm": 0.2851855754852295, + "learning_rate": 8.190277616896405e-05, + "loss": 0.3315, + "step": 45880 + }, + { + "epoch": 1.7718830842889686, + "grad_norm": 0.7140517234802246, + "learning_rate": 8.187703514936226e-05, + "loss": 0.2454, + "step": 45890 + }, + { + "epoch": 1.7722691995829956, + "grad_norm": 2.3793671131134033, + "learning_rate": 8.185129412976048e-05, + "loss": 0.3749, + "step": 45900 + }, + { + "epoch": 1.7726553148770223, + "grad_norm": 0.6969908475875854, + "learning_rate": 8.182555311015869e-05, + "loss": 0.2849, + "step": 45910 + }, + { + "epoch": 1.773041430171049, + "grad_norm": 0.7409896850585938, + "learning_rate": 8.179981209055692e-05, + "loss": 0.3614, + "step": 45920 + }, + { + "epoch": 1.7734275454650759, + "grad_norm": 2.1562981605529785, + "learning_rate": 8.177407107095513e-05, + "loss": 0.3234, + "step": 45930 + }, + { + "epoch": 1.7738136607591026, + "grad_norm": 0.8214607834815979, + "learning_rate": 8.174833005135334e-05, + "loss": 0.3747, + "step": 45940 + }, + { + 
"epoch": 1.7741997760531296, + "grad_norm": 1.1292701959609985, + "learning_rate": 8.172258903175154e-05, + "loss": 0.2048, + "step": 45950 + }, + { + "epoch": 1.7745858913471562, + "grad_norm": 0.968303918838501, + "learning_rate": 8.169684801214976e-05, + "loss": 0.1863, + "step": 45960 + }, + { + "epoch": 1.7749720066411832, + "grad_norm": 0.1449265480041504, + "learning_rate": 8.167110699254797e-05, + "loss": 0.2583, + "step": 45970 + }, + { + "epoch": 1.7753581219352097, + "grad_norm": 0.9772675633430481, + "learning_rate": 8.16453659729462e-05, + "loss": 0.372, + "step": 45980 + }, + { + "epoch": 1.7757442372292367, + "grad_norm": 1.8147861957550049, + "learning_rate": 8.161962495334441e-05, + "loss": 0.2404, + "step": 45990 + }, + { + "epoch": 1.7761303525232635, + "grad_norm": 1.828444004058838, + "learning_rate": 8.159388393374262e-05, + "loss": 0.4309, + "step": 46000 + }, + { + "epoch": 1.7765164678172902, + "grad_norm": 0.4886840879917145, + "learning_rate": 8.156814291414084e-05, + "loss": 0.1927, + "step": 46010 + }, + { + "epoch": 1.776902583111317, + "grad_norm": 0.7208076119422913, + "learning_rate": 8.154240189453905e-05, + "loss": 0.2186, + "step": 46020 + }, + { + "epoch": 1.7772886984053438, + "grad_norm": 0.6111328601837158, + "learning_rate": 8.151666087493725e-05, + "loss": 0.2065, + "step": 46030 + }, + { + "epoch": 1.7776748136993707, + "grad_norm": 2.601994514465332, + "learning_rate": 8.149091985533548e-05, + "loss": 0.1138, + "step": 46040 + }, + { + "epoch": 1.7780609289933973, + "grad_norm": 2.0283939838409424, + "learning_rate": 8.146517883573369e-05, + "loss": 0.3461, + "step": 46050 + }, + { + "epoch": 1.7784470442874243, + "grad_norm": 1.329325795173645, + "learning_rate": 8.14394378161319e-05, + "loss": 0.1981, + "step": 46060 + }, + { + "epoch": 1.778833159581451, + "grad_norm": 0.9197964072227478, + "learning_rate": 8.141369679653012e-05, + "loss": 0.1853, + "step": 46070 + }, + { + "epoch": 1.7792192748754778, + "grad_norm": 0.5444622039794922, + "learning_rate": 8.138795577692833e-05, + "loss": 0.194, + "step": 46080 + }, + { + "epoch": 1.7796053901695046, + "grad_norm": 0.655691385269165, + "learning_rate": 8.136221475732654e-05, + "loss": 0.3279, + "step": 46090 + }, + { + "epoch": 1.7799915054635314, + "grad_norm": 1.0497363805770874, + "learning_rate": 8.133647373772474e-05, + "loss": 0.1208, + "step": 46100 + }, + { + "epoch": 1.7803776207575583, + "grad_norm": 1.5492208003997803, + "learning_rate": 8.131073271812297e-05, + "loss": 0.1155, + "step": 46110 + }, + { + "epoch": 1.780763736051585, + "grad_norm": 0.7815436720848083, + "learning_rate": 8.128499169852118e-05, + "loss": 0.3621, + "step": 46120 + }, + { + "epoch": 1.7811498513456119, + "grad_norm": 0.2011369913816452, + "learning_rate": 8.12592506789194e-05, + "loss": 0.1412, + "step": 46130 + }, + { + "epoch": 1.7815359666396386, + "grad_norm": 0.5177563428878784, + "learning_rate": 8.123350965931761e-05, + "loss": 0.2485, + "step": 46140 + }, + { + "epoch": 1.7819220819336654, + "grad_norm": 1.1141633987426758, + "learning_rate": 8.120776863971582e-05, + "loss": 0.2376, + "step": 46150 + }, + { + "epoch": 1.7823081972276922, + "grad_norm": 0.3764217495918274, + "learning_rate": 8.118202762011404e-05, + "loss": 0.2021, + "step": 46160 + }, + { + "epoch": 1.782694312521719, + "grad_norm": 0.7420102953910828, + "learning_rate": 8.115628660051225e-05, + "loss": 0.374, + "step": 46170 + }, + { + "epoch": 1.783080427815746, + "grad_norm": 1.4147247076034546, + "learning_rate": 
8.113054558091046e-05, + "loss": 0.1202, + "step": 46180 + }, + { + "epoch": 1.7834665431097725, + "grad_norm": 0.9888404011726379, + "learning_rate": 8.110480456130868e-05, + "loss": 0.2564, + "step": 46190 + }, + { + "epoch": 1.7838526584037995, + "grad_norm": 1.2556307315826416, + "learning_rate": 8.107906354170689e-05, + "loss": 0.2403, + "step": 46200 + }, + { + "epoch": 1.784238773697826, + "grad_norm": 1.5373485088348389, + "learning_rate": 8.10533225221051e-05, + "loss": 0.23, + "step": 46210 + }, + { + "epoch": 1.784624888991853, + "grad_norm": 1.1403309106826782, + "learning_rate": 8.102758150250332e-05, + "loss": 0.2383, + "step": 46220 + }, + { + "epoch": 1.7850110042858798, + "grad_norm": 1.4236074686050415, + "learning_rate": 8.100184048290153e-05, + "loss": 0.2121, + "step": 46230 + }, + { + "epoch": 1.7853971195799065, + "grad_norm": 1.9537183046340942, + "learning_rate": 8.097609946329974e-05, + "loss": 0.2512, + "step": 46240 + }, + { + "epoch": 1.7857832348739333, + "grad_norm": 0.10802419483661652, + "learning_rate": 8.095035844369796e-05, + "loss": 0.2871, + "step": 46250 + }, + { + "epoch": 1.78616935016796, + "grad_norm": 0.9327038526535034, + "learning_rate": 8.092461742409617e-05, + "loss": 0.0487, + "step": 46260 + }, + { + "epoch": 1.786555465461987, + "grad_norm": 2.114015817642212, + "learning_rate": 8.089887640449438e-05, + "loss": 0.3128, + "step": 46270 + }, + { + "epoch": 1.7869415807560136, + "grad_norm": 1.2681559324264526, + "learning_rate": 8.08731353848926e-05, + "loss": 0.2883, + "step": 46280 + }, + { + "epoch": 1.7873276960500406, + "grad_norm": 1.0767977237701416, + "learning_rate": 8.084739436529082e-05, + "loss": 0.1632, + "step": 46290 + }, + { + "epoch": 1.7877138113440674, + "grad_norm": 0.9782537221908569, + "learning_rate": 8.082165334568902e-05, + "loss": 0.2489, + "step": 46300 + }, + { + "epoch": 1.7880999266380941, + "grad_norm": 0.3115275204181671, + "learning_rate": 8.079591232608724e-05, + "loss": 0.3691, + "step": 46310 + }, + { + "epoch": 1.788486041932121, + "grad_norm": 1.7753130197525024, + "learning_rate": 8.077017130648545e-05, + "loss": 0.2362, + "step": 46320 + }, + { + "epoch": 1.7888721572261477, + "grad_norm": 1.5351624488830566, + "learning_rate": 8.074443028688366e-05, + "loss": 0.4439, + "step": 46330 + }, + { + "epoch": 1.7892582725201747, + "grad_norm": 0.15024085342884064, + "learning_rate": 8.071868926728188e-05, + "loss": 0.2309, + "step": 46340 + }, + { + "epoch": 1.7896443878142012, + "grad_norm": 0.3030475974082947, + "learning_rate": 8.069294824768009e-05, + "loss": 0.1032, + "step": 46350 + }, + { + "epoch": 1.7900305031082282, + "grad_norm": 2.4676828384399414, + "learning_rate": 8.066720722807832e-05, + "loss": 0.4874, + "step": 46360 + }, + { + "epoch": 1.790416618402255, + "grad_norm": 1.4394946098327637, + "learning_rate": 8.064146620847653e-05, + "loss": 0.1635, + "step": 46370 + }, + { + "epoch": 1.7908027336962817, + "grad_norm": 0.05110672488808632, + "learning_rate": 8.061572518887473e-05, + "loss": 0.1001, + "step": 46380 + }, + { + "epoch": 1.7911888489903085, + "grad_norm": 0.9311584830284119, + "learning_rate": 8.058998416927294e-05, + "loss": 0.288, + "step": 46390 + }, + { + "epoch": 1.7915749642843353, + "grad_norm": 0.869853675365448, + "learning_rate": 8.056424314967116e-05, + "loss": 0.3104, + "step": 46400 + }, + { + "epoch": 1.7919610795783623, + "grad_norm": 1.8579514026641846, + "learning_rate": 8.053850213006937e-05, + "loss": 0.1846, + "step": 46410 + }, + { + "epoch": 
1.7923471948723888, + "grad_norm": 1.0974096059799194, + "learning_rate": 8.05127611104676e-05, + "loss": 0.2144, + "step": 46420 + }, + { + "epoch": 1.7927333101664158, + "grad_norm": 1.3006702661514282, + "learning_rate": 8.048702009086581e-05, + "loss": 0.4802, + "step": 46430 + }, + { + "epoch": 1.7931194254604423, + "grad_norm": 0.41811448335647583, + "learning_rate": 8.046127907126402e-05, + "loss": 0.3352, + "step": 46440 + }, + { + "epoch": 1.7935055407544693, + "grad_norm": 1.1778554916381836, + "learning_rate": 8.043553805166222e-05, + "loss": 0.2485, + "step": 46450 + }, + { + "epoch": 1.793891656048496, + "grad_norm": 0.10794985294342041, + "learning_rate": 8.040979703206044e-05, + "loss": 0.3235, + "step": 46460 + }, + { + "epoch": 1.7942777713425229, + "grad_norm": 0.6999881267547607, + "learning_rate": 8.038405601245865e-05, + "loss": 0.2148, + "step": 46470 + }, + { + "epoch": 1.7946638866365496, + "grad_norm": 1.2335025072097778, + "learning_rate": 8.035831499285688e-05, + "loss": 0.2128, + "step": 46480 + }, + { + "epoch": 1.7950500019305764, + "grad_norm": 0.23150336742401123, + "learning_rate": 8.033257397325509e-05, + "loss": 0.3316, + "step": 46490 + }, + { + "epoch": 1.7954361172246034, + "grad_norm": 2.0523316860198975, + "learning_rate": 8.03068329536533e-05, + "loss": 0.1892, + "step": 46500 + }, + { + "epoch": 1.79582223251863, + "grad_norm": 1.127738356590271, + "learning_rate": 8.028109193405152e-05, + "loss": 0.2521, + "step": 46510 + }, + { + "epoch": 1.796208347812657, + "grad_norm": 1.5948694944381714, + "learning_rate": 8.025535091444972e-05, + "loss": 0.3407, + "step": 46520 + }, + { + "epoch": 1.7965944631066837, + "grad_norm": 0.6383172273635864, + "learning_rate": 8.022960989484793e-05, + "loss": 0.271, + "step": 46530 + }, + { + "epoch": 1.7969805784007105, + "grad_norm": 1.016988754272461, + "learning_rate": 8.020386887524616e-05, + "loss": 0.1699, + "step": 46540 + }, + { + "epoch": 1.7973666936947372, + "grad_norm": 0.7680085897445679, + "learning_rate": 8.017812785564437e-05, + "loss": 0.193, + "step": 46550 + }, + { + "epoch": 1.797752808988764, + "grad_norm": 0.36858290433883667, + "learning_rate": 8.015238683604258e-05, + "loss": 0.2332, + "step": 46560 + }, + { + "epoch": 1.798138924282791, + "grad_norm": 6.89350700378418, + "learning_rate": 8.01266458164408e-05, + "loss": 0.6444, + "step": 46570 + }, + { + "epoch": 1.7985250395768175, + "grad_norm": 0.3459351360797882, + "learning_rate": 8.010090479683901e-05, + "loss": 0.2327, + "step": 46580 + }, + { + "epoch": 1.7989111548708445, + "grad_norm": 1.195894718170166, + "learning_rate": 8.007516377723722e-05, + "loss": 0.2167, + "step": 46590 + }, + { + "epoch": 1.799297270164871, + "grad_norm": 3.738119602203369, + "learning_rate": 8.004942275763542e-05, + "loss": 0.1946, + "step": 46600 + }, + { + "epoch": 1.799683385458898, + "grad_norm": 1.207139015197754, + "learning_rate": 8.002368173803365e-05, + "loss": 0.2888, + "step": 46610 + }, + { + "epoch": 1.8000695007529248, + "grad_norm": 0.2352428138256073, + "learning_rate": 7.999794071843186e-05, + "loss": 0.301, + "step": 46620 + }, + { + "epoch": 1.8004556160469516, + "grad_norm": 0.4833974242210388, + "learning_rate": 7.997219969883008e-05, + "loss": 0.1984, + "step": 46630 + }, + { + "epoch": 1.8008417313409786, + "grad_norm": 0.27819526195526123, + "learning_rate": 7.994645867922829e-05, + "loss": 0.229, + "step": 46640 + }, + { + "epoch": 1.8012278466350051, + "grad_norm": 1.2809014320373535, + "learning_rate": 7.99207176596265e-05, + 
"loss": 0.328, + "step": 46650 + }, + { + "epoch": 1.8016139619290321, + "grad_norm": 3.3877575397491455, + "learning_rate": 7.989497664002472e-05, + "loss": 0.236, + "step": 46660 + }, + { + "epoch": 1.8020000772230587, + "grad_norm": 0.5104680061340332, + "learning_rate": 7.986923562042293e-05, + "loss": 0.1853, + "step": 46670 + }, + { + "epoch": 1.8023861925170857, + "grad_norm": 0.6869563460350037, + "learning_rate": 7.984349460082114e-05, + "loss": 0.1743, + "step": 46680 + }, + { + "epoch": 1.8027723078111124, + "grad_norm": 1.9329041242599487, + "learning_rate": 7.981775358121936e-05, + "loss": 0.3057, + "step": 46690 + }, + { + "epoch": 1.8031584231051392, + "grad_norm": 0.9997574090957642, + "learning_rate": 7.979201256161757e-05, + "loss": 0.2145, + "step": 46700 + }, + { + "epoch": 1.803544538399166, + "grad_norm": 1.173745036125183, + "learning_rate": 7.976627154201578e-05, + "loss": 0.2315, + "step": 46710 + }, + { + "epoch": 1.8039306536931927, + "grad_norm": 0.9252222180366516, + "learning_rate": 7.9740530522414e-05, + "loss": 0.3173, + "step": 46720 + }, + { + "epoch": 1.8043167689872197, + "grad_norm": 1.316101312637329, + "learning_rate": 7.971478950281221e-05, + "loss": 0.1249, + "step": 46730 + }, + { + "epoch": 1.8047028842812463, + "grad_norm": 1.1243810653686523, + "learning_rate": 7.968904848321042e-05, + "loss": 0.3688, + "step": 46740 + }, + { + "epoch": 1.8050889995752732, + "grad_norm": 2.3322315216064453, + "learning_rate": 7.966330746360864e-05, + "loss": 0.2379, + "step": 46750 + }, + { + "epoch": 1.8054751148693, + "grad_norm": 1.534040927886963, + "learning_rate": 7.963756644400685e-05, + "loss": 0.365, + "step": 46760 + }, + { + "epoch": 1.8058612301633268, + "grad_norm": 1.326835036277771, + "learning_rate": 7.961182542440506e-05, + "loss": 0.1909, + "step": 46770 + }, + { + "epoch": 1.8062473454573535, + "grad_norm": 1.8357387781143188, + "learning_rate": 7.958608440480328e-05, + "loss": 0.3187, + "step": 46780 + }, + { + "epoch": 1.8066334607513803, + "grad_norm": 0.9270907044410706, + "learning_rate": 7.95603433852015e-05, + "loss": 0.2022, + "step": 46790 + }, + { + "epoch": 1.8070195760454073, + "grad_norm": 0.5300419330596924, + "learning_rate": 7.95346023655997e-05, + "loss": 0.2015, + "step": 46800 + }, + { + "epoch": 1.8074056913394339, + "grad_norm": 0.35859522223472595, + "learning_rate": 7.950886134599792e-05, + "loss": 0.1764, + "step": 46810 + }, + { + "epoch": 1.8077918066334608, + "grad_norm": 0.5568848252296448, + "learning_rate": 7.948312032639613e-05, + "loss": 0.1862, + "step": 46820 + }, + { + "epoch": 1.8081779219274874, + "grad_norm": 0.22550754249095917, + "learning_rate": 7.945737930679434e-05, + "loss": 0.2237, + "step": 46830 + }, + { + "epoch": 1.8085640372215144, + "grad_norm": 1.2444871664047241, + "learning_rate": 7.943163828719256e-05, + "loss": 0.2077, + "step": 46840 + }, + { + "epoch": 1.8089501525155411, + "grad_norm": 0.9609934091567993, + "learning_rate": 7.940589726759077e-05, + "loss": 0.2717, + "step": 46850 + }, + { + "epoch": 1.809336267809568, + "grad_norm": 0.2863350808620453, + "learning_rate": 7.9380156247989e-05, + "loss": 0.3284, + "step": 46860 + }, + { + "epoch": 1.8097223831035947, + "grad_norm": 1.6350208520889282, + "learning_rate": 7.93544152283872e-05, + "loss": 0.0919, + "step": 46870 + }, + { + "epoch": 1.8101084983976214, + "grad_norm": 0.5808874368667603, + "learning_rate": 7.932867420878541e-05, + "loss": 0.165, + "step": 46880 + }, + { + "epoch": 1.8104946136916484, + "grad_norm": 
0.5033291578292847, + "learning_rate": 7.930293318918362e-05, + "loss": 0.192, + "step": 46890 + }, + { + "epoch": 1.810880728985675, + "grad_norm": 3.8204874992370605, + "learning_rate": 7.927719216958184e-05, + "loss": 0.3885, + "step": 46900 + }, + { + "epoch": 1.811266844279702, + "grad_norm": 0.9689306020736694, + "learning_rate": 7.925145114998005e-05, + "loss": 0.2609, + "step": 46910 + }, + { + "epoch": 1.8116529595737287, + "grad_norm": 2.7360000610351562, + "learning_rate": 7.922571013037828e-05, + "loss": 0.1955, + "step": 46920 + }, + { + "epoch": 1.8120390748677555, + "grad_norm": 1.2181187868118286, + "learning_rate": 7.919996911077649e-05, + "loss": 0.251, + "step": 46930 + }, + { + "epoch": 1.8124251901617823, + "grad_norm": 0.2421361356973648, + "learning_rate": 7.91742280911747e-05, + "loss": 0.1401, + "step": 46940 + }, + { + "epoch": 1.812811305455809, + "grad_norm": 1.1567500829696655, + "learning_rate": 7.91484870715729e-05, + "loss": 0.2303, + "step": 46950 + }, + { + "epoch": 1.813197420749836, + "grad_norm": 2.173985481262207, + "learning_rate": 7.912274605197112e-05, + "loss": 0.3141, + "step": 46960 + }, + { + "epoch": 1.8135835360438626, + "grad_norm": 0.21923333406448364, + "learning_rate": 7.909700503236933e-05, + "loss": 0.3109, + "step": 46970 + }, + { + "epoch": 1.8139696513378896, + "grad_norm": 2.7012853622436523, + "learning_rate": 7.907126401276756e-05, + "loss": 0.2912, + "step": 46980 + }, + { + "epoch": 1.8143557666319163, + "grad_norm": 1.6185250282287598, + "learning_rate": 7.904552299316577e-05, + "loss": 0.1989, + "step": 46990 + }, + { + "epoch": 1.814741881925943, + "grad_norm": 1.2078602313995361, + "learning_rate": 7.901978197356398e-05, + "loss": 0.2906, + "step": 47000 + }, + { + "epoch": 1.8151279972199699, + "grad_norm": 0.8695535063743591, + "learning_rate": 7.89940409539622e-05, + "loss": 0.2486, + "step": 47010 + }, + { + "epoch": 1.8155141125139966, + "grad_norm": 1.7880644798278809, + "learning_rate": 7.89682999343604e-05, + "loss": 0.2963, + "step": 47020 + }, + { + "epoch": 1.8159002278080236, + "grad_norm": 1.9220221042633057, + "learning_rate": 7.894255891475861e-05, + "loss": 0.2822, + "step": 47030 + }, + { + "epoch": 1.8162863431020502, + "grad_norm": 0.2507815361022949, + "learning_rate": 7.891681789515684e-05, + "loss": 0.165, + "step": 47040 + }, + { + "epoch": 1.8166724583960772, + "grad_norm": 0.8847838640213013, + "learning_rate": 7.889107687555505e-05, + "loss": 0.1536, + "step": 47050 + }, + { + "epoch": 1.8170585736901037, + "grad_norm": 0.7972745895385742, + "learning_rate": 7.886533585595326e-05, + "loss": 0.4152, + "step": 47060 + }, + { + "epoch": 1.8174446889841307, + "grad_norm": 3.005274534225464, + "learning_rate": 7.883959483635148e-05, + "loss": 0.2491, + "step": 47070 + }, + { + "epoch": 1.8178308042781575, + "grad_norm": 0.19801591336727142, + "learning_rate": 7.881385381674969e-05, + "loss": 0.2671, + "step": 47080 + }, + { + "epoch": 1.8182169195721842, + "grad_norm": 0.43803074955940247, + "learning_rate": 7.878811279714789e-05, + "loss": 0.1101, + "step": 47090 + }, + { + "epoch": 1.818603034866211, + "grad_norm": 2.517371416091919, + "learning_rate": 7.876237177754612e-05, + "loss": 0.2759, + "step": 47100 + }, + { + "epoch": 1.8189891501602378, + "grad_norm": 1.0714353322982788, + "learning_rate": 7.873663075794433e-05, + "loss": 0.2247, + "step": 47110 + }, + { + "epoch": 1.8193752654542648, + "grad_norm": 1.3440790176391602, + "learning_rate": 7.871088973834254e-05, + "loss": 0.1309, + "step": 
47120 + }, + { + "epoch": 1.8197613807482913, + "grad_norm": 0.10665372759103775, + "learning_rate": 7.868514871874076e-05, + "loss": 0.3176, + "step": 47130 + }, + { + "epoch": 1.8201474960423183, + "grad_norm": 0.6812877655029297, + "learning_rate": 7.865940769913897e-05, + "loss": 0.1709, + "step": 47140 + }, + { + "epoch": 1.820533611336345, + "grad_norm": 0.3776918649673462, + "learning_rate": 7.863366667953718e-05, + "loss": 0.1949, + "step": 47150 + }, + { + "epoch": 1.8209197266303718, + "grad_norm": 0.7111637592315674, + "learning_rate": 7.860792565993538e-05, + "loss": 0.2463, + "step": 47160 + }, + { + "epoch": 1.8213058419243986, + "grad_norm": 0.5845387578010559, + "learning_rate": 7.858218464033361e-05, + "loss": 0.285, + "step": 47170 + }, + { + "epoch": 1.8216919572184254, + "grad_norm": 0.394326776266098, + "learning_rate": 7.855644362073182e-05, + "loss": 0.1547, + "step": 47180 + }, + { + "epoch": 1.8220780725124524, + "grad_norm": 1.6686782836914062, + "learning_rate": 7.853070260113004e-05, + "loss": 0.2764, + "step": 47190 + }, + { + "epoch": 1.822464187806479, + "grad_norm": 0.4293366074562073, + "learning_rate": 7.850496158152825e-05, + "loss": 0.2277, + "step": 47200 + }, + { + "epoch": 1.8228503031005059, + "grad_norm": 0.5316643118858337, + "learning_rate": 7.847922056192646e-05, + "loss": 0.2015, + "step": 47210 + }, + { + "epoch": 1.8232364183945327, + "grad_norm": 1.1477640867233276, + "learning_rate": 7.845347954232468e-05, + "loss": 0.4081, + "step": 47220 + }, + { + "epoch": 1.8236225336885594, + "grad_norm": 1.0033808946609497, + "learning_rate": 7.842773852272289e-05, + "loss": 0.1356, + "step": 47230 + }, + { + "epoch": 1.8240086489825862, + "grad_norm": 1.0021567344665527, + "learning_rate": 7.84019975031211e-05, + "loss": 0.161, + "step": 47240 + }, + { + "epoch": 1.824394764276613, + "grad_norm": 0.733974277973175, + "learning_rate": 7.837625648351932e-05, + "loss": 0.1246, + "step": 47250 + }, + { + "epoch": 1.82478087957064, + "grad_norm": 1.1681737899780273, + "learning_rate": 7.835051546391753e-05, + "loss": 0.3287, + "step": 47260 + }, + { + "epoch": 1.8251669948646665, + "grad_norm": 2.490657329559326, + "learning_rate": 7.832477444431574e-05, + "loss": 0.2203, + "step": 47270 + }, + { + "epoch": 1.8255531101586935, + "grad_norm": 1.1667157411575317, + "learning_rate": 7.829903342471396e-05, + "loss": 0.2827, + "step": 47280 + }, + { + "epoch": 1.82593922545272, + "grad_norm": 0.31772786378860474, + "learning_rate": 7.827329240511217e-05, + "loss": 0.1664, + "step": 47290 + }, + { + "epoch": 1.826325340746747, + "grad_norm": 1.1539255380630493, + "learning_rate": 7.824755138551038e-05, + "loss": 0.207, + "step": 47300 + }, + { + "epoch": 1.8267114560407738, + "grad_norm": 0.9592777490615845, + "learning_rate": 7.82218103659086e-05, + "loss": 0.193, + "step": 47310 + }, + { + "epoch": 1.8270975713348006, + "grad_norm": 0.6902135014533997, + "learning_rate": 7.819606934630681e-05, + "loss": 0.2119, + "step": 47320 + }, + { + "epoch": 1.8274836866288273, + "grad_norm": 1.4007855653762817, + "learning_rate": 7.817032832670502e-05, + "loss": 0.2046, + "step": 47330 + }, + { + "epoch": 1.827869801922854, + "grad_norm": 1.520080804824829, + "learning_rate": 7.814458730710324e-05, + "loss": 0.2861, + "step": 47340 + }, + { + "epoch": 1.828255917216881, + "grad_norm": 2.576406478881836, + "learning_rate": 7.811884628750146e-05, + "loss": 0.2615, + "step": 47350 + }, + { + "epoch": 1.8286420325109076, + "grad_norm": 1.499143362045288, + "learning_rate": 
7.809310526789968e-05, + "loss": 0.4094, + "step": 47360 + }, + { + "epoch": 1.8290281478049346, + "grad_norm": 0.9561934471130371, + "learning_rate": 7.806736424829787e-05, + "loss": 0.2121, + "step": 47370 + }, + { + "epoch": 1.8294142630989614, + "grad_norm": 1.838971734046936, + "learning_rate": 7.804162322869609e-05, + "loss": 0.325, + "step": 47380 + }, + { + "epoch": 1.8298003783929881, + "grad_norm": 1.1759018898010254, + "learning_rate": 7.80158822090943e-05, + "loss": 0.1095, + "step": 47390 + }, + { + "epoch": 1.830186493687015, + "grad_norm": 0.4624423086643219, + "learning_rate": 7.799014118949251e-05, + "loss": 0.264, + "step": 47400 + }, + { + "epoch": 1.8305726089810417, + "grad_norm": 2.0572640895843506, + "learning_rate": 7.796440016989073e-05, + "loss": 0.3171, + "step": 47410 + }, + { + "epoch": 1.8309587242750687, + "grad_norm": 0.959831714630127, + "learning_rate": 7.793865915028896e-05, + "loss": 0.1588, + "step": 47420 + }, + { + "epoch": 1.8313448395690952, + "grad_norm": 0.7953433394432068, + "learning_rate": 7.791291813068717e-05, + "loss": 0.3992, + "step": 47430 + }, + { + "epoch": 1.8317309548631222, + "grad_norm": 0.8194203972816467, + "learning_rate": 7.788717711108537e-05, + "loss": 0.2718, + "step": 47440 + }, + { + "epoch": 1.832117070157149, + "grad_norm": 1.7283341884613037, + "learning_rate": 7.786143609148358e-05, + "loss": 0.3977, + "step": 47450 + }, + { + "epoch": 1.8325031854511757, + "grad_norm": 0.15646076202392578, + "learning_rate": 7.78356950718818e-05, + "loss": 0.2449, + "step": 47460 + }, + { + "epoch": 1.8328893007452025, + "grad_norm": 1.0211836099624634, + "learning_rate": 7.780995405228001e-05, + "loss": 0.2022, + "step": 47470 + }, + { + "epoch": 1.8332754160392293, + "grad_norm": 0.8658961653709412, + "learning_rate": 7.778421303267823e-05, + "loss": 0.2429, + "step": 47480 + }, + { + "epoch": 1.8336615313332563, + "grad_norm": 1.2359483242034912, + "learning_rate": 7.775847201307645e-05, + "loss": 0.3284, + "step": 47490 + }, + { + "epoch": 1.8340476466272828, + "grad_norm": 1.2980804443359375, + "learning_rate": 7.773273099347466e-05, + "loss": 0.1205, + "step": 47500 + }, + { + "epoch": 1.8344337619213098, + "grad_norm": 0.06653542071580887, + "learning_rate": 7.770698997387286e-05, + "loss": 0.1376, + "step": 47510 + }, + { + "epoch": 1.8348198772153363, + "grad_norm": 1.0570144653320312, + "learning_rate": 7.768124895427107e-05, + "loss": 0.1288, + "step": 47520 + }, + { + "epoch": 1.8352059925093633, + "grad_norm": 1.0040183067321777, + "learning_rate": 7.765550793466929e-05, + "loss": 0.1825, + "step": 47530 + }, + { + "epoch": 1.83559210780339, + "grad_norm": 2.5596699714660645, + "learning_rate": 7.762976691506751e-05, + "loss": 0.1764, + "step": 47540 + }, + { + "epoch": 1.8359782230974169, + "grad_norm": 4.138411521911621, + "learning_rate": 7.760402589546573e-05, + "loss": 0.2783, + "step": 47550 + }, + { + "epoch": 1.8363643383914436, + "grad_norm": 0.8152881860733032, + "learning_rate": 7.757828487586394e-05, + "loss": 0.1424, + "step": 47560 + }, + { + "epoch": 1.8367504536854704, + "grad_norm": 2.608886480331421, + "learning_rate": 7.755254385626215e-05, + "loss": 0.161, + "step": 47570 + }, + { + "epoch": 1.8371365689794974, + "grad_norm": 2.870406150817871, + "learning_rate": 7.752680283666037e-05, + "loss": 0.6419, + "step": 47580 + }, + { + "epoch": 1.837522684273524, + "grad_norm": 1.2127447128295898, + "learning_rate": 7.750106181705857e-05, + "loss": 0.1987, + "step": 47590 + }, + { + "epoch": 
1.837908799567551, + "grad_norm": 0.44798439741134644, + "learning_rate": 7.74753207974568e-05, + "loss": 0.2313, + "step": 47600 + }, + { + "epoch": 1.8382949148615777, + "grad_norm": 0.434055894613266, + "learning_rate": 7.744957977785501e-05, + "loss": 0.1227, + "step": 47610 + }, + { + "epoch": 1.8386810301556045, + "grad_norm": 1.0160471200942993, + "learning_rate": 7.742383875825322e-05, + "loss": 0.2699, + "step": 47620 + }, + { + "epoch": 1.8390671454496312, + "grad_norm": 2.0095553398132324, + "learning_rate": 7.739809773865143e-05, + "loss": 0.2666, + "step": 47630 + }, + { + "epoch": 1.839453260743658, + "grad_norm": 2.849498748779297, + "learning_rate": 7.737235671904965e-05, + "loss": 0.2798, + "step": 47640 + }, + { + "epoch": 1.839839376037685, + "grad_norm": 2.336435556411743, + "learning_rate": 7.734661569944786e-05, + "loss": 0.3048, + "step": 47650 + }, + { + "epoch": 1.8402254913317115, + "grad_norm": 0.8438564538955688, + "learning_rate": 7.732087467984606e-05, + "loss": 0.2876, + "step": 47660 + }, + { + "epoch": 1.8406116066257385, + "grad_norm": 0.514867901802063, + "learning_rate": 7.729513366024429e-05, + "loss": 0.2249, + "step": 47670 + }, + { + "epoch": 1.8409977219197653, + "grad_norm": 4.391468048095703, + "learning_rate": 7.72693926406425e-05, + "loss": 0.3918, + "step": 47680 + }, + { + "epoch": 1.841383837213792, + "grad_norm": 2.4497931003570557, + "learning_rate": 7.724365162104071e-05, + "loss": 0.3368, + "step": 47690 + }, + { + "epoch": 1.8417699525078188, + "grad_norm": 2.884559392929077, + "learning_rate": 7.721791060143893e-05, + "loss": 0.2406, + "step": 47700 + }, + { + "epoch": 1.8421560678018456, + "grad_norm": 1.099225640296936, + "learning_rate": 7.719216958183714e-05, + "loss": 0.1759, + "step": 47710 + }, + { + "epoch": 1.8425421830958726, + "grad_norm": 0.881747305393219, + "learning_rate": 7.716642856223535e-05, + "loss": 0.2377, + "step": 47720 + }, + { + "epoch": 1.8429282983898991, + "grad_norm": 1.7341536283493042, + "learning_rate": 7.714068754263357e-05, + "loss": 0.1593, + "step": 47730 + }, + { + "epoch": 1.8433144136839261, + "grad_norm": 0.8997848629951477, + "learning_rate": 7.711494652303178e-05, + "loss": 0.2198, + "step": 47740 + }, + { + "epoch": 1.8437005289779527, + "grad_norm": 0.38423866033554077, + "learning_rate": 7.708920550343e-05, + "loss": 0.2539, + "step": 47750 + }, + { + "epoch": 1.8440866442719797, + "grad_norm": 1.0857203006744385, + "learning_rate": 7.706346448382821e-05, + "loss": 0.1595, + "step": 47760 + }, + { + "epoch": 1.8444727595660064, + "grad_norm": 1.5009055137634277, + "learning_rate": 7.703772346422642e-05, + "loss": 0.2478, + "step": 47770 + }, + { + "epoch": 1.8448588748600332, + "grad_norm": 0.13288016617298126, + "learning_rate": 7.701198244462463e-05, + "loss": 0.2184, + "step": 47780 + }, + { + "epoch": 1.84524499015406, + "grad_norm": 1.167777180671692, + "learning_rate": 7.698624142502285e-05, + "loss": 0.3157, + "step": 47790 + }, + { + "epoch": 1.8456311054480867, + "grad_norm": 0.5168697834014893, + "learning_rate": 7.696050040542106e-05, + "loss": 0.1543, + "step": 47800 + }, + { + "epoch": 1.8460172207421137, + "grad_norm": 0.7516908645629883, + "learning_rate": 7.693475938581927e-05, + "loss": 0.1869, + "step": 47810 + }, + { + "epoch": 1.8464033360361403, + "grad_norm": 3.0962419509887695, + "learning_rate": 7.690901836621749e-05, + "loss": 0.3891, + "step": 47820 + }, + { + "epoch": 1.8467894513301673, + "grad_norm": 0.05181325227022171, + "learning_rate": 7.68832773466157e-05, + 
"loss": 0.3427, + "step": 47830 + }, + { + "epoch": 1.847175566624194, + "grad_norm": 2.856113910675049, + "learning_rate": 7.685753632701391e-05, + "loss": 0.2148, + "step": 47840 + }, + { + "epoch": 1.8475616819182208, + "grad_norm": 0.4950585663318634, + "learning_rate": 7.683179530741214e-05, + "loss": 0.2689, + "step": 47850 + }, + { + "epoch": 1.8479477972122476, + "grad_norm": 2.390775680541992, + "learning_rate": 7.680605428781034e-05, + "loss": 0.3389, + "step": 47860 + }, + { + "epoch": 1.8483339125062743, + "grad_norm": 0.1536688208580017, + "learning_rate": 7.678031326820855e-05, + "loss": 0.1811, + "step": 47870 + }, + { + "epoch": 1.8487200278003013, + "grad_norm": 0.26756611466407776, + "learning_rate": 7.675457224860677e-05, + "loss": 0.1735, + "step": 47880 + }, + { + "epoch": 1.8491061430943279, + "grad_norm": 1.3034265041351318, + "learning_rate": 7.672883122900498e-05, + "loss": 0.1554, + "step": 47890 + }, + { + "epoch": 1.8494922583883548, + "grad_norm": 1.1781542301177979, + "learning_rate": 7.67030902094032e-05, + "loss": 0.4027, + "step": 47900 + }, + { + "epoch": 1.8498783736823814, + "grad_norm": 1.2090016603469849, + "learning_rate": 7.667734918980141e-05, + "loss": 0.1873, + "step": 47910 + }, + { + "epoch": 1.8502644889764084, + "grad_norm": 1.5354760885238647, + "learning_rate": 7.665160817019963e-05, + "loss": 0.2218, + "step": 47920 + }, + { + "epoch": 1.8506506042704352, + "grad_norm": 2.093831777572632, + "learning_rate": 7.662586715059785e-05, + "loss": 0.2591, + "step": 47930 + }, + { + "epoch": 1.851036719564462, + "grad_norm": 0.9484484195709229, + "learning_rate": 7.660012613099605e-05, + "loss": 0.252, + "step": 47940 + }, + { + "epoch": 1.851422834858489, + "grad_norm": 0.15661562979221344, + "learning_rate": 7.657438511139426e-05, + "loss": 0.2673, + "step": 47950 + }, + { + "epoch": 1.8518089501525155, + "grad_norm": 0.5501863360404968, + "learning_rate": 7.654864409179247e-05, + "loss": 0.2636, + "step": 47960 + }, + { + "epoch": 1.8521950654465424, + "grad_norm": 1.455328345298767, + "learning_rate": 7.652290307219069e-05, + "loss": 0.2085, + "step": 47970 + }, + { + "epoch": 1.852581180740569, + "grad_norm": 0.310106098651886, + "learning_rate": 7.649716205258891e-05, + "loss": 0.1839, + "step": 47980 + }, + { + "epoch": 1.852967296034596, + "grad_norm": 0.444260835647583, + "learning_rate": 7.647142103298713e-05, + "loss": 0.2434, + "step": 47990 + }, + { + "epoch": 1.8533534113286227, + "grad_norm": 0.5278909206390381, + "learning_rate": 7.644568001338534e-05, + "loss": 0.2472, + "step": 48000 + }, + { + "epoch": 1.8537395266226495, + "grad_norm": 0.4239410161972046, + "learning_rate": 7.641993899378354e-05, + "loss": 0.2854, + "step": 48010 + }, + { + "epoch": 1.8541256419166763, + "grad_norm": 0.9763671159744263, + "learning_rate": 7.639419797418175e-05, + "loss": 0.2737, + "step": 48020 + }, + { + "epoch": 1.854511757210703, + "grad_norm": 2.139054775238037, + "learning_rate": 7.636845695457997e-05, + "loss": 0.2553, + "step": 48030 + }, + { + "epoch": 1.85489787250473, + "grad_norm": 1.8417706489562988, + "learning_rate": 7.63427159349782e-05, + "loss": 0.3787, + "step": 48040 + }, + { + "epoch": 1.8552839877987566, + "grad_norm": 1.2950854301452637, + "learning_rate": 7.631697491537641e-05, + "loss": 0.1859, + "step": 48050 + }, + { + "epoch": 1.8556701030927836, + "grad_norm": 0.9440277218818665, + "learning_rate": 7.629123389577462e-05, + "loss": 0.2362, + "step": 48060 + }, + { + "epoch": 1.8560562183868103, + "grad_norm": 
2.829890012741089, + "learning_rate": 7.626549287617283e-05, + "loss": 0.264, + "step": 48070 + }, + { + "epoch": 1.856442333680837, + "grad_norm": 1.5721958875656128, + "learning_rate": 7.623975185657103e-05, + "loss": 0.2834, + "step": 48080 + }, + { + "epoch": 1.8568284489748639, + "grad_norm": 0.7574679851531982, + "learning_rate": 7.621401083696925e-05, + "loss": 0.1546, + "step": 48090 + }, + { + "epoch": 1.8572145642688906, + "grad_norm": 0.4562332332134247, + "learning_rate": 7.618826981736747e-05, + "loss": 0.2419, + "step": 48100 + }, + { + "epoch": 1.8576006795629176, + "grad_norm": 1.8063342571258545, + "learning_rate": 7.616252879776569e-05, + "loss": 0.5576, + "step": 48110 + }, + { + "epoch": 1.8579867948569442, + "grad_norm": 1.7112247943878174, + "learning_rate": 7.61367877781639e-05, + "loss": 0.2379, + "step": 48120 + }, + { + "epoch": 1.8583729101509712, + "grad_norm": 0.696594774723053, + "learning_rate": 7.611104675856211e-05, + "loss": 0.2704, + "step": 48130 + }, + { + "epoch": 1.8587590254449977, + "grad_norm": 1.1555263996124268, + "learning_rate": 7.608530573896033e-05, + "loss": 0.2835, + "step": 48140 + }, + { + "epoch": 1.8591451407390247, + "grad_norm": 0.8256335854530334, + "learning_rate": 7.605956471935854e-05, + "loss": 0.3059, + "step": 48150 + }, + { + "epoch": 1.8595312560330515, + "grad_norm": 1.5511350631713867, + "learning_rate": 7.603382369975674e-05, + "loss": 0.2882, + "step": 48160 + }, + { + "epoch": 1.8599173713270782, + "grad_norm": 0.5888099074363708, + "learning_rate": 7.600808268015497e-05, + "loss": 0.2939, + "step": 48170 + }, + { + "epoch": 1.860303486621105, + "grad_norm": 1.3985711336135864, + "learning_rate": 7.598234166055318e-05, + "loss": 0.203, + "step": 48180 + }, + { + "epoch": 1.8606896019151318, + "grad_norm": 2.2253661155700684, + "learning_rate": 7.59566006409514e-05, + "loss": 0.2865, + "step": 48190 + }, + { + "epoch": 1.8610757172091588, + "grad_norm": 1.062969446182251, + "learning_rate": 7.593085962134961e-05, + "loss": 0.3292, + "step": 48200 + }, + { + "epoch": 1.8614618325031853, + "grad_norm": 1.0917813777923584, + "learning_rate": 7.590511860174782e-05, + "loss": 0.2274, + "step": 48210 + }, + { + "epoch": 1.8618479477972123, + "grad_norm": 1.3360031843185425, + "learning_rate": 7.587937758214603e-05, + "loss": 0.2381, + "step": 48220 + }, + { + "epoch": 1.862234063091239, + "grad_norm": 0.12084411829710007, + "learning_rate": 7.585363656254425e-05, + "loss": 0.1836, + "step": 48230 + }, + { + "epoch": 1.8626201783852658, + "grad_norm": 3.0013840198516846, + "learning_rate": 7.582789554294246e-05, + "loss": 0.2514, + "step": 48240 + }, + { + "epoch": 1.8630062936792926, + "grad_norm": 0.08427372574806213, + "learning_rate": 7.580215452334067e-05, + "loss": 0.4512, + "step": 48250 + }, + { + "epoch": 1.8633924089733194, + "grad_norm": 2.2700986862182617, + "learning_rate": 7.577641350373889e-05, + "loss": 0.4929, + "step": 48260 + }, + { + "epoch": 1.8637785242673464, + "grad_norm": 1.512097716331482, + "learning_rate": 7.57506724841371e-05, + "loss": 0.2595, + "step": 48270 + }, + { + "epoch": 1.864164639561373, + "grad_norm": 0.03086630441248417, + "learning_rate": 7.572493146453531e-05, + "loss": 0.2801, + "step": 48280 + }, + { + "epoch": 1.8645507548554, + "grad_norm": 1.8089312314987183, + "learning_rate": 7.569919044493353e-05, + "loss": 0.3094, + "step": 48290 + }, + { + "epoch": 1.8649368701494267, + "grad_norm": 1.2290606498718262, + "learning_rate": 7.567344942533174e-05, + "loss": 0.2952, + "step": 
48300 + }, + { + "epoch": 1.8653229854434534, + "grad_norm": 2.2351982593536377, + "learning_rate": 7.564770840572995e-05, + "loss": 0.1904, + "step": 48310 + }, + { + "epoch": 1.8657091007374802, + "grad_norm": 1.007934808731079, + "learning_rate": 7.562196738612817e-05, + "loss": 0.174, + "step": 48320 + }, + { + "epoch": 1.866095216031507, + "grad_norm": 1.8931010961532593, + "learning_rate": 7.559622636652638e-05, + "loss": 0.2265, + "step": 48330 + }, + { + "epoch": 1.866481331325534, + "grad_norm": 7.616462230682373, + "learning_rate": 7.55704853469246e-05, + "loss": 0.194, + "step": 48340 + }, + { + "epoch": 1.8668674466195605, + "grad_norm": 0.8300217390060425, + "learning_rate": 7.554474432732282e-05, + "loss": 0.172, + "step": 48350 + }, + { + "epoch": 1.8672535619135875, + "grad_norm": 0.9842997789382935, + "learning_rate": 7.551900330772102e-05, + "loss": 0.2279, + "step": 48360 + }, + { + "epoch": 1.867639677207614, + "grad_norm": 1.950230360031128, + "learning_rate": 7.549326228811923e-05, + "loss": 0.2703, + "step": 48370 + }, + { + "epoch": 1.868025792501641, + "grad_norm": 1.9885706901550293, + "learning_rate": 7.546752126851745e-05, + "loss": 0.2584, + "step": 48380 + }, + { + "epoch": 1.8684119077956678, + "grad_norm": 0.6000315546989441, + "learning_rate": 7.544178024891566e-05, + "loss": 0.2729, + "step": 48390 + }, + { + "epoch": 1.8687980230896946, + "grad_norm": 0.922893226146698, + "learning_rate": 7.541603922931387e-05, + "loss": 0.1564, + "step": 48400 + }, + { + "epoch": 1.8691841383837213, + "grad_norm": 0.4918765723705292, + "learning_rate": 7.53902982097121e-05, + "loss": 0.1723, + "step": 48410 + }, + { + "epoch": 1.869570253677748, + "grad_norm": 0.9271582365036011, + "learning_rate": 7.536455719011031e-05, + "loss": 0.2593, + "step": 48420 + }, + { + "epoch": 1.869956368971775, + "grad_norm": 0.5880617499351501, + "learning_rate": 7.533881617050851e-05, + "loss": 0.1738, + "step": 48430 + }, + { + "epoch": 1.8703424842658016, + "grad_norm": 2.229809522628784, + "learning_rate": 7.531307515090673e-05, + "loss": 0.2722, + "step": 48440 + }, + { + "epoch": 1.8707285995598286, + "grad_norm": 0.47200268507003784, + "learning_rate": 7.528733413130494e-05, + "loss": 0.1359, + "step": 48450 + }, + { + "epoch": 1.8711147148538554, + "grad_norm": 1.7799588441848755, + "learning_rate": 7.526159311170315e-05, + "loss": 0.1806, + "step": 48460 + }, + { + "epoch": 1.8715008301478822, + "grad_norm": 0.7878087162971497, + "learning_rate": 7.523585209210137e-05, + "loss": 0.128, + "step": 48470 + }, + { + "epoch": 1.871886945441909, + "grad_norm": 0.666887104511261, + "learning_rate": 7.521011107249959e-05, + "loss": 0.3447, + "step": 48480 + }, + { + "epoch": 1.8722730607359357, + "grad_norm": 0.13966748118400574, + "learning_rate": 7.51843700528978e-05, + "loss": 0.2694, + "step": 48490 + }, + { + "epoch": 1.8726591760299627, + "grad_norm": 0.6305252909660339, + "learning_rate": 7.5158629033296e-05, + "loss": 0.1235, + "step": 48500 + }, + { + "epoch": 1.8730452913239892, + "grad_norm": 1.4558709859848022, + "learning_rate": 7.513288801369422e-05, + "loss": 0.1251, + "step": 48510 + }, + { + "epoch": 1.8734314066180162, + "grad_norm": 1.4596049785614014, + "learning_rate": 7.510714699409243e-05, + "loss": 0.3316, + "step": 48520 + }, + { + "epoch": 1.873817521912043, + "grad_norm": 0.6161240935325623, + "learning_rate": 7.508140597449065e-05, + "loss": 0.1035, + "step": 48530 + }, + { + "epoch": 1.8742036372060698, + "grad_norm": 2.402022123336792, + "learning_rate": 
7.505566495488887e-05, + "loss": 0.2569, + "step": 48540 + }, + { + "epoch": 1.8745897525000965, + "grad_norm": 1.2332879304885864, + "learning_rate": 7.502992393528709e-05, + "loss": 0.0864, + "step": 48550 + }, + { + "epoch": 1.8749758677941233, + "grad_norm": 1.636063575744629, + "learning_rate": 7.50041829156853e-05, + "loss": 0.2673, + "step": 48560 + }, + { + "epoch": 1.8753619830881503, + "grad_norm": 1.0160930156707764, + "learning_rate": 7.497844189608351e-05, + "loss": 0.2759, + "step": 48570 + }, + { + "epoch": 1.8757480983821768, + "grad_norm": 2.007415771484375, + "learning_rate": 7.495270087648171e-05, + "loss": 0.328, + "step": 48580 + }, + { + "epoch": 1.8761342136762038, + "grad_norm": 2.852415084838867, + "learning_rate": 7.492695985687993e-05, + "loss": 0.2762, + "step": 48590 + }, + { + "epoch": 1.8765203289702304, + "grad_norm": 1.5660792589187622, + "learning_rate": 7.490121883727815e-05, + "loss": 0.3858, + "step": 48600 + }, + { + "epoch": 1.8769064442642573, + "grad_norm": 1.022005319595337, + "learning_rate": 7.487547781767637e-05, + "loss": 0.2508, + "step": 48610 + }, + { + "epoch": 1.8772925595582841, + "grad_norm": 0.9244334101676941, + "learning_rate": 7.484973679807458e-05, + "loss": 0.3577, + "step": 48620 + }, + { + "epoch": 1.8776786748523109, + "grad_norm": 0.6603676080703735, + "learning_rate": 7.482399577847279e-05, + "loss": 0.1577, + "step": 48630 + }, + { + "epoch": 1.8780647901463376, + "grad_norm": 1.8789231777191162, + "learning_rate": 7.4798254758871e-05, + "loss": 0.1705, + "step": 48640 + }, + { + "epoch": 1.8784509054403644, + "grad_norm": 2.0536692142486572, + "learning_rate": 7.47725137392692e-05, + "loss": 0.3087, + "step": 48650 + }, + { + "epoch": 1.8788370207343914, + "grad_norm": 1.1918115615844727, + "learning_rate": 7.474677271966743e-05, + "loss": 0.2027, + "step": 48660 + }, + { + "epoch": 1.879223136028418, + "grad_norm": 0.374523788690567, + "learning_rate": 7.472103170006565e-05, + "loss": 0.193, + "step": 48670 + }, + { + "epoch": 1.879609251322445, + "grad_norm": 1.481998324394226, + "learning_rate": 7.469529068046386e-05, + "loss": 0.246, + "step": 48680 + }, + { + "epoch": 1.8799953666164717, + "grad_norm": 0.9611921310424805, + "learning_rate": 7.466954966086207e-05, + "loss": 0.3557, + "step": 48690 + }, + { + "epoch": 1.8803814819104985, + "grad_norm": 2.2604222297668457, + "learning_rate": 7.464380864126029e-05, + "loss": 0.168, + "step": 48700 + }, + { + "epoch": 1.8807675972045252, + "grad_norm": 0.11883547157049179, + "learning_rate": 7.46180676216585e-05, + "loss": 0.1078, + "step": 48710 + }, + { + "epoch": 1.881153712498552, + "grad_norm": 1.0063214302062988, + "learning_rate": 7.45923266020567e-05, + "loss": 0.2871, + "step": 48720 + }, + { + "epoch": 1.881539827792579, + "grad_norm": 0.32539430260658264, + "learning_rate": 7.456658558245493e-05, + "loss": 0.2641, + "step": 48730 + }, + { + "epoch": 1.8819259430866055, + "grad_norm": 0.05262208729982376, + "learning_rate": 7.454084456285314e-05, + "loss": 0.2313, + "step": 48740 + }, + { + "epoch": 1.8823120583806325, + "grad_norm": 1.5337389707565308, + "learning_rate": 7.451510354325135e-05, + "loss": 0.2202, + "step": 48750 + }, + { + "epoch": 1.8826981736746593, + "grad_norm": 2.8400349617004395, + "learning_rate": 7.448936252364957e-05, + "loss": 0.2719, + "step": 48760 + }, + { + "epoch": 1.883084288968686, + "grad_norm": 1.0065114498138428, + "learning_rate": 7.446362150404778e-05, + "loss": 0.1904, + "step": 48770 + }, + { + "epoch": 1.8834704042627128, + 
"grad_norm": 3.514146089553833, + "learning_rate": 7.443788048444599e-05, + "loss": 0.4121, + "step": 48780 + }, + { + "epoch": 1.8838565195567396, + "grad_norm": 1.5249392986297607, + "learning_rate": 7.44121394648442e-05, + "loss": 0.3192, + "step": 48790 + }, + { + "epoch": 1.8842426348507666, + "grad_norm": 2.1075022220611572, + "learning_rate": 7.438639844524242e-05, + "loss": 0.257, + "step": 48800 + }, + { + "epoch": 1.8846287501447931, + "grad_norm": 1.368531584739685, + "learning_rate": 7.436065742564063e-05, + "loss": 0.246, + "step": 48810 + }, + { + "epoch": 1.8850148654388201, + "grad_norm": 0.7900007367134094, + "learning_rate": 7.433491640603885e-05, + "loss": 0.2635, + "step": 48820 + }, + { + "epoch": 1.8854009807328467, + "grad_norm": 0.8597519397735596, + "learning_rate": 7.430917538643706e-05, + "loss": 0.3996, + "step": 48830 + }, + { + "epoch": 1.8857870960268737, + "grad_norm": 1.4207600355148315, + "learning_rate": 7.428343436683527e-05, + "loss": 0.0985, + "step": 48840 + }, + { + "epoch": 1.8861732113209004, + "grad_norm": 0.43486616015434265, + "learning_rate": 7.425769334723349e-05, + "loss": 0.0675, + "step": 48850 + }, + { + "epoch": 1.8865593266149272, + "grad_norm": 0.5899690389633179, + "learning_rate": 7.42319523276317e-05, + "loss": 0.3184, + "step": 48860 + }, + { + "epoch": 1.886945441908954, + "grad_norm": 0.3843490481376648, + "learning_rate": 7.420621130802991e-05, + "loss": 0.3173, + "step": 48870 + }, + { + "epoch": 1.8873315572029807, + "grad_norm": 1.5994783639907837, + "learning_rate": 7.418047028842813e-05, + "loss": 0.1545, + "step": 48880 + }, + { + "epoch": 1.8877176724970077, + "grad_norm": 0.4738117456436157, + "learning_rate": 7.415472926882634e-05, + "loss": 0.2209, + "step": 48890 + }, + { + "epoch": 1.8881037877910343, + "grad_norm": 0.8965383768081665, + "learning_rate": 7.412898824922455e-05, + "loss": 0.0784, + "step": 48900 + }, + { + "epoch": 1.8884899030850613, + "grad_norm": 0.5122581124305725, + "learning_rate": 7.410324722962278e-05, + "loss": 0.2346, + "step": 48910 + }, + { + "epoch": 1.888876018379088, + "grad_norm": 0.8720236420631409, + "learning_rate": 7.407750621002099e-05, + "loss": 0.1616, + "step": 48920 + }, + { + "epoch": 1.8892621336731148, + "grad_norm": 0.8678966760635376, + "learning_rate": 7.405176519041919e-05, + "loss": 0.2346, + "step": 48930 + }, + { + "epoch": 1.8896482489671416, + "grad_norm": 1.5904022455215454, + "learning_rate": 7.40260241708174e-05, + "loss": 0.2383, + "step": 48940 + }, + { + "epoch": 1.8900343642611683, + "grad_norm": 0.8917766213417053, + "learning_rate": 7.400028315121562e-05, + "loss": 0.0761, + "step": 48950 + }, + { + "epoch": 1.8904204795551953, + "grad_norm": 2.0660765171051025, + "learning_rate": 7.397454213161383e-05, + "loss": 0.1417, + "step": 48960 + }, + { + "epoch": 1.8908065948492219, + "grad_norm": 1.085541009902954, + "learning_rate": 7.394880111201205e-05, + "loss": 0.116, + "step": 48970 + }, + { + "epoch": 1.8911927101432489, + "grad_norm": 0.5287320613861084, + "learning_rate": 7.392306009241027e-05, + "loss": 0.2035, + "step": 48980 + }, + { + "epoch": 1.8915788254372756, + "grad_norm": 0.12603731453418732, + "learning_rate": 7.389731907280849e-05, + "loss": 0.2288, + "step": 48990 + }, + { + "epoch": 1.8919649407313024, + "grad_norm": 1.6294454336166382, + "learning_rate": 7.387157805320669e-05, + "loss": 0.3106, + "step": 49000 + }, + { + "epoch": 1.8923510560253292, + "grad_norm": 0.12466654926538467, + "learning_rate": 7.38458370336049e-05, + "loss": 
0.2728, + "step": 49010 + }, + { + "epoch": 1.892737171319356, + "grad_norm": 0.44524601101875305, + "learning_rate": 7.382009601400311e-05, + "loss": 0.1293, + "step": 49020 + }, + { + "epoch": 1.893123286613383, + "grad_norm": 0.7164571285247803, + "learning_rate": 7.379435499440133e-05, + "loss": 0.2331, + "step": 49030 + }, + { + "epoch": 1.8935094019074095, + "grad_norm": 0.6479294300079346, + "learning_rate": 7.376861397479955e-05, + "loss": 0.2716, + "step": 49040 + }, + { + "epoch": 1.8938955172014365, + "grad_norm": 0.5055733323097229, + "learning_rate": 7.374287295519777e-05, + "loss": 0.253, + "step": 49050 + }, + { + "epoch": 1.894281632495463, + "grad_norm": 0.14443042874336243, + "learning_rate": 7.371713193559598e-05, + "loss": 0.211, + "step": 49060 + }, + { + "epoch": 1.89466774778949, + "grad_norm": 0.21934077143669128, + "learning_rate": 7.369139091599418e-05, + "loss": 0.4524, + "step": 49070 + }, + { + "epoch": 1.8950538630835168, + "grad_norm": 0.7723036408424377, + "learning_rate": 7.366564989639239e-05, + "loss": 0.3333, + "step": 49080 + }, + { + "epoch": 1.8954399783775435, + "grad_norm": 0.561475932598114, + "learning_rate": 7.36399088767906e-05, + "loss": 0.2446, + "step": 49090 + }, + { + "epoch": 1.8958260936715703, + "grad_norm": 0.36831262707710266, + "learning_rate": 7.361416785718883e-05, + "loss": 0.3203, + "step": 49100 + }, + { + "epoch": 1.896212208965597, + "grad_norm": 1.3542941808700562, + "learning_rate": 7.358842683758705e-05, + "loss": 0.1927, + "step": 49110 + }, + { + "epoch": 1.896598324259624, + "grad_norm": 0.778232991695404, + "learning_rate": 7.356268581798526e-05, + "loss": 0.1714, + "step": 49120 + }, + { + "epoch": 1.8969844395536506, + "grad_norm": 0.1421511471271515, + "learning_rate": 7.353694479838347e-05, + "loss": 0.2391, + "step": 49130 + }, + { + "epoch": 1.8973705548476776, + "grad_norm": 0.5509871244430542, + "learning_rate": 7.351120377878169e-05, + "loss": 0.251, + "step": 49140 + }, + { + "epoch": 1.8977566701417043, + "grad_norm": 1.068138599395752, + "learning_rate": 7.348546275917989e-05, + "loss": 0.3913, + "step": 49150 + }, + { + "epoch": 1.8981427854357311, + "grad_norm": 0.8679132461547852, + "learning_rate": 7.345972173957811e-05, + "loss": 0.2243, + "step": 49160 + }, + { + "epoch": 1.8985289007297579, + "grad_norm": 1.1966150999069214, + "learning_rate": 7.343398071997633e-05, + "loss": 0.1426, + "step": 49170 + }, + { + "epoch": 1.8989150160237847, + "grad_norm": 2.415524482727051, + "learning_rate": 7.340823970037454e-05, + "loss": 0.2193, + "step": 49180 + }, + { + "epoch": 1.8993011313178116, + "grad_norm": 0.3287011384963989, + "learning_rate": 7.338249868077275e-05, + "loss": 0.1409, + "step": 49190 + }, + { + "epoch": 1.8996872466118382, + "grad_norm": 0.47298726439476013, + "learning_rate": 7.335675766117097e-05, + "loss": 0.3534, + "step": 49200 + }, + { + "epoch": 1.9000733619058652, + "grad_norm": 0.25453588366508484, + "learning_rate": 7.333101664156918e-05, + "loss": 0.2457, + "step": 49210 + }, + { + "epoch": 1.9004594771998917, + "grad_norm": 1.78682279586792, + "learning_rate": 7.330527562196738e-05, + "loss": 0.4379, + "step": 49220 + }, + { + "epoch": 1.9008455924939187, + "grad_norm": 0.9073999524116516, + "learning_rate": 7.32795346023656e-05, + "loss": 0.27, + "step": 49230 + }, + { + "epoch": 1.9012317077879455, + "grad_norm": 0.7788071036338806, + "learning_rate": 7.325379358276382e-05, + "loss": 0.1992, + "step": 49240 + }, + { + "epoch": 1.9016178230819722, + "grad_norm": 
0.151946023106575, + "learning_rate": 7.322805256316203e-05, + "loss": 0.1526, + "step": 49250 + }, + { + "epoch": 1.9020039383759992, + "grad_norm": 1.655206561088562, + "learning_rate": 7.320231154356025e-05, + "loss": 0.2899, + "step": 49260 + }, + { + "epoch": 1.9023900536700258, + "grad_norm": 0.6136038303375244, + "learning_rate": 7.317657052395846e-05, + "loss": 0.2118, + "step": 49270 + }, + { + "epoch": 1.9027761689640528, + "grad_norm": 2.720750093460083, + "learning_rate": 7.315082950435667e-05, + "loss": 0.316, + "step": 49280 + }, + { + "epoch": 1.9031622842580793, + "grad_norm": 0.7502691149711609, + "learning_rate": 7.312508848475488e-05, + "loss": 0.2368, + "step": 49290 + }, + { + "epoch": 1.9035483995521063, + "grad_norm": 1.0178804397583008, + "learning_rate": 7.30993474651531e-05, + "loss": 0.1193, + "step": 49300 + }, + { + "epoch": 1.903934514846133, + "grad_norm": 1.0318552255630493, + "learning_rate": 7.307360644555131e-05, + "loss": 0.1236, + "step": 49310 + }, + { + "epoch": 1.9043206301401598, + "grad_norm": 0.4833224415779114, + "learning_rate": 7.304786542594952e-05, + "loss": 0.1505, + "step": 49320 + }, + { + "epoch": 1.9047067454341866, + "grad_norm": 0.8481758832931519, + "learning_rate": 7.302212440634774e-05, + "loss": 0.318, + "step": 49330 + }, + { + "epoch": 1.9050928607282134, + "grad_norm": 0.8141576051712036, + "learning_rate": 7.299638338674595e-05, + "loss": 0.2936, + "step": 49340 + }, + { + "epoch": 1.9054789760222404, + "grad_norm": 0.28531432151794434, + "learning_rate": 7.297064236714416e-05, + "loss": 0.0794, + "step": 49350 + }, + { + "epoch": 1.905865091316267, + "grad_norm": 1.2908906936645508, + "learning_rate": 7.294490134754238e-05, + "loss": 0.2753, + "step": 49360 + }, + { + "epoch": 1.906251206610294, + "grad_norm": 0.27395737171173096, + "learning_rate": 7.291916032794059e-05, + "loss": 0.0869, + "step": 49370 + }, + { + "epoch": 1.9066373219043207, + "grad_norm": 0.5168110728263855, + "learning_rate": 7.28934193083388e-05, + "loss": 0.2033, + "step": 49380 + }, + { + "epoch": 1.9070234371983474, + "grad_norm": 0.384120911359787, + "learning_rate": 7.286767828873702e-05, + "loss": 0.2012, + "step": 49390 + }, + { + "epoch": 1.9074095524923742, + "grad_norm": 0.86110919713974, + "learning_rate": 7.284193726913523e-05, + "loss": 0.254, + "step": 49400 + }, + { + "epoch": 1.907795667786401, + "grad_norm": 0.04631857946515083, + "learning_rate": 7.281619624953346e-05, + "loss": 0.1196, + "step": 49410 + }, + { + "epoch": 1.908181783080428, + "grad_norm": 0.1448020040988922, + "learning_rate": 7.279045522993166e-05, + "loss": 0.2028, + "step": 49420 + }, + { + "epoch": 1.9085678983744545, + "grad_norm": 0.11122003197669983, + "learning_rate": 7.276471421032987e-05, + "loss": 0.2106, + "step": 49430 + }, + { + "epoch": 1.9089540136684815, + "grad_norm": 1.0059682130813599, + "learning_rate": 7.273897319072808e-05, + "loss": 0.2273, + "step": 49440 + }, + { + "epoch": 1.909340128962508, + "grad_norm": 0.5203434824943542, + "learning_rate": 7.27132321711263e-05, + "loss": 0.1357, + "step": 49450 + }, + { + "epoch": 1.909726244256535, + "grad_norm": 0.5997903347015381, + "learning_rate": 7.268749115152451e-05, + "loss": 0.204, + "step": 49460 + }, + { + "epoch": 1.9101123595505618, + "grad_norm": 0.1968044638633728, + "learning_rate": 7.266175013192272e-05, + "loss": 0.0851, + "step": 49470 + }, + { + "epoch": 1.9104984748445886, + "grad_norm": 2.3479251861572266, + "learning_rate": 7.263600911232095e-05, + "loss": 0.2232, + "step": 49480 + 
}, + { + "epoch": 1.9108845901386153, + "grad_norm": 1.1804332733154297, + "learning_rate": 7.261026809271916e-05, + "loss": 0.1622, + "step": 49490 + }, + { + "epoch": 1.911270705432642, + "grad_norm": 1.4964795112609863, + "learning_rate": 7.258452707311736e-05, + "loss": 0.2701, + "step": 49500 + }, + { + "epoch": 1.911656820726669, + "grad_norm": 2.1407668590545654, + "learning_rate": 7.255878605351558e-05, + "loss": 0.2944, + "step": 49510 + }, + { + "epoch": 1.9120429360206956, + "grad_norm": 0.5795183181762695, + "learning_rate": 7.253304503391379e-05, + "loss": 0.2761, + "step": 49520 + }, + { + "epoch": 1.9124290513147226, + "grad_norm": 0.8205333352088928, + "learning_rate": 7.2507304014312e-05, + "loss": 0.3897, + "step": 49530 + }, + { + "epoch": 1.9128151666087494, + "grad_norm": 4.536723613739014, + "learning_rate": 7.248156299471023e-05, + "loss": 0.3429, + "step": 49540 + }, + { + "epoch": 1.9132012819027762, + "grad_norm": 0.7611442804336548, + "learning_rate": 7.245582197510844e-05, + "loss": 0.3098, + "step": 49550 + }, + { + "epoch": 1.913587397196803, + "grad_norm": 0.7911695241928101, + "learning_rate": 7.243008095550666e-05, + "loss": 0.28, + "step": 49560 + }, + { + "epoch": 1.9139735124908297, + "grad_norm": 0.11662279069423676, + "learning_rate": 7.240433993590486e-05, + "loss": 0.1577, + "step": 49570 + }, + { + "epoch": 1.9143596277848567, + "grad_norm": 2.140101194381714, + "learning_rate": 7.237859891630307e-05, + "loss": 0.3705, + "step": 49580 + }, + { + "epoch": 1.9147457430788832, + "grad_norm": 1.8803783655166626, + "learning_rate": 7.235285789670128e-05, + "loss": 0.3168, + "step": 49590 + }, + { + "epoch": 1.9151318583729102, + "grad_norm": 0.25436753034591675, + "learning_rate": 7.232711687709951e-05, + "loss": 0.3037, + "step": 49600 + }, + { + "epoch": 1.915517973666937, + "grad_norm": 1.1993011236190796, + "learning_rate": 7.230137585749772e-05, + "loss": 0.2218, + "step": 49610 + }, + { + "epoch": 1.9159040889609638, + "grad_norm": 0.4995238780975342, + "learning_rate": 7.227563483789594e-05, + "loss": 0.1821, + "step": 49620 + }, + { + "epoch": 1.9162902042549905, + "grad_norm": 1.8584142923355103, + "learning_rate": 7.224989381829415e-05, + "loss": 0.2694, + "step": 49630 + }, + { + "epoch": 1.9166763195490173, + "grad_norm": 0.5736681818962097, + "learning_rate": 7.222415279869235e-05, + "loss": 0.32, + "step": 49640 + }, + { + "epoch": 1.9170624348430443, + "grad_norm": 0.2963573634624481, + "learning_rate": 7.219841177909056e-05, + "loss": 0.1986, + "step": 49650 + }, + { + "epoch": 1.9174485501370708, + "grad_norm": 0.6248067617416382, + "learning_rate": 7.217267075948879e-05, + "loss": 0.2461, + "step": 49660 + }, + { + "epoch": 1.9178346654310978, + "grad_norm": 1.037135362625122, + "learning_rate": 7.2146929739887e-05, + "loss": 0.176, + "step": 49670 + }, + { + "epoch": 1.9182207807251244, + "grad_norm": 7.093240261077881, + "learning_rate": 7.212118872028522e-05, + "loss": 0.263, + "step": 49680 + }, + { + "epoch": 1.9186068960191514, + "grad_norm": 0.9378503561019897, + "learning_rate": 7.209544770068343e-05, + "loss": 0.4105, + "step": 49690 + }, + { + "epoch": 1.9189930113131781, + "grad_norm": 2.801452875137329, + "learning_rate": 7.206970668108164e-05, + "loss": 0.1693, + "step": 49700 + }, + { + "epoch": 1.9193791266072049, + "grad_norm": 0.6257158517837524, + "learning_rate": 7.204396566147984e-05, + "loss": 0.2095, + "step": 49710 + }, + { + "epoch": 1.9197652419012317, + "grad_norm": 1.0623574256896973, + "learning_rate": 
7.201822464187807e-05, + "loss": 0.2335, + "step": 49720 + }, + { + "epoch": 1.9201513571952584, + "grad_norm": 1.958388328552246, + "learning_rate": 7.199248362227628e-05, + "loss": 0.2545, + "step": 49730 + }, + { + "epoch": 1.9205374724892854, + "grad_norm": 1.790643334388733, + "learning_rate": 7.19667426026745e-05, + "loss": 0.2342, + "step": 49740 + }, + { + "epoch": 1.920923587783312, + "grad_norm": 0.19096235930919647, + "learning_rate": 7.194100158307271e-05, + "loss": 0.1812, + "step": 49750 + }, + { + "epoch": 1.921309703077339, + "grad_norm": 1.8985120058059692, + "learning_rate": 7.191526056347092e-05, + "loss": 0.294, + "step": 49760 + }, + { + "epoch": 1.9216958183713657, + "grad_norm": 2.9629859924316406, + "learning_rate": 7.188951954386914e-05, + "loss": 0.3423, + "step": 49770 + }, + { + "epoch": 1.9220819336653925, + "grad_norm": 1.475262999534607, + "learning_rate": 7.186377852426735e-05, + "loss": 0.1478, + "step": 49780 + }, + { + "epoch": 1.9224680489594193, + "grad_norm": 1.5917531251907349, + "learning_rate": 7.183803750466556e-05, + "loss": 0.1964, + "step": 49790 + }, + { + "epoch": 1.922854164253446, + "grad_norm": 0.20593854784965515, + "learning_rate": 7.181229648506378e-05, + "loss": 0.1775, + "step": 49800 + }, + { + "epoch": 1.923240279547473, + "grad_norm": 0.5509443879127502, + "learning_rate": 7.178655546546199e-05, + "loss": 0.2411, + "step": 49810 + }, + { + "epoch": 1.9236263948414996, + "grad_norm": 0.8016191720962524, + "learning_rate": 7.17608144458602e-05, + "loss": 0.0837, + "step": 49820 + }, + { + "epoch": 1.9240125101355265, + "grad_norm": 0.8763396143913269, + "learning_rate": 7.173507342625842e-05, + "loss": 0.1932, + "step": 49830 + }, + { + "epoch": 1.9243986254295533, + "grad_norm": 0.25457191467285156, + "learning_rate": 7.170933240665663e-05, + "loss": 0.0869, + "step": 49840 + }, + { + "epoch": 1.92478474072358, + "grad_norm": 2.61993145942688, + "learning_rate": 7.168359138705484e-05, + "loss": 0.1839, + "step": 49850 + }, + { + "epoch": 1.9251708560176068, + "grad_norm": 2.2821877002716064, + "learning_rate": 7.165785036745306e-05, + "loss": 0.3863, + "step": 49860 + }, + { + "epoch": 1.9255569713116336, + "grad_norm": 1.4072798490524292, + "learning_rate": 7.163210934785127e-05, + "loss": 0.2477, + "step": 49870 + }, + { + "epoch": 1.9259430866056606, + "grad_norm": 0.8962070941925049, + "learning_rate": 7.160636832824948e-05, + "loss": 0.2936, + "step": 49880 + }, + { + "epoch": 1.9263292018996871, + "grad_norm": 3.0801923274993896, + "learning_rate": 7.15806273086477e-05, + "loss": 0.2368, + "step": 49890 + }, + { + "epoch": 1.9267153171937141, + "grad_norm": 0.5756659507751465, + "learning_rate": 7.155488628904591e-05, + "loss": 0.2774, + "step": 49900 + }, + { + "epoch": 1.9271014324877407, + "grad_norm": 2.669837236404419, + "learning_rate": 7.152914526944414e-05, + "loss": 0.2742, + "step": 49910 + }, + { + "epoch": 1.9274875477817677, + "grad_norm": 1.3064197301864624, + "learning_rate": 7.150340424984234e-05, + "loss": 0.3908, + "step": 49920 + }, + { + "epoch": 1.9278736630757944, + "grad_norm": 1.54086434841156, + "learning_rate": 7.147766323024055e-05, + "loss": 0.2302, + "step": 49930 + }, + { + "epoch": 1.9282597783698212, + "grad_norm": 2.6577224731445312, + "learning_rate": 7.145192221063876e-05, + "loss": 0.2667, + "step": 49940 + }, + { + "epoch": 1.928645893663848, + "grad_norm": 1.0387258529663086, + "learning_rate": 7.142618119103698e-05, + "loss": 0.2418, + "step": 49950 + }, + { + "epoch": 1.9290320089578747, 
+ "grad_norm": 0.592282772064209, + "learning_rate": 7.140044017143519e-05, + "loss": 0.2499, + "step": 49960 + }, + { + "epoch": 1.9294181242519017, + "grad_norm": 0.364241361618042, + "learning_rate": 7.137469915183342e-05, + "loss": 0.2056, + "step": 49970 + }, + { + "epoch": 1.9298042395459283, + "grad_norm": 2.9593188762664795, + "learning_rate": 7.134895813223163e-05, + "loss": 0.2514, + "step": 49980 + }, + { + "epoch": 1.9301903548399553, + "grad_norm": 1.9135371446609497, + "learning_rate": 7.132321711262983e-05, + "loss": 0.124, + "step": 49990 + }, + { + "epoch": 1.930576470133982, + "grad_norm": 0.5927162170410156, + "learning_rate": 7.129747609302804e-05, + "loss": 0.2257, + "step": 50000 + }, + { + "epoch": 1.9309625854280088, + "grad_norm": 1.7300679683685303, + "learning_rate": 7.127173507342626e-05, + "loss": 0.2302, + "step": 50010 + }, + { + "epoch": 1.9313487007220356, + "grad_norm": 1.48344087600708, + "learning_rate": 7.124599405382447e-05, + "loss": 0.3121, + "step": 50020 + }, + { + "epoch": 1.9317348160160623, + "grad_norm": 1.275780439376831, + "learning_rate": 7.122025303422268e-05, + "loss": 0.2202, + "step": 50030 + }, + { + "epoch": 1.9321209313100893, + "grad_norm": 1.2625102996826172, + "learning_rate": 7.119451201462091e-05, + "loss": 0.1819, + "step": 50040 + }, + { + "epoch": 1.9325070466041159, + "grad_norm": 2.6306488513946533, + "learning_rate": 7.116877099501912e-05, + "loss": 0.3846, + "step": 50050 + }, + { + "epoch": 1.9328931618981429, + "grad_norm": 2.507249355316162, + "learning_rate": 7.114302997541732e-05, + "loss": 0.3167, + "step": 50060 + }, + { + "epoch": 1.9332792771921696, + "grad_norm": 0.6122744679450989, + "learning_rate": 7.111728895581554e-05, + "loss": 0.2486, + "step": 50070 + }, + { + "epoch": 1.9336653924861964, + "grad_norm": 0.9299182295799255, + "learning_rate": 7.109154793621375e-05, + "loss": 0.2156, + "step": 50080 + }, + { + "epoch": 1.9340515077802232, + "grad_norm": 2.0560238361358643, + "learning_rate": 7.106580691661196e-05, + "loss": 0.1319, + "step": 50090 + }, + { + "epoch": 1.93443762307425, + "grad_norm": 0.9602612257003784, + "learning_rate": 7.104006589701019e-05, + "loss": 0.1433, + "step": 50100 + }, + { + "epoch": 1.934823738368277, + "grad_norm": 1.2760334014892578, + "learning_rate": 7.10143248774084e-05, + "loss": 0.2019, + "step": 50110 + }, + { + "epoch": 1.9352098536623035, + "grad_norm": 0.27404239773750305, + "learning_rate": 7.098858385780662e-05, + "loss": 0.5303, + "step": 50120 + }, + { + "epoch": 1.9355959689563305, + "grad_norm": 0.6597281694412231, + "learning_rate": 7.096284283820483e-05, + "loss": 0.2328, + "step": 50130 + }, + { + "epoch": 1.935982084250357, + "grad_norm": 3.6417131423950195, + "learning_rate": 7.093710181860303e-05, + "loss": 0.343, + "step": 50140 + }, + { + "epoch": 1.936368199544384, + "grad_norm": 0.9950355887413025, + "learning_rate": 7.091136079900124e-05, + "loss": 0.1877, + "step": 50150 + }, + { + "epoch": 1.9367543148384108, + "grad_norm": 0.3848172426223755, + "learning_rate": 7.088561977939947e-05, + "loss": 0.1541, + "step": 50160 + }, + { + "epoch": 1.9371404301324375, + "grad_norm": 0.6884573698043823, + "learning_rate": 7.085987875979768e-05, + "loss": 0.1744, + "step": 50170 + }, + { + "epoch": 1.9375265454264643, + "grad_norm": 1.5233834981918335, + "learning_rate": 7.08341377401959e-05, + "loss": 0.3595, + "step": 50180 + }, + { + "epoch": 1.937912660720491, + "grad_norm": 0.11241710186004639, + "learning_rate": 7.080839672059411e-05, + "loss": 0.1313, + 
"step": 50190 + }, + { + "epoch": 1.938298776014518, + "grad_norm": 1.3029096126556396, + "learning_rate": 7.078265570099232e-05, + "loss": 0.3459, + "step": 50200 + }, + { + "epoch": 1.9386848913085446, + "grad_norm": 1.1150782108306885, + "learning_rate": 7.075691468139052e-05, + "loss": 0.2662, + "step": 50210 + }, + { + "epoch": 1.9390710066025716, + "grad_norm": 0.09661692380905151, + "learning_rate": 7.073117366178875e-05, + "loss": 0.2377, + "step": 50220 + }, + { + "epoch": 1.9394571218965984, + "grad_norm": 2.488790512084961, + "learning_rate": 7.070543264218696e-05, + "loss": 0.2677, + "step": 50230 + }, + { + "epoch": 1.9398432371906251, + "grad_norm": 0.407704621553421, + "learning_rate": 7.067969162258518e-05, + "loss": 0.1208, + "step": 50240 + }, + { + "epoch": 1.940229352484652, + "grad_norm": 0.4769364297389984, + "learning_rate": 7.065395060298339e-05, + "loss": 0.1748, + "step": 50250 + }, + { + "epoch": 1.9406154677786787, + "grad_norm": 2.900118112564087, + "learning_rate": 7.06282095833816e-05, + "loss": 0.1948, + "step": 50260 + }, + { + "epoch": 1.9410015830727056, + "grad_norm": 1.1749001741409302, + "learning_rate": 7.060246856377982e-05, + "loss": 0.1688, + "step": 50270 + }, + { + "epoch": 1.9413876983667322, + "grad_norm": 0.5052315592765808, + "learning_rate": 7.057672754417802e-05, + "loss": 0.4284, + "step": 50280 + }, + { + "epoch": 1.9417738136607592, + "grad_norm": 0.614936113357544, + "learning_rate": 7.055098652457624e-05, + "loss": 0.2109, + "step": 50290 + }, + { + "epoch": 1.942159928954786, + "grad_norm": 1.9683163166046143, + "learning_rate": 7.052524550497446e-05, + "loss": 0.1518, + "step": 50300 + }, + { + "epoch": 1.9425460442488127, + "grad_norm": 0.8502413630485535, + "learning_rate": 7.049950448537267e-05, + "loss": 0.281, + "step": 50310 + }, + { + "epoch": 1.9429321595428395, + "grad_norm": 2.8081016540527344, + "learning_rate": 7.047376346577088e-05, + "loss": 0.1802, + "step": 50320 + }, + { + "epoch": 1.9433182748368663, + "grad_norm": 1.9322141408920288, + "learning_rate": 7.04480224461691e-05, + "loss": 0.2218, + "step": 50330 + }, + { + "epoch": 1.9437043901308932, + "grad_norm": 0.8338032960891724, + "learning_rate": 7.042228142656731e-05, + "loss": 0.1836, + "step": 50340 + }, + { + "epoch": 1.9440905054249198, + "grad_norm": 1.232925295829773, + "learning_rate": 7.039654040696552e-05, + "loss": 0.2025, + "step": 50350 + }, + { + "epoch": 1.9444766207189468, + "grad_norm": 0.5655641555786133, + "learning_rate": 7.037079938736374e-05, + "loss": 0.2061, + "step": 50360 + }, + { + "epoch": 1.9448627360129733, + "grad_norm": 1.398917317390442, + "learning_rate": 7.034505836776195e-05, + "loss": 0.3471, + "step": 50370 + }, + { + "epoch": 1.9452488513070003, + "grad_norm": 0.8988509178161621, + "learning_rate": 7.031931734816016e-05, + "loss": 0.3504, + "step": 50380 + }, + { + "epoch": 1.945634966601027, + "grad_norm": 0.10333681106567383, + "learning_rate": 7.029357632855838e-05, + "loss": 0.352, + "step": 50390 + }, + { + "epoch": 1.9460210818950539, + "grad_norm": 1.3678967952728271, + "learning_rate": 7.026783530895659e-05, + "loss": 0.1763, + "step": 50400 + }, + { + "epoch": 1.9464071971890806, + "grad_norm": 3.1605618000030518, + "learning_rate": 7.02420942893548e-05, + "loss": 0.4636, + "step": 50410 + }, + { + "epoch": 1.9467933124831074, + "grad_norm": 0.02575235441327095, + "learning_rate": 7.021635326975302e-05, + "loss": 0.2352, + "step": 50420 + }, + { + "epoch": 1.9471794277771344, + "grad_norm": 1.789573311805725, + 
"learning_rate": 7.019061225015123e-05, + "loss": 0.3358, + "step": 50430 + }, + { + "epoch": 1.947565543071161, + "grad_norm": 1.0206273794174194, + "learning_rate": 7.016487123054944e-05, + "loss": 0.1509, + "step": 50440 + }, + { + "epoch": 1.947951658365188, + "grad_norm": 3.0096218585968018, + "learning_rate": 7.013913021094766e-05, + "loss": 0.3393, + "step": 50450 + }, + { + "epoch": 1.9483377736592147, + "grad_norm": 0.7118330001831055, + "learning_rate": 7.011338919134587e-05, + "loss": 0.2159, + "step": 50460 + }, + { + "epoch": 1.9487238889532414, + "grad_norm": 0.2753995954990387, + "learning_rate": 7.00876481717441e-05, + "loss": 0.2659, + "step": 50470 + }, + { + "epoch": 1.9491100042472682, + "grad_norm": 2.5077409744262695, + "learning_rate": 7.006190715214231e-05, + "loss": 0.3032, + "step": 50480 + }, + { + "epoch": 1.949496119541295, + "grad_norm": 0.3444388806819916, + "learning_rate": 7.003616613254051e-05, + "loss": 0.2625, + "step": 50490 + }, + { + "epoch": 1.949882234835322, + "grad_norm": 1.1488401889801025, + "learning_rate": 7.001042511293872e-05, + "loss": 0.2443, + "step": 50500 + }, + { + "epoch": 1.9502683501293485, + "grad_norm": 0.6464126706123352, + "learning_rate": 6.998468409333694e-05, + "loss": 0.1268, + "step": 50510 + }, + { + "epoch": 1.9506544654233755, + "grad_norm": 0.6716893911361694, + "learning_rate": 6.995894307373515e-05, + "loss": 0.1346, + "step": 50520 + }, + { + "epoch": 1.951040580717402, + "grad_norm": 1.7599986791610718, + "learning_rate": 6.993320205413336e-05, + "loss": 0.2365, + "step": 50530 + }, + { + "epoch": 1.951426696011429, + "grad_norm": 0.7483705282211304, + "learning_rate": 6.990746103453159e-05, + "loss": 0.326, + "step": 50540 + }, + { + "epoch": 1.9518128113054558, + "grad_norm": 1.9978541135787964, + "learning_rate": 6.98817200149298e-05, + "loss": 0.2166, + "step": 50550 + }, + { + "epoch": 1.9521989265994826, + "grad_norm": 0.50310218334198, + "learning_rate": 6.9855978995328e-05, + "loss": 0.0868, + "step": 50560 + }, + { + "epoch": 1.9525850418935096, + "grad_norm": 0.4358873963356018, + "learning_rate": 6.983023797572622e-05, + "loss": 0.2483, + "step": 50570 + }, + { + "epoch": 1.952971157187536, + "grad_norm": 1.1612942218780518, + "learning_rate": 6.980449695612443e-05, + "loss": 0.1993, + "step": 50580 + }, + { + "epoch": 1.953357272481563, + "grad_norm": 2.2015364170074463, + "learning_rate": 6.977875593652264e-05, + "loss": 0.3102, + "step": 50590 + }, + { + "epoch": 1.9537433877755896, + "grad_norm": 0.2711980938911438, + "learning_rate": 6.975301491692087e-05, + "loss": 0.1243, + "step": 50600 + }, + { + "epoch": 1.9541295030696166, + "grad_norm": 1.2215690612792969, + "learning_rate": 6.972727389731908e-05, + "loss": 0.1812, + "step": 50610 + }, + { + "epoch": 1.9545156183636434, + "grad_norm": 1.5580382347106934, + "learning_rate": 6.97015328777173e-05, + "loss": 0.2582, + "step": 50620 + }, + { + "epoch": 1.9549017336576702, + "grad_norm": 4.351020336151123, + "learning_rate": 6.96757918581155e-05, + "loss": 0.2451, + "step": 50630 + }, + { + "epoch": 1.955287848951697, + "grad_norm": 0.8245100975036621, + "learning_rate": 6.965005083851371e-05, + "loss": 0.0861, + "step": 50640 + }, + { + "epoch": 1.9556739642457237, + "grad_norm": 3.2745001316070557, + "learning_rate": 6.962430981891192e-05, + "loss": 0.3108, + "step": 50650 + }, + { + "epoch": 1.9560600795397507, + "grad_norm": 1.2234485149383545, + "learning_rate": 6.959856879931015e-05, + "loss": 0.2457, + "step": 50660 + }, + { + "epoch": 
1.9564461948337772, + "grad_norm": 0.8801009058952332, + "learning_rate": 6.957282777970836e-05, + "loss": 0.1758, + "step": 50670 + }, + { + "epoch": 1.9568323101278042, + "grad_norm": 1.0892245769500732, + "learning_rate": 6.954708676010658e-05, + "loss": 0.217, + "step": 50680 + }, + { + "epoch": 1.957218425421831, + "grad_norm": 0.47810041904449463, + "learning_rate": 6.952134574050479e-05, + "loss": 0.1797, + "step": 50690 + }, + { + "epoch": 1.9576045407158578, + "grad_norm": 2.988180160522461, + "learning_rate": 6.9495604720903e-05, + "loss": 0.2899, + "step": 50700 + }, + { + "epoch": 1.9579906560098845, + "grad_norm": 1.7291783094406128, + "learning_rate": 6.94698637013012e-05, + "loss": 0.214, + "step": 50710 + }, + { + "epoch": 1.9583767713039113, + "grad_norm": 1.1876074075698853, + "learning_rate": 6.944412268169943e-05, + "loss": 0.1801, + "step": 50720 + }, + { + "epoch": 1.9587628865979383, + "grad_norm": 1.5710748434066772, + "learning_rate": 6.941838166209764e-05, + "loss": 0.393, + "step": 50730 + }, + { + "epoch": 1.9591490018919648, + "grad_norm": 1.422935128211975, + "learning_rate": 6.939264064249586e-05, + "loss": 0.2637, + "step": 50740 + }, + { + "epoch": 1.9595351171859918, + "grad_norm": 0.26971349120140076, + "learning_rate": 6.936689962289407e-05, + "loss": 0.2616, + "step": 50750 + }, + { + "epoch": 1.9599212324800184, + "grad_norm": 0.02176385000348091, + "learning_rate": 6.934115860329228e-05, + "loss": 0.2458, + "step": 50760 + }, + { + "epoch": 1.9603073477740454, + "grad_norm": 1.587498664855957, + "learning_rate": 6.93154175836905e-05, + "loss": 0.2505, + "step": 50770 + }, + { + "epoch": 1.9606934630680721, + "grad_norm": 0.7178042531013489, + "learning_rate": 6.92896765640887e-05, + "loss": 0.1028, + "step": 50780 + }, + { + "epoch": 1.961079578362099, + "grad_norm": 0.4361552894115448, + "learning_rate": 6.926393554448692e-05, + "loss": 0.1327, + "step": 50790 + }, + { + "epoch": 1.9614656936561257, + "grad_norm": 0.1252552568912506, + "learning_rate": 6.923819452488514e-05, + "loss": 0.2598, + "step": 50800 + }, + { + "epoch": 1.9618518089501524, + "grad_norm": 1.0288604497909546, + "learning_rate": 6.921245350528335e-05, + "loss": 0.1415, + "step": 50810 + }, + { + "epoch": 1.9622379242441794, + "grad_norm": 1.633277416229248, + "learning_rate": 6.918671248568156e-05, + "loss": 0.3346, + "step": 50820 + }, + { + "epoch": 1.962624039538206, + "grad_norm": 1.034558653831482, + "learning_rate": 6.916097146607978e-05, + "loss": 0.3273, + "step": 50830 + }, + { + "epoch": 1.963010154832233, + "grad_norm": 0.4945419132709503, + "learning_rate": 6.913523044647799e-05, + "loss": 0.2027, + "step": 50840 + }, + { + "epoch": 1.9633962701262597, + "grad_norm": 0.49989691376686096, + "learning_rate": 6.91094894268762e-05, + "loss": 0.3082, + "step": 50850 + }, + { + "epoch": 1.9637823854202865, + "grad_norm": 1.8456840515136719, + "learning_rate": 6.908374840727442e-05, + "loss": 0.102, + "step": 50860 + }, + { + "epoch": 1.9641685007143133, + "grad_norm": 1.256460428237915, + "learning_rate": 6.905800738767263e-05, + "loss": 0.1375, + "step": 50870 + }, + { + "epoch": 1.96455461600834, + "grad_norm": 2.0318634510040283, + "learning_rate": 6.903226636807084e-05, + "loss": 0.2662, + "step": 50880 + }, + { + "epoch": 1.964940731302367, + "grad_norm": 0.6381733417510986, + "learning_rate": 6.900652534846906e-05, + "loss": 0.176, + "step": 50890 + }, + { + "epoch": 1.9653268465963936, + "grad_norm": 0.2860821485519409, + "learning_rate": 6.898078432886727e-05, + 
"loss": 0.1307, + "step": 50900 + }, + { + "epoch": 1.9657129618904206, + "grad_norm": 1.2890506982803345, + "learning_rate": 6.895504330926548e-05, + "loss": 0.1305, + "step": 50910 + }, + { + "epoch": 1.9660990771844473, + "grad_norm": 0.13067105412483215, + "learning_rate": 6.89293022896637e-05, + "loss": 0.1777, + "step": 50920 + }, + { + "epoch": 1.966485192478474, + "grad_norm": 0.7632800340652466, + "learning_rate": 6.890356127006191e-05, + "loss": 0.1248, + "step": 50930 + }, + { + "epoch": 1.9668713077725009, + "grad_norm": 0.10640933364629745, + "learning_rate": 6.887782025046012e-05, + "loss": 0.2368, + "step": 50940 + }, + { + "epoch": 1.9672574230665276, + "grad_norm": 0.8060460686683655, + "learning_rate": 6.885207923085834e-05, + "loss": 0.1205, + "step": 50950 + }, + { + "epoch": 1.9676435383605546, + "grad_norm": 0.028001902624964714, + "learning_rate": 6.882633821125655e-05, + "loss": 0.1134, + "step": 50960 + }, + { + "epoch": 1.9680296536545812, + "grad_norm": 1.236852765083313, + "learning_rate": 6.880059719165478e-05, + "loss": 0.3189, + "step": 50970 + }, + { + "epoch": 1.9684157689486081, + "grad_norm": 2.090635299682617, + "learning_rate": 6.877485617205298e-05, + "loss": 0.265, + "step": 50980 + }, + { + "epoch": 1.9688018842426347, + "grad_norm": 1.046667218208313, + "learning_rate": 6.874911515245119e-05, + "loss": 0.2501, + "step": 50990 + }, + { + "epoch": 1.9691879995366617, + "grad_norm": 2.0770325660705566, + "learning_rate": 6.87233741328494e-05, + "loss": 0.2136, + "step": 51000 + }, + { + "epoch": 1.9695741148306884, + "grad_norm": 0.7211881279945374, + "learning_rate": 6.869763311324762e-05, + "loss": 0.2434, + "step": 51010 + }, + { + "epoch": 1.9699602301247152, + "grad_norm": 0.8447550535202026, + "learning_rate": 6.867189209364583e-05, + "loss": 0.3904, + "step": 51020 + }, + { + "epoch": 1.970346345418742, + "grad_norm": 0.2205502986907959, + "learning_rate": 6.864615107404406e-05, + "loss": 0.1804, + "step": 51030 + }, + { + "epoch": 1.9707324607127688, + "grad_norm": 0.5322203636169434, + "learning_rate": 6.862041005444227e-05, + "loss": 0.2106, + "step": 51040 + }, + { + "epoch": 1.9711185760067957, + "grad_norm": 3.1091675758361816, + "learning_rate": 6.859466903484048e-05, + "loss": 0.3203, + "step": 51050 + }, + { + "epoch": 1.9715046913008223, + "grad_norm": 2.670405864715576, + "learning_rate": 6.856892801523868e-05, + "loss": 0.2011, + "step": 51060 + }, + { + "epoch": 1.9718908065948493, + "grad_norm": 1.0524908304214478, + "learning_rate": 6.85431869956369e-05, + "loss": 0.1144, + "step": 51070 + }, + { + "epoch": 1.972276921888876, + "grad_norm": 3.9005608558654785, + "learning_rate": 6.851744597603511e-05, + "loss": 0.3813, + "step": 51080 + }, + { + "epoch": 1.9726630371829028, + "grad_norm": 1.1112456321716309, + "learning_rate": 6.849170495643332e-05, + "loss": 0.1755, + "step": 51090 + }, + { + "epoch": 1.9730491524769296, + "grad_norm": 0.5004397630691528, + "learning_rate": 6.846596393683155e-05, + "loss": 0.17, + "step": 51100 + }, + { + "epoch": 1.9734352677709563, + "grad_norm": 0.7849172353744507, + "learning_rate": 6.844022291722976e-05, + "loss": 0.1669, + "step": 51110 + }, + { + "epoch": 1.9738213830649833, + "grad_norm": 0.7871361970901489, + "learning_rate": 6.841448189762798e-05, + "loss": 0.1466, + "step": 51120 + }, + { + "epoch": 1.9742074983590099, + "grad_norm": 1.1090983152389526, + "learning_rate": 6.838874087802617e-05, + "loss": 0.117, + "step": 51130 + }, + { + "epoch": 1.9745936136530369, + "grad_norm": 
0.8283473253250122, + "learning_rate": 6.836299985842439e-05, + "loss": 0.1792, + "step": 51140 + }, + { + "epoch": 1.9749797289470636, + "grad_norm": 1.0861999988555908, + "learning_rate": 6.83372588388226e-05, + "loss": 0.1357, + "step": 51150 + }, + { + "epoch": 1.9753658442410904, + "grad_norm": 1.0975921154022217, + "learning_rate": 6.831151781922083e-05, + "loss": 0.2618, + "step": 51160 + }, + { + "epoch": 1.9757519595351172, + "grad_norm": 1.579583764076233, + "learning_rate": 6.828577679961904e-05, + "loss": 0.3458, + "step": 51170 + }, + { + "epoch": 1.976138074829144, + "grad_norm": 0.48704493045806885, + "learning_rate": 6.826003578001726e-05, + "loss": 0.1892, + "step": 51180 + }, + { + "epoch": 1.976524190123171, + "grad_norm": 0.4196261465549469, + "learning_rate": 6.823429476041547e-05, + "loss": 0.2072, + "step": 51190 + }, + { + "epoch": 1.9769103054171975, + "grad_norm": 0.5368069410324097, + "learning_rate": 6.820855374081367e-05, + "loss": 0.1758, + "step": 51200 + }, + { + "epoch": 1.9772964207112245, + "grad_norm": 3.1401877403259277, + "learning_rate": 6.818281272121188e-05, + "loss": 0.2463, + "step": 51210 + }, + { + "epoch": 1.977682536005251, + "grad_norm": 2.333087682723999, + "learning_rate": 6.815707170161011e-05, + "loss": 0.1826, + "step": 51220 + }, + { + "epoch": 1.978068651299278, + "grad_norm": 0.4700605869293213, + "learning_rate": 6.813133068200832e-05, + "loss": 0.1761, + "step": 51230 + }, + { + "epoch": 1.9784547665933048, + "grad_norm": 0.23108141124248505, + "learning_rate": 6.810558966240653e-05, + "loss": 0.3299, + "step": 51240 + }, + { + "epoch": 1.9788408818873315, + "grad_norm": 1.1515973806381226, + "learning_rate": 6.807984864280475e-05, + "loss": 0.2352, + "step": 51250 + }, + { + "epoch": 1.9792269971813583, + "grad_norm": 3.1671624183654785, + "learning_rate": 6.805410762320296e-05, + "loss": 0.18, + "step": 51260 + }, + { + "epoch": 1.979613112475385, + "grad_norm": 1.9045623540878296, + "learning_rate": 6.802836660360116e-05, + "loss": 0.1498, + "step": 51270 + }, + { + "epoch": 1.979999227769412, + "grad_norm": 3.6761367321014404, + "learning_rate": 6.800262558399939e-05, + "loss": 0.3242, + "step": 51280 + }, + { + "epoch": 1.9803853430634386, + "grad_norm": 0.8505986332893372, + "learning_rate": 6.79768845643976e-05, + "loss": 0.1563, + "step": 51290 + }, + { + "epoch": 1.9807714583574656, + "grad_norm": 0.5191363096237183, + "learning_rate": 6.795114354479581e-05, + "loss": 0.1699, + "step": 51300 + }, + { + "epoch": 1.9811575736514924, + "grad_norm": 1.0205559730529785, + "learning_rate": 6.792540252519403e-05, + "loss": 0.1785, + "step": 51310 + }, + { + "epoch": 1.9815436889455191, + "grad_norm": 0.929551899433136, + "learning_rate": 6.789966150559224e-05, + "loss": 0.2618, + "step": 51320 + }, + { + "epoch": 1.981929804239546, + "grad_norm": 0.3799718916416168, + "learning_rate": 6.787392048599045e-05, + "loss": 0.2574, + "step": 51330 + }, + { + "epoch": 1.9823159195335727, + "grad_norm": 0.8543326258659363, + "learning_rate": 6.784817946638867e-05, + "loss": 0.2277, + "step": 51340 + }, + { + "epoch": 1.9827020348275997, + "grad_norm": 1.2967779636383057, + "learning_rate": 6.782243844678688e-05, + "loss": 0.1693, + "step": 51350 + }, + { + "epoch": 1.9830881501216262, + "grad_norm": 0.9887800216674805, + "learning_rate": 6.77966974271851e-05, + "loss": 0.151, + "step": 51360 + }, + { + "epoch": 1.9834742654156532, + "grad_norm": 1.197924256324768, + "learning_rate": 6.777095640758331e-05, + "loss": 0.2184, + "step": 51370 
+ }, + { + "epoch": 1.98386038070968, + "grad_norm": 1.3980039358139038, + "learning_rate": 6.774521538798152e-05, + "loss": 0.3999, + "step": 51380 + }, + { + "epoch": 1.9842464960037067, + "grad_norm": 2.041222095489502, + "learning_rate": 6.771947436837973e-05, + "loss": 0.203, + "step": 51390 + }, + { + "epoch": 1.9846326112977335, + "grad_norm": 1.3461644649505615, + "learning_rate": 6.769373334877795e-05, + "loss": 0.2735, + "step": 51400 + }, + { + "epoch": 1.9850187265917603, + "grad_norm": 1.046863079071045, + "learning_rate": 6.766799232917616e-05, + "loss": 0.3281, + "step": 51410 + }, + { + "epoch": 1.9854048418857873, + "grad_norm": 1.3338502645492554, + "learning_rate": 6.764225130957437e-05, + "loss": 0.2538, + "step": 51420 + }, + { + "epoch": 1.9857909571798138, + "grad_norm": 1.3392490148544312, + "learning_rate": 6.761651028997259e-05, + "loss": 0.2737, + "step": 51430 + }, + { + "epoch": 1.9861770724738408, + "grad_norm": 1.5787503719329834, + "learning_rate": 6.75907692703708e-05, + "loss": 0.185, + "step": 51440 + }, + { + "epoch": 1.9865631877678673, + "grad_norm": 0.5758817791938782, + "learning_rate": 6.756502825076901e-05, + "loss": 0.1879, + "step": 51450 + }, + { + "epoch": 1.9869493030618943, + "grad_norm": 1.6144100427627563, + "learning_rate": 6.753928723116723e-05, + "loss": 0.2082, + "step": 51460 + }, + { + "epoch": 1.987335418355921, + "grad_norm": 0.7562010884284973, + "learning_rate": 6.751354621156545e-05, + "loss": 0.1859, + "step": 51470 + }, + { + "epoch": 1.9877215336499479, + "grad_norm": 1.3656364679336548, + "learning_rate": 6.748780519196365e-05, + "loss": 0.1591, + "step": 51480 + }, + { + "epoch": 1.9881076489439746, + "grad_norm": 2.7086987495422363, + "learning_rate": 6.746206417236187e-05, + "loss": 0.2364, + "step": 51490 + }, + { + "epoch": 1.9884937642380014, + "grad_norm": 2.2941534519195557, + "learning_rate": 6.743632315276008e-05, + "loss": 0.2207, + "step": 51500 + }, + { + "epoch": 1.9888798795320284, + "grad_norm": 1.2723060846328735, + "learning_rate": 6.74105821331583e-05, + "loss": 0.2617, + "step": 51510 + }, + { + "epoch": 1.989265994826055, + "grad_norm": 2.308548927307129, + "learning_rate": 6.738484111355651e-05, + "loss": 0.1709, + "step": 51520 + }, + { + "epoch": 1.989652110120082, + "grad_norm": 0.7583028078079224, + "learning_rate": 6.735910009395473e-05, + "loss": 0.2998, + "step": 51530 + }, + { + "epoch": 1.9900382254141087, + "grad_norm": 2.3491339683532715, + "learning_rate": 6.733335907435295e-05, + "loss": 0.209, + "step": 51540 + }, + { + "epoch": 1.9904243407081355, + "grad_norm": 3.154418468475342, + "learning_rate": 6.730761805475115e-05, + "loss": 0.3855, + "step": 51550 + }, + { + "epoch": 1.9908104560021622, + "grad_norm": 0.6829432845115662, + "learning_rate": 6.728187703514936e-05, + "loss": 0.1427, + "step": 51560 + }, + { + "epoch": 1.991196571296189, + "grad_norm": 1.331217646598816, + "learning_rate": 6.725613601554757e-05, + "loss": 0.1843, + "step": 51570 + }, + { + "epoch": 1.991582686590216, + "grad_norm": 2.35197114944458, + "learning_rate": 6.723039499594579e-05, + "loss": 0.2557, + "step": 51580 + }, + { + "epoch": 1.9919688018842425, + "grad_norm": 0.5971415638923645, + "learning_rate": 6.7204653976344e-05, + "loss": 0.1782, + "step": 51590 + }, + { + "epoch": 1.9923549171782695, + "grad_norm": 1.0941762924194336, + "learning_rate": 6.717891295674223e-05, + "loss": 0.1377, + "step": 51600 + }, + { + "epoch": 1.9927410324722963, + "grad_norm": 0.43831324577331543, + "learning_rate": 
6.715317193714044e-05, + "loss": 0.181, + "step": 51610 + }, + { + "epoch": 1.993127147766323, + "grad_norm": 1.4920772314071655, + "learning_rate": 6.712743091753864e-05, + "loss": 0.2724, + "step": 51620 + }, + { + "epoch": 1.9935132630603498, + "grad_norm": 0.6723024845123291, + "learning_rate": 6.710168989793685e-05, + "loss": 0.1732, + "step": 51630 + }, + { + "epoch": 1.9938993783543766, + "grad_norm": 0.925322413444519, + "learning_rate": 6.707594887833507e-05, + "loss": 0.2794, + "step": 51640 + }, + { + "epoch": 1.9942854936484036, + "grad_norm": 0.27710771560668945, + "learning_rate": 6.705020785873328e-05, + "loss": 0.2379, + "step": 51650 + }, + { + "epoch": 1.9946716089424301, + "grad_norm": 0.24498054385185242, + "learning_rate": 6.702446683913151e-05, + "loss": 0.2685, + "step": 51660 + }, + { + "epoch": 1.995057724236457, + "grad_norm": 0.43449532985687256, + "learning_rate": 6.699872581952972e-05, + "loss": 0.2799, + "step": 51670 + }, + { + "epoch": 1.9954438395304837, + "grad_norm": 1.3820387125015259, + "learning_rate": 6.697298479992793e-05, + "loss": 0.3061, + "step": 51680 + }, + { + "epoch": 1.9958299548245106, + "grad_norm": 1.3658883571624756, + "learning_rate": 6.694724378032615e-05, + "loss": 0.2033, + "step": 51690 + }, + { + "epoch": 1.9962160701185374, + "grad_norm": 0.905115008354187, + "learning_rate": 6.692150276072435e-05, + "loss": 0.1817, + "step": 51700 + }, + { + "epoch": 1.9966021854125642, + "grad_norm": 0.6158314347267151, + "learning_rate": 6.689576174112256e-05, + "loss": 0.1853, + "step": 51710 + }, + { + "epoch": 1.996988300706591, + "grad_norm": 0.145170658826828, + "learning_rate": 6.687002072152079e-05, + "loss": 0.1351, + "step": 51720 + }, + { + "epoch": 1.9973744160006177, + "grad_norm": 0.6830449104309082, + "learning_rate": 6.6844279701919e-05, + "loss": 0.1196, + "step": 51730 + }, + { + "epoch": 1.9977605312946447, + "grad_norm": 0.5634799599647522, + "learning_rate": 6.681853868231721e-05, + "loss": 0.1919, + "step": 51740 + }, + { + "epoch": 1.9981466465886712, + "grad_norm": 1.7590057849884033, + "learning_rate": 6.679279766271543e-05, + "loss": 0.5667, + "step": 51750 + }, + { + "epoch": 1.9985327618826982, + "grad_norm": 0.638635516166687, + "learning_rate": 6.676705664311364e-05, + "loss": 0.2653, + "step": 51760 + }, + { + "epoch": 1.998918877176725, + "grad_norm": 2.008990526199341, + "learning_rate": 6.674131562351184e-05, + "loss": 0.3168, + "step": 51770 + }, + { + "epoch": 1.9993049924707518, + "grad_norm": 0.6151747107505798, + "learning_rate": 6.671557460391007e-05, + "loss": 0.2078, + "step": 51780 + }, + { + "epoch": 1.9996911077647785, + "grad_norm": 1.702333688735962, + "learning_rate": 6.668983358430828e-05, + "loss": 0.3154, + "step": 51790 + }, + { + "epoch": 2.0000772230588053, + "grad_norm": 1.1011065244674683, + "learning_rate": 6.66640925647065e-05, + "loss": 0.2318, + "step": 51800 + }, + { + "epoch": 2.0004633383528323, + "grad_norm": 0.7088577747344971, + "learning_rate": 6.663835154510471e-05, + "loss": 0.2106, + "step": 51810 + }, + { + "epoch": 2.000849453646859, + "grad_norm": 0.9032210111618042, + "learning_rate": 6.661261052550292e-05, + "loss": 0.2235, + "step": 51820 + }, + { + "epoch": 2.001235568940886, + "grad_norm": 1.8217551708221436, + "learning_rate": 6.658686950590113e-05, + "loss": 0.2385, + "step": 51830 + }, + { + "epoch": 2.0016216842349124, + "grad_norm": 0.3937009871006012, + "learning_rate": 6.656112848629933e-05, + "loss": 0.0739, + "step": 51840 + }, + { + "epoch": 
2.0020077995289394, + "grad_norm": 0.1690339297056198, + "learning_rate": 6.653538746669756e-05, + "loss": 0.2361, + "step": 51850 + }, + { + "epoch": 2.0023939148229664, + "grad_norm": 0.4467180073261261, + "learning_rate": 6.650964644709577e-05, + "loss": 0.1675, + "step": 51860 + }, + { + "epoch": 2.002780030116993, + "grad_norm": 0.6363991498947144, + "learning_rate": 6.648390542749399e-05, + "loss": 0.1565, + "step": 51870 + }, + { + "epoch": 2.00316614541102, + "grad_norm": 0.6243847012519836, + "learning_rate": 6.64581644078922e-05, + "loss": 0.3038, + "step": 51880 + }, + { + "epoch": 2.0035522607050464, + "grad_norm": 1.705432415008545, + "learning_rate": 6.643242338829041e-05, + "loss": 0.1347, + "step": 51890 + }, + { + "epoch": 2.0039383759990734, + "grad_norm": 0.8589022159576416, + "learning_rate": 6.640668236868863e-05, + "loss": 0.1867, + "step": 51900 + }, + { + "epoch": 2.0043244912931, + "grad_norm": 1.468563437461853, + "learning_rate": 6.638094134908684e-05, + "loss": 0.3603, + "step": 51910 + }, + { + "epoch": 2.004710606587127, + "grad_norm": 2.0505447387695312, + "learning_rate": 6.635520032948505e-05, + "loss": 0.3336, + "step": 51920 + }, + { + "epoch": 2.0050967218811535, + "grad_norm": 0.06167216598987579, + "learning_rate": 6.632945930988327e-05, + "loss": 0.0434, + "step": 51930 + }, + { + "epoch": 2.0054828371751805, + "grad_norm": 0.43672385811805725, + "learning_rate": 6.630371829028148e-05, + "loss": 0.2008, + "step": 51940 + }, + { + "epoch": 2.0058689524692075, + "grad_norm": 0.20339979231357574, + "learning_rate": 6.62779772706797e-05, + "loss": 0.1828, + "step": 51950 + }, + { + "epoch": 2.006255067763234, + "grad_norm": 0.9884247779846191, + "learning_rate": 6.625223625107791e-05, + "loss": 0.1913, + "step": 51960 + }, + { + "epoch": 2.006641183057261, + "grad_norm": 1.9355684518814087, + "learning_rate": 6.622649523147612e-05, + "loss": 0.2164, + "step": 51970 + }, + { + "epoch": 2.0070272983512876, + "grad_norm": 0.6647536754608154, + "learning_rate": 6.620075421187433e-05, + "loss": 0.1993, + "step": 51980 + }, + { + "epoch": 2.0074134136453146, + "grad_norm": 0.5795693397521973, + "learning_rate": 6.617501319227255e-05, + "loss": 0.1887, + "step": 51990 + }, + { + "epoch": 2.007799528939341, + "grad_norm": 1.605940341949463, + "learning_rate": 6.614927217267076e-05, + "loss": 0.247, + "step": 52000 + }, + { + "epoch": 2.008185644233368, + "grad_norm": 1.3359249830245972, + "learning_rate": 6.612353115306897e-05, + "loss": 0.2031, + "step": 52010 + }, + { + "epoch": 2.008571759527395, + "grad_norm": 3.889131784439087, + "learning_rate": 6.609779013346719e-05, + "loss": 0.1584, + "step": 52020 + }, + { + "epoch": 2.0089578748214216, + "grad_norm": 1.9775649309158325, + "learning_rate": 6.607204911386541e-05, + "loss": 0.1041, + "step": 52030 + }, + { + "epoch": 2.0093439901154486, + "grad_norm": 0.13655538856983185, + "learning_rate": 6.604630809426363e-05, + "loss": 0.1621, + "step": 52040 + }, + { + "epoch": 2.009730105409475, + "grad_norm": 0.659899115562439, + "learning_rate": 6.602056707466183e-05, + "loss": 0.1603, + "step": 52050 + }, + { + "epoch": 2.010116220703502, + "grad_norm": 0.5897572040557861, + "learning_rate": 6.599482605506004e-05, + "loss": 0.1807, + "step": 52060 + }, + { + "epoch": 2.0105023359975287, + "grad_norm": 1.9831010103225708, + "learning_rate": 6.596908503545825e-05, + "loss": 0.1517, + "step": 52070 + }, + { + "epoch": 2.0108884512915557, + "grad_norm": 0.16144464910030365, + "learning_rate": 6.594334401585647e-05, + 
"loss": 0.0999, + "step": 52080 + }, + { + "epoch": 2.0112745665855822, + "grad_norm": 1.6258141994476318, + "learning_rate": 6.591760299625468e-05, + "loss": 0.2152, + "step": 52090 + }, + { + "epoch": 2.0116606818796092, + "grad_norm": 1.0857526063919067, + "learning_rate": 6.589186197665291e-05, + "loss": 0.3286, + "step": 52100 + }, + { + "epoch": 2.012046797173636, + "grad_norm": 0.2918669581413269, + "learning_rate": 6.586612095705112e-05, + "loss": 0.0839, + "step": 52110 + }, + { + "epoch": 2.0124329124676628, + "grad_norm": 0.7994667887687683, + "learning_rate": 6.584037993744932e-05, + "loss": 0.0723, + "step": 52120 + }, + { + "epoch": 2.0128190277616897, + "grad_norm": 1.1056885719299316, + "learning_rate": 6.581463891784753e-05, + "loss": 0.2936, + "step": 52130 + }, + { + "epoch": 2.0132051430557163, + "grad_norm": 1.6628743410110474, + "learning_rate": 6.578889789824575e-05, + "loss": 0.1095, + "step": 52140 + }, + { + "epoch": 2.0135912583497433, + "grad_norm": 0.7514179348945618, + "learning_rate": 6.576315687864396e-05, + "loss": 0.2834, + "step": 52150 + }, + { + "epoch": 2.01397737364377, + "grad_norm": 0.17141447961330414, + "learning_rate": 6.573741585904219e-05, + "loss": 0.0834, + "step": 52160 + }, + { + "epoch": 2.014363488937797, + "grad_norm": 1.6377034187316895, + "learning_rate": 6.57116748394404e-05, + "loss": 0.1515, + "step": 52170 + }, + { + "epoch": 2.014749604231824, + "grad_norm": 2.604389190673828, + "learning_rate": 6.568593381983861e-05, + "loss": 0.0964, + "step": 52180 + }, + { + "epoch": 2.0151357195258504, + "grad_norm": 0.737349808216095, + "learning_rate": 6.566019280023681e-05, + "loss": 0.1861, + "step": 52190 + }, + { + "epoch": 2.0155218348198773, + "grad_norm": 0.499905526638031, + "learning_rate": 6.563445178063503e-05, + "loss": 0.2146, + "step": 52200 + }, + { + "epoch": 2.015907950113904, + "grad_norm": 1.0351229906082153, + "learning_rate": 6.560871076103324e-05, + "loss": 0.2461, + "step": 52210 + }, + { + "epoch": 2.016294065407931, + "grad_norm": 1.6009700298309326, + "learning_rate": 6.558296974143147e-05, + "loss": 0.2325, + "step": 52220 + }, + { + "epoch": 2.0166801807019574, + "grad_norm": 1.5281599760055542, + "learning_rate": 6.555722872182968e-05, + "loss": 0.2172, + "step": 52230 + }, + { + "epoch": 2.0170662959959844, + "grad_norm": 0.7039555907249451, + "learning_rate": 6.553148770222789e-05, + "loss": 0.3143, + "step": 52240 + }, + { + "epoch": 2.0174524112900114, + "grad_norm": 1.310943365097046, + "learning_rate": 6.55057466826261e-05, + "loss": 0.1785, + "step": 52250 + }, + { + "epoch": 2.017838526584038, + "grad_norm": 0.6066591143608093, + "learning_rate": 6.548000566302432e-05, + "loss": 0.1295, + "step": 52260 + }, + { + "epoch": 2.018224641878065, + "grad_norm": 2.3664653301239014, + "learning_rate": 6.545426464342252e-05, + "loss": 0.229, + "step": 52270 + }, + { + "epoch": 2.0186107571720915, + "grad_norm": 1.0997484922409058, + "learning_rate": 6.542852362382075e-05, + "loss": 0.1494, + "step": 52280 + }, + { + "epoch": 2.0189968724661185, + "grad_norm": 3.237204074859619, + "learning_rate": 6.540278260421896e-05, + "loss": 0.1952, + "step": 52290 + }, + { + "epoch": 2.019382987760145, + "grad_norm": 1.9585202932357788, + "learning_rate": 6.537704158461717e-05, + "loss": 0.1986, + "step": 52300 + }, + { + "epoch": 2.019769103054172, + "grad_norm": 0.18385589122772217, + "learning_rate": 6.535130056501539e-05, + "loss": 0.1373, + "step": 52310 + }, + { + "epoch": 2.0201552183481986, + "grad_norm": 
1.4465802907943726, + "learning_rate": 6.53255595454136e-05, + "loss": 0.2492, + "step": 52320 + }, + { + "epoch": 2.0205413336422255, + "grad_norm": 0.3525356352329254, + "learning_rate": 6.529981852581181e-05, + "loss": 0.1046, + "step": 52330 + }, + { + "epoch": 2.0209274489362525, + "grad_norm": 0.10468830168247223, + "learning_rate": 6.527407750621003e-05, + "loss": 0.0702, + "step": 52340 + }, + { + "epoch": 2.021313564230279, + "grad_norm": 0.2023550570011139, + "learning_rate": 6.524833648660824e-05, + "loss": 0.2134, + "step": 52350 + }, + { + "epoch": 2.021699679524306, + "grad_norm": 1.8294217586517334, + "learning_rate": 6.522259546700645e-05, + "loss": 0.2443, + "step": 52360 + }, + { + "epoch": 2.0220857948183326, + "grad_norm": 0.3425254225730896, + "learning_rate": 6.519685444740467e-05, + "loss": 0.151, + "step": 52370 + }, + { + "epoch": 2.0224719101123596, + "grad_norm": 0.3099939227104187, + "learning_rate": 6.517111342780288e-05, + "loss": 0.1133, + "step": 52380 + }, + { + "epoch": 2.022858025406386, + "grad_norm": 2.0373382568359375, + "learning_rate": 6.514537240820109e-05, + "loss": 0.1773, + "step": 52390 + }, + { + "epoch": 2.023244140700413, + "grad_norm": 0.5817141532897949, + "learning_rate": 6.51196313885993e-05, + "loss": 0.1276, + "step": 52400 + }, + { + "epoch": 2.02363025599444, + "grad_norm": 2.0622966289520264, + "learning_rate": 6.509389036899752e-05, + "loss": 0.1889, + "step": 52410 + }, + { + "epoch": 2.0240163712884667, + "grad_norm": 1.1906920671463013, + "learning_rate": 6.506814934939573e-05, + "loss": 0.096, + "step": 52420 + }, + { + "epoch": 2.0244024865824937, + "grad_norm": 0.1317962408065796, + "learning_rate": 6.504240832979395e-05, + "loss": 0.2232, + "step": 52430 + }, + { + "epoch": 2.02478860187652, + "grad_norm": 0.13029718399047852, + "learning_rate": 6.501666731019216e-05, + "loss": 0.1511, + "step": 52440 + }, + { + "epoch": 2.025174717170547, + "grad_norm": 0.793836772441864, + "learning_rate": 6.499092629059037e-05, + "loss": 0.1654, + "step": 52450 + }, + { + "epoch": 2.0255608324645737, + "grad_norm": 0.5743208527565002, + "learning_rate": 6.496518527098859e-05, + "loss": 0.2694, + "step": 52460 + }, + { + "epoch": 2.0259469477586007, + "grad_norm": 0.9897276759147644, + "learning_rate": 6.49394442513868e-05, + "loss": 0.2149, + "step": 52470 + }, + { + "epoch": 2.0263330630526277, + "grad_norm": 2.601984977722168, + "learning_rate": 6.491370323178501e-05, + "loss": 0.1285, + "step": 52480 + }, + { + "epoch": 2.0267191783466543, + "grad_norm": 1.3703612089157104, + "learning_rate": 6.488796221218323e-05, + "loss": 0.1226, + "step": 52490 + }, + { + "epoch": 2.0271052936406813, + "grad_norm": 0.8976957201957703, + "learning_rate": 6.486222119258144e-05, + "loss": 0.3069, + "step": 52500 + }, + { + "epoch": 2.027491408934708, + "grad_norm": 1.4867346286773682, + "learning_rate": 6.483648017297965e-05, + "loss": 0.1804, + "step": 52510 + }, + { + "epoch": 2.027877524228735, + "grad_norm": 0.9173004031181335, + "learning_rate": 6.481073915337787e-05, + "loss": 0.193, + "step": 52520 + }, + { + "epoch": 2.0282636395227613, + "grad_norm": 1.4100719690322876, + "learning_rate": 6.478499813377609e-05, + "loss": 0.2443, + "step": 52530 + }, + { + "epoch": 2.0286497548167883, + "grad_norm": 0.08620387315750122, + "learning_rate": 6.475925711417429e-05, + "loss": 0.2858, + "step": 52540 + }, + { + "epoch": 2.029035870110815, + "grad_norm": 0.6817231178283691, + "learning_rate": 6.47335160945725e-05, + "loss": 0.1421, + "step": 52550 + 
}, + { + "epoch": 2.029421985404842, + "grad_norm": 0.7937541604042053, + "learning_rate": 6.470777507497072e-05, + "loss": 0.1856, + "step": 52560 + }, + { + "epoch": 2.029808100698869, + "grad_norm": 0.3391193151473999, + "learning_rate": 6.468203405536893e-05, + "loss": 0.2714, + "step": 52570 + }, + { + "epoch": 2.0301942159928954, + "grad_norm": 0.45617592334747314, + "learning_rate": 6.465629303576715e-05, + "loss": 0.1528, + "step": 52580 + }, + { + "epoch": 2.0305803312869224, + "grad_norm": 0.6412602663040161, + "learning_rate": 6.463055201616537e-05, + "loss": 0.0908, + "step": 52590 + }, + { + "epoch": 2.030966446580949, + "grad_norm": 0.5588594079017639, + "learning_rate": 6.460481099656359e-05, + "loss": 0.1448, + "step": 52600 + }, + { + "epoch": 2.031352561874976, + "grad_norm": 0.5726466774940491, + "learning_rate": 6.457906997696179e-05, + "loss": 0.2771, + "step": 52610 + }, + { + "epoch": 2.0317386771690025, + "grad_norm": 1.625110387802124, + "learning_rate": 6.455332895736e-05, + "loss": 0.1147, + "step": 52620 + }, + { + "epoch": 2.0321247924630295, + "grad_norm": 0.9171527624130249, + "learning_rate": 6.452758793775821e-05, + "loss": 0.0847, + "step": 52630 + }, + { + "epoch": 2.0325109077570565, + "grad_norm": 0.1905253678560257, + "learning_rate": 6.450184691815643e-05, + "loss": 0.2086, + "step": 52640 + }, + { + "epoch": 2.032897023051083, + "grad_norm": 0.2381320744752884, + "learning_rate": 6.447610589855464e-05, + "loss": 0.1767, + "step": 52650 + }, + { + "epoch": 2.03328313834511, + "grad_norm": 2.2482104301452637, + "learning_rate": 6.445036487895287e-05, + "loss": 0.2626, + "step": 52660 + }, + { + "epoch": 2.0336692536391365, + "grad_norm": 2.5901641845703125, + "learning_rate": 6.442462385935108e-05, + "loss": 0.21, + "step": 52670 + }, + { + "epoch": 2.0340553689331635, + "grad_norm": 3.242913007736206, + "learning_rate": 6.439888283974929e-05, + "loss": 0.3508, + "step": 52680 + }, + { + "epoch": 2.03444148422719, + "grad_norm": 0.12099716812372208, + "learning_rate": 6.437314182014749e-05, + "loss": 0.1109, + "step": 52690 + }, + { + "epoch": 2.034827599521217, + "grad_norm": 1.384753704071045, + "learning_rate": 6.43474008005457e-05, + "loss": 0.2023, + "step": 52700 + }, + { + "epoch": 2.035213714815244, + "grad_norm": 1.1507978439331055, + "learning_rate": 6.432165978094392e-05, + "loss": 0.2656, + "step": 52710 + }, + { + "epoch": 2.0355998301092706, + "grad_norm": 0.43186309933662415, + "learning_rate": 6.429591876134215e-05, + "loss": 0.2265, + "step": 52720 + }, + { + "epoch": 2.0359859454032976, + "grad_norm": 1.2791942358016968, + "learning_rate": 6.427017774174036e-05, + "loss": 0.189, + "step": 52730 + }, + { + "epoch": 2.036372060697324, + "grad_norm": 2.613863229751587, + "learning_rate": 6.424443672213857e-05, + "loss": 0.1636, + "step": 52740 + }, + { + "epoch": 2.036758175991351, + "grad_norm": 0.9245595932006836, + "learning_rate": 6.421869570253679e-05, + "loss": 0.2844, + "step": 52750 + }, + { + "epoch": 2.0371442912853777, + "grad_norm": 0.5449417233467102, + "learning_rate": 6.419295468293499e-05, + "loss": 0.24, + "step": 52760 + }, + { + "epoch": 2.0375304065794047, + "grad_norm": 0.24012671411037445, + "learning_rate": 6.41672136633332e-05, + "loss": 0.226, + "step": 52770 + }, + { + "epoch": 2.037916521873431, + "grad_norm": 0.07777285575866699, + "learning_rate": 6.414147264373143e-05, + "loss": 0.1351, + "step": 52780 + }, + { + "epoch": 2.038302637167458, + "grad_norm": 1.4423730373382568, + "learning_rate": 
6.411573162412964e-05, + "loss": 0.1358, + "step": 52790 + }, + { + "epoch": 2.038688752461485, + "grad_norm": 0.9842507243156433, + "learning_rate": 6.408999060452785e-05, + "loss": 0.1947, + "step": 52800 + }, + { + "epoch": 2.0390748677555117, + "grad_norm": 0.23728783428668976, + "learning_rate": 6.406424958492607e-05, + "loss": 0.1655, + "step": 52810 + }, + { + "epoch": 2.0394609830495387, + "grad_norm": 1.0350521802902222, + "learning_rate": 6.403850856532428e-05, + "loss": 0.2269, + "step": 52820 + }, + { + "epoch": 2.0398470983435653, + "grad_norm": 0.7008020877838135, + "learning_rate": 6.401276754572248e-05, + "loss": 0.0694, + "step": 52830 + }, + { + "epoch": 2.0402332136375922, + "grad_norm": 0.2844768464565277, + "learning_rate": 6.39870265261207e-05, + "loss": 0.2871, + "step": 52840 + }, + { + "epoch": 2.040619328931619, + "grad_norm": 0.10209562629461288, + "learning_rate": 6.396128550651892e-05, + "loss": 0.1556, + "step": 52850 + }, + { + "epoch": 2.041005444225646, + "grad_norm": 0.3833054006099701, + "learning_rate": 6.393554448691713e-05, + "loss": 0.2414, + "step": 52860 + }, + { + "epoch": 2.0413915595196728, + "grad_norm": 1.023119330406189, + "learning_rate": 6.390980346731535e-05, + "loss": 0.3514, + "step": 52870 + }, + { + "epoch": 2.0417776748136993, + "grad_norm": 0.2640077769756317, + "learning_rate": 6.388406244771356e-05, + "loss": 0.1651, + "step": 52880 + }, + { + "epoch": 2.0421637901077263, + "grad_norm": 0.5331079363822937, + "learning_rate": 6.385832142811177e-05, + "loss": 0.225, + "step": 52890 + }, + { + "epoch": 2.042549905401753, + "grad_norm": 1.8062942028045654, + "learning_rate": 6.383258040850999e-05, + "loss": 0.145, + "step": 52900 + }, + { + "epoch": 2.04293602069578, + "grad_norm": 0.5096505284309387, + "learning_rate": 6.38068393889082e-05, + "loss": 0.1353, + "step": 52910 + }, + { + "epoch": 2.0433221359898064, + "grad_norm": 0.6749324798583984, + "learning_rate": 6.378109836930641e-05, + "loss": 0.2563, + "step": 52920 + }, + { + "epoch": 2.0437082512838334, + "grad_norm": 2.487567186355591, + "learning_rate": 6.375535734970463e-05, + "loss": 0.1616, + "step": 52930 + }, + { + "epoch": 2.0440943665778604, + "grad_norm": 0.7904801964759827, + "learning_rate": 6.372961633010284e-05, + "loss": 0.1579, + "step": 52940 + }, + { + "epoch": 2.044480481871887, + "grad_norm": 2.078003406524658, + "learning_rate": 6.370387531050105e-05, + "loss": 0.2368, + "step": 52950 + }, + { + "epoch": 2.044866597165914, + "grad_norm": 0.778519332408905, + "learning_rate": 6.367813429089927e-05, + "loss": 0.1509, + "step": 52960 + }, + { + "epoch": 2.0452527124599404, + "grad_norm": 0.919970691204071, + "learning_rate": 6.365239327129748e-05, + "loss": 0.1493, + "step": 52970 + }, + { + "epoch": 2.0456388277539674, + "grad_norm": 0.06400478631258011, + "learning_rate": 6.362665225169569e-05, + "loss": 0.2599, + "step": 52980 + }, + { + "epoch": 2.046024943047994, + "grad_norm": 3.479253053665161, + "learning_rate": 6.36009112320939e-05, + "loss": 0.2582, + "step": 52990 + }, + { + "epoch": 2.046411058342021, + "grad_norm": 0.5447397232055664, + "learning_rate": 6.357517021249212e-05, + "loss": 0.0906, + "step": 53000 + }, + { + "epoch": 2.0467971736360475, + "grad_norm": 1.1518877744674683, + "learning_rate": 6.354942919289033e-05, + "loss": 0.1255, + "step": 53010 + }, + { + "epoch": 2.0471832889300745, + "grad_norm": 1.782228708267212, + "learning_rate": 6.352368817328855e-05, + "loss": 0.4931, + "step": 53020 + }, + { + "epoch": 2.0475694042241015, + 
"grad_norm": 1.1501574516296387, + "learning_rate": 6.349794715368677e-05, + "loss": 0.1242, + "step": 53030 + }, + { + "epoch": 2.047955519518128, + "grad_norm": 2.69543719291687, + "learning_rate": 6.347220613408497e-05, + "loss": 0.2079, + "step": 53040 + }, + { + "epoch": 2.048341634812155, + "grad_norm": 0.6990146040916443, + "learning_rate": 6.344646511448318e-05, + "loss": 0.1033, + "step": 53050 + }, + { + "epoch": 2.0487277501061816, + "grad_norm": 2.3171396255493164, + "learning_rate": 6.34207240948814e-05, + "loss": 0.2182, + "step": 53060 + }, + { + "epoch": 2.0491138654002086, + "grad_norm": 0.11202628165483475, + "learning_rate": 6.339498307527961e-05, + "loss": 0.1862, + "step": 53070 + }, + { + "epoch": 2.049499980694235, + "grad_norm": 0.0545661523938179, + "learning_rate": 6.336924205567782e-05, + "loss": 0.1641, + "step": 53080 + }, + { + "epoch": 2.049886095988262, + "grad_norm": 0.9861188530921936, + "learning_rate": 6.334350103607605e-05, + "loss": 0.1862, + "step": 53090 + }, + { + "epoch": 2.050272211282289, + "grad_norm": 0.6137722730636597, + "learning_rate": 6.331776001647427e-05, + "loss": 0.1213, + "step": 53100 + }, + { + "epoch": 2.0506583265763156, + "grad_norm": 0.3138205409049988, + "learning_rate": 6.329201899687246e-05, + "loss": 0.2589, + "step": 53110 + }, + { + "epoch": 2.0510444418703426, + "grad_norm": 1.4361293315887451, + "learning_rate": 6.326627797727068e-05, + "loss": 0.0854, + "step": 53120 + }, + { + "epoch": 2.051430557164369, + "grad_norm": 0.18099132180213928, + "learning_rate": 6.324053695766889e-05, + "loss": 0.173, + "step": 53130 + }, + { + "epoch": 2.051816672458396, + "grad_norm": 1.630255103111267, + "learning_rate": 6.32147959380671e-05, + "loss": 0.2573, + "step": 53140 + }, + { + "epoch": 2.0522027877524227, + "grad_norm": 1.0745834112167358, + "learning_rate": 6.318905491846532e-05, + "loss": 0.1249, + "step": 53150 + }, + { + "epoch": 2.0525889030464497, + "grad_norm": 0.15289072692394257, + "learning_rate": 6.316331389886354e-05, + "loss": 0.204, + "step": 53160 + }, + { + "epoch": 2.0529750183404767, + "grad_norm": 0.7459616661071777, + "learning_rate": 6.313757287926176e-05, + "loss": 0.1632, + "step": 53170 + }, + { + "epoch": 2.0533611336345032, + "grad_norm": 2.7093374729156494, + "learning_rate": 6.311183185965996e-05, + "loss": 0.1477, + "step": 53180 + }, + { + "epoch": 2.0537472489285302, + "grad_norm": 0.1735665500164032, + "learning_rate": 6.308609084005817e-05, + "loss": 0.1736, + "step": 53190 + }, + { + "epoch": 2.0541333642225568, + "grad_norm": 0.3297293186187744, + "learning_rate": 6.306034982045638e-05, + "loss": 0.0974, + "step": 53200 + }, + { + "epoch": 2.0545194795165838, + "grad_norm": 0.023612016811966896, + "learning_rate": 6.30346088008546e-05, + "loss": 0.1451, + "step": 53210 + }, + { + "epoch": 2.0549055948106103, + "grad_norm": 1.3713831901550293, + "learning_rate": 6.300886778125282e-05, + "loss": 0.1103, + "step": 53220 + }, + { + "epoch": 2.0552917101046373, + "grad_norm": 0.5888402462005615, + "learning_rate": 6.298312676165104e-05, + "loss": 0.1708, + "step": 53230 + }, + { + "epoch": 2.055677825398664, + "grad_norm": 1.8231903314590454, + "learning_rate": 6.295738574204925e-05, + "loss": 0.2337, + "step": 53240 + }, + { + "epoch": 2.056063940692691, + "grad_norm": 0.5738593935966492, + "learning_rate": 6.293164472244746e-05, + "loss": 0.1951, + "step": 53250 + }, + { + "epoch": 2.056450055986718, + "grad_norm": 0.13595734536647797, + "learning_rate": 6.290590370284566e-05, + "loss": 0.1381, 
+ "step": 53260 + }, + { + "epoch": 2.0568361712807444, + "grad_norm": 0.9126518368721008, + "learning_rate": 6.288016268324388e-05, + "loss": 0.1247, + "step": 53270 + }, + { + "epoch": 2.0572222865747714, + "grad_norm": 0.5245015621185303, + "learning_rate": 6.28544216636421e-05, + "loss": 0.2851, + "step": 53280 + }, + { + "epoch": 2.057608401868798, + "grad_norm": 0.08704059571027756, + "learning_rate": 6.282868064404032e-05, + "loss": 0.1425, + "step": 53290 + }, + { + "epoch": 2.057994517162825, + "grad_norm": 0.3915903866291046, + "learning_rate": 6.280293962443853e-05, + "loss": 0.1683, + "step": 53300 + }, + { + "epoch": 2.0583806324568514, + "grad_norm": 1.5422857999801636, + "learning_rate": 6.277719860483674e-05, + "loss": 0.178, + "step": 53310 + }, + { + "epoch": 2.0587667477508784, + "grad_norm": 0.6210331320762634, + "learning_rate": 6.275145758523496e-05, + "loss": 0.1279, + "step": 53320 + }, + { + "epoch": 2.0591528630449054, + "grad_norm": 1.8133548498153687, + "learning_rate": 6.272571656563316e-05, + "loss": 0.1276, + "step": 53330 + }, + { + "epoch": 2.059538978338932, + "grad_norm": 1.8391001224517822, + "learning_rate": 6.269997554603138e-05, + "loss": 0.1533, + "step": 53340 + }, + { + "epoch": 2.059925093632959, + "grad_norm": 0.48838570713996887, + "learning_rate": 6.26742345264296e-05, + "loss": 0.1334, + "step": 53350 + }, + { + "epoch": 2.0603112089269855, + "grad_norm": 1.6755399703979492, + "learning_rate": 6.264849350682781e-05, + "loss": 0.1874, + "step": 53360 + }, + { + "epoch": 2.0606973242210125, + "grad_norm": 0.6416677236557007, + "learning_rate": 6.262275248722602e-05, + "loss": 0.0775, + "step": 53370 + }, + { + "epoch": 2.061083439515039, + "grad_norm": 1.258190393447876, + "learning_rate": 6.259701146762424e-05, + "loss": 0.2183, + "step": 53380 + }, + { + "epoch": 2.061469554809066, + "grad_norm": 1.543530821800232, + "learning_rate": 6.257127044802245e-05, + "loss": 0.2124, + "step": 53390 + }, + { + "epoch": 2.0618556701030926, + "grad_norm": 1.7885076999664307, + "learning_rate": 6.254552942842065e-05, + "loss": 0.1746, + "step": 53400 + }, + { + "epoch": 2.0622417853971196, + "grad_norm": 0.7699170708656311, + "learning_rate": 6.251978840881888e-05, + "loss": 0.0489, + "step": 53410 + }, + { + "epoch": 2.0626279006911465, + "grad_norm": 0.6853735446929932, + "learning_rate": 6.249404738921709e-05, + "loss": 0.0828, + "step": 53420 + }, + { + "epoch": 2.063014015985173, + "grad_norm": 1.250551462173462, + "learning_rate": 6.24683063696153e-05, + "loss": 0.1361, + "step": 53430 + }, + { + "epoch": 2.0634001312792, + "grad_norm": 0.20269331336021423, + "learning_rate": 6.244256535001352e-05, + "loss": 0.1916, + "step": 53440 + }, + { + "epoch": 2.0637862465732266, + "grad_norm": 0.868029773235321, + "learning_rate": 6.241682433041173e-05, + "loss": 0.1665, + "step": 53450 + }, + { + "epoch": 2.0641723618672536, + "grad_norm": 0.5697484016418457, + "learning_rate": 6.239108331080994e-05, + "loss": 0.1873, + "step": 53460 + }, + { + "epoch": 2.06455847716128, + "grad_norm": 2.3145039081573486, + "learning_rate": 6.236534229120816e-05, + "loss": 0.3618, + "step": 53470 + }, + { + "epoch": 2.064944592455307, + "grad_norm": 0.3517427444458008, + "learning_rate": 6.233960127160637e-05, + "loss": 0.1844, + "step": 53480 + }, + { + "epoch": 2.065330707749334, + "grad_norm": 0.6968696117401123, + "learning_rate": 6.231386025200458e-05, + "loss": 0.1229, + "step": 53490 + }, + { + "epoch": 2.0657168230433607, + "grad_norm": 1.4328505992889404, + 
"learning_rate": 6.22881192324028e-05, + "loss": 0.1921, + "step": 53500 + }, + { + "epoch": 2.0661029383373877, + "grad_norm": 0.0647716224193573, + "learning_rate": 6.226237821280101e-05, + "loss": 0.1157, + "step": 53510 + }, + { + "epoch": 2.066489053631414, + "grad_norm": 0.1405775398015976, + "learning_rate": 6.223663719319922e-05, + "loss": 0.1593, + "step": 53520 + }, + { + "epoch": 2.066875168925441, + "grad_norm": 0.8191878795623779, + "learning_rate": 6.221089617359744e-05, + "loss": 0.3237, + "step": 53530 + }, + { + "epoch": 2.0672612842194678, + "grad_norm": 0.16709165275096893, + "learning_rate": 6.218515515399565e-05, + "loss": 0.1098, + "step": 53540 + }, + { + "epoch": 2.0676473995134947, + "grad_norm": 1.2044661045074463, + "learning_rate": 6.215941413439386e-05, + "loss": 0.2014, + "step": 53550 + }, + { + "epoch": 2.0680335148075217, + "grad_norm": 0.7808303236961365, + "learning_rate": 6.213367311479208e-05, + "loss": 0.1122, + "step": 53560 + }, + { + "epoch": 2.0684196301015483, + "grad_norm": 0.8508942723274231, + "learning_rate": 6.210793209519029e-05, + "loss": 0.2162, + "step": 53570 + }, + { + "epoch": 2.0688057453955753, + "grad_norm": 0.1512947827577591, + "learning_rate": 6.20821910755885e-05, + "loss": 0.1382, + "step": 53580 + }, + { + "epoch": 2.069191860689602, + "grad_norm": 0.10546062141656876, + "learning_rate": 6.205645005598673e-05, + "loss": 0.0962, + "step": 53590 + }, + { + "epoch": 2.069577975983629, + "grad_norm": 0.7787978053092957, + "learning_rate": 6.203070903638494e-05, + "loss": 0.2142, + "step": 53600 + }, + { + "epoch": 2.0699640912776553, + "grad_norm": 0.1247776597738266, + "learning_rate": 6.200496801678314e-05, + "loss": 0.1788, + "step": 53610 + }, + { + "epoch": 2.0703502065716823, + "grad_norm": 1.5636411905288696, + "learning_rate": 6.197922699718136e-05, + "loss": 0.2244, + "step": 53620 + }, + { + "epoch": 2.0707363218657093, + "grad_norm": 1.5239825248718262, + "learning_rate": 6.195348597757957e-05, + "loss": 0.152, + "step": 53630 + }, + { + "epoch": 2.071122437159736, + "grad_norm": 0.6676948070526123, + "learning_rate": 6.192774495797778e-05, + "loss": 0.1007, + "step": 53640 + }, + { + "epoch": 2.071508552453763, + "grad_norm": 2.0146231651306152, + "learning_rate": 6.190200393837601e-05, + "loss": 0.1881, + "step": 53650 + }, + { + "epoch": 2.0718946677477894, + "grad_norm": 1.3102779388427734, + "learning_rate": 6.187626291877422e-05, + "loss": 0.2036, + "step": 53660 + }, + { + "epoch": 2.0722807830418164, + "grad_norm": 1.230490803718567, + "learning_rate": 6.185052189917244e-05, + "loss": 0.235, + "step": 53670 + }, + { + "epoch": 2.072666898335843, + "grad_norm": 1.2379542589187622, + "learning_rate": 6.182478087957064e-05, + "loss": 0.2005, + "step": 53680 + }, + { + "epoch": 2.07305301362987, + "grad_norm": 0.9768775701522827, + "learning_rate": 6.179903985996885e-05, + "loss": 0.1885, + "step": 53690 + }, + { + "epoch": 2.0734391289238965, + "grad_norm": 1.2973617315292358, + "learning_rate": 6.177329884036706e-05, + "loss": 0.1218, + "step": 53700 + }, + { + "epoch": 2.0738252442179235, + "grad_norm": 0.6314378380775452, + "learning_rate": 6.174755782076528e-05, + "loss": 0.2194, + "step": 53710 + }, + { + "epoch": 2.0742113595119505, + "grad_norm": 1.7972043752670288, + "learning_rate": 6.17218168011635e-05, + "loss": 0.2668, + "step": 53720 + }, + { + "epoch": 2.074597474805977, + "grad_norm": 1.1307156085968018, + "learning_rate": 6.169607578156172e-05, + "loss": 0.2094, + "step": 53730 + }, + { + "epoch": 
2.074983590100004, + "grad_norm": 0.07750687003135681, + "learning_rate": 6.167033476195993e-05, + "loss": 0.1556, + "step": 53740 + }, + { + "epoch": 2.0753697053940305, + "grad_norm": 0.33502569794654846, + "learning_rate": 6.164459374235813e-05, + "loss": 0.1849, + "step": 53750 + }, + { + "epoch": 2.0757558206880575, + "grad_norm": 0.462332546710968, + "learning_rate": 6.161885272275634e-05, + "loss": 0.1704, + "step": 53760 + }, + { + "epoch": 2.076141935982084, + "grad_norm": 0.7019705772399902, + "learning_rate": 6.159311170315456e-05, + "loss": 0.155, + "step": 53770 + }, + { + "epoch": 2.076528051276111, + "grad_norm": 1.2807412147521973, + "learning_rate": 6.156737068355278e-05, + "loss": 0.2956, + "step": 53780 + }, + { + "epoch": 2.076914166570138, + "grad_norm": 0.9197677373886108, + "learning_rate": 6.1541629663951e-05, + "loss": 0.0723, + "step": 53790 + }, + { + "epoch": 2.0773002818641646, + "grad_norm": 0.15677478909492493, + "learning_rate": 6.151588864434921e-05, + "loss": 0.2835, + "step": 53800 + }, + { + "epoch": 2.0776863971581916, + "grad_norm": 1.7673814296722412, + "learning_rate": 6.149014762474742e-05, + "loss": 0.1127, + "step": 53810 + }, + { + "epoch": 2.078072512452218, + "grad_norm": 1.9662418365478516, + "learning_rate": 6.146440660514562e-05, + "loss": 0.1314, + "step": 53820 + }, + { + "epoch": 2.078458627746245, + "grad_norm": 0.11253755539655685, + "learning_rate": 6.143866558554384e-05, + "loss": 0.2855, + "step": 53830 + }, + { + "epoch": 2.0788447430402717, + "grad_norm": 2.4275155067443848, + "learning_rate": 6.141292456594206e-05, + "loss": 0.2794, + "step": 53840 + }, + { + "epoch": 2.0792308583342987, + "grad_norm": 0.4845966100692749, + "learning_rate": 6.138718354634028e-05, + "loss": 0.1129, + "step": 53850 + }, + { + "epoch": 2.079616973628325, + "grad_norm": 0.2127775400876999, + "learning_rate": 6.136144252673849e-05, + "loss": 0.1089, + "step": 53860 + }, + { + "epoch": 2.080003088922352, + "grad_norm": 1.554506778717041, + "learning_rate": 6.13357015071367e-05, + "loss": 0.1196, + "step": 53870 + }, + { + "epoch": 2.080389204216379, + "grad_norm": 0.08301983028650284, + "learning_rate": 6.130996048753492e-05, + "loss": 0.3858, + "step": 53880 + }, + { + "epoch": 2.0807753195104057, + "grad_norm": 1.0547988414764404, + "learning_rate": 6.128421946793313e-05, + "loss": 0.1348, + "step": 53890 + }, + { + "epoch": 2.0811614348044327, + "grad_norm": 0.3954383432865143, + "learning_rate": 6.125847844833134e-05, + "loss": 0.1347, + "step": 53900 + }, + { + "epoch": 2.0815475500984593, + "grad_norm": 0.25809749960899353, + "learning_rate": 6.123273742872956e-05, + "loss": 0.4701, + "step": 53910 + }, + { + "epoch": 2.0819336653924863, + "grad_norm": 0.9337195754051208, + "learning_rate": 6.120699640912777e-05, + "loss": 0.1335, + "step": 53920 + }, + { + "epoch": 2.082319780686513, + "grad_norm": 0.9139271378517151, + "learning_rate": 6.118125538952598e-05, + "loss": 0.1095, + "step": 53930 + }, + { + "epoch": 2.08270589598054, + "grad_norm": 0.31789037585258484, + "learning_rate": 6.11555143699242e-05, + "loss": 0.1116, + "step": 53940 + }, + { + "epoch": 2.083092011274567, + "grad_norm": 1.4585286378860474, + "learning_rate": 6.112977335032241e-05, + "loss": 0.2283, + "step": 53950 + }, + { + "epoch": 2.0834781265685933, + "grad_norm": 1.2569290399551392, + "learning_rate": 6.110403233072062e-05, + "loss": 0.238, + "step": 53960 + }, + { + "epoch": 2.0838642418626203, + "grad_norm": 0.08168485015630722, + "learning_rate": 
6.107829131111884e-05, + "loss": 0.0965, + "step": 53970 + }, + { + "epoch": 2.084250357156647, + "grad_norm": 0.42119330167770386, + "learning_rate": 6.105255029151705e-05, + "loss": 0.288, + "step": 53980 + }, + { + "epoch": 2.084636472450674, + "grad_norm": 0.04520781710743904, + "learning_rate": 6.102680927191526e-05, + "loss": 0.1616, + "step": 53990 + }, + { + "epoch": 2.0850225877447004, + "grad_norm": 0.9019898176193237, + "learning_rate": 6.100106825231348e-05, + "loss": 0.1313, + "step": 54000 + }, + { + "epoch": 2.0854087030387274, + "grad_norm": 2.3572826385498047, + "learning_rate": 6.097532723271169e-05, + "loss": 0.219, + "step": 54010 + }, + { + "epoch": 2.0857948183327544, + "grad_norm": 0.6147291660308838, + "learning_rate": 6.0949586213109896e-05, + "loss": 0.1005, + "step": 54020 + }, + { + "epoch": 2.086180933626781, + "grad_norm": 0.3989221751689911, + "learning_rate": 6.092384519350812e-05, + "loss": 0.1923, + "step": 54030 + }, + { + "epoch": 2.086567048920808, + "grad_norm": 0.04854296147823334, + "learning_rate": 6.089810417390634e-05, + "loss": 0.3081, + "step": 54040 + }, + { + "epoch": 2.0869531642148345, + "grad_norm": 1.1020113229751587, + "learning_rate": 6.087236315430454e-05, + "loss": 0.1325, + "step": 54050 + }, + { + "epoch": 2.0873392795088614, + "grad_norm": 1.2404685020446777, + "learning_rate": 6.0846622134702756e-05, + "loss": 0.2657, + "step": 54060 + }, + { + "epoch": 2.087725394802888, + "grad_norm": 0.5539906620979309, + "learning_rate": 6.082088111510097e-05, + "loss": 0.1379, + "step": 54070 + }, + { + "epoch": 2.088111510096915, + "grad_norm": 0.550548791885376, + "learning_rate": 6.079514009549918e-05, + "loss": 0.2004, + "step": 54080 + }, + { + "epoch": 2.0884976253909415, + "grad_norm": 0.5012397766113281, + "learning_rate": 6.07693990758974e-05, + "loss": 0.1582, + "step": 54090 + }, + { + "epoch": 2.0888837406849685, + "grad_norm": 0.7319992780685425, + "learning_rate": 6.0743658056295617e-05, + "loss": 0.1923, + "step": 54100 + }, + { + "epoch": 2.0892698559789955, + "grad_norm": 0.05006573721766472, + "learning_rate": 6.071791703669383e-05, + "loss": 0.2457, + "step": 54110 + }, + { + "epoch": 2.089655971273022, + "grad_norm": 0.06044507771730423, + "learning_rate": 6.0692176017092036e-05, + "loss": 0.1328, + "step": 54120 + }, + { + "epoch": 2.090042086567049, + "grad_norm": 1.4530203342437744, + "learning_rate": 6.066643499749025e-05, + "loss": 0.1344, + "step": 54130 + }, + { + "epoch": 2.0904282018610756, + "grad_norm": 1.0805295705795288, + "learning_rate": 6.064069397788846e-05, + "loss": 0.137, + "step": 54140 + }, + { + "epoch": 2.0908143171551026, + "grad_norm": 0.7313231825828552, + "learning_rate": 6.061495295828668e-05, + "loss": 0.1859, + "step": 54150 + }, + { + "epoch": 2.091200432449129, + "grad_norm": 1.4634814262390137, + "learning_rate": 6.0589211938684896e-05, + "loss": 0.1396, + "step": 54160 + }, + { + "epoch": 2.091586547743156, + "grad_norm": 1.1281847953796387, + "learning_rate": 6.056347091908311e-05, + "loss": 0.1935, + "step": 54170 + }, + { + "epoch": 2.091972663037183, + "grad_norm": 0.3002813756465912, + "learning_rate": 6.053772989948132e-05, + "loss": 0.2053, + "step": 54180 + }, + { + "epoch": 2.0923587783312096, + "grad_norm": 1.521639347076416, + "learning_rate": 6.0511988879879536e-05, + "loss": 0.2228, + "step": 54190 + }, + { + "epoch": 2.0927448936252366, + "grad_norm": 1.6338810920715332, + "learning_rate": 6.048624786027774e-05, + "loss": 0.0525, + "step": 54200 + }, + { + "epoch": 
2.093131008919263, + "grad_norm": 1.9877723455429077, + "learning_rate": 6.0460506840675956e-05, + "loss": 0.3764, + "step": 54210 + }, + { + "epoch": 2.09351712421329, + "grad_norm": 1.1026666164398193, + "learning_rate": 6.0434765821074176e-05, + "loss": 0.0845, + "step": 54220 + }, + { + "epoch": 2.0939032395073167, + "grad_norm": 0.17072628438472748, + "learning_rate": 6.040902480147239e-05, + "loss": 0.2619, + "step": 54230 + }, + { + "epoch": 2.0942893548013437, + "grad_norm": 1.0765973329544067, + "learning_rate": 6.03832837818706e-05, + "loss": 0.2227, + "step": 54240 + }, + { + "epoch": 2.0946754700953703, + "grad_norm": 0.10422563552856445, + "learning_rate": 6.0357542762268816e-05, + "loss": 0.1371, + "step": 54250 + }, + { + "epoch": 2.0950615853893972, + "grad_norm": 0.7437000870704651, + "learning_rate": 6.033180174266703e-05, + "loss": 0.0981, + "step": 54260 + }, + { + "epoch": 2.0954477006834242, + "grad_norm": 0.12045181542634964, + "learning_rate": 6.0306060723065236e-05, + "loss": 0.1583, + "step": 54270 + }, + { + "epoch": 2.0958338159774508, + "grad_norm": 0.5264570713043213, + "learning_rate": 6.028031970346346e-05, + "loss": 0.2561, + "step": 54280 + }, + { + "epoch": 2.0962199312714778, + "grad_norm": 0.8104095458984375, + "learning_rate": 6.0254578683861676e-05, + "loss": 0.1338, + "step": 54290 + }, + { + "epoch": 2.0966060465655043, + "grad_norm": 1.8734989166259766, + "learning_rate": 6.022883766425988e-05, + "loss": 0.1911, + "step": 54300 + }, + { + "epoch": 2.0969921618595313, + "grad_norm": 0.09730927646160126, + "learning_rate": 6.0203096644658096e-05, + "loss": 0.2272, + "step": 54310 + }, + { + "epoch": 2.097378277153558, + "grad_norm": 2.5745980739593506, + "learning_rate": 6.017735562505631e-05, + "loss": 0.2252, + "step": 54320 + }, + { + "epoch": 2.097764392447585, + "grad_norm": 0.4371737539768219, + "learning_rate": 6.015161460545452e-05, + "loss": 0.1397, + "step": 54330 + }, + { + "epoch": 2.098150507741612, + "grad_norm": 0.267517626285553, + "learning_rate": 6.012587358585274e-05, + "loss": 0.0745, + "step": 54340 + }, + { + "epoch": 2.0985366230356384, + "grad_norm": 0.8187986016273499, + "learning_rate": 6.0100132566250956e-05, + "loss": 0.2474, + "step": 54350 + }, + { + "epoch": 2.0989227383296654, + "grad_norm": 1.1416966915130615, + "learning_rate": 6.007439154664917e-05, + "loss": 0.1445, + "step": 54360 + }, + { + "epoch": 2.099308853623692, + "grad_norm": 0.4858175814151764, + "learning_rate": 6.0048650527047376e-05, + "loss": 0.1115, + "step": 54370 + }, + { + "epoch": 2.099694968917719, + "grad_norm": 0.17463591694831848, + "learning_rate": 6.002290950744559e-05, + "loss": 0.128, + "step": 54380 + }, + { + "epoch": 2.1000810842117454, + "grad_norm": 1.2394402027130127, + "learning_rate": 5.99971684878438e-05, + "loss": 0.2724, + "step": 54390 + }, + { + "epoch": 2.1004671995057724, + "grad_norm": 0.2638779282569885, + "learning_rate": 5.997142746824202e-05, + "loss": 0.0761, + "step": 54400 + }, + { + "epoch": 2.1008533147997994, + "grad_norm": 0.34836921095848083, + "learning_rate": 5.9945686448640236e-05, + "loss": 0.1303, + "step": 54410 + }, + { + "epoch": 2.101239430093826, + "grad_norm": 1.0886906385421753, + "learning_rate": 5.991994542903845e-05, + "loss": 0.1448, + "step": 54420 + }, + { + "epoch": 2.101625545387853, + "grad_norm": 1.4876662492752075, + "learning_rate": 5.989420440943666e-05, + "loss": 0.1355, + "step": 54430 + }, + { + "epoch": 2.1020116606818795, + "grad_norm": 0.5606863498687744, + "learning_rate": 
5.986846338983487e-05, + "loss": 0.2719, + "step": 54440 + }, + { + "epoch": 2.1023977759759065, + "grad_norm": 2.005791664123535, + "learning_rate": 5.984272237023308e-05, + "loss": 0.2387, + "step": 54450 + }, + { + "epoch": 2.102783891269933, + "grad_norm": 1.1652408838272095, + "learning_rate": 5.9816981350631296e-05, + "loss": 0.2491, + "step": 54460 + }, + { + "epoch": 2.10317000656396, + "grad_norm": 1.9928478002548218, + "learning_rate": 5.9791240331029516e-05, + "loss": 0.2429, + "step": 54470 + }, + { + "epoch": 2.103556121857987, + "grad_norm": 1.4620413780212402, + "learning_rate": 5.976549931142773e-05, + "loss": 0.2706, + "step": 54480 + }, + { + "epoch": 2.1039422371520136, + "grad_norm": 0.40234237909317017, + "learning_rate": 5.973975829182594e-05, + "loss": 0.1181, + "step": 54490 + }, + { + "epoch": 2.1043283524460406, + "grad_norm": 0.2635735273361206, + "learning_rate": 5.9714017272224156e-05, + "loss": 0.1217, + "step": 54500 + }, + { + "epoch": 2.104714467740067, + "grad_norm": 0.5525489449501038, + "learning_rate": 5.968827625262237e-05, + "loss": 0.2949, + "step": 54510 + }, + { + "epoch": 2.105100583034094, + "grad_norm": 1.1089653968811035, + "learning_rate": 5.9662535233020576e-05, + "loss": 0.1418, + "step": 54520 + }, + { + "epoch": 2.1054866983281206, + "grad_norm": 0.21478118002414703, + "learning_rate": 5.96367942134188e-05, + "loss": 0.0913, + "step": 54530 + }, + { + "epoch": 2.1058728136221476, + "grad_norm": 1.0484806299209595, + "learning_rate": 5.9611053193817016e-05, + "loss": 0.1215, + "step": 54540 + }, + { + "epoch": 2.106258928916174, + "grad_norm": 2.487285852432251, + "learning_rate": 5.958531217421522e-05, + "loss": 0.2268, + "step": 54550 + }, + { + "epoch": 2.106645044210201, + "grad_norm": 5.35322904586792, + "learning_rate": 5.9559571154613436e-05, + "loss": 0.0894, + "step": 54560 + }, + { + "epoch": 2.107031159504228, + "grad_norm": 3.2365424633026123, + "learning_rate": 5.953383013501165e-05, + "loss": 0.3387, + "step": 54570 + }, + { + "epoch": 2.1074172747982547, + "grad_norm": 0.9013198614120483, + "learning_rate": 5.950808911540986e-05, + "loss": 0.103, + "step": 54580 + }, + { + "epoch": 2.1078033900922817, + "grad_norm": 0.7987234592437744, + "learning_rate": 5.948234809580808e-05, + "loss": 0.1964, + "step": 54590 + }, + { + "epoch": 2.1081895053863082, + "grad_norm": 0.6315350532531738, + "learning_rate": 5.9456607076206296e-05, + "loss": 0.1523, + "step": 54600 + }, + { + "epoch": 2.108575620680335, + "grad_norm": 2.4431264400482178, + "learning_rate": 5.943086605660451e-05, + "loss": 0.255, + "step": 54610 + }, + { + "epoch": 2.1089617359743618, + "grad_norm": 1.1002070903778076, + "learning_rate": 5.9405125037002715e-05, + "loss": 0.3092, + "step": 54620 + }, + { + "epoch": 2.1093478512683888, + "grad_norm": 1.1607320308685303, + "learning_rate": 5.937938401740093e-05, + "loss": 0.1338, + "step": 54630 + }, + { + "epoch": 2.1097339665624157, + "grad_norm": 1.9413435459136963, + "learning_rate": 5.935364299779914e-05, + "loss": 0.2128, + "step": 54640 + }, + { + "epoch": 2.1101200818564423, + "grad_norm": 1.6216448545455933, + "learning_rate": 5.932790197819736e-05, + "loss": 0.2688, + "step": 54650 + }, + { + "epoch": 2.1105061971504693, + "grad_norm": 0.9825085997581482, + "learning_rate": 5.9302160958595576e-05, + "loss": 0.146, + "step": 54660 + }, + { + "epoch": 2.110892312444496, + "grad_norm": 2.0620877742767334, + "learning_rate": 5.927641993899379e-05, + "loss": 0.1987, + "step": 54670 + }, + { + "epoch": 
2.111278427738523, + "grad_norm": 0.6155973076820374, + "learning_rate": 5.9250678919392e-05, + "loss": 0.0886, + "step": 54680 + }, + { + "epoch": 2.1116645430325494, + "grad_norm": 0.08519631624221802, + "learning_rate": 5.922493789979021e-05, + "loss": 0.1087, + "step": 54690 + }, + { + "epoch": 2.1120506583265763, + "grad_norm": 1.9819930791854858, + "learning_rate": 5.919919688018842e-05, + "loss": 0.2588, + "step": 54700 + }, + { + "epoch": 2.112436773620603, + "grad_norm": 0.32515060901641846, + "learning_rate": 5.9173455860586635e-05, + "loss": 0.2297, + "step": 54710 + }, + { + "epoch": 2.11282288891463, + "grad_norm": 2.5351369380950928, + "learning_rate": 5.9147714840984855e-05, + "loss": 0.1391, + "step": 54720 + }, + { + "epoch": 2.113209004208657, + "grad_norm": 0.3489625155925751, + "learning_rate": 5.912197382138307e-05, + "loss": 0.1012, + "step": 54730 + }, + { + "epoch": 2.1135951195026834, + "grad_norm": 0.4030207693576813, + "learning_rate": 5.909623280178128e-05, + "loss": 0.2788, + "step": 54740 + }, + { + "epoch": 2.1139812347967104, + "grad_norm": 1.3358521461486816, + "learning_rate": 5.9070491782179495e-05, + "loss": 0.4126, + "step": 54750 + }, + { + "epoch": 2.114367350090737, + "grad_norm": 1.6924939155578613, + "learning_rate": 5.90447507625777e-05, + "loss": 0.1319, + "step": 54760 + }, + { + "epoch": 2.114753465384764, + "grad_norm": 0.08663685619831085, + "learning_rate": 5.9019009742975915e-05, + "loss": 0.3066, + "step": 54770 + }, + { + "epoch": 2.1151395806787905, + "grad_norm": 0.2018699198961258, + "learning_rate": 5.899326872337414e-05, + "loss": 0.1316, + "step": 54780 + }, + { + "epoch": 2.1155256959728175, + "grad_norm": 1.8767002820968628, + "learning_rate": 5.896752770377235e-05, + "loss": 0.2638, + "step": 54790 + }, + { + "epoch": 2.1159118112668445, + "grad_norm": 2.719196081161499, + "learning_rate": 5.894178668417056e-05, + "loss": 0.166, + "step": 54800 + }, + { + "epoch": 2.116297926560871, + "grad_norm": 1.5541603565216064, + "learning_rate": 5.8916045664568775e-05, + "loss": 0.1502, + "step": 54810 + }, + { + "epoch": 2.116684041854898, + "grad_norm": 2.0174572467803955, + "learning_rate": 5.889030464496699e-05, + "loss": 0.2778, + "step": 54820 + }, + { + "epoch": 2.1170701571489245, + "grad_norm": 1.4575814008712769, + "learning_rate": 5.88645636253652e-05, + "loss": 0.2026, + "step": 54830 + }, + { + "epoch": 2.1174562724429515, + "grad_norm": 2.583587646484375, + "learning_rate": 5.883882260576342e-05, + "loss": 0.1695, + "step": 54840 + }, + { + "epoch": 2.117842387736978, + "grad_norm": 1.3352335691452026, + "learning_rate": 5.8813081586161635e-05, + "loss": 0.2557, + "step": 54850 + }, + { + "epoch": 2.118228503031005, + "grad_norm": 1.0539675951004028, + "learning_rate": 5.878734056655985e-05, + "loss": 0.2358, + "step": 54860 + }, + { + "epoch": 2.118614618325032, + "grad_norm": 1.6472233533859253, + "learning_rate": 5.8761599546958055e-05, + "loss": 0.1166, + "step": 54870 + }, + { + "epoch": 2.1190007336190586, + "grad_norm": 1.6128703355789185, + "learning_rate": 5.873585852735627e-05, + "loss": 0.1584, + "step": 54880 + }, + { + "epoch": 2.1193868489130856, + "grad_norm": 2.4982826709747314, + "learning_rate": 5.871011750775448e-05, + "loss": 0.2192, + "step": 54890 + }, + { + "epoch": 2.119772964207112, + "grad_norm": 1.7372159957885742, + "learning_rate": 5.86843764881527e-05, + "loss": 0.1824, + "step": 54900 + }, + { + "epoch": 2.120159079501139, + "grad_norm": 1.9102532863616943, + "learning_rate": 
5.8658635468550915e-05, + "loss": 0.1234, + "step": 54910 + }, + { + "epoch": 2.1205451947951657, + "grad_norm": 0.9978908896446228, + "learning_rate": 5.863289444894913e-05, + "loss": 0.2699, + "step": 54920 + }, + { + "epoch": 2.1209313100891927, + "grad_norm": 0.8557146787643433, + "learning_rate": 5.860715342934734e-05, + "loss": 0.0984, + "step": 54930 + }, + { + "epoch": 2.1213174253832197, + "grad_norm": 2.5358450412750244, + "learning_rate": 5.858141240974555e-05, + "loss": 0.2159, + "step": 54940 + }, + { + "epoch": 2.121703540677246, + "grad_norm": 2.588324785232544, + "learning_rate": 5.855567139014376e-05, + "loss": 0.1311, + "step": 54950 + }, + { + "epoch": 2.122089655971273, + "grad_norm": 3.9097461700439453, + "learning_rate": 5.852993037054199e-05, + "loss": 0.2061, + "step": 54960 + }, + { + "epoch": 2.1224757712652997, + "grad_norm": 0.992247998714447, + "learning_rate": 5.8504189350940195e-05, + "loss": 0.0774, + "step": 54970 + }, + { + "epoch": 2.1228618865593267, + "grad_norm": 0.9253148436546326, + "learning_rate": 5.847844833133841e-05, + "loss": 0.1274, + "step": 54980 + }, + { + "epoch": 2.1232480018533533, + "grad_norm": 0.8236201405525208, + "learning_rate": 5.845270731173662e-05, + "loss": 0.0951, + "step": 54990 + }, + { + "epoch": 2.1236341171473803, + "grad_norm": 0.9370753169059753, + "learning_rate": 5.8426966292134835e-05, + "loss": 0.1832, + "step": 55000 + }, + { + "epoch": 2.124020232441407, + "grad_norm": 2.7415149211883545, + "learning_rate": 5.840122527253304e-05, + "loss": 0.1798, + "step": 55010 + }, + { + "epoch": 2.124406347735434, + "grad_norm": 1.4576952457427979, + "learning_rate": 5.8375484252931255e-05, + "loss": 0.2384, + "step": 55020 + }, + { + "epoch": 2.124792463029461, + "grad_norm": 2.303542137145996, + "learning_rate": 5.834974323332948e-05, + "loss": 0.2166, + "step": 55030 + }, + { + "epoch": 2.1251785783234873, + "grad_norm": 1.1065007448196411, + "learning_rate": 5.832400221372769e-05, + "loss": 0.1723, + "step": 55040 + }, + { + "epoch": 2.1255646936175143, + "grad_norm": 4.247042655944824, + "learning_rate": 5.82982611941259e-05, + "loss": 0.3005, + "step": 55050 + }, + { + "epoch": 2.125950808911541, + "grad_norm": 1.3860160112380981, + "learning_rate": 5.8272520174524115e-05, + "loss": 0.2504, + "step": 55060 + }, + { + "epoch": 2.126336924205568, + "grad_norm": 0.2923658788204193, + "learning_rate": 5.824677915492233e-05, + "loss": 0.1933, + "step": 55070 + }, + { + "epoch": 2.1267230394995944, + "grad_norm": 2.0818326473236084, + "learning_rate": 5.822103813532054e-05, + "loss": 0.2383, + "step": 55080 + }, + { + "epoch": 2.1271091547936214, + "grad_norm": 2.5381951332092285, + "learning_rate": 5.819529711571876e-05, + "loss": 0.1791, + "step": 55090 + }, + { + "epoch": 2.127495270087648, + "grad_norm": 0.7398497462272644, + "learning_rate": 5.8169556096116975e-05, + "loss": 0.1255, + "step": 55100 + }, + { + "epoch": 2.127881385381675, + "grad_norm": 1.7903372049331665, + "learning_rate": 5.814381507651518e-05, + "loss": 0.2066, + "step": 55110 + }, + { + "epoch": 2.128267500675702, + "grad_norm": 0.8950181007385254, + "learning_rate": 5.8118074056913395e-05, + "loss": 0.2158, + "step": 55120 + }, + { + "epoch": 2.1286536159697285, + "grad_norm": 0.39259612560272217, + "learning_rate": 5.809233303731161e-05, + "loss": 0.0752, + "step": 55130 + }, + { + "epoch": 2.1290397312637555, + "grad_norm": 1.3042824268341064, + "learning_rate": 5.806659201770982e-05, + "loss": 0.2185, + "step": 55140 + }, + { + "epoch": 
2.129425846557782, + "grad_norm": 2.0668983459472656, + "learning_rate": 5.804085099810804e-05, + "loss": 0.1751, + "step": 55150 + }, + { + "epoch": 2.129811961851809, + "grad_norm": 0.634894609451294, + "learning_rate": 5.8015109978506255e-05, + "loss": 0.1275, + "step": 55160 + }, + { + "epoch": 2.1301980771458355, + "grad_norm": 0.9348855018615723, + "learning_rate": 5.798936895890447e-05, + "loss": 0.2011, + "step": 55170 + }, + { + "epoch": 2.1305841924398625, + "grad_norm": 0.686715841293335, + "learning_rate": 5.796362793930268e-05, + "loss": 0.1728, + "step": 55180 + }, + { + "epoch": 2.1309703077338895, + "grad_norm": 2.238306999206543, + "learning_rate": 5.793788691970089e-05, + "loss": 0.1944, + "step": 55190 + }, + { + "epoch": 2.131356423027916, + "grad_norm": 0.7057651281356812, + "learning_rate": 5.79121459000991e-05, + "loss": 0.1892, + "step": 55200 + }, + { + "epoch": 2.131742538321943, + "grad_norm": 2.050670862197876, + "learning_rate": 5.788640488049733e-05, + "loss": 0.1539, + "step": 55210 + }, + { + "epoch": 2.1321286536159696, + "grad_norm": 1.924126386642456, + "learning_rate": 5.7860663860895534e-05, + "loss": 0.3623, + "step": 55220 + }, + { + "epoch": 2.1325147689099966, + "grad_norm": 0.32087692618370056, + "learning_rate": 5.783492284129375e-05, + "loss": 0.1179, + "step": 55230 + }, + { + "epoch": 2.132900884204023, + "grad_norm": 2.266429901123047, + "learning_rate": 5.780918182169196e-05, + "loss": 0.1164, + "step": 55240 + }, + { + "epoch": 2.13328699949805, + "grad_norm": 0.5455263257026672, + "learning_rate": 5.7783440802090174e-05, + "loss": 0.1361, + "step": 55250 + }, + { + "epoch": 2.133673114792077, + "grad_norm": 0.6196660399436951, + "learning_rate": 5.775769978248838e-05, + "loss": 0.1986, + "step": 55260 + }, + { + "epoch": 2.1340592300861037, + "grad_norm": 0.4529377222061157, + "learning_rate": 5.7731958762886594e-05, + "loss": 0.3009, + "step": 55270 + }, + { + "epoch": 2.1344453453801306, + "grad_norm": 0.44304555654525757, + "learning_rate": 5.770621774328482e-05, + "loss": 0.2522, + "step": 55280 + }, + { + "epoch": 2.134831460674157, + "grad_norm": 2.3878743648529053, + "learning_rate": 5.768047672368303e-05, + "loss": 0.1762, + "step": 55290 + }, + { + "epoch": 2.135217575968184, + "grad_norm": 1.3650730848312378, + "learning_rate": 5.765473570408124e-05, + "loss": 0.1596, + "step": 55300 + }, + { + "epoch": 2.1356036912622107, + "grad_norm": 1.0818227529525757, + "learning_rate": 5.7628994684479454e-05, + "loss": 0.0982, + "step": 55310 + }, + { + "epoch": 2.1359898065562377, + "grad_norm": 0.639480710029602, + "learning_rate": 5.760325366487767e-05, + "loss": 0.1165, + "step": 55320 + }, + { + "epoch": 2.1363759218502647, + "grad_norm": 0.9253720641136169, + "learning_rate": 5.7577512645275874e-05, + "loss": 0.3411, + "step": 55330 + }, + { + "epoch": 2.1367620371442912, + "grad_norm": 1.2035890817642212, + "learning_rate": 5.75517716256741e-05, + "loss": 0.2002, + "step": 55340 + }, + { + "epoch": 2.1371481524383182, + "grad_norm": 1.8806023597717285, + "learning_rate": 5.7526030606072314e-05, + "loss": 0.1834, + "step": 55350 + }, + { + "epoch": 2.137534267732345, + "grad_norm": 1.9944851398468018, + "learning_rate": 5.750028958647052e-05, + "loss": 0.2019, + "step": 55360 + }, + { + "epoch": 2.1379203830263718, + "grad_norm": 0.5148534774780273, + "learning_rate": 5.7474548566868734e-05, + "loss": 0.263, + "step": 55370 + }, + { + "epoch": 2.1383064983203983, + "grad_norm": 1.1325627565383911, + "learning_rate": 
5.744880754726695e-05, + "loss": 0.1479, + "step": 55380 + }, + { + "epoch": 2.1386926136144253, + "grad_norm": 0.9628505706787109, + "learning_rate": 5.742306652766516e-05, + "loss": 0.1216, + "step": 55390 + }, + { + "epoch": 2.1390787289084523, + "grad_norm": 0.1893000602722168, + "learning_rate": 5.739732550806338e-05, + "loss": 0.161, + "step": 55400 + }, + { + "epoch": 2.139464844202479, + "grad_norm": 0.9227676391601562, + "learning_rate": 5.7371584488461594e-05, + "loss": 0.1336, + "step": 55410 + }, + { + "epoch": 2.139850959496506, + "grad_norm": 0.3534090220928192, + "learning_rate": 5.734584346885981e-05, + "loss": 0.1184, + "step": 55420 + }, + { + "epoch": 2.1402370747905324, + "grad_norm": 0.9937344193458557, + "learning_rate": 5.7320102449258014e-05, + "loss": 0.1053, + "step": 55430 + }, + { + "epoch": 2.1406231900845594, + "grad_norm": 1.7365370988845825, + "learning_rate": 5.729436142965623e-05, + "loss": 0.1863, + "step": 55440 + }, + { + "epoch": 2.141009305378586, + "grad_norm": 1.2345154285430908, + "learning_rate": 5.726862041005444e-05, + "loss": 0.3243, + "step": 55450 + }, + { + "epoch": 2.141395420672613, + "grad_norm": 0.36491262912750244, + "learning_rate": 5.724287939045266e-05, + "loss": 0.3046, + "step": 55460 + }, + { + "epoch": 2.1417815359666394, + "grad_norm": 0.6416808366775513, + "learning_rate": 5.7217138370850874e-05, + "loss": 0.1608, + "step": 55470 + }, + { + "epoch": 2.1421676512606664, + "grad_norm": 1.3553400039672852, + "learning_rate": 5.719139735124909e-05, + "loss": 0.2223, + "step": 55480 + }, + { + "epoch": 2.1425537665546934, + "grad_norm": 1.049273133277893, + "learning_rate": 5.71656563316473e-05, + "loss": 0.1196, + "step": 55490 + }, + { + "epoch": 2.14293988184872, + "grad_norm": 0.8493034243583679, + "learning_rate": 5.7139915312045514e-05, + "loss": 0.2165, + "step": 55500 + }, + { + "epoch": 2.143325997142747, + "grad_norm": 0.6411147117614746, + "learning_rate": 5.711417429244372e-05, + "loss": 0.1442, + "step": 55510 + }, + { + "epoch": 2.1437121124367735, + "grad_norm": 0.7366828322410583, + "learning_rate": 5.7088433272841934e-05, + "loss": 0.3625, + "step": 55520 + }, + { + "epoch": 2.1440982277308005, + "grad_norm": 0.7757991552352905, + "learning_rate": 5.706269225324016e-05, + "loss": 0.1393, + "step": 55530 + }, + { + "epoch": 2.144484343024827, + "grad_norm": 1.496213436126709, + "learning_rate": 5.703695123363837e-05, + "loss": 0.2351, + "step": 55540 + }, + { + "epoch": 2.144870458318854, + "grad_norm": 1.1395788192749023, + "learning_rate": 5.701121021403658e-05, + "loss": 0.1933, + "step": 55550 + }, + { + "epoch": 2.1452565736128806, + "grad_norm": 3.914621353149414, + "learning_rate": 5.6985469194434794e-05, + "loss": 0.2532, + "step": 55560 + }, + { + "epoch": 2.1456426889069076, + "grad_norm": 2.1900668144226074, + "learning_rate": 5.695972817483301e-05, + "loss": 0.2192, + "step": 55570 + }, + { + "epoch": 2.1460288042009346, + "grad_norm": 2.3883585929870605, + "learning_rate": 5.6933987155231214e-05, + "loss": 0.2819, + "step": 55580 + }, + { + "epoch": 2.146414919494961, + "grad_norm": 0.28431379795074463, + "learning_rate": 5.690824613562944e-05, + "loss": 0.2381, + "step": 55590 + }, + { + "epoch": 2.146801034788988, + "grad_norm": 0.39490944147109985, + "learning_rate": 5.6882505116027654e-05, + "loss": 0.1224, + "step": 55600 + }, + { + "epoch": 2.1471871500830146, + "grad_norm": 0.5814546346664429, + "learning_rate": 5.685676409642586e-05, + "loss": 0.0878, + "step": 55610 + }, + { + "epoch": 
2.1475732653770416, + "grad_norm": 3.863250255584717, + "learning_rate": 5.6831023076824074e-05, + "loss": 0.2972, + "step": 55620 + }, + { + "epoch": 2.147959380671068, + "grad_norm": 1.5579304695129395, + "learning_rate": 5.680528205722229e-05, + "loss": 0.2035, + "step": 55630 + }, + { + "epoch": 2.148345495965095, + "grad_norm": 1.3953123092651367, + "learning_rate": 5.67795410376205e-05, + "loss": 0.2039, + "step": 55640 + }, + { + "epoch": 2.148731611259122, + "grad_norm": 0.30549386143684387, + "learning_rate": 5.675380001801872e-05, + "loss": 0.1311, + "step": 55650 + }, + { + "epoch": 2.1491177265531487, + "grad_norm": 1.704403281211853, + "learning_rate": 5.6728058998416934e-05, + "loss": 0.1683, + "step": 55660 + }, + { + "epoch": 2.1495038418471757, + "grad_norm": 0.47322070598602295, + "learning_rate": 5.670231797881515e-05, + "loss": 0.0995, + "step": 55670 + }, + { + "epoch": 2.1498899571412022, + "grad_norm": 0.8710082769393921, + "learning_rate": 5.6676576959213354e-05, + "loss": 0.1737, + "step": 55680 + }, + { + "epoch": 2.1502760724352292, + "grad_norm": 3.141096353530884, + "learning_rate": 5.665083593961157e-05, + "loss": 0.1693, + "step": 55690 + }, + { + "epoch": 2.1506621877292558, + "grad_norm": 1.8428922891616821, + "learning_rate": 5.662509492000978e-05, + "loss": 0.0868, + "step": 55700 + }, + { + "epoch": 2.1510483030232828, + "grad_norm": 0.3056959807872772, + "learning_rate": 5.6599353900408e-05, + "loss": 0.2256, + "step": 55710 + }, + { + "epoch": 2.1514344183173097, + "grad_norm": 0.14365683495998383, + "learning_rate": 5.6573612880806214e-05, + "loss": 0.2421, + "step": 55720 + }, + { + "epoch": 2.1518205336113363, + "grad_norm": 0.34138041734695435, + "learning_rate": 5.654787186120443e-05, + "loss": 0.2089, + "step": 55730 + }, + { + "epoch": 2.1522066489053633, + "grad_norm": 1.2336843013763428, + "learning_rate": 5.652213084160264e-05, + "loss": 0.2282, + "step": 55740 + }, + { + "epoch": 2.15259276419939, + "grad_norm": 0.121715247631073, + "learning_rate": 5.6496389822000854e-05, + "loss": 0.341, + "step": 55750 + }, + { + "epoch": 2.152978879493417, + "grad_norm": 1.271396279335022, + "learning_rate": 5.647064880239906e-05, + "loss": 0.346, + "step": 55760 + }, + { + "epoch": 2.1533649947874434, + "grad_norm": 0.39105209708213806, + "learning_rate": 5.6444907782797273e-05, + "loss": 0.2369, + "step": 55770 + }, + { + "epoch": 2.1537511100814704, + "grad_norm": 2.396703004837036, + "learning_rate": 5.6419166763195493e-05, + "loss": 0.1595, + "step": 55780 + }, + { + "epoch": 2.1541372253754973, + "grad_norm": 1.4576066732406616, + "learning_rate": 5.639342574359371e-05, + "loss": 0.1757, + "step": 55790 + }, + { + "epoch": 2.154523340669524, + "grad_norm": 0.5846558213233948, + "learning_rate": 5.636768472399192e-05, + "loss": 0.0961, + "step": 55800 + }, + { + "epoch": 2.154909455963551, + "grad_norm": 0.2891974449157715, + "learning_rate": 5.6341943704390133e-05, + "loss": 0.1381, + "step": 55810 + }, + { + "epoch": 2.1552955712575774, + "grad_norm": 1.4091805219650269, + "learning_rate": 5.631620268478835e-05, + "loss": 0.1934, + "step": 55820 + }, + { + "epoch": 2.1556816865516044, + "grad_norm": 0.5410944223403931, + "learning_rate": 5.629046166518655e-05, + "loss": 0.2414, + "step": 55830 + }, + { + "epoch": 2.156067801845631, + "grad_norm": 0.7418326735496521, + "learning_rate": 5.626472064558478e-05, + "loss": 0.0405, + "step": 55840 + }, + { + "epoch": 2.156453917139658, + "grad_norm": 0.03413806110620499, + "learning_rate": 
5.6238979625982993e-05, + "loss": 0.1131, + "step": 55850 + }, + { + "epoch": 2.1568400324336845, + "grad_norm": 0.11657452583312988, + "learning_rate": 5.62132386063812e-05, + "loss": 0.2259, + "step": 55860 + }, + { + "epoch": 2.1572261477277115, + "grad_norm": 0.9248818755149841, + "learning_rate": 5.618749758677941e-05, + "loss": 0.1091, + "step": 55870 + }, + { + "epoch": 2.1576122630217385, + "grad_norm": 0.19540861248970032, + "learning_rate": 5.6161756567177627e-05, + "loss": 0.1831, + "step": 55880 + }, + { + "epoch": 2.157998378315765, + "grad_norm": 1.0880403518676758, + "learning_rate": 5.613601554757584e-05, + "loss": 0.2051, + "step": 55890 + }, + { + "epoch": 2.158384493609792, + "grad_norm": 0.766243577003479, + "learning_rate": 5.611027452797406e-05, + "loss": 0.123, + "step": 55900 + }, + { + "epoch": 2.1587706089038186, + "grad_norm": 0.7406583428382874, + "learning_rate": 5.608453350837227e-05, + "loss": 0.1137, + "step": 55910 + }, + { + "epoch": 2.1591567241978455, + "grad_norm": 0.5550261735916138, + "learning_rate": 5.605879248877049e-05, + "loss": 0.1978, + "step": 55920 + }, + { + "epoch": 2.159542839491872, + "grad_norm": 1.202231526374817, + "learning_rate": 5.603305146916869e-05, + "loss": 0.2157, + "step": 55930 + }, + { + "epoch": 2.159928954785899, + "grad_norm": 0.7214229702949524, + "learning_rate": 5.6007310449566906e-05, + "loss": 0.3131, + "step": 55940 + }, + { + "epoch": 2.160315070079926, + "grad_norm": 0.6656380295753479, + "learning_rate": 5.598156942996512e-05, + "loss": 0.1293, + "step": 55950 + }, + { + "epoch": 2.1607011853739526, + "grad_norm": 0.15865078568458557, + "learning_rate": 5.595582841036334e-05, + "loss": 0.045, + "step": 55960 + }, + { + "epoch": 2.1610873006679796, + "grad_norm": 0.6749983429908752, + "learning_rate": 5.593008739076155e-05, + "loss": 0.1891, + "step": 55970 + }, + { + "epoch": 2.161473415962006, + "grad_norm": 1.1434985399246216, + "learning_rate": 5.5904346371159766e-05, + "loss": 0.0775, + "step": 55980 + }, + { + "epoch": 2.161859531256033, + "grad_norm": 1.1395485401153564, + "learning_rate": 5.587860535155798e-05, + "loss": 0.1444, + "step": 55990 + }, + { + "epoch": 2.1622456465500597, + "grad_norm": 0.44319289922714233, + "learning_rate": 5.5852864331956186e-05, + "loss": 0.2506, + "step": 56000 + }, + { + "epoch": 2.1626317618440867, + "grad_norm": 0.9017069935798645, + "learning_rate": 5.58271233123544e-05, + "loss": 0.2104, + "step": 56010 + }, + { + "epoch": 2.163017877138113, + "grad_norm": 2.420107126235962, + "learning_rate": 5.580138229275261e-05, + "loss": 0.2541, + "step": 56020 + }, + { + "epoch": 2.16340399243214, + "grad_norm": 0.5543047785758972, + "learning_rate": 5.577564127315083e-05, + "loss": 0.1408, + "step": 56030 + }, + { + "epoch": 2.163790107726167, + "grad_norm": 0.8099603652954102, + "learning_rate": 5.5749900253549046e-05, + "loss": 0.0781, + "step": 56040 + }, + { + "epoch": 2.1641762230201937, + "grad_norm": 0.972820520401001, + "learning_rate": 5.572415923394726e-05, + "loss": 0.0957, + "step": 56050 + }, + { + "epoch": 2.1645623383142207, + "grad_norm": 0.5426781177520752, + "learning_rate": 5.569841821434547e-05, + "loss": 0.1632, + "step": 56060 + }, + { + "epoch": 2.1649484536082473, + "grad_norm": 2.088747501373291, + "learning_rate": 5.5672677194743686e-05, + "loss": 0.143, + "step": 56070 + }, + { + "epoch": 2.1653345689022743, + "grad_norm": 0.3575989007949829, + "learning_rate": 5.564693617514189e-05, + "loss": 0.2365, + "step": 56080 + }, + { + "epoch": 
2.165720684196301, + "grad_norm": 1.2159044742584229, + "learning_rate": 5.562119515554012e-05, + "loss": 0.2034, + "step": 56090 + }, + { + "epoch": 2.166106799490328, + "grad_norm": 1.2994232177734375, + "learning_rate": 5.559545413593833e-05, + "loss": 0.074, + "step": 56100 + }, + { + "epoch": 2.166492914784355, + "grad_norm": 0.2585364878177643, + "learning_rate": 5.556971311633654e-05, + "loss": 0.1147, + "step": 56110 + }, + { + "epoch": 2.1668790300783813, + "grad_norm": 0.18736127018928528, + "learning_rate": 5.554397209673475e-05, + "loss": 0.0895, + "step": 56120 + }, + { + "epoch": 2.1672651453724083, + "grad_norm": 0.43447959423065186, + "learning_rate": 5.5518231077132966e-05, + "loss": 0.1981, + "step": 56130 + }, + { + "epoch": 2.167651260666435, + "grad_norm": 1.2077672481536865, + "learning_rate": 5.549249005753118e-05, + "loss": 0.1432, + "step": 56140 + }, + { + "epoch": 2.168037375960462, + "grad_norm": 1.672919511795044, + "learning_rate": 5.54667490379294e-05, + "loss": 0.1381, + "step": 56150 + }, + { + "epoch": 2.1684234912544884, + "grad_norm": 0.6553566455841064, + "learning_rate": 5.544100801832761e-05, + "loss": 0.1089, + "step": 56160 + }, + { + "epoch": 2.1688096065485154, + "grad_norm": 0.4206780791282654, + "learning_rate": 5.5415266998725826e-05, + "loss": 0.1392, + "step": 56170 + }, + { + "epoch": 2.1691957218425424, + "grad_norm": 1.482874870300293, + "learning_rate": 5.538952597912403e-05, + "loss": 0.2027, + "step": 56180 + }, + { + "epoch": 2.169581837136569, + "grad_norm": 1.802695631980896, + "learning_rate": 5.5363784959522246e-05, + "loss": 0.2954, + "step": 56190 + }, + { + "epoch": 2.169967952430596, + "grad_norm": 0.7268577218055725, + "learning_rate": 5.533804393992046e-05, + "loss": 0.182, + "step": 56200 + }, + { + "epoch": 2.1703540677246225, + "grad_norm": 0.604767918586731, + "learning_rate": 5.531230292031868e-05, + "loss": 0.1247, + "step": 56210 + }, + { + "epoch": 2.1707401830186495, + "grad_norm": 2.211203098297119, + "learning_rate": 5.528656190071689e-05, + "loss": 0.2143, + "step": 56220 + }, + { + "epoch": 2.171126298312676, + "grad_norm": 0.03701888397336006, + "learning_rate": 5.5260820881115106e-05, + "loss": 0.2246, + "step": 56230 + }, + { + "epoch": 2.171512413606703, + "grad_norm": 3.4111924171447754, + "learning_rate": 5.523507986151332e-05, + "loss": 0.3147, + "step": 56240 + }, + { + "epoch": 2.17189852890073, + "grad_norm": 0.509873628616333, + "learning_rate": 5.5209338841911526e-05, + "loss": 0.1482, + "step": 56250 + }, + { + "epoch": 2.1722846441947565, + "grad_norm": 1.0144810676574707, + "learning_rate": 5.518359782230974e-05, + "loss": 0.161, + "step": 56260 + }, + { + "epoch": 2.1726707594887835, + "grad_norm": 1.7236958742141724, + "learning_rate": 5.5157856802707966e-05, + "loss": 0.2863, + "step": 56270 + }, + { + "epoch": 2.17305687478281, + "grad_norm": 2.028493881225586, + "learning_rate": 5.513211578310617e-05, + "loss": 0.1263, + "step": 56280 + }, + { + "epoch": 2.173442990076837, + "grad_norm": 0.18114915490150452, + "learning_rate": 5.5106374763504386e-05, + "loss": 0.1941, + "step": 56290 + }, + { + "epoch": 2.1738291053708636, + "grad_norm": 2.091604471206665, + "learning_rate": 5.50806337439026e-05, + "loss": 0.1418, + "step": 56300 + }, + { + "epoch": 2.1742152206648906, + "grad_norm": 3.5671277046203613, + "learning_rate": 5.505489272430081e-05, + "loss": 0.1645, + "step": 56310 + }, + { + "epoch": 2.174601335958917, + "grad_norm": 2.093780040740967, + "learning_rate": 5.502915170469902e-05, + 
"loss": 0.2723, + "step": 56320 + }, + { + "epoch": 2.174987451252944, + "grad_norm": 1.0060350894927979, + "learning_rate": 5.500341068509723e-05, + "loss": 0.1432, + "step": 56330 + }, + { + "epoch": 2.175373566546971, + "grad_norm": 1.280118465423584, + "learning_rate": 5.497766966549546e-05, + "loss": 0.2152, + "step": 56340 + }, + { + "epoch": 2.1757596818409977, + "grad_norm": 0.14088940620422363, + "learning_rate": 5.4951928645893666e-05, + "loss": 0.2147, + "step": 56350 + }, + { + "epoch": 2.1761457971350247, + "grad_norm": 1.0671783685684204, + "learning_rate": 5.492618762629188e-05, + "loss": 0.1269, + "step": 56360 + }, + { + "epoch": 2.176531912429051, + "grad_norm": 0.20585323870182037, + "learning_rate": 5.490044660669009e-05, + "loss": 0.1455, + "step": 56370 + }, + { + "epoch": 2.176918027723078, + "grad_norm": 1.8759623765945435, + "learning_rate": 5.4874705587088306e-05, + "loss": 0.3711, + "step": 56380 + }, + { + "epoch": 2.1773041430171047, + "grad_norm": 1.1874949932098389, + "learning_rate": 5.484896456748652e-05, + "loss": 0.1062, + "step": 56390 + }, + { + "epoch": 2.1776902583111317, + "grad_norm": 1.0083370208740234, + "learning_rate": 5.482322354788474e-05, + "loss": 0.1121, + "step": 56400 + }, + { + "epoch": 2.1780763736051583, + "grad_norm": 0.7510607838630676, + "learning_rate": 5.479748252828295e-05, + "loss": 0.2032, + "step": 56410 + }, + { + "epoch": 2.1784624888991853, + "grad_norm": 0.307444304227829, + "learning_rate": 5.4771741508681166e-05, + "loss": 0.1786, + "step": 56420 + }, + { + "epoch": 2.1788486041932122, + "grad_norm": 0.1072758212685585, + "learning_rate": 5.474600048907937e-05, + "loss": 0.3205, + "step": 56430 + }, + { + "epoch": 2.179234719487239, + "grad_norm": 0.78147292137146, + "learning_rate": 5.4720259469477586e-05, + "loss": 0.1552, + "step": 56440 + }, + { + "epoch": 2.179620834781266, + "grad_norm": 0.4287649393081665, + "learning_rate": 5.46945184498758e-05, + "loss": 0.0457, + "step": 56450 + }, + { + "epoch": 2.1800069500752923, + "grad_norm": 2.8946595191955566, + "learning_rate": 5.466877743027402e-05, + "loss": 0.1814, + "step": 56460 + }, + { + "epoch": 2.1803930653693193, + "grad_norm": 0.446044385433197, + "learning_rate": 5.464303641067223e-05, + "loss": 0.1898, + "step": 56470 + }, + { + "epoch": 2.180779180663346, + "grad_norm": 2.351010799407959, + "learning_rate": 5.4617295391070446e-05, + "loss": 0.1929, + "step": 56480 + }, + { + "epoch": 2.181165295957373, + "grad_norm": 1.1475882530212402, + "learning_rate": 5.459155437146866e-05, + "loss": 0.0972, + "step": 56490 + }, + { + "epoch": 2.1815514112514, + "grad_norm": 1.1613543033599854, + "learning_rate": 5.4565813351866865e-05, + "loss": 0.1397, + "step": 56500 + }, + { + "epoch": 2.1819375265454264, + "grad_norm": 1.2021968364715576, + "learning_rate": 5.454007233226508e-05, + "loss": 0.2538, + "step": 56510 + }, + { + "epoch": 2.1823236418394534, + "grad_norm": 1.1156634092330933, + "learning_rate": 5.4514331312663306e-05, + "loss": 0.1325, + "step": 56520 + }, + { + "epoch": 2.18270975713348, + "grad_norm": 3.0149824619293213, + "learning_rate": 5.448859029306151e-05, + "loss": 0.1596, + "step": 56530 + }, + { + "epoch": 2.183095872427507, + "grad_norm": 2.628236770629883, + "learning_rate": 5.4462849273459725e-05, + "loss": 0.3042, + "step": 56540 + }, + { + "epoch": 2.1834819877215335, + "grad_norm": 3.887352705001831, + "learning_rate": 5.443710825385794e-05, + "loss": 0.2112, + "step": 56550 + }, + { + "epoch": 2.1838681030155604, + "grad_norm": 
1.9219342470169067, + "learning_rate": 5.441136723425615e-05, + "loss": 0.1709, + "step": 56560 + }, + { + "epoch": 2.1842542183095874, + "grad_norm": 1.5730615854263306, + "learning_rate": 5.438562621465436e-05, + "loss": 0.184, + "step": 56570 + }, + { + "epoch": 2.184640333603614, + "grad_norm": 1.306178331375122, + "learning_rate": 5.435988519505257e-05, + "loss": 0.2147, + "step": 56580 + }, + { + "epoch": 2.185026448897641, + "grad_norm": 0.3093883693218231, + "learning_rate": 5.43341441754508e-05, + "loss": 0.189, + "step": 56590 + }, + { + "epoch": 2.1854125641916675, + "grad_norm": 1.672884225845337, + "learning_rate": 5.4308403155849005e-05, + "loss": 0.1502, + "step": 56600 + }, + { + "epoch": 2.1857986794856945, + "grad_norm": 0.44754695892333984, + "learning_rate": 5.428266213624722e-05, + "loss": 0.1941, + "step": 56610 + }, + { + "epoch": 2.186184794779721, + "grad_norm": 0.3943333625793457, + "learning_rate": 5.425692111664543e-05, + "loss": 0.1494, + "step": 56620 + }, + { + "epoch": 2.186570910073748, + "grad_norm": 1.8399711847305298, + "learning_rate": 5.4231180097043645e-05, + "loss": 0.2462, + "step": 56630 + }, + { + "epoch": 2.186957025367775, + "grad_norm": 0.7934846878051758, + "learning_rate": 5.420543907744186e-05, + "loss": 0.1305, + "step": 56640 + }, + { + "epoch": 2.1873431406618016, + "grad_norm": 3.170630693435669, + "learning_rate": 5.417969805784008e-05, + "loss": 0.1825, + "step": 56650 + }, + { + "epoch": 2.1877292559558286, + "grad_norm": 1.4730361700057983, + "learning_rate": 5.415395703823829e-05, + "loss": 0.1929, + "step": 56660 + }, + { + "epoch": 2.188115371249855, + "grad_norm": 0.38193902373313904, + "learning_rate": 5.41282160186365e-05, + "loss": 0.2001, + "step": 56670 + }, + { + "epoch": 2.188501486543882, + "grad_norm": 1.1314163208007812, + "learning_rate": 5.410247499903471e-05, + "loss": 0.1913, + "step": 56680 + }, + { + "epoch": 2.1888876018379086, + "grad_norm": 0.40177929401397705, + "learning_rate": 5.4076733979432925e-05, + "loss": 0.1646, + "step": 56690 + }, + { + "epoch": 2.1892737171319356, + "grad_norm": 4.792402744293213, + "learning_rate": 5.405099295983114e-05, + "loss": 0.2481, + "step": 56700 + }, + { + "epoch": 2.1896598324259626, + "grad_norm": 2.82281756401062, + "learning_rate": 5.402525194022936e-05, + "loss": 0.142, + "step": 56710 + }, + { + "epoch": 2.190045947719989, + "grad_norm": 2.4064247608184814, + "learning_rate": 5.399951092062757e-05, + "loss": 0.2333, + "step": 56720 + }, + { + "epoch": 2.190432063014016, + "grad_norm": 1.3720029592514038, + "learning_rate": 5.3973769901025785e-05, + "loss": 0.1812, + "step": 56730 + }, + { + "epoch": 2.1908181783080427, + "grad_norm": 0.5120772123336792, + "learning_rate": 5.3948028881424e-05, + "loss": 0.2093, + "step": 56740 + }, + { + "epoch": 2.1912042936020697, + "grad_norm": 1.755660057067871, + "learning_rate": 5.3922287861822205e-05, + "loss": 0.1866, + "step": 56750 + }, + { + "epoch": 2.1915904088960962, + "grad_norm": 0.6418548226356506, + "learning_rate": 5.389654684222042e-05, + "loss": 0.1122, + "step": 56760 + }, + { + "epoch": 2.1919765241901232, + "grad_norm": 0.29100701212882996, + "learning_rate": 5.3870805822618645e-05, + "loss": 0.1508, + "step": 56770 + }, + { + "epoch": 2.1923626394841498, + "grad_norm": 1.2336047887802124, + "learning_rate": 5.384506480301685e-05, + "loss": 0.2027, + "step": 56780 + }, + { + "epoch": 2.1927487547781768, + "grad_norm": 1.7961387634277344, + "learning_rate": 5.3819323783415065e-05, + "loss": 0.0698, + "step": 
56790 + }, + { + "epoch": 2.1931348700722038, + "grad_norm": 0.45203906297683716, + "learning_rate": 5.379358276381328e-05, + "loss": 0.1201, + "step": 56800 + }, + { + "epoch": 2.1935209853662303, + "grad_norm": 2.4944546222686768, + "learning_rate": 5.376784174421149e-05, + "loss": 0.252, + "step": 56810 + }, + { + "epoch": 2.1939071006602573, + "grad_norm": 0.6468565464019775, + "learning_rate": 5.37421007246097e-05, + "loss": 0.3302, + "step": 56820 + }, + { + "epoch": 2.194293215954284, + "grad_norm": 0.6524060368537903, + "learning_rate": 5.371635970500791e-05, + "loss": 0.1588, + "step": 56830 + }, + { + "epoch": 2.194679331248311, + "grad_norm": 1.2810111045837402, + "learning_rate": 5.369061868540614e-05, + "loss": 0.2929, + "step": 56840 + }, + { + "epoch": 2.1950654465423374, + "grad_norm": 1.5758986473083496, + "learning_rate": 5.3664877665804345e-05, + "loss": 0.2013, + "step": 56850 + }, + { + "epoch": 2.1954515618363644, + "grad_norm": 0.8895549774169922, + "learning_rate": 5.363913664620256e-05, + "loss": 0.1539, + "step": 56860 + }, + { + "epoch": 2.195837677130391, + "grad_norm": 0.0427737757563591, + "learning_rate": 5.361339562660077e-05, + "loss": 0.0949, + "step": 56870 + }, + { + "epoch": 2.196223792424418, + "grad_norm": 0.9843714237213135, + "learning_rate": 5.3587654606998985e-05, + "loss": 0.1784, + "step": 56880 + }, + { + "epoch": 2.196609907718445, + "grad_norm": 0.9936504364013672, + "learning_rate": 5.356191358739719e-05, + "loss": 0.1075, + "step": 56890 + }, + { + "epoch": 2.1969960230124714, + "grad_norm": 0.3362007737159729, + "learning_rate": 5.353617256779542e-05, + "loss": 0.1496, + "step": 56900 + }, + { + "epoch": 2.1973821383064984, + "grad_norm": 0.34201017022132874, + "learning_rate": 5.351043154819363e-05, + "loss": 0.1036, + "step": 56910 + }, + { + "epoch": 2.197768253600525, + "grad_norm": 1.8447175025939941, + "learning_rate": 5.348469052859184e-05, + "loss": 0.1161, + "step": 56920 + }, + { + "epoch": 2.198154368894552, + "grad_norm": 1.2634321451187134, + "learning_rate": 5.345894950899005e-05, + "loss": 0.1302, + "step": 56930 + }, + { + "epoch": 2.1985404841885785, + "grad_norm": 1.3948713541030884, + "learning_rate": 5.3433208489388265e-05, + "loss": 0.2252, + "step": 56940 + }, + { + "epoch": 2.1989265994826055, + "grad_norm": 0.7020501494407654, + "learning_rate": 5.340746746978648e-05, + "loss": 0.1938, + "step": 56950 + }, + { + "epoch": 2.1993127147766325, + "grad_norm": 4.047187805175781, + "learning_rate": 5.33817264501847e-05, + "loss": 0.2477, + "step": 56960 + }, + { + "epoch": 2.199698830070659, + "grad_norm": 0.5025122761726379, + "learning_rate": 5.335598543058291e-05, + "loss": 0.1044, + "step": 56970 + }, + { + "epoch": 2.200084945364686, + "grad_norm": 1.8918673992156982, + "learning_rate": 5.3330244410981125e-05, + "loss": 0.3172, + "step": 56980 + }, + { + "epoch": 2.2004710606587126, + "grad_norm": 3.515730857849121, + "learning_rate": 5.330450339137933e-05, + "loss": 0.0818, + "step": 56990 + }, + { + "epoch": 2.2008571759527396, + "grad_norm": 2.531858444213867, + "learning_rate": 5.3278762371777545e-05, + "loss": 0.2849, + "step": 57000 + }, + { + "epoch": 2.201243291246766, + "grad_norm": 3.145490884780884, + "learning_rate": 5.325302135217576e-05, + "loss": 0.2319, + "step": 57010 + }, + { + "epoch": 2.201629406540793, + "grad_norm": 1.8957561254501343, + "learning_rate": 5.322728033257398e-05, + "loss": 0.2513, + "step": 57020 + }, + { + "epoch": 2.20201552183482, + "grad_norm": 1.326156497001648, + 
"learning_rate": 5.320153931297219e-05, + "loss": 0.295, + "step": 57030 + }, + { + "epoch": 2.2024016371288466, + "grad_norm": 0.8725142478942871, + "learning_rate": 5.3175798293370405e-05, + "loss": 0.1235, + "step": 57040 + }, + { + "epoch": 2.2027877524228736, + "grad_norm": 0.8360647559165955, + "learning_rate": 5.315005727376862e-05, + "loss": 0.224, + "step": 57050 + }, + { + "epoch": 2.2031738677169, + "grad_norm": 1.381373643875122, + "learning_rate": 5.312431625416683e-05, + "loss": 0.1003, + "step": 57060 + }, + { + "epoch": 2.203559983010927, + "grad_norm": 2.6999964714050293, + "learning_rate": 5.309857523456504e-05, + "loss": 0.2118, + "step": 57070 + }, + { + "epoch": 2.2039460983049537, + "grad_norm": 2.1584982872009277, + "learning_rate": 5.307283421496325e-05, + "loss": 0.2965, + "step": 57080 + }, + { + "epoch": 2.2043322135989807, + "grad_norm": 1.3775367736816406, + "learning_rate": 5.304709319536148e-05, + "loss": 0.2494, + "step": 57090 + }, + { + "epoch": 2.2047183288930077, + "grad_norm": 2.1069607734680176, + "learning_rate": 5.3021352175759684e-05, + "loss": 0.3412, + "step": 57100 + }, + { + "epoch": 2.205104444187034, + "grad_norm": 1.3556911945343018, + "learning_rate": 5.29956111561579e-05, + "loss": 0.2076, + "step": 57110 + }, + { + "epoch": 2.205490559481061, + "grad_norm": 0.4328407049179077, + "learning_rate": 5.296987013655611e-05, + "loss": 0.0691, + "step": 57120 + }, + { + "epoch": 2.2058766747750878, + "grad_norm": 0.24479885399341583, + "learning_rate": 5.2944129116954324e-05, + "loss": 0.3407, + "step": 57130 + }, + { + "epoch": 2.2062627900691147, + "grad_norm": 0.4531087279319763, + "learning_rate": 5.291838809735253e-05, + "loss": 0.1471, + "step": 57140 + }, + { + "epoch": 2.2066489053631413, + "grad_norm": 1.258487582206726, + "learning_rate": 5.289264707775076e-05, + "loss": 0.2213, + "step": 57150 + }, + { + "epoch": 2.2070350206571683, + "grad_norm": 1.8605122566223145, + "learning_rate": 5.286690605814897e-05, + "loss": 0.1839, + "step": 57160 + }, + { + "epoch": 2.207421135951195, + "grad_norm": 0.20423386991024017, + "learning_rate": 5.284116503854718e-05, + "loss": 0.1898, + "step": 57170 + }, + { + "epoch": 2.207807251245222, + "grad_norm": 1.366576910018921, + "learning_rate": 5.281542401894539e-05, + "loss": 0.1533, + "step": 57180 + }, + { + "epoch": 2.208193366539249, + "grad_norm": 2.0091841220855713, + "learning_rate": 5.2789682999343604e-05, + "loss": 0.2834, + "step": 57190 + }, + { + "epoch": 2.2085794818332753, + "grad_norm": 1.271532654762268, + "learning_rate": 5.276394197974182e-05, + "loss": 0.2215, + "step": 57200 + }, + { + "epoch": 2.2089655971273023, + "grad_norm": 1.3751137256622314, + "learning_rate": 5.273820096014004e-05, + "loss": 0.1753, + "step": 57210 + }, + { + "epoch": 2.209351712421329, + "grad_norm": 1.6233354806900024, + "learning_rate": 5.271245994053825e-05, + "loss": 0.425, + "step": 57220 + }, + { + "epoch": 2.209737827715356, + "grad_norm": 0.05391040816903114, + "learning_rate": 5.2686718920936464e-05, + "loss": 0.1759, + "step": 57230 + }, + { + "epoch": 2.2101239430093824, + "grad_norm": 0.0809585228562355, + "learning_rate": 5.266097790133467e-05, + "loss": 0.1902, + "step": 57240 + }, + { + "epoch": 2.2105100583034094, + "grad_norm": 0.05576219782233238, + "learning_rate": 5.2635236881732884e-05, + "loss": 0.1104, + "step": 57250 + }, + { + "epoch": 2.2108961735974364, + "grad_norm": 0.2295994609594345, + "learning_rate": 5.26094958621311e-05, + "loss": 0.1119, + "step": 57260 + }, + { + 
"epoch": 2.211282288891463, + "grad_norm": 2.042689085006714, + "learning_rate": 5.258375484252932e-05, + "loss": 0.1703, + "step": 57270 + }, + { + "epoch": 2.21166840418549, + "grad_norm": 0.13086611032485962, + "learning_rate": 5.255801382292753e-05, + "loss": 0.2181, + "step": 57280 + }, + { + "epoch": 2.2120545194795165, + "grad_norm": 0.7772855758666992, + "learning_rate": 5.2532272803325744e-05, + "loss": 0.2975, + "step": 57290 + }, + { + "epoch": 2.2124406347735435, + "grad_norm": 1.100147008895874, + "learning_rate": 5.250653178372396e-05, + "loss": 0.1446, + "step": 57300 + }, + { + "epoch": 2.21282675006757, + "grad_norm": 2.2804689407348633, + "learning_rate": 5.248079076412217e-05, + "loss": 0.1779, + "step": 57310 + }, + { + "epoch": 2.213212865361597, + "grad_norm": 0.909376323223114, + "learning_rate": 5.245504974452038e-05, + "loss": 0.1557, + "step": 57320 + }, + { + "epoch": 2.2135989806556235, + "grad_norm": 0.808570921421051, + "learning_rate": 5.242930872491859e-05, + "loss": 0.2107, + "step": 57330 + }, + { + "epoch": 2.2139850959496505, + "grad_norm": 1.6458532810211182, + "learning_rate": 5.240356770531681e-05, + "loss": 0.1729, + "step": 57340 + }, + { + "epoch": 2.2143712112436775, + "grad_norm": 1.928828477859497, + "learning_rate": 5.2377826685715024e-05, + "loss": 0.1234, + "step": 57350 + }, + { + "epoch": 2.214757326537704, + "grad_norm": 1.076491117477417, + "learning_rate": 5.235208566611324e-05, + "loss": 0.2457, + "step": 57360 + }, + { + "epoch": 2.215143441831731, + "grad_norm": 3.043621778488159, + "learning_rate": 5.232634464651145e-05, + "loss": 0.2592, + "step": 57370 + }, + { + "epoch": 2.2155295571257576, + "grad_norm": 0.1770264357328415, + "learning_rate": 5.2300603626909664e-05, + "loss": 0.1214, + "step": 57380 + }, + { + "epoch": 2.2159156724197846, + "grad_norm": 0.12781330943107605, + "learning_rate": 5.227486260730787e-05, + "loss": 0.2213, + "step": 57390 + }, + { + "epoch": 2.216301787713811, + "grad_norm": 1.3419160842895508, + "learning_rate": 5.22491215877061e-05, + "loss": 0.1806, + "step": 57400 + }, + { + "epoch": 2.216687903007838, + "grad_norm": 0.9998745322227478, + "learning_rate": 5.222338056810431e-05, + "loss": 0.1452, + "step": 57410 + }, + { + "epoch": 2.217074018301865, + "grad_norm": 0.5411838889122009, + "learning_rate": 5.219763954850252e-05, + "loss": 0.0978, + "step": 57420 + }, + { + "epoch": 2.2174601335958917, + "grad_norm": 0.505660891532898, + "learning_rate": 5.217189852890073e-05, + "loss": 0.0553, + "step": 57430 + }, + { + "epoch": 2.2178462488899187, + "grad_norm": 2.207895278930664, + "learning_rate": 5.2146157509298944e-05, + "loss": 0.2327, + "step": 57440 + }, + { + "epoch": 2.218232364183945, + "grad_norm": 2.272740364074707, + "learning_rate": 5.212041648969716e-05, + "loss": 0.1773, + "step": 57450 + }, + { + "epoch": 2.218618479477972, + "grad_norm": 1.0580307245254517, + "learning_rate": 5.209467547009538e-05, + "loss": 0.1024, + "step": 57460 + }, + { + "epoch": 2.2190045947719987, + "grad_norm": 0.3714104890823364, + "learning_rate": 5.206893445049359e-05, + "loss": 0.1082, + "step": 57470 + }, + { + "epoch": 2.2193907100660257, + "grad_norm": 0.35416433215141296, + "learning_rate": 5.2043193430891804e-05, + "loss": 0.1082, + "step": 57480 + }, + { + "epoch": 2.2197768253600527, + "grad_norm": 1.1985892057418823, + "learning_rate": 5.201745241129001e-05, + "loss": 0.1227, + "step": 57490 + }, + { + "epoch": 2.2201629406540793, + "grad_norm": 0.7527439594268799, + "learning_rate": 
5.1991711391688224e-05, + "loss": 0.2785, + "step": 57500 + }, + { + "epoch": 2.2205490559481063, + "grad_norm": 2.3120546340942383, + "learning_rate": 5.196597037208644e-05, + "loss": 0.1503, + "step": 57510 + }, + { + "epoch": 2.220935171242133, + "grad_norm": 1.3414112329483032, + "learning_rate": 5.194022935248466e-05, + "loss": 0.1436, + "step": 57520 + }, + { + "epoch": 2.22132128653616, + "grad_norm": 0.9937017560005188, + "learning_rate": 5.191448833288287e-05, + "loss": 0.1424, + "step": 57530 + }, + { + "epoch": 2.2217074018301863, + "grad_norm": 0.952620267868042, + "learning_rate": 5.1888747313281084e-05, + "loss": 0.1701, + "step": 57540 + }, + { + "epoch": 2.2220935171242133, + "grad_norm": 0.421085000038147, + "learning_rate": 5.18630062936793e-05, + "loss": 0.1187, + "step": 57550 + }, + { + "epoch": 2.2224796324182403, + "grad_norm": 1.5724862813949585, + "learning_rate": 5.1837265274077504e-05, + "loss": 0.1509, + "step": 57560 + }, + { + "epoch": 2.222865747712267, + "grad_norm": 1.705536127090454, + "learning_rate": 5.181152425447572e-05, + "loss": 0.1546, + "step": 57570 + }, + { + "epoch": 2.223251863006294, + "grad_norm": 0.6752486824989319, + "learning_rate": 5.1785783234873944e-05, + "loss": 0.1785, + "step": 57580 + }, + { + "epoch": 2.2236379783003204, + "grad_norm": 0.1227736845612526, + "learning_rate": 5.176004221527215e-05, + "loss": 0.1198, + "step": 57590 + }, + { + "epoch": 2.2240240935943474, + "grad_norm": 1.0653119087219238, + "learning_rate": 5.1734301195670364e-05, + "loss": 0.2688, + "step": 57600 + }, + { + "epoch": 2.224410208888374, + "grad_norm": 2.1538949012756348, + "learning_rate": 5.170856017606858e-05, + "loss": 0.1402, + "step": 57610 + }, + { + "epoch": 2.224796324182401, + "grad_norm": 2.1059763431549072, + "learning_rate": 5.168281915646679e-05, + "loss": 0.1319, + "step": 57620 + }, + { + "epoch": 2.2251824394764275, + "grad_norm": 1.8453232049942017, + "learning_rate": 5.1657078136865004e-05, + "loss": 0.154, + "step": 57630 + }, + { + "epoch": 2.2255685547704545, + "grad_norm": 1.7324992418289185, + "learning_rate": 5.163133711726321e-05, + "loss": 0.2807, + "step": 57640 + }, + { + "epoch": 2.2259546700644814, + "grad_norm": 0.4680674374103546, + "learning_rate": 5.160559609766144e-05, + "loss": 0.1648, + "step": 57650 + }, + { + "epoch": 2.226340785358508, + "grad_norm": 0.2356865406036377, + "learning_rate": 5.1579855078059643e-05, + "loss": 0.2068, + "step": 57660 + }, + { + "epoch": 2.226726900652535, + "grad_norm": 1.2691845893859863, + "learning_rate": 5.155411405845786e-05, + "loss": 0.425, + "step": 57670 + }, + { + "epoch": 2.2271130159465615, + "grad_norm": 0.09415816515684128, + "learning_rate": 5.152837303885607e-05, + "loss": 0.1015, + "step": 57680 + }, + { + "epoch": 2.2274991312405885, + "grad_norm": 1.1072195768356323, + "learning_rate": 5.1502632019254283e-05, + "loss": 0.2324, + "step": 57690 + }, + { + "epoch": 2.227885246534615, + "grad_norm": 1.774086594581604, + "learning_rate": 5.14768909996525e-05, + "loss": 0.1382, + "step": 57700 + }, + { + "epoch": 2.228271361828642, + "grad_norm": 1.3065643310546875, + "learning_rate": 5.145114998005072e-05, + "loss": 0.1843, + "step": 57710 + }, + { + "epoch": 2.2286574771226686, + "grad_norm": 1.4786202907562256, + "learning_rate": 5.142540896044893e-05, + "loss": 0.2328, + "step": 57720 + }, + { + "epoch": 2.2290435924166956, + "grad_norm": 1.2334314584732056, + "learning_rate": 5.1399667940847143e-05, + "loss": 0.2064, + "step": 57730 + }, + { + "epoch": 
2.2294297077107226, + "grad_norm": 0.6082472801208496, + "learning_rate": 5.137392692124535e-05, + "loss": 0.1278, + "step": 57740 + }, + { + "epoch": 2.229815823004749, + "grad_norm": 1.2659168243408203, + "learning_rate": 5.134818590164356e-05, + "loss": 0.1877, + "step": 57750 + }, + { + "epoch": 2.230201938298776, + "grad_norm": 1.652754783630371, + "learning_rate": 5.1322444882041777e-05, + "loss": 0.2277, + "step": 57760 + }, + { + "epoch": 2.2305880535928027, + "grad_norm": 1.6349531412124634, + "learning_rate": 5.129670386244e-05, + "loss": 0.2374, + "step": 57770 + }, + { + "epoch": 2.2309741688868296, + "grad_norm": 1.2513495683670044, + "learning_rate": 5.127096284283821e-05, + "loss": 0.2055, + "step": 57780 + }, + { + "epoch": 2.231360284180856, + "grad_norm": 2.0995755195617676, + "learning_rate": 5.124522182323642e-05, + "loss": 0.1968, + "step": 57790 + }, + { + "epoch": 2.231746399474883, + "grad_norm": 2.2957067489624023, + "learning_rate": 5.1219480803634637e-05, + "loss": 0.233, + "step": 57800 + }, + { + "epoch": 2.23213251476891, + "grad_norm": 2.303072452545166, + "learning_rate": 5.119373978403284e-05, + "loss": 0.0924, + "step": 57810 + }, + { + "epoch": 2.2325186300629367, + "grad_norm": 0.6360287070274353, + "learning_rate": 5.1167998764431056e-05, + "loss": 0.1557, + "step": 57820 + }, + { + "epoch": 2.2329047453569637, + "grad_norm": 0.720551073551178, + "learning_rate": 5.114225774482928e-05, + "loss": 0.1619, + "step": 57830 + }, + { + "epoch": 2.2332908606509903, + "grad_norm": 0.332627534866333, + "learning_rate": 5.111651672522749e-05, + "loss": 0.1529, + "step": 57840 + }, + { + "epoch": 2.2336769759450172, + "grad_norm": 2.1180593967437744, + "learning_rate": 5.10907757056257e-05, + "loss": 0.2647, + "step": 57850 + }, + { + "epoch": 2.234063091239044, + "grad_norm": 0.023406701162457466, + "learning_rate": 5.1065034686023916e-05, + "loss": 0.2461, + "step": 57860 + }, + { + "epoch": 2.2344492065330708, + "grad_norm": 0.887008011341095, + "learning_rate": 5.103929366642213e-05, + "loss": 0.191, + "step": 57870 + }, + { + "epoch": 2.2348353218270978, + "grad_norm": 0.9116653203964233, + "learning_rate": 5.1013552646820336e-05, + "loss": 0.2199, + "step": 57880 + }, + { + "epoch": 2.2352214371211243, + "grad_norm": 2.72094464302063, + "learning_rate": 5.098781162721855e-05, + "loss": 0.1219, + "step": 57890 + }, + { + "epoch": 2.2356075524151513, + "grad_norm": 1.7832390069961548, + "learning_rate": 5.0962070607616776e-05, + "loss": 0.1516, + "step": 57900 + }, + { + "epoch": 2.235993667709178, + "grad_norm": 1.892029881477356, + "learning_rate": 5.093632958801498e-05, + "loss": 0.1943, + "step": 57910 + }, + { + "epoch": 2.236379783003205, + "grad_norm": 1.284244418144226, + "learning_rate": 5.0910588568413196e-05, + "loss": 0.0762, + "step": 57920 + }, + { + "epoch": 2.2367658982972314, + "grad_norm": 2.9343578815460205, + "learning_rate": 5.088484754881141e-05, + "loss": 0.1623, + "step": 57930 + }, + { + "epoch": 2.2371520135912584, + "grad_norm": 2.6697938442230225, + "learning_rate": 5.085910652920962e-05, + "loss": 0.1781, + "step": 57940 + }, + { + "epoch": 2.2375381288852854, + "grad_norm": 0.14455921947956085, + "learning_rate": 5.0833365509607836e-05, + "loss": 0.3026, + "step": 57950 + }, + { + "epoch": 2.237924244179312, + "grad_norm": 0.3427145183086395, + "learning_rate": 5.0807624490006056e-05, + "loss": 0.047, + "step": 57960 + }, + { + "epoch": 2.238310359473339, + "grad_norm": 0.4725586473941803, + "learning_rate": 5.078188347040427e-05, 
+ "loss": 0.1794, + "step": 57970 + }, + { + "epoch": 2.2386964747673654, + "grad_norm": 2.0606446266174316, + "learning_rate": 5.075614245080248e-05, + "loss": 0.0948, + "step": 57980 + }, + { + "epoch": 2.2390825900613924, + "grad_norm": 0.08911284059286118, + "learning_rate": 5.073040143120069e-05, + "loss": 0.1171, + "step": 57990 + }, + { + "epoch": 2.239468705355419, + "grad_norm": 0.05132399871945381, + "learning_rate": 5.07046604115989e-05, + "loss": 0.1201, + "step": 58000 + }, + { + "epoch": 2.239854820649446, + "grad_norm": 0.5799759030342102, + "learning_rate": 5.0678919391997116e-05, + "loss": 0.1702, + "step": 58010 + }, + { + "epoch": 2.240240935943473, + "grad_norm": 1.5331569910049438, + "learning_rate": 5.0653178372395336e-05, + "loss": 0.2916, + "step": 58020 + }, + { + "epoch": 2.2406270512374995, + "grad_norm": 0.31285667419433594, + "learning_rate": 5.062743735279355e-05, + "loss": 0.1659, + "step": 58030 + }, + { + "epoch": 2.2410131665315265, + "grad_norm": 1.9137883186340332, + "learning_rate": 5.060169633319176e-05, + "loss": 0.0994, + "step": 58040 + }, + { + "epoch": 2.241399281825553, + "grad_norm": 0.0040522972121834755, + "learning_rate": 5.0575955313589976e-05, + "loss": 0.1047, + "step": 58050 + }, + { + "epoch": 2.24178539711958, + "grad_norm": 1.4532781839370728, + "learning_rate": 5.055021429398818e-05, + "loss": 0.1351, + "step": 58060 + }, + { + "epoch": 2.2421715124136066, + "grad_norm": 1.1458393335342407, + "learning_rate": 5.0524473274386396e-05, + "loss": 0.0966, + "step": 58070 + }, + { + "epoch": 2.2425576277076336, + "grad_norm": 0.4871302545070648, + "learning_rate": 5.049873225478462e-05, + "loss": 0.2297, + "step": 58080 + }, + { + "epoch": 2.24294374300166, + "grad_norm": 0.8895847201347351, + "learning_rate": 5.047299123518283e-05, + "loss": 0.1101, + "step": 58090 + }, + { + "epoch": 2.243329858295687, + "grad_norm": 1.5819259881973267, + "learning_rate": 5.044725021558104e-05, + "loss": 0.165, + "step": 58100 + }, + { + "epoch": 2.243715973589714, + "grad_norm": 0.4520101249217987, + "learning_rate": 5.0421509195979256e-05, + "loss": 0.2857, + "step": 58110 + }, + { + "epoch": 2.2441020888837406, + "grad_norm": 0.6979352235794067, + "learning_rate": 5.039576817637747e-05, + "loss": 0.1135, + "step": 58120 + }, + { + "epoch": 2.2444882041777676, + "grad_norm": 0.10534228384494781, + "learning_rate": 5.0370027156775676e-05, + "loss": 0.1788, + "step": 58130 + }, + { + "epoch": 2.244874319471794, + "grad_norm": 1.593078851699829, + "learning_rate": 5.034428613717389e-05, + "loss": 0.0948, + "step": 58140 + }, + { + "epoch": 2.245260434765821, + "grad_norm": 0.7897083163261414, + "learning_rate": 5.0318545117572116e-05, + "loss": 0.1155, + "step": 58150 + }, + { + "epoch": 2.2456465500598477, + "grad_norm": 0.17938394844532013, + "learning_rate": 5.029280409797032e-05, + "loss": 0.246, + "step": 58160 + }, + { + "epoch": 2.2460326653538747, + "grad_norm": 0.9242120385169983, + "learning_rate": 5.0267063078368536e-05, + "loss": 0.2686, + "step": 58170 + }, + { + "epoch": 2.2464187806479012, + "grad_norm": 0.46744218468666077, + "learning_rate": 5.024132205876675e-05, + "loss": 0.105, + "step": 58180 + }, + { + "epoch": 2.2468048959419282, + "grad_norm": 1.0429635047912598, + "learning_rate": 5.021558103916496e-05, + "loss": 0.0922, + "step": 58190 + }, + { + "epoch": 2.247191011235955, + "grad_norm": 2.889759063720703, + "learning_rate": 5.018984001956317e-05, + "loss": 0.2586, + "step": 58200 + }, + { + "epoch": 2.2475771265299818, + 
"grad_norm": 1.0298150777816772, + "learning_rate": 5.0164098999961396e-05, + "loss": 0.1373, + "step": 58210 + }, + { + "epoch": 2.2479632418240088, + "grad_norm": 0.14992554485797882, + "learning_rate": 5.013835798035961e-05, + "loss": 0.2644, + "step": 58220 + }, + { + "epoch": 2.2483493571180353, + "grad_norm": 0.8929703831672668, + "learning_rate": 5.0112616960757816e-05, + "loss": 0.2109, + "step": 58230 + }, + { + "epoch": 2.2487354724120623, + "grad_norm": 0.8829396367073059, + "learning_rate": 5.008687594115603e-05, + "loss": 0.2353, + "step": 58240 + }, + { + "epoch": 2.249121587706089, + "grad_norm": 0.3709293305873871, + "learning_rate": 5.006113492155424e-05, + "loss": 0.099, + "step": 58250 + }, + { + "epoch": 2.249507703000116, + "grad_norm": 0.17572759091854095, + "learning_rate": 5.0035393901952456e-05, + "loss": 0.1161, + "step": 58260 + }, + { + "epoch": 2.249893818294143, + "grad_norm": 0.3241714537143707, + "learning_rate": 5.0009652882350676e-05, + "loss": 0.2081, + "step": 58270 + }, + { + "epoch": 2.2502799335881694, + "grad_norm": 0.5595920085906982, + "learning_rate": 4.998391186274888e-05, + "loss": 0.2376, + "step": 58280 + }, + { + "epoch": 2.2506660488821963, + "grad_norm": 0.8801298141479492, + "learning_rate": 4.99581708431471e-05, + "loss": 0.1423, + "step": 58290 + }, + { + "epoch": 2.251052164176223, + "grad_norm": 1.4857895374298096, + "learning_rate": 4.9932429823545316e-05, + "loss": 0.169, + "step": 58300 + }, + { + "epoch": 2.25143827947025, + "grad_norm": 1.0327515602111816, + "learning_rate": 4.990668880394352e-05, + "loss": 0.1127, + "step": 58310 + }, + { + "epoch": 2.2518243947642764, + "grad_norm": 0.19778093695640564, + "learning_rate": 4.988094778434174e-05, + "loss": 0.2655, + "step": 58320 + }, + { + "epoch": 2.2522105100583034, + "grad_norm": 1.3672188520431519, + "learning_rate": 4.9855206764739956e-05, + "loss": 0.1252, + "step": 58330 + }, + { + "epoch": 2.2525966253523304, + "grad_norm": 3.6712214946746826, + "learning_rate": 4.982946574513816e-05, + "loss": 0.2721, + "step": 58340 + }, + { + "epoch": 2.252982740646357, + "grad_norm": 0.19810612499713898, + "learning_rate": 4.980372472553638e-05, + "loss": 0.1008, + "step": 58350 + }, + { + "epoch": 2.253368855940384, + "grad_norm": 0.5414086580276489, + "learning_rate": 4.9777983705934596e-05, + "loss": 0.1898, + "step": 58360 + }, + { + "epoch": 2.2537549712344105, + "grad_norm": 1.883710503578186, + "learning_rate": 4.975224268633281e-05, + "loss": 0.1841, + "step": 58370 + }, + { + "epoch": 2.2541410865284375, + "grad_norm": 0.3979630172252655, + "learning_rate": 4.972650166673102e-05, + "loss": 0.1712, + "step": 58380 + }, + { + "epoch": 2.254527201822464, + "grad_norm": 1.2606881856918335, + "learning_rate": 4.9700760647129236e-05, + "loss": 0.1772, + "step": 58390 + }, + { + "epoch": 2.254913317116491, + "grad_norm": 0.6021280288696289, + "learning_rate": 4.967501962752745e-05, + "loss": 0.1662, + "step": 58400 + }, + { + "epoch": 2.255299432410518, + "grad_norm": 0.4324108362197876, + "learning_rate": 4.964927860792566e-05, + "loss": 0.138, + "step": 58410 + }, + { + "epoch": 2.2556855477045445, + "grad_norm": 1.147596001625061, + "learning_rate": 4.9623537588323875e-05, + "loss": 0.1956, + "step": 58420 + }, + { + "epoch": 2.2560716629985715, + "grad_norm": 2.516636371612549, + "learning_rate": 4.959779656872209e-05, + "loss": 0.2031, + "step": 58430 + }, + { + "epoch": 2.256457778292598, + "grad_norm": 1.1109521389007568, + "learning_rate": 4.95720555491203e-05, + "loss": 
0.2845, + "step": 58440 + }, + { + "epoch": 2.256843893586625, + "grad_norm": 0.3227555453777313, + "learning_rate": 4.9546314529518515e-05, + "loss": 0.0596, + "step": 58450 + }, + { + "epoch": 2.2572300088806516, + "grad_norm": 2.5064280033111572, + "learning_rate": 4.952057350991673e-05, + "loss": 0.289, + "step": 58460 + }, + { + "epoch": 2.2576161241746786, + "grad_norm": 1.0245225429534912, + "learning_rate": 4.949483249031495e-05, + "loss": 0.1458, + "step": 58470 + }, + { + "epoch": 2.2580022394687056, + "grad_norm": 0.058567408472299576, + "learning_rate": 4.9469091470713155e-05, + "loss": 0.2719, + "step": 58480 + }, + { + "epoch": 2.258388354762732, + "grad_norm": 2.1362061500549316, + "learning_rate": 4.944335045111137e-05, + "loss": 0.3814, + "step": 58490 + }, + { + "epoch": 2.258774470056759, + "grad_norm": 0.363843709230423, + "learning_rate": 4.941760943150959e-05, + "loss": 0.0669, + "step": 58500 + }, + { + "epoch": 2.2591605853507857, + "grad_norm": 0.1753295511007309, + "learning_rate": 4.93918684119078e-05, + "loss": 0.1246, + "step": 58510 + }, + { + "epoch": 2.2595467006448127, + "grad_norm": 1.6673377752304077, + "learning_rate": 4.936612739230601e-05, + "loss": 0.2781, + "step": 58520 + }, + { + "epoch": 2.259932815938839, + "grad_norm": 3.1135804653167725, + "learning_rate": 4.934038637270422e-05, + "loss": 0.1951, + "step": 58530 + }, + { + "epoch": 2.260318931232866, + "grad_norm": 0.8234933614730835, + "learning_rate": 4.931464535310244e-05, + "loss": 0.11, + "step": 58540 + }, + { + "epoch": 2.2607050465268927, + "grad_norm": 1.0099560022354126, + "learning_rate": 4.928890433350065e-05, + "loss": 0.2741, + "step": 58550 + }, + { + "epoch": 2.2610911618209197, + "grad_norm": 2.2589969635009766, + "learning_rate": 4.926316331389886e-05, + "loss": 0.1243, + "step": 58560 + }, + { + "epoch": 2.2614772771149463, + "grad_norm": 0.5491199493408203, + "learning_rate": 4.923742229429708e-05, + "loss": 0.1302, + "step": 58570 + }, + { + "epoch": 2.2618633924089733, + "grad_norm": 0.9286119937896729, + "learning_rate": 4.9211681274695295e-05, + "loss": 0.1047, + "step": 58580 + }, + { + "epoch": 2.2622495077030003, + "grad_norm": 0.6178199052810669, + "learning_rate": 4.91859402550935e-05, + "loss": 0.1143, + "step": 58590 + }, + { + "epoch": 2.262635622997027, + "grad_norm": 1.6250818967819214, + "learning_rate": 4.916019923549172e-05, + "loss": 0.2461, + "step": 58600 + }, + { + "epoch": 2.263021738291054, + "grad_norm": 1.1366840600967407, + "learning_rate": 4.9134458215889935e-05, + "loss": 0.2128, + "step": 58610 + }, + { + "epoch": 2.2634078535850803, + "grad_norm": 0.38859716057777405, + "learning_rate": 4.910871719628815e-05, + "loss": 0.1476, + "step": 58620 + }, + { + "epoch": 2.2637939688791073, + "grad_norm": 0.02930479310452938, + "learning_rate": 4.908297617668636e-05, + "loss": 0.1901, + "step": 58630 + }, + { + "epoch": 2.264180084173134, + "grad_norm": 1.4426459074020386, + "learning_rate": 4.9057235157084575e-05, + "loss": 0.1736, + "step": 58640 + }, + { + "epoch": 2.264566199467161, + "grad_norm": 1.103959321975708, + "learning_rate": 4.903149413748279e-05, + "loss": 0.2207, + "step": 58650 + }, + { + "epoch": 2.264952314761188, + "grad_norm": 3.1351921558380127, + "learning_rate": 4.9005753117881e-05, + "loss": 0.394, + "step": 58660 + }, + { + "epoch": 2.2653384300552144, + "grad_norm": 0.33870574831962585, + "learning_rate": 4.8980012098279215e-05, + "loss": 0.1958, + "step": 58670 + }, + { + "epoch": 2.2657245453492414, + "grad_norm": 
0.08599444478750229, + "learning_rate": 4.895427107867743e-05, + "loss": 0.0842, + "step": 58680 + }, + { + "epoch": 2.266110660643268, + "grad_norm": 0.7150046229362488, + "learning_rate": 4.892853005907564e-05, + "loss": 0.1917, + "step": 58690 + }, + { + "epoch": 2.266496775937295, + "grad_norm": 1.561062216758728, + "learning_rate": 4.8902789039473855e-05, + "loss": 0.2327, + "step": 58700 + }, + { + "epoch": 2.2668828912313215, + "grad_norm": 1.3899431228637695, + "learning_rate": 4.887704801987207e-05, + "loss": 0.1456, + "step": 58710 + }, + { + "epoch": 2.2672690065253485, + "grad_norm": 0.5647567510604858, + "learning_rate": 4.885130700027029e-05, + "loss": 0.2036, + "step": 58720 + }, + { + "epoch": 2.2676551218193755, + "grad_norm": 0.2155967652797699, + "learning_rate": 4.8825565980668495e-05, + "loss": 0.1159, + "step": 58730 + }, + { + "epoch": 2.268041237113402, + "grad_norm": 1.0128939151763916, + "learning_rate": 4.879982496106671e-05, + "loss": 0.1342, + "step": 58740 + }, + { + "epoch": 2.268427352407429, + "grad_norm": 1.079142689704895, + "learning_rate": 4.877408394146493e-05, + "loss": 0.1689, + "step": 58750 + }, + { + "epoch": 2.2688134677014555, + "grad_norm": 1.2162476778030396, + "learning_rate": 4.874834292186314e-05, + "loss": 0.2256, + "step": 58760 + }, + { + "epoch": 2.2691995829954825, + "grad_norm": 1.6972836256027222, + "learning_rate": 4.872260190226135e-05, + "loss": 0.1517, + "step": 58770 + }, + { + "epoch": 2.269585698289509, + "grad_norm": 1.4847822189331055, + "learning_rate": 4.869686088265956e-05, + "loss": 0.2296, + "step": 58780 + }, + { + "epoch": 2.269971813583536, + "grad_norm": 1.7321871519088745, + "learning_rate": 4.867111986305778e-05, + "loss": 0.2396, + "step": 58790 + }, + { + "epoch": 2.270357928877563, + "grad_norm": 1.468248724937439, + "learning_rate": 4.864537884345599e-05, + "loss": 0.1501, + "step": 58800 + }, + { + "epoch": 2.2707440441715896, + "grad_norm": 1.125684380531311, + "learning_rate": 4.86196378238542e-05, + "loss": 0.2444, + "step": 58810 + }, + { + "epoch": 2.2711301594656166, + "grad_norm": 2.3958170413970947, + "learning_rate": 4.859389680425242e-05, + "loss": 0.3088, + "step": 58820 + }, + { + "epoch": 2.271516274759643, + "grad_norm": 0.8419416546821594, + "learning_rate": 4.8568155784650635e-05, + "loss": 0.1215, + "step": 58830 + }, + { + "epoch": 2.27190239005367, + "grad_norm": 0.3124147057533264, + "learning_rate": 4.854241476504884e-05, + "loss": 0.2069, + "step": 58840 + }, + { + "epoch": 2.2722885053476967, + "grad_norm": 0.6150888204574585, + "learning_rate": 4.851667374544706e-05, + "loss": 0.0483, + "step": 58850 + }, + { + "epoch": 2.2726746206417237, + "grad_norm": 0.7708920836448669, + "learning_rate": 4.8490932725845275e-05, + "loss": 0.2787, + "step": 58860 + }, + { + "epoch": 2.2730607359357506, + "grad_norm": 1.123910665512085, + "learning_rate": 4.846519170624348e-05, + "loss": 0.1875, + "step": 58870 + }, + { + "epoch": 2.273446851229777, + "grad_norm": 1.7842246294021606, + "learning_rate": 4.84394506866417e-05, + "loss": 0.2003, + "step": 58880 + }, + { + "epoch": 2.273832966523804, + "grad_norm": 0.09984418004751205, + "learning_rate": 4.8413709667039915e-05, + "loss": 0.0445, + "step": 58890 + }, + { + "epoch": 2.2742190818178307, + "grad_norm": 0.9539859890937805, + "learning_rate": 4.838796864743813e-05, + "loss": 0.1308, + "step": 58900 + }, + { + "epoch": 2.2746051971118577, + "grad_norm": 2.2655584812164307, + "learning_rate": 4.836222762783634e-05, + "loss": 0.1728, + "step": 
58910 + }, + { + "epoch": 2.2749913124058843, + "grad_norm": 0.8873695731163025, + "learning_rate": 4.8336486608234555e-05, + "loss": 0.2559, + "step": 58920 + }, + { + "epoch": 2.2753774276999112, + "grad_norm": 0.6130178570747375, + "learning_rate": 4.831074558863277e-05, + "loss": 0.0859, + "step": 58930 + }, + { + "epoch": 2.2757635429939382, + "grad_norm": 2.61225962638855, + "learning_rate": 4.828500456903098e-05, + "loss": 0.1612, + "step": 58940 + }, + { + "epoch": 2.276149658287965, + "grad_norm": 1.187378168106079, + "learning_rate": 4.8259263549429195e-05, + "loss": 0.1222, + "step": 58950 + }, + { + "epoch": 2.2765357735819918, + "grad_norm": 0.3115352392196655, + "learning_rate": 4.823352252982741e-05, + "loss": 0.2081, + "step": 58960 + }, + { + "epoch": 2.2769218888760183, + "grad_norm": 1.1406041383743286, + "learning_rate": 4.820778151022563e-05, + "loss": 0.1137, + "step": 58970 + }, + { + "epoch": 2.2773080041700453, + "grad_norm": 2.415290355682373, + "learning_rate": 4.8182040490623834e-05, + "loss": 0.2498, + "step": 58980 + }, + { + "epoch": 2.277694119464072, + "grad_norm": 0.7312545776367188, + "learning_rate": 4.815629947102205e-05, + "loss": 0.2019, + "step": 58990 + }, + { + "epoch": 2.278080234758099, + "grad_norm": 0.3095935583114624, + "learning_rate": 4.813055845142027e-05, + "loss": 0.0833, + "step": 59000 + }, + { + "epoch": 2.2784663500521254, + "grad_norm": 2.551358938217163, + "learning_rate": 4.8104817431818474e-05, + "loss": 0.1599, + "step": 59010 + }, + { + "epoch": 2.2788524653461524, + "grad_norm": 0.8552582859992981, + "learning_rate": 4.807907641221669e-05, + "loss": 0.1445, + "step": 59020 + }, + { + "epoch": 2.279238580640179, + "grad_norm": 0.5667589902877808, + "learning_rate": 4.80533353926149e-05, + "loss": 0.1746, + "step": 59030 + }, + { + "epoch": 2.279624695934206, + "grad_norm": 1.4083415269851685, + "learning_rate": 4.802759437301312e-05, + "loss": 0.1645, + "step": 59040 + }, + { + "epoch": 2.280010811228233, + "grad_norm": 0.04097180813550949, + "learning_rate": 4.800185335341133e-05, + "loss": 0.1898, + "step": 59050 + }, + { + "epoch": 2.2803969265222594, + "grad_norm": 0.019375400617718697, + "learning_rate": 4.797611233380954e-05, + "loss": 0.1723, + "step": 59060 + }, + { + "epoch": 2.2807830418162864, + "grad_norm": 1.4372104406356812, + "learning_rate": 4.795037131420776e-05, + "loss": 0.1243, + "step": 59070 + }, + { + "epoch": 2.281169157110313, + "grad_norm": 0.9807009696960449, + "learning_rate": 4.7924630294605974e-05, + "loss": 0.2997, + "step": 59080 + }, + { + "epoch": 2.28155527240434, + "grad_norm": 0.48975950479507446, + "learning_rate": 4.789888927500418e-05, + "loss": 0.1613, + "step": 59090 + }, + { + "epoch": 2.2819413876983665, + "grad_norm": 2.779517412185669, + "learning_rate": 4.78731482554024e-05, + "loss": 0.2774, + "step": 59100 + }, + { + "epoch": 2.2823275029923935, + "grad_norm": 1.5717260837554932, + "learning_rate": 4.7847407235800614e-05, + "loss": 0.1473, + "step": 59110 + }, + { + "epoch": 2.2827136182864205, + "grad_norm": 0.19590778648853302, + "learning_rate": 4.782166621619882e-05, + "loss": 0.2223, + "step": 59120 + }, + { + "epoch": 2.283099733580447, + "grad_norm": 1.9272565841674805, + "learning_rate": 4.779592519659704e-05, + "loss": 0.1101, + "step": 59130 + }, + { + "epoch": 2.283485848874474, + "grad_norm": 1.9476497173309326, + "learning_rate": 4.7770184176995254e-05, + "loss": 0.1627, + "step": 59140 + }, + { + "epoch": 2.2838719641685006, + "grad_norm": 4.072344779968262, + 
"learning_rate": 4.774444315739347e-05, + "loss": 0.2169, + "step": 59150 + }, + { + "epoch": 2.2842580794625276, + "grad_norm": 2.180009603500366, + "learning_rate": 4.771870213779168e-05, + "loss": 0.1348, + "step": 59160 + }, + { + "epoch": 2.284644194756554, + "grad_norm": 1.6288330554962158, + "learning_rate": 4.7692961118189894e-05, + "loss": 0.157, + "step": 59170 + }, + { + "epoch": 2.285030310050581, + "grad_norm": 0.8936790227890015, + "learning_rate": 4.766722009858811e-05, + "loss": 0.2966, + "step": 59180 + }, + { + "epoch": 2.285416425344608, + "grad_norm": 0.34459662437438965, + "learning_rate": 4.764147907898632e-05, + "loss": 0.2497, + "step": 59190 + }, + { + "epoch": 2.2858025406386346, + "grad_norm": 1.2364985942840576, + "learning_rate": 4.7615738059384534e-05, + "loss": 0.1951, + "step": 59200 + }, + { + "epoch": 2.2861886559326616, + "grad_norm": 0.43685224652290344, + "learning_rate": 4.758999703978275e-05, + "loss": 0.1707, + "step": 59210 + }, + { + "epoch": 2.286574771226688, + "grad_norm": 1.5797430276870728, + "learning_rate": 4.756425602018096e-05, + "loss": 0.1328, + "step": 59220 + }, + { + "epoch": 2.286960886520715, + "grad_norm": 1.0080262422561646, + "learning_rate": 4.7538515000579174e-05, + "loss": 0.2743, + "step": 59230 + }, + { + "epoch": 2.2873470018147417, + "grad_norm": 2.561823844909668, + "learning_rate": 4.751277398097739e-05, + "loss": 0.3414, + "step": 59240 + }, + { + "epoch": 2.2877331171087687, + "grad_norm": 1.4492895603179932, + "learning_rate": 4.748703296137561e-05, + "loss": 0.1942, + "step": 59250 + }, + { + "epoch": 2.2881192324027957, + "grad_norm": 2.6348021030426025, + "learning_rate": 4.7461291941773814e-05, + "loss": 0.2293, + "step": 59260 + }, + { + "epoch": 2.2885053476968222, + "grad_norm": 0.5275348424911499, + "learning_rate": 4.743555092217203e-05, + "loss": 0.1436, + "step": 59270 + }, + { + "epoch": 2.2888914629908492, + "grad_norm": 0.3320735991001129, + "learning_rate": 4.740980990257025e-05, + "loss": 0.3213, + "step": 59280 + }, + { + "epoch": 2.2892775782848758, + "grad_norm": 0.33580052852630615, + "learning_rate": 4.738406888296846e-05, + "loss": 0.0988, + "step": 59290 + }, + { + "epoch": 2.2896636935789028, + "grad_norm": 0.6209644079208374, + "learning_rate": 4.735832786336667e-05, + "loss": 0.0969, + "step": 59300 + }, + { + "epoch": 2.2900498088729293, + "grad_norm": 0.8504312038421631, + "learning_rate": 4.733258684376488e-05, + "loss": 0.1328, + "step": 59310 + }, + { + "epoch": 2.2904359241669563, + "grad_norm": 1.8116223812103271, + "learning_rate": 4.73068458241631e-05, + "loss": 0.2032, + "step": 59320 + }, + { + "epoch": 2.2908220394609833, + "grad_norm": 3.810727596282959, + "learning_rate": 4.728110480456131e-05, + "loss": 0.2162, + "step": 59330 + }, + { + "epoch": 2.29120815475501, + "grad_norm": 0.39549925923347473, + "learning_rate": 4.725536378495952e-05, + "loss": 0.1421, + "step": 59340 + }, + { + "epoch": 2.291594270049037, + "grad_norm": 0.6922689080238342, + "learning_rate": 4.722962276535774e-05, + "loss": 0.23, + "step": 59350 + }, + { + "epoch": 2.2919803853430634, + "grad_norm": 2.8557536602020264, + "learning_rate": 4.7203881745755954e-05, + "loss": 0.2824, + "step": 59360 + }, + { + "epoch": 2.2923665006370904, + "grad_norm": 1.2936065196990967, + "learning_rate": 4.717814072615416e-05, + "loss": 0.2515, + "step": 59370 + }, + { + "epoch": 2.292752615931117, + "grad_norm": 0.17199493944644928, + "learning_rate": 4.715239970655238e-05, + "loss": 0.1029, + "step": 59380 + }, + { + 
"epoch": 2.293138731225144, + "grad_norm": 0.06957222521305084, + "learning_rate": 4.7126658686950594e-05, + "loss": 0.1535, + "step": 59390 + }, + { + "epoch": 2.2935248465191704, + "grad_norm": 0.24227716028690338, + "learning_rate": 4.710091766734881e-05, + "loss": 0.2497, + "step": 59400 + }, + { + "epoch": 2.2939109618131974, + "grad_norm": 0.49250227212905884, + "learning_rate": 4.707517664774702e-05, + "loss": 0.1416, + "step": 59410 + }, + { + "epoch": 2.294297077107224, + "grad_norm": 0.2832399010658264, + "learning_rate": 4.7049435628145234e-05, + "loss": 0.1156, + "step": 59420 + }, + { + "epoch": 2.294683192401251, + "grad_norm": 0.6916882395744324, + "learning_rate": 4.702369460854345e-05, + "loss": 0.315, + "step": 59430 + }, + { + "epoch": 2.295069307695278, + "grad_norm": 2.099567174911499, + "learning_rate": 4.699795358894166e-05, + "loss": 0.2208, + "step": 59440 + }, + { + "epoch": 2.2954554229893045, + "grad_norm": 0.9960312843322754, + "learning_rate": 4.6972212569339874e-05, + "loss": 0.2152, + "step": 59450 + }, + { + "epoch": 2.2958415382833315, + "grad_norm": 0.8338409662246704, + "learning_rate": 4.694647154973809e-05, + "loss": 0.1238, + "step": 59460 + }, + { + "epoch": 2.296227653577358, + "grad_norm": 1.1573169231414795, + "learning_rate": 4.69207305301363e-05, + "loss": 0.1141, + "step": 59470 + }, + { + "epoch": 2.296613768871385, + "grad_norm": 0.639020562171936, + "learning_rate": 4.6894989510534514e-05, + "loss": 0.1143, + "step": 59480 + }, + { + "epoch": 2.2969998841654116, + "grad_norm": 1.2339776754379272, + "learning_rate": 4.686924849093273e-05, + "loss": 0.0832, + "step": 59490 + }, + { + "epoch": 2.2973859994594386, + "grad_norm": 0.5958113074302673, + "learning_rate": 4.684350747133095e-05, + "loss": 0.1435, + "step": 59500 + }, + { + "epoch": 2.2977721147534655, + "grad_norm": 0.36563804745674133, + "learning_rate": 4.6817766451729154e-05, + "loss": 0.21, + "step": 59510 + }, + { + "epoch": 2.298158230047492, + "grad_norm": 0.8008506894111633, + "learning_rate": 4.679202543212737e-05, + "loss": 0.2525, + "step": 59520 + }, + { + "epoch": 2.298544345341519, + "grad_norm": 1.644982099533081, + "learning_rate": 4.676628441252559e-05, + "loss": 0.1489, + "step": 59530 + }, + { + "epoch": 2.2989304606355456, + "grad_norm": 0.8418545126914978, + "learning_rate": 4.6740543392923793e-05, + "loss": 0.1275, + "step": 59540 + }, + { + "epoch": 2.2993165759295726, + "grad_norm": 1.7769790887832642, + "learning_rate": 4.671480237332201e-05, + "loss": 0.2149, + "step": 59550 + }, + { + "epoch": 2.299702691223599, + "grad_norm": 1.3088769912719727, + "learning_rate": 4.668906135372022e-05, + "loss": 0.3798, + "step": 59560 + }, + { + "epoch": 2.300088806517626, + "grad_norm": 0.0827949270606041, + "learning_rate": 4.666332033411844e-05, + "loss": 0.2128, + "step": 59570 + }, + { + "epoch": 2.300474921811653, + "grad_norm": 0.7772637605667114, + "learning_rate": 4.663757931451665e-05, + "loss": 0.1731, + "step": 59580 + }, + { + "epoch": 2.3008610371056797, + "grad_norm": 3.8065452575683594, + "learning_rate": 4.661183829491486e-05, + "loss": 0.3464, + "step": 59590 + }, + { + "epoch": 2.3012471523997067, + "grad_norm": 0.10512294620275497, + "learning_rate": 4.658609727531308e-05, + "loss": 0.1584, + "step": 59600 + }, + { + "epoch": 2.301633267693733, + "grad_norm": 1.227293848991394, + "learning_rate": 4.6560356255711293e-05, + "loss": 0.2381, + "step": 59610 + }, + { + "epoch": 2.30201938298776, + "grad_norm": 0.2790459394454956, + "learning_rate": 
4.65346152361095e-05, + "loss": 0.162, + "step": 59620 + }, + { + "epoch": 2.3024054982817868, + "grad_norm": 0.571444034576416, + "learning_rate": 4.650887421650772e-05, + "loss": 0.1354, + "step": 59630 + }, + { + "epoch": 2.3027916135758137, + "grad_norm": 0.6388635635375977, + "learning_rate": 4.648313319690593e-05, + "loss": 0.0969, + "step": 59640 + }, + { + "epoch": 2.3031777288698407, + "grad_norm": 3.020249843597412, + "learning_rate": 4.645739217730414e-05, + "loss": 0.15, + "step": 59650 + }, + { + "epoch": 2.3035638441638673, + "grad_norm": 1.6712716817855835, + "learning_rate": 4.643165115770236e-05, + "loss": 0.1445, + "step": 59660 + }, + { + "epoch": 2.3039499594578943, + "grad_norm": 0.4937114119529724, + "learning_rate": 4.640591013810057e-05, + "loss": 0.0823, + "step": 59670 + }, + { + "epoch": 2.304336074751921, + "grad_norm": 1.7938934564590454, + "learning_rate": 4.6380169118498787e-05, + "loss": 0.164, + "step": 59680 + }, + { + "epoch": 2.304722190045948, + "grad_norm": 1.9819937944412231, + "learning_rate": 4.6354428098897e-05, + "loss": 0.1753, + "step": 59690 + }, + { + "epoch": 2.3051083053399744, + "grad_norm": 1.2775839567184448, + "learning_rate": 4.632868707929521e-05, + "loss": 0.1875, + "step": 59700 + }, + { + "epoch": 2.3054944206340013, + "grad_norm": 0.775601863861084, + "learning_rate": 4.6302946059693427e-05, + "loss": 0.0737, + "step": 59710 + }, + { + "epoch": 2.3058805359280283, + "grad_norm": 0.9071961045265198, + "learning_rate": 4.627720504009164e-05, + "loss": 0.1632, + "step": 59720 + }, + { + "epoch": 2.306266651222055, + "grad_norm": 1.4232882261276245, + "learning_rate": 4.625146402048985e-05, + "loss": 0.2169, + "step": 59730 + }, + { + "epoch": 2.306652766516082, + "grad_norm": 0.5946634411811829, + "learning_rate": 4.6225723000888066e-05, + "loss": 0.1919, + "step": 59740 + }, + { + "epoch": 2.3070388818101084, + "grad_norm": 0.14276131987571716, + "learning_rate": 4.6199981981286287e-05, + "loss": 0.1415, + "step": 59750 + }, + { + "epoch": 2.3074249971041354, + "grad_norm": 1.5616459846496582, + "learning_rate": 4.617424096168449e-05, + "loss": 0.1668, + "step": 59760 + }, + { + "epoch": 2.307811112398162, + "grad_norm": 0.34254691004753113, + "learning_rate": 4.6148499942082706e-05, + "loss": 0.2122, + "step": 59770 + }, + { + "epoch": 2.308197227692189, + "grad_norm": 0.34764161705970764, + "learning_rate": 4.6122758922480926e-05, + "loss": 0.1604, + "step": 59780 + }, + { + "epoch": 2.308583342986216, + "grad_norm": 3.1887755393981934, + "learning_rate": 4.609701790287913e-05, + "loss": 0.124, + "step": 59790 + }, + { + "epoch": 2.3089694582802425, + "grad_norm": 0.8155665397644043, + "learning_rate": 4.6071276883277346e-05, + "loss": 0.1095, + "step": 59800 + }, + { + "epoch": 2.3093555735742695, + "grad_norm": 0.2734861373901367, + "learning_rate": 4.6045535863675566e-05, + "loss": 0.2389, + "step": 59810 + }, + { + "epoch": 2.309741688868296, + "grad_norm": 0.5696996450424194, + "learning_rate": 4.601979484407378e-05, + "loss": 0.0787, + "step": 59820 + }, + { + "epoch": 2.310127804162323, + "grad_norm": 0.49777019023895264, + "learning_rate": 4.5994053824471986e-05, + "loss": 0.1815, + "step": 59830 + }, + { + "epoch": 2.3105139194563495, + "grad_norm": 0.001843929523602128, + "learning_rate": 4.59683128048702e-05, + "loss": 0.0834, + "step": 59840 + }, + { + "epoch": 2.3109000347503765, + "grad_norm": 0.1088326945900917, + "learning_rate": 4.594257178526842e-05, + "loss": 0.1821, + "step": 59850 + }, + { + "epoch": 
2.311286150044403, + "grad_norm": 0.4867718815803528, + "learning_rate": 4.591683076566663e-05, + "loss": 0.2175, + "step": 59860 + }, + { + "epoch": 2.31167226533843, + "grad_norm": 0.759501576423645, + "learning_rate": 4.589108974606484e-05, + "loss": 0.1041, + "step": 59870 + }, + { + "epoch": 2.3120583806324566, + "grad_norm": 1.3260136842727661, + "learning_rate": 4.586534872646306e-05, + "loss": 0.2017, + "step": 59880 + }, + { + "epoch": 2.3124444959264836, + "grad_norm": 3.664853572845459, + "learning_rate": 4.583960770686127e-05, + "loss": 0.2322, + "step": 59890 + }, + { + "epoch": 2.3128306112205106, + "grad_norm": 0.9447748064994812, + "learning_rate": 4.581386668725948e-05, + "loss": 0.1124, + "step": 59900 + }, + { + "epoch": 2.313216726514537, + "grad_norm": 2.8684070110321045, + "learning_rate": 4.57881256676577e-05, + "loss": 0.1271, + "step": 59910 + }, + { + "epoch": 2.313602841808564, + "grad_norm": 2.4595460891723633, + "learning_rate": 4.576238464805591e-05, + "loss": 0.1695, + "step": 59920 + }, + { + "epoch": 2.3139889571025907, + "grad_norm": 0.8317721486091614, + "learning_rate": 4.5736643628454126e-05, + "loss": 0.1341, + "step": 59930 + }, + { + "epoch": 2.3143750723966177, + "grad_norm": 1.8924741744995117, + "learning_rate": 4.571090260885234e-05, + "loss": 0.3389, + "step": 59940 + }, + { + "epoch": 2.314761187690644, + "grad_norm": 0.22777517139911652, + "learning_rate": 4.568516158925055e-05, + "loss": 0.1406, + "step": 59950 + }, + { + "epoch": 2.315147302984671, + "grad_norm": 1.9150850772857666, + "learning_rate": 4.5659420569648766e-05, + "loss": 0.1615, + "step": 59960 + }, + { + "epoch": 2.315533418278698, + "grad_norm": 1.361603856086731, + "learning_rate": 4.563367955004698e-05, + "loss": 0.1616, + "step": 59970 + }, + { + "epoch": 2.3159195335727247, + "grad_norm": 1.1094087362289429, + "learning_rate": 4.560793853044519e-05, + "loss": 0.1923, + "step": 59980 + }, + { + "epoch": 2.3163056488667517, + "grad_norm": 0.5429170727729797, + "learning_rate": 4.5582197510843406e-05, + "loss": 0.2126, + "step": 59990 + }, + { + "epoch": 2.3166917641607783, + "grad_norm": 0.8391672968864441, + "learning_rate": 4.555645649124162e-05, + "loss": 0.246, + "step": 60000 + }, + { + "epoch": 2.3170778794548053, + "grad_norm": 0.6325327157974243, + "learning_rate": 4.553071547163983e-05, + "loss": 0.1385, + "step": 60010 + }, + { + "epoch": 2.317463994748832, + "grad_norm": 0.4999966025352478, + "learning_rate": 4.5504974452038046e-05, + "loss": 0.2083, + "step": 60020 + }, + { + "epoch": 2.317850110042859, + "grad_norm": 2.6003923416137695, + "learning_rate": 4.5479233432436266e-05, + "loss": 0.1553, + "step": 60030 + }, + { + "epoch": 2.318236225336886, + "grad_norm": 2.107546091079712, + "learning_rate": 4.545349241283447e-05, + "loss": 0.2252, + "step": 60040 + }, + { + "epoch": 2.3186223406309123, + "grad_norm": 0.4469972550868988, + "learning_rate": 4.5427751393232686e-05, + "loss": 0.1913, + "step": 60050 + }, + { + "epoch": 2.3190084559249393, + "grad_norm": 1.0861988067626953, + "learning_rate": 4.5402010373630906e-05, + "loss": 0.16, + "step": 60060 + }, + { + "epoch": 2.319394571218966, + "grad_norm": 1.6397405862808228, + "learning_rate": 4.537626935402912e-05, + "loss": 0.1527, + "step": 60070 + }, + { + "epoch": 2.319780686512993, + "grad_norm": 2.6963932514190674, + "learning_rate": 4.5350528334427326e-05, + "loss": 0.2445, + "step": 60080 + }, + { + "epoch": 2.3201668018070194, + "grad_norm": 1.8489919900894165, + "learning_rate": 
4.532478731482554e-05, + "loss": 0.1972, + "step": 60090 + }, + { + "epoch": 2.3205529171010464, + "grad_norm": 1.108892560005188, + "learning_rate": 4.529904629522376e-05, + "loss": 0.1301, + "step": 60100 + }, + { + "epoch": 2.3209390323950734, + "grad_norm": 1.8046364784240723, + "learning_rate": 4.5273305275621966e-05, + "loss": 0.184, + "step": 60110 + }, + { + "epoch": 2.3213251476891, + "grad_norm": 0.346176415681839, + "learning_rate": 4.524756425602018e-05, + "loss": 0.0723, + "step": 60120 + }, + { + "epoch": 2.321711262983127, + "grad_norm": 0.8505159020423889, + "learning_rate": 4.52218232364184e-05, + "loss": 0.1994, + "step": 60130 + }, + { + "epoch": 2.3220973782771535, + "grad_norm": 1.0866034030914307, + "learning_rate": 4.519608221681661e-05, + "loss": 0.097, + "step": 60140 + }, + { + "epoch": 2.3224834935711804, + "grad_norm": 2.6288974285125732, + "learning_rate": 4.517034119721482e-05, + "loss": 0.1419, + "step": 60150 + }, + { + "epoch": 2.322869608865207, + "grad_norm": 0.4779375195503235, + "learning_rate": 4.514460017761304e-05, + "loss": 0.0999, + "step": 60160 + }, + { + "epoch": 2.323255724159234, + "grad_norm": 1.1789641380310059, + "learning_rate": 4.511885915801125e-05, + "loss": 0.1184, + "step": 60170 + }, + { + "epoch": 2.323641839453261, + "grad_norm": 0.409001886844635, + "learning_rate": 4.5093118138409466e-05, + "loss": 0.1913, + "step": 60180 + }, + { + "epoch": 2.3240279547472875, + "grad_norm": 1.5853062868118286, + "learning_rate": 4.506737711880768e-05, + "loss": 0.1941, + "step": 60190 + }, + { + "epoch": 2.3244140700413145, + "grad_norm": 3.547631025314331, + "learning_rate": 4.504163609920589e-05, + "loss": 0.2272, + "step": 60200 + }, + { + "epoch": 2.324800185335341, + "grad_norm": 0.3084076941013336, + "learning_rate": 4.5015895079604106e-05, + "loss": 0.1543, + "step": 60210 + }, + { + "epoch": 2.325186300629368, + "grad_norm": 0.055422622710466385, + "learning_rate": 4.499015406000232e-05, + "loss": 0.1878, + "step": 60220 + }, + { + "epoch": 2.3255724159233946, + "grad_norm": 0.7743698954582214, + "learning_rate": 4.496441304040053e-05, + "loss": 0.2342, + "step": 60230 + }, + { + "epoch": 2.3259585312174216, + "grad_norm": 0.940558671951294, + "learning_rate": 4.4938672020798746e-05, + "loss": 0.3158, + "step": 60240 + }, + { + "epoch": 2.3263446465114486, + "grad_norm": 0.35941219329833984, + "learning_rate": 4.491293100119696e-05, + "loss": 0.15, + "step": 60250 + }, + { + "epoch": 2.326730761805475, + "grad_norm": 0.5877255201339722, + "learning_rate": 4.488718998159517e-05, + "loss": 0.2713, + "step": 60260 + }, + { + "epoch": 2.327116877099502, + "grad_norm": 1.0649851560592651, + "learning_rate": 4.4861448961993386e-05, + "loss": 0.1789, + "step": 60270 + }, + { + "epoch": 2.3275029923935286, + "grad_norm": 0.9025186896324158, + "learning_rate": 4.4835707942391606e-05, + "loss": 0.0883, + "step": 60280 + }, + { + "epoch": 2.3278891076875556, + "grad_norm": 0.08007670938968658, + "learning_rate": 4.480996692278981e-05, + "loss": 0.2019, + "step": 60290 + }, + { + "epoch": 2.328275222981582, + "grad_norm": 0.2300902009010315, + "learning_rate": 4.4784225903188025e-05, + "loss": 0.1448, + "step": 60300 + }, + { + "epoch": 2.328661338275609, + "grad_norm": 0.3912908732891083, + "learning_rate": 4.4758484883586246e-05, + "loss": 0.1869, + "step": 60310 + }, + { + "epoch": 2.3290474535696357, + "grad_norm": 0.5869823098182678, + "learning_rate": 4.473274386398445e-05, + "loss": 0.0829, + "step": 60320 + }, + { + "epoch": 
2.3294335688636627, + "grad_norm": 0.5597032308578491, + "learning_rate": 4.4707002844382665e-05, + "loss": 0.1639, + "step": 60330 + }, + { + "epoch": 2.3298196841576893, + "grad_norm": 0.3138620853424072, + "learning_rate": 4.468126182478088e-05, + "loss": 0.1028, + "step": 60340 + }, + { + "epoch": 2.3302057994517162, + "grad_norm": 0.10838694870471954, + "learning_rate": 4.46555208051791e-05, + "loss": 0.1406, + "step": 60350 + }, + { + "epoch": 2.3305919147457432, + "grad_norm": 1.1807544231414795, + "learning_rate": 4.4629779785577305e-05, + "loss": 0.1183, + "step": 60360 + }, + { + "epoch": 2.3309780300397698, + "grad_norm": 0.2713087499141693, + "learning_rate": 4.460403876597552e-05, + "loss": 0.108, + "step": 60370 + }, + { + "epoch": 2.3313641453337968, + "grad_norm": 0.8787125945091248, + "learning_rate": 4.457829774637374e-05, + "loss": 0.2446, + "step": 60380 + }, + { + "epoch": 2.3317502606278233, + "grad_norm": 1.3905388116836548, + "learning_rate": 4.455255672677195e-05, + "loss": 0.1485, + "step": 60390 + }, + { + "epoch": 2.3321363759218503, + "grad_norm": 1.5408064126968384, + "learning_rate": 4.452681570717016e-05, + "loss": 0.2076, + "step": 60400 + }, + { + "epoch": 2.332522491215877, + "grad_norm": 0.034131214022636414, + "learning_rate": 4.450107468756838e-05, + "loss": 0.0905, + "step": 60410 + }, + { + "epoch": 2.332908606509904, + "grad_norm": 1.067578673362732, + "learning_rate": 4.447533366796659e-05, + "loss": 0.1221, + "step": 60420 + }, + { + "epoch": 2.333294721803931, + "grad_norm": 1.5126134157180786, + "learning_rate": 4.44495926483648e-05, + "loss": 0.5139, + "step": 60430 + }, + { + "epoch": 2.3336808370979574, + "grad_norm": 3.148021697998047, + "learning_rate": 4.442385162876302e-05, + "loss": 0.2504, + "step": 60440 + }, + { + "epoch": 2.3340669523919844, + "grad_norm": 0.7882575392723083, + "learning_rate": 4.439811060916123e-05, + "loss": 0.2593, + "step": 60450 + }, + { + "epoch": 2.334453067686011, + "grad_norm": 1.124514102935791, + "learning_rate": 4.4372369589559445e-05, + "loss": 0.303, + "step": 60460 + }, + { + "epoch": 2.334839182980038, + "grad_norm": 0.064745232462883, + "learning_rate": 4.434662856995766e-05, + "loss": 0.1734, + "step": 60470 + }, + { + "epoch": 2.3352252982740644, + "grad_norm": 0.08392655104398727, + "learning_rate": 4.432088755035587e-05, + "loss": 0.1492, + "step": 60480 + }, + { + "epoch": 2.3356114135680914, + "grad_norm": 0.9123765230178833, + "learning_rate": 4.4295146530754085e-05, + "loss": 0.1689, + "step": 60490 + }, + { + "epoch": 2.3359975288621184, + "grad_norm": 1.0740617513656616, + "learning_rate": 4.42694055111523e-05, + "loss": 0.1954, + "step": 60500 + }, + { + "epoch": 2.336383644156145, + "grad_norm": 0.409637987613678, + "learning_rate": 4.424366449155051e-05, + "loss": 0.2164, + "step": 60510 + }, + { + "epoch": 2.336769759450172, + "grad_norm": 2.4724161624908447, + "learning_rate": 4.4217923471948725e-05, + "loss": 0.2736, + "step": 60520 + }, + { + "epoch": 2.3371558747441985, + "grad_norm": 1.966937780380249, + "learning_rate": 4.4192182452346945e-05, + "loss": 0.2463, + "step": 60530 + }, + { + "epoch": 2.3375419900382255, + "grad_norm": 1.0093505382537842, + "learning_rate": 4.416644143274515e-05, + "loss": 0.1524, + "step": 60540 + }, + { + "epoch": 2.337928105332252, + "grad_norm": 0.9283536076545715, + "learning_rate": 4.4140700413143365e-05, + "loss": 0.1174, + "step": 60550 + }, + { + "epoch": 2.338314220626279, + "grad_norm": 1.4851809740066528, + "learning_rate": 
4.4114959393541585e-05, + "loss": 0.1088, + "step": 60560 + }, + { + "epoch": 2.338700335920306, + "grad_norm": 2.666454315185547, + "learning_rate": 4.408921837393979e-05, + "loss": 0.186, + "step": 60570 + }, + { + "epoch": 2.3390864512143326, + "grad_norm": 0.34516963362693787, + "learning_rate": 4.4063477354338005e-05, + "loss": 0.1364, + "step": 60580 + }, + { + "epoch": 2.3394725665083596, + "grad_norm": 1.697920560836792, + "learning_rate": 4.4037736334736225e-05, + "loss": 0.1708, + "step": 60590 + }, + { + "epoch": 2.339858681802386, + "grad_norm": 1.542663812637329, + "learning_rate": 4.401199531513444e-05, + "loss": 0.2926, + "step": 60600 + }, + { + "epoch": 2.340244797096413, + "grad_norm": 1.080894112586975, + "learning_rate": 4.3986254295532645e-05, + "loss": 0.1114, + "step": 60610 + }, + { + "epoch": 2.3406309123904396, + "grad_norm": 0.7464519739151001, + "learning_rate": 4.396051327593086e-05, + "loss": 0.2846, + "step": 60620 + }, + { + "epoch": 2.3410170276844666, + "grad_norm": 1.4161779880523682, + "learning_rate": 4.393477225632908e-05, + "loss": 0.1181, + "step": 60630 + }, + { + "epoch": 2.3414031429784936, + "grad_norm": 0.5969855785369873, + "learning_rate": 4.390903123672729e-05, + "loss": 0.2574, + "step": 60640 + }, + { + "epoch": 2.34178925827252, + "grad_norm": 2.8204824924468994, + "learning_rate": 4.38832902171255e-05, + "loss": 0.1496, + "step": 60650 + }, + { + "epoch": 2.342175373566547, + "grad_norm": 0.5998751521110535, + "learning_rate": 4.385754919752372e-05, + "loss": 0.1849, + "step": 60660 + }, + { + "epoch": 2.3425614888605737, + "grad_norm": 0.5183271169662476, + "learning_rate": 4.383180817792193e-05, + "loss": 0.1577, + "step": 60670 + }, + { + "epoch": 2.3429476041546007, + "grad_norm": 3.008211374282837, + "learning_rate": 4.380606715832014e-05, + "loss": 0.2106, + "step": 60680 + }, + { + "epoch": 2.3433337194486272, + "grad_norm": 0.026534082368016243, + "learning_rate": 4.378032613871836e-05, + "loss": 0.0859, + "step": 60690 + }, + { + "epoch": 2.343719834742654, + "grad_norm": 0.7964476943016052, + "learning_rate": 4.375458511911657e-05, + "loss": 0.2276, + "step": 60700 + }, + { + "epoch": 2.3441059500366808, + "grad_norm": 0.9255203008651733, + "learning_rate": 4.3728844099514785e-05, + "loss": 0.1278, + "step": 60710 + }, + { + "epoch": 2.3444920653307078, + "grad_norm": 0.06721694767475128, + "learning_rate": 4.3703103079913e-05, + "loss": 0.188, + "step": 60720 + }, + { + "epoch": 2.3448781806247343, + "grad_norm": 1.5369101762771606, + "learning_rate": 4.367736206031121e-05, + "loss": 0.2286, + "step": 60730 + }, + { + "epoch": 2.3452642959187613, + "grad_norm": 2.3550243377685547, + "learning_rate": 4.3651621040709425e-05, + "loss": 0.1924, + "step": 60740 + }, + { + "epoch": 2.3456504112127883, + "grad_norm": 1.436240553855896, + "learning_rate": 4.362588002110764e-05, + "loss": 0.179, + "step": 60750 + }, + { + "epoch": 2.346036526506815, + "grad_norm": 4.520357131958008, + "learning_rate": 4.360013900150585e-05, + "loss": 0.3386, + "step": 60760 + }, + { + "epoch": 2.346422641800842, + "grad_norm": 2.983982563018799, + "learning_rate": 4.3574397981904065e-05, + "loss": 0.178, + "step": 60770 + }, + { + "epoch": 2.3468087570948684, + "grad_norm": 0.429884672164917, + "learning_rate": 4.354865696230228e-05, + "loss": 0.0962, + "step": 60780 + }, + { + "epoch": 2.3471948723888953, + "grad_norm": 0.5854440927505493, + "learning_rate": 4.352291594270049e-05, + "loss": 0.1934, + "step": 60790 + }, + { + "epoch": 2.347580987682922, 
+ "grad_norm": 1.087031602859497, + "learning_rate": 4.3497174923098705e-05, + "loss": 0.1191, + "step": 60800 + }, + { + "epoch": 2.347967102976949, + "grad_norm": 1.5751805305480957, + "learning_rate": 4.3471433903496925e-05, + "loss": 0.147, + "step": 60810 + }, + { + "epoch": 2.348353218270976, + "grad_norm": 0.03129373490810394, + "learning_rate": 4.344569288389513e-05, + "loss": 0.1388, + "step": 60820 + }, + { + "epoch": 2.3487393335650024, + "grad_norm": 1.380611777305603, + "learning_rate": 4.3419951864293344e-05, + "loss": 0.2517, + "step": 60830 + }, + { + "epoch": 2.3491254488590294, + "grad_norm": 0.5196431279182434, + "learning_rate": 4.3394210844691565e-05, + "loss": 0.1825, + "step": 60840 + }, + { + "epoch": 2.349511564153056, + "grad_norm": 0.729242205619812, + "learning_rate": 4.336846982508978e-05, + "loss": 0.1716, + "step": 60850 + }, + { + "epoch": 2.349897679447083, + "grad_norm": 0.4489123225212097, + "learning_rate": 4.3342728805487984e-05, + "loss": 0.1318, + "step": 60860 + }, + { + "epoch": 2.3502837947411095, + "grad_norm": 0.41065114736557007, + "learning_rate": 4.33169877858862e-05, + "loss": 0.1579, + "step": 60870 + }, + { + "epoch": 2.3506699100351365, + "grad_norm": 1.2845816612243652, + "learning_rate": 4.329124676628442e-05, + "loss": 0.1613, + "step": 60880 + }, + { + "epoch": 2.3510560253291635, + "grad_norm": 1.17366623878479, + "learning_rate": 4.3265505746682624e-05, + "loss": 0.147, + "step": 60890 + }, + { + "epoch": 2.35144214062319, + "grad_norm": 1.3020472526550293, + "learning_rate": 4.323976472708084e-05, + "loss": 0.284, + "step": 60900 + }, + { + "epoch": 2.351828255917217, + "grad_norm": 0.9030712842941284, + "learning_rate": 4.321402370747906e-05, + "loss": 0.2745, + "step": 60910 + }, + { + "epoch": 2.3522143712112435, + "grad_norm": 0.5040395259857178, + "learning_rate": 4.318828268787727e-05, + "loss": 0.1001, + "step": 60920 + }, + { + "epoch": 2.3526004865052705, + "grad_norm": 0.2728300094604492, + "learning_rate": 4.316254166827548e-05, + "loss": 0.1126, + "step": 60930 + }, + { + "epoch": 2.352986601799297, + "grad_norm": 0.5255390405654907, + "learning_rate": 4.31368006486737e-05, + "loss": 0.1743, + "step": 60940 + }, + { + "epoch": 2.353372717093324, + "grad_norm": 0.05678205192089081, + "learning_rate": 4.311105962907191e-05, + "loss": 0.2214, + "step": 60950 + }, + { + "epoch": 2.353758832387351, + "grad_norm": 0.9271873235702515, + "learning_rate": 4.3085318609470124e-05, + "loss": 0.2263, + "step": 60960 + }, + { + "epoch": 2.3541449476813776, + "grad_norm": 1.0438083410263062, + "learning_rate": 4.305957758986834e-05, + "loss": 0.3506, + "step": 60970 + }, + { + "epoch": 2.3545310629754046, + "grad_norm": 1.231704831123352, + "learning_rate": 4.303383657026655e-05, + "loss": 0.2058, + "step": 60980 + }, + { + "epoch": 2.354917178269431, + "grad_norm": 1.4421464204788208, + "learning_rate": 4.3008095550664764e-05, + "loss": 0.182, + "step": 60990 + }, + { + "epoch": 2.355303293563458, + "grad_norm": 0.883007287979126, + "learning_rate": 4.298235453106298e-05, + "loss": 0.1714, + "step": 61000 + }, + { + "epoch": 2.3556894088574847, + "grad_norm": 0.49014022946357727, + "learning_rate": 4.295661351146119e-05, + "loss": 0.0846, + "step": 61010 + }, + { + "epoch": 2.3560755241515117, + "grad_norm": 1.3218421936035156, + "learning_rate": 4.2930872491859404e-05, + "loss": 0.0946, + "step": 61020 + }, + { + "epoch": 2.3564616394455387, + "grad_norm": 0.08953634649515152, + "learning_rate": 4.290513147225762e-05, + "loss": 
0.2867, + "step": 61030 + }, + { + "epoch": 2.356847754739565, + "grad_norm": 1.4117354154586792, + "learning_rate": 4.287939045265583e-05, + "loss": 0.2493, + "step": 61040 + }, + { + "epoch": 2.357233870033592, + "grad_norm": 0.7844822406768799, + "learning_rate": 4.2853649433054044e-05, + "loss": 0.2385, + "step": 61050 + }, + { + "epoch": 2.3576199853276187, + "grad_norm": 0.0865604355931282, + "learning_rate": 4.2827908413452264e-05, + "loss": 0.0795, + "step": 61060 + }, + { + "epoch": 2.3580061006216457, + "grad_norm": 0.49625343084335327, + "learning_rate": 4.280216739385047e-05, + "loss": 0.0894, + "step": 61070 + }, + { + "epoch": 2.3583922159156723, + "grad_norm": 0.039102040231227875, + "learning_rate": 4.2776426374248684e-05, + "loss": 0.1134, + "step": 61080 + }, + { + "epoch": 2.3587783312096993, + "grad_norm": 0.6378281116485596, + "learning_rate": 4.2750685354646904e-05, + "loss": 0.2716, + "step": 61090 + }, + { + "epoch": 2.3591644465037263, + "grad_norm": 1.9550119638442993, + "learning_rate": 4.272494433504511e-05, + "loss": 0.2008, + "step": 61100 + }, + { + "epoch": 2.359550561797753, + "grad_norm": 0.18372145295143127, + "learning_rate": 4.2699203315443324e-05, + "loss": 0.1762, + "step": 61110 + }, + { + "epoch": 2.35993667709178, + "grad_norm": 0.24232423305511475, + "learning_rate": 4.2673462295841544e-05, + "loss": 0.3316, + "step": 61120 + }, + { + "epoch": 2.3603227923858063, + "grad_norm": 1.474071741104126, + "learning_rate": 4.264772127623976e-05, + "loss": 0.1938, + "step": 61130 + }, + { + "epoch": 2.3607089076798333, + "grad_norm": 0.43742164969444275, + "learning_rate": 4.2621980256637964e-05, + "loss": 0.1667, + "step": 61140 + }, + { + "epoch": 2.36109502297386, + "grad_norm": 0.505805253982544, + "learning_rate": 4.259623923703618e-05, + "loss": 0.2649, + "step": 61150 + }, + { + "epoch": 2.361481138267887, + "grad_norm": 0.8793296813964844, + "learning_rate": 4.25704982174344e-05, + "loss": 0.1231, + "step": 61160 + }, + { + "epoch": 2.3618672535619134, + "grad_norm": 0.19883646070957184, + "learning_rate": 4.254475719783261e-05, + "loss": 0.163, + "step": 61170 + }, + { + "epoch": 2.3622533688559404, + "grad_norm": 0.4133305847644806, + "learning_rate": 4.251901617823082e-05, + "loss": 0.1632, + "step": 61180 + }, + { + "epoch": 2.362639484149967, + "grad_norm": 0.8530174493789673, + "learning_rate": 4.249327515862904e-05, + "loss": 0.0851, + "step": 61190 + }, + { + "epoch": 2.363025599443994, + "grad_norm": 1.6462198495864868, + "learning_rate": 4.246753413902725e-05, + "loss": 0.2051, + "step": 61200 + }, + { + "epoch": 2.363411714738021, + "grad_norm": 1.284153699874878, + "learning_rate": 4.244179311942546e-05, + "loss": 0.227, + "step": 61210 + }, + { + "epoch": 2.3637978300320475, + "grad_norm": 0.5583304166793823, + "learning_rate": 4.241605209982368e-05, + "loss": 0.0665, + "step": 61220 + }, + { + "epoch": 2.3641839453260745, + "grad_norm": 1.0726197957992554, + "learning_rate": 4.239031108022189e-05, + "loss": 0.1465, + "step": 61230 + }, + { + "epoch": 2.364570060620101, + "grad_norm": 0.15589381754398346, + "learning_rate": 4.2364570060620104e-05, + "loss": 0.1599, + "step": 61240 + }, + { + "epoch": 2.364956175914128, + "grad_norm": 1.9759862422943115, + "learning_rate": 4.233882904101832e-05, + "loss": 0.2904, + "step": 61250 + }, + { + "epoch": 2.3653422912081545, + "grad_norm": 0.20566493272781372, + "learning_rate": 4.231308802141653e-05, + "loss": 0.2447, + "step": 61260 + }, + { + "epoch": 2.3657284065021815, + "grad_norm": 
0.33343741297721863, + "learning_rate": 4.2287347001814744e-05, + "loss": 0.2157, + "step": 61270 + }, + { + "epoch": 2.3661145217962085, + "grad_norm": 0.6890573501586914, + "learning_rate": 4.226160598221296e-05, + "loss": 0.1866, + "step": 61280 + }, + { + "epoch": 2.366500637090235, + "grad_norm": 0.1372109055519104, + "learning_rate": 4.223586496261117e-05, + "loss": 0.1851, + "step": 61290 + }, + { + "epoch": 2.366886752384262, + "grad_norm": 0.8812543749809265, + "learning_rate": 4.2210123943009384e-05, + "loss": 0.1186, + "step": 61300 + }, + { + "epoch": 2.3672728676782886, + "grad_norm": 0.7651077508926392, + "learning_rate": 4.2184382923407604e-05, + "loss": 0.1168, + "step": 61310 + }, + { + "epoch": 2.3676589829723156, + "grad_norm": 0.886715292930603, + "learning_rate": 4.215864190380581e-05, + "loss": 0.1403, + "step": 61320 + }, + { + "epoch": 2.368045098266342, + "grad_norm": 1.4525467157363892, + "learning_rate": 4.2132900884204024e-05, + "loss": 0.0951, + "step": 61330 + }, + { + "epoch": 2.368431213560369, + "grad_norm": 1.490551233291626, + "learning_rate": 4.2107159864602244e-05, + "loss": 0.1127, + "step": 61340 + }, + { + "epoch": 2.368817328854396, + "grad_norm": 1.7452077865600586, + "learning_rate": 4.208141884500045e-05, + "loss": 0.0958, + "step": 61350 + }, + { + "epoch": 2.3692034441484227, + "grad_norm": 1.6857271194458008, + "learning_rate": 4.2055677825398664e-05, + "loss": 0.1731, + "step": 61360 + }, + { + "epoch": 2.3695895594424496, + "grad_norm": 0.5354145765304565, + "learning_rate": 4.2029936805796884e-05, + "loss": 0.1051, + "step": 61370 + }, + { + "epoch": 2.369975674736476, + "grad_norm": 0.18171580135822296, + "learning_rate": 4.20041957861951e-05, + "loss": 0.1761, + "step": 61380 + }, + { + "epoch": 2.370361790030503, + "grad_norm": 1.021549940109253, + "learning_rate": 4.1978454766593303e-05, + "loss": 0.1949, + "step": 61390 + }, + { + "epoch": 2.3707479053245297, + "grad_norm": 1.4387668371200562, + "learning_rate": 4.195271374699152e-05, + "loss": 0.1864, + "step": 61400 + }, + { + "epoch": 2.3711340206185567, + "grad_norm": 0.24176666140556335, + "learning_rate": 4.192697272738974e-05, + "loss": 0.2683, + "step": 61410 + }, + { + "epoch": 2.3715201359125837, + "grad_norm": 1.2240315675735474, + "learning_rate": 4.190123170778795e-05, + "loss": 0.195, + "step": 61420 + }, + { + "epoch": 2.3719062512066102, + "grad_norm": 2.242389440536499, + "learning_rate": 4.187549068818616e-05, + "loss": 0.1074, + "step": 61430 + }, + { + "epoch": 2.3722923665006372, + "grad_norm": 0.7379412055015564, + "learning_rate": 4.184974966858438e-05, + "loss": 0.1394, + "step": 61440 + }, + { + "epoch": 2.372678481794664, + "grad_norm": 1.3384835720062256, + "learning_rate": 4.182400864898259e-05, + "loss": 0.248, + "step": 61450 + }, + { + "epoch": 2.3730645970886908, + "grad_norm": 0.23063971102237701, + "learning_rate": 4.17982676293808e-05, + "loss": 0.1458, + "step": 61460 + }, + { + "epoch": 2.3734507123827173, + "grad_norm": 0.6873703598976135, + "learning_rate": 4.177252660977902e-05, + "loss": 0.1315, + "step": 61470 + }, + { + "epoch": 2.3738368276767443, + "grad_norm": 1.462497591972351, + "learning_rate": 4.174678559017723e-05, + "loss": 0.1031, + "step": 61480 + }, + { + "epoch": 2.3742229429707713, + "grad_norm": 1.403594732284546, + "learning_rate": 4.172104457057544e-05, + "loss": 0.1962, + "step": 61490 + }, + { + "epoch": 2.374609058264798, + "grad_norm": 3.3132827281951904, + "learning_rate": 4.169530355097366e-05, + "loss": 0.243, + "step": 
61500 + }, + { + "epoch": 2.374995173558825, + "grad_norm": 0.5474012494087219, + "learning_rate": 4.166956253137187e-05, + "loss": 0.1087, + "step": 61510 + }, + { + "epoch": 2.3753812888528514, + "grad_norm": 1.2518501281738281, + "learning_rate": 4.164382151177008e-05, + "loss": 0.1119, + "step": 61520 + }, + { + "epoch": 2.3757674041468784, + "grad_norm": 0.10591934621334076, + "learning_rate": 4.16180804921683e-05, + "loss": 0.238, + "step": 61530 + }, + { + "epoch": 2.376153519440905, + "grad_norm": 0.7095358967781067, + "learning_rate": 4.159233947256651e-05, + "loss": 0.1082, + "step": 61540 + }, + { + "epoch": 2.376539634734932, + "grad_norm": 0.09203200787305832, + "learning_rate": 4.156659845296472e-05, + "loss": 0.1178, + "step": 61550 + }, + { + "epoch": 2.376925750028959, + "grad_norm": 1.4663885831832886, + "learning_rate": 4.1540857433362937e-05, + "loss": 0.2273, + "step": 61560 + }, + { + "epoch": 2.3773118653229854, + "grad_norm": 0.9895615577697754, + "learning_rate": 4.151511641376115e-05, + "loss": 0.1222, + "step": 61570 + }, + { + "epoch": 2.3776979806170124, + "grad_norm": 0.7987017631530762, + "learning_rate": 4.148937539415936e-05, + "loss": 0.2083, + "step": 61580 + }, + { + "epoch": 2.378084095911039, + "grad_norm": 0.662470817565918, + "learning_rate": 4.146363437455758e-05, + "loss": 0.1489, + "step": 61590 + }, + { + "epoch": 2.378470211205066, + "grad_norm": 0.703076183795929, + "learning_rate": 4.143789335495579e-05, + "loss": 0.2176, + "step": 61600 + }, + { + "epoch": 2.3788563264990925, + "grad_norm": 0.6900975704193115, + "learning_rate": 4.1412152335354e-05, + "loss": 0.0967, + "step": 61610 + }, + { + "epoch": 2.3792424417931195, + "grad_norm": 0.4006218910217285, + "learning_rate": 4.138641131575222e-05, + "loss": 0.1943, + "step": 61620 + }, + { + "epoch": 2.379628557087146, + "grad_norm": 0.8131549954414368, + "learning_rate": 4.1360670296150437e-05, + "loss": 0.1458, + "step": 61630 + }, + { + "epoch": 2.380014672381173, + "grad_norm": 2.572120189666748, + "learning_rate": 4.133492927654864e-05, + "loss": 0.1892, + "step": 61640 + }, + { + "epoch": 2.3804007876751996, + "grad_norm": 1.40338134765625, + "learning_rate": 4.1309188256946856e-05, + "loss": 0.1081, + "step": 61650 + }, + { + "epoch": 2.3807869029692266, + "grad_norm": 2.1713101863861084, + "learning_rate": 4.1283447237345076e-05, + "loss": 0.1839, + "step": 61660 + }, + { + "epoch": 2.3811730182632536, + "grad_norm": 1.1773313283920288, + "learning_rate": 4.125770621774328e-05, + "loss": 0.1065, + "step": 61670 + }, + { + "epoch": 2.38155913355728, + "grad_norm": 2.314040184020996, + "learning_rate": 4.1231965198141496e-05, + "loss": 0.2298, + "step": 61680 + }, + { + "epoch": 2.381945248851307, + "grad_norm": 1.7708461284637451, + "learning_rate": 4.1206224178539716e-05, + "loss": 0.1068, + "step": 61690 + }, + { + "epoch": 2.3823313641453336, + "grad_norm": 0.168818861246109, + "learning_rate": 4.118048315893793e-05, + "loss": 0.0979, + "step": 61700 + }, + { + "epoch": 2.3827174794393606, + "grad_norm": 2.80302357673645, + "learning_rate": 4.1154742139336136e-05, + "loss": 0.3275, + "step": 61710 + }, + { + "epoch": 2.383103594733387, + "grad_norm": 0.5709852576255798, + "learning_rate": 4.1129001119734356e-05, + "loss": 0.1523, + "step": 61720 + }, + { + "epoch": 2.383489710027414, + "grad_norm": 0.4733193516731262, + "learning_rate": 4.110326010013257e-05, + "loss": 0.0958, + "step": 61730 + }, + { + "epoch": 2.383875825321441, + "grad_norm": 0.3468289375305176, + 
"learning_rate": 4.107751908053078e-05, + "loss": 0.1437, + "step": 61740 + }, + { + "epoch": 2.3842619406154677, + "grad_norm": 0.7683085203170776, + "learning_rate": 4.1051778060928996e-05, + "loss": 0.1915, + "step": 61750 + }, + { + "epoch": 2.3846480559094947, + "grad_norm": 0.9539376497268677, + "learning_rate": 4.102603704132721e-05, + "loss": 0.13, + "step": 61760 + }, + { + "epoch": 2.3850341712035212, + "grad_norm": 1.247579574584961, + "learning_rate": 4.100029602172542e-05, + "loss": 0.1212, + "step": 61770 + }, + { + "epoch": 2.3854202864975482, + "grad_norm": 0.5379541516304016, + "learning_rate": 4.0974555002123636e-05, + "loss": 0.4552, + "step": 61780 + }, + { + "epoch": 2.3858064017915748, + "grad_norm": 1.4555822610855103, + "learning_rate": 4.094881398252185e-05, + "loss": 0.1677, + "step": 61790 + }, + { + "epoch": 2.3861925170856018, + "grad_norm": 2.210245132446289, + "learning_rate": 4.092307296292006e-05, + "loss": 0.1099, + "step": 61800 + }, + { + "epoch": 2.3865786323796288, + "grad_norm": 0.2830033004283905, + "learning_rate": 4.0897331943318276e-05, + "loss": 0.0844, + "step": 61810 + }, + { + "epoch": 2.3869647476736553, + "grad_norm": 0.5923789739608765, + "learning_rate": 4.087159092371649e-05, + "loss": 0.0668, + "step": 61820 + }, + { + "epoch": 2.3873508629676823, + "grad_norm": 2.1576321125030518, + "learning_rate": 4.08458499041147e-05, + "loss": 0.2333, + "step": 61830 + }, + { + "epoch": 2.387736978261709, + "grad_norm": 0.09188230335712433, + "learning_rate": 4.082010888451292e-05, + "loss": 0.1849, + "step": 61840 + }, + { + "epoch": 2.388123093555736, + "grad_norm": 0.5188024640083313, + "learning_rate": 4.079436786491113e-05, + "loss": 0.0781, + "step": 61850 + }, + { + "epoch": 2.3885092088497624, + "grad_norm": 0.33355507254600525, + "learning_rate": 4.076862684530934e-05, + "loss": 0.1886, + "step": 61860 + }, + { + "epoch": 2.3888953241437894, + "grad_norm": 0.25753054022789, + "learning_rate": 4.074288582570756e-05, + "loss": 0.1492, + "step": 61870 + }, + { + "epoch": 2.3892814394378163, + "grad_norm": 0.47389110922813416, + "learning_rate": 4.071714480610577e-05, + "loss": 0.1576, + "step": 61880 + }, + { + "epoch": 2.389667554731843, + "grad_norm": 0.6938667297363281, + "learning_rate": 4.069140378650398e-05, + "loss": 0.1554, + "step": 61890 + }, + { + "epoch": 2.39005367002587, + "grad_norm": 0.5270907282829285, + "learning_rate": 4.06656627669022e-05, + "loss": 0.0905, + "step": 61900 + }, + { + "epoch": 2.3904397853198964, + "grad_norm": 0.972940981388092, + "learning_rate": 4.0639921747300416e-05, + "loss": 0.1008, + "step": 61910 + }, + { + "epoch": 2.3908259006139234, + "grad_norm": 0.3339834213256836, + "learning_rate": 4.061418072769862e-05, + "loss": 0.124, + "step": 61920 + }, + { + "epoch": 2.39121201590795, + "grad_norm": 0.40384066104888916, + "learning_rate": 4.0588439708096836e-05, + "loss": 0.1318, + "step": 61930 + }, + { + "epoch": 2.391598131201977, + "grad_norm": 1.3399138450622559, + "learning_rate": 4.0562698688495056e-05, + "loss": 0.2285, + "step": 61940 + }, + { + "epoch": 2.391984246496004, + "grad_norm": 1.2441486120224, + "learning_rate": 4.053695766889327e-05, + "loss": 0.1842, + "step": 61950 + }, + { + "epoch": 2.3923703617900305, + "grad_norm": 0.18500332534313202, + "learning_rate": 4.0511216649291476e-05, + "loss": 0.124, + "step": 61960 + }, + { + "epoch": 2.3927564770840575, + "grad_norm": 4.335320949554443, + "learning_rate": 4.0485475629689696e-05, + "loss": 0.2101, + "step": 61970 + }, + { + 
"epoch": 2.393142592378084, + "grad_norm": 1.7215917110443115, + "learning_rate": 4.045973461008791e-05, + "loss": 0.1828, + "step": 61980 + }, + { + "epoch": 2.393528707672111, + "grad_norm": 1.3829667568206787, + "learning_rate": 4.0433993590486116e-05, + "loss": 0.1872, + "step": 61990 + }, + { + "epoch": 2.3939148229661376, + "grad_norm": 0.8047557473182678, + "learning_rate": 4.0408252570884336e-05, + "loss": 0.195, + "step": 62000 + }, + { + "epoch": 2.3943009382601645, + "grad_norm": 0.04885184019804001, + "learning_rate": 4.038251155128255e-05, + "loss": 0.1502, + "step": 62010 + }, + { + "epoch": 2.394687053554191, + "grad_norm": 2.9263839721679688, + "learning_rate": 4.035677053168076e-05, + "loss": 0.2479, + "step": 62020 + }, + { + "epoch": 2.395073168848218, + "grad_norm": 1.2394524812698364, + "learning_rate": 4.0331029512078976e-05, + "loss": 0.1221, + "step": 62030 + }, + { + "epoch": 2.3954592841422446, + "grad_norm": 1.1224110126495361, + "learning_rate": 4.030528849247719e-05, + "loss": 0.1172, + "step": 62040 + }, + { + "epoch": 2.3958453994362716, + "grad_norm": 1.0132677555084229, + "learning_rate": 4.02795474728754e-05, + "loss": 0.2585, + "step": 62050 + }, + { + "epoch": 2.3962315147302986, + "grad_norm": 1.7612736225128174, + "learning_rate": 4.0253806453273616e-05, + "loss": 0.1449, + "step": 62060 + }, + { + "epoch": 2.396617630024325, + "grad_norm": 2.687474012374878, + "learning_rate": 4.022806543367183e-05, + "loss": 0.2116, + "step": 62070 + }, + { + "epoch": 2.397003745318352, + "grad_norm": 0.9632325768470764, + "learning_rate": 4.020232441407004e-05, + "loss": 0.233, + "step": 62080 + }, + { + "epoch": 2.3973898606123787, + "grad_norm": 0.385966956615448, + "learning_rate": 4.017658339446826e-05, + "loss": 0.2557, + "step": 62090 + }, + { + "epoch": 2.3977759759064057, + "grad_norm": 0.18261398375034332, + "learning_rate": 4.015084237486647e-05, + "loss": 0.0792, + "step": 62100 + }, + { + "epoch": 2.3981620912004322, + "grad_norm": 0.07081570476293564, + "learning_rate": 4.012510135526468e-05, + "loss": 0.121, + "step": 62110 + }, + { + "epoch": 2.398548206494459, + "grad_norm": 1.5726689100265503, + "learning_rate": 4.00993603356629e-05, + "loss": 0.235, + "step": 62120 + }, + { + "epoch": 2.398934321788486, + "grad_norm": 1.1276930570602417, + "learning_rate": 4.007361931606111e-05, + "loss": 0.2191, + "step": 62130 + }, + { + "epoch": 2.3993204370825127, + "grad_norm": 1.8213441371917725, + "learning_rate": 4.004787829645932e-05, + "loss": 0.2588, + "step": 62140 + }, + { + "epoch": 2.3997065523765397, + "grad_norm": 1.2340245246887207, + "learning_rate": 4.002213727685754e-05, + "loss": 0.2816, + "step": 62150 + }, + { + "epoch": 2.4000926676705663, + "grad_norm": 1.6360499858856201, + "learning_rate": 3.9996396257255756e-05, + "loss": 0.1142, + "step": 62160 + }, + { + "epoch": 2.4004787829645933, + "grad_norm": 0.48215198516845703, + "learning_rate": 3.997065523765396e-05, + "loss": 0.1013, + "step": 62170 + }, + { + "epoch": 2.40086489825862, + "grad_norm": 0.04493289813399315, + "learning_rate": 3.9944914218052175e-05, + "loss": 0.2127, + "step": 62180 + }, + { + "epoch": 2.401251013552647, + "grad_norm": 0.3863857686519623, + "learning_rate": 3.9919173198450396e-05, + "loss": 0.1712, + "step": 62190 + }, + { + "epoch": 2.401637128846674, + "grad_norm": 2.209010362625122, + "learning_rate": 3.98934321788486e-05, + "loss": 0.1541, + "step": 62200 + }, + { + "epoch": 2.4020232441407003, + "grad_norm": 0.5304957032203674, + "learning_rate": 
3.9867691159246815e-05, + "loss": 0.2147, + "step": 62210 + }, + { + "epoch": 2.4024093594347273, + "grad_norm": 0.660261332988739, + "learning_rate": 3.9841950139645035e-05, + "loss": 0.054, + "step": 62220 + }, + { + "epoch": 2.402795474728754, + "grad_norm": 0.3104497194290161, + "learning_rate": 3.981620912004325e-05, + "loss": 0.1164, + "step": 62230 + }, + { + "epoch": 2.403181590022781, + "grad_norm": 0.6475027799606323, + "learning_rate": 3.9790468100441455e-05, + "loss": 0.1258, + "step": 62240 + }, + { + "epoch": 2.4035677053168074, + "grad_norm": 0.16110478341579437, + "learning_rate": 3.9764727080839675e-05, + "loss": 0.1574, + "step": 62250 + }, + { + "epoch": 2.4039538206108344, + "grad_norm": 2.295118808746338, + "learning_rate": 3.973898606123789e-05, + "loss": 0.1732, + "step": 62260 + }, + { + "epoch": 2.4043399359048614, + "grad_norm": 1.4980134963989258, + "learning_rate": 3.97132450416361e-05, + "loss": 0.2441, + "step": 62270 + }, + { + "epoch": 2.404726051198888, + "grad_norm": 1.1637049913406372, + "learning_rate": 3.9687504022034315e-05, + "loss": 0.2177, + "step": 62280 + }, + { + "epoch": 2.405112166492915, + "grad_norm": 0.2586102783679962, + "learning_rate": 3.966176300243253e-05, + "loss": 0.1486, + "step": 62290 + }, + { + "epoch": 2.4054982817869415, + "grad_norm": 1.9430426359176636, + "learning_rate": 3.963602198283074e-05, + "loss": 0.1609, + "step": 62300 + }, + { + "epoch": 2.4058843970809685, + "grad_norm": 1.1216020584106445, + "learning_rate": 3.9610280963228955e-05, + "loss": 0.1192, + "step": 62310 + }, + { + "epoch": 2.406270512374995, + "grad_norm": 0.34984323382377625, + "learning_rate": 3.958453994362717e-05, + "loss": 0.104, + "step": 62320 + }, + { + "epoch": 2.406656627669022, + "grad_norm": 3.057056427001953, + "learning_rate": 3.955879892402538e-05, + "loss": 0.259, + "step": 62330 + }, + { + "epoch": 2.407042742963049, + "grad_norm": 1.8370370864868164, + "learning_rate": 3.9533057904423595e-05, + "loss": 0.2345, + "step": 62340 + }, + { + "epoch": 2.4074288582570755, + "grad_norm": 0.6045883297920227, + "learning_rate": 3.950731688482181e-05, + "loss": 0.188, + "step": 62350 + }, + { + "epoch": 2.4078149735511025, + "grad_norm": 0.20036596059799194, + "learning_rate": 3.948157586522002e-05, + "loss": 0.1039, + "step": 62360 + }, + { + "epoch": 2.408201088845129, + "grad_norm": 0.1816219538450241, + "learning_rate": 3.945583484561824e-05, + "loss": 0.1453, + "step": 62370 + }, + { + "epoch": 2.408587204139156, + "grad_norm": 2.150385856628418, + "learning_rate": 3.943009382601645e-05, + "loss": 0.1803, + "step": 62380 + }, + { + "epoch": 2.4089733194331826, + "grad_norm": 1.5039875507354736, + "learning_rate": 3.940435280641466e-05, + "loss": 0.1951, + "step": 62390 + }, + { + "epoch": 2.4093594347272096, + "grad_norm": 1.9124608039855957, + "learning_rate": 3.937861178681288e-05, + "loss": 0.1488, + "step": 62400 + }, + { + "epoch": 2.4097455500212366, + "grad_norm": 0.5086666345596313, + "learning_rate": 3.9352870767211095e-05, + "loss": 0.2198, + "step": 62410 + }, + { + "epoch": 2.410131665315263, + "grad_norm": 0.7198240160942078, + "learning_rate": 3.93271297476093e-05, + "loss": 0.104, + "step": 62420 + }, + { + "epoch": 2.41051778060929, + "grad_norm": 0.22373056411743164, + "learning_rate": 3.930138872800752e-05, + "loss": 0.1572, + "step": 62430 + }, + { + "epoch": 2.4109038959033167, + "grad_norm": 0.58324134349823, + "learning_rate": 3.9275647708405735e-05, + "loss": 0.1558, + "step": 62440 + }, + { + "epoch": 
2.4112900111973437, + "grad_norm": 0.5554331541061401, + "learning_rate": 3.924990668880394e-05, + "loss": 0.1776, + "step": 62450 + }, + { + "epoch": 2.41167612649137, + "grad_norm": 0.4414098262786865, + "learning_rate": 3.9224165669202155e-05, + "loss": 0.1029, + "step": 62460 + }, + { + "epoch": 2.412062241785397, + "grad_norm": 1.496374487876892, + "learning_rate": 3.9198424649600375e-05, + "loss": 0.1948, + "step": 62470 + }, + { + "epoch": 2.4124483570794237, + "grad_norm": 1.6268385648727417, + "learning_rate": 3.917268362999859e-05, + "loss": 0.1853, + "step": 62480 + }, + { + "epoch": 2.4128344723734507, + "grad_norm": 2.2693099975585938, + "learning_rate": 3.9146942610396795e-05, + "loss": 0.2076, + "step": 62490 + }, + { + "epoch": 2.4132205876674773, + "grad_norm": 1.1219795942306519, + "learning_rate": 3.9121201590795015e-05, + "loss": 0.1186, + "step": 62500 + }, + { + "epoch": 2.4136067029615043, + "grad_norm": 0.7887373566627502, + "learning_rate": 3.909546057119323e-05, + "loss": 0.1996, + "step": 62510 + }, + { + "epoch": 2.4139928182555312, + "grad_norm": 0.256099671125412, + "learning_rate": 3.906971955159144e-05, + "loss": 0.0926, + "step": 62520 + }, + { + "epoch": 2.414378933549558, + "grad_norm": 0.6383737921714783, + "learning_rate": 3.9043978531989655e-05, + "loss": 0.1383, + "step": 62530 + }, + { + "epoch": 2.414765048843585, + "grad_norm": 1.5678856372833252, + "learning_rate": 3.901823751238787e-05, + "loss": 0.1486, + "step": 62540 + }, + { + "epoch": 2.4151511641376113, + "grad_norm": 0.11567826569080353, + "learning_rate": 3.899249649278608e-05, + "loss": 0.1031, + "step": 62550 + }, + { + "epoch": 2.4155372794316383, + "grad_norm": 1.3567986488342285, + "learning_rate": 3.8966755473184295e-05, + "loss": 0.1964, + "step": 62560 + }, + { + "epoch": 2.415923394725665, + "grad_norm": 0.1885988563299179, + "learning_rate": 3.894101445358251e-05, + "loss": 0.0998, + "step": 62570 + }, + { + "epoch": 2.416309510019692, + "grad_norm": 0.7068611979484558, + "learning_rate": 3.891527343398072e-05, + "loss": 0.161, + "step": 62580 + }, + { + "epoch": 2.416695625313719, + "grad_norm": 0.34557539224624634, + "learning_rate": 3.8889532414378935e-05, + "loss": 0.143, + "step": 62590 + }, + { + "epoch": 2.4170817406077454, + "grad_norm": 1.6846903562545776, + "learning_rate": 3.886379139477715e-05, + "loss": 0.189, + "step": 62600 + }, + { + "epoch": 2.4174678559017724, + "grad_norm": 0.9236536026000977, + "learning_rate": 3.883805037517536e-05, + "loss": 0.2385, + "step": 62610 + }, + { + "epoch": 2.417853971195799, + "grad_norm": 2.0287728309631348, + "learning_rate": 3.881230935557358e-05, + "loss": 0.3649, + "step": 62620 + }, + { + "epoch": 2.418240086489826, + "grad_norm": 1.3650734424591064, + "learning_rate": 3.878656833597179e-05, + "loss": 0.1766, + "step": 62630 + }, + { + "epoch": 2.4186262017838525, + "grad_norm": 0.44368478655815125, + "learning_rate": 3.876082731637e-05, + "loss": 0.1456, + "step": 62640 + }, + { + "epoch": 2.4190123170778794, + "grad_norm": 1.93278169631958, + "learning_rate": 3.873508629676822e-05, + "loss": 0.1568, + "step": 62650 + }, + { + "epoch": 2.4193984323719064, + "grad_norm": 0.22828684747219086, + "learning_rate": 3.870934527716643e-05, + "loss": 0.1003, + "step": 62660 + }, + { + "epoch": 2.419784547665933, + "grad_norm": 0.796909749507904, + "learning_rate": 3.868360425756464e-05, + "loss": 0.0777, + "step": 62670 + }, + { + "epoch": 2.42017066295996, + "grad_norm": 0.8624141812324524, + "learning_rate": 
3.865786323796286e-05, + "loss": 0.2598, + "step": 62680 + }, + { + "epoch": 2.4205567782539865, + "grad_norm": 1.9208048582077026, + "learning_rate": 3.8632122218361075e-05, + "loss": 0.3543, + "step": 62690 + }, + { + "epoch": 2.4209428935480135, + "grad_norm": 0.8410032987594604, + "learning_rate": 3.860638119875928e-05, + "loss": 0.2511, + "step": 62700 + }, + { + "epoch": 2.42132900884204, + "grad_norm": 0.2925935387611389, + "learning_rate": 3.8580640179157494e-05, + "loss": 0.1085, + "step": 62710 + }, + { + "epoch": 2.421715124136067, + "grad_norm": 2.3135852813720703, + "learning_rate": 3.8554899159555715e-05, + "loss": 0.235, + "step": 62720 + }, + { + "epoch": 2.422101239430094, + "grad_norm": 1.9191985130310059, + "learning_rate": 3.852915813995393e-05, + "loss": 0.1893, + "step": 62730 + }, + { + "epoch": 2.4224873547241206, + "grad_norm": 1.2339379787445068, + "learning_rate": 3.8503417120352134e-05, + "loss": 0.1778, + "step": 62740 + }, + { + "epoch": 2.4228734700181476, + "grad_norm": 0.09992465376853943, + "learning_rate": 3.8477676100750355e-05, + "loss": 0.1301, + "step": 62750 + }, + { + "epoch": 2.423259585312174, + "grad_norm": 0.13811521232128143, + "learning_rate": 3.845193508114857e-05, + "loss": 0.1062, + "step": 62760 + }, + { + "epoch": 2.423645700606201, + "grad_norm": 2.9451167583465576, + "learning_rate": 3.8426194061546774e-05, + "loss": 0.086, + "step": 62770 + }, + { + "epoch": 2.4240318159002276, + "grad_norm": 0.23093783855438232, + "learning_rate": 3.8400453041944994e-05, + "loss": 0.0962, + "step": 62780 + }, + { + "epoch": 2.4244179311942546, + "grad_norm": 0.10238637775182724, + "learning_rate": 3.837471202234321e-05, + "loss": 0.1338, + "step": 62790 + }, + { + "epoch": 2.4248040464882816, + "grad_norm": 0.4013136029243469, + "learning_rate": 3.834897100274142e-05, + "loss": 0.3002, + "step": 62800 + }, + { + "epoch": 2.425190161782308, + "grad_norm": 0.3969825804233551, + "learning_rate": 3.8323229983139634e-05, + "loss": 0.2349, + "step": 62810 + }, + { + "epoch": 2.425576277076335, + "grad_norm": 0.9879517555236816, + "learning_rate": 3.829748896353785e-05, + "loss": 0.2564, + "step": 62820 + }, + { + "epoch": 2.4259623923703617, + "grad_norm": 1.5865511894226074, + "learning_rate": 3.827174794393606e-05, + "loss": 0.19, + "step": 62830 + }, + { + "epoch": 2.4263485076643887, + "grad_norm": 2.8750438690185547, + "learning_rate": 3.8246006924334274e-05, + "loss": 0.1515, + "step": 62840 + }, + { + "epoch": 2.4267346229584152, + "grad_norm": 0.9555363059043884, + "learning_rate": 3.822026590473249e-05, + "loss": 0.137, + "step": 62850 + }, + { + "epoch": 2.4271207382524422, + "grad_norm": 0.10716754198074341, + "learning_rate": 3.81945248851307e-05, + "loss": 0.2019, + "step": 62860 + }, + { + "epoch": 2.4275068535464692, + "grad_norm": 2.1091630458831787, + "learning_rate": 3.8168783865528914e-05, + "loss": 0.2126, + "step": 62870 + }, + { + "epoch": 2.4278929688404958, + "grad_norm": 2.2296948432922363, + "learning_rate": 3.814304284592713e-05, + "loss": 0.2098, + "step": 62880 + }, + { + "epoch": 2.4282790841345228, + "grad_norm": 1.1145920753479004, + "learning_rate": 3.811730182632534e-05, + "loss": 0.0998, + "step": 62890 + }, + { + "epoch": 2.4286651994285493, + "grad_norm": 0.6394558548927307, + "learning_rate": 3.809156080672356e-05, + "loss": 0.1766, + "step": 62900 + }, + { + "epoch": 2.4290513147225763, + "grad_norm": 0.16177436709403992, + "learning_rate": 3.806581978712177e-05, + "loss": 0.1776, + "step": 62910 + }, + { + "epoch": 
2.429437430016603, + "grad_norm": 1.643153190612793, + "learning_rate": 3.804007876751998e-05, + "loss": 0.1908, + "step": 62920 + }, + { + "epoch": 2.42982354531063, + "grad_norm": 3.011587619781494, + "learning_rate": 3.80143377479182e-05, + "loss": 0.1595, + "step": 62930 + }, + { + "epoch": 2.4302096606046564, + "grad_norm": 1.1857898235321045, + "learning_rate": 3.7988596728316414e-05, + "loss": 0.1117, + "step": 62940 + }, + { + "epoch": 2.4305957758986834, + "grad_norm": 0.984836995601654, + "learning_rate": 3.796285570871462e-05, + "loss": 0.0888, + "step": 62950 + }, + { + "epoch": 2.43098189119271, + "grad_norm": 0.2273918092250824, + "learning_rate": 3.7937114689112834e-05, + "loss": 0.0998, + "step": 62960 + }, + { + "epoch": 2.431368006486737, + "grad_norm": 0.6913338303565979, + "learning_rate": 3.7911373669511054e-05, + "loss": 0.1549, + "step": 62970 + }, + { + "epoch": 2.431754121780764, + "grad_norm": 0.2670879364013672, + "learning_rate": 3.788563264990926e-05, + "loss": 0.1931, + "step": 62980 + }, + { + "epoch": 2.4321402370747904, + "grad_norm": 0.3435567021369934, + "learning_rate": 3.7859891630307474e-05, + "loss": 0.1872, + "step": 62990 + }, + { + "epoch": 2.4325263523688174, + "grad_norm": 2.289534091949463, + "learning_rate": 3.7834150610705694e-05, + "loss": 0.1371, + "step": 63000 + }, + { + "epoch": 2.432912467662844, + "grad_norm": 0.5831142067909241, + "learning_rate": 3.780840959110391e-05, + "loss": 0.1779, + "step": 63010 + }, + { + "epoch": 2.433298582956871, + "grad_norm": 0.4865301847457886, + "learning_rate": 3.7782668571502114e-05, + "loss": 0.2069, + "step": 63020 + }, + { + "epoch": 2.4336846982508975, + "grad_norm": 0.9294113516807556, + "learning_rate": 3.7756927551900334e-05, + "loss": 0.2059, + "step": 63030 + }, + { + "epoch": 2.4340708135449245, + "grad_norm": 1.044704794883728, + "learning_rate": 3.773118653229855e-05, + "loss": 0.2075, + "step": 63040 + }, + { + "epoch": 2.4344569288389515, + "grad_norm": 0.07156316190958023, + "learning_rate": 3.770544551269676e-05, + "loss": 0.0827, + "step": 63050 + }, + { + "epoch": 2.434843044132978, + "grad_norm": 1.0131940841674805, + "learning_rate": 3.7679704493094974e-05, + "loss": 0.0786, + "step": 63060 + }, + { + "epoch": 2.435229159427005, + "grad_norm": 0.8649851679801941, + "learning_rate": 3.765396347349319e-05, + "loss": 0.1476, + "step": 63070 + }, + { + "epoch": 2.4356152747210316, + "grad_norm": 4.939096927642822, + "learning_rate": 3.76282224538914e-05, + "loss": 0.2188, + "step": 63080 + }, + { + "epoch": 2.4360013900150586, + "grad_norm": 0.4139706790447235, + "learning_rate": 3.7602481434289614e-05, + "loss": 0.281, + "step": 63090 + }, + { + "epoch": 2.436387505309085, + "grad_norm": 0.0399312861263752, + "learning_rate": 3.757674041468783e-05, + "loss": 0.3068, + "step": 63100 + }, + { + "epoch": 2.436773620603112, + "grad_norm": 0.7237934470176697, + "learning_rate": 3.755099939508604e-05, + "loss": 0.2222, + "step": 63110 + }, + { + "epoch": 2.437159735897139, + "grad_norm": 0.16770142316818237, + "learning_rate": 3.7525258375484254e-05, + "loss": 0.1302, + "step": 63120 + }, + { + "epoch": 2.4375458511911656, + "grad_norm": 1.9887669086456299, + "learning_rate": 3.749951735588247e-05, + "loss": 0.0794, + "step": 63130 + }, + { + "epoch": 2.4379319664851926, + "grad_norm": 2.856318712234497, + "learning_rate": 3.747377633628068e-05, + "loss": 0.0996, + "step": 63140 + }, + { + "epoch": 2.438318081779219, + "grad_norm": 0.9471076130867004, + "learning_rate": 
3.74480353166789e-05, + "loss": 0.1915, + "step": 63150 + }, + { + "epoch": 2.438704197073246, + "grad_norm": 1.4671097993850708, + "learning_rate": 3.742229429707711e-05, + "loss": 0.1851, + "step": 63160 + }, + { + "epoch": 2.4390903123672727, + "grad_norm": 0.3654942810535431, + "learning_rate": 3.739655327747532e-05, + "loss": 0.1091, + "step": 63170 + }, + { + "epoch": 2.4394764276612997, + "grad_norm": 0.8947110176086426, + "learning_rate": 3.737081225787354e-05, + "loss": 0.1741, + "step": 63180 + }, + { + "epoch": 2.4398625429553267, + "grad_norm": 1.3367135524749756, + "learning_rate": 3.7345071238271754e-05, + "loss": 0.2007, + "step": 63190 + }, + { + "epoch": 2.440248658249353, + "grad_norm": 2.484145164489746, + "learning_rate": 3.731933021866996e-05, + "loss": 0.1794, + "step": 63200 + }, + { + "epoch": 2.44063477354338, + "grad_norm": 1.0260628461837769, + "learning_rate": 3.729358919906818e-05, + "loss": 0.3053, + "step": 63210 + }, + { + "epoch": 2.4410208888374068, + "grad_norm": 1.0322519540786743, + "learning_rate": 3.7267848179466394e-05, + "loss": 0.1826, + "step": 63220 + }, + { + "epoch": 2.4414070041314337, + "grad_norm": 0.6321549415588379, + "learning_rate": 3.72421071598646e-05, + "loss": 0.1561, + "step": 63230 + }, + { + "epoch": 2.4417931194254603, + "grad_norm": 1.2646952867507935, + "learning_rate": 3.7216366140262814e-05, + "loss": 0.0833, + "step": 63240 + }, + { + "epoch": 2.4421792347194873, + "grad_norm": 1.1415789127349854, + "learning_rate": 3.7190625120661034e-05, + "loss": 0.3039, + "step": 63250 + }, + { + "epoch": 2.4425653500135143, + "grad_norm": 1.143133521080017, + "learning_rate": 3.716488410105925e-05, + "loss": 0.1585, + "step": 63260 + }, + { + "epoch": 2.442951465307541, + "grad_norm": 0.32217004895210266, + "learning_rate": 3.7139143081457453e-05, + "loss": 0.1776, + "step": 63270 + }, + { + "epoch": 2.443337580601568, + "grad_norm": 0.04204453527927399, + "learning_rate": 3.7113402061855674e-05, + "loss": 0.1966, + "step": 63280 + }, + { + "epoch": 2.4437236958955943, + "grad_norm": 3.0139830112457275, + "learning_rate": 3.708766104225389e-05, + "loss": 0.2323, + "step": 63290 + }, + { + "epoch": 2.4441098111896213, + "grad_norm": 0.6170455813407898, + "learning_rate": 3.70619200226521e-05, + "loss": 0.18, + "step": 63300 + }, + { + "epoch": 2.444495926483648, + "grad_norm": 0.08699564635753632, + "learning_rate": 3.7036179003050313e-05, + "loss": 0.2548, + "step": 63310 + }, + { + "epoch": 2.444882041777675, + "grad_norm": 0.18672427535057068, + "learning_rate": 3.701043798344853e-05, + "loss": 0.2081, + "step": 63320 + }, + { + "epoch": 2.4452681570717014, + "grad_norm": 0.32661938667297363, + "learning_rate": 3.698469696384674e-05, + "loss": 0.1391, + "step": 63330 + }, + { + "epoch": 2.4456542723657284, + "grad_norm": 0.521452009677887, + "learning_rate": 3.6958955944244953e-05, + "loss": 0.1688, + "step": 63340 + }, + { + "epoch": 2.446040387659755, + "grad_norm": 1.4832172393798828, + "learning_rate": 3.693321492464317e-05, + "loss": 0.1051, + "step": 63350 + }, + { + "epoch": 2.446426502953782, + "grad_norm": 0.4200538694858551, + "learning_rate": 3.690747390504138e-05, + "loss": 0.1822, + "step": 63360 + }, + { + "epoch": 2.446812618247809, + "grad_norm": 0.48908549547195435, + "learning_rate": 3.688173288543959e-05, + "loss": 0.0922, + "step": 63370 + }, + { + "epoch": 2.4471987335418355, + "grad_norm": 1.988203525543213, + "learning_rate": 3.685599186583781e-05, + "loss": 0.1412, + "step": 63380 + }, + { + "epoch": 
2.4475848488358625, + "grad_norm": 0.8950991630554199, + "learning_rate": 3.683025084623602e-05, + "loss": 0.2411, + "step": 63390 + }, + { + "epoch": 2.447970964129889, + "grad_norm": 1.0535110235214233, + "learning_rate": 3.680450982663424e-05, + "loss": 0.1539, + "step": 63400 + }, + { + "epoch": 2.448357079423916, + "grad_norm": 1.7036796808242798, + "learning_rate": 3.6778768807032447e-05, + "loss": 0.2954, + "step": 63410 + }, + { + "epoch": 2.4487431947179426, + "grad_norm": 0.32762983441352844, + "learning_rate": 3.675302778743066e-05, + "loss": 0.1721, + "step": 63420 + }, + { + "epoch": 2.4491293100119695, + "grad_norm": 3.3198564052581787, + "learning_rate": 3.672728676782888e-05, + "loss": 0.3087, + "step": 63430 + }, + { + "epoch": 2.4495154253059965, + "grad_norm": 2.290881872177124, + "learning_rate": 3.6701545748227087e-05, + "loss": 0.0957, + "step": 63440 + }, + { + "epoch": 2.449901540600023, + "grad_norm": 1.985274076461792, + "learning_rate": 3.66758047286253e-05, + "loss": 0.146, + "step": 63450 + }, + { + "epoch": 2.45028765589405, + "grad_norm": 1.5870091915130615, + "learning_rate": 3.665006370902352e-05, + "loss": 0.179, + "step": 63460 + }, + { + "epoch": 2.4506737711880766, + "grad_norm": 0.29243603348731995, + "learning_rate": 3.662432268942173e-05, + "loss": 0.2421, + "step": 63470 + }, + { + "epoch": 2.4510598864821036, + "grad_norm": 1.050746202468872, + "learning_rate": 3.659858166981994e-05, + "loss": 0.1841, + "step": 63480 + }, + { + "epoch": 2.45144600177613, + "grad_norm": 0.9252954721450806, + "learning_rate": 3.657284065021815e-05, + "loss": 0.294, + "step": 63490 + }, + { + "epoch": 2.451832117070157, + "grad_norm": 1.6930452585220337, + "learning_rate": 3.654709963061637e-05, + "loss": 0.083, + "step": 63500 + }, + { + "epoch": 2.452218232364184, + "grad_norm": 1.2885856628417969, + "learning_rate": 3.6521358611014586e-05, + "loss": 0.1526, + "step": 63510 + }, + { + "epoch": 2.4526043476582107, + "grad_norm": 0.19372563064098358, + "learning_rate": 3.649561759141279e-05, + "loss": 0.1417, + "step": 63520 + }, + { + "epoch": 2.4529904629522377, + "grad_norm": 0.9957149624824524, + "learning_rate": 3.646987657181101e-05, + "loss": 0.2321, + "step": 63530 + }, + { + "epoch": 2.453376578246264, + "grad_norm": 0.8055297136306763, + "learning_rate": 3.6444135552209226e-05, + "loss": 0.178, + "step": 63540 + }, + { + "epoch": 2.453762693540291, + "grad_norm": 0.37042102217674255, + "learning_rate": 3.641839453260743e-05, + "loss": 0.1533, + "step": 63550 + }, + { + "epoch": 2.4541488088343177, + "grad_norm": 0.09681963920593262, + "learning_rate": 3.639265351300565e-05, + "loss": 0.0807, + "step": 63560 + }, + { + "epoch": 2.4545349241283447, + "grad_norm": 0.17449086904525757, + "learning_rate": 3.6366912493403866e-05, + "loss": 0.0819, + "step": 63570 + }, + { + "epoch": 2.4549210394223717, + "grad_norm": 0.9616051316261292, + "learning_rate": 3.634117147380208e-05, + "loss": 0.0952, + "step": 63580 + }, + { + "epoch": 2.4553071547163983, + "grad_norm": 3.450044870376587, + "learning_rate": 3.631543045420029e-05, + "loss": 0.247, + "step": 63590 + }, + { + "epoch": 2.4556932700104253, + "grad_norm": 0.5883270502090454, + "learning_rate": 3.6289689434598506e-05, + "loss": 0.105, + "step": 63600 + }, + { + "epoch": 2.456079385304452, + "grad_norm": 1.2351908683776855, + "learning_rate": 3.626394841499672e-05, + "loss": 0.1978, + "step": 63610 + }, + { + "epoch": 2.456465500598479, + "grad_norm": 0.08615940809249878, + "learning_rate": 
3.623820739539493e-05, + "loss": 0.0366, + "step": 63620 + }, + { + "epoch": 2.4568516158925053, + "grad_norm": 0.758092999458313, + "learning_rate": 3.6212466375793146e-05, + "loss": 0.0538, + "step": 63630 + }, + { + "epoch": 2.4572377311865323, + "grad_norm": 0.2542964220046997, + "learning_rate": 3.618672535619136e-05, + "loss": 0.217, + "step": 63640 + }, + { + "epoch": 2.4576238464805593, + "grad_norm": 1.8404854536056519, + "learning_rate": 3.616098433658957e-05, + "loss": 0.1246, + "step": 63650 + }, + { + "epoch": 2.458009961774586, + "grad_norm": 0.06438548862934113, + "learning_rate": 3.6135243316987786e-05, + "loss": 0.1816, + "step": 63660 + }, + { + "epoch": 2.458396077068613, + "grad_norm": 1.4888163805007935, + "learning_rate": 3.6109502297386e-05, + "loss": 0.1496, + "step": 63670 + }, + { + "epoch": 2.4587821923626394, + "grad_norm": 0.06935808062553406, + "learning_rate": 3.608376127778422e-05, + "loss": 0.1095, + "step": 63680 + }, + { + "epoch": 2.4591683076566664, + "grad_norm": 0.9536172151565552, + "learning_rate": 3.6058020258182426e-05, + "loss": 0.1572, + "step": 63690 + }, + { + "epoch": 2.459554422950693, + "grad_norm": 3.4437315464019775, + "learning_rate": 3.603227923858064e-05, + "loss": 0.2611, + "step": 63700 + }, + { + "epoch": 2.45994053824472, + "grad_norm": 2.3220934867858887, + "learning_rate": 3.600653821897886e-05, + "loss": 0.2999, + "step": 63710 + }, + { + "epoch": 2.460326653538747, + "grad_norm": 1.1044209003448486, + "learning_rate": 3.598079719937707e-05, + "loss": 0.0936, + "step": 63720 + }, + { + "epoch": 2.4607127688327735, + "grad_norm": 2.439162015914917, + "learning_rate": 3.595505617977528e-05, + "loss": 0.2898, + "step": 63730 + }, + { + "epoch": 2.4610988841268004, + "grad_norm": 0.9153732061386108, + "learning_rate": 3.59293151601735e-05, + "loss": 0.0976, + "step": 63740 + }, + { + "epoch": 2.461484999420827, + "grad_norm": 0.019965412095189095, + "learning_rate": 3.590357414057171e-05, + "loss": 0.1132, + "step": 63750 + }, + { + "epoch": 2.461871114714854, + "grad_norm": 1.2829655408859253, + "learning_rate": 3.587783312096992e-05, + "loss": 0.109, + "step": 63760 + }, + { + "epoch": 2.4622572300088805, + "grad_norm": 1.2905359268188477, + "learning_rate": 3.585209210136813e-05, + "loss": 0.288, + "step": 63770 + }, + { + "epoch": 2.4626433453029075, + "grad_norm": 0.6175247430801392, + "learning_rate": 3.582635108176635e-05, + "loss": 0.1877, + "step": 63780 + }, + { + "epoch": 2.463029460596934, + "grad_norm": 0.15092957019805908, + "learning_rate": 3.5800610062164566e-05, + "loss": 0.1871, + "step": 63790 + }, + { + "epoch": 2.463415575890961, + "grad_norm": 0.4543861746788025, + "learning_rate": 3.577486904256277e-05, + "loss": 0.1566, + "step": 63800 + }, + { + "epoch": 2.4638016911849876, + "grad_norm": 2.4032862186431885, + "learning_rate": 3.574912802296099e-05, + "loss": 0.338, + "step": 63810 + }, + { + "epoch": 2.4641878064790146, + "grad_norm": 0.4644568860530853, + "learning_rate": 3.5723387003359206e-05, + "loss": 0.4347, + "step": 63820 + }, + { + "epoch": 2.4645739217730416, + "grad_norm": 2.190516233444214, + "learning_rate": 3.569764598375742e-05, + "loss": 0.1055, + "step": 63830 + }, + { + "epoch": 2.464960037067068, + "grad_norm": 0.18661876022815704, + "learning_rate": 3.567190496415563e-05, + "loss": 0.4476, + "step": 63840 + }, + { + "epoch": 2.465346152361095, + "grad_norm": 0.7265095114707947, + "learning_rate": 3.5646163944553846e-05, + "loss": 0.1121, + "step": 63850 + }, + { + "epoch": 
2.4657322676551217, + "grad_norm": 0.2888505160808563, + "learning_rate": 3.562042292495206e-05, + "loss": 0.1208, + "step": 63860 + }, + { + "epoch": 2.4661183829491486, + "grad_norm": 0.9269798398017883, + "learning_rate": 3.559468190535027e-05, + "loss": 0.1376, + "step": 63870 + }, + { + "epoch": 2.466504498243175, + "grad_norm": 0.048346057534217834, + "learning_rate": 3.5568940885748486e-05, + "loss": 0.3057, + "step": 63880 + }, + { + "epoch": 2.466890613537202, + "grad_norm": 1.5524492263793945, + "learning_rate": 3.55431998661467e-05, + "loss": 0.077, + "step": 63890 + }, + { + "epoch": 2.467276728831229, + "grad_norm": 0.7234669327735901, + "learning_rate": 3.551745884654491e-05, + "loss": 0.1701, + "step": 63900 + }, + { + "epoch": 2.4676628441252557, + "grad_norm": 0.9384757280349731, + "learning_rate": 3.5491717826943126e-05, + "loss": 0.1085, + "step": 63910 + }, + { + "epoch": 2.4680489594192827, + "grad_norm": 1.6186715364456177, + "learning_rate": 3.546597680734134e-05, + "loss": 0.1556, + "step": 63920 + }, + { + "epoch": 2.4684350747133093, + "grad_norm": 1.2746638059616089, + "learning_rate": 3.544023578773956e-05, + "loss": 0.2306, + "step": 63930 + }, + { + "epoch": 2.4688211900073362, + "grad_norm": 2.7122621536254883, + "learning_rate": 3.5414494768137766e-05, + "loss": 0.1482, + "step": 63940 + }, + { + "epoch": 2.469207305301363, + "grad_norm": 1.9252265691757202, + "learning_rate": 3.538875374853598e-05, + "loss": 0.2278, + "step": 63950 + }, + { + "epoch": 2.4695934205953898, + "grad_norm": 1.1436023712158203, + "learning_rate": 3.53630127289342e-05, + "loss": 0.1518, + "step": 63960 + }, + { + "epoch": 2.4699795358894168, + "grad_norm": 0.40255314111709595, + "learning_rate": 3.533727170933241e-05, + "loss": 0.067, + "step": 63970 + }, + { + "epoch": 2.4703656511834433, + "grad_norm": 0.5762872695922852, + "learning_rate": 3.531153068973062e-05, + "loss": 0.2584, + "step": 63980 + }, + { + "epoch": 2.4707517664774703, + "grad_norm": 0.977530837059021, + "learning_rate": 3.528578967012884e-05, + "loss": 0.113, + "step": 63990 + }, + { + "epoch": 2.471137881771497, + "grad_norm": 0.5977995991706848, + "learning_rate": 3.526004865052705e-05, + "loss": 0.2179, + "step": 64000 + }, + { + "epoch": 2.471523997065524, + "grad_norm": 0.33924487233161926, + "learning_rate": 3.523430763092526e-05, + "loss": 0.1102, + "step": 64010 + }, + { + "epoch": 2.4719101123595504, + "grad_norm": 1.6377263069152832, + "learning_rate": 3.520856661132347e-05, + "loss": 0.1848, + "step": 64020 + }, + { + "epoch": 2.4722962276535774, + "grad_norm": 0.3962012827396393, + "learning_rate": 3.518282559172169e-05, + "loss": 0.2144, + "step": 64030 + }, + { + "epoch": 2.4726823429476044, + "grad_norm": 2.4445207118988037, + "learning_rate": 3.5157084572119906e-05, + "loss": 0.0798, + "step": 64040 + }, + { + "epoch": 2.473068458241631, + "grad_norm": 1.1374174356460571, + "learning_rate": 3.513134355251811e-05, + "loss": 0.0999, + "step": 64050 + }, + { + "epoch": 2.473454573535658, + "grad_norm": 1.128503680229187, + "learning_rate": 3.510560253291633e-05, + "loss": 0.0811, + "step": 64060 + }, + { + "epoch": 2.4738406888296844, + "grad_norm": 0.7874391078948975, + "learning_rate": 3.5079861513314545e-05, + "loss": 0.1042, + "step": 64070 + }, + { + "epoch": 2.4742268041237114, + "grad_norm": 0.36891883611679077, + "learning_rate": 3.505412049371276e-05, + "loss": 0.2693, + "step": 64080 + }, + { + "epoch": 2.474612919417738, + "grad_norm": 1.4663618803024292, + "learning_rate": 
3.502837947411097e-05, + "loss": 0.1469, + "step": 64090 + }, + { + "epoch": 2.474999034711765, + "grad_norm": 1.6910697221755981, + "learning_rate": 3.5002638454509185e-05, + "loss": 0.1506, + "step": 64100 + }, + { + "epoch": 2.475385150005792, + "grad_norm": 2.6914491653442383, + "learning_rate": 3.49768974349074e-05, + "loss": 0.1182, + "step": 64110 + }, + { + "epoch": 2.4757712652998185, + "grad_norm": 0.7562038898468018, + "learning_rate": 3.495115641530561e-05, + "loss": 0.1935, + "step": 64120 + }, + { + "epoch": 2.4761573805938455, + "grad_norm": 0.9019849300384521, + "learning_rate": 3.4925415395703825e-05, + "loss": 0.1456, + "step": 64130 + }, + { + "epoch": 2.476543495887872, + "grad_norm": 1.4131752252578735, + "learning_rate": 3.489967437610204e-05, + "loss": 0.1502, + "step": 64140 + }, + { + "epoch": 2.476929611181899, + "grad_norm": 1.1619501113891602, + "learning_rate": 3.487393335650025e-05, + "loss": 0.1559, + "step": 64150 + }, + { + "epoch": 2.4773157264759256, + "grad_norm": 0.38123640418052673, + "learning_rate": 3.4848192336898465e-05, + "loss": 0.1273, + "step": 64160 + }, + { + "epoch": 2.4777018417699526, + "grad_norm": 2.8837268352508545, + "learning_rate": 3.482245131729668e-05, + "loss": 0.1264, + "step": 64170 + }, + { + "epoch": 2.4780879570639796, + "grad_norm": 4.011549949645996, + "learning_rate": 3.47967102976949e-05, + "loss": 0.3243, + "step": 64180 + }, + { + "epoch": 2.478474072358006, + "grad_norm": 1.187883973121643, + "learning_rate": 3.4770969278093105e-05, + "loss": 0.1948, + "step": 64190 + }, + { + "epoch": 2.478860187652033, + "grad_norm": 1.1129032373428345, + "learning_rate": 3.474522825849132e-05, + "loss": 0.1226, + "step": 64200 + }, + { + "epoch": 2.4792463029460596, + "grad_norm": 0.3412770926952362, + "learning_rate": 3.471948723888954e-05, + "loss": 0.1282, + "step": 64210 + }, + { + "epoch": 2.4796324182400866, + "grad_norm": 1.1567891836166382, + "learning_rate": 3.4693746219287745e-05, + "loss": 0.2034, + "step": 64220 + }, + { + "epoch": 2.480018533534113, + "grad_norm": 0.05797566846013069, + "learning_rate": 3.466800519968596e-05, + "loss": 0.1438, + "step": 64230 + }, + { + "epoch": 2.48040464882814, + "grad_norm": 0.8097573518753052, + "learning_rate": 3.464226418008418e-05, + "loss": 0.2128, + "step": 64240 + }, + { + "epoch": 2.4807907641221667, + "grad_norm": 1.383527398109436, + "learning_rate": 3.461652316048239e-05, + "loss": 0.2379, + "step": 64250 + }, + { + "epoch": 2.4811768794161937, + "grad_norm": 1.1194959878921509, + "learning_rate": 3.45907821408806e-05, + "loss": 0.3002, + "step": 64260 + }, + { + "epoch": 2.4815629947102202, + "grad_norm": 2.7457661628723145, + "learning_rate": 3.456504112127881e-05, + "loss": 0.2035, + "step": 64270 + }, + { + "epoch": 2.4819491100042472, + "grad_norm": 1.852704405784607, + "learning_rate": 3.453930010167703e-05, + "loss": 0.1338, + "step": 64280 + }, + { + "epoch": 2.482335225298274, + "grad_norm": 1.2378255128860474, + "learning_rate": 3.4513559082075245e-05, + "loss": 0.1083, + "step": 64290 + }, + { + "epoch": 2.4827213405923008, + "grad_norm": 1.2217592000961304, + "learning_rate": 3.448781806247345e-05, + "loss": 0.2626, + "step": 64300 + }, + { + "epoch": 2.4831074558863278, + "grad_norm": 0.8216773271560669, + "learning_rate": 3.446207704287167e-05, + "loss": 0.1455, + "step": 64310 + }, + { + "epoch": 2.4834935711803543, + "grad_norm": 1.122636318206787, + "learning_rate": 3.4436336023269885e-05, + "loss": 0.2029, + "step": 64320 + }, + { + "epoch": 
2.4838796864743813, + "grad_norm": 0.4117072820663452, + "learning_rate": 3.441059500366809e-05, + "loss": 0.2123, + "step": 64330 + }, + { + "epoch": 2.484265801768408, + "grad_norm": 0.04222499951720238, + "learning_rate": 3.438485398406631e-05, + "loss": 0.2791, + "step": 64340 + }, + { + "epoch": 2.484651917062435, + "grad_norm": 1.2753795385360718, + "learning_rate": 3.4359112964464525e-05, + "loss": 0.213, + "step": 64350 + }, + { + "epoch": 2.485038032356462, + "grad_norm": 0.8051297068595886, + "learning_rate": 3.433337194486274e-05, + "loss": 0.2523, + "step": 64360 + }, + { + "epoch": 2.4854241476504884, + "grad_norm": 2.218684196472168, + "learning_rate": 3.430763092526095e-05, + "loss": 0.2185, + "step": 64370 + }, + { + "epoch": 2.4858102629445153, + "grad_norm": 2.2917842864990234, + "learning_rate": 3.4281889905659165e-05, + "loss": 0.1521, + "step": 64380 + }, + { + "epoch": 2.486196378238542, + "grad_norm": 1.0776972770690918, + "learning_rate": 3.425614888605738e-05, + "loss": 0.2217, + "step": 64390 + }, + { + "epoch": 2.486582493532569, + "grad_norm": 1.0344847440719604, + "learning_rate": 3.423040786645559e-05, + "loss": 0.1871, + "step": 64400 + }, + { + "epoch": 2.4869686088265954, + "grad_norm": 0.6094161868095398, + "learning_rate": 3.4204666846853805e-05, + "loss": 0.1073, + "step": 64410 + }, + { + "epoch": 2.4873547241206224, + "grad_norm": 0.8258315920829773, + "learning_rate": 3.417892582725202e-05, + "loss": 0.1897, + "step": 64420 + }, + { + "epoch": 2.4877408394146494, + "grad_norm": 0.6779630184173584, + "learning_rate": 3.415318480765023e-05, + "loss": 0.0847, + "step": 64430 + }, + { + "epoch": 2.488126954708676, + "grad_norm": 1.7514374256134033, + "learning_rate": 3.4127443788048445e-05, + "loss": 0.2243, + "step": 64440 + }, + { + "epoch": 2.488513070002703, + "grad_norm": 0.5367060899734497, + "learning_rate": 3.410170276844666e-05, + "loss": 0.3064, + "step": 64450 + }, + { + "epoch": 2.4888991852967295, + "grad_norm": 1.0501765012741089, + "learning_rate": 3.407596174884488e-05, + "loss": 0.1908, + "step": 64460 + }, + { + "epoch": 2.4892853005907565, + "grad_norm": 0.8501892685890198, + "learning_rate": 3.4050220729243085e-05, + "loss": 0.1238, + "step": 64470 + }, + { + "epoch": 2.489671415884783, + "grad_norm": 0.2177915871143341, + "learning_rate": 3.40244797096413e-05, + "loss": 0.114, + "step": 64480 + }, + { + "epoch": 2.49005753117881, + "grad_norm": 0.04332759603857994, + "learning_rate": 3.399873869003952e-05, + "loss": 0.2304, + "step": 64490 + }, + { + "epoch": 2.490443646472837, + "grad_norm": 0.6862795352935791, + "learning_rate": 3.397299767043773e-05, + "loss": 0.0624, + "step": 64500 + }, + { + "epoch": 2.4908297617668635, + "grad_norm": 0.7046298980712891, + "learning_rate": 3.394725665083594e-05, + "loss": 0.1957, + "step": 64510 + }, + { + "epoch": 2.4912158770608905, + "grad_norm": 0.5339323878288269, + "learning_rate": 3.392151563123416e-05, + "loss": 0.2889, + "step": 64520 + }, + { + "epoch": 2.491601992354917, + "grad_norm": 0.28813856840133667, + "learning_rate": 3.389577461163237e-05, + "loss": 0.0679, + "step": 64530 + }, + { + "epoch": 2.491988107648944, + "grad_norm": 0.6499680876731873, + "learning_rate": 3.387003359203058e-05, + "loss": 0.1949, + "step": 64540 + }, + { + "epoch": 2.4923742229429706, + "grad_norm": 0.26736152172088623, + "learning_rate": 3.384429257242879e-05, + "loss": 0.1647, + "step": 64550 + }, + { + "epoch": 2.4927603382369976, + "grad_norm": 2.9154460430145264, + "learning_rate": 
3.381855155282701e-05, + "loss": 0.2578, + "step": 64560 + }, + { + "epoch": 2.4931464535310246, + "grad_norm": 0.23644079267978668, + "learning_rate": 3.3792810533225225e-05, + "loss": 0.1555, + "step": 64570 + }, + { + "epoch": 2.493532568825051, + "grad_norm": 1.459173560142517, + "learning_rate": 3.376706951362343e-05, + "loss": 0.1637, + "step": 64580 + }, + { + "epoch": 2.493918684119078, + "grad_norm": 0.5812413692474365, + "learning_rate": 3.374132849402165e-05, + "loss": 0.1067, + "step": 64590 + }, + { + "epoch": 2.4943047994131047, + "grad_norm": 1.358742594718933, + "learning_rate": 3.3715587474419865e-05, + "loss": 0.2211, + "step": 64600 + }, + { + "epoch": 2.4946909147071317, + "grad_norm": 1.94925856590271, + "learning_rate": 3.368984645481808e-05, + "loss": 0.4372, + "step": 64610 + }, + { + "epoch": 2.495077030001158, + "grad_norm": 0.4507911801338196, + "learning_rate": 3.366410543521629e-05, + "loss": 0.1736, + "step": 64620 + }, + { + "epoch": 2.495463145295185, + "grad_norm": 0.7500709891319275, + "learning_rate": 3.3638364415614504e-05, + "loss": 0.2025, + "step": 64630 + }, + { + "epoch": 2.4958492605892117, + "grad_norm": 2.6973438262939453, + "learning_rate": 3.361262339601272e-05, + "loss": 0.1173, + "step": 64640 + }, + { + "epoch": 2.4962353758832387, + "grad_norm": 2.0454306602478027, + "learning_rate": 3.358688237641093e-05, + "loss": 0.1304, + "step": 64650 + }, + { + "epoch": 2.4966214911772653, + "grad_norm": 2.469876527786255, + "learning_rate": 3.3561141356809144e-05, + "loss": 0.1758, + "step": 64660 + }, + { + "epoch": 2.4970076064712923, + "grad_norm": 0.8067110180854797, + "learning_rate": 3.353540033720736e-05, + "loss": 0.0747, + "step": 64670 + }, + { + "epoch": 2.4973937217653193, + "grad_norm": 1.2773382663726807, + "learning_rate": 3.350965931760557e-05, + "loss": 0.1151, + "step": 64680 + }, + { + "epoch": 2.497779837059346, + "grad_norm": 0.1975592076778412, + "learning_rate": 3.3483918298003784e-05, + "loss": 0.2104, + "step": 64690 + }, + { + "epoch": 2.498165952353373, + "grad_norm": 0.6423165202140808, + "learning_rate": 3.3458177278402e-05, + "loss": 0.1885, + "step": 64700 + }, + { + "epoch": 2.4985520676473993, + "grad_norm": 1.818458914756775, + "learning_rate": 3.343243625880022e-05, + "loss": 0.1506, + "step": 64710 + }, + { + "epoch": 2.4989381829414263, + "grad_norm": 0.30381113290786743, + "learning_rate": 3.3406695239198424e-05, + "loss": 0.1791, + "step": 64720 + }, + { + "epoch": 2.499324298235453, + "grad_norm": 2.867393732070923, + "learning_rate": 3.338095421959664e-05, + "loss": 0.1854, + "step": 64730 + }, + { + "epoch": 2.49971041352948, + "grad_norm": 0.22897863388061523, + "learning_rate": 3.335521319999486e-05, + "loss": 0.1313, + "step": 64740 + }, + { + "epoch": 2.500096528823507, + "grad_norm": 2.5336554050445557, + "learning_rate": 3.332947218039307e-05, + "loss": 0.1814, + "step": 64750 + }, + { + "epoch": 2.5004826441175334, + "grad_norm": 0.10779833793640137, + "learning_rate": 3.330373116079128e-05, + "loss": 0.1563, + "step": 64760 + }, + { + "epoch": 2.5008687594115604, + "grad_norm": 1.1376827955245972, + "learning_rate": 3.32779901411895e-05, + "loss": 0.1495, + "step": 64770 + }, + { + "epoch": 2.501254874705587, + "grad_norm": 0.7156823873519897, + "learning_rate": 3.325224912158771e-05, + "loss": 0.2531, + "step": 64780 + }, + { + "epoch": 2.501640989999614, + "grad_norm": 0.7690051198005676, + "learning_rate": 3.322650810198592e-05, + "loss": 0.1204, + "step": 64790 + }, + { + "epoch": 
2.5020271052936405, + "grad_norm": 0.0718945860862732, + "learning_rate": 3.320076708238413e-05, + "loss": 0.1044, + "step": 64800 + }, + { + "epoch": 2.5024132205876675, + "grad_norm": 0.12632794678211212, + "learning_rate": 3.317502606278235e-05, + "loss": 0.1007, + "step": 64810 + }, + { + "epoch": 2.5027993358816945, + "grad_norm": 0.9840032458305359, + "learning_rate": 3.3149285043180564e-05, + "loss": 0.2399, + "step": 64820 + }, + { + "epoch": 2.503185451175721, + "grad_norm": 0.3271815776824951, + "learning_rate": 3.312354402357877e-05, + "loss": 0.117, + "step": 64830 + }, + { + "epoch": 2.503571566469748, + "grad_norm": 2.1266753673553467, + "learning_rate": 3.309780300397699e-05, + "loss": 0.1819, + "step": 64840 + }, + { + "epoch": 2.5039576817637745, + "grad_norm": 0.5041390061378479, + "learning_rate": 3.3072061984375204e-05, + "loss": 0.0579, + "step": 64850 + }, + { + "epoch": 2.5043437970578015, + "grad_norm": 0.2855200469493866, + "learning_rate": 3.304632096477341e-05, + "loss": 0.1814, + "step": 64860 + }, + { + "epoch": 2.504729912351828, + "grad_norm": 1.9890060424804688, + "learning_rate": 3.302057994517163e-05, + "loss": 0.3038, + "step": 64870 + }, + { + "epoch": 2.505116027645855, + "grad_norm": 0.5688171982765198, + "learning_rate": 3.2994838925569844e-05, + "loss": 0.0947, + "step": 64880 + }, + { + "epoch": 2.505502142939882, + "grad_norm": 3.265097141265869, + "learning_rate": 3.296909790596806e-05, + "loss": 0.1722, + "step": 64890 + }, + { + "epoch": 2.5058882582339086, + "grad_norm": 1.0011316537857056, + "learning_rate": 3.294335688636627e-05, + "loss": 0.217, + "step": 64900 + }, + { + "epoch": 2.5062743735279356, + "grad_norm": 2.054866075515747, + "learning_rate": 3.2917615866764484e-05, + "loss": 0.1862, + "step": 64910 + }, + { + "epoch": 2.506660488821962, + "grad_norm": 1.2730998992919922, + "learning_rate": 3.28918748471627e-05, + "loss": 0.1023, + "step": 64920 + }, + { + "epoch": 2.507046604115989, + "grad_norm": 1.5231776237487793, + "learning_rate": 3.286613382756091e-05, + "loss": 0.1922, + "step": 64930 + }, + { + "epoch": 2.5074327194100157, + "grad_norm": 0.7604933977127075, + "learning_rate": 3.2840392807959124e-05, + "loss": 0.1329, + "step": 64940 + }, + { + "epoch": 2.5078188347040427, + "grad_norm": 1.243506669998169, + "learning_rate": 3.281465178835734e-05, + "loss": 0.2142, + "step": 64950 + }, + { + "epoch": 2.5082049499980696, + "grad_norm": 3.0195231437683105, + "learning_rate": 3.278891076875556e-05, + "loss": 0.3474, + "step": 64960 + }, + { + "epoch": 2.508591065292096, + "grad_norm": 2.2186105251312256, + "learning_rate": 3.2763169749153764e-05, + "loss": 0.2125, + "step": 64970 + }, + { + "epoch": 2.508977180586123, + "grad_norm": 1.1483558416366577, + "learning_rate": 3.273742872955198e-05, + "loss": 0.1427, + "step": 64980 + }, + { + "epoch": 2.5093632958801497, + "grad_norm": 1.3046457767486572, + "learning_rate": 3.27116877099502e-05, + "loss": 0.0859, + "step": 64990 + }, + { + "epoch": 2.5097494111741767, + "grad_norm": 0.04392600804567337, + "learning_rate": 3.2685946690348404e-05, + "loss": 0.146, + "step": 65000 + }, + { + "epoch": 2.5101355264682033, + "grad_norm": 0.8138188123703003, + "learning_rate": 3.266020567074662e-05, + "loss": 0.1809, + "step": 65010 + }, + { + "epoch": 2.5105216417622302, + "grad_norm": 2.0875182151794434, + "learning_rate": 3.263446465114484e-05, + "loss": 0.1925, + "step": 65020 + }, + { + "epoch": 2.5109077570562572, + "grad_norm": 0.4890693426132202, + "learning_rate": 
3.260872363154305e-05, + "loss": 0.1189, + "step": 65030 + }, + { + "epoch": 2.511293872350284, + "grad_norm": 0.21475251019001007, + "learning_rate": 3.258298261194126e-05, + "loss": 0.2583, + "step": 65040 + }, + { + "epoch": 2.5116799876443103, + "grad_norm": 1.272985577583313, + "learning_rate": 3.255724159233947e-05, + "loss": 0.3891, + "step": 65050 + }, + { + "epoch": 2.5120661029383373, + "grad_norm": 0.8311867713928223, + "learning_rate": 3.253150057273769e-05, + "loss": 0.1536, + "step": 65060 + }, + { + "epoch": 2.5124522182323643, + "grad_norm": 2.999817371368408, + "learning_rate": 3.2505759553135904e-05, + "loss": 0.2331, + "step": 65070 + }, + { + "epoch": 2.512838333526391, + "grad_norm": 0.4586697518825531, + "learning_rate": 3.248001853353411e-05, + "loss": 0.1496, + "step": 65080 + }, + { + "epoch": 2.513224448820418, + "grad_norm": 0.38037627935409546, + "learning_rate": 3.245427751393233e-05, + "loss": 0.1769, + "step": 65090 + }, + { + "epoch": 2.513610564114445, + "grad_norm": 0.9884114265441895, + "learning_rate": 3.2428536494330544e-05, + "loss": 0.157, + "step": 65100 + }, + { + "epoch": 2.5139966794084714, + "grad_norm": 2.8576581478118896, + "learning_rate": 3.240279547472875e-05, + "loss": 0.1484, + "step": 65110 + }, + { + "epoch": 2.514382794702498, + "grad_norm": 1.7361352443695068, + "learning_rate": 3.237705445512697e-05, + "loss": 0.1315, + "step": 65120 + }, + { + "epoch": 2.514768909996525, + "grad_norm": 1.4289588928222656, + "learning_rate": 3.2351313435525184e-05, + "loss": 0.1235, + "step": 65130 + }, + { + "epoch": 2.515155025290552, + "grad_norm": 2.256578207015991, + "learning_rate": 3.23255724159234e-05, + "loss": 0.1026, + "step": 65140 + }, + { + "epoch": 2.5155411405845785, + "grad_norm": 2.3927180767059326, + "learning_rate": 3.229983139632161e-05, + "loss": 0.1264, + "step": 65150 + }, + { + "epoch": 2.5159272558786054, + "grad_norm": 0.7600728869438171, + "learning_rate": 3.2274090376719824e-05, + "loss": 0.1091, + "step": 65160 + }, + { + "epoch": 2.516313371172632, + "grad_norm": 1.196343183517456, + "learning_rate": 3.224834935711804e-05, + "loss": 0.1419, + "step": 65170 + }, + { + "epoch": 2.516699486466659, + "grad_norm": 1.8273401260375977, + "learning_rate": 3.222260833751625e-05, + "loss": 0.1519, + "step": 65180 + }, + { + "epoch": 2.5170856017606855, + "grad_norm": 0.586053192615509, + "learning_rate": 3.2196867317914463e-05, + "loss": 0.1348, + "step": 65190 + }, + { + "epoch": 2.5174717170547125, + "grad_norm": 0.34410399198532104, + "learning_rate": 3.217112629831268e-05, + "loss": 0.2406, + "step": 65200 + }, + { + "epoch": 2.5178578323487395, + "grad_norm": 1.9117828607559204, + "learning_rate": 3.214538527871089e-05, + "loss": 0.1669, + "step": 65210 + }, + { + "epoch": 2.518243947642766, + "grad_norm": 0.3416088819503784, + "learning_rate": 3.2119644259109103e-05, + "loss": 0.0738, + "step": 65220 + }, + { + "epoch": 2.518630062936793, + "grad_norm": 2.7562408447265625, + "learning_rate": 3.209390323950732e-05, + "loss": 0.2793, + "step": 65230 + }, + { + "epoch": 2.5190161782308196, + "grad_norm": 0.18978220224380493, + "learning_rate": 3.206816221990554e-05, + "loss": 0.2145, + "step": 65240 + }, + { + "epoch": 2.5194022935248466, + "grad_norm": 1.9624252319335938, + "learning_rate": 3.204242120030374e-05, + "loss": 0.2203, + "step": 65250 + }, + { + "epoch": 2.519788408818873, + "grad_norm": 1.9377204179763794, + "learning_rate": 3.201668018070196e-05, + "loss": 0.1587, + "step": 65260 + }, + { + "epoch": 
2.5201745241129, + "grad_norm": 0.5279117226600647, + "learning_rate": 3.199093916110018e-05, + "loss": 0.1874, + "step": 65270 + }, + { + "epoch": 2.520560639406927, + "grad_norm": 1.3398821353912354, + "learning_rate": 3.196519814149839e-05, + "loss": 0.1258, + "step": 65280 + }, + { + "epoch": 2.5209467547009536, + "grad_norm": 3.5301899909973145, + "learning_rate": 3.1939457121896597e-05, + "loss": 0.2119, + "step": 65290 + }, + { + "epoch": 2.5213328699949806, + "grad_norm": 1.9934186935424805, + "learning_rate": 3.191371610229482e-05, + "loss": 0.1813, + "step": 65300 + }, + { + "epoch": 2.521718985289007, + "grad_norm": 0.049756068736314774, + "learning_rate": 3.188797508269303e-05, + "loss": 0.1224, + "step": 65310 + }, + { + "epoch": 2.522105100583034, + "grad_norm": 0.14521420001983643, + "learning_rate": 3.1862234063091237e-05, + "loss": 0.2149, + "step": 65320 + }, + { + "epoch": 2.5224912158770607, + "grad_norm": 1.602318525314331, + "learning_rate": 3.183649304348945e-05, + "loss": 0.2355, + "step": 65330 + }, + { + "epoch": 2.5228773311710877, + "grad_norm": 0.8942916989326477, + "learning_rate": 3.181075202388767e-05, + "loss": 0.0873, + "step": 65340 + }, + { + "epoch": 2.5232634464651147, + "grad_norm": 1.184981107711792, + "learning_rate": 3.178501100428588e-05, + "loss": 0.2573, + "step": 65350 + }, + { + "epoch": 2.5236495617591412, + "grad_norm": 1.4945077896118164, + "learning_rate": 3.175926998468409e-05, + "loss": 0.131, + "step": 65360 + }, + { + "epoch": 2.5240356770531682, + "grad_norm": 0.2049487978219986, + "learning_rate": 3.173352896508231e-05, + "loss": 0.0353, + "step": 65370 + }, + { + "epoch": 2.5244217923471948, + "grad_norm": 0.9006851315498352, + "learning_rate": 3.170778794548052e-05, + "loss": 0.1341, + "step": 65380 + }, + { + "epoch": 2.5248079076412218, + "grad_norm": 0.08327820897102356, + "learning_rate": 3.1682046925878736e-05, + "loss": 0.1907, + "step": 65390 + }, + { + "epoch": 2.5251940229352483, + "grad_norm": 0.19701172411441803, + "learning_rate": 3.165630590627695e-05, + "loss": 0.131, + "step": 65400 + }, + { + "epoch": 2.5255801382292753, + "grad_norm": 2.7013745307922363, + "learning_rate": 3.163056488667516e-05, + "loss": 0.3517, + "step": 65410 + }, + { + "epoch": 2.5259662535233023, + "grad_norm": 0.20807845890522003, + "learning_rate": 3.1604823867073376e-05, + "loss": 0.0998, + "step": 65420 + }, + { + "epoch": 2.526352368817329, + "grad_norm": 0.6886903643608093, + "learning_rate": 3.157908284747159e-05, + "loss": 0.1197, + "step": 65430 + }, + { + "epoch": 2.5267384841113554, + "grad_norm": 0.42514756321907043, + "learning_rate": 3.15533418278698e-05, + "loss": 0.1692, + "step": 65440 + }, + { + "epoch": 2.5271245994053824, + "grad_norm": 0.26355358958244324, + "learning_rate": 3.1527600808268016e-05, + "loss": 0.121, + "step": 65450 + }, + { + "epoch": 2.5275107146994094, + "grad_norm": 0.14379052817821503, + "learning_rate": 3.150185978866623e-05, + "loss": 0.2221, + "step": 65460 + }, + { + "epoch": 2.527896829993436, + "grad_norm": 0.0977015420794487, + "learning_rate": 3.147611876906444e-05, + "loss": 0.158, + "step": 65470 + }, + { + "epoch": 2.528282945287463, + "grad_norm": 0.6228841543197632, + "learning_rate": 3.1450377749462656e-05, + "loss": 0.2043, + "step": 65480 + }, + { + "epoch": 2.52866906058149, + "grad_norm": 0.10137589275836945, + "learning_rate": 3.1424636729860876e-05, + "loss": 0.2369, + "step": 65490 + }, + { + "epoch": 2.5290551758755164, + "grad_norm": 0.20083734393119812, + "learning_rate": 
3.139889571025908e-05, + "loss": 0.1989, + "step": 65500 + }, + { + "epoch": 2.529441291169543, + "grad_norm": 0.35362812876701355, + "learning_rate": 3.1373154690657296e-05, + "loss": 0.2544, + "step": 65510 + }, + { + "epoch": 2.52982740646357, + "grad_norm": 0.8700111508369446, + "learning_rate": 3.1347413671055516e-05, + "loss": 0.112, + "step": 65520 + }, + { + "epoch": 2.530213521757597, + "grad_norm": 0.0802641287446022, + "learning_rate": 3.132167265145372e-05, + "loss": 0.0596, + "step": 65530 + }, + { + "epoch": 2.5305996370516235, + "grad_norm": 1.380710244178772, + "learning_rate": 3.1295931631851936e-05, + "loss": 0.1295, + "step": 65540 + }, + { + "epoch": 2.5309857523456505, + "grad_norm": 0.9784635901451111, + "learning_rate": 3.1270190612250156e-05, + "loss": 0.2425, + "step": 65550 + }, + { + "epoch": 2.5313718676396775, + "grad_norm": 0.09004099667072296, + "learning_rate": 3.124444959264837e-05, + "loss": 0.0659, + "step": 65560 + }, + { + "epoch": 2.531757982933704, + "grad_norm": 0.33668985962867737, + "learning_rate": 3.1218708573046576e-05, + "loss": 0.1979, + "step": 65570 + }, + { + "epoch": 2.5321440982277306, + "grad_norm": 0.39336487650871277, + "learning_rate": 3.119296755344479e-05, + "loss": 0.17, + "step": 65580 + }, + { + "epoch": 2.5325302135217576, + "grad_norm": 0.6237707734107971, + "learning_rate": 3.116722653384301e-05, + "loss": 0.0943, + "step": 65590 + }, + { + "epoch": 2.5329163288157845, + "grad_norm": 0.1977011263370514, + "learning_rate": 3.114148551424122e-05, + "loss": 0.1265, + "step": 65600 + }, + { + "epoch": 2.533302444109811, + "grad_norm": 0.7314585447311401, + "learning_rate": 3.111574449463943e-05, + "loss": 0.1159, + "step": 65610 + }, + { + "epoch": 2.533688559403838, + "grad_norm": 1.6389861106872559, + "learning_rate": 3.109000347503765e-05, + "loss": 0.1515, + "step": 65620 + }, + { + "epoch": 2.5340746746978646, + "grad_norm": 0.25401124358177185, + "learning_rate": 3.106426245543586e-05, + "loss": 0.2054, + "step": 65630 + }, + { + "epoch": 2.5344607899918916, + "grad_norm": 0.7747787833213806, + "learning_rate": 3.103852143583407e-05, + "loss": 0.0944, + "step": 65640 + }, + { + "epoch": 2.534846905285918, + "grad_norm": 2.0066640377044678, + "learning_rate": 3.101278041623229e-05, + "loss": 0.1223, + "step": 65650 + }, + { + "epoch": 2.535233020579945, + "grad_norm": 0.3246127665042877, + "learning_rate": 3.09870393966305e-05, + "loss": 0.2998, + "step": 65660 + }, + { + "epoch": 2.535619135873972, + "grad_norm": 0.6192534565925598, + "learning_rate": 3.0961298377028716e-05, + "loss": 0.0864, + "step": 65670 + }, + { + "epoch": 2.5360052511679987, + "grad_norm": 1.500116229057312, + "learning_rate": 3.093555735742693e-05, + "loss": 0.1384, + "step": 65680 + }, + { + "epoch": 2.5363913664620257, + "grad_norm": 1.661163091659546, + "learning_rate": 3.090981633782514e-05, + "loss": 0.1907, + "step": 65690 + }, + { + "epoch": 2.536777481756052, + "grad_norm": 0.46657463908195496, + "learning_rate": 3.0884075318223356e-05, + "loss": 0.2575, + "step": 65700 + }, + { + "epoch": 2.537163597050079, + "grad_norm": 1.0207927227020264, + "learning_rate": 3.085833429862157e-05, + "loss": 0.1799, + "step": 65710 + }, + { + "epoch": 2.5375497123441058, + "grad_norm": 0.5044147372245789, + "learning_rate": 3.083259327901978e-05, + "loss": 0.2646, + "step": 65720 + }, + { + "epoch": 2.5379358276381327, + "grad_norm": 0.6330855488777161, + "learning_rate": 3.0806852259417996e-05, + "loss": 0.2243, + "step": 65730 + }, + { + "epoch": 
2.5383219429321597, + "grad_norm": 0.12088367342948914, + "learning_rate": 3.0781111239816216e-05, + "loss": 0.1685, + "step": 65740 + }, + { + "epoch": 2.5387080582261863, + "grad_norm": 1.5174821615219116, + "learning_rate": 3.075537022021442e-05, + "loss": 0.081, + "step": 65750 + }, + { + "epoch": 2.5390941735202133, + "grad_norm": 0.5644610524177551, + "learning_rate": 3.0729629200612636e-05, + "loss": 0.1046, + "step": 65760 + }, + { + "epoch": 2.53948028881424, + "grad_norm": 1.9365503787994385, + "learning_rate": 3.0703888181010856e-05, + "loss": 0.1162, + "step": 65770 + }, + { + "epoch": 2.539866404108267, + "grad_norm": 0.46223533153533936, + "learning_rate": 3.067814716140906e-05, + "loss": 0.1218, + "step": 65780 + }, + { + "epoch": 2.5402525194022934, + "grad_norm": 2.0017831325531006, + "learning_rate": 3.0652406141807276e-05, + "loss": 0.2885, + "step": 65790 + }, + { + "epoch": 2.5406386346963203, + "grad_norm": 1.7874171733856201, + "learning_rate": 3.0626665122205496e-05, + "loss": 0.1561, + "step": 65800 + }, + { + "epoch": 2.5410247499903473, + "grad_norm": 0.6481704115867615, + "learning_rate": 3.060092410260371e-05, + "loss": 0.188, + "step": 65810 + }, + { + "epoch": 2.541410865284374, + "grad_norm": 1.0805027484893799, + "learning_rate": 3.0575183083001916e-05, + "loss": 0.1335, + "step": 65820 + }, + { + "epoch": 2.541796980578401, + "grad_norm": 2.5876686573028564, + "learning_rate": 3.0549442063400136e-05, + "loss": 0.0836, + "step": 65830 + }, + { + "epoch": 2.5421830958724274, + "grad_norm": 0.9550105929374695, + "learning_rate": 3.052370104379835e-05, + "loss": 0.2, + "step": 65840 + }, + { + "epoch": 2.5425692111664544, + "grad_norm": 0.48512884974479675, + "learning_rate": 3.049796002419656e-05, + "loss": 0.089, + "step": 65850 + }, + { + "epoch": 2.542955326460481, + "grad_norm": 0.8065679669380188, + "learning_rate": 3.0472219004594772e-05, + "loss": 0.1408, + "step": 65860 + }, + { + "epoch": 2.543341441754508, + "grad_norm": 0.32501792907714844, + "learning_rate": 3.044647798499299e-05, + "loss": 0.3102, + "step": 65870 + }, + { + "epoch": 2.543727557048535, + "grad_norm": 0.521720826625824, + "learning_rate": 3.04207369653912e-05, + "loss": 0.1617, + "step": 65880 + }, + { + "epoch": 2.5441136723425615, + "grad_norm": 0.4469434320926666, + "learning_rate": 3.0394995945789412e-05, + "loss": 0.0619, + "step": 65890 + }, + { + "epoch": 2.544499787636588, + "grad_norm": 1.0342704057693481, + "learning_rate": 3.036925492618763e-05, + "loss": 0.1, + "step": 65900 + }, + { + "epoch": 2.544885902930615, + "grad_norm": 0.5880559086799622, + "learning_rate": 3.0343513906585842e-05, + "loss": 0.1031, + "step": 65910 + }, + { + "epoch": 2.545272018224642, + "grad_norm": 0.065493643283844, + "learning_rate": 3.0317772886984052e-05, + "loss": 0.1029, + "step": 65920 + }, + { + "epoch": 2.5456581335186685, + "grad_norm": 0.6159729361534119, + "learning_rate": 3.029203186738227e-05, + "loss": 0.0852, + "step": 65930 + }, + { + "epoch": 2.5460442488126955, + "grad_norm": 1.618788242340088, + "learning_rate": 3.0266290847780482e-05, + "loss": 0.2189, + "step": 65940 + }, + { + "epoch": 2.5464303641067225, + "grad_norm": 4.030269145965576, + "learning_rate": 3.0240549828178692e-05, + "loss": 0.3216, + "step": 65950 + }, + { + "epoch": 2.546816479400749, + "grad_norm": 1.813103437423706, + "learning_rate": 3.0214808808576912e-05, + "loss": 0.2134, + "step": 65960 + }, + { + "epoch": 2.5472025946947756, + "grad_norm": 0.5134888887405396, + "learning_rate": 
3.0189067788975122e-05, + "loss": 0.1133, + "step": 65970 + }, + { + "epoch": 2.5475887099888026, + "grad_norm": 0.027485152706503868, + "learning_rate": 3.0163326769373335e-05, + "loss": 0.0954, + "step": 65980 + }, + { + "epoch": 2.5479748252828296, + "grad_norm": 1.0274910926818848, + "learning_rate": 3.0137585749771552e-05, + "loss": 0.3188, + "step": 65990 + }, + { + "epoch": 2.548360940576856, + "grad_norm": 0.556398332118988, + "learning_rate": 3.0111844730169762e-05, + "loss": 0.0638, + "step": 66000 + }, + { + "epoch": 2.548747055870883, + "grad_norm": 0.16730284690856934, + "learning_rate": 3.0086103710567975e-05, + "loss": 0.1002, + "step": 66010 + }, + { + "epoch": 2.54913317116491, + "grad_norm": 0.401558518409729, + "learning_rate": 3.0060362690966192e-05, + "loss": 0.1537, + "step": 66020 + }, + { + "epoch": 2.5495192864589367, + "grad_norm": 0.8240194916725159, + "learning_rate": 3.0034621671364405e-05, + "loss": 0.2183, + "step": 66030 + }, + { + "epoch": 2.549905401752963, + "grad_norm": 0.34985408186912537, + "learning_rate": 3.0008880651762615e-05, + "loss": 0.2507, + "step": 66040 + }, + { + "epoch": 2.55029151704699, + "grad_norm": 1.4406944513320923, + "learning_rate": 2.9983139632160835e-05, + "loss": 0.2086, + "step": 66050 + }, + { + "epoch": 2.550677632341017, + "grad_norm": 1.0392922163009644, + "learning_rate": 2.9957398612559045e-05, + "loss": 0.2622, + "step": 66060 + }, + { + "epoch": 2.5510637476350437, + "grad_norm": 0.07720334827899933, + "learning_rate": 2.993165759295726e-05, + "loss": 0.2646, + "step": 66070 + }, + { + "epoch": 2.5514498629290707, + "grad_norm": 1.9083623886108398, + "learning_rate": 2.9905916573355475e-05, + "loss": 0.1832, + "step": 66080 + }, + { + "epoch": 2.5518359782230973, + "grad_norm": 0.7062809467315674, + "learning_rate": 2.9880175553753685e-05, + "loss": 0.2515, + "step": 66090 + }, + { + "epoch": 2.5522220935171243, + "grad_norm": 2.6205830574035645, + "learning_rate": 2.98544345341519e-05, + "loss": 0.1872, + "step": 66100 + }, + { + "epoch": 2.552608208811151, + "grad_norm": 0.6591269373893738, + "learning_rate": 2.982869351455011e-05, + "loss": 0.2101, + "step": 66110 + }, + { + "epoch": 2.552994324105178, + "grad_norm": 0.11720894277095795, + "learning_rate": 2.980295249494833e-05, + "loss": 0.0773, + "step": 66120 + }, + { + "epoch": 2.553380439399205, + "grad_norm": 0.4956444799900055, + "learning_rate": 2.977721147534654e-05, + "loss": 0.0854, + "step": 66130 + }, + { + "epoch": 2.5537665546932313, + "grad_norm": 0.11073987185955048, + "learning_rate": 2.9751470455744752e-05, + "loss": 0.0968, + "step": 66140 + }, + { + "epoch": 2.5541526699872583, + "grad_norm": 0.1464674174785614, + "learning_rate": 2.972572943614297e-05, + "loss": 0.1609, + "step": 66150 + }, + { + "epoch": 2.554538785281285, + "grad_norm": 1.9735543727874756, + "learning_rate": 2.9699988416541182e-05, + "loss": 0.2884, + "step": 66160 + }, + { + "epoch": 2.554924900575312, + "grad_norm": 0.5370622873306274, + "learning_rate": 2.9674247396939392e-05, + "loss": 0.1345, + "step": 66170 + }, + { + "epoch": 2.5553110158693384, + "grad_norm": 1.845221996307373, + "learning_rate": 2.964850637733761e-05, + "loss": 0.1761, + "step": 66180 + }, + { + "epoch": 2.5556971311633654, + "grad_norm": 0.051780425012111664, + "learning_rate": 2.9622765357735822e-05, + "loss": 0.1886, + "step": 66190 + }, + { + "epoch": 2.5560832464573924, + "grad_norm": 2.163306713104248, + "learning_rate": 2.959702433813403e-05, + "loss": 0.2559, + "step": 66200 + }, + { + 
"epoch": 2.556469361751419, + "grad_norm": 1.9034422636032104, + "learning_rate": 2.9571283318532252e-05, + "loss": 0.1655, + "step": 66210 + }, + { + "epoch": 2.556855477045446, + "grad_norm": 0.1645793616771698, + "learning_rate": 2.954554229893046e-05, + "loss": 0.0944, + "step": 66220 + }, + { + "epoch": 2.5572415923394725, + "grad_norm": 0.0783633440732956, + "learning_rate": 2.9519801279328675e-05, + "loss": 0.1487, + "step": 66230 + }, + { + "epoch": 2.5576277076334994, + "grad_norm": 1.8718386888504028, + "learning_rate": 2.949406025972689e-05, + "loss": 0.1607, + "step": 66240 + }, + { + "epoch": 2.558013822927526, + "grad_norm": 0.23518329858779907, + "learning_rate": 2.94683192401251e-05, + "loss": 0.0665, + "step": 66250 + }, + { + "epoch": 2.558399938221553, + "grad_norm": 2.0301125049591064, + "learning_rate": 2.9442578220523315e-05, + "loss": 0.2067, + "step": 66260 + }, + { + "epoch": 2.55878605351558, + "grad_norm": 0.8637131452560425, + "learning_rate": 2.941683720092153e-05, + "loss": 0.1247, + "step": 66270 + }, + { + "epoch": 2.5591721688096065, + "grad_norm": 3.191856861114502, + "learning_rate": 2.9391096181319745e-05, + "loss": 0.1793, + "step": 66280 + }, + { + "epoch": 2.5595582841036335, + "grad_norm": 2.5240116119384766, + "learning_rate": 2.9365355161717955e-05, + "loss": 0.2405, + "step": 66290 + }, + { + "epoch": 2.55994439939766, + "grad_norm": 0.651969313621521, + "learning_rate": 2.933961414211617e-05, + "loss": 0.236, + "step": 66300 + }, + { + "epoch": 2.560330514691687, + "grad_norm": 0.6139543056488037, + "learning_rate": 2.9313873122514385e-05, + "loss": 0.0929, + "step": 66310 + }, + { + "epoch": 2.5607166299857136, + "grad_norm": 2.219248056411743, + "learning_rate": 2.9288132102912598e-05, + "loss": 0.1579, + "step": 66320 + }, + { + "epoch": 2.5611027452797406, + "grad_norm": 1.0484322309494019, + "learning_rate": 2.9262391083310815e-05, + "loss": 0.0948, + "step": 66330 + }, + { + "epoch": 2.5614888605737676, + "grad_norm": 4.444892406463623, + "learning_rate": 2.9236650063709025e-05, + "loss": 0.2459, + "step": 66340 + }, + { + "epoch": 2.561874975867794, + "grad_norm": 0.5715140700340271, + "learning_rate": 2.9210909044107238e-05, + "loss": 0.1527, + "step": 66350 + }, + { + "epoch": 2.5622610911618207, + "grad_norm": 1.5631526708602905, + "learning_rate": 2.9185168024505448e-05, + "loss": 0.2262, + "step": 66360 + }, + { + "epoch": 2.5626472064558476, + "grad_norm": 0.5048274397850037, + "learning_rate": 2.9159427004903668e-05, + "loss": 0.1175, + "step": 66370 + }, + { + "epoch": 2.5630333217498746, + "grad_norm": 1.8589451313018799, + "learning_rate": 2.9133685985301878e-05, + "loss": 0.1788, + "step": 66380 + }, + { + "epoch": 2.563419437043901, + "grad_norm": 0.7230979800224304, + "learning_rate": 2.910794496570009e-05, + "loss": 0.1978, + "step": 66390 + }, + { + "epoch": 2.563805552337928, + "grad_norm": 0.34469518065452576, + "learning_rate": 2.9082203946098308e-05, + "loss": 0.2338, + "step": 66400 + }, + { + "epoch": 2.564191667631955, + "grad_norm": 0.396876722574234, + "learning_rate": 2.9056462926496518e-05, + "loss": 0.1059, + "step": 66410 + }, + { + "epoch": 2.5645777829259817, + "grad_norm": 0.10169263184070587, + "learning_rate": 2.903072190689473e-05, + "loss": 0.1637, + "step": 66420 + }, + { + "epoch": 2.5649638982200083, + "grad_norm": 0.9067368507385254, + "learning_rate": 2.9004980887292948e-05, + "loss": 0.1774, + "step": 66430 + }, + { + "epoch": 2.5653500135140352, + "grad_norm": 0.03508066385984421, + 
"learning_rate": 2.897923986769116e-05, + "loss": 0.1696, + "step": 66440 + }, + { + "epoch": 2.5657361288080622, + "grad_norm": 0.8411409258842468, + "learning_rate": 2.895349884808937e-05, + "loss": 0.1355, + "step": 66450 + }, + { + "epoch": 2.566122244102089, + "grad_norm": 9.54201602935791, + "learning_rate": 2.8927757828487588e-05, + "loss": 0.2578, + "step": 66460 + }, + { + "epoch": 2.5665083593961158, + "grad_norm": 0.5490165948867798, + "learning_rate": 2.89020168088858e-05, + "loss": 0.2095, + "step": 66470 + }, + { + "epoch": 2.5668944746901423, + "grad_norm": 0.1423688530921936, + "learning_rate": 2.8876275789284015e-05, + "loss": 0.1465, + "step": 66480 + }, + { + "epoch": 2.5672805899841693, + "grad_norm": 1.456730604171753, + "learning_rate": 2.885053476968223e-05, + "loss": 0.1963, + "step": 66490 + }, + { + "epoch": 2.567666705278196, + "grad_norm": 1.5556591749191284, + "learning_rate": 2.882479375008044e-05, + "loss": 0.2851, + "step": 66500 + }, + { + "epoch": 2.568052820572223, + "grad_norm": 1.825986385345459, + "learning_rate": 2.8799052730478654e-05, + "loss": 0.2956, + "step": 66510 + }, + { + "epoch": 2.56843893586625, + "grad_norm": 1.5602234601974487, + "learning_rate": 2.877331171087687e-05, + "loss": 0.1656, + "step": 66520 + }, + { + "epoch": 2.5688250511602764, + "grad_norm": 0.17269374430179596, + "learning_rate": 2.8747570691275084e-05, + "loss": 0.2931, + "step": 66530 + }, + { + "epoch": 2.5692111664543034, + "grad_norm": 2.0602357387542725, + "learning_rate": 2.8721829671673294e-05, + "loss": 0.1741, + "step": 66540 + }, + { + "epoch": 2.56959728174833, + "grad_norm": 0.8411749005317688, + "learning_rate": 2.869608865207151e-05, + "loss": 0.2062, + "step": 66550 + }, + { + "epoch": 2.569983397042357, + "grad_norm": 1.9933018684387207, + "learning_rate": 2.8670347632469724e-05, + "loss": 0.0564, + "step": 66560 + }, + { + "epoch": 2.5703695123363834, + "grad_norm": 0.6383324861526489, + "learning_rate": 2.8644606612867934e-05, + "loss": 0.1954, + "step": 66570 + }, + { + "epoch": 2.5707556276304104, + "grad_norm": 0.6832618713378906, + "learning_rate": 2.8618865593266154e-05, + "loss": 0.1364, + "step": 66580 + }, + { + "epoch": 2.5711417429244374, + "grad_norm": 0.13984490931034088, + "learning_rate": 2.8593124573664364e-05, + "loss": 0.2051, + "step": 66590 + }, + { + "epoch": 2.571527858218464, + "grad_norm": 0.9289367198944092, + "learning_rate": 2.8567383554062578e-05, + "loss": 0.167, + "step": 66600 + }, + { + "epoch": 2.571913973512491, + "grad_norm": 2.27067232131958, + "learning_rate": 2.8541642534460794e-05, + "loss": 0.2125, + "step": 66610 + }, + { + "epoch": 2.5723000888065175, + "grad_norm": 2.3731513023376465, + "learning_rate": 2.8515901514859004e-05, + "loss": 0.1417, + "step": 66620 + }, + { + "epoch": 2.5726862041005445, + "grad_norm": 2.030726194381714, + "learning_rate": 2.8490160495257218e-05, + "loss": 0.1292, + "step": 66630 + }, + { + "epoch": 2.573072319394571, + "grad_norm": 2.3475704193115234, + "learning_rate": 2.846441947565543e-05, + "loss": 0.2046, + "step": 66640 + }, + { + "epoch": 2.573458434688598, + "grad_norm": 1.8660598993301392, + "learning_rate": 2.8438678456053648e-05, + "loss": 0.1539, + "step": 66650 + }, + { + "epoch": 2.573844549982625, + "grad_norm": 2.3513095378875732, + "learning_rate": 2.8412937436451858e-05, + "loss": 0.3371, + "step": 66660 + }, + { + "epoch": 2.5742306652766516, + "grad_norm": 0.40551525354385376, + "learning_rate": 2.838719641685007e-05, + "loss": 0.1595, + "step": 66670 + }, + { + 
"epoch": 2.5746167805706786, + "grad_norm": 0.5435059070587158, + "learning_rate": 2.8361455397248288e-05, + "loss": 0.1139, + "step": 66680 + }, + { + "epoch": 2.575002895864705, + "grad_norm": 0.13456226885318756, + "learning_rate": 2.83357143776465e-05, + "loss": 0.1576, + "step": 66690 + }, + { + "epoch": 2.575389011158732, + "grad_norm": 0.8292468786239624, + "learning_rate": 2.830997335804471e-05, + "loss": 0.3232, + "step": 66700 + }, + { + "epoch": 2.5757751264527586, + "grad_norm": 2.613285541534424, + "learning_rate": 2.8284232338442927e-05, + "loss": 0.3047, + "step": 66710 + }, + { + "epoch": 2.5761612417467856, + "grad_norm": 0.11251250654459, + "learning_rate": 2.825849131884114e-05, + "loss": 0.2049, + "step": 66720 + }, + { + "epoch": 2.5765473570408126, + "grad_norm": 0.4605161249637604, + "learning_rate": 2.823275029923935e-05, + "loss": 0.0768, + "step": 66730 + }, + { + "epoch": 2.576933472334839, + "grad_norm": 1.2005031108856201, + "learning_rate": 2.820700927963757e-05, + "loss": 0.2257, + "step": 66740 + }, + { + "epoch": 2.5773195876288657, + "grad_norm": 1.1498386859893799, + "learning_rate": 2.818126826003578e-05, + "loss": 0.0817, + "step": 66750 + }, + { + "epoch": 2.5777057029228927, + "grad_norm": 0.5442838072776794, + "learning_rate": 2.8155527240433994e-05, + "loss": 0.1027, + "step": 66760 + }, + { + "epoch": 2.5780918182169197, + "grad_norm": 0.026386337354779243, + "learning_rate": 2.812978622083221e-05, + "loss": 0.2398, + "step": 66770 + }, + { + "epoch": 2.5784779335109462, + "grad_norm": 1.8325613737106323, + "learning_rate": 2.810404520123042e-05, + "loss": 0.1616, + "step": 66780 + }, + { + "epoch": 2.578864048804973, + "grad_norm": 4.571000576019287, + "learning_rate": 2.8078304181628634e-05, + "loss": 0.327, + "step": 66790 + }, + { + "epoch": 2.579250164099, + "grad_norm": 0.88929682970047, + "learning_rate": 2.805256316202685e-05, + "loss": 0.128, + "step": 66800 + }, + { + "epoch": 2.5796362793930268, + "grad_norm": 0.5320045351982117, + "learning_rate": 2.8026822142425064e-05, + "loss": 0.043, + "step": 66810 + }, + { + "epoch": 2.5800223946870533, + "grad_norm": 2.667931079864502, + "learning_rate": 2.8001081122823274e-05, + "loss": 0.1023, + "step": 66820 + }, + { + "epoch": 2.5804085099810803, + "grad_norm": 0.5052358508110046, + "learning_rate": 2.7975340103221494e-05, + "loss": 0.1913, + "step": 66830 + }, + { + "epoch": 2.5807946252751073, + "grad_norm": 0.03477906435728073, + "learning_rate": 2.7949599083619704e-05, + "loss": 0.0771, + "step": 66840 + }, + { + "epoch": 2.581180740569134, + "grad_norm": 1.756790280342102, + "learning_rate": 2.7923858064017917e-05, + "loss": 0.1997, + "step": 66850 + }, + { + "epoch": 2.581566855863161, + "grad_norm": 0.2687755227088928, + "learning_rate": 2.7898117044416134e-05, + "loss": 0.1979, + "step": 66860 + }, + { + "epoch": 2.581952971157188, + "grad_norm": 0.3334960341453552, + "learning_rate": 2.7872376024814344e-05, + "loss": 0.1232, + "step": 66870 + }, + { + "epoch": 2.5823390864512143, + "grad_norm": 0.06918884068727493, + "learning_rate": 2.7846635005212557e-05, + "loss": 0.1476, + "step": 66880 + }, + { + "epoch": 2.582725201745241, + "grad_norm": 2.113374710083008, + "learning_rate": 2.7820893985610767e-05, + "loss": 0.2019, + "step": 66890 + }, + { + "epoch": 2.583111317039268, + "grad_norm": 0.7421366572380066, + "learning_rate": 2.7795152966008987e-05, + "loss": 0.1316, + "step": 66900 + }, + { + "epoch": 2.583497432333295, + "grad_norm": 0.5632963180541992, + "learning_rate": 
2.7769411946407197e-05, + "loss": 0.2162, + "step": 66910 + }, + { + "epoch": 2.5838835476273214, + "grad_norm": 1.951395869255066, + "learning_rate": 2.774367092680541e-05, + "loss": 0.2977, + "step": 66920 + }, + { + "epoch": 2.5842696629213484, + "grad_norm": 1.139452338218689, + "learning_rate": 2.7717929907203627e-05, + "loss": 0.1505, + "step": 66930 + }, + { + "epoch": 2.584655778215375, + "grad_norm": 1.4778863191604614, + "learning_rate": 2.769218888760184e-05, + "loss": 0.167, + "step": 66940 + }, + { + "epoch": 2.585041893509402, + "grad_norm": 0.3687165379524231, + "learning_rate": 2.766644786800005e-05, + "loss": 0.0876, + "step": 66950 + }, + { + "epoch": 2.5854280088034285, + "grad_norm": 0.3774222433567047, + "learning_rate": 2.7640706848398267e-05, + "loss": 0.0574, + "step": 66960 + }, + { + "epoch": 2.5858141240974555, + "grad_norm": 1.060309886932373, + "learning_rate": 2.761496582879648e-05, + "loss": 0.1978, + "step": 66970 + }, + { + "epoch": 2.5862002393914825, + "grad_norm": 0.041595011949539185, + "learning_rate": 2.758922480919469e-05, + "loss": 0.1617, + "step": 66980 + }, + { + "epoch": 2.586586354685509, + "grad_norm": 1.0350271463394165, + "learning_rate": 2.756348378959291e-05, + "loss": 0.1443, + "step": 66990 + }, + { + "epoch": 2.586972469979536, + "grad_norm": 3.072813034057617, + "learning_rate": 2.753774276999112e-05, + "loss": 0.1084, + "step": 67000 + }, + { + "epoch": 2.5873585852735626, + "grad_norm": 1.4295400381088257, + "learning_rate": 2.7512001750389334e-05, + "loss": 0.2752, + "step": 67010 + }, + { + "epoch": 2.5877447005675895, + "grad_norm": 0.904931366443634, + "learning_rate": 2.748626073078755e-05, + "loss": 0.1908, + "step": 67020 + }, + { + "epoch": 2.588130815861616, + "grad_norm": 0.12692487239837646, + "learning_rate": 2.746051971118576e-05, + "loss": 0.1956, + "step": 67030 + }, + { + "epoch": 2.588516931155643, + "grad_norm": 3.306731700897217, + "learning_rate": 2.7434778691583974e-05, + "loss": 0.1552, + "step": 67040 + }, + { + "epoch": 2.58890304644967, + "grad_norm": 0.17633609473705292, + "learning_rate": 2.740903767198219e-05, + "loss": 0.2643, + "step": 67050 + }, + { + "epoch": 2.5892891617436966, + "grad_norm": 1.683794617652893, + "learning_rate": 2.7383296652380404e-05, + "loss": 0.1609, + "step": 67060 + }, + { + "epoch": 2.5896752770377236, + "grad_norm": 0.799902617931366, + "learning_rate": 2.7357555632778613e-05, + "loss": 0.1787, + "step": 67070 + }, + { + "epoch": 2.59006139233175, + "grad_norm": 0.41263818740844727, + "learning_rate": 2.733181461317683e-05, + "loss": 0.144, + "step": 67080 + }, + { + "epoch": 2.590447507625777, + "grad_norm": 0.018304159864783287, + "learning_rate": 2.7306073593575043e-05, + "loss": 0.3317, + "step": 67090 + }, + { + "epoch": 2.5908336229198037, + "grad_norm": 0.5893455743789673, + "learning_rate": 2.7280332573973257e-05, + "loss": 0.1116, + "step": 67100 + }, + { + "epoch": 2.5912197382138307, + "grad_norm": 0.04571494832634926, + "learning_rate": 2.7254591554371473e-05, + "loss": 0.0919, + "step": 67110 + }, + { + "epoch": 2.5916058535078577, + "grad_norm": 1.2573976516723633, + "learning_rate": 2.7228850534769683e-05, + "loss": 0.0927, + "step": 67120 + }, + { + "epoch": 2.591991968801884, + "grad_norm": 2.4016544818878174, + "learning_rate": 2.7203109515167897e-05, + "loss": 0.2308, + "step": 67130 + }, + { + "epoch": 2.592378084095911, + "grad_norm": 0.6153950691223145, + "learning_rate": 2.7177368495566113e-05, + "loss": 0.3398, + "step": 67140 + }, + { + "epoch": 
2.5927641993899377, + "grad_norm": 0.638940155506134, + "learning_rate": 2.7151627475964327e-05, + "loss": 0.0721, + "step": 67150 + }, + { + "epoch": 2.5931503146839647, + "grad_norm": 2.8470890522003174, + "learning_rate": 2.7125886456362537e-05, + "loss": 0.3437, + "step": 67160 + }, + { + "epoch": 2.5935364299779913, + "grad_norm": 0.21816271543502808, + "learning_rate": 2.710014543676075e-05, + "loss": 0.2749, + "step": 67170 + }, + { + "epoch": 2.5939225452720183, + "grad_norm": 1.3057670593261719, + "learning_rate": 2.7074404417158967e-05, + "loss": 0.2564, + "step": 67180 + }, + { + "epoch": 2.5943086605660453, + "grad_norm": 3.9401612281799316, + "learning_rate": 2.7048663397557177e-05, + "loss": 0.1827, + "step": 67190 + }, + { + "epoch": 2.594694775860072, + "grad_norm": 0.09398512542247772, + "learning_rate": 2.702292237795539e-05, + "loss": 0.2714, + "step": 67200 + }, + { + "epoch": 2.5950808911540983, + "grad_norm": 1.3454128503799438, + "learning_rate": 2.6997181358353607e-05, + "loss": 0.1409, + "step": 67210 + }, + { + "epoch": 2.5954670064481253, + "grad_norm": 3.312215566635132, + "learning_rate": 2.697144033875182e-05, + "loss": 0.1655, + "step": 67220 + }, + { + "epoch": 2.5958531217421523, + "grad_norm": 0.40052536129951477, + "learning_rate": 2.694569931915003e-05, + "loss": 0.095, + "step": 67230 + }, + { + "epoch": 2.596239237036179, + "grad_norm": 2.1148762702941895, + "learning_rate": 2.6919958299548247e-05, + "loss": 0.1167, + "step": 67240 + }, + { + "epoch": 2.596625352330206, + "grad_norm": 0.539953887462616, + "learning_rate": 2.689421727994646e-05, + "loss": 0.2198, + "step": 67250 + }, + { + "epoch": 2.597011467624233, + "grad_norm": 0.6172623634338379, + "learning_rate": 2.6868476260344673e-05, + "loss": 0.0879, + "step": 67260 + }, + { + "epoch": 2.5973975829182594, + "grad_norm": 0.9759122729301453, + "learning_rate": 2.684273524074289e-05, + "loss": 0.1162, + "step": 67270 + }, + { + "epoch": 2.597783698212286, + "grad_norm": 1.2510347366333008, + "learning_rate": 2.68169942211411e-05, + "loss": 0.1106, + "step": 67280 + }, + { + "epoch": 2.598169813506313, + "grad_norm": 1.3545809984207153, + "learning_rate": 2.6791253201539313e-05, + "loss": 0.1538, + "step": 67290 + }, + { + "epoch": 2.59855592880034, + "grad_norm": 0.4334702789783478, + "learning_rate": 2.676551218193753e-05, + "loss": 0.1178, + "step": 67300 + }, + { + "epoch": 2.5989420440943665, + "grad_norm": 0.7618227601051331, + "learning_rate": 2.6739771162335743e-05, + "loss": 0.158, + "step": 67310 + }, + { + "epoch": 2.5993281593883935, + "grad_norm": 2.0803728103637695, + "learning_rate": 2.6714030142733953e-05, + "loss": 0.2248, + "step": 67320 + }, + { + "epoch": 2.5997142746824204, + "grad_norm": 0.7592846751213074, + "learning_rate": 2.668828912313217e-05, + "loss": 0.139, + "step": 67330 + }, + { + "epoch": 2.600100389976447, + "grad_norm": 2.20271372795105, + "learning_rate": 2.6662548103530383e-05, + "loss": 0.2735, + "step": 67340 + }, + { + "epoch": 2.6004865052704735, + "grad_norm": 2.25789213180542, + "learning_rate": 2.6636807083928593e-05, + "loss": 0.1816, + "step": 67350 + }, + { + "epoch": 2.6008726205645005, + "grad_norm": 1.8669871091842651, + "learning_rate": 2.6611066064326813e-05, + "loss": 0.2003, + "step": 67360 + }, + { + "epoch": 2.6012587358585275, + "grad_norm": 2.2639665603637695, + "learning_rate": 2.6585325044725023e-05, + "loss": 0.2869, + "step": 67370 + }, + { + "epoch": 2.601644851152554, + "grad_norm": 0.5188022255897522, + "learning_rate": 
2.6559584025123236e-05, + "loss": 0.1267, + "step": 67380 + }, + { + "epoch": 2.602030966446581, + "grad_norm": 0.7478063106536865, + "learning_rate": 2.6533843005521453e-05, + "loss": 0.1506, + "step": 67390 + }, + { + "epoch": 2.6024170817406076, + "grad_norm": 0.05027804523706436, + "learning_rate": 2.6508101985919663e-05, + "loss": 0.1469, + "step": 67400 + }, + { + "epoch": 2.6028031970346346, + "grad_norm": 1.4990183115005493, + "learning_rate": 2.6482360966317876e-05, + "loss": 0.0823, + "step": 67410 + }, + { + "epoch": 2.603189312328661, + "grad_norm": 0.23909913003444672, + "learning_rate": 2.645661994671609e-05, + "loss": 0.1971, + "step": 67420 + }, + { + "epoch": 2.603575427622688, + "grad_norm": 0.03295808658003807, + "learning_rate": 2.6430878927114306e-05, + "loss": 0.0741, + "step": 67430 + }, + { + "epoch": 2.603961542916715, + "grad_norm": 1.407315731048584, + "learning_rate": 2.6405137907512516e-05, + "loss": 0.115, + "step": 67440 + }, + { + "epoch": 2.6043476582107417, + "grad_norm": 2.2319045066833496, + "learning_rate": 2.637939688791073e-05, + "loss": 0.2297, + "step": 67450 + }, + { + "epoch": 2.6047337735047686, + "grad_norm": 0.40752896666526794, + "learning_rate": 2.6353655868308946e-05, + "loss": 0.132, + "step": 67460 + }, + { + "epoch": 2.605119888798795, + "grad_norm": 1.6817177534103394, + "learning_rate": 2.632791484870716e-05, + "loss": 0.1508, + "step": 67470 + }, + { + "epoch": 2.605506004092822, + "grad_norm": 1.18791663646698, + "learning_rate": 2.630217382910537e-05, + "loss": 0.1663, + "step": 67480 + }, + { + "epoch": 2.6058921193868487, + "grad_norm": 2.44256329536438, + "learning_rate": 2.6276432809503586e-05, + "loss": 0.1656, + "step": 67490 + }, + { + "epoch": 2.6062782346808757, + "grad_norm": 0.49040651321411133, + "learning_rate": 2.62506917899018e-05, + "loss": 0.227, + "step": 67500 + }, + { + "epoch": 2.6066643499749027, + "grad_norm": 0.6817883849143982, + "learning_rate": 2.622495077030001e-05, + "loss": 0.0833, + "step": 67510 + }, + { + "epoch": 2.6070504652689293, + "grad_norm": 1.4250826835632324, + "learning_rate": 2.619920975069823e-05, + "loss": 0.0818, + "step": 67520 + }, + { + "epoch": 2.6074365805629562, + "grad_norm": 5.752524375915527, + "learning_rate": 2.617346873109644e-05, + "loss": 0.1824, + "step": 67530 + }, + { + "epoch": 2.607822695856983, + "grad_norm": 1.8009400367736816, + "learning_rate": 2.6147727711494653e-05, + "loss": 0.1534, + "step": 67540 + }, + { + "epoch": 2.6082088111510098, + "grad_norm": 0.7888918519020081, + "learning_rate": 2.612198669189287e-05, + "loss": 0.2632, + "step": 67550 + }, + { + "epoch": 2.6085949264450363, + "grad_norm": 1.1485899686813354, + "learning_rate": 2.609624567229108e-05, + "loss": 0.2014, + "step": 67560 + }, + { + "epoch": 2.6089810417390633, + "grad_norm": 0.017272522673010826, + "learning_rate": 2.6070504652689293e-05, + "loss": 0.1698, + "step": 67570 + }, + { + "epoch": 2.6093671570330903, + "grad_norm": 0.23312939703464508, + "learning_rate": 2.604476363308751e-05, + "loss": 0.11, + "step": 67580 + }, + { + "epoch": 2.609753272327117, + "grad_norm": 1.5409551858901978, + "learning_rate": 2.6019022613485723e-05, + "loss": 0.2031, + "step": 67590 + }, + { + "epoch": 2.610139387621144, + "grad_norm": 2.757416248321533, + "learning_rate": 2.5993281593883933e-05, + "loss": 0.2326, + "step": 67600 + }, + { + "epoch": 2.6105255029151704, + "grad_norm": 0.03714454174041748, + "learning_rate": 2.5967540574282153e-05, + "loss": 0.305, + "step": 67610 + }, + { + "epoch": 
2.6109116182091974, + "grad_norm": 1.5993083715438843, + "learning_rate": 2.5941799554680363e-05, + "loss": 0.0492, + "step": 67620 + }, + { + "epoch": 2.611297733503224, + "grad_norm": 3.3023600578308105, + "learning_rate": 2.5916058535078576e-05, + "loss": 0.1256, + "step": 67630 + }, + { + "epoch": 2.611683848797251, + "grad_norm": 2.0448055267333984, + "learning_rate": 2.5890317515476793e-05, + "loss": 0.1496, + "step": 67640 + }, + { + "epoch": 2.612069964091278, + "grad_norm": 2.056248188018799, + "learning_rate": 2.5864576495875002e-05, + "loss": 0.1396, + "step": 67650 + }, + { + "epoch": 2.6124560793853044, + "grad_norm": 0.7189445495605469, + "learning_rate": 2.5838835476273216e-05, + "loss": 0.1375, + "step": 67660 + }, + { + "epoch": 2.612842194679331, + "grad_norm": 0.3849039077758789, + "learning_rate": 2.5813094456671426e-05, + "loss": 0.0882, + "step": 67670 + }, + { + "epoch": 2.613228309973358, + "grad_norm": 1.2116458415985107, + "learning_rate": 2.5787353437069646e-05, + "loss": 0.0892, + "step": 67680 + }, + { + "epoch": 2.613614425267385, + "grad_norm": 0.5601721405982971, + "learning_rate": 2.5761612417467856e-05, + "loss": 0.148, + "step": 67690 + }, + { + "epoch": 2.6140005405614115, + "grad_norm": 1.1883691549301147, + "learning_rate": 2.573587139786607e-05, + "loss": 0.1673, + "step": 67700 + }, + { + "epoch": 2.6143866558554385, + "grad_norm": 0.8299083113670349, + "learning_rate": 2.5710130378264286e-05, + "loss": 0.1871, + "step": 67710 + }, + { + "epoch": 2.6147727711494655, + "grad_norm": 0.6316946744918823, + "learning_rate": 2.5684389358662496e-05, + "loss": 0.0707, + "step": 67720 + }, + { + "epoch": 2.615158886443492, + "grad_norm": 0.5716143846511841, + "learning_rate": 2.565864833906071e-05, + "loss": 0.0993, + "step": 67730 + }, + { + "epoch": 2.6155450017375186, + "grad_norm": 0.6665957570075989, + "learning_rate": 2.5632907319458926e-05, + "loss": 0.1402, + "step": 67740 + }, + { + "epoch": 2.6159311170315456, + "grad_norm": 1.1837033033370972, + "learning_rate": 2.560716629985714e-05, + "loss": 0.0963, + "step": 67750 + }, + { + "epoch": 2.6163172323255726, + "grad_norm": 0.7204211354255676, + "learning_rate": 2.558142528025535e-05, + "loss": 0.1109, + "step": 67760 + }, + { + "epoch": 2.616703347619599, + "grad_norm": 0.04177774861454964, + "learning_rate": 2.555568426065357e-05, + "loss": 0.206, + "step": 67770 + }, + { + "epoch": 2.617089462913626, + "grad_norm": 0.8528016805648804, + "learning_rate": 2.552994324105178e-05, + "loss": 0.1477, + "step": 67780 + }, + { + "epoch": 2.6174755782076526, + "grad_norm": 1.8284223079681396, + "learning_rate": 2.5504202221449992e-05, + "loss": 0.2925, + "step": 67790 + }, + { + "epoch": 2.6178616935016796, + "grad_norm": 2.03399658203125, + "learning_rate": 2.547846120184821e-05, + "loss": 0.1716, + "step": 67800 + }, + { + "epoch": 2.618247808795706, + "grad_norm": 1.1871380805969238, + "learning_rate": 2.545272018224642e-05, + "loss": 0.1387, + "step": 67810 + }, + { + "epoch": 2.618633924089733, + "grad_norm": 0.3045734465122223, + "learning_rate": 2.5426979162644632e-05, + "loss": 0.1624, + "step": 67820 + }, + { + "epoch": 2.61902003938376, + "grad_norm": 1.4469716548919678, + "learning_rate": 2.540123814304285e-05, + "loss": 0.1534, + "step": 67830 + }, + { + "epoch": 2.6194061546777867, + "grad_norm": 0.7630550861358643, + "learning_rate": 2.5375497123441062e-05, + "loss": 0.1266, + "step": 67840 + }, + { + "epoch": 2.6197922699718137, + "grad_norm": 1.3296400308609009, + "learning_rate": 
2.5349756103839272e-05, + "loss": 0.3268, + "step": 67850 + }, + { + "epoch": 2.6201783852658402, + "grad_norm": 0.7620146870613098, + "learning_rate": 2.532401508423749e-05, + "loss": 0.1747, + "step": 67860 + }, + { + "epoch": 2.6205645005598672, + "grad_norm": 1.4850629568099976, + "learning_rate": 2.5298274064635702e-05, + "loss": 0.1165, + "step": 67870 + }, + { + "epoch": 2.6209506158538938, + "grad_norm": 0.30754703283309937, + "learning_rate": 2.5272533045033915e-05, + "loss": 0.1603, + "step": 67880 + }, + { + "epoch": 2.6213367311479208, + "grad_norm": 0.47044405341148376, + "learning_rate": 2.5246792025432132e-05, + "loss": 0.1471, + "step": 67890 + }, + { + "epoch": 2.6217228464419478, + "grad_norm": 1.428301453590393, + "learning_rate": 2.5221051005830342e-05, + "loss": 0.1448, + "step": 67900 + }, + { + "epoch": 2.6221089617359743, + "grad_norm": 0.3132546544075012, + "learning_rate": 2.5195309986228555e-05, + "loss": 0.1899, + "step": 67910 + }, + { + "epoch": 2.6224950770300013, + "grad_norm": 0.36817577481269836, + "learning_rate": 2.5169568966626772e-05, + "loss": 0.2864, + "step": 67920 + }, + { + "epoch": 2.622881192324028, + "grad_norm": 0.8652348518371582, + "learning_rate": 2.5143827947024985e-05, + "loss": 0.0987, + "step": 67930 + }, + { + "epoch": 2.623267307618055, + "grad_norm": 0.5235974788665771, + "learning_rate": 2.5118086927423195e-05, + "loss": 0.3079, + "step": 67940 + }, + { + "epoch": 2.6236534229120814, + "grad_norm": 0.2852037847042084, + "learning_rate": 2.509234590782141e-05, + "loss": 0.1217, + "step": 67950 + }, + { + "epoch": 2.6240395382061084, + "grad_norm": 0.6256119608879089, + "learning_rate": 2.5066604888219625e-05, + "loss": 0.1645, + "step": 67960 + }, + { + "epoch": 2.6244256535001353, + "grad_norm": 1.0917539596557617, + "learning_rate": 2.5040863868617835e-05, + "loss": 0.1589, + "step": 67970 + }, + { + "epoch": 2.624811768794162, + "grad_norm": 1.1311125755310059, + "learning_rate": 2.501512284901605e-05, + "loss": 0.2388, + "step": 67980 + }, + { + "epoch": 2.625197884088189, + "grad_norm": 0.806238055229187, + "learning_rate": 2.4989381829414265e-05, + "loss": 0.2127, + "step": 67990 + }, + { + "epoch": 2.6255839993822154, + "grad_norm": 1.4400973320007324, + "learning_rate": 2.496364080981248e-05, + "loss": 0.0687, + "step": 68000 + }, + { + "epoch": 2.6259701146762424, + "grad_norm": 0.050153911113739014, + "learning_rate": 2.4937899790210692e-05, + "loss": 0.0564, + "step": 68010 + }, + { + "epoch": 2.626356229970269, + "grad_norm": 1.139260172843933, + "learning_rate": 2.4912158770608905e-05, + "loss": 0.0868, + "step": 68020 + }, + { + "epoch": 2.626742345264296, + "grad_norm": 1.2839637994766235, + "learning_rate": 2.488641775100712e-05, + "loss": 0.1589, + "step": 68030 + }, + { + "epoch": 2.627128460558323, + "grad_norm": 0.19808660447597504, + "learning_rate": 2.4860676731405332e-05, + "loss": 0.2015, + "step": 68040 + }, + { + "epoch": 2.6275145758523495, + "grad_norm": 0.38413748145103455, + "learning_rate": 2.4834935711803545e-05, + "loss": 0.1181, + "step": 68050 + }, + { + "epoch": 2.627900691146376, + "grad_norm": 0.5252083539962769, + "learning_rate": 2.480919469220176e-05, + "loss": 0.2141, + "step": 68060 + }, + { + "epoch": 2.628286806440403, + "grad_norm": 0.3906213641166687, + "learning_rate": 2.4783453672599975e-05, + "loss": 0.3083, + "step": 68070 + }, + { + "epoch": 2.62867292173443, + "grad_norm": 2.1290669441223145, + "learning_rate": 2.4757712652998185e-05, + "loss": 0.2155, + "step": 68080 + }, + { + 
"epoch": 2.6290590370284566, + "grad_norm": 1.0176451206207275, + "learning_rate": 2.4731971633396402e-05, + "loss": 0.1852, + "step": 68090 + }, + { + "epoch": 2.6294451523224835, + "grad_norm": 2.1066977977752686, + "learning_rate": 2.4706230613794615e-05, + "loss": 0.2177, + "step": 68100 + }, + { + "epoch": 2.6298312676165105, + "grad_norm": 2.267906427383423, + "learning_rate": 2.4680489594192825e-05, + "loss": 0.1359, + "step": 68110 + }, + { + "epoch": 2.630217382910537, + "grad_norm": 1.4150601625442505, + "learning_rate": 2.465474857459104e-05, + "loss": 0.1805, + "step": 68120 + }, + { + "epoch": 2.6306034982045636, + "grad_norm": 0.41347402334213257, + "learning_rate": 2.462900755498925e-05, + "loss": 0.095, + "step": 68130 + }, + { + "epoch": 2.6309896134985906, + "grad_norm": 1.3140255212783813, + "learning_rate": 2.4603266535387468e-05, + "loss": 0.0693, + "step": 68140 + }, + { + "epoch": 2.6313757287926176, + "grad_norm": 1.2731821537017822, + "learning_rate": 2.457752551578568e-05, + "loss": 0.2528, + "step": 68150 + }, + { + "epoch": 2.631761844086644, + "grad_norm": 2.4566001892089844, + "learning_rate": 2.4551784496183895e-05, + "loss": 0.1169, + "step": 68160 + }, + { + "epoch": 2.632147959380671, + "grad_norm": 2.265305280685425, + "learning_rate": 2.4526043476582108e-05, + "loss": 0.154, + "step": 68170 + }, + { + "epoch": 2.632534074674698, + "grad_norm": 0.5196200609207153, + "learning_rate": 2.450030245698032e-05, + "loss": 0.1029, + "step": 68180 + }, + { + "epoch": 2.6329201899687247, + "grad_norm": 2.5574257373809814, + "learning_rate": 2.4474561437378535e-05, + "loss": 0.2491, + "step": 68190 + }, + { + "epoch": 2.6333063052627512, + "grad_norm": 0.40821412205696106, + "learning_rate": 2.4448820417776748e-05, + "loss": 0.1842, + "step": 68200 + }, + { + "epoch": 2.633692420556778, + "grad_norm": 0.9594093561172485, + "learning_rate": 2.4423079398174965e-05, + "loss": 0.2135, + "step": 68210 + }, + { + "epoch": 2.634078535850805, + "grad_norm": 0.3707121014595032, + "learning_rate": 2.4397338378573175e-05, + "loss": 0.1715, + "step": 68220 + }, + { + "epoch": 2.6344646511448317, + "grad_norm": 0.9233579039573669, + "learning_rate": 2.437159735897139e-05, + "loss": 0.3378, + "step": 68230 + }, + { + "epoch": 2.6348507664388587, + "grad_norm": 1.8800396919250488, + "learning_rate": 2.4345856339369605e-05, + "loss": 0.0902, + "step": 68240 + }, + { + "epoch": 2.6352368817328853, + "grad_norm": 1.0025197267532349, + "learning_rate": 2.4320115319767818e-05, + "loss": 0.2189, + "step": 68250 + }, + { + "epoch": 2.6356229970269123, + "grad_norm": 0.03650035336613655, + "learning_rate": 2.429437430016603e-05, + "loss": 0.2025, + "step": 68260 + }, + { + "epoch": 2.636009112320939, + "grad_norm": 0.20000745356082916, + "learning_rate": 2.4268633280564245e-05, + "loss": 0.1395, + "step": 68270 + }, + { + "epoch": 2.636395227614966, + "grad_norm": 0.7981158494949341, + "learning_rate": 2.4242892260962458e-05, + "loss": 0.0875, + "step": 68280 + }, + { + "epoch": 2.636781342908993, + "grad_norm": 1.6767163276672363, + "learning_rate": 2.421715124136067e-05, + "loss": 0.2558, + "step": 68290 + }, + { + "epoch": 2.6371674582030193, + "grad_norm": 2.023684024810791, + "learning_rate": 2.4191410221758885e-05, + "loss": 0.2657, + "step": 68300 + }, + { + "epoch": 2.6375535734970463, + "grad_norm": 1.0396549701690674, + "learning_rate": 2.4165669202157098e-05, + "loss": 0.1199, + "step": 68310 + }, + { + "epoch": 2.637939688791073, + "grad_norm": 0.8373544216156006, + 
"learning_rate": 2.4139928182555315e-05, + "loss": 0.1273, + "step": 68320 + }, + { + "epoch": 2.6383258040851, + "grad_norm": 1.0113970041275024, + "learning_rate": 2.4114187162953525e-05, + "loss": 0.1111, + "step": 68330 + }, + { + "epoch": 2.6387119193791264, + "grad_norm": 0.08151128888130188, + "learning_rate": 2.4088446143351738e-05, + "loss": 0.1949, + "step": 68340 + }, + { + "epoch": 2.6390980346731534, + "grad_norm": 1.629394292831421, + "learning_rate": 2.4062705123749955e-05, + "loss": 0.1135, + "step": 68350 + }, + { + "epoch": 2.6394841499671804, + "grad_norm": 0.11504169553518295, + "learning_rate": 2.4036964104148165e-05, + "loss": 0.1591, + "step": 68360 + }, + { + "epoch": 2.639870265261207, + "grad_norm": 1.0481438636779785, + "learning_rate": 2.401122308454638e-05, + "loss": 0.1151, + "step": 68370 + }, + { + "epoch": 2.640256380555234, + "grad_norm": 1.7637771368026733, + "learning_rate": 2.3985482064944595e-05, + "loss": 0.2499, + "step": 68380 + }, + { + "epoch": 2.6406424958492605, + "grad_norm": 0.43667173385620117, + "learning_rate": 2.3959741045342808e-05, + "loss": 0.0512, + "step": 68390 + }, + { + "epoch": 2.6410286111432875, + "grad_norm": 0.9862222075462341, + "learning_rate": 2.393400002574102e-05, + "loss": 0.1, + "step": 68400 + }, + { + "epoch": 2.641414726437314, + "grad_norm": 0.996530294418335, + "learning_rate": 2.3908259006139234e-05, + "loss": 0.1679, + "step": 68410 + }, + { + "epoch": 2.641800841731341, + "grad_norm": 1.451374888420105, + "learning_rate": 2.3882517986537448e-05, + "loss": 0.2103, + "step": 68420 + }, + { + "epoch": 2.642186957025368, + "grad_norm": 0.2730307877063751, + "learning_rate": 2.385677696693566e-05, + "loss": 0.1522, + "step": 68430 + }, + { + "epoch": 2.6425730723193945, + "grad_norm": 0.06040269508957863, + "learning_rate": 2.3831035947333874e-05, + "loss": 0.1278, + "step": 68440 + }, + { + "epoch": 2.6429591876134215, + "grad_norm": 1.716790795326233, + "learning_rate": 2.3805294927732088e-05, + "loss": 0.2005, + "step": 68450 + }, + { + "epoch": 2.643345302907448, + "grad_norm": 1.4266630411148071, + "learning_rate": 2.3779553908130304e-05, + "loss": 0.084, + "step": 68460 + }, + { + "epoch": 2.643731418201475, + "grad_norm": 0.24637824296951294, + "learning_rate": 2.3753812888528514e-05, + "loss": 0.1616, + "step": 68470 + }, + { + "epoch": 2.6441175334955016, + "grad_norm": 0.1911696344614029, + "learning_rate": 2.372807186892673e-05, + "loss": 0.2609, + "step": 68480 + }, + { + "epoch": 2.6445036487895286, + "grad_norm": 0.5890191793441772, + "learning_rate": 2.3702330849324944e-05, + "loss": 0.0889, + "step": 68490 + }, + { + "epoch": 2.6448897640835556, + "grad_norm": 1.8803762197494507, + "learning_rate": 2.3676589829723154e-05, + "loss": 0.1945, + "step": 68500 + }, + { + "epoch": 2.645275879377582, + "grad_norm": 0.13470906019210815, + "learning_rate": 2.365084881012137e-05, + "loss": 0.2507, + "step": 68510 + }, + { + "epoch": 2.6456619946716087, + "grad_norm": 0.5220600962638855, + "learning_rate": 2.362510779051958e-05, + "loss": 0.1619, + "step": 68520 + }, + { + "epoch": 2.6460481099656357, + "grad_norm": 0.04346944019198418, + "learning_rate": 2.3599366770917798e-05, + "loss": 0.162, + "step": 68530 + }, + { + "epoch": 2.6464342252596627, + "grad_norm": 1.4460773468017578, + "learning_rate": 2.357362575131601e-05, + "loss": 0.2857, + "step": 68540 + }, + { + "epoch": 2.646820340553689, + "grad_norm": 1.9554592370986938, + "learning_rate": 2.3547884731714224e-05, + "loss": 0.1108, + "step": 68550 + }, 
+ { + "epoch": 2.647206455847716, + "grad_norm": 0.596594512462616, + "learning_rate": 2.3522143712112438e-05, + "loss": 0.1234, + "step": 68560 + }, + { + "epoch": 2.647592571141743, + "grad_norm": 0.4433450996875763, + "learning_rate": 2.349640269251065e-05, + "loss": 0.2316, + "step": 68570 + }, + { + "epoch": 2.6479786864357697, + "grad_norm": 0.5461844801902771, + "learning_rate": 2.3470661672908864e-05, + "loss": 0.3235, + "step": 68580 + }, + { + "epoch": 2.6483648017297963, + "grad_norm": 0.3693888187408447, + "learning_rate": 2.3444920653307077e-05, + "loss": 0.2776, + "step": 68590 + }, + { + "epoch": 2.6487509170238233, + "grad_norm": 0.19041050970554352, + "learning_rate": 2.3419179633705294e-05, + "loss": 0.14, + "step": 68600 + }, + { + "epoch": 2.6491370323178502, + "grad_norm": 0.6536568999290466, + "learning_rate": 2.3393438614103504e-05, + "loss": 0.1418, + "step": 68610 + }, + { + "epoch": 2.649523147611877, + "grad_norm": 0.32842710614204407, + "learning_rate": 2.336769759450172e-05, + "loss": 0.1917, + "step": 68620 + }, + { + "epoch": 2.649909262905904, + "grad_norm": 1.8785744905471802, + "learning_rate": 2.3341956574899934e-05, + "loss": 0.0986, + "step": 68630 + }, + { + "epoch": 2.6502953781999308, + "grad_norm": 1.0904650688171387, + "learning_rate": 2.3316215555298147e-05, + "loss": 0.1419, + "step": 68640 + }, + { + "epoch": 2.6506814934939573, + "grad_norm": 0.49260103702545166, + "learning_rate": 2.329047453569636e-05, + "loss": 0.1906, + "step": 68650 + }, + { + "epoch": 2.651067608787984, + "grad_norm": 0.6473127007484436, + "learning_rate": 2.326473351609457e-05, + "loss": 0.1585, + "step": 68660 + }, + { + "epoch": 2.651453724082011, + "grad_norm": 0.6146073937416077, + "learning_rate": 2.3238992496492787e-05, + "loss": 0.1425, + "step": 68670 + }, + { + "epoch": 2.651839839376038, + "grad_norm": 1.9327075481414795, + "learning_rate": 2.3213251476891e-05, + "loss": 0.1849, + "step": 68680 + }, + { + "epoch": 2.6522259546700644, + "grad_norm": 2.2953224182128906, + "learning_rate": 2.3187510457289214e-05, + "loss": 0.2707, + "step": 68690 + }, + { + "epoch": 2.6526120699640914, + "grad_norm": 0.1470266729593277, + "learning_rate": 2.3161769437687427e-05, + "loss": 0.1072, + "step": 68700 + }, + { + "epoch": 2.652998185258118, + "grad_norm": 0.8499718308448792, + "learning_rate": 2.3136028418085644e-05, + "loss": 0.1817, + "step": 68710 + }, + { + "epoch": 2.653384300552145, + "grad_norm": 0.09872210770845413, + "learning_rate": 2.3110287398483854e-05, + "loss": 0.0622, + "step": 68720 + }, + { + "epoch": 2.6537704158461715, + "grad_norm": 0.6662464737892151, + "learning_rate": 2.3084546378882067e-05, + "loss": 0.1477, + "step": 68730 + }, + { + "epoch": 2.6541565311401984, + "grad_norm": 0.008908030577003956, + "learning_rate": 2.3058805359280284e-05, + "loss": 0.1977, + "step": 68740 + }, + { + "epoch": 2.6545426464342254, + "grad_norm": 0.6658633947372437, + "learning_rate": 2.3033064339678494e-05, + "loss": 0.1416, + "step": 68750 + }, + { + "epoch": 2.654928761728252, + "grad_norm": 2.4253549575805664, + "learning_rate": 2.300732332007671e-05, + "loss": 0.223, + "step": 68760 + }, + { + "epoch": 2.655314877022279, + "grad_norm": 0.08092183619737625, + "learning_rate": 2.2981582300474924e-05, + "loss": 0.1287, + "step": 68770 + }, + { + "epoch": 2.6557009923163055, + "grad_norm": 1.8364213705062866, + "learning_rate": 2.2955841280873137e-05, + "loss": 0.1968, + "step": 68780 + }, + { + "epoch": 2.6560871076103325, + "grad_norm": 0.2436826527118683, + 
"learning_rate": 2.293010026127135e-05, + "loss": 0.1102, + "step": 68790 + }, + { + "epoch": 2.656473222904359, + "grad_norm": 1.2653074264526367, + "learning_rate": 2.2904359241669564e-05, + "loss": 0.2286, + "step": 68800 + }, + { + "epoch": 2.656859338198386, + "grad_norm": 0.14631232619285583, + "learning_rate": 2.2878618222067777e-05, + "loss": 0.1099, + "step": 68810 + }, + { + "epoch": 2.657245453492413, + "grad_norm": 0.02367425337433815, + "learning_rate": 2.285287720246599e-05, + "loss": 0.1272, + "step": 68820 + }, + { + "epoch": 2.6576315687864396, + "grad_norm": 0.3960202932357788, + "learning_rate": 2.2827136182864204e-05, + "loss": 0.1486, + "step": 68830 + }, + { + "epoch": 2.6580176840804666, + "grad_norm": 0.7019187808036804, + "learning_rate": 2.2801395163262417e-05, + "loss": 0.137, + "step": 68840 + }, + { + "epoch": 2.658403799374493, + "grad_norm": 1.1136587858200073, + "learning_rate": 2.2775654143660634e-05, + "loss": 0.1244, + "step": 68850 + }, + { + "epoch": 2.65878991466852, + "grad_norm": 0.8158296942710876, + "learning_rate": 2.2749913124058844e-05, + "loss": 0.0528, + "step": 68860 + }, + { + "epoch": 2.6591760299625467, + "grad_norm": 2.001655101776123, + "learning_rate": 2.272417210445706e-05, + "loss": 0.0705, + "step": 68870 + }, + { + "epoch": 2.6595621452565736, + "grad_norm": 0.2512793242931366, + "learning_rate": 2.2698431084855274e-05, + "loss": 0.0891, + "step": 68880 + }, + { + "epoch": 2.6599482605506006, + "grad_norm": 2.8973586559295654, + "learning_rate": 2.2672690065253484e-05, + "loss": 0.0921, + "step": 68890 + }, + { + "epoch": 2.660334375844627, + "grad_norm": 2.393480062484741, + "learning_rate": 2.26469490456517e-05, + "loss": 0.2726, + "step": 68900 + }, + { + "epoch": 2.660720491138654, + "grad_norm": 3.534479856491089, + "learning_rate": 2.2621208026049914e-05, + "loss": 0.2297, + "step": 68910 + }, + { + "epoch": 2.6611066064326807, + "grad_norm": 1.4901084899902344, + "learning_rate": 2.2595467006448127e-05, + "loss": 0.1104, + "step": 68920 + }, + { + "epoch": 2.6614927217267077, + "grad_norm": 1.3615870475769043, + "learning_rate": 2.256972598684634e-05, + "loss": 0.2267, + "step": 68930 + }, + { + "epoch": 2.6618788370207342, + "grad_norm": 0.26768797636032104, + "learning_rate": 2.2543984967244554e-05, + "loss": 0.0842, + "step": 68940 + }, + { + "epoch": 2.6622649523147612, + "grad_norm": 0.5720809102058411, + "learning_rate": 2.2518243947642767e-05, + "loss": 0.0955, + "step": 68950 + }, + { + "epoch": 2.6626510676087882, + "grad_norm": 0.8448322415351868, + "learning_rate": 2.249250292804098e-05, + "loss": 0.1251, + "step": 68960 + }, + { + "epoch": 2.6630371829028148, + "grad_norm": 2.9490509033203125, + "learning_rate": 2.2466761908439193e-05, + "loss": 0.1593, + "step": 68970 + }, + { + "epoch": 2.6634232981968413, + "grad_norm": 1.1557024717330933, + "learning_rate": 2.2441020888837407e-05, + "loss": 0.1336, + "step": 68980 + }, + { + "epoch": 2.6638094134908683, + "grad_norm": 2.981727361679077, + "learning_rate": 2.2415279869235623e-05, + "loss": 0.1487, + "step": 68990 + }, + { + "epoch": 2.6641955287848953, + "grad_norm": 0.5381894707679749, + "learning_rate": 2.2389538849633833e-05, + "loss": 0.1814, + "step": 69000 + }, + { + "epoch": 2.664581644078922, + "grad_norm": 0.833191990852356, + "learning_rate": 2.236379783003205e-05, + "loss": 0.1515, + "step": 69010 + }, + { + "epoch": 2.664967759372949, + "grad_norm": 0.1587102711200714, + "learning_rate": 2.2338056810430263e-05, + "loss": 0.0568, + "step": 69020 + }, 
+ { + "epoch": 2.665353874666976, + "grad_norm": 1.5014970302581787, + "learning_rate": 2.2312315790828477e-05, + "loss": 0.1992, + "step": 69030 + }, + { + "epoch": 2.6657399899610024, + "grad_norm": 0.060449715703725815, + "learning_rate": 2.228657477122669e-05, + "loss": 0.1523, + "step": 69040 + }, + { + "epoch": 2.666126105255029, + "grad_norm": 1.7199037075042725, + "learning_rate": 2.22608337516249e-05, + "loss": 0.1056, + "step": 69050 + }, + { + "epoch": 2.666512220549056, + "grad_norm": 0.430899441242218, + "learning_rate": 2.2235092732023117e-05, + "loss": 0.1376, + "step": 69060 + }, + { + "epoch": 2.666898335843083, + "grad_norm": 0.16108714044094086, + "learning_rate": 2.220935171242133e-05, + "loss": 0.1109, + "step": 69070 + }, + { + "epoch": 2.6672844511371094, + "grad_norm": 3.1773228645324707, + "learning_rate": 2.2183610692819543e-05, + "loss": 0.3158, + "step": 69080 + }, + { + "epoch": 2.6676705664311364, + "grad_norm": 1.568304419517517, + "learning_rate": 2.2157869673217757e-05, + "loss": 0.2408, + "step": 69090 + }, + { + "epoch": 2.668056681725163, + "grad_norm": 1.8924100399017334, + "learning_rate": 2.2132128653615973e-05, + "loss": 0.2268, + "step": 69100 + }, + { + "epoch": 2.66844279701919, + "grad_norm": 2.1422247886657715, + "learning_rate": 2.2106387634014183e-05, + "loss": 0.1685, + "step": 69110 + }, + { + "epoch": 2.6688289123132165, + "grad_norm": 0.727570652961731, + "learning_rate": 2.2080646614412396e-05, + "loss": 0.3019, + "step": 69120 + }, + { + "epoch": 2.6692150276072435, + "grad_norm": 1.1987897157669067, + "learning_rate": 2.2054905594810613e-05, + "loss": 0.1511, + "step": 69130 + }, + { + "epoch": 2.6696011429012705, + "grad_norm": 2.25412654876709, + "learning_rate": 2.2029164575208823e-05, + "loss": 0.1538, + "step": 69140 + }, + { + "epoch": 2.669987258195297, + "grad_norm": 0.4829877018928528, + "learning_rate": 2.200342355560704e-05, + "loss": 0.2204, + "step": 69150 + }, + { + "epoch": 2.670373373489324, + "grad_norm": 0.8249949812889099, + "learning_rate": 2.1977682536005253e-05, + "loss": 0.1574, + "step": 69160 + }, + { + "epoch": 2.6707594887833506, + "grad_norm": 0.47408896684646606, + "learning_rate": 2.1951941516403466e-05, + "loss": 0.131, + "step": 69170 + }, + { + "epoch": 2.6711456040773776, + "grad_norm": 3.192263126373291, + "learning_rate": 2.192620049680168e-05, + "loss": 0.1284, + "step": 69180 + }, + { + "epoch": 2.671531719371404, + "grad_norm": 1.6318609714508057, + "learning_rate": 2.1900459477199893e-05, + "loss": 0.182, + "step": 69190 + }, + { + "epoch": 2.671917834665431, + "grad_norm": 2.031730890274048, + "learning_rate": 2.1874718457598106e-05, + "loss": 0.1774, + "step": 69200 + }, + { + "epoch": 2.672303949959458, + "grad_norm": 0.053225722163915634, + "learning_rate": 2.184897743799632e-05, + "loss": 0.2002, + "step": 69210 + }, + { + "epoch": 2.6726900652534846, + "grad_norm": 1.1087912321090698, + "learning_rate": 2.1823236418394533e-05, + "loss": 0.3255, + "step": 69220 + }, + { + "epoch": 2.6730761805475116, + "grad_norm": 1.7376277446746826, + "learning_rate": 2.1797495398792746e-05, + "loss": 0.1705, + "step": 69230 + }, + { + "epoch": 2.673462295841538, + "grad_norm": 0.7733955383300781, + "learning_rate": 2.1771754379190963e-05, + "loss": 0.2284, + "step": 69240 + }, + { + "epoch": 2.673848411135565, + "grad_norm": 2.198826313018799, + "learning_rate": 2.1746013359589173e-05, + "loss": 0.2463, + "step": 69250 + }, + { + "epoch": 2.6742345264295917, + "grad_norm": 0.9791239500045776, + 
"learning_rate": 2.172027233998739e-05, + "loss": 0.365, + "step": 69260 + }, + { + "epoch": 2.6746206417236187, + "grad_norm": 1.0145782232284546, + "learning_rate": 2.1694531320385603e-05, + "loss": 0.1168, + "step": 69270 + }, + { + "epoch": 2.6750067570176457, + "grad_norm": 1.506508708000183, + "learning_rate": 2.1668790300783813e-05, + "loss": 0.1254, + "step": 69280 + }, + { + "epoch": 2.675392872311672, + "grad_norm": 0.9859924912452698, + "learning_rate": 2.164304928118203e-05, + "loss": 0.2124, + "step": 69290 + }, + { + "epoch": 2.675778987605699, + "grad_norm": 1.4127247333526611, + "learning_rate": 2.1617308261580243e-05, + "loss": 0.1644, + "step": 69300 + }, + { + "epoch": 2.6761651028997258, + "grad_norm": 0.8753447532653809, + "learning_rate": 2.1591567241978456e-05, + "loss": 0.1948, + "step": 69310 + }, + { + "epoch": 2.6765512181937527, + "grad_norm": 0.18299230933189392, + "learning_rate": 2.156582622237667e-05, + "loss": 0.1042, + "step": 69320 + }, + { + "epoch": 2.6769373334877793, + "grad_norm": 0.9009674191474915, + "learning_rate": 2.1540085202774883e-05, + "loss": 0.0899, + "step": 69330 + }, + { + "epoch": 2.6773234487818063, + "grad_norm": 0.8263937830924988, + "learning_rate": 2.1514344183173096e-05, + "loss": 0.0843, + "step": 69340 + }, + { + "epoch": 2.6777095640758333, + "grad_norm": 0.40176376700401306, + "learning_rate": 2.148860316357131e-05, + "loss": 0.1645, + "step": 69350 + }, + { + "epoch": 2.67809567936986, + "grad_norm": 1.2371177673339844, + "learning_rate": 2.1462862143969523e-05, + "loss": 0.1491, + "step": 69360 + }, + { + "epoch": 2.6784817946638864, + "grad_norm": 0.6874446272850037, + "learning_rate": 2.1437121124367736e-05, + "loss": 0.1887, + "step": 69370 + }, + { + "epoch": 2.6788679099579134, + "grad_norm": 0.08807168155908585, + "learning_rate": 2.1411380104765953e-05, + "loss": 0.1049, + "step": 69380 + }, + { + "epoch": 2.6792540252519403, + "grad_norm": 2.0971579551696777, + "learning_rate": 2.1385639085164163e-05, + "loss": 0.135, + "step": 69390 + }, + { + "epoch": 2.679640140545967, + "grad_norm": 0.9297891855239868, + "learning_rate": 2.135989806556238e-05, + "loss": 0.2356, + "step": 69400 + }, + { + "epoch": 2.680026255839994, + "grad_norm": 2.100465774536133, + "learning_rate": 2.1334157045960593e-05, + "loss": 0.225, + "step": 69410 + }, + { + "epoch": 2.680412371134021, + "grad_norm": 0.14785470068454742, + "learning_rate": 2.1308416026358806e-05, + "loss": 0.1806, + "step": 69420 + }, + { + "epoch": 2.6807984864280474, + "grad_norm": 0.03883717209100723, + "learning_rate": 2.128267500675702e-05, + "loss": 0.0575, + "step": 69430 + }, + { + "epoch": 2.681184601722074, + "grad_norm": 0.515643835067749, + "learning_rate": 2.125693398715523e-05, + "loss": 0.1095, + "step": 69440 + }, + { + "epoch": 2.681570717016101, + "grad_norm": 0.21258410811424255, + "learning_rate": 2.1231192967553446e-05, + "loss": 0.1496, + "step": 69450 + }, + { + "epoch": 2.681956832310128, + "grad_norm": 1.14195716381073, + "learning_rate": 2.120545194795166e-05, + "loss": 0.182, + "step": 69460 + }, + { + "epoch": 2.6823429476041545, + "grad_norm": 0.43386051058769226, + "learning_rate": 2.1179710928349873e-05, + "loss": 0.2241, + "step": 69470 + }, + { + "epoch": 2.6827290628981815, + "grad_norm": 0.4654422700405121, + "learning_rate": 2.1153969908748086e-05, + "loss": 0.2264, + "step": 69480 + }, + { + "epoch": 2.6831151781922085, + "grad_norm": 0.8086020350456238, + "learning_rate": 2.1128228889146303e-05, + "loss": 0.1634, + "step": 69490 + 
}, + { + "epoch": 2.683501293486235, + "grad_norm": 0.03701169416308403, + "learning_rate": 2.1102487869544512e-05, + "loss": 0.1117, + "step": 69500 + }, + { + "epoch": 2.6838874087802616, + "grad_norm": 0.9567661285400391, + "learning_rate": 2.1076746849942726e-05, + "loss": 0.113, + "step": 69510 + }, + { + "epoch": 2.6842735240742885, + "grad_norm": 1.7322033643722534, + "learning_rate": 2.1051005830340943e-05, + "loss": 0.1443, + "step": 69520 + }, + { + "epoch": 2.6846596393683155, + "grad_norm": 1.8574343919754028, + "learning_rate": 2.1025264810739152e-05, + "loss": 0.0919, + "step": 69530 + }, + { + "epoch": 2.685045754662342, + "grad_norm": 0.0813397541642189, + "learning_rate": 2.099952379113737e-05, + "loss": 0.061, + "step": 69540 + }, + { + "epoch": 2.685431869956369, + "grad_norm": 0.09124821424484253, + "learning_rate": 2.0973782771535582e-05, + "loss": 0.0703, + "step": 69550 + }, + { + "epoch": 2.6858179852503956, + "grad_norm": 1.2731401920318604, + "learning_rate": 2.0948041751933796e-05, + "loss": 0.1788, + "step": 69560 + }, + { + "epoch": 2.6862041005444226, + "grad_norm": 0.38222697377204895, + "learning_rate": 2.092230073233201e-05, + "loss": 0.22, + "step": 69570 + }, + { + "epoch": 2.686590215838449, + "grad_norm": 0.7840344905853271, + "learning_rate": 2.0896559712730222e-05, + "loss": 0.0511, + "step": 69580 + }, + { + "epoch": 2.686976331132476, + "grad_norm": 0.5814514756202698, + "learning_rate": 2.0870818693128436e-05, + "loss": 0.1282, + "step": 69590 + }, + { + "epoch": 2.687362446426503, + "grad_norm": 2.049823045730591, + "learning_rate": 2.084507767352665e-05, + "loss": 0.1928, + "step": 69600 + }, + { + "epoch": 2.6877485617205297, + "grad_norm": 1.726441502571106, + "learning_rate": 2.0819336653924862e-05, + "loss": 0.2271, + "step": 69610 + }, + { + "epoch": 2.6881346770145567, + "grad_norm": 0.6086135506629944, + "learning_rate": 2.0793595634323076e-05, + "loss": 0.0826, + "step": 69620 + }, + { + "epoch": 2.688520792308583, + "grad_norm": 0.025057394057512283, + "learning_rate": 2.0767854614721292e-05, + "loss": 0.0945, + "step": 69630 + }, + { + "epoch": 2.68890690760261, + "grad_norm": 0.5811958312988281, + "learning_rate": 2.0742113595119502e-05, + "loss": 0.115, + "step": 69640 + }, + { + "epoch": 2.6892930228966367, + "grad_norm": 1.4226329326629639, + "learning_rate": 2.071637257551772e-05, + "loss": 0.1394, + "step": 69650 + }, + { + "epoch": 2.6896791381906637, + "grad_norm": 0.06111827492713928, + "learning_rate": 2.0690631555915932e-05, + "loss": 0.0785, + "step": 69660 + }, + { + "epoch": 2.6900652534846907, + "grad_norm": 1.7739264965057373, + "learning_rate": 2.0664890536314142e-05, + "loss": 0.0966, + "step": 69670 + }, + { + "epoch": 2.6904513687787173, + "grad_norm": 0.12530933320522308, + "learning_rate": 2.063914951671236e-05, + "loss": 0.1142, + "step": 69680 + }, + { + "epoch": 2.6908374840727443, + "grad_norm": 0.44947731494903564, + "learning_rate": 2.0613408497110572e-05, + "loss": 0.1069, + "step": 69690 + }, + { + "epoch": 2.691223599366771, + "grad_norm": 1.1874277591705322, + "learning_rate": 2.0587667477508786e-05, + "loss": 0.2498, + "step": 69700 + }, + { + "epoch": 2.691609714660798, + "grad_norm": 0.2770039141178131, + "learning_rate": 2.0561926457907e-05, + "loss": 0.0918, + "step": 69710 + }, + { + "epoch": 2.6919958299548243, + "grad_norm": 0.6367407441139221, + "learning_rate": 2.0536185438305212e-05, + "loss": 0.1444, + "step": 69720 + }, + { + "epoch": 2.6923819452488513, + "grad_norm": 4.309720993041992, + 
"learning_rate": 2.0510444418703425e-05, + "loss": 0.1751, + "step": 69730 + }, + { + "epoch": 2.6927680605428783, + "grad_norm": 0.19197171926498413, + "learning_rate": 2.048470339910164e-05, + "loss": 0.3638, + "step": 69740 + }, + { + "epoch": 2.693154175836905, + "grad_norm": 1.1299902200698853, + "learning_rate": 2.0458962379499852e-05, + "loss": 0.2047, + "step": 69750 + }, + { + "epoch": 2.693540291130932, + "grad_norm": 2.2639973163604736, + "learning_rate": 2.0433221359898065e-05, + "loss": 0.1683, + "step": 69760 + }, + { + "epoch": 2.6939264064249584, + "grad_norm": 0.7595259547233582, + "learning_rate": 2.0407480340296282e-05, + "loss": 0.1393, + "step": 69770 + }, + { + "epoch": 2.6943125217189854, + "grad_norm": 0.1113772988319397, + "learning_rate": 2.0381739320694492e-05, + "loss": 0.1775, + "step": 69780 + }, + { + "epoch": 2.694698637013012, + "grad_norm": 0.8597696423530579, + "learning_rate": 2.035599830109271e-05, + "loss": 0.092, + "step": 69790 + }, + { + "epoch": 2.695084752307039, + "grad_norm": 0.9622846841812134, + "learning_rate": 2.0330257281490922e-05, + "loss": 0.1407, + "step": 69800 + }, + { + "epoch": 2.695470867601066, + "grad_norm": 1.840627908706665, + "learning_rate": 2.0304516261889135e-05, + "loss": 0.1769, + "step": 69810 + }, + { + "epoch": 2.6958569828950925, + "grad_norm": 0.8552238941192627, + "learning_rate": 2.027877524228735e-05, + "loss": 0.263, + "step": 69820 + }, + { + "epoch": 2.696243098189119, + "grad_norm": 0.16129668056964874, + "learning_rate": 2.025303422268556e-05, + "loss": 0.188, + "step": 69830 + }, + { + "epoch": 2.696629213483146, + "grad_norm": 0.46955424547195435, + "learning_rate": 2.0227293203083775e-05, + "loss": 0.0623, + "step": 69840 + }, + { + "epoch": 2.697015328777173, + "grad_norm": 1.0460457801818848, + "learning_rate": 2.020155218348199e-05, + "loss": 0.1932, + "step": 69850 + }, + { + "epoch": 2.6974014440711995, + "grad_norm": 4.531238555908203, + "learning_rate": 2.0175811163880202e-05, + "loss": 0.323, + "step": 69860 + }, + { + "epoch": 2.6977875593652265, + "grad_norm": 1.188908338546753, + "learning_rate": 2.0150070144278415e-05, + "loss": 0.1389, + "step": 69870 + }, + { + "epoch": 2.6981736746592535, + "grad_norm": 0.3120017349720001, + "learning_rate": 2.0124329124676632e-05, + "loss": 0.1732, + "step": 69880 + }, + { + "epoch": 2.69855978995328, + "grad_norm": 1.2681177854537964, + "learning_rate": 2.0098588105074842e-05, + "loss": 0.1452, + "step": 69890 + }, + { + "epoch": 2.6989459052473066, + "grad_norm": 0.07245191186666489, + "learning_rate": 2.0072847085473055e-05, + "loss": 0.0982, + "step": 69900 + }, + { + "epoch": 2.6993320205413336, + "grad_norm": 1.7727508544921875, + "learning_rate": 2.0047106065871272e-05, + "loss": 0.0806, + "step": 69910 + }, + { + "epoch": 2.6997181358353606, + "grad_norm": 0.6012092232704163, + "learning_rate": 2.0021365046269482e-05, + "loss": 0.1377, + "step": 69920 + }, + { + "epoch": 2.700104251129387, + "grad_norm": 0.6156259775161743, + "learning_rate": 1.99956240266677e-05, + "loss": 0.1788, + "step": 69930 + }, + { + "epoch": 2.700490366423414, + "grad_norm": 1.6917505264282227, + "learning_rate": 1.9969883007065912e-05, + "loss": 0.1588, + "step": 69940 + }, + { + "epoch": 2.700876481717441, + "grad_norm": 2.0406925678253174, + "learning_rate": 1.9944141987464125e-05, + "loss": 0.1541, + "step": 69950 + }, + { + "epoch": 2.7012625970114676, + "grad_norm": 3.067919969558716, + "learning_rate": 1.991840096786234e-05, + "loss": 0.1052, + "step": 69960 + }, + { 
+ "epoch": 2.701648712305494, + "grad_norm": 0.7679221034049988, + "learning_rate": 1.989265994826055e-05, + "loss": 0.2131, + "step": 69970 + }, + { + "epoch": 2.702034827599521, + "grad_norm": 0.9475175738334656, + "learning_rate": 1.9866918928658765e-05, + "loss": 0.1016, + "step": 69980 + }, + { + "epoch": 2.702420942893548, + "grad_norm": 1.2485641241073608, + "learning_rate": 1.984117790905698e-05, + "loss": 0.0922, + "step": 69990 + }, + { + "epoch": 2.7028070581875747, + "grad_norm": 0.9329742789268494, + "learning_rate": 1.981543688945519e-05, + "loss": 0.3189, + "step": 70000 + }, + { + "epoch": 2.7031931734816017, + "grad_norm": 0.6140137314796448, + "learning_rate": 1.9789695869853405e-05, + "loss": 0.1054, + "step": 70010 + }, + { + "epoch": 2.7035792887756283, + "grad_norm": 0.938637375831604, + "learning_rate": 1.976395485025162e-05, + "loss": 0.1276, + "step": 70020 + }, + { + "epoch": 2.7039654040696552, + "grad_norm": 1.2453144788742065, + "learning_rate": 1.973821383064983e-05, + "loss": 0.2376, + "step": 70030 + }, + { + "epoch": 2.704351519363682, + "grad_norm": 0.7409077286720276, + "learning_rate": 1.9712472811048048e-05, + "loss": 0.1269, + "step": 70040 + }, + { + "epoch": 2.704737634657709, + "grad_norm": 1.8768031597137451, + "learning_rate": 1.968673179144626e-05, + "loss": 0.235, + "step": 70050 + }, + { + "epoch": 2.7051237499517358, + "grad_norm": 0.23621875047683716, + "learning_rate": 1.966099077184447e-05, + "loss": 0.1042, + "step": 70060 + }, + { + "epoch": 2.7055098652457623, + "grad_norm": 0.2492808699607849, + "learning_rate": 1.9635249752242688e-05, + "loss": 0.2042, + "step": 70070 + }, + { + "epoch": 2.7058959805397893, + "grad_norm": 1.5175273418426514, + "learning_rate": 1.96095087326409e-05, + "loss": 0.2068, + "step": 70080 + }, + { + "epoch": 2.706282095833816, + "grad_norm": 0.9314035773277283, + "learning_rate": 1.9583767713039115e-05, + "loss": 0.1587, + "step": 70090 + }, + { + "epoch": 2.706668211127843, + "grad_norm": 1.7147942781448364, + "learning_rate": 1.9558026693437328e-05, + "loss": 0.1391, + "step": 70100 + }, + { + "epoch": 2.7070543264218694, + "grad_norm": 0.36276572942733765, + "learning_rate": 1.953228567383554e-05, + "loss": 0.1275, + "step": 70110 + }, + { + "epoch": 2.7074404417158964, + "grad_norm": 0.8741244077682495, + "learning_rate": 1.9506544654233755e-05, + "loss": 0.3746, + "step": 70120 + }, + { + "epoch": 2.7078265570099234, + "grad_norm": 1.3060029745101929, + "learning_rate": 1.9480803634631968e-05, + "loss": 0.1076, + "step": 70130 + }, + { + "epoch": 2.70821267230395, + "grad_norm": 0.10746710747480392, + "learning_rate": 1.945506261503018e-05, + "loss": 0.1494, + "step": 70140 + }, + { + "epoch": 2.708598787597977, + "grad_norm": 1.486136555671692, + "learning_rate": 1.9429321595428395e-05, + "loss": 0.1223, + "step": 70150 + }, + { + "epoch": 2.7089849028920034, + "grad_norm": 1.184791088104248, + "learning_rate": 1.940358057582661e-05, + "loss": 0.1055, + "step": 70160 + }, + { + "epoch": 2.7093710181860304, + "grad_norm": 0.10033337771892548, + "learning_rate": 1.937783955622482e-05, + "loss": 0.083, + "step": 70170 + }, + { + "epoch": 2.709757133480057, + "grad_norm": 1.3200103044509888, + "learning_rate": 1.9352098536623038e-05, + "loss": 0.2181, + "step": 70180 + }, + { + "epoch": 2.710143248774084, + "grad_norm": 0.013393727131187916, + "learning_rate": 1.932635751702125e-05, + "loss": 0.2415, + "step": 70190 + }, + { + "epoch": 2.710529364068111, + "grad_norm": 0.5986078381538391, + "learning_rate": 
1.9300616497419465e-05, + "loss": 0.3203, + "step": 70200 + }, + { + "epoch": 2.7109154793621375, + "grad_norm": 0.18333138525485992, + "learning_rate": 1.9274875477817678e-05, + "loss": 0.1043, + "step": 70210 + }, + { + "epoch": 2.7113015946561645, + "grad_norm": 1.0568320751190186, + "learning_rate": 1.924913445821589e-05, + "loss": 0.1491, + "step": 70220 + }, + { + "epoch": 2.711687709950191, + "grad_norm": 4.519015312194824, + "learning_rate": 1.9223393438614105e-05, + "loss": 0.2732, + "step": 70230 + }, + { + "epoch": 2.712073825244218, + "grad_norm": 0.753288209438324, + "learning_rate": 1.9197652419012318e-05, + "loss": 0.1793, + "step": 70240 + }, + { + "epoch": 2.7124599405382446, + "grad_norm": 0.8923632502555847, + "learning_rate": 1.917191139941053e-05, + "loss": 0.3293, + "step": 70250 + }, + { + "epoch": 2.7128460558322716, + "grad_norm": 1.5100219249725342, + "learning_rate": 1.9146170379808744e-05, + "loss": 0.2252, + "step": 70260 + }, + { + "epoch": 2.7132321711262986, + "grad_norm": 3.496548891067505, + "learning_rate": 1.912042936020696e-05, + "loss": 0.3877, + "step": 70270 + }, + { + "epoch": 2.713618286420325, + "grad_norm": 1.0033410787582397, + "learning_rate": 1.909468834060517e-05, + "loss": 0.1854, + "step": 70280 + }, + { + "epoch": 2.7140044017143516, + "grad_norm": 2.1113343238830566, + "learning_rate": 1.9068947321003384e-05, + "loss": 0.2561, + "step": 70290 + }, + { + "epoch": 2.7143905170083786, + "grad_norm": 3.4603283405303955, + "learning_rate": 1.90432063014016e-05, + "loss": 0.2673, + "step": 70300 + }, + { + "epoch": 2.7147766323024056, + "grad_norm": 0.4921800196170807, + "learning_rate": 1.901746528179981e-05, + "loss": 0.1819, + "step": 70310 + }, + { + "epoch": 2.715162747596432, + "grad_norm": 0.12639844417572021, + "learning_rate": 1.8991724262198028e-05, + "loss": 0.1302, + "step": 70320 + }, + { + "epoch": 2.715548862890459, + "grad_norm": 0.988345205783844, + "learning_rate": 1.896598324259624e-05, + "loss": 0.2827, + "step": 70330 + }, + { + "epoch": 2.715934978184486, + "grad_norm": 1.432824730873108, + "learning_rate": 1.8940242222994454e-05, + "loss": 0.1046, + "step": 70340 + }, + { + "epoch": 2.7163210934785127, + "grad_norm": 0.4591884911060333, + "learning_rate": 1.8914501203392668e-05, + "loss": 0.2663, + "step": 70350 + }, + { + "epoch": 2.7167072087725392, + "grad_norm": 0.23119209706783295, + "learning_rate": 1.888876018379088e-05, + "loss": 0.3555, + "step": 70360 + }, + { + "epoch": 2.7170933240665662, + "grad_norm": 1.9221980571746826, + "learning_rate": 1.8863019164189094e-05, + "loss": 0.1279, + "step": 70370 + }, + { + "epoch": 2.717479439360593, + "grad_norm": 0.09880539029836655, + "learning_rate": 1.8837278144587308e-05, + "loss": 0.0905, + "step": 70380 + }, + { + "epoch": 2.7178655546546198, + "grad_norm": 0.16270965337753296, + "learning_rate": 1.881153712498552e-05, + "loss": 0.1619, + "step": 70390 + }, + { + "epoch": 2.7182516699486468, + "grad_norm": 2.006127119064331, + "learning_rate": 1.8785796105383734e-05, + "loss": 0.107, + "step": 70400 + }, + { + "epoch": 2.7186377852426733, + "grad_norm": 3.8377106189727783, + "learning_rate": 1.876005508578195e-05, + "loss": 0.1109, + "step": 70410 + }, + { + "epoch": 2.7190239005367003, + "grad_norm": 0.5417147278785706, + "learning_rate": 1.873431406618016e-05, + "loss": 0.0836, + "step": 70420 + }, + { + "epoch": 2.719410015830727, + "grad_norm": 0.2515392601490021, + "learning_rate": 1.8708573046578378e-05, + "loss": 0.1107, + "step": 70430 + }, + { + "epoch": 
2.719796131124754, + "grad_norm": 1.813265323638916, + "learning_rate": 1.868283202697659e-05, + "loss": 0.125, + "step": 70440 + }, + { + "epoch": 2.720182246418781, + "grad_norm": 1.873964548110962, + "learning_rate": 1.86570910073748e-05, + "loss": 0.1649, + "step": 70450 + }, + { + "epoch": 2.7205683617128074, + "grad_norm": 0.23251821100711823, + "learning_rate": 1.8631349987773017e-05, + "loss": 0.1394, + "step": 70460 + }, + { + "epoch": 2.7209544770068343, + "grad_norm": 3.282196521759033, + "learning_rate": 1.860560896817123e-05, + "loss": 0.3037, + "step": 70470 + }, + { + "epoch": 2.721340592300861, + "grad_norm": 1.481994867324829, + "learning_rate": 1.8579867948569444e-05, + "loss": 0.194, + "step": 70480 + }, + { + "epoch": 2.721726707594888, + "grad_norm": 2.3261420726776123, + "learning_rate": 1.8554126928967657e-05, + "loss": 0.2485, + "step": 70490 + }, + { + "epoch": 2.7221128228889144, + "grad_norm": 0.7159029841423035, + "learning_rate": 1.852838590936587e-05, + "loss": 0.0882, + "step": 70500 + }, + { + "epoch": 2.7224989381829414, + "grad_norm": 1.13942289352417, + "learning_rate": 1.8502644889764084e-05, + "loss": 0.2973, + "step": 70510 + }, + { + "epoch": 2.7228850534769684, + "grad_norm": 0.5620355606079102, + "learning_rate": 1.8476903870162297e-05, + "loss": 0.1388, + "step": 70520 + }, + { + "epoch": 2.723271168770995, + "grad_norm": 0.3864080607891083, + "learning_rate": 1.845116285056051e-05, + "loss": 0.3104, + "step": 70530 + }, + { + "epoch": 2.723657284065022, + "grad_norm": 0.19849848747253418, + "learning_rate": 1.8425421830958724e-05, + "loss": 0.1077, + "step": 70540 + }, + { + "epoch": 2.7240433993590485, + "grad_norm": 0.5163066983222961, + "learning_rate": 1.839968081135694e-05, + "loss": 0.1412, + "step": 70550 + }, + { + "epoch": 2.7244295146530755, + "grad_norm": 2.6426632404327393, + "learning_rate": 1.837393979175515e-05, + "loss": 0.1635, + "step": 70560 + }, + { + "epoch": 2.724815629947102, + "grad_norm": 0.09032654017210007, + "learning_rate": 1.8348198772153367e-05, + "loss": 0.1136, + "step": 70570 + }, + { + "epoch": 2.725201745241129, + "grad_norm": 0.20428933203220367, + "learning_rate": 1.832245775255158e-05, + "loss": 0.0685, + "step": 70580 + }, + { + "epoch": 2.725587860535156, + "grad_norm": 0.16030457615852356, + "learning_rate": 1.8296716732949794e-05, + "loss": 0.236, + "step": 70590 + }, + { + "epoch": 2.7259739758291825, + "grad_norm": 0.4269642233848572, + "learning_rate": 1.8270975713348007e-05, + "loss": 0.1099, + "step": 70600 + }, + { + "epoch": 2.7263600911232095, + "grad_norm": 0.821434736251831, + "learning_rate": 1.824523469374622e-05, + "loss": 0.0975, + "step": 70610 + }, + { + "epoch": 2.726746206417236, + "grad_norm": 0.6720656156539917, + "learning_rate": 1.8219493674144434e-05, + "loss": 0.286, + "step": 70620 + }, + { + "epoch": 2.727132321711263, + "grad_norm": 1.6225451231002808, + "learning_rate": 1.8193752654542647e-05, + "loss": 0.1615, + "step": 70630 + }, + { + "epoch": 2.7275184370052896, + "grad_norm": 2.2176315784454346, + "learning_rate": 1.816801163494086e-05, + "loss": 0.2519, + "step": 70640 + }, + { + "epoch": 2.7279045522993166, + "grad_norm": 0.7203749418258667, + "learning_rate": 1.8142270615339074e-05, + "loss": 0.0635, + "step": 70650 + }, + { + "epoch": 2.7282906675933436, + "grad_norm": 1.0711476802825928, + "learning_rate": 1.8116529595737287e-05, + "loss": 0.1388, + "step": 70660 + }, + { + "epoch": 2.72867678288737, + "grad_norm": 0.3595193922519684, + "learning_rate": 
1.80907885761355e-05, + "loss": 0.3019, + "step": 70670 + }, + { + "epoch": 2.7290628981813967, + "grad_norm": 2.369717597961426, + "learning_rate": 1.8065047556533714e-05, + "loss": 0.202, + "step": 70680 + }, + { + "epoch": 2.7294490134754237, + "grad_norm": 0.9249016046524048, + "learning_rate": 1.803930653693193e-05, + "loss": 0.1571, + "step": 70690 + }, + { + "epoch": 2.7298351287694507, + "grad_norm": 1.1907705068588257, + "learning_rate": 1.801356551733014e-05, + "loss": 0.1, + "step": 70700 + }, + { + "epoch": 2.730221244063477, + "grad_norm": 1.934824824333191, + "learning_rate": 1.7987824497728357e-05, + "loss": 0.121, + "step": 70710 + }, + { + "epoch": 2.730607359357504, + "grad_norm": 0.8659215569496155, + "learning_rate": 1.796208347812657e-05, + "loss": 0.1184, + "step": 70720 + }, + { + "epoch": 2.730993474651531, + "grad_norm": 4.406744956970215, + "learning_rate": 1.7936342458524784e-05, + "loss": 0.1405, + "step": 70730 + }, + { + "epoch": 2.7313795899455577, + "grad_norm": 1.3139662742614746, + "learning_rate": 1.7910601438922997e-05, + "loss": 0.1341, + "step": 70740 + }, + { + "epoch": 2.7317657052395843, + "grad_norm": 1.3049808740615845, + "learning_rate": 1.788486041932121e-05, + "loss": 0.096, + "step": 70750 + }, + { + "epoch": 2.7321518205336113, + "grad_norm": 0.05369478836655617, + "learning_rate": 1.7859119399719424e-05, + "loss": 0.1148, + "step": 70760 + }, + { + "epoch": 2.7325379358276383, + "grad_norm": 0.028254307806491852, + "learning_rate": 1.7833378380117637e-05, + "loss": 0.1491, + "step": 70770 + }, + { + "epoch": 2.732924051121665, + "grad_norm": 0.44366562366485596, + "learning_rate": 1.780763736051585e-05, + "loss": 0.1294, + "step": 70780 + }, + { + "epoch": 2.733310166415692, + "grad_norm": 1.80902099609375, + "learning_rate": 1.7781896340914064e-05, + "loss": 0.4409, + "step": 70790 + }, + { + "epoch": 2.733696281709719, + "grad_norm": 0.48230284452438354, + "learning_rate": 1.775615532131228e-05, + "loss": 0.1516, + "step": 70800 + }, + { + "epoch": 2.7340823970037453, + "grad_norm": 1.295810341835022, + "learning_rate": 1.773041430171049e-05, + "loss": 0.1375, + "step": 70810 + }, + { + "epoch": 2.734468512297772, + "grad_norm": 0.05213301628828049, + "learning_rate": 1.7704673282108707e-05, + "loss": 0.1678, + "step": 70820 + }, + { + "epoch": 2.734854627591799, + "grad_norm": 1.5852004289627075, + "learning_rate": 1.767893226250692e-05, + "loss": 0.2333, + "step": 70830 + }, + { + "epoch": 2.735240742885826, + "grad_norm": 0.05190286785364151, + "learning_rate": 1.765319124290513e-05, + "loss": 0.1131, + "step": 70840 + }, + { + "epoch": 2.7356268581798524, + "grad_norm": 0.9715459942817688, + "learning_rate": 1.7627450223303347e-05, + "loss": 0.2, + "step": 70850 + }, + { + "epoch": 2.7360129734738794, + "grad_norm": 1.0015023946762085, + "learning_rate": 1.760170920370156e-05, + "loss": 0.1492, + "step": 70860 + }, + { + "epoch": 2.736399088767906, + "grad_norm": 0.4785858392715454, + "learning_rate": 1.7575968184099773e-05, + "loss": 0.1157, + "step": 70870 + }, + { + "epoch": 2.736785204061933, + "grad_norm": 1.2634512186050415, + "learning_rate": 1.7550227164497987e-05, + "loss": 0.196, + "step": 70880 + }, + { + "epoch": 2.7371713193559595, + "grad_norm": 0.08982031047344208, + "learning_rate": 1.75244861448962e-05, + "loss": 0.1772, + "step": 70890 + }, + { + "epoch": 2.7375574346499865, + "grad_norm": 0.2539536952972412, + "learning_rate": 1.7498745125294413e-05, + "loss": 0.2373, + "step": 70900 + }, + { + "epoch": 
2.7379435499440135, + "grad_norm": 1.94331955909729, + "learning_rate": 1.7473004105692627e-05, + "loss": 0.1113, + "step": 70910 + }, + { + "epoch": 2.73832966523804, + "grad_norm": 1.7793807983398438, + "learning_rate": 1.744726308609084e-05, + "loss": 0.1222, + "step": 70920 + }, + { + "epoch": 2.738715780532067, + "grad_norm": 0.9183433055877686, + "learning_rate": 1.7421522066489053e-05, + "loss": 0.0982, + "step": 70930 + }, + { + "epoch": 2.7391018958260935, + "grad_norm": 0.7785767316818237, + "learning_rate": 1.739578104688727e-05, + "loss": 0.2077, + "step": 70940 + }, + { + "epoch": 2.7394880111201205, + "grad_norm": 0.363359272480011, + "learning_rate": 1.737004002728548e-05, + "loss": 0.2365, + "step": 70950 + }, + { + "epoch": 2.739874126414147, + "grad_norm": 0.026698095723986626, + "learning_rate": 1.7344299007683697e-05, + "loss": 0.185, + "step": 70960 + }, + { + "epoch": 2.740260241708174, + "grad_norm": 0.3695981502532959, + "learning_rate": 1.731855798808191e-05, + "loss": 0.1889, + "step": 70970 + }, + { + "epoch": 2.740646357002201, + "grad_norm": 0.43547266721725464, + "learning_rate": 1.7292816968480123e-05, + "loss": 0.1945, + "step": 70980 + }, + { + "epoch": 2.7410324722962276, + "grad_norm": 0.815490186214447, + "learning_rate": 1.7267075948878337e-05, + "loss": 0.1461, + "step": 70990 + }, + { + "epoch": 2.7414185875902546, + "grad_norm": 0.9073535799980164, + "learning_rate": 1.724133492927655e-05, + "loss": 0.3003, + "step": 71000 + }, + { + "epoch": 2.741804702884281, + "grad_norm": 1.8418182134628296, + "learning_rate": 1.7215593909674763e-05, + "loss": 0.1591, + "step": 71010 + }, + { + "epoch": 2.742190818178308, + "grad_norm": 0.6584638953208923, + "learning_rate": 1.7189852890072976e-05, + "loss": 0.0845, + "step": 71020 + }, + { + "epoch": 2.7425769334723347, + "grad_norm": 0.15774297714233398, + "learning_rate": 1.716411187047119e-05, + "loss": 0.185, + "step": 71030 + }, + { + "epoch": 2.7429630487663617, + "grad_norm": 1.1900436878204346, + "learning_rate": 1.7138370850869403e-05, + "loss": 0.2387, + "step": 71040 + }, + { + "epoch": 2.7433491640603886, + "grad_norm": 1.486275553703308, + "learning_rate": 1.7112629831267616e-05, + "loss": 0.1802, + "step": 71050 + }, + { + "epoch": 2.743735279354415, + "grad_norm": 1.8878792524337769, + "learning_rate": 1.708688881166583e-05, + "loss": 0.1771, + "step": 71060 + }, + { + "epoch": 2.744121394648442, + "grad_norm": 0.4045495390892029, + "learning_rate": 1.7061147792064043e-05, + "loss": 0.0617, + "step": 71070 + }, + { + "epoch": 2.7445075099424687, + "grad_norm": 1.6910227537155151, + "learning_rate": 1.703540677246226e-05, + "loss": 0.2295, + "step": 71080 + }, + { + "epoch": 2.7448936252364957, + "grad_norm": 1.3064563274383545, + "learning_rate": 1.700966575286047e-05, + "loss": 0.189, + "step": 71090 + }, + { + "epoch": 2.7452797405305223, + "grad_norm": 0.26879480481147766, + "learning_rate": 1.6983924733258686e-05, + "loss": 0.2041, + "step": 71100 + }, + { + "epoch": 2.7456658558245493, + "grad_norm": 0.09380711615085602, + "learning_rate": 1.69581837136569e-05, + "loss": 0.2032, + "step": 71110 + }, + { + "epoch": 2.7460519711185762, + "grad_norm": 0.4047906696796417, + "learning_rate": 1.6932442694055113e-05, + "loss": 0.1808, + "step": 71120 + }, + { + "epoch": 2.746438086412603, + "grad_norm": 0.14023207128047943, + "learning_rate": 1.6906701674453326e-05, + "loss": 0.0477, + "step": 71130 + }, + { + "epoch": 2.7468242017066293, + "grad_norm": 0.09656676650047302, + "learning_rate": 
1.688096065485154e-05, + "loss": 0.1482, + "step": 71140 + }, + { + "epoch": 2.7472103170006563, + "grad_norm": 0.62394118309021, + "learning_rate": 1.6855219635249753e-05, + "loss": 0.0637, + "step": 71150 + }, + { + "epoch": 2.7475964322946833, + "grad_norm": 0.028015749529004097, + "learning_rate": 1.6829478615647966e-05, + "loss": 0.1396, + "step": 71160 + }, + { + "epoch": 2.74798254758871, + "grad_norm": 0.6252595782279968, + "learning_rate": 1.680373759604618e-05, + "loss": 0.1989, + "step": 71170 + }, + { + "epoch": 2.748368662882737, + "grad_norm": 1.6278966665267944, + "learning_rate": 1.6777996576444393e-05, + "loss": 0.2828, + "step": 71180 + }, + { + "epoch": 2.748754778176764, + "grad_norm": 0.7790352702140808, + "learning_rate": 1.675225555684261e-05, + "loss": 0.1542, + "step": 71190 + }, + { + "epoch": 2.7491408934707904, + "grad_norm": 0.41583356261253357, + "learning_rate": 1.672651453724082e-05, + "loss": 0.1853, + "step": 71200 + }, + { + "epoch": 2.749527008764817, + "grad_norm": 0.22601169347763062, + "learning_rate": 1.6700773517639036e-05, + "loss": 0.1429, + "step": 71210 + }, + { + "epoch": 2.749913124058844, + "grad_norm": 0.7268449068069458, + "learning_rate": 1.667503249803725e-05, + "loss": 0.156, + "step": 71220 + }, + { + "epoch": 2.750299239352871, + "grad_norm": 0.8059683442115784, + "learning_rate": 1.664929147843546e-05, + "loss": 0.1616, + "step": 71230 + }, + { + "epoch": 2.7506853546468975, + "grad_norm": 1.2625138759613037, + "learning_rate": 1.6623550458833676e-05, + "loss": 0.115, + "step": 71240 + }, + { + "epoch": 2.7510714699409244, + "grad_norm": 0.4710187315940857, + "learning_rate": 1.659780943923189e-05, + "loss": 0.1266, + "step": 71250 + }, + { + "epoch": 2.7514575852349514, + "grad_norm": 1.3373891115188599, + "learning_rate": 1.6572068419630103e-05, + "loss": 0.1423, + "step": 71260 + }, + { + "epoch": 2.751843700528978, + "grad_norm": 0.5606533288955688, + "learning_rate": 1.6546327400028316e-05, + "loss": 0.0912, + "step": 71270 + }, + { + "epoch": 2.7522298158230045, + "grad_norm": 1.2201918363571167, + "learning_rate": 1.652058638042653e-05, + "loss": 0.2536, + "step": 71280 + }, + { + "epoch": 2.7526159311170315, + "grad_norm": 1.017829418182373, + "learning_rate": 1.6494845360824743e-05, + "loss": 0.0917, + "step": 71290 + }, + { + "epoch": 2.7530020464110585, + "grad_norm": 0.6897231340408325, + "learning_rate": 1.6469104341222956e-05, + "loss": 0.1222, + "step": 71300 + }, + { + "epoch": 2.753388161705085, + "grad_norm": 5.349620342254639, + "learning_rate": 1.644336332162117e-05, + "loss": 0.1121, + "step": 71310 + }, + { + "epoch": 2.753774276999112, + "grad_norm": 1.6144895553588867, + "learning_rate": 1.6417622302019383e-05, + "loss": 0.146, + "step": 71320 + }, + { + "epoch": 2.7541603922931386, + "grad_norm": 0.5989459753036499, + "learning_rate": 1.63918812824176e-05, + "loss": 0.0598, + "step": 71330 + }, + { + "epoch": 2.7545465075871656, + "grad_norm": 0.7278910279273987, + "learning_rate": 1.636614026281581e-05, + "loss": 0.1559, + "step": 71340 + }, + { + "epoch": 2.754932622881192, + "grad_norm": 1.0088047981262207, + "learning_rate": 1.6340399243214026e-05, + "loss": 0.0795, + "step": 71350 + }, + { + "epoch": 2.755318738175219, + "grad_norm": 1.3781206607818604, + "learning_rate": 1.631465822361224e-05, + "loss": 0.0628, + "step": 71360 + }, + { + "epoch": 2.755704853469246, + "grad_norm": 1.498246431350708, + "learning_rate": 1.6288917204010453e-05, + "loss": 0.1908, + "step": 71370 + }, + { + "epoch": 
2.7560909687632726, + "grad_norm": 0.2840694785118103, + "learning_rate": 1.6263176184408666e-05, + "loss": 0.0357, + "step": 71380 + }, + { + "epoch": 2.7564770840572996, + "grad_norm": 0.6458057761192322, + "learning_rate": 1.623743516480688e-05, + "loss": 0.215, + "step": 71390 + }, + { + "epoch": 2.756863199351326, + "grad_norm": 0.42117947340011597, + "learning_rate": 1.6211694145205092e-05, + "loss": 0.1362, + "step": 71400 + }, + { + "epoch": 2.757249314645353, + "grad_norm": 3.0162267684936523, + "learning_rate": 1.6185953125603306e-05, + "loss": 0.1965, + "step": 71410 + }, + { + "epoch": 2.7576354299393797, + "grad_norm": 0.930474579334259, + "learning_rate": 1.616021210600152e-05, + "loss": 0.2395, + "step": 71420 + }, + { + "epoch": 2.7580215452334067, + "grad_norm": 0.7894459366798401, + "learning_rate": 1.6134471086399732e-05, + "loss": 0.2285, + "step": 71430 + }, + { + "epoch": 2.7584076605274337, + "grad_norm": 3.2045016288757324, + "learning_rate": 1.6108730066797946e-05, + "loss": 0.1819, + "step": 71440 + }, + { + "epoch": 2.7587937758214602, + "grad_norm": 0.7269306778907776, + "learning_rate": 1.608298904719616e-05, + "loss": 0.1594, + "step": 71450 + }, + { + "epoch": 2.7591798911154872, + "grad_norm": 1.333078145980835, + "learning_rate": 1.6057248027594372e-05, + "loss": 0.2131, + "step": 71460 + }, + { + "epoch": 2.7595660064095138, + "grad_norm": 0.6923009753227234, + "learning_rate": 1.603150700799259e-05, + "loss": 0.1174, + "step": 71470 + }, + { + "epoch": 2.7599521217035408, + "grad_norm": 3.510756254196167, + "learning_rate": 1.60057659883908e-05, + "loss": 0.1368, + "step": 71480 + }, + { + "epoch": 2.7603382369975673, + "grad_norm": 0.035045500844717026, + "learning_rate": 1.5980024968789016e-05, + "loss": 0.2163, + "step": 71490 + }, + { + "epoch": 2.7607243522915943, + "grad_norm": 0.9699954390525818, + "learning_rate": 1.595428394918723e-05, + "loss": 0.1279, + "step": 71500 + }, + { + "epoch": 2.7611104675856213, + "grad_norm": 1.3624379634857178, + "learning_rate": 1.5928542929585442e-05, + "loss": 0.1458, + "step": 71510 + }, + { + "epoch": 2.761496582879648, + "grad_norm": 2.585167646408081, + "learning_rate": 1.5902801909983656e-05, + "loss": 0.2321, + "step": 71520 + }, + { + "epoch": 2.761882698173675, + "grad_norm": 2.018916130065918, + "learning_rate": 1.587706089038187e-05, + "loss": 0.1301, + "step": 71530 + }, + { + "epoch": 2.7622688134677014, + "grad_norm": 0.1349097490310669, + "learning_rate": 1.5851319870780082e-05, + "loss": 0.1449, + "step": 71540 + }, + { + "epoch": 2.7626549287617284, + "grad_norm": 0.651360273361206, + "learning_rate": 1.5825578851178296e-05, + "loss": 0.0588, + "step": 71550 + }, + { + "epoch": 2.763041044055755, + "grad_norm": 1.081132411956787, + "learning_rate": 1.579983783157651e-05, + "loss": 0.2273, + "step": 71560 + }, + { + "epoch": 2.763427159349782, + "grad_norm": 0.9000619649887085, + "learning_rate": 1.5774096811974722e-05, + "loss": 0.126, + "step": 71570 + }, + { + "epoch": 2.763813274643809, + "grad_norm": 1.7903470993041992, + "learning_rate": 1.574835579237294e-05, + "loss": 0.1583, + "step": 71580 + }, + { + "epoch": 2.7641993899378354, + "grad_norm": 0.5552549362182617, + "learning_rate": 1.572261477277115e-05, + "loss": 0.1613, + "step": 71590 + }, + { + "epoch": 2.764585505231862, + "grad_norm": 0.24077007174491882, + "learning_rate": 1.5696873753169365e-05, + "loss": 0.1225, + "step": 71600 + }, + { + "epoch": 2.764971620525889, + "grad_norm": 2.2357699871063232, + "learning_rate": 
1.567113273356758e-05, + "loss": 0.1241, + "step": 71610 + }, + { + "epoch": 2.765357735819916, + "grad_norm": 0.49114760756492615, + "learning_rate": 1.564539171396579e-05, + "loss": 0.3685, + "step": 71620 + }, + { + "epoch": 2.7657438511139425, + "grad_norm": 0.5270382761955261, + "learning_rate": 1.5619650694364005e-05, + "loss": 0.231, + "step": 71630 + }, + { + "epoch": 2.7661299664079695, + "grad_norm": 1.494850993156433, + "learning_rate": 1.559390967476222e-05, + "loss": 0.1456, + "step": 71640 + }, + { + "epoch": 2.7665160817019965, + "grad_norm": 0.32450973987579346, + "learning_rate": 1.5568168655160432e-05, + "loss": 0.2541, + "step": 71650 + }, + { + "epoch": 2.766902196996023, + "grad_norm": 0.1868717074394226, + "learning_rate": 1.5542427635558645e-05, + "loss": 0.2315, + "step": 71660 + }, + { + "epoch": 2.7672883122900496, + "grad_norm": 1.8473451137542725, + "learning_rate": 1.551668661595686e-05, + "loss": 0.2918, + "step": 71670 + }, + { + "epoch": 2.7676744275840766, + "grad_norm": 1.959158182144165, + "learning_rate": 1.5490945596355072e-05, + "loss": 0.1368, + "step": 71680 + }, + { + "epoch": 2.7680605428781035, + "grad_norm": 1.8263370990753174, + "learning_rate": 1.5465204576753285e-05, + "loss": 0.2409, + "step": 71690 + }, + { + "epoch": 2.76844665817213, + "grad_norm": 0.5502326488494873, + "learning_rate": 1.54394635571515e-05, + "loss": 0.1233, + "step": 71700 + }, + { + "epoch": 2.768832773466157, + "grad_norm": 1.971820592880249, + "learning_rate": 1.5413722537549712e-05, + "loss": 0.2207, + "step": 71710 + }, + { + "epoch": 2.7692188887601836, + "grad_norm": 1.1263493299484253, + "learning_rate": 1.538798151794793e-05, + "loss": 0.1296, + "step": 71720 + }, + { + "epoch": 2.7696050040542106, + "grad_norm": 0.28595641255378723, + "learning_rate": 1.536224049834614e-05, + "loss": 0.1539, + "step": 71730 + }, + { + "epoch": 2.769991119348237, + "grad_norm": 1.1686561107635498, + "learning_rate": 1.5336499478744355e-05, + "loss": 0.2165, + "step": 71740 + }, + { + "epoch": 2.770377234642264, + "grad_norm": 0.6559491157531738, + "learning_rate": 1.531075845914257e-05, + "loss": 0.1464, + "step": 71750 + }, + { + "epoch": 2.770763349936291, + "grad_norm": 0.5865970253944397, + "learning_rate": 1.5285017439540782e-05, + "loss": 0.1076, + "step": 71760 + }, + { + "epoch": 2.7711494652303177, + "grad_norm": 1.1443376541137695, + "learning_rate": 1.5259276419938995e-05, + "loss": 0.1497, + "step": 71770 + }, + { + "epoch": 2.7715355805243447, + "grad_norm": 1.307176947593689, + "learning_rate": 1.523353540033721e-05, + "loss": 0.1435, + "step": 71780 + }, + { + "epoch": 2.7719216958183712, + "grad_norm": 0.014405300840735435, + "learning_rate": 1.5207794380735422e-05, + "loss": 0.1249, + "step": 71790 + }, + { + "epoch": 2.772307811112398, + "grad_norm": 1.4182642698287964, + "learning_rate": 1.5182053361133635e-05, + "loss": 0.1405, + "step": 71800 + }, + { + "epoch": 2.7726939264064248, + "grad_norm": 0.27843913435935974, + "learning_rate": 1.5156312341531847e-05, + "loss": 0.2303, + "step": 71810 + }, + { + "epoch": 2.7730800417004517, + "grad_norm": 2.4468517303466797, + "learning_rate": 1.5130571321930062e-05, + "loss": 0.1322, + "step": 71820 + }, + { + "epoch": 2.7734661569944787, + "grad_norm": 1.2800359725952148, + "learning_rate": 1.5104830302328277e-05, + "loss": 0.1375, + "step": 71830 + }, + { + "epoch": 2.7738522722885053, + "grad_norm": 0.3998767137527466, + "learning_rate": 1.5079089282726488e-05, + "loss": 0.1066, + "step": 71840 + }, + { + 
"epoch": 2.7742383875825323, + "grad_norm": 0.5334371328353882, + "learning_rate": 1.5053348263124703e-05, + "loss": 0.1777, + "step": 71850 + }, + { + "epoch": 2.774624502876559, + "grad_norm": 0.4894556403160095, + "learning_rate": 1.5027607243522918e-05, + "loss": 0.1134, + "step": 71860 + }, + { + "epoch": 2.775010618170586, + "grad_norm": 0.6681411266326904, + "learning_rate": 1.500186622392113e-05, + "loss": 0.1274, + "step": 71870 + }, + { + "epoch": 2.7753967334646124, + "grad_norm": 0.8148763179779053, + "learning_rate": 1.4976125204319343e-05, + "loss": 0.184, + "step": 71880 + }, + { + "epoch": 2.7757828487586393, + "grad_norm": 2.98481822013855, + "learning_rate": 1.4950384184717558e-05, + "loss": 0.1414, + "step": 71890 + }, + { + "epoch": 2.7761689640526663, + "grad_norm": 0.24513117969036102, + "learning_rate": 1.492464316511577e-05, + "loss": 0.1361, + "step": 71900 + }, + { + "epoch": 2.776555079346693, + "grad_norm": 1.320607304573059, + "learning_rate": 1.4898902145513985e-05, + "loss": 0.159, + "step": 71910 + }, + { + "epoch": 2.77694119464072, + "grad_norm": 0.09256679564714432, + "learning_rate": 1.48731611259122e-05, + "loss": 0.1017, + "step": 71920 + }, + { + "epoch": 2.7773273099347464, + "grad_norm": 0.888762891292572, + "learning_rate": 1.4847420106310412e-05, + "loss": 0.1709, + "step": 71930 + }, + { + "epoch": 2.7777134252287734, + "grad_norm": 1.0178054571151733, + "learning_rate": 1.4821679086708627e-05, + "loss": 0.1705, + "step": 71940 + }, + { + "epoch": 2.7780995405228, + "grad_norm": 1.133257508277893, + "learning_rate": 1.4795938067106838e-05, + "loss": 0.1384, + "step": 71950 + }, + { + "epoch": 2.778485655816827, + "grad_norm": 0.41183799505233765, + "learning_rate": 1.4770197047505051e-05, + "loss": 0.1516, + "step": 71960 + }, + { + "epoch": 2.778871771110854, + "grad_norm": 2.172168731689453, + "learning_rate": 1.4744456027903266e-05, + "loss": 0.1806, + "step": 71970 + }, + { + "epoch": 2.7792578864048805, + "grad_norm": 0.4734342098236084, + "learning_rate": 1.4718715008301478e-05, + "loss": 0.093, + "step": 71980 + }, + { + "epoch": 2.779644001698907, + "grad_norm": 0.38913142681121826, + "learning_rate": 1.4692973988699693e-05, + "loss": 0.1117, + "step": 71990 + }, + { + "epoch": 2.780030116992934, + "grad_norm": 1.2493480443954468, + "learning_rate": 1.4667232969097908e-05, + "loss": 0.2148, + "step": 72000 + }, + { + "epoch": 2.780416232286961, + "grad_norm": 0.6025747060775757, + "learning_rate": 1.464149194949612e-05, + "loss": 0.1705, + "step": 72010 + }, + { + "epoch": 2.7808023475809875, + "grad_norm": 1.4697037935256958, + "learning_rate": 1.4615750929894335e-05, + "loss": 0.1062, + "step": 72020 + }, + { + "epoch": 2.7811884628750145, + "grad_norm": 0.40200480818748474, + "learning_rate": 1.4590009910292548e-05, + "loss": 0.1152, + "step": 72030 + }, + { + "epoch": 2.7815745781690415, + "grad_norm": 0.5441505908966064, + "learning_rate": 1.456426889069076e-05, + "loss": 0.1488, + "step": 72040 + }, + { + "epoch": 2.781960693463068, + "grad_norm": 0.45176851749420166, + "learning_rate": 1.4538527871088975e-05, + "loss": 0.0965, + "step": 72050 + }, + { + "epoch": 2.7823468087570946, + "grad_norm": 0.7421501278877258, + "learning_rate": 1.4512786851487186e-05, + "loss": 0.1045, + "step": 72060 + }, + { + "epoch": 2.7827329240511216, + "grad_norm": 0.9444339871406555, + "learning_rate": 1.4487045831885401e-05, + "loss": 0.164, + "step": 72070 + }, + { + "epoch": 2.7831190393451486, + "grad_norm": 1.1744027137756348, + "learning_rate": 
1.4461304812283616e-05, + "loss": 0.1624, + "step": 72080 + }, + { + "epoch": 2.783505154639175, + "grad_norm": 1.2394273281097412, + "learning_rate": 1.4435563792681828e-05, + "loss": 0.2018, + "step": 72090 + }, + { + "epoch": 2.783891269933202, + "grad_norm": 1.3852754831314087, + "learning_rate": 1.4409822773080043e-05, + "loss": 0.3339, + "step": 72100 + }, + { + "epoch": 2.784277385227229, + "grad_norm": 0.030897267162799835, + "learning_rate": 1.4384081753478256e-05, + "loss": 0.1089, + "step": 72110 + }, + { + "epoch": 2.7846635005212557, + "grad_norm": 0.41380831599235535, + "learning_rate": 1.4358340733876468e-05, + "loss": 0.0955, + "step": 72120 + }, + { + "epoch": 2.785049615815282, + "grad_norm": 0.40943947434425354, + "learning_rate": 1.4332599714274683e-05, + "loss": 0.1309, + "step": 72130 + }, + { + "epoch": 2.785435731109309, + "grad_norm": 5.614469528198242, + "learning_rate": 1.4306858694672898e-05, + "loss": 0.2242, + "step": 72140 + }, + { + "epoch": 2.785821846403336, + "grad_norm": 1.1512348651885986, + "learning_rate": 1.428111767507111e-05, + "loss": 0.1386, + "step": 72150 + }, + { + "epoch": 2.7862079616973627, + "grad_norm": 1.467037558555603, + "learning_rate": 1.4255376655469324e-05, + "loss": 0.1531, + "step": 72160 + }, + { + "epoch": 2.7865940769913897, + "grad_norm": 0.7877001762390137, + "learning_rate": 1.422963563586754e-05, + "loss": 0.1488, + "step": 72170 + }, + { + "epoch": 2.7869801922854163, + "grad_norm": 0.7077778577804565, + "learning_rate": 1.4203894616265751e-05, + "loss": 0.1213, + "step": 72180 + }, + { + "epoch": 2.7873663075794433, + "grad_norm": 0.6909589171409607, + "learning_rate": 1.4178153596663964e-05, + "loss": 0.2211, + "step": 72190 + }, + { + "epoch": 2.78775242287347, + "grad_norm": 0.23613958060741425, + "learning_rate": 1.4152412577062176e-05, + "loss": 0.1896, + "step": 72200 + }, + { + "epoch": 2.788138538167497, + "grad_norm": 1.218856930732727, + "learning_rate": 1.4126671557460391e-05, + "loss": 0.0989, + "step": 72210 + }, + { + "epoch": 2.788524653461524, + "grad_norm": 1.9225094318389893, + "learning_rate": 1.4100930537858606e-05, + "loss": 0.1355, + "step": 72220 + }, + { + "epoch": 2.7889107687555503, + "grad_norm": 1.3572889566421509, + "learning_rate": 1.4075189518256818e-05, + "loss": 0.2708, + "step": 72230 + }, + { + "epoch": 2.7892968840495773, + "grad_norm": 1.4339534044265747, + "learning_rate": 1.4049448498655033e-05, + "loss": 0.173, + "step": 72240 + }, + { + "epoch": 2.789682999343604, + "grad_norm": 1.6303379535675049, + "learning_rate": 1.4023707479053248e-05, + "loss": 0.1791, + "step": 72250 + }, + { + "epoch": 2.790069114637631, + "grad_norm": 0.25583216547966003, + "learning_rate": 1.399796645945146e-05, + "loss": 0.0694, + "step": 72260 + }, + { + "epoch": 2.7904552299316574, + "grad_norm": 0.29900923371315, + "learning_rate": 1.3972225439849673e-05, + "loss": 0.0942, + "step": 72270 + }, + { + "epoch": 2.7908413452256844, + "grad_norm": 0.4300585687160492, + "learning_rate": 1.3946484420247888e-05, + "loss": 0.1378, + "step": 72280 + }, + { + "epoch": 2.7912274605197114, + "grad_norm": 2.40873384475708, + "learning_rate": 1.39207434006461e-05, + "loss": 0.2227, + "step": 72290 + }, + { + "epoch": 2.791613575813738, + "grad_norm": 1.8558450937271118, + "learning_rate": 1.3895002381044314e-05, + "loss": 0.1489, + "step": 72300 + }, + { + "epoch": 2.791999691107765, + "grad_norm": 0.7937175631523132, + "learning_rate": 1.386926136144253e-05, + "loss": 0.2045, + "step": 72310 + }, + { + "epoch": 
2.7923858064017915, + "grad_norm": 1.4006129503250122, + "learning_rate": 1.3843520341840741e-05, + "loss": 0.205, + "step": 72320 + }, + { + "epoch": 2.7927719216958184, + "grad_norm": 0.765356183052063, + "learning_rate": 1.3817779322238956e-05, + "loss": 0.0647, + "step": 72330 + }, + { + "epoch": 2.793158036989845, + "grad_norm": 0.9293017983436584, + "learning_rate": 1.3792038302637167e-05, + "loss": 0.149, + "step": 72340 + }, + { + "epoch": 2.793544152283872, + "grad_norm": 0.1647326946258545, + "learning_rate": 1.376629728303538e-05, + "loss": 0.0957, + "step": 72350 + }, + { + "epoch": 2.793930267577899, + "grad_norm": 1.7377187013626099, + "learning_rate": 1.3740556263433596e-05, + "loss": 0.1945, + "step": 72360 + }, + { + "epoch": 2.7943163828719255, + "grad_norm": 1.4326138496398926, + "learning_rate": 1.3714815243831807e-05, + "loss": 0.1362, + "step": 72370 + }, + { + "epoch": 2.7947024981659525, + "grad_norm": 0.07872216403484344, + "learning_rate": 1.3689074224230022e-05, + "loss": 0.1186, + "step": 72380 + }, + { + "epoch": 2.795088613459979, + "grad_norm": 2.5933990478515625, + "learning_rate": 1.3663333204628237e-05, + "loss": 0.3499, + "step": 72390 + }, + { + "epoch": 2.795474728754006, + "grad_norm": 0.8128255009651184, + "learning_rate": 1.3637592185026449e-05, + "loss": 0.1798, + "step": 72400 + }, + { + "epoch": 2.7958608440480326, + "grad_norm": 1.3631271123886108, + "learning_rate": 1.3611851165424664e-05, + "loss": 0.1602, + "step": 72410 + }, + { + "epoch": 2.7962469593420596, + "grad_norm": 0.2131202220916748, + "learning_rate": 1.3586110145822877e-05, + "loss": 0.2031, + "step": 72420 + }, + { + "epoch": 2.7966330746360866, + "grad_norm": 2.3542559146881104, + "learning_rate": 1.3560369126221089e-05, + "loss": 0.2783, + "step": 72430 + }, + { + "epoch": 2.797019189930113, + "grad_norm": 1.075270175933838, + "learning_rate": 1.3534628106619304e-05, + "loss": 0.2014, + "step": 72440 + }, + { + "epoch": 2.7974053052241397, + "grad_norm": 1.6369863748550415, + "learning_rate": 1.3508887087017516e-05, + "loss": 0.1512, + "step": 72450 + }, + { + "epoch": 2.7977914205181666, + "grad_norm": 2.450411081314087, + "learning_rate": 1.348314606741573e-05, + "loss": 0.1899, + "step": 72460 + }, + { + "epoch": 2.7981775358121936, + "grad_norm": 0.5244776010513306, + "learning_rate": 1.3457405047813946e-05, + "loss": 0.1652, + "step": 72470 + }, + { + "epoch": 2.79856365110622, + "grad_norm": 1.206304907798767, + "learning_rate": 1.3431664028212157e-05, + "loss": 0.1695, + "step": 72480 + }, + { + "epoch": 2.798949766400247, + "grad_norm": 1.7752883434295654, + "learning_rate": 1.3405923008610372e-05, + "loss": 0.2092, + "step": 72490 + }, + { + "epoch": 2.799335881694274, + "grad_norm": 2.458888530731201, + "learning_rate": 1.3380181989008586e-05, + "loss": 0.1963, + "step": 72500 + }, + { + "epoch": 2.7997219969883007, + "grad_norm": 0.24970018863677979, + "learning_rate": 1.3354440969406797e-05, + "loss": 0.1438, + "step": 72510 + }, + { + "epoch": 2.8001081122823273, + "grad_norm": 1.6026058197021484, + "learning_rate": 1.3328699949805012e-05, + "loss": 0.1702, + "step": 72520 + }, + { + "epoch": 2.8004942275763542, + "grad_norm": 0.6461538076400757, + "learning_rate": 1.3302958930203227e-05, + "loss": 0.1725, + "step": 72530 + }, + { + "epoch": 2.8008803428703812, + "grad_norm": 0.30602967739105225, + "learning_rate": 1.3277217910601439e-05, + "loss": 0.1018, + "step": 72540 + }, + { + "epoch": 2.801266458164408, + "grad_norm": 0.5502751469612122, + "learning_rate": 
1.3251476890999654e-05, + "loss": 0.1793, + "step": 72550 + }, + { + "epoch": 2.8016525734584348, + "grad_norm": 0.2644082307815552, + "learning_rate": 1.3225735871397869e-05, + "loss": 0.0423, + "step": 72560 + }, + { + "epoch": 2.8020386887524618, + "grad_norm": 2.0505154132843018, + "learning_rate": 1.319999485179608e-05, + "loss": 0.2418, + "step": 72570 + }, + { + "epoch": 2.8024248040464883, + "grad_norm": 3.0608208179473877, + "learning_rate": 1.3174253832194294e-05, + "loss": 0.1613, + "step": 72580 + }, + { + "epoch": 2.802810919340515, + "grad_norm": 0.05718081071972847, + "learning_rate": 1.3148512812592505e-05, + "loss": 0.132, + "step": 72590 + }, + { + "epoch": 2.803197034634542, + "grad_norm": 2.007830858230591, + "learning_rate": 1.312277179299072e-05, + "loss": 0.138, + "step": 72600 + }, + { + "epoch": 2.803583149928569, + "grad_norm": 0.4181762933731079, + "learning_rate": 1.3097030773388935e-05, + "loss": 0.0742, + "step": 72610 + }, + { + "epoch": 2.8039692652225954, + "grad_norm": 1.4194786548614502, + "learning_rate": 1.3071289753787147e-05, + "loss": 0.1517, + "step": 72620 + }, + { + "epoch": 2.8043553805166224, + "grad_norm": 0.03250798210501671, + "learning_rate": 1.3045548734185362e-05, + "loss": 0.1, + "step": 72630 + }, + { + "epoch": 2.804741495810649, + "grad_norm": 0.983828067779541, + "learning_rate": 1.3019807714583577e-05, + "loss": 0.1765, + "step": 72640 + }, + { + "epoch": 2.805127611104676, + "grad_norm": 0.4670206904411316, + "learning_rate": 1.2994066694981789e-05, + "loss": 0.0954, + "step": 72650 + }, + { + "epoch": 2.8055137263987024, + "grad_norm": 0.09260097146034241, + "learning_rate": 1.2968325675380002e-05, + "loss": 0.119, + "step": 72660 + }, + { + "epoch": 2.8058998416927294, + "grad_norm": 2.12126088142395, + "learning_rate": 1.2942584655778217e-05, + "loss": 0.1673, + "step": 72670 + }, + { + "epoch": 2.8062859569867564, + "grad_norm": 0.11968256533145905, + "learning_rate": 1.2916843636176429e-05, + "loss": 0.0941, + "step": 72680 + }, + { + "epoch": 2.806672072280783, + "grad_norm": 1.229608416557312, + "learning_rate": 1.2891102616574644e-05, + "loss": 0.2319, + "step": 72690 + }, + { + "epoch": 2.80705818757481, + "grad_norm": 0.7404507994651794, + "learning_rate": 1.2865361596972859e-05, + "loss": 0.1878, + "step": 72700 + }, + { + "epoch": 2.8074443028688365, + "grad_norm": 0.15742874145507812, + "learning_rate": 1.283962057737107e-05, + "loss": 0.2041, + "step": 72710 + }, + { + "epoch": 2.8078304181628635, + "grad_norm": 1.860520601272583, + "learning_rate": 1.2813879557769285e-05, + "loss": 0.1575, + "step": 72720 + }, + { + "epoch": 2.80821653345689, + "grad_norm": 2.309978723526001, + "learning_rate": 1.2788138538167497e-05, + "loss": 0.104, + "step": 72730 + }, + { + "epoch": 2.808602648750917, + "grad_norm": 0.4069162905216217, + "learning_rate": 1.276239751856571e-05, + "loss": 0.1167, + "step": 72740 + }, + { + "epoch": 2.808988764044944, + "grad_norm": 0.9853174090385437, + "learning_rate": 1.2736656498963925e-05, + "loss": 0.1826, + "step": 72750 + }, + { + "epoch": 2.8093748793389706, + "grad_norm": 0.29024359583854675, + "learning_rate": 1.2710915479362137e-05, + "loss": 0.1662, + "step": 72760 + }, + { + "epoch": 2.8097609946329976, + "grad_norm": 1.698085069656372, + "learning_rate": 1.2685174459760352e-05, + "loss": 0.2987, + "step": 72770 + }, + { + "epoch": 2.810147109927024, + "grad_norm": 0.6112163066864014, + "learning_rate": 1.2659433440158567e-05, + "loss": 0.1074, + "step": 72780 + }, + { + "epoch": 
2.810533225221051, + "grad_norm": 0.6659224033355713, + "learning_rate": 1.2633692420556778e-05, + "loss": 0.1514, + "step": 72790 + }, + { + "epoch": 2.8109193405150776, + "grad_norm": 0.40598243474960327, + "learning_rate": 1.2607951400954993e-05, + "loss": 0.1022, + "step": 72800 + }, + { + "epoch": 2.8113054558091046, + "grad_norm": 0.3016485571861267, + "learning_rate": 1.2582210381353207e-05, + "loss": 0.1226, + "step": 72810 + }, + { + "epoch": 2.8116915711031316, + "grad_norm": 0.8959118127822876, + "learning_rate": 1.2556469361751418e-05, + "loss": 0.1283, + "step": 72820 + }, + { + "epoch": 2.812077686397158, + "grad_norm": 0.1264275461435318, + "learning_rate": 1.2530728342149633e-05, + "loss": 0.1191, + "step": 72830 + }, + { + "epoch": 2.812463801691185, + "grad_norm": 0.06748595088720322, + "learning_rate": 1.2504987322547848e-05, + "loss": 0.1728, + "step": 72840 + }, + { + "epoch": 2.8128499169852117, + "grad_norm": 1.8376634120941162, + "learning_rate": 1.247924630294606e-05, + "loss": 0.1773, + "step": 72850 + }, + { + "epoch": 2.8132360322792387, + "grad_norm": 1.0151314735412598, + "learning_rate": 1.2453505283344275e-05, + "loss": 0.0979, + "step": 72860 + }, + { + "epoch": 2.8136221475732652, + "grad_norm": 0.274681955575943, + "learning_rate": 1.2427764263742488e-05, + "loss": 0.1806, + "step": 72870 + }, + { + "epoch": 2.814008262867292, + "grad_norm": 3.2869608402252197, + "learning_rate": 1.2402023244140702e-05, + "loss": 0.1469, + "step": 72880 + }, + { + "epoch": 2.814394378161319, + "grad_norm": 0.7580883502960205, + "learning_rate": 1.2376282224538915e-05, + "loss": 0.175, + "step": 72890 + }, + { + "epoch": 2.8147804934553458, + "grad_norm": 0.26281633973121643, + "learning_rate": 1.2350541204937128e-05, + "loss": 0.1801, + "step": 72900 + }, + { + "epoch": 2.8151666087493723, + "grad_norm": 0.5014416575431824, + "learning_rate": 1.2324800185335341e-05, + "loss": 0.1159, + "step": 72910 + }, + { + "epoch": 2.8155527240433993, + "grad_norm": 0.5112303495407104, + "learning_rate": 1.2299059165733555e-05, + "loss": 0.0988, + "step": 72920 + }, + { + "epoch": 2.8159388393374263, + "grad_norm": 1.928898811340332, + "learning_rate": 1.227331814613177e-05, + "loss": 0.1261, + "step": 72930 + }, + { + "epoch": 2.816324954631453, + "grad_norm": 2.8064088821411133, + "learning_rate": 1.2247577126529983e-05, + "loss": 0.1811, + "step": 72940 + }, + { + "epoch": 2.81671106992548, + "grad_norm": 1.5859709978103638, + "learning_rate": 1.2221836106928196e-05, + "loss": 0.2235, + "step": 72950 + }, + { + "epoch": 2.817097185219507, + "grad_norm": 0.6035090684890747, + "learning_rate": 1.219609508732641e-05, + "loss": 0.0931, + "step": 72960 + }, + { + "epoch": 2.8174833005135334, + "grad_norm": 0.19035843014717102, + "learning_rate": 1.2170354067724623e-05, + "loss": 0.182, + "step": 72970 + }, + { + "epoch": 2.81786941580756, + "grad_norm": 0.2380947321653366, + "learning_rate": 1.2144613048122836e-05, + "loss": 0.0744, + "step": 72980 + }, + { + "epoch": 2.818255531101587, + "grad_norm": 0.5143213868141174, + "learning_rate": 1.211887202852105e-05, + "loss": 0.1252, + "step": 72990 + }, + { + "epoch": 2.818641646395614, + "grad_norm": 1.6957656145095825, + "learning_rate": 1.2093131008919265e-05, + "loss": 0.1701, + "step": 73000 + }, + { + "epoch": 2.8190277616896404, + "grad_norm": 0.31234198808670044, + "learning_rate": 1.2067389989317478e-05, + "loss": 0.0638, + "step": 73010 + }, + { + "epoch": 2.8194138769836674, + "grad_norm": 1.2418551445007324, + "learning_rate": 
1.2041648969715691e-05, + "loss": 0.1972, + "step": 73020 + }, + { + "epoch": 2.819799992277694, + "grad_norm": 0.08601387590169907, + "learning_rate": 1.2015907950113905e-05, + "loss": 0.1808, + "step": 73030 + }, + { + "epoch": 2.820186107571721, + "grad_norm": 0.7416751384735107, + "learning_rate": 1.1990166930512118e-05, + "loss": 0.0543, + "step": 73040 + }, + { + "epoch": 2.8205722228657475, + "grad_norm": 0.957653284072876, + "learning_rate": 1.1964425910910331e-05, + "loss": 0.125, + "step": 73050 + }, + { + "epoch": 2.8209583381597745, + "grad_norm": 0.34786149859428406, + "learning_rate": 1.1938684891308545e-05, + "loss": 0.2388, + "step": 73060 + }, + { + "epoch": 2.8213444534538015, + "grad_norm": 1.0569926500320435, + "learning_rate": 1.1912943871706758e-05, + "loss": 0.1199, + "step": 73070 + }, + { + "epoch": 2.821730568747828, + "grad_norm": 1.948347568511963, + "learning_rate": 1.1887202852104973e-05, + "loss": 0.195, + "step": 73080 + }, + { + "epoch": 2.822116684041855, + "grad_norm": 0.18845289945602417, + "learning_rate": 1.1861461832503186e-05, + "loss": 0.295, + "step": 73090 + }, + { + "epoch": 2.8225027993358816, + "grad_norm": 1.7377262115478516, + "learning_rate": 1.18357208129014e-05, + "loss": 0.3067, + "step": 73100 + }, + { + "epoch": 2.8228889146299085, + "grad_norm": 1.232576847076416, + "learning_rate": 1.1809979793299614e-05, + "loss": 0.1849, + "step": 73110 + }, + { + "epoch": 2.823275029923935, + "grad_norm": 1.6318284273147583, + "learning_rate": 1.1784238773697826e-05, + "loss": 0.0626, + "step": 73120 + }, + { + "epoch": 2.823661145217962, + "grad_norm": 0.5187623500823975, + "learning_rate": 1.175849775409604e-05, + "loss": 0.1255, + "step": 73130 + }, + { + "epoch": 2.824047260511989, + "grad_norm": 2.4732282161712646, + "learning_rate": 1.1732756734494253e-05, + "loss": 0.1433, + "step": 73140 + }, + { + "epoch": 2.8244333758060156, + "grad_norm": 2.080509901046753, + "learning_rate": 1.1707015714892468e-05, + "loss": 0.1533, + "step": 73150 + }, + { + "epoch": 2.8248194911000426, + "grad_norm": 0.61173415184021, + "learning_rate": 1.1681274695290681e-05, + "loss": 0.0807, + "step": 73160 + }, + { + "epoch": 2.825205606394069, + "grad_norm": 0.1210317462682724, + "learning_rate": 1.1655533675688894e-05, + "loss": 0.1433, + "step": 73170 + }, + { + "epoch": 2.825591721688096, + "grad_norm": 0.699754536151886, + "learning_rate": 1.162979265608711e-05, + "loss": 0.091, + "step": 73180 + }, + { + "epoch": 2.8259778369821227, + "grad_norm": 1.1935967206954956, + "learning_rate": 1.1604051636485323e-05, + "loss": 0.1567, + "step": 73190 + }, + { + "epoch": 2.8263639522761497, + "grad_norm": 0.14032036066055298, + "learning_rate": 1.1578310616883534e-05, + "loss": 0.1283, + "step": 73200 + }, + { + "epoch": 2.8267500675701767, + "grad_norm": 0.1003938615322113, + "learning_rate": 1.1552569597281748e-05, + "loss": 0.241, + "step": 73210 + }, + { + "epoch": 2.827136182864203, + "grad_norm": 0.08814160525798798, + "learning_rate": 1.1526828577679963e-05, + "loss": 0.1191, + "step": 73220 + }, + { + "epoch": 2.82752229815823, + "grad_norm": 2.224803924560547, + "learning_rate": 1.1501087558078176e-05, + "loss": 0.1395, + "step": 73230 + }, + { + "epoch": 2.8279084134522567, + "grad_norm": 0.7424294948577881, + "learning_rate": 1.147534653847639e-05, + "loss": 0.2086, + "step": 73240 + }, + { + "epoch": 2.8282945287462837, + "grad_norm": 2.4858903884887695, + "learning_rate": 1.1449605518874604e-05, + "loss": 0.3264, + "step": 73250 + }, + { + "epoch": 
2.8286806440403103, + "grad_norm": 0.09597936272621155, + "learning_rate": 1.1423864499272818e-05, + "loss": 0.1849, + "step": 73260 + }, + { + "epoch": 2.8290667593343373, + "grad_norm": 0.14344246685504913, + "learning_rate": 1.1398123479671031e-05, + "loss": 0.0898, + "step": 73270 + }, + { + "epoch": 2.8294528746283643, + "grad_norm": 1.6673258543014526, + "learning_rate": 1.1372382460069242e-05, + "loss": 0.1218, + "step": 73280 + }, + { + "epoch": 2.829838989922391, + "grad_norm": 0.7718226313591003, + "learning_rate": 1.1346641440467457e-05, + "loss": 0.246, + "step": 73290 + }, + { + "epoch": 2.8302251052164173, + "grad_norm": 1.073569416999817, + "learning_rate": 1.132090042086567e-05, + "loss": 0.1469, + "step": 73300 + }, + { + "epoch": 2.8306112205104443, + "grad_norm": 0.6750389933586121, + "learning_rate": 1.1295159401263884e-05, + "loss": 0.0572, + "step": 73310 + }, + { + "epoch": 2.8309973358044713, + "grad_norm": 0.3106151819229126, + "learning_rate": 1.1269418381662099e-05, + "loss": 0.1877, + "step": 73320 + }, + { + "epoch": 2.831383451098498, + "grad_norm": 2.519272804260254, + "learning_rate": 1.1243677362060312e-05, + "loss": 0.3173, + "step": 73330 + }, + { + "epoch": 2.831769566392525, + "grad_norm": 0.1696769744157791, + "learning_rate": 1.1217936342458526e-05, + "loss": 0.0998, + "step": 73340 + }, + { + "epoch": 2.832155681686552, + "grad_norm": 1.1009591817855835, + "learning_rate": 1.1192195322856739e-05, + "loss": 0.1534, + "step": 73350 + }, + { + "epoch": 2.8325417969805784, + "grad_norm": 0.5018852949142456, + "learning_rate": 1.1166454303254952e-05, + "loss": 0.1132, + "step": 73360 + }, + { + "epoch": 2.832927912274605, + "grad_norm": 0.75879967212677, + "learning_rate": 1.1140713283653166e-05, + "loss": 0.2494, + "step": 73370 + }, + { + "epoch": 2.833314027568632, + "grad_norm": 0.6770151853561401, + "learning_rate": 1.1114972264051379e-05, + "loss": 0.127, + "step": 73380 + }, + { + "epoch": 2.833700142862659, + "grad_norm": 0.7802016139030457, + "learning_rate": 1.1089231244449594e-05, + "loss": 0.0463, + "step": 73390 + }, + { + "epoch": 2.8340862581566855, + "grad_norm": 0.6405506134033203, + "learning_rate": 1.1063490224847807e-05, + "loss": 0.1039, + "step": 73400 + }, + { + "epoch": 2.8344723734507125, + "grad_norm": 1.5618057250976562, + "learning_rate": 1.103774920524602e-05, + "loss": 0.2303, + "step": 73410 + }, + { + "epoch": 2.8348584887447394, + "grad_norm": 0.07669465243816376, + "learning_rate": 1.1012008185644234e-05, + "loss": 0.0619, + "step": 73420 + }, + { + "epoch": 2.835244604038766, + "grad_norm": 1.495162010192871, + "learning_rate": 1.0986267166042447e-05, + "loss": 0.3976, + "step": 73430 + }, + { + "epoch": 2.8356307193327925, + "grad_norm": 1.0150856971740723, + "learning_rate": 1.096052614644066e-05, + "loss": 0.1209, + "step": 73440 + }, + { + "epoch": 2.8360168346268195, + "grad_norm": 0.0810672789812088, + "learning_rate": 1.0934785126838874e-05, + "loss": 0.1443, + "step": 73450 + }, + { + "epoch": 2.8364029499208465, + "grad_norm": 1.9854507446289062, + "learning_rate": 1.0909044107237089e-05, + "loss": 0.0724, + "step": 73460 + }, + { + "epoch": 2.836789065214873, + "grad_norm": 1.1847221851348877, + "learning_rate": 1.0883303087635302e-05, + "loss": 0.1215, + "step": 73470 + }, + { + "epoch": 2.8371751805089, + "grad_norm": 0.1890803575515747, + "learning_rate": 1.0857562068033515e-05, + "loss": 0.0693, + "step": 73480 + }, + { + "epoch": 2.8375612958029266, + "grad_norm": 1.4246773719787598, + "learning_rate": 
1.0831821048431729e-05, + "loss": 0.1458, + "step": 73490 + }, + { + "epoch": 2.8379474110969536, + "grad_norm": 0.9759969115257263, + "learning_rate": 1.0806080028829944e-05, + "loss": 0.0894, + "step": 73500 + }, + { + "epoch": 2.83833352639098, + "grad_norm": 0.06872682273387909, + "learning_rate": 1.0780339009228155e-05, + "loss": 0.0911, + "step": 73510 + }, + { + "epoch": 2.838719641685007, + "grad_norm": 0.9503278732299805, + "learning_rate": 1.0754597989626369e-05, + "loss": 0.0367, + "step": 73520 + }, + { + "epoch": 2.839105756979034, + "grad_norm": 0.08986163884401321, + "learning_rate": 1.0728856970024582e-05, + "loss": 0.113, + "step": 73530 + }, + { + "epoch": 2.8394918722730607, + "grad_norm": 0.4085181951522827, + "learning_rate": 1.0703115950422797e-05, + "loss": 0.0858, + "step": 73540 + }, + { + "epoch": 2.8398779875670876, + "grad_norm": 2.045522689819336, + "learning_rate": 1.067737493082101e-05, + "loss": 0.0585, + "step": 73550 + }, + { + "epoch": 2.840264102861114, + "grad_norm": 1.701407551765442, + "learning_rate": 1.0651633911219224e-05, + "loss": 0.2093, + "step": 73560 + }, + { + "epoch": 2.840650218155141, + "grad_norm": 0.5299584865570068, + "learning_rate": 1.0625892891617439e-05, + "loss": 0.1673, + "step": 73570 + }, + { + "epoch": 2.8410363334491677, + "grad_norm": 1.289556622505188, + "learning_rate": 1.0600151872015652e-05, + "loss": 0.0876, + "step": 73580 + }, + { + "epoch": 2.8414224487431947, + "grad_norm": 1.0992724895477295, + "learning_rate": 1.0574410852413864e-05, + "loss": 0.1526, + "step": 73590 + }, + { + "epoch": 2.8418085640372217, + "grad_norm": 1.4800548553466797, + "learning_rate": 1.0548669832812077e-05, + "loss": 0.2792, + "step": 73600 + }, + { + "epoch": 2.8421946793312483, + "grad_norm": 0.7440020442008972, + "learning_rate": 1.0522928813210292e-05, + "loss": 0.0838, + "step": 73610 + }, + { + "epoch": 2.8425807946252752, + "grad_norm": 0.5010614395141602, + "learning_rate": 1.0497187793608505e-05, + "loss": 0.2615, + "step": 73620 + }, + { + "epoch": 2.842966909919302, + "grad_norm": 0.6573401093482971, + "learning_rate": 1.0471446774006719e-05, + "loss": 0.1924, + "step": 73630 + }, + { + "epoch": 2.8433530252133288, + "grad_norm": 1.2293144464492798, + "learning_rate": 1.0445705754404934e-05, + "loss": 0.1723, + "step": 73640 + }, + { + "epoch": 2.8437391405073553, + "grad_norm": 1.2916191816329956, + "learning_rate": 1.0419964734803147e-05, + "loss": 0.2163, + "step": 73650 + }, + { + "epoch": 2.8441252558013823, + "grad_norm": 0.48834675550460815, + "learning_rate": 1.039422371520136e-05, + "loss": 0.3001, + "step": 73660 + }, + { + "epoch": 2.8445113710954093, + "grad_norm": 0.4397851228713989, + "learning_rate": 1.0368482695599572e-05, + "loss": 0.1354, + "step": 73670 + }, + { + "epoch": 2.844897486389436, + "grad_norm": 1.6258771419525146, + "learning_rate": 1.0342741675997787e-05, + "loss": 0.1677, + "step": 73680 + }, + { + "epoch": 2.845283601683463, + "grad_norm": 0.5988297462463379, + "learning_rate": 1.0317000656396e-05, + "loss": 0.0911, + "step": 73690 + }, + { + "epoch": 2.8456697169774894, + "grad_norm": 2.7825276851654053, + "learning_rate": 1.0291259636794213e-05, + "loss": 0.2635, + "step": 73700 + }, + { + "epoch": 2.8460558322715164, + "grad_norm": 0.0774473026394844, + "learning_rate": 1.0265518617192428e-05, + "loss": 0.1239, + "step": 73710 + }, + { + "epoch": 2.846441947565543, + "grad_norm": 0.22317875921726227, + "learning_rate": 1.0239777597590642e-05, + "loss": 0.2164, + "step": 73720 + }, + { + 
"epoch": 2.84682806285957, + "grad_norm": 0.15649321675300598, + "learning_rate": 1.0214036577988855e-05, + "loss": 0.0442, + "step": 73730 + }, + { + "epoch": 2.847214178153597, + "grad_norm": 1.7008354663848877, + "learning_rate": 1.0188295558387068e-05, + "loss": 0.0829, + "step": 73740 + }, + { + "epoch": 2.8476002934476234, + "grad_norm": 0.9527981281280518, + "learning_rate": 1.0162554538785282e-05, + "loss": 0.1655, + "step": 73750 + }, + { + "epoch": 2.84798640874165, + "grad_norm": 0.27073028683662415, + "learning_rate": 1.0136813519183495e-05, + "loss": 0.0909, + "step": 73760 + }, + { + "epoch": 2.848372524035677, + "grad_norm": 0.6584774851799011, + "learning_rate": 1.0111072499581708e-05, + "loss": 0.1365, + "step": 73770 + }, + { + "epoch": 2.848758639329704, + "grad_norm": 0.06433244794607162, + "learning_rate": 1.0085331479979923e-05, + "loss": 0.0936, + "step": 73780 + }, + { + "epoch": 2.8491447546237305, + "grad_norm": 0.31640946865081787, + "learning_rate": 1.0059590460378137e-05, + "loss": 0.053, + "step": 73790 + }, + { + "epoch": 2.8495308699177575, + "grad_norm": 1.378275752067566, + "learning_rate": 1.003384944077635e-05, + "loss": 0.2072, + "step": 73800 + }, + { + "epoch": 2.8499169852117845, + "grad_norm": 0.40181395411491394, + "learning_rate": 1.0008108421174563e-05, + "loss": 0.0458, + "step": 73810 + }, + { + "epoch": 2.850303100505811, + "grad_norm": 0.6035460233688354, + "learning_rate": 9.982367401572777e-06, + "loss": 0.2126, + "step": 73820 + }, + { + "epoch": 2.8506892157998376, + "grad_norm": 1.581748366355896, + "learning_rate": 9.95662638197099e-06, + "loss": 0.2245, + "step": 73830 + }, + { + "epoch": 2.8510753310938646, + "grad_norm": 0.15285348892211914, + "learning_rate": 9.930885362369203e-06, + "loss": 0.2124, + "step": 73840 + }, + { + "epoch": 2.8514614463878916, + "grad_norm": 0.5655555725097656, + "learning_rate": 9.905144342767418e-06, + "loss": 0.215, + "step": 73850 + }, + { + "epoch": 2.851847561681918, + "grad_norm": 0.91652512550354, + "learning_rate": 9.879403323165631e-06, + "loss": 0.0512, + "step": 73860 + }, + { + "epoch": 2.852233676975945, + "grad_norm": 0.4889742136001587, + "learning_rate": 9.853662303563845e-06, + "loss": 0.0565, + "step": 73870 + }, + { + "epoch": 2.852619792269972, + "grad_norm": 0.5656816959381104, + "learning_rate": 9.827921283962058e-06, + "loss": 0.1392, + "step": 73880 + }, + { + "epoch": 2.8530059075639986, + "grad_norm": 0.5749841332435608, + "learning_rate": 9.802180264360273e-06, + "loss": 0.1548, + "step": 73890 + }, + { + "epoch": 2.853392022858025, + "grad_norm": 0.40446341037750244, + "learning_rate": 9.776439244758485e-06, + "loss": 0.1078, + "step": 73900 + }, + { + "epoch": 2.853778138152052, + "grad_norm": 0.7668073773384094, + "learning_rate": 9.750698225156698e-06, + "loss": 0.1282, + "step": 73910 + }, + { + "epoch": 2.854164253446079, + "grad_norm": 1.973522424697876, + "learning_rate": 9.724957205554913e-06, + "loss": 0.1002, + "step": 73920 + }, + { + "epoch": 2.8545503687401057, + "grad_norm": 3.882335662841797, + "learning_rate": 9.699216185953126e-06, + "loss": 0.3315, + "step": 73930 + }, + { + "epoch": 2.8549364840341327, + "grad_norm": 3.6236727237701416, + "learning_rate": 9.67347516635134e-06, + "loss": 0.0749, + "step": 73940 + }, + { + "epoch": 2.8553225993281592, + "grad_norm": 3.9697139263153076, + "learning_rate": 9.647734146749553e-06, + "loss": 0.165, + "step": 73950 + }, + { + "epoch": 2.8557087146221862, + "grad_norm": 0.2589983344078064, + "learning_rate": 
9.621993127147768e-06, + "loss": 0.0949, + "step": 73960 + }, + { + "epoch": 2.8560948299162128, + "grad_norm": 1.9424326419830322, + "learning_rate": 9.596252107545981e-06, + "loss": 0.194, + "step": 73970 + }, + { + "epoch": 2.8564809452102398, + "grad_norm": 1.8792887926101685, + "learning_rate": 9.570511087944193e-06, + "loss": 0.1259, + "step": 73980 + }, + { + "epoch": 2.8568670605042668, + "grad_norm": 0.29986900091171265, + "learning_rate": 9.544770068342406e-06, + "loss": 0.0672, + "step": 73990 + }, + { + "epoch": 2.8572531757982933, + "grad_norm": 0.5949634909629822, + "learning_rate": 9.519029048740621e-06, + "loss": 0.1991, + "step": 74000 + }, + { + "epoch": 2.8576392910923203, + "grad_norm": 0.7105257511138916, + "learning_rate": 9.493288029138835e-06, + "loss": 0.1142, + "step": 74010 + }, + { + "epoch": 2.858025406386347, + "grad_norm": 0.1612206995487213, + "learning_rate": 9.467547009537048e-06, + "loss": 0.0743, + "step": 74020 + }, + { + "epoch": 2.858411521680374, + "grad_norm": 0.96357262134552, + "learning_rate": 9.441805989935263e-06, + "loss": 0.1514, + "step": 74030 + }, + { + "epoch": 2.8587976369744004, + "grad_norm": 0.10844029486179352, + "learning_rate": 9.416064970333476e-06, + "loss": 0.0921, + "step": 74040 + }, + { + "epoch": 2.8591837522684274, + "grad_norm": 1.9250043630599976, + "learning_rate": 9.39032395073169e-06, + "loss": 0.2404, + "step": 74050 + }, + { + "epoch": 2.8595698675624543, + "grad_norm": 0.9182831048965454, + "learning_rate": 9.364582931129901e-06, + "loss": 0.1312, + "step": 74060 + }, + { + "epoch": 2.859955982856481, + "grad_norm": 0.45169830322265625, + "learning_rate": 9.338841911528116e-06, + "loss": 0.1484, + "step": 74070 + }, + { + "epoch": 2.860342098150508, + "grad_norm": 1.3599480390548706, + "learning_rate": 9.31310089192633e-06, + "loss": 0.1648, + "step": 74080 + }, + { + "epoch": 2.8607282134445344, + "grad_norm": 0.7859013676643372, + "learning_rate": 9.287359872324543e-06, + "loss": 0.1339, + "step": 74090 + }, + { + "epoch": 2.8611143287385614, + "grad_norm": 0.2625623047351837, + "learning_rate": 9.261618852722758e-06, + "loss": 0.1635, + "step": 74100 + }, + { + "epoch": 2.861500444032588, + "grad_norm": 3.6348588466644287, + "learning_rate": 9.235877833120971e-06, + "loss": 0.1958, + "step": 74110 + }, + { + "epoch": 2.861886559326615, + "grad_norm": 0.2913823127746582, + "learning_rate": 9.210136813519184e-06, + "loss": 0.1228, + "step": 74120 + }, + { + "epoch": 2.862272674620642, + "grad_norm": 0.8785780072212219, + "learning_rate": 9.184395793917398e-06, + "loss": 0.1284, + "step": 74130 + }, + { + "epoch": 2.8626587899146685, + "grad_norm": 0.20822127163410187, + "learning_rate": 9.158654774315611e-06, + "loss": 0.2853, + "step": 74140 + }, + { + "epoch": 2.8630449052086955, + "grad_norm": 0.12501594424247742, + "learning_rate": 9.132913754713824e-06, + "loss": 0.2773, + "step": 74150 + }, + { + "epoch": 2.863431020502722, + "grad_norm": 0.7225301861763, + "learning_rate": 9.107172735112038e-06, + "loss": 0.158, + "step": 74160 + }, + { + "epoch": 2.863817135796749, + "grad_norm": 0.15984690189361572, + "learning_rate": 9.081431715510253e-06, + "loss": 0.2105, + "step": 74170 + }, + { + "epoch": 2.8642032510907756, + "grad_norm": 0.9793509840965271, + "learning_rate": 9.055690695908466e-06, + "loss": 0.231, + "step": 74180 + }, + { + "epoch": 2.8645893663848025, + "grad_norm": 1.3881072998046875, + "learning_rate": 9.02994967630668e-06, + "loss": 0.1013, + "step": 74190 + }, + { + "epoch": 
2.8649754816788295, + "grad_norm": 2.9956090450286865, + "learning_rate": 9.004208656704893e-06, + "loss": 0.1328, + "step": 74200 + }, + { + "epoch": 2.865361596972856, + "grad_norm": 1.1672799587249756, + "learning_rate": 8.978467637103106e-06, + "loss": 0.1454, + "step": 74210 + }, + { + "epoch": 2.8657477122668826, + "grad_norm": 0.5341854095458984, + "learning_rate": 8.952726617501319e-06, + "loss": 0.0753, + "step": 74220 + }, + { + "epoch": 2.8661338275609096, + "grad_norm": 0.19304673373699188, + "learning_rate": 8.926985597899532e-06, + "loss": 0.1962, + "step": 74230 + }, + { + "epoch": 2.8665199428549366, + "grad_norm": 0.33846932649612427, + "learning_rate": 8.901244578297747e-06, + "loss": 0.1089, + "step": 74240 + }, + { + "epoch": 2.866906058148963, + "grad_norm": 1.007706642150879, + "learning_rate": 8.87550355869596e-06, + "loss": 0.186, + "step": 74250 + }, + { + "epoch": 2.86729217344299, + "grad_norm": 1.7284183502197266, + "learning_rate": 8.849762539094174e-06, + "loss": 0.2445, + "step": 74260 + }, + { + "epoch": 2.867678288737017, + "grad_norm": 3.7386040687561035, + "learning_rate": 8.824021519492387e-06, + "loss": 0.1044, + "step": 74270 + }, + { + "epoch": 2.8680644040310437, + "grad_norm": 0.5097699761390686, + "learning_rate": 8.7982804998906e-06, + "loss": 0.1357, + "step": 74280 + }, + { + "epoch": 2.8684505193250702, + "grad_norm": 1.620544195175171, + "learning_rate": 8.772539480288814e-06, + "loss": 0.1567, + "step": 74290 + }, + { + "epoch": 2.868836634619097, + "grad_norm": 0.6678496599197388, + "learning_rate": 8.746798460687027e-06, + "loss": 0.1495, + "step": 74300 + }, + { + "epoch": 2.869222749913124, + "grad_norm": 0.5393665432929993, + "learning_rate": 8.721057441085242e-06, + "loss": 0.1611, + "step": 74310 + }, + { + "epoch": 2.8696088652071507, + "grad_norm": 2.98551344871521, + "learning_rate": 8.695316421483456e-06, + "loss": 0.2125, + "step": 74320 + }, + { + "epoch": 2.8699949805011777, + "grad_norm": 0.36715471744537354, + "learning_rate": 8.669575401881669e-06, + "loss": 0.0974, + "step": 74330 + }, + { + "epoch": 2.8703810957952043, + "grad_norm": 2.0747640132904053, + "learning_rate": 8.643834382279882e-06, + "loss": 0.1571, + "step": 74340 + }, + { + "epoch": 2.8707672110892313, + "grad_norm": 3.940426826477051, + "learning_rate": 8.618093362678097e-06, + "loss": 0.1706, + "step": 74350 + }, + { + "epoch": 2.871153326383258, + "grad_norm": 0.07064225524663925, + "learning_rate": 8.59235234307631e-06, + "loss": 0.1104, + "step": 74360 + }, + { + "epoch": 2.871539441677285, + "grad_norm": 2.7614259719848633, + "learning_rate": 8.566611323474522e-06, + "loss": 0.2461, + "step": 74370 + }, + { + "epoch": 2.871925556971312, + "grad_norm": 0.1670389324426651, + "learning_rate": 8.540870303872736e-06, + "loss": 0.0632, + "step": 74380 + }, + { + "epoch": 2.8723116722653383, + "grad_norm": 0.16744600236415863, + "learning_rate": 8.51512928427095e-06, + "loss": 0.0861, + "step": 74390 + }, + { + "epoch": 2.8726977875593653, + "grad_norm": 1.4196829795837402, + "learning_rate": 8.489388264669164e-06, + "loss": 0.1127, + "step": 74400 + }, + { + "epoch": 2.873083902853392, + "grad_norm": 0.9786797165870667, + "learning_rate": 8.463647245067377e-06, + "loss": 0.1925, + "step": 74410 + }, + { + "epoch": 2.873470018147419, + "grad_norm": 1.3412476778030396, + "learning_rate": 8.437906225465592e-06, + "loss": 0.2199, + "step": 74420 + }, + { + "epoch": 2.8738561334414454, + "grad_norm": 0.7068845629692078, + "learning_rate": 8.412165205863805e-06, + 
"loss": 0.1286, + "step": 74430 + }, + { + "epoch": 2.8742422487354724, + "grad_norm": 0.9745468497276306, + "learning_rate": 8.386424186262019e-06, + "loss": 0.2113, + "step": 74440 + }, + { + "epoch": 2.8746283640294994, + "grad_norm": 1.4095795154571533, + "learning_rate": 8.36068316666023e-06, + "loss": 0.1474, + "step": 74450 + }, + { + "epoch": 2.875014479323526, + "grad_norm": 0.5530760884284973, + "learning_rate": 8.334942147058445e-06, + "loss": 0.1351, + "step": 74460 + }, + { + "epoch": 2.875400594617553, + "grad_norm": 1.8106993436813354, + "learning_rate": 8.309201127456659e-06, + "loss": 0.1643, + "step": 74470 + }, + { + "epoch": 2.8757867099115795, + "grad_norm": 0.3154134154319763, + "learning_rate": 8.283460107854872e-06, + "loss": 0.052, + "step": 74480 + }, + { + "epoch": 2.8761728252056065, + "grad_norm": 0.2410293072462082, + "learning_rate": 8.257719088253087e-06, + "loss": 0.0817, + "step": 74490 + }, + { + "epoch": 2.876558940499633, + "grad_norm": 0.7377256155014038, + "learning_rate": 8.2319780686513e-06, + "loss": 0.1561, + "step": 74500 + }, + { + "epoch": 2.87694505579366, + "grad_norm": 3.0138823986053467, + "learning_rate": 8.206237049049514e-06, + "loss": 0.1963, + "step": 74510 + }, + { + "epoch": 2.877331171087687, + "grad_norm": 2.0430660247802734, + "learning_rate": 8.180496029447727e-06, + "loss": 0.0937, + "step": 74520 + }, + { + "epoch": 2.8777172863817135, + "grad_norm": 0.4840744435787201, + "learning_rate": 8.15475500984594e-06, + "loss": 0.1026, + "step": 74530 + }, + { + "epoch": 2.8781034016757405, + "grad_norm": 1.995553731918335, + "learning_rate": 8.129013990244154e-06, + "loss": 0.1112, + "step": 74540 + }, + { + "epoch": 2.878489516969767, + "grad_norm": 2.139451742172241, + "learning_rate": 8.103272970642367e-06, + "loss": 0.2066, + "step": 74550 + }, + { + "epoch": 2.878875632263794, + "grad_norm": 1.6176917552947998, + "learning_rate": 8.077531951040582e-06, + "loss": 0.1588, + "step": 74560 + }, + { + "epoch": 2.8792617475578206, + "grad_norm": 0.965374231338501, + "learning_rate": 8.051790931438795e-06, + "loss": 0.1274, + "step": 74570 + }, + { + "epoch": 2.8796478628518476, + "grad_norm": 2.842817544937134, + "learning_rate": 8.026049911837009e-06, + "loss": 0.1503, + "step": 74580 + }, + { + "epoch": 2.8800339781458746, + "grad_norm": 0.3894018232822418, + "learning_rate": 8.000308892235222e-06, + "loss": 0.1052, + "step": 74590 + }, + { + "epoch": 2.880420093439901, + "grad_norm": 1.3701835870742798, + "learning_rate": 7.974567872633435e-06, + "loss": 0.2016, + "step": 74600 + }, + { + "epoch": 2.8808062087339277, + "grad_norm": 1.6000498533248901, + "learning_rate": 7.948826853031648e-06, + "loss": 0.1766, + "step": 74610 + }, + { + "epoch": 2.8811923240279547, + "grad_norm": 0.3492911159992218, + "learning_rate": 7.923085833429862e-06, + "loss": 0.2132, + "step": 74620 + }, + { + "epoch": 2.8815784393219817, + "grad_norm": 2.516202926635742, + "learning_rate": 7.897344813828077e-06, + "loss": 0.1888, + "step": 74630 + }, + { + "epoch": 2.881964554616008, + "grad_norm": 1.8804141283035278, + "learning_rate": 7.87160379422629e-06, + "loss": 0.2125, + "step": 74640 + }, + { + "epoch": 2.882350669910035, + "grad_norm": 1.4559141397476196, + "learning_rate": 7.845862774624503e-06, + "loss": 0.2118, + "step": 74650 + }, + { + "epoch": 2.882736785204062, + "grad_norm": 1.0181102752685547, + "learning_rate": 7.820121755022717e-06, + "loss": 0.2107, + "step": 74660 + }, + { + "epoch": 2.8831229004980887, + "grad_norm": 
0.8501084446907043, + "learning_rate": 7.79438073542093e-06, + "loss": 0.1405, + "step": 74670 + }, + { + "epoch": 2.8835090157921153, + "grad_norm": 0.6537768244743347, + "learning_rate": 7.768639715819143e-06, + "loss": 0.0951, + "step": 74680 + }, + { + "epoch": 2.8838951310861423, + "grad_norm": 1.6745322942733765, + "learning_rate": 7.742898696217357e-06, + "loss": 0.1451, + "step": 74690 + }, + { + "epoch": 2.8842812463801693, + "grad_norm": 0.046329010277986526, + "learning_rate": 7.717157676615572e-06, + "loss": 0.0637, + "step": 74700 + }, + { + "epoch": 2.884667361674196, + "grad_norm": 1.1959903240203857, + "learning_rate": 7.691416657013785e-06, + "loss": 0.1841, + "step": 74710 + }, + { + "epoch": 2.885053476968223, + "grad_norm": 2.5898563861846924, + "learning_rate": 7.665675637411998e-06, + "loss": 0.1309, + "step": 74720 + }, + { + "epoch": 2.8854395922622498, + "grad_norm": 0.05841336399316788, + "learning_rate": 7.639934617810212e-06, + "loss": 0.0825, + "step": 74730 + }, + { + "epoch": 2.8858257075562763, + "grad_norm": 1.5657339096069336, + "learning_rate": 7.614193598208426e-06, + "loss": 0.0816, + "step": 74740 + }, + { + "epoch": 2.886211822850303, + "grad_norm": 1.2975622415542603, + "learning_rate": 7.588452578606639e-06, + "loss": 0.2084, + "step": 74750 + }, + { + "epoch": 2.88659793814433, + "grad_norm": 0.8901941180229187, + "learning_rate": 7.562711559004852e-06, + "loss": 0.0946, + "step": 74760 + }, + { + "epoch": 2.886984053438357, + "grad_norm": 0.13277596235275269, + "learning_rate": 7.5369705394030665e-06, + "loss": 0.1313, + "step": 74770 + }, + { + "epoch": 2.8873701687323834, + "grad_norm": 0.6199597120285034, + "learning_rate": 7.51122951980128e-06, + "loss": 0.1139, + "step": 74780 + }, + { + "epoch": 2.8877562840264104, + "grad_norm": 0.041490960866212845, + "learning_rate": 7.485488500199493e-06, + "loss": 0.0945, + "step": 74790 + }, + { + "epoch": 2.888142399320437, + "grad_norm": 0.06974820047616959, + "learning_rate": 7.4597474805977065e-06, + "loss": 0.1619, + "step": 74800 + }, + { + "epoch": 2.888528514614464, + "grad_norm": 0.5965213775634766, + "learning_rate": 7.434006460995921e-06, + "loss": 0.1294, + "step": 74810 + }, + { + "epoch": 2.8889146299084905, + "grad_norm": 1.50202214717865, + "learning_rate": 7.408265441394134e-06, + "loss": 0.2038, + "step": 74820 + }, + { + "epoch": 2.8893007452025175, + "grad_norm": 0.2680859863758087, + "learning_rate": 7.382524421792347e-06, + "loss": 0.1566, + "step": 74830 + }, + { + "epoch": 2.8896868604965444, + "grad_norm": 0.8714569211006165, + "learning_rate": 7.3567834021905606e-06, + "loss": 0.1103, + "step": 74840 + }, + { + "epoch": 2.890072975790571, + "grad_norm": 1.0597981214523315, + "learning_rate": 7.331042382588775e-06, + "loss": 0.1141, + "step": 74850 + }, + { + "epoch": 2.890459091084598, + "grad_norm": 1.1880320310592651, + "learning_rate": 7.305301362986988e-06, + "loss": 0.1203, + "step": 74860 + }, + { + "epoch": 2.8908452063786245, + "grad_norm": 0.8900028467178345, + "learning_rate": 7.279560343385201e-06, + "loss": 0.2258, + "step": 74870 + }, + { + "epoch": 2.8912313216726515, + "grad_norm": 0.6879392266273499, + "learning_rate": 7.253819323783416e-06, + "loss": 0.1561, + "step": 74880 + }, + { + "epoch": 2.891617436966678, + "grad_norm": 0.3370697498321533, + "learning_rate": 7.228078304181629e-06, + "loss": 0.2218, + "step": 74890 + }, + { + "epoch": 2.892003552260705, + "grad_norm": 2.0778462886810303, + "learning_rate": 7.202337284579842e-06, + "loss": 0.2195, + 
"step": 74900 + }, + { + "epoch": 2.892389667554732, + "grad_norm": 0.09895554929971695, + "learning_rate": 7.1765962649780554e-06, + "loss": 0.148, + "step": 74910 + }, + { + "epoch": 2.8927757828487586, + "grad_norm": 3.897048234939575, + "learning_rate": 7.1508552453762704e-06, + "loss": 0.1535, + "step": 74920 + }, + { + "epoch": 2.8931618981427856, + "grad_norm": 0.4604206383228302, + "learning_rate": 7.125114225774483e-06, + "loss": 0.1037, + "step": 74930 + }, + { + "epoch": 2.893548013436812, + "grad_norm": 1.1230722665786743, + "learning_rate": 7.099373206172696e-06, + "loss": 0.2625, + "step": 74940 + }, + { + "epoch": 2.893934128730839, + "grad_norm": 0.0566195473074913, + "learning_rate": 7.073632186570911e-06, + "loss": 0.1807, + "step": 74950 + }, + { + "epoch": 2.8943202440248657, + "grad_norm": 0.08504597842693329, + "learning_rate": 7.0478911669691245e-06, + "loss": 0.1639, + "step": 74960 + }, + { + "epoch": 2.8947063593188926, + "grad_norm": 1.4992380142211914, + "learning_rate": 7.022150147367337e-06, + "loss": 0.219, + "step": 74970 + }, + { + "epoch": 2.8950924746129196, + "grad_norm": 1.0938425064086914, + "learning_rate": 6.99640912776555e-06, + "loss": 0.1467, + "step": 74980 + }, + { + "epoch": 2.895478589906946, + "grad_norm": 2.4818408489227295, + "learning_rate": 6.970668108163765e-06, + "loss": 0.107, + "step": 74990 + }, + { + "epoch": 2.895864705200973, + "grad_norm": 0.18132860958576202, + "learning_rate": 6.944927088561979e-06, + "loss": 0.0966, + "step": 75000 + }, + { + "epoch": 2.8962508204949997, + "grad_norm": 1.1011055707931519, + "learning_rate": 6.919186068960191e-06, + "loss": 0.177, + "step": 75010 + }, + { + "epoch": 2.8966369357890267, + "grad_norm": 0.2061525285243988, + "learning_rate": 6.893445049358406e-06, + "loss": 0.1528, + "step": 75020 + }, + { + "epoch": 2.8970230510830532, + "grad_norm": 0.651801347732544, + "learning_rate": 6.867704029756619e-06, + "loss": 0.2042, + "step": 75030 + }, + { + "epoch": 2.8974091663770802, + "grad_norm": 0.5397346019744873, + "learning_rate": 6.841963010154833e-06, + "loss": 0.2099, + "step": 75040 + }, + { + "epoch": 2.8977952816711072, + "grad_norm": 2.301893949508667, + "learning_rate": 6.816221990553045e-06, + "loss": 0.107, + "step": 75050 + }, + { + "epoch": 2.8981813969651338, + "grad_norm": 0.7261497378349304, + "learning_rate": 6.79048097095126e-06, + "loss": 0.222, + "step": 75060 + }, + { + "epoch": 2.8985675122591603, + "grad_norm": 2.5633339881896973, + "learning_rate": 6.7647399513494735e-06, + "loss": 0.2573, + "step": 75070 + }, + { + "epoch": 2.8989536275531873, + "grad_norm": 1.5711263418197632, + "learning_rate": 6.738998931747687e-06, + "loss": 0.22, + "step": 75080 + }, + { + "epoch": 2.8993397428472143, + "grad_norm": 0.3720754086971283, + "learning_rate": 6.713257912145901e-06, + "loss": 0.2248, + "step": 75090 + }, + { + "epoch": 2.899725858141241, + "grad_norm": 6.319779396057129, + "learning_rate": 6.687516892544114e-06, + "loss": 0.1005, + "step": 75100 + }, + { + "epoch": 2.900111973435268, + "grad_norm": 0.8868811130523682, + "learning_rate": 6.661775872942328e-06, + "loss": 0.2097, + "step": 75110 + }, + { + "epoch": 2.900498088729295, + "grad_norm": 0.3579706847667694, + "learning_rate": 6.636034853340541e-06, + "loss": 0.1209, + "step": 75120 + }, + { + "epoch": 2.9008842040233214, + "grad_norm": 2.9907472133636475, + "learning_rate": 6.610293833738755e-06, + "loss": 0.1789, + "step": 75130 + }, + { + "epoch": 2.901270319317348, + "grad_norm": 0.06285133957862854, + 
"learning_rate": 6.584552814136968e-06, + "loss": 0.1437, + "step": 75140 + }, + { + "epoch": 2.901656434611375, + "grad_norm": 1.304377555847168, + "learning_rate": 6.558811794535182e-06, + "loss": 0.1315, + "step": 75150 + }, + { + "epoch": 2.902042549905402, + "grad_norm": 0.7678247690200806, + "learning_rate": 6.533070774933396e-06, + "loss": 0.1163, + "step": 75160 + }, + { + "epoch": 2.9024286651994284, + "grad_norm": 0.30619505047798157, + "learning_rate": 6.507329755331609e-06, + "loss": 0.0777, + "step": 75170 + }, + { + "epoch": 2.9028147804934554, + "grad_norm": 2.499606132507324, + "learning_rate": 6.4815887357298225e-06, + "loss": 0.2395, + "step": 75180 + }, + { + "epoch": 2.9032008957874824, + "grad_norm": 0.9318193793296814, + "learning_rate": 6.455847716128036e-06, + "loss": 0.152, + "step": 75190 + }, + { + "epoch": 2.903587011081509, + "grad_norm": 1.8113256692886353, + "learning_rate": 6.43010669652625e-06, + "loss": 0.1505, + "step": 75200 + }, + { + "epoch": 2.9039731263755355, + "grad_norm": 1.9092323780059814, + "learning_rate": 6.404365676924463e-06, + "loss": 0.2304, + "step": 75210 + }, + { + "epoch": 2.9043592416695625, + "grad_norm": 0.5695223212242126, + "learning_rate": 6.3786246573226765e-06, + "loss": 0.1125, + "step": 75220 + }, + { + "epoch": 2.9047453569635895, + "grad_norm": 2.3485186100006104, + "learning_rate": 6.35288363772089e-06, + "loss": 0.2701, + "step": 75230 + }, + { + "epoch": 2.905131472257616, + "grad_norm": 1.5487502813339233, + "learning_rate": 6.327142618119104e-06, + "loss": 0.1471, + "step": 75240 + }, + { + "epoch": 2.905517587551643, + "grad_norm": 2.2842421531677246, + "learning_rate": 6.301401598517317e-06, + "loss": 0.1211, + "step": 75250 + }, + { + "epoch": 2.9059037028456696, + "grad_norm": 0.5499130487442017, + "learning_rate": 6.275660578915531e-06, + "loss": 0.1515, + "step": 75260 + }, + { + "epoch": 2.9062898181396966, + "grad_norm": 0.7656468749046326, + "learning_rate": 6.249919559313745e-06, + "loss": 0.0743, + "step": 75270 + }, + { + "epoch": 2.906675933433723, + "grad_norm": 1.4212474822998047, + "learning_rate": 6.224178539711958e-06, + "loss": 0.1706, + "step": 75280 + }, + { + "epoch": 2.90706204872775, + "grad_norm": 0.5614858269691467, + "learning_rate": 6.1984375201101714e-06, + "loss": 0.1551, + "step": 75290 + }, + { + "epoch": 2.907448164021777, + "grad_norm": 0.2481578141450882, + "learning_rate": 6.172696500508386e-06, + "loss": 0.1594, + "step": 75300 + }, + { + "epoch": 2.9078342793158036, + "grad_norm": 0.774730920791626, + "learning_rate": 6.1469554809066e-06, + "loss": 0.2387, + "step": 75310 + }, + { + "epoch": 2.9082203946098306, + "grad_norm": 0.12540464103221893, + "learning_rate": 6.121214461304812e-06, + "loss": 0.1436, + "step": 75320 + }, + { + "epoch": 2.908606509903857, + "grad_norm": 0.36908024549484253, + "learning_rate": 6.095473441703026e-06, + "loss": 0.1046, + "step": 75330 + }, + { + "epoch": 2.908992625197884, + "grad_norm": 0.5687906742095947, + "learning_rate": 6.06973242210124e-06, + "loss": 0.1454, + "step": 75340 + }, + { + "epoch": 2.9093787404919107, + "grad_norm": 1.373570442199707, + "learning_rate": 6.043991402499454e-06, + "loss": 0.203, + "step": 75350 + }, + { + "epoch": 2.9097648557859377, + "grad_norm": 0.40615278482437134, + "learning_rate": 6.018250382897666e-06, + "loss": 0.0836, + "step": 75360 + }, + { + "epoch": 2.9101509710799647, + "grad_norm": 1.5747426748275757, + "learning_rate": 5.9925093632958805e-06, + "loss": 0.1206, + "step": 75370 + }, + { + "epoch": 
2.9105370863739912, + "grad_norm": 0.9298601746559143, + "learning_rate": 5.966768343694094e-06, + "loss": 0.1219, + "step": 75380 + }, + { + "epoch": 2.910923201668018, + "grad_norm": 0.7346408367156982, + "learning_rate": 5.941027324092308e-06, + "loss": 0.1776, + "step": 75390 + }, + { + "epoch": 2.9113093169620448, + "grad_norm": 0.6600155234336853, + "learning_rate": 5.915286304490521e-06, + "loss": 0.1511, + "step": 75400 + }, + { + "epoch": 2.9116954322560717, + "grad_norm": 0.3379638195037842, + "learning_rate": 5.8895452848887345e-06, + "loss": 0.1602, + "step": 75410 + }, + { + "epoch": 2.9120815475500983, + "grad_norm": 0.7085258960723877, + "learning_rate": 5.863804265286949e-06, + "loss": 0.0688, + "step": 75420 + }, + { + "epoch": 2.9124676628441253, + "grad_norm": 0.379912793636322, + "learning_rate": 5.838063245685162e-06, + "loss": 0.2218, + "step": 75430 + }, + { + "epoch": 2.9128537781381523, + "grad_norm": 1.7112829685211182, + "learning_rate": 5.812322226083375e-06, + "loss": 0.0864, + "step": 75440 + }, + { + "epoch": 2.913239893432179, + "grad_norm": 0.9781870245933533, + "learning_rate": 5.786581206481589e-06, + "loss": 0.1837, + "step": 75450 + }, + { + "epoch": 2.913626008726206, + "grad_norm": 0.6391132473945618, + "learning_rate": 5.760840186879803e-06, + "loss": 0.0902, + "step": 75460 + }, + { + "epoch": 2.9140121240202324, + "grad_norm": 1.400060772895813, + "learning_rate": 5.735099167278016e-06, + "loss": 0.1623, + "step": 75470 + }, + { + "epoch": 2.9143982393142593, + "grad_norm": 1.2771530151367188, + "learning_rate": 5.709358147676229e-06, + "loss": 0.1589, + "step": 75480 + }, + { + "epoch": 2.914784354608286, + "grad_norm": 0.37462353706359863, + "learning_rate": 5.683617128074444e-06, + "loss": 0.1241, + "step": 75490 + }, + { + "epoch": 2.915170469902313, + "grad_norm": 1.5162663459777832, + "learning_rate": 5.657876108472657e-06, + "loss": 0.0891, + "step": 75500 + }, + { + "epoch": 2.91555658519634, + "grad_norm": 2.389887809753418, + "learning_rate": 5.63213508887087e-06, + "loss": 0.1397, + "step": 75510 + }, + { + "epoch": 2.9159427004903664, + "grad_norm": 0.4746516942977905, + "learning_rate": 5.6063940692690835e-06, + "loss": 0.1362, + "step": 75520 + }, + { + "epoch": 2.916328815784393, + "grad_norm": 1.1355524063110352, + "learning_rate": 5.580653049667298e-06, + "loss": 0.1772, + "step": 75530 + }, + { + "epoch": 2.91671493107842, + "grad_norm": 0.18736037611961365, + "learning_rate": 5.554912030065512e-06, + "loss": 0.1449, + "step": 75540 + }, + { + "epoch": 2.917101046372447, + "grad_norm": 2.575685501098633, + "learning_rate": 5.529171010463724e-06, + "loss": 0.1871, + "step": 75550 + }, + { + "epoch": 2.9174871616664735, + "grad_norm": 1.01112699508667, + "learning_rate": 5.5034299908619385e-06, + "loss": 0.1588, + "step": 75560 + }, + { + "epoch": 2.9178732769605005, + "grad_norm": 1.8929286003112793, + "learning_rate": 5.477688971260152e-06, + "loss": 0.1564, + "step": 75570 + }, + { + "epoch": 2.9182593922545275, + "grad_norm": 0.21724069118499756, + "learning_rate": 5.451947951658366e-06, + "loss": 0.2152, + "step": 75580 + }, + { + "epoch": 2.918645507548554, + "grad_norm": 1.2024441957473755, + "learning_rate": 5.426206932056578e-06, + "loss": 0.3032, + "step": 75590 + }, + { + "epoch": 2.9190316228425806, + "grad_norm": 0.14324747025966644, + "learning_rate": 5.4004659124547925e-06, + "loss": 0.0674, + "step": 75600 + }, + { + "epoch": 2.9194177381366075, + "grad_norm": 1.427650809288025, + "learning_rate": 
5.374724892853006e-06, + "loss": 0.1086, + "step": 75610 + }, + { + "epoch": 2.9198038534306345, + "grad_norm": 1.2401851415634155, + "learning_rate": 5.34898387325122e-06, + "loss": 0.0944, + "step": 75620 + }, + { + "epoch": 2.920189968724661, + "grad_norm": 1.4218640327453613, + "learning_rate": 5.323242853649433e-06, + "loss": 0.1386, + "step": 75630 + }, + { + "epoch": 2.920576084018688, + "grad_norm": 1.0168864727020264, + "learning_rate": 5.297501834047647e-06, + "loss": 0.1291, + "step": 75640 + }, + { + "epoch": 2.9209621993127146, + "grad_norm": 0.2599659860134125, + "learning_rate": 5.271760814445861e-06, + "loss": 0.1055, + "step": 75650 + }, + { + "epoch": 2.9213483146067416, + "grad_norm": 2.1232173442840576, + "learning_rate": 5.246019794844074e-06, + "loss": 0.1521, + "step": 75660 + }, + { + "epoch": 2.921734429900768, + "grad_norm": 0.6988056302070618, + "learning_rate": 5.220278775242287e-06, + "loss": 0.1584, + "step": 75670 + }, + { + "epoch": 2.922120545194795, + "grad_norm": 1.766686201095581, + "learning_rate": 5.194537755640501e-06, + "loss": 0.1888, + "step": 75680 + }, + { + "epoch": 2.922506660488822, + "grad_norm": 1.17173433303833, + "learning_rate": 5.168796736038715e-06, + "loss": 0.1362, + "step": 75690 + }, + { + "epoch": 2.9228927757828487, + "grad_norm": 0.10190659016370773, + "learning_rate": 5.143055716436929e-06, + "loss": 0.1048, + "step": 75700 + }, + { + "epoch": 2.9232788910768757, + "grad_norm": 0.24241623282432556, + "learning_rate": 5.1173146968351415e-06, + "loss": 0.0925, + "step": 75710 + }, + { + "epoch": 2.923665006370902, + "grad_norm": 0.027136487886309624, + "learning_rate": 5.091573677233356e-06, + "loss": 0.1881, + "step": 75720 + }, + { + "epoch": 2.924051121664929, + "grad_norm": 0.7799992561340332, + "learning_rate": 5.065832657631569e-06, + "loss": 0.1733, + "step": 75730 + }, + { + "epoch": 2.9244372369589557, + "grad_norm": 0.11681391298770905, + "learning_rate": 5.040091638029783e-06, + "loss": 0.0768, + "step": 75740 + }, + { + "epoch": 2.9248233522529827, + "grad_norm": 3.007784366607666, + "learning_rate": 5.014350618427996e-06, + "loss": 0.2014, + "step": 75750 + }, + { + "epoch": 2.9252094675470097, + "grad_norm": 0.8907320499420166, + "learning_rate": 4.98860959882621e-06, + "loss": 0.3129, + "step": 75760 + }, + { + "epoch": 2.9255955828410363, + "grad_norm": 0.960918128490448, + "learning_rate": 4.962868579224423e-06, + "loss": 0.0904, + "step": 75770 + }, + { + "epoch": 2.9259816981350633, + "grad_norm": 1.1451547145843506, + "learning_rate": 4.937127559622637e-06, + "loss": 0.1966, + "step": 75780 + }, + { + "epoch": 2.92636781342909, + "grad_norm": 0.0598277747631073, + "learning_rate": 4.9113865400208505e-06, + "loss": 0.178, + "step": 75790 + }, + { + "epoch": 2.926753928723117, + "grad_norm": 2.012179374694824, + "learning_rate": 4.885645520419064e-06, + "loss": 0.1208, + "step": 75800 + }, + { + "epoch": 2.9271400440171433, + "grad_norm": 0.29047757387161255, + "learning_rate": 4.859904500817278e-06, + "loss": 0.1803, + "step": 75810 + }, + { + "epoch": 2.9275261593111703, + "grad_norm": 0.23592634499073029, + "learning_rate": 4.834163481215491e-06, + "loss": 0.1307, + "step": 75820 + }, + { + "epoch": 2.9279122746051973, + "grad_norm": 0.6524437069892883, + "learning_rate": 4.808422461613705e-06, + "loss": 0.1102, + "step": 75830 + }, + { + "epoch": 2.928298389899224, + "grad_norm": 4.513589382171631, + "learning_rate": 4.782681442011918e-06, + "loss": 0.3142, + "step": 75840 + }, + { + "epoch": 
2.928684505193251, + "grad_norm": 0.2516826093196869, + "learning_rate": 4.756940422410132e-06, + "loss": 0.1691, + "step": 75850 + }, + { + "epoch": 2.9290706204872774, + "grad_norm": 0.05350786820054054, + "learning_rate": 4.731199402808345e-06, + "loss": 0.1919, + "step": 75860 + }, + { + "epoch": 2.9294567357813044, + "grad_norm": 0.23492521047592163, + "learning_rate": 4.705458383206559e-06, + "loss": 0.1481, + "step": 75870 + }, + { + "epoch": 2.929842851075331, + "grad_norm": 1.1959890127182007, + "learning_rate": 4.679717363604773e-06, + "loss": 0.1008, + "step": 75880 + }, + { + "epoch": 2.930228966369358, + "grad_norm": 1.4260644912719727, + "learning_rate": 4.653976344002986e-06, + "loss": 0.0918, + "step": 75890 + }, + { + "epoch": 2.930615081663385, + "grad_norm": 1.236479640007019, + "learning_rate": 4.6282353244011995e-06, + "loss": 0.0863, + "step": 75900 + }, + { + "epoch": 2.9310011969574115, + "grad_norm": 1.4055296182632446, + "learning_rate": 4.602494304799413e-06, + "loss": 0.176, + "step": 75910 + }, + { + "epoch": 2.931387312251438, + "grad_norm": 1.5062698125839233, + "learning_rate": 4.576753285197627e-06, + "loss": 0.1485, + "step": 75920 + }, + { + "epoch": 2.931773427545465, + "grad_norm": 0.586919367313385, + "learning_rate": 4.551012265595841e-06, + "loss": 0.1995, + "step": 75930 + }, + { + "epoch": 2.932159542839492, + "grad_norm": 0.755504310131073, + "learning_rate": 4.525271245994054e-06, + "loss": 0.1654, + "step": 75940 + }, + { + "epoch": 2.9325456581335185, + "grad_norm": 0.12576620280742645, + "learning_rate": 4.499530226392268e-06, + "loss": 0.0948, + "step": 75950 + }, + { + "epoch": 2.9329317734275455, + "grad_norm": 0.9442972540855408, + "learning_rate": 4.473789206790481e-06, + "loss": 0.2244, + "step": 75960 + }, + { + "epoch": 2.9333178887215725, + "grad_norm": 0.026888804510235786, + "learning_rate": 4.448048187188695e-06, + "loss": 0.0444, + "step": 75970 + }, + { + "epoch": 2.933704004015599, + "grad_norm": 0.789533257484436, + "learning_rate": 4.422307167586908e-06, + "loss": 0.1891, + "step": 75980 + }, + { + "epoch": 2.9340901193096256, + "grad_norm": 1.3214176893234253, + "learning_rate": 4.396566147985122e-06, + "loss": 0.1072, + "step": 75990 + }, + { + "epoch": 2.9344762346036526, + "grad_norm": 1.0256754159927368, + "learning_rate": 4.370825128383335e-06, + "loss": 0.198, + "step": 76000 + } + ], + "logging_steps": 10, + "max_steps": 77697, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 4.628079968256e+16, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/training_args.bin b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..51ec8c074aec62c737bdc86f3c7f1d33bd5739dc Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76000/training_args.bin differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/added_tokens.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/added_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..ea36ca9a30d42cfe00f964ed2b450595386671dc --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/added_tokens.json @@ -0,0 +1,98 
@@ +{ + "": 36095, + "": 36085, + "": 36084, + "": 36083, + "": 36082, + "": 36081, + "": 36080, + "": 36079, + "": 36078, + "": 36077, + "": 36076, + "": 36094, + "": 36075, + "": 36074, + "": 36073, + "": 36072, + "": 36071, + "": 36070, + "": 36069, + "": 36068, + "": 36067, + "": 36066, + "": 36093, + "": 36065, + "": 36064, + "": 36063, + "": 36062, + "": 36061, + "": 36060, + "": 36059, + "": 36058, + "": 36057, + "": 36056, + "": 36092, + "": 36055, + "": 36054, + "": 36053, + "": 36052, + "": 36051, + "": 36050, + "": 36049, + "": 36048, + "": 36047, + "": 36046, + "": 36091, + "": 36045, + "": 36044, + "": 36043, + "": 36042, + "": 36041, + "": 36040, + "": 36039, + "": 36038, + "": 36037, + "": 36036, + "": 36090, + "": 36035, + "": 36034, + "": 36033, + "": 36032, + "": 36031, + "": 36030, + "": 36029, + "": 36028, + "": 36027, + "": 36026, + "": 36089, + "": 36025, + "": 36024, + "": 36023, + "": 36022, + "": 36021, + "": 36020, + "": 36019, + "": 36018, + "": 36017, + "": 36016, + "": 36088, + "": 36015, + "": 36014, + "": 36013, + "": 36012, + "": 36011, + "": 36010, + "": 36009, + "": 36008, + "": 36007, + "": 36006, + "": 36087, + "": 36005, + "": 36004, + "": 36003, + "": 36002, + "": 36001, + "": 36000, + "": 36086 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..402574816dbd44d7c00b58882614b0cba909b7da --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/config.json @@ -0,0 +1,31 @@ +{ + "architectures": [ + "T5ForConditionalGeneration" + ], + "classifier_dropout": 0.0, + "d_ff": 3072, + "d_kv": 64, + "d_model": 768, + "decoder_start_token_id": 0, + "dense_act_fn": "relu", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "relu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": false, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "n_positions": 512, + "num_decoder_layers": 12, + "num_heads": 12, + "num_layers": 12, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "torch_dtype": "float32", + "transformers_version": "4.53.2", + "use_cache": true, + "vocab_size": 36096 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/generation_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..88a411aa38dffcace543088cff3153003de6e01a --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/generation_config.json @@ -0,0 +1,7 @@ +{ + "_from_model_config": true, + "decoder_start_token_id": 0, + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.53.2" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/model.safetensors b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..cbef5615297aa6c3ebc63b3ab9f15814216fe4b3 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2706c7341773cf193e8bc6c2ad0120ef23bf74a58b8df6faec82d3aaec602212 +size 903834408 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/optimizer.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/optimizer.pt new file mode 100644 index 
0000000000000000000000000000000000000000..24c22d0e6c593af5ba8975a04effe965ee4b0c46 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:624283006f1b220aec4166ad22e276c58571a258cac8d235fe5c53466200a370 +size 1807824651 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/rng_state.pth b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..ceec9d6bcf1a79f86ddfa6896474661c124ec0d0 Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/rng_state.pth differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/scheduler.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..4752a51ec47fe2d34a972e9636d652b6180eaafe Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/scheduler.pt differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/special_tokens_map.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..843b3344f47b1783c48b5ac91bb6015ae9d3c4be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/special_tokens_map.json @@ -0,0 +1,121 @@ +{ + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/spiece.model b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/spiece.model new file mode 100644 index 0000000000000000000000000000000000000000..f8bddaf892bdf23d2148f3a3b358f16c5c45c7be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/spiece.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59986b62f9f0b90edafb9b073ea7b93d21114a5841219a1ea2399ade73f729c6 +size 820370 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/tokenizer_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7da9aea82b39809d9fbe6214e6f4fa2340b695f8 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/tokenizer_config.json @@ -0,0 +1,905 @@ +{ + "add_prefix_space": true, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "36000": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36001": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36002": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36003": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36004": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36005": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36006": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36007": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36008": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36009": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36010": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36011": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36012": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36013": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36014": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36015": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36016": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36017": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36018": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36019": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36020": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36021": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36022": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36023": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36024": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36025": { + "content": "", + "lstrip": true, + "normalized": 
false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36026": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36027": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36028": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36029": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36030": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36031": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36032": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36033": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36034": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36035": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36036": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36037": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36038": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36039": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36040": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36041": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36042": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36043": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36044": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36045": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36046": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36047": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36048": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36049": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36050": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36051": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": 
true, + "single_word": false, + "special": true + }, + "36052": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36053": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36054": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36055": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36056": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36057": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36058": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36059": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36060": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36061": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36062": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36063": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36064": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36065": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36066": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36067": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36068": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36069": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36070": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36071": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36072": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36073": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36074": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36075": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36076": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36077": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + 
"single_word": false, + "special": true + }, + "36078": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36079": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36080": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36081": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36082": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36083": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36084": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36085": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36086": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36087": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36088": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36089": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36090": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36091": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36092": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36093": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36094": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36095": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_ids": 96, + "extra_special_tokens": {}, + "legacy": true, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "tokenizer_class": "T5Tokenizer", + "unk_token": "" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/trainer_state.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/trainer_state.json new file mode 
100644 index 0000000000000000000000000000000000000000..88e8b91574fa2113fb7ff4ef90368ae64d1b90e6 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/trainer_state.json @@ -0,0 +1,53584 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.9537819993049927, + "eval_steps": 500, + "global_step": 76500, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0003861152940267964, + "grad_norm": 8.003422737121582, + "learning_rate": 0.0001999768330823584, + "loss": 5.0107, + "step": 10 + }, + { + "epoch": 0.0007722305880535929, + "grad_norm": 11.965606689453125, + "learning_rate": 0.0001999510920627566, + "loss": 0.9013, + "step": 20 + }, + { + "epoch": 0.0011583458820803893, + "grad_norm": 9.783374786376953, + "learning_rate": 0.00019992535104315483, + "loss": 0.5506, + "step": 30 + }, + { + "epoch": 0.0015444611761071857, + "grad_norm": 5.720436096191406, + "learning_rate": 0.00019989961002355304, + "loss": 0.501, + "step": 40 + }, + { + "epoch": 0.001930576470133982, + "grad_norm": 11.18126392364502, + "learning_rate": 0.00019987386900395125, + "loss": 0.8176, + "step": 50 + }, + { + "epoch": 0.0023166917641607786, + "grad_norm": 8.88875675201416, + "learning_rate": 0.00019984812798434947, + "loss": 0.5404, + "step": 60 + }, + { + "epoch": 0.0027028070581875748, + "grad_norm": 2.9886066913604736, + "learning_rate": 0.00019982238696474768, + "loss": 0.5295, + "step": 70 + }, + { + "epoch": 0.0030889223522143714, + "grad_norm": 8.936307907104492, + "learning_rate": 0.0001997966459451459, + "loss": 0.7398, + "step": 80 + }, + { + "epoch": 0.0034750376462411676, + "grad_norm": 11.393534660339355, + "learning_rate": 0.0001997709049255441, + "loss": 0.6333, + "step": 90 + }, + { + "epoch": 0.003861152940267964, + "grad_norm": 8.725994110107422, + "learning_rate": 0.00019974516390594235, + "loss": 0.4689, + "step": 100 + }, + { + "epoch": 0.00424726823429476, + "grad_norm": 20.316652297973633, + "learning_rate": 0.00019971942288634053, + "loss": 0.8522, + "step": 110 + }, + { + "epoch": 0.004633383528321557, + "grad_norm": 4.232663631439209, + "learning_rate": 0.00019969368186673875, + "loss": 0.5041, + "step": 120 + }, + { + "epoch": 0.005019498822348353, + "grad_norm": 2.609255313873291, + "learning_rate": 0.00019966794084713696, + "loss": 0.7439, + "step": 130 + }, + { + "epoch": 0.0054056141163751495, + "grad_norm": 10.063919067382812, + "learning_rate": 0.00019964219982753517, + "loss": 0.6702, + "step": 140 + }, + { + "epoch": 0.005791729410401946, + "grad_norm": 5.799802303314209, + "learning_rate": 0.00019961645880793339, + "loss": 0.7356, + "step": 150 + }, + { + "epoch": 0.006177844704428743, + "grad_norm": 12.664258003234863, + "learning_rate": 0.0001995907177883316, + "loss": 0.4067, + "step": 160 + }, + { + "epoch": 0.006563959998455539, + "grad_norm": 9.42366886138916, + "learning_rate": 0.00019956497676872984, + "loss": 0.4767, + "step": 170 + }, + { + "epoch": 0.006950075292482335, + "grad_norm": 5.382272243499756, + "learning_rate": 0.00019953923574912803, + "loss": 0.536, + "step": 180 + }, + { + "epoch": 0.0073361905865091314, + "grad_norm": 9.797371864318848, + "learning_rate": 0.00019951349472952624, + "loss": 0.4735, + "step": 190 + }, + { + "epoch": 0.007722305880535928, + "grad_norm": 7.965329647064209, + "learning_rate": 0.00019948775370992445, + "loss": 0.3881, + "step": 200 + }, + { + "epoch": 0.008108421174562725, + "grad_norm": 
4.075791835784912, + "learning_rate": 0.00019946201269032267, + "loss": 0.5564, + "step": 210 + }, + { + "epoch": 0.00849453646858952, + "grad_norm": 24.367305755615234, + "learning_rate": 0.0001994362716707209, + "loss": 0.9795, + "step": 220 + }, + { + "epoch": 0.008880651762616317, + "grad_norm": 9.627866744995117, + "learning_rate": 0.0001994105306511191, + "loss": 0.4528, + "step": 230 + }, + { + "epoch": 0.009266767056643114, + "grad_norm": 7.469555854797363, + "learning_rate": 0.00019938478963151733, + "loss": 0.447, + "step": 240 + }, + { + "epoch": 0.00965288235066991, + "grad_norm": 7.426730155944824, + "learning_rate": 0.00019935904861191552, + "loss": 0.6026, + "step": 250 + }, + { + "epoch": 0.010038997644696707, + "grad_norm": 6.999317169189453, + "learning_rate": 0.00019933330759231373, + "loss": 0.4962, + "step": 260 + }, + { + "epoch": 0.010425112938723502, + "grad_norm": 10.492286682128906, + "learning_rate": 0.00019930756657271194, + "loss": 0.7987, + "step": 270 + }, + { + "epoch": 0.010811228232750299, + "grad_norm": 7.079407215118408, + "learning_rate": 0.00019928182555311016, + "loss": 0.4395, + "step": 280 + }, + { + "epoch": 0.011197343526777096, + "grad_norm": 9.610014915466309, + "learning_rate": 0.0001992560845335084, + "loss": 0.748, + "step": 290 + }, + { + "epoch": 0.011583458820803891, + "grad_norm": 5.993048667907715, + "learning_rate": 0.00019923034351390658, + "loss": 0.4328, + "step": 300 + }, + { + "epoch": 0.011969574114830689, + "grad_norm": 7.336791515350342, + "learning_rate": 0.00019920460249430483, + "loss": 0.4104, + "step": 310 + }, + { + "epoch": 0.012355689408857486, + "grad_norm": 7.967221736907959, + "learning_rate": 0.000199178861474703, + "loss": 0.4662, + "step": 320 + }, + { + "epoch": 0.012741804702884281, + "grad_norm": 4.464987754821777, + "learning_rate": 0.00019915312045510125, + "loss": 0.725, + "step": 330 + }, + { + "epoch": 0.013127919996911078, + "grad_norm": 8.669449806213379, + "learning_rate": 0.00019912737943549944, + "loss": 0.4256, + "step": 340 + }, + { + "epoch": 0.013514035290937873, + "grad_norm": 4.114014148712158, + "learning_rate": 0.00019910163841589765, + "loss": 0.4477, + "step": 350 + }, + { + "epoch": 0.01390015058496467, + "grad_norm": 9.254106521606445, + "learning_rate": 0.0001990758973962959, + "loss": 0.514, + "step": 360 + }, + { + "epoch": 0.014286265878991468, + "grad_norm": 0.8039970993995667, + "learning_rate": 0.00019905015637669408, + "loss": 0.5802, + "step": 370 + }, + { + "epoch": 0.014672381173018263, + "grad_norm": 3.9931838512420654, + "learning_rate": 0.00019902441535709232, + "loss": 0.8973, + "step": 380 + }, + { + "epoch": 0.01505849646704506, + "grad_norm": 1.7645355463027954, + "learning_rate": 0.0001989986743374905, + "loss": 0.7108, + "step": 390 + }, + { + "epoch": 0.015444611761071855, + "grad_norm": 6.8542866706848145, + "learning_rate": 0.00019897293331788875, + "loss": 0.5796, + "step": 400 + }, + { + "epoch": 0.015830727055098654, + "grad_norm": 5.278103828430176, + "learning_rate": 0.00019894719229828696, + "loss": 0.3841, + "step": 410 + }, + { + "epoch": 0.01621684234912545, + "grad_norm": 9.00206184387207, + "learning_rate": 0.00019892145127868514, + "loss": 0.5891, + "step": 420 + }, + { + "epoch": 0.016602957643152245, + "grad_norm": 7.684702396392822, + "learning_rate": 0.00019889571025908339, + "loss": 0.4868, + "step": 430 + }, + { + "epoch": 0.01698907293717904, + "grad_norm": 4.198502540588379, + "learning_rate": 0.00019886996923948157, + "loss": 0.571, + "step": 440 
+ }, + { + "epoch": 0.01737518823120584, + "grad_norm": 7.454501628875732, + "learning_rate": 0.0001988442282198798, + "loss": 0.5133, + "step": 450 + }, + { + "epoch": 0.017761303525232634, + "grad_norm": 13.236722946166992, + "learning_rate": 0.000198818487200278, + "loss": 0.4139, + "step": 460 + }, + { + "epoch": 0.01814741881925943, + "grad_norm": 6.4592390060424805, + "learning_rate": 0.00019879274618067624, + "loss": 0.6078, + "step": 470 + }, + { + "epoch": 0.01853353411328623, + "grad_norm": 11.73417854309082, + "learning_rate": 0.00019876700516107445, + "loss": 0.5472, + "step": 480 + }, + { + "epoch": 0.018919649407313024, + "grad_norm": 2.5162808895111084, + "learning_rate": 0.00019874126414147264, + "loss": 0.6611, + "step": 490 + }, + { + "epoch": 0.01930576470133982, + "grad_norm": 4.9637837409973145, + "learning_rate": 0.00019871552312187088, + "loss": 0.6472, + "step": 500 + }, + { + "epoch": 0.019691879995366618, + "grad_norm": 11.545489311218262, + "learning_rate": 0.00019868978210226906, + "loss": 0.5304, + "step": 510 + }, + { + "epoch": 0.020077995289393413, + "grad_norm": 5.197858810424805, + "learning_rate": 0.0001986640410826673, + "loss": 0.605, + "step": 520 + }, + { + "epoch": 0.02046411058342021, + "grad_norm": 4.935055255889893, + "learning_rate": 0.0001986383000630655, + "loss": 0.6524, + "step": 530 + }, + { + "epoch": 0.020850225877447004, + "grad_norm": 5.838052749633789, + "learning_rate": 0.00019861255904346373, + "loss": 0.4957, + "step": 540 + }, + { + "epoch": 0.021236341171473803, + "grad_norm": 4.682408809661865, + "learning_rate": 0.00019858681802386194, + "loss": 0.8523, + "step": 550 + }, + { + "epoch": 0.021622456465500598, + "grad_norm": 10.720857620239258, + "learning_rate": 0.00019856107700426013, + "loss": 0.516, + "step": 560 + }, + { + "epoch": 0.022008571759527393, + "grad_norm": 6.515562534332275, + "learning_rate": 0.00019853533598465837, + "loss": 0.6095, + "step": 570 + }, + { + "epoch": 0.022394687053554192, + "grad_norm": 3.204960584640503, + "learning_rate": 0.00019850959496505656, + "loss": 0.6624, + "step": 580 + }, + { + "epoch": 0.022780802347580988, + "grad_norm": 2.305497884750366, + "learning_rate": 0.0001984838539454548, + "loss": 0.5986, + "step": 590 + }, + { + "epoch": 0.023166917641607783, + "grad_norm": 13.07105541229248, + "learning_rate": 0.000198458112925853, + "loss": 0.337, + "step": 600 + }, + { + "epoch": 0.023553032935634582, + "grad_norm": 3.1491329669952393, + "learning_rate": 0.00019843237190625122, + "loss": 0.5466, + "step": 610 + }, + { + "epoch": 0.023939148229661377, + "grad_norm": 1.92014479637146, + "learning_rate": 0.00019840663088664944, + "loss": 0.5525, + "step": 620 + }, + { + "epoch": 0.024325263523688172, + "grad_norm": 2.206550121307373, + "learning_rate": 0.00019838088986704762, + "loss": 0.5069, + "step": 630 + }, + { + "epoch": 0.02471137881771497, + "grad_norm": 2.386288642883301, + "learning_rate": 0.00019835514884744586, + "loss": 0.3749, + "step": 640 + }, + { + "epoch": 0.025097494111741767, + "grad_norm": 7.393959045410156, + "learning_rate": 0.00019832940782784405, + "loss": 0.7144, + "step": 650 + }, + { + "epoch": 0.025483609405768562, + "grad_norm": 5.7293171882629395, + "learning_rate": 0.0001983036668082423, + "loss": 0.5052, + "step": 660 + }, + { + "epoch": 0.025869724699795357, + "grad_norm": 6.440220832824707, + "learning_rate": 0.0001982779257886405, + "loss": 0.4343, + "step": 670 + }, + { + "epoch": 0.026255839993822156, + "grad_norm": 0.8553487658500671, + 
"learning_rate": 0.00019825218476903872, + "loss": 0.7562, + "step": 680 + }, + { + "epoch": 0.02664195528784895, + "grad_norm": 3.762784719467163, + "learning_rate": 0.00019822644374943693, + "loss": 0.5593, + "step": 690 + }, + { + "epoch": 0.027028070581875747, + "grad_norm": 11.674392700195312, + "learning_rate": 0.00019820070272983512, + "loss": 0.6069, + "step": 700 + }, + { + "epoch": 0.027414185875902546, + "grad_norm": 8.631232261657715, + "learning_rate": 0.00019817496171023336, + "loss": 0.3584, + "step": 710 + }, + { + "epoch": 0.02780030116992934, + "grad_norm": 5.7163920402526855, + "learning_rate": 0.00019814922069063157, + "loss": 0.5563, + "step": 720 + }, + { + "epoch": 0.028186416463956136, + "grad_norm": 8.186172485351562, + "learning_rate": 0.00019812347967102978, + "loss": 0.5289, + "step": 730 + }, + { + "epoch": 0.028572531757982935, + "grad_norm": 7.287814140319824, + "learning_rate": 0.000198097738651428, + "loss": 0.4543, + "step": 740 + }, + { + "epoch": 0.02895864705200973, + "grad_norm": 6.621245384216309, + "learning_rate": 0.0001980719976318262, + "loss": 0.3244, + "step": 750 + }, + { + "epoch": 0.029344762346036526, + "grad_norm": 3.5209403038024902, + "learning_rate": 0.00019804625661222442, + "loss": 0.5385, + "step": 760 + }, + { + "epoch": 0.029730877640063325, + "grad_norm": 2.562343120574951, + "learning_rate": 0.00019802051559262264, + "loss": 0.4868, + "step": 770 + }, + { + "epoch": 0.03011699293409012, + "grad_norm": 7.782780647277832, + "learning_rate": 0.00019799477457302085, + "loss": 0.5682, + "step": 780 + }, + { + "epoch": 0.030503108228116915, + "grad_norm": 8.173531532287598, + "learning_rate": 0.00019796903355341906, + "loss": 0.3557, + "step": 790 + }, + { + "epoch": 0.03088922352214371, + "grad_norm": 4.502675533294678, + "learning_rate": 0.00019794329253381728, + "loss": 0.517, + "step": 800 + }, + { + "epoch": 0.031275338816170506, + "grad_norm": 6.314894199371338, + "learning_rate": 0.0001979175515142155, + "loss": 0.6392, + "step": 810 + }, + { + "epoch": 0.03166145411019731, + "grad_norm": 7.054763317108154, + "learning_rate": 0.0001978918104946137, + "loss": 0.5823, + "step": 820 + }, + { + "epoch": 0.032047569404224104, + "grad_norm": 1.7847551107406616, + "learning_rate": 0.00019786606947501192, + "loss": 0.4495, + "step": 830 + }, + { + "epoch": 0.0324336846982509, + "grad_norm": 5.268612861633301, + "learning_rate": 0.00019784032845541013, + "loss": 0.4379, + "step": 840 + }, + { + "epoch": 0.032819799992277694, + "grad_norm": 4.834717273712158, + "learning_rate": 0.00019781458743580834, + "loss": 0.5379, + "step": 850 + }, + { + "epoch": 0.03320591528630449, + "grad_norm": 6.077203273773193, + "learning_rate": 0.00019778884641620656, + "loss": 0.5666, + "step": 860 + }, + { + "epoch": 0.033592030580331285, + "grad_norm": 0.9583851099014282, + "learning_rate": 0.00019776310539660477, + "loss": 0.8146, + "step": 870 + }, + { + "epoch": 0.03397814587435808, + "grad_norm": 0.8457469940185547, + "learning_rate": 0.00019773736437700298, + "loss": 0.3497, + "step": 880 + }, + { + "epoch": 0.03436426116838488, + "grad_norm": 5.251153945922852, + "learning_rate": 0.0001977116233574012, + "loss": 0.6299, + "step": 890 + }, + { + "epoch": 0.03475037646241168, + "grad_norm": 4.057605266571045, + "learning_rate": 0.0001976858823377994, + "loss": 0.5829, + "step": 900 + }, + { + "epoch": 0.03513649175643847, + "grad_norm": 7.625199794769287, + "learning_rate": 0.00019766014131819762, + "loss": 0.452, + "step": 910 + }, + { + "epoch": 
0.03552260705046527, + "grad_norm": 4.618866443634033, + "learning_rate": 0.00019763440029859584, + "loss": 0.5693, + "step": 920 + }, + { + "epoch": 0.035908722344492064, + "grad_norm": 8.480955123901367, + "learning_rate": 0.00019760865927899405, + "loss": 0.4894, + "step": 930 + }, + { + "epoch": 0.03629483763851886, + "grad_norm": 6.537581920623779, + "learning_rate": 0.00019758291825939226, + "loss": 0.7396, + "step": 940 + }, + { + "epoch": 0.03668095293254566, + "grad_norm": 8.093205451965332, + "learning_rate": 0.00019755717723979048, + "loss": 0.3702, + "step": 950 + }, + { + "epoch": 0.03706706822657246, + "grad_norm": 2.523141860961914, + "learning_rate": 0.0001975314362201887, + "loss": 0.3746, + "step": 960 + }, + { + "epoch": 0.03745318352059925, + "grad_norm": 1.2707194089889526, + "learning_rate": 0.0001975056952005869, + "loss": 0.3211, + "step": 970 + }, + { + "epoch": 0.03783929881462605, + "grad_norm": 2.818798780441284, + "learning_rate": 0.00019747995418098512, + "loss": 0.6594, + "step": 980 + }, + { + "epoch": 0.03822541410865284, + "grad_norm": 7.373154640197754, + "learning_rate": 0.00019745421316138333, + "loss": 0.7825, + "step": 990 + }, + { + "epoch": 0.03861152940267964, + "grad_norm": 2.8925669193267822, + "learning_rate": 0.00019742847214178154, + "loss": 0.4847, + "step": 1000 + }, + { + "epoch": 0.038997644696706434, + "grad_norm": 10.87833023071289, + "learning_rate": 0.00019740273112217976, + "loss": 0.5098, + "step": 1010 + }, + { + "epoch": 0.039383759990733236, + "grad_norm": 3.8262317180633545, + "learning_rate": 0.00019737699010257797, + "loss": 0.5168, + "step": 1020 + }, + { + "epoch": 0.03976987528476003, + "grad_norm": 2.5567004680633545, + "learning_rate": 0.00019735124908297618, + "loss": 0.5597, + "step": 1030 + }, + { + "epoch": 0.04015599057878683, + "grad_norm": 4.385695457458496, + "learning_rate": 0.0001973255080633744, + "loss": 0.3187, + "step": 1040 + }, + { + "epoch": 0.04054210587281362, + "grad_norm": 1.8186907768249512, + "learning_rate": 0.0001972997670437726, + "loss": 0.6274, + "step": 1050 + }, + { + "epoch": 0.04092822116684042, + "grad_norm": 7.446481704711914, + "learning_rate": 0.00019727402602417082, + "loss": 0.4365, + "step": 1060 + }, + { + "epoch": 0.04131433646086721, + "grad_norm": 8.973576545715332, + "learning_rate": 0.00019724828500456904, + "loss": 0.6518, + "step": 1070 + }, + { + "epoch": 0.04170045175489401, + "grad_norm": 4.984101295471191, + "learning_rate": 0.00019722254398496725, + "loss": 0.4694, + "step": 1080 + }, + { + "epoch": 0.04208656704892081, + "grad_norm": 8.2625093460083, + "learning_rate": 0.00019719680296536546, + "loss": 0.5532, + "step": 1090 + }, + { + "epoch": 0.042472682342947606, + "grad_norm": 2.2425265312194824, + "learning_rate": 0.0001971710619457637, + "loss": 0.4011, + "step": 1100 + }, + { + "epoch": 0.0428587976369744, + "grad_norm": 1.4552969932556152, + "learning_rate": 0.0001971453209261619, + "loss": 0.396, + "step": 1110 + }, + { + "epoch": 0.043244912931001196, + "grad_norm": 2.0675857067108154, + "learning_rate": 0.0001971195799065601, + "loss": 0.3727, + "step": 1120 + }, + { + "epoch": 0.04363102822502799, + "grad_norm": 5.24460506439209, + "learning_rate": 0.00019709383888695832, + "loss": 0.5016, + "step": 1130 + }, + { + "epoch": 0.04401714351905479, + "grad_norm": 5.524955749511719, + "learning_rate": 0.00019706809786735653, + "loss": 0.5866, + "step": 1140 + }, + { + "epoch": 0.04440325881308159, + "grad_norm": 4.2749199867248535, + "learning_rate": 
0.00019704235684775474, + "loss": 0.4591, + "step": 1150 + }, + { + "epoch": 0.044789374107108385, + "grad_norm": 3.460395336151123, + "learning_rate": 0.00019701661582815296, + "loss": 0.5275, + "step": 1160 + }, + { + "epoch": 0.04517548940113518, + "grad_norm": 1.3979772329330444, + "learning_rate": 0.0001969908748085512, + "loss": 0.3375, + "step": 1170 + }, + { + "epoch": 0.045561604695161975, + "grad_norm": 2.29923939704895, + "learning_rate": 0.00019696513378894938, + "loss": 0.5683, + "step": 1180 + }, + { + "epoch": 0.04594771998918877, + "grad_norm": 3.211496353149414, + "learning_rate": 0.0001969393927693476, + "loss": 0.7122, + "step": 1190 + }, + { + "epoch": 0.046333835283215566, + "grad_norm": 4.18447208404541, + "learning_rate": 0.0001969136517497458, + "loss": 0.5149, + "step": 1200 + }, + { + "epoch": 0.04671995057724236, + "grad_norm": 14.650918960571289, + "learning_rate": 0.00019688791073014402, + "loss": 0.6384, + "step": 1210 + }, + { + "epoch": 0.047106065871269164, + "grad_norm": 4.956239700317383, + "learning_rate": 0.00019686216971054226, + "loss": 0.3602, + "step": 1220 + }, + { + "epoch": 0.04749218116529596, + "grad_norm": 4.0486860275268555, + "learning_rate": 0.00019683642869094045, + "loss": 0.7719, + "step": 1230 + }, + { + "epoch": 0.047878296459322754, + "grad_norm": 4.587133407592773, + "learning_rate": 0.0001968106876713387, + "loss": 0.4079, + "step": 1240 + }, + { + "epoch": 0.04826441175334955, + "grad_norm": 0.7830008268356323, + "learning_rate": 0.00019678494665173688, + "loss": 0.5841, + "step": 1250 + }, + { + "epoch": 0.048650527047376345, + "grad_norm": 5.378068447113037, + "learning_rate": 0.0001967592056321351, + "loss": 0.3226, + "step": 1260 + }, + { + "epoch": 0.04903664234140314, + "grad_norm": 4.002605438232422, + "learning_rate": 0.0001967334646125333, + "loss": 0.4411, + "step": 1270 + }, + { + "epoch": 0.04942275763542994, + "grad_norm": 4.695134162902832, + "learning_rate": 0.00019670772359293152, + "loss": 0.415, + "step": 1280 + }, + { + "epoch": 0.04980887292945674, + "grad_norm": 6.046143054962158, + "learning_rate": 0.00019668198257332976, + "loss": 0.463, + "step": 1290 + }, + { + "epoch": 0.05019498822348353, + "grad_norm": 1.8300361633300781, + "learning_rate": 0.00019665624155372794, + "loss": 0.408, + "step": 1300 + }, + { + "epoch": 0.05058110351751033, + "grad_norm": 5.80141544342041, + "learning_rate": 0.00019663050053412618, + "loss": 0.4481, + "step": 1310 + }, + { + "epoch": 0.050967218811537124, + "grad_norm": 4.103593349456787, + "learning_rate": 0.00019660475951452437, + "loss": 0.3054, + "step": 1320 + }, + { + "epoch": 0.05135333410556392, + "grad_norm": 9.129929542541504, + "learning_rate": 0.00019657901849492258, + "loss": 0.5554, + "step": 1330 + }, + { + "epoch": 0.051739449399590715, + "grad_norm": 6.979663372039795, + "learning_rate": 0.0001965532774753208, + "loss": 0.388, + "step": 1340 + }, + { + "epoch": 0.05212556469361752, + "grad_norm": 6.329915523529053, + "learning_rate": 0.000196527536455719, + "loss": 0.8894, + "step": 1350 + }, + { + "epoch": 0.05251167998764431, + "grad_norm": 0.2686227262020111, + "learning_rate": 0.00019650179543611725, + "loss": 0.5032, + "step": 1360 + }, + { + "epoch": 0.05289779528167111, + "grad_norm": 4.818896770477295, + "learning_rate": 0.00019647605441651544, + "loss": 0.3417, + "step": 1370 + }, + { + "epoch": 0.0532839105756979, + "grad_norm": 6.252008438110352, + "learning_rate": 0.00019645031339691368, + "loss": 0.398, + "step": 1380 + }, + { + "epoch": 
0.0536700258697247, + "grad_norm": 1.2734620571136475, + "learning_rate": 0.00019642457237731186, + "loss": 0.5369, + "step": 1390 + }, + { + "epoch": 0.054056141163751494, + "grad_norm": 6.511690616607666, + "learning_rate": 0.00019639883135771008, + "loss": 0.4646, + "step": 1400 + }, + { + "epoch": 0.054442256457778296, + "grad_norm": 3.2352371215820312, + "learning_rate": 0.00019637309033810832, + "loss": 0.3805, + "step": 1410 + }, + { + "epoch": 0.05482837175180509, + "grad_norm": 1.0574132204055786, + "learning_rate": 0.0001963473493185065, + "loss": 0.4572, + "step": 1420 + }, + { + "epoch": 0.055214487045831886, + "grad_norm": 2.8525452613830566, + "learning_rate": 0.00019632160829890474, + "loss": 0.4417, + "step": 1430 + }, + { + "epoch": 0.05560060233985868, + "grad_norm": 3.588179111480713, + "learning_rate": 0.00019629586727930293, + "loss": 0.6214, + "step": 1440 + }, + { + "epoch": 0.05598671763388548, + "grad_norm": 3.969320058822632, + "learning_rate": 0.00019627012625970117, + "loss": 0.6114, + "step": 1450 + }, + { + "epoch": 0.05637283292791227, + "grad_norm": 3.465053081512451, + "learning_rate": 0.00019624438524009936, + "loss": 0.6066, + "step": 1460 + }, + { + "epoch": 0.05675894822193907, + "grad_norm": 3.5419201850891113, + "learning_rate": 0.00019621864422049757, + "loss": 0.3906, + "step": 1470 + }, + { + "epoch": 0.05714506351596587, + "grad_norm": 3.4580233097076416, + "learning_rate": 0.0001961929032008958, + "loss": 0.9283, + "step": 1480 + }, + { + "epoch": 0.057531178809992665, + "grad_norm": 4.222144603729248, + "learning_rate": 0.000196167162181294, + "loss": 0.4225, + "step": 1490 + }, + { + "epoch": 0.05791729410401946, + "grad_norm": 0.8072681427001953, + "learning_rate": 0.00019614142116169224, + "loss": 0.5012, + "step": 1500 + }, + { + "epoch": 0.058303409398046256, + "grad_norm": 2.827258348464966, + "learning_rate": 0.00019611568014209042, + "loss": 0.4333, + "step": 1510 + }, + { + "epoch": 0.05868952469207305, + "grad_norm": 1.3494776487350464, + "learning_rate": 0.00019608993912248866, + "loss": 0.3019, + "step": 1520 + }, + { + "epoch": 0.05907563998609985, + "grad_norm": 4.3279900550842285, + "learning_rate": 0.00019606419810288688, + "loss": 0.4807, + "step": 1530 + }, + { + "epoch": 0.05946175528012665, + "grad_norm": 3.8996474742889404, + "learning_rate": 0.0001960384570832851, + "loss": 0.4876, + "step": 1540 + }, + { + "epoch": 0.059847870574153444, + "grad_norm": 5.255978584289551, + "learning_rate": 0.0001960127160636833, + "loss": 0.4661, + "step": 1550 + }, + { + "epoch": 0.06023398586818024, + "grad_norm": 5.172120094299316, + "learning_rate": 0.0001959869750440815, + "loss": 0.4885, + "step": 1560 + }, + { + "epoch": 0.060620101162207035, + "grad_norm": 5.385959625244141, + "learning_rate": 0.00019596123402447973, + "loss": 0.2995, + "step": 1570 + }, + { + "epoch": 0.06100621645623383, + "grad_norm": 3.9922871589660645, + "learning_rate": 0.00019593549300487792, + "loss": 0.4568, + "step": 1580 + }, + { + "epoch": 0.061392331750260626, + "grad_norm": 6.048642158508301, + "learning_rate": 0.00019590975198527616, + "loss": 0.4649, + "step": 1590 + }, + { + "epoch": 0.06177844704428742, + "grad_norm": 1.0315563678741455, + "learning_rate": 0.00019588401096567437, + "loss": 0.5175, + "step": 1600 + }, + { + "epoch": 0.06216456233831422, + "grad_norm": 12.403678894042969, + "learning_rate": 0.00019585826994607258, + "loss": 0.523, + "step": 1610 + }, + { + "epoch": 0.06255067763234101, + "grad_norm": 6.127188205718994, + 
"learning_rate": 0.0001958325289264708, + "loss": 0.5861, + "step": 1620 + }, + { + "epoch": 0.06293679292636781, + "grad_norm": 6.398592948913574, + "learning_rate": 0.00019580678790686898, + "loss": 0.7471, + "step": 1630 + }, + { + "epoch": 0.06332290822039462, + "grad_norm": 4.127200603485107, + "learning_rate": 0.00019578104688726722, + "loss": 0.4921, + "step": 1640 + }, + { + "epoch": 0.06370902351442141, + "grad_norm": 4.601541042327881, + "learning_rate": 0.0001957553058676654, + "loss": 0.3682, + "step": 1650 + }, + { + "epoch": 0.06409513880844821, + "grad_norm": 6.32781457901001, + "learning_rate": 0.00019572956484806365, + "loss": 0.3748, + "step": 1660 + }, + { + "epoch": 0.064481254102475, + "grad_norm": 3.7280173301696777, + "learning_rate": 0.00019570382382846186, + "loss": 0.5912, + "step": 1670 + }, + { + "epoch": 0.0648673693965018, + "grad_norm": 6.7821946144104, + "learning_rate": 0.00019567808280886008, + "loss": 0.4073, + "step": 1680 + }, + { + "epoch": 0.0652534846905286, + "grad_norm": 1.4645791053771973, + "learning_rate": 0.0001956523417892583, + "loss": 0.7164, + "step": 1690 + }, + { + "epoch": 0.06563959998455539, + "grad_norm": 2.367361545562744, + "learning_rate": 0.00019562660076965648, + "loss": 0.3859, + "step": 1700 + }, + { + "epoch": 0.06602571527858218, + "grad_norm": 2.198493480682373, + "learning_rate": 0.00019560085975005472, + "loss": 0.4928, + "step": 1710 + }, + { + "epoch": 0.06641183057260898, + "grad_norm": 1.882567048072815, + "learning_rate": 0.00019557511873045293, + "loss": 0.5861, + "step": 1720 + }, + { + "epoch": 0.06679794586663577, + "grad_norm": 6.324089527130127, + "learning_rate": 0.00019554937771085114, + "loss": 0.6249, + "step": 1730 + }, + { + "epoch": 0.06718406116066257, + "grad_norm": 4.283392906188965, + "learning_rate": 0.00019552363669124936, + "loss": 0.5403, + "step": 1740 + }, + { + "epoch": 0.06757017645468937, + "grad_norm": 4.464428424835205, + "learning_rate": 0.00019549789567164757, + "loss": 0.5815, + "step": 1750 + }, + { + "epoch": 0.06795629174871616, + "grad_norm": 0.32923218607902527, + "learning_rate": 0.00019547215465204578, + "loss": 0.3791, + "step": 1760 + }, + { + "epoch": 0.06834240704274297, + "grad_norm": 5.255763053894043, + "learning_rate": 0.00019544641363244397, + "loss": 0.4252, + "step": 1770 + }, + { + "epoch": 0.06872852233676977, + "grad_norm": 2.1615116596221924, + "learning_rate": 0.0001954206726128422, + "loss": 0.4457, + "step": 1780 + }, + { + "epoch": 0.06911463763079656, + "grad_norm": 2.0209217071533203, + "learning_rate": 0.00019539493159324042, + "loss": 0.4377, + "step": 1790 + }, + { + "epoch": 0.06950075292482336, + "grad_norm": 8.12317943572998, + "learning_rate": 0.00019536919057363864, + "loss": 0.4025, + "step": 1800 + }, + { + "epoch": 0.06988686821885015, + "grad_norm": 1.7034660577774048, + "learning_rate": 0.00019534344955403685, + "loss": 0.2915, + "step": 1810 + }, + { + "epoch": 0.07027298351287695, + "grad_norm": 4.640942096710205, + "learning_rate": 0.00019531770853443506, + "loss": 0.6588, + "step": 1820 + }, + { + "epoch": 0.07065909880690374, + "grad_norm": 4.202883243560791, + "learning_rate": 0.00019529196751483328, + "loss": 0.4442, + "step": 1830 + }, + { + "epoch": 0.07104521410093054, + "grad_norm": 3.26898193359375, + "learning_rate": 0.00019526622649523146, + "loss": 0.5065, + "step": 1840 + }, + { + "epoch": 0.07143132939495733, + "grad_norm": 8.189995765686035, + "learning_rate": 0.0001952404854756297, + "loss": 0.5258, + "step": 1850 + }, + { + 
"epoch": 0.07181744468898413, + "grad_norm": 3.2618284225463867, + "learning_rate": 0.00019521474445602792, + "loss": 0.5037, + "step": 1860 + }, + { + "epoch": 0.07220355998301092, + "grad_norm": 2.168548583984375, + "learning_rate": 0.00019518900343642613, + "loss": 0.4887, + "step": 1870 + }, + { + "epoch": 0.07258967527703772, + "grad_norm": 2.2029404640197754, + "learning_rate": 0.00019516326241682434, + "loss": 0.4646, + "step": 1880 + }, + { + "epoch": 0.07297579057106451, + "grad_norm": 1.561713695526123, + "learning_rate": 0.00019513752139722256, + "loss": 0.432, + "step": 1890 + }, + { + "epoch": 0.07336190586509132, + "grad_norm": 3.428372621536255, + "learning_rate": 0.00019511178037762077, + "loss": 0.4124, + "step": 1900 + }, + { + "epoch": 0.07374802115911812, + "grad_norm": 5.6706671714782715, + "learning_rate": 0.00019508603935801898, + "loss": 0.4431, + "step": 1910 + }, + { + "epoch": 0.07413413645314491, + "grad_norm": 8.872734069824219, + "learning_rate": 0.0001950602983384172, + "loss": 0.7001, + "step": 1920 + }, + { + "epoch": 0.07452025174717171, + "grad_norm": 1.6821974515914917, + "learning_rate": 0.0001950345573188154, + "loss": 0.4204, + "step": 1930 + }, + { + "epoch": 0.0749063670411985, + "grad_norm": 3.117480993270874, + "learning_rate": 0.00019500881629921362, + "loss": 0.3748, + "step": 1940 + }, + { + "epoch": 0.0752924823352253, + "grad_norm": 0.8384984731674194, + "learning_rate": 0.00019498307527961184, + "loss": 0.2636, + "step": 1950 + }, + { + "epoch": 0.0756785976292521, + "grad_norm": 2.8956708908081055, + "learning_rate": 0.00019495733426001005, + "loss": 0.4514, + "step": 1960 + }, + { + "epoch": 0.07606471292327889, + "grad_norm": 11.233087539672852, + "learning_rate": 0.00019493159324040826, + "loss": 0.4002, + "step": 1970 + }, + { + "epoch": 0.07645082821730569, + "grad_norm": 4.066893100738525, + "learning_rate": 0.00019490585222080648, + "loss": 0.4449, + "step": 1980 + }, + { + "epoch": 0.07683694351133248, + "grad_norm": 4.854077339172363, + "learning_rate": 0.0001948801112012047, + "loss": 0.4961, + "step": 1990 + }, + { + "epoch": 0.07722305880535928, + "grad_norm": 2.5722827911376953, + "learning_rate": 0.0001948543701816029, + "loss": 0.3743, + "step": 2000 + }, + { + "epoch": 0.07760917409938607, + "grad_norm": 5.842077255249023, + "learning_rate": 0.00019482862916200112, + "loss": 0.2906, + "step": 2010 + }, + { + "epoch": 0.07799528939341287, + "grad_norm": 6.163092136383057, + "learning_rate": 0.00019480288814239933, + "loss": 0.4374, + "step": 2020 + }, + { + "epoch": 0.07838140468743968, + "grad_norm": 4.589334487915039, + "learning_rate": 0.00019477714712279754, + "loss": 0.484, + "step": 2030 + }, + { + "epoch": 0.07876751998146647, + "grad_norm": 6.951212406158447, + "learning_rate": 0.00019475140610319576, + "loss": 0.5767, + "step": 2040 + }, + { + "epoch": 0.07915363527549327, + "grad_norm": 3.3662521839141846, + "learning_rate": 0.00019472566508359397, + "loss": 0.5566, + "step": 2050 + }, + { + "epoch": 0.07953975056952006, + "grad_norm": 1.6602391004562378, + "learning_rate": 0.00019469992406399218, + "loss": 0.1436, + "step": 2060 + }, + { + "epoch": 0.07992586586354686, + "grad_norm": 6.451857089996338, + "learning_rate": 0.0001946741830443904, + "loss": 0.3778, + "step": 2070 + }, + { + "epoch": 0.08031198115757365, + "grad_norm": 3.7249560356140137, + "learning_rate": 0.0001946484420247886, + "loss": 0.5391, + "step": 2080 + }, + { + "epoch": 0.08069809645160045, + "grad_norm": 4.138098239898682, + 
"learning_rate": 0.00019462270100518682, + "loss": 0.3598, + "step": 2090 + }, + { + "epoch": 0.08108421174562724, + "grad_norm": 3.224778175354004, + "learning_rate": 0.00019459695998558504, + "loss": 0.2967, + "step": 2100 + }, + { + "epoch": 0.08147032703965404, + "grad_norm": 1.3951358795166016, + "learning_rate": 0.00019457121896598325, + "loss": 0.2698, + "step": 2110 + }, + { + "epoch": 0.08185644233368083, + "grad_norm": 5.956802845001221, + "learning_rate": 0.00019454547794638146, + "loss": 0.451, + "step": 2120 + }, + { + "epoch": 0.08224255762770763, + "grad_norm": 3.456360101699829, + "learning_rate": 0.00019451973692677968, + "loss": 0.3365, + "step": 2130 + }, + { + "epoch": 0.08262867292173443, + "grad_norm": 2.9433653354644775, + "learning_rate": 0.0001944939959071779, + "loss": 0.4424, + "step": 2140 + }, + { + "epoch": 0.08301478821576122, + "grad_norm": 3.136000871658325, + "learning_rate": 0.0001944682548875761, + "loss": 0.6224, + "step": 2150 + }, + { + "epoch": 0.08340090350978802, + "grad_norm": 2.669084310531616, + "learning_rate": 0.00019444251386797432, + "loss": 0.4435, + "step": 2160 + }, + { + "epoch": 0.08378701880381483, + "grad_norm": 4.573731899261475, + "learning_rate": 0.00019441677284837253, + "loss": 0.5555, + "step": 2170 + }, + { + "epoch": 0.08417313409784162, + "grad_norm": 6.354156017303467, + "learning_rate": 0.00019439103182877074, + "loss": 0.4232, + "step": 2180 + }, + { + "epoch": 0.08455924939186842, + "grad_norm": 2.993691921234131, + "learning_rate": 0.00019436529080916895, + "loss": 0.51, + "step": 2190 + }, + { + "epoch": 0.08494536468589521, + "grad_norm": 3.6496782302856445, + "learning_rate": 0.00019433954978956717, + "loss": 0.4031, + "step": 2200 + }, + { + "epoch": 0.085331479979922, + "grad_norm": 1.9039051532745361, + "learning_rate": 0.00019431380876996538, + "loss": 0.4407, + "step": 2210 + }, + { + "epoch": 0.0857175952739488, + "grad_norm": 2.3211915493011475, + "learning_rate": 0.00019428806775036362, + "loss": 0.4057, + "step": 2220 + }, + { + "epoch": 0.0861037105679756, + "grad_norm": 4.883905410766602, + "learning_rate": 0.0001942623267307618, + "loss": 0.6223, + "step": 2230 + }, + { + "epoch": 0.08648982586200239, + "grad_norm": 2.164484977722168, + "learning_rate": 0.00019423658571116005, + "loss": 0.3141, + "step": 2240 + }, + { + "epoch": 0.08687594115602919, + "grad_norm": 2.2078909873962402, + "learning_rate": 0.00019421084469155823, + "loss": 0.3523, + "step": 2250 + }, + { + "epoch": 0.08726205645005598, + "grad_norm": 1.0987967252731323, + "learning_rate": 0.00019418510367195645, + "loss": 0.4013, + "step": 2260 + }, + { + "epoch": 0.08764817174408278, + "grad_norm": 2.418327569961548, + "learning_rate": 0.00019415936265235466, + "loss": 0.581, + "step": 2270 + }, + { + "epoch": 0.08803428703810957, + "grad_norm": 4.029652118682861, + "learning_rate": 0.00019413362163275287, + "loss": 0.5244, + "step": 2280 + }, + { + "epoch": 0.08842040233213637, + "grad_norm": 3.4661777019500732, + "learning_rate": 0.00019410788061315112, + "loss": 0.4531, + "step": 2290 + }, + { + "epoch": 0.08880651762616318, + "grad_norm": 1.0475856065750122, + "learning_rate": 0.0001940821395935493, + "loss": 0.4362, + "step": 2300 + }, + { + "epoch": 0.08919263292018997, + "grad_norm": 4.021854400634766, + "learning_rate": 0.00019405639857394754, + "loss": 0.4532, + "step": 2310 + }, + { + "epoch": 0.08957874821421677, + "grad_norm": 1.836438536643982, + "learning_rate": 0.00019403065755434573, + "loss": 0.6858, + "step": 2320 + }, + { + 
"epoch": 0.08996486350824356, + "grad_norm": 2.5251567363739014, + "learning_rate": 0.00019400491653474394, + "loss": 0.3619, + "step": 2330 + }, + { + "epoch": 0.09035097880227036, + "grad_norm": 3.067208766937256, + "learning_rate": 0.00019397917551514215, + "loss": 0.7376, + "step": 2340 + }, + { + "epoch": 0.09073709409629716, + "grad_norm": 0.9124518036842346, + "learning_rate": 0.00019395343449554037, + "loss": 0.4193, + "step": 2350 + }, + { + "epoch": 0.09112320939032395, + "grad_norm": 3.8170812129974365, + "learning_rate": 0.0001939276934759386, + "loss": 0.5393, + "step": 2360 + }, + { + "epoch": 0.09150932468435075, + "grad_norm": 8.19250202178955, + "learning_rate": 0.0001939019524563368, + "loss": 0.424, + "step": 2370 + }, + { + "epoch": 0.09189543997837754, + "grad_norm": 4.459112167358398, + "learning_rate": 0.00019387621143673503, + "loss": 0.3278, + "step": 2380 + }, + { + "epoch": 0.09228155527240434, + "grad_norm": 5.578339576721191, + "learning_rate": 0.00019385047041713322, + "loss": 0.5223, + "step": 2390 + }, + { + "epoch": 0.09266767056643113, + "grad_norm": 1.3707878589630127, + "learning_rate": 0.00019382472939753143, + "loss": 0.3004, + "step": 2400 + }, + { + "epoch": 0.09305378586045793, + "grad_norm": 5.0041184425354, + "learning_rate": 0.00019379898837792967, + "loss": 0.4378, + "step": 2410 + }, + { + "epoch": 0.09343990115448472, + "grad_norm": 5.668384552001953, + "learning_rate": 0.00019377324735832786, + "loss": 0.499, + "step": 2420 + }, + { + "epoch": 0.09382601644851153, + "grad_norm": 5.605838775634766, + "learning_rate": 0.0001937475063387261, + "loss": 0.5642, + "step": 2430 + }, + { + "epoch": 0.09421213174253833, + "grad_norm": 5.055904865264893, + "learning_rate": 0.0001937217653191243, + "loss": 0.6225, + "step": 2440 + }, + { + "epoch": 0.09459824703656512, + "grad_norm": 3.1779348850250244, + "learning_rate": 0.00019369602429952253, + "loss": 0.3673, + "step": 2450 + }, + { + "epoch": 0.09498436233059192, + "grad_norm": 2.540269136428833, + "learning_rate": 0.00019367028327992071, + "loss": 0.3499, + "step": 2460 + }, + { + "epoch": 0.09537047762461871, + "grad_norm": 2.284114122390747, + "learning_rate": 0.00019364454226031893, + "loss": 0.5405, + "step": 2470 + }, + { + "epoch": 0.09575659291864551, + "grad_norm": 6.752682685852051, + "learning_rate": 0.00019361880124071717, + "loss": 0.5594, + "step": 2480 + }, + { + "epoch": 0.0961427082126723, + "grad_norm": 2.5659310817718506, + "learning_rate": 0.00019359306022111535, + "loss": 0.5872, + "step": 2490 + }, + { + "epoch": 0.0965288235066991, + "grad_norm": 4.503110885620117, + "learning_rate": 0.0001935673192015136, + "loss": 0.5171, + "step": 2500 + }, + { + "epoch": 0.0969149388007259, + "grad_norm": 1.7715080976486206, + "learning_rate": 0.00019354157818191178, + "loss": 0.6131, + "step": 2510 + }, + { + "epoch": 0.09730105409475269, + "grad_norm": 4.479000568389893, + "learning_rate": 0.00019351583716231002, + "loss": 0.4396, + "step": 2520 + }, + { + "epoch": 0.09768716938877949, + "grad_norm": 0.9424387812614441, + "learning_rate": 0.00019349009614270823, + "loss": 0.3166, + "step": 2530 + }, + { + "epoch": 0.09807328468280628, + "grad_norm": 3.792689800262451, + "learning_rate": 0.00019346435512310642, + "loss": 0.2591, + "step": 2540 + }, + { + "epoch": 0.09845939997683308, + "grad_norm": 2.9132003784179688, + "learning_rate": 0.00019343861410350466, + "loss": 0.3523, + "step": 2550 + }, + { + "epoch": 0.09884551527085988, + "grad_norm": 1.6421749591827393, + "learning_rate": 
0.00019341287308390285, + "loss": 0.2805, + "step": 2560 + }, + { + "epoch": 0.09923163056488668, + "grad_norm": 1.9469813108444214, + "learning_rate": 0.0001933871320643011, + "loss": 0.3929, + "step": 2570 + }, + { + "epoch": 0.09961774585891348, + "grad_norm": 3.081062078475952, + "learning_rate": 0.00019336139104469927, + "loss": 0.569, + "step": 2580 + }, + { + "epoch": 0.10000386115294027, + "grad_norm": 4.728143215179443, + "learning_rate": 0.00019333565002509751, + "loss": 0.3443, + "step": 2590 + }, + { + "epoch": 0.10038997644696707, + "grad_norm": 2.4117422103881836, + "learning_rate": 0.00019330990900549573, + "loss": 0.4492, + "step": 2600 + }, + { + "epoch": 0.10077609174099386, + "grad_norm": 5.794168472290039, + "learning_rate": 0.00019328416798589391, + "loss": 0.5088, + "step": 2610 + }, + { + "epoch": 0.10116220703502066, + "grad_norm": 1.0662094354629517, + "learning_rate": 0.00019325842696629215, + "loss": 0.4683, + "step": 2620 + }, + { + "epoch": 0.10154832232904745, + "grad_norm": 2.13590931892395, + "learning_rate": 0.00019323268594669034, + "loss": 0.5833, + "step": 2630 + }, + { + "epoch": 0.10193443762307425, + "grad_norm": 7.52834415435791, + "learning_rate": 0.00019320694492708858, + "loss": 0.54, + "step": 2640 + }, + { + "epoch": 0.10232055291710104, + "grad_norm": 6.155847072601318, + "learning_rate": 0.00019318120390748677, + "loss": 0.6809, + "step": 2650 + }, + { + "epoch": 0.10270666821112784, + "grad_norm": 6.527890205383301, + "learning_rate": 0.000193155462887885, + "loss": 0.5239, + "step": 2660 + }, + { + "epoch": 0.10309278350515463, + "grad_norm": 3.3918986320495605, + "learning_rate": 0.00019312972186828322, + "loss": 0.4636, + "step": 2670 + }, + { + "epoch": 0.10347889879918143, + "grad_norm": 2.0933191776275635, + "learning_rate": 0.0001931039808486814, + "loss": 0.5288, + "step": 2680 + }, + { + "epoch": 0.10386501409320824, + "grad_norm": 2.4386465549468994, + "learning_rate": 0.00019307823982907965, + "loss": 0.5496, + "step": 2690 + }, + { + "epoch": 0.10425112938723503, + "grad_norm": 2.885315418243408, + "learning_rate": 0.00019305249880947783, + "loss": 0.5928, + "step": 2700 + }, + { + "epoch": 0.10463724468126183, + "grad_norm": 4.986598968505859, + "learning_rate": 0.00019302675778987607, + "loss": 0.3513, + "step": 2710 + }, + { + "epoch": 0.10502335997528862, + "grad_norm": 2.7999277114868164, + "learning_rate": 0.0001930010167702743, + "loss": 0.4484, + "step": 2720 + }, + { + "epoch": 0.10540947526931542, + "grad_norm": 1.6467676162719727, + "learning_rate": 0.0001929752757506725, + "loss": 0.4729, + "step": 2730 + }, + { + "epoch": 0.10579559056334222, + "grad_norm": 2.168477773666382, + "learning_rate": 0.00019294953473107071, + "loss": 0.3579, + "step": 2740 + }, + { + "epoch": 0.10618170585736901, + "grad_norm": 1.5439807176589966, + "learning_rate": 0.00019292379371146893, + "loss": 0.5715, + "step": 2750 + }, + { + "epoch": 0.1065678211513958, + "grad_norm": 1.2175947427749634, + "learning_rate": 0.00019289805269186714, + "loss": 0.4059, + "step": 2760 + }, + { + "epoch": 0.1069539364454226, + "grad_norm": 2.649782419204712, + "learning_rate": 0.00019287231167226533, + "loss": 0.7864, + "step": 2770 + }, + { + "epoch": 0.1073400517394494, + "grad_norm": 3.698002815246582, + "learning_rate": 0.00019284657065266357, + "loss": 0.5107, + "step": 2780 + }, + { + "epoch": 0.10772616703347619, + "grad_norm": 1.7418729066848755, + "learning_rate": 0.00019282082963306178, + "loss": 0.332, + "step": 2790 + }, + { + "epoch": 
0.10811228232750299, + "grad_norm": 2.2264151573181152, + "learning_rate": 0.00019279508861346, + "loss": 0.3944, + "step": 2800 + }, + { + "epoch": 0.10849839762152978, + "grad_norm": 1.3407092094421387, + "learning_rate": 0.0001927693475938582, + "loss": 0.4603, + "step": 2810 + }, + { + "epoch": 0.10888451291555659, + "grad_norm": 0.3719189763069153, + "learning_rate": 0.00019274360657425642, + "loss": 0.496, + "step": 2820 + }, + { + "epoch": 0.10927062820958339, + "grad_norm": 4.728814125061035, + "learning_rate": 0.00019271786555465463, + "loss": 0.4079, + "step": 2830 + }, + { + "epoch": 0.10965674350361018, + "grad_norm": 5.007620334625244, + "learning_rate": 0.00019269212453505285, + "loss": 0.5221, + "step": 2840 + }, + { + "epoch": 0.11004285879763698, + "grad_norm": 2.7476320266723633, + "learning_rate": 0.00019266638351545106, + "loss": 0.3705, + "step": 2850 + }, + { + "epoch": 0.11042897409166377, + "grad_norm": 2.2960126399993896, + "learning_rate": 0.00019264064249584927, + "loss": 0.6656, + "step": 2860 + }, + { + "epoch": 0.11081508938569057, + "grad_norm": 0.9589812755584717, + "learning_rate": 0.0001926149014762475, + "loss": 0.6977, + "step": 2870 + }, + { + "epoch": 0.11120120467971736, + "grad_norm": 2.274984121322632, + "learning_rate": 0.0001925891604566457, + "loss": 0.4237, + "step": 2880 + }, + { + "epoch": 0.11158731997374416, + "grad_norm": 1.8849111795425415, + "learning_rate": 0.00019256341943704391, + "loss": 0.2357, + "step": 2890 + }, + { + "epoch": 0.11197343526777095, + "grad_norm": 2.7264740467071533, + "learning_rate": 0.00019253767841744213, + "loss": 0.3422, + "step": 2900 + }, + { + "epoch": 0.11235955056179775, + "grad_norm": 3.832827568054199, + "learning_rate": 0.00019251193739784034, + "loss": 0.3861, + "step": 2910 + }, + { + "epoch": 0.11274566585582455, + "grad_norm": 2.3612313270568848, + "learning_rate": 0.00019248619637823855, + "loss": 0.3299, + "step": 2920 + }, + { + "epoch": 0.11313178114985134, + "grad_norm": 2.2509396076202393, + "learning_rate": 0.00019246045535863677, + "loss": 0.6027, + "step": 2930 + }, + { + "epoch": 0.11351789644387814, + "grad_norm": 2.7600464820861816, + "learning_rate": 0.00019243471433903498, + "loss": 0.3864, + "step": 2940 + }, + { + "epoch": 0.11390401173790494, + "grad_norm": 5.956289768218994, + "learning_rate": 0.0001924089733194332, + "loss": 0.4669, + "step": 2950 + }, + { + "epoch": 0.11429012703193174, + "grad_norm": 4.651761531829834, + "learning_rate": 0.0001923832322998314, + "loss": 0.5774, + "step": 2960 + }, + { + "epoch": 0.11467624232595854, + "grad_norm": 1.1770590543746948, + "learning_rate": 0.00019235749128022962, + "loss": 0.3951, + "step": 2970 + }, + { + "epoch": 0.11506235761998533, + "grad_norm": 0.8117956519126892, + "learning_rate": 0.00019233175026062783, + "loss": 0.3919, + "step": 2980 + }, + { + "epoch": 0.11544847291401213, + "grad_norm": 1.318812608718872, + "learning_rate": 0.00019230600924102605, + "loss": 0.2905, + "step": 2990 + }, + { + "epoch": 0.11583458820803892, + "grad_norm": 1.981382966041565, + "learning_rate": 0.00019228026822142426, + "loss": 0.5312, + "step": 3000 + }, + { + "epoch": 0.11622070350206572, + "grad_norm": 1.9083003997802734, + "learning_rate": 0.00019225452720182247, + "loss": 0.3129, + "step": 3010 + }, + { + "epoch": 0.11660681879609251, + "grad_norm": 2.7581653594970703, + "learning_rate": 0.0001922287861822207, + "loss": 0.3954, + "step": 3020 + }, + { + "epoch": 0.11699293409011931, + "grad_norm": 1.065090537071228, + "learning_rate": 
0.0001922030451626189, + "loss": 0.2722, + "step": 3030 + }, + { + "epoch": 0.1173790493841461, + "grad_norm": 0.600864827632904, + "learning_rate": 0.0001921773041430171, + "loss": 0.493, + "step": 3040 + }, + { + "epoch": 0.1177651646781729, + "grad_norm": 4.4449052810668945, + "learning_rate": 0.00019215156312341533, + "loss": 0.4982, + "step": 3050 + }, + { + "epoch": 0.1181512799721997, + "grad_norm": 3.34476637840271, + "learning_rate": 0.00019212582210381354, + "loss": 0.3204, + "step": 3060 + }, + { + "epoch": 0.11853739526622649, + "grad_norm": 1.7432445287704468, + "learning_rate": 0.00019210008108421175, + "loss": 0.6601, + "step": 3070 + }, + { + "epoch": 0.1189235105602533, + "grad_norm": 1.908324956893921, + "learning_rate": 0.00019207434006460997, + "loss": 0.5947, + "step": 3080 + }, + { + "epoch": 0.1193096258542801, + "grad_norm": 5.373056888580322, + "learning_rate": 0.00019204859904500818, + "loss": 0.5169, + "step": 3090 + }, + { + "epoch": 0.11969574114830689, + "grad_norm": 0.861535370349884, + "learning_rate": 0.0001920228580254064, + "loss": 0.3829, + "step": 3100 + }, + { + "epoch": 0.12008185644233368, + "grad_norm": 1.2700462341308594, + "learning_rate": 0.0001919971170058046, + "loss": 0.4475, + "step": 3110 + }, + { + "epoch": 0.12046797173636048, + "grad_norm": 2.9959444999694824, + "learning_rate": 0.00019197137598620282, + "loss": 0.4704, + "step": 3120 + }, + { + "epoch": 0.12085408703038727, + "grad_norm": 0.280109167098999, + "learning_rate": 0.00019194563496660103, + "loss": 0.3732, + "step": 3130 + }, + { + "epoch": 0.12124020232441407, + "grad_norm": 0.9746024012565613, + "learning_rate": 0.00019191989394699925, + "loss": 0.4693, + "step": 3140 + }, + { + "epoch": 0.12162631761844087, + "grad_norm": 1.7267721891403198, + "learning_rate": 0.00019189415292739746, + "loss": 0.4509, + "step": 3150 + }, + { + "epoch": 0.12201243291246766, + "grad_norm": 2.1759033203125, + "learning_rate": 0.00019186841190779567, + "loss": 0.428, + "step": 3160 + }, + { + "epoch": 0.12239854820649446, + "grad_norm": 1.270711064338684, + "learning_rate": 0.0001918426708881939, + "loss": 0.4262, + "step": 3170 + }, + { + "epoch": 0.12278466350052125, + "grad_norm": 3.7549123764038086, + "learning_rate": 0.0001918169298685921, + "loss": 0.4758, + "step": 3180 + }, + { + "epoch": 0.12317077879454805, + "grad_norm": 1.6550017595291138, + "learning_rate": 0.0001917911888489903, + "loss": 0.452, + "step": 3190 + }, + { + "epoch": 0.12355689408857484, + "grad_norm": 3.7151713371276855, + "learning_rate": 0.00019176544782938853, + "loss": 0.4844, + "step": 3200 + }, + { + "epoch": 0.12394300938260165, + "grad_norm": 0.5354440808296204, + "learning_rate": 0.00019173970680978674, + "loss": 0.4432, + "step": 3210 + }, + { + "epoch": 0.12432912467662845, + "grad_norm": 3.2494261264801025, + "learning_rate": 0.00019171396579018498, + "loss": 0.587, + "step": 3220 + }, + { + "epoch": 0.12471523997065524, + "grad_norm": 1.2129877805709839, + "learning_rate": 0.00019168822477058317, + "loss": 0.4662, + "step": 3230 + }, + { + "epoch": 0.12510135526468202, + "grad_norm": 3.723402500152588, + "learning_rate": 0.00019166248375098138, + "loss": 0.5261, + "step": 3240 + }, + { + "epoch": 0.12548747055870882, + "grad_norm": 1.596259593963623, + "learning_rate": 0.0001916367427313796, + "loss": 0.2802, + "step": 3250 + }, + { + "epoch": 0.12587358585273561, + "grad_norm": 5.5710320472717285, + "learning_rate": 0.0001916110017117778, + "loss": 0.5246, + "step": 3260 + }, + { + "epoch": 
0.1262597011467624, + "grad_norm": 4.490183353424072, + "learning_rate": 0.00019158526069217602, + "loss": 0.4929, + "step": 3270 + }, + { + "epoch": 0.12664581644078923, + "grad_norm": 2.482572555541992, + "learning_rate": 0.00019155951967257423, + "loss": 0.3677, + "step": 3280 + }, + { + "epoch": 0.12703193173481603, + "grad_norm": 3.348520517349243, + "learning_rate": 0.00019153377865297247, + "loss": 0.6471, + "step": 3290 + }, + { + "epoch": 0.12741804702884282, + "grad_norm": 7.735306262969971, + "learning_rate": 0.00019150803763337066, + "loss": 0.6057, + "step": 3300 + }, + { + "epoch": 0.12780416232286962, + "grad_norm": 2.120649576187134, + "learning_rate": 0.00019148229661376887, + "loss": 0.5408, + "step": 3310 + }, + { + "epoch": 0.12819027761689641, + "grad_norm": 10.259540557861328, + "learning_rate": 0.00019145655559416709, + "loss": 0.4753, + "step": 3320 + }, + { + "epoch": 0.1285763929109232, + "grad_norm": 4.094576358795166, + "learning_rate": 0.0001914308145745653, + "loss": 0.3832, + "step": 3330 + }, + { + "epoch": 0.12896250820495, + "grad_norm": 3.1248559951782227, + "learning_rate": 0.00019140507355496354, + "loss": 0.5631, + "step": 3340 + }, + { + "epoch": 0.1293486234989768, + "grad_norm": 1.2975168228149414, + "learning_rate": 0.00019137933253536173, + "loss": 0.5158, + "step": 3350 + }, + { + "epoch": 0.1297347387930036, + "grad_norm": 3.2515244483947754, + "learning_rate": 0.00019135359151575997, + "loss": 0.4176, + "step": 3360 + }, + { + "epoch": 0.1301208540870304, + "grad_norm": 2.287757396697998, + "learning_rate": 0.00019132785049615815, + "loss": 0.5316, + "step": 3370 + }, + { + "epoch": 0.1305069693810572, + "grad_norm": 8.668967247009277, + "learning_rate": 0.00019130210947655637, + "loss": 0.6653, + "step": 3380 + }, + { + "epoch": 0.13089308467508398, + "grad_norm": 4.751536846160889, + "learning_rate": 0.00019127636845695458, + "loss": 0.4508, + "step": 3390 + }, + { + "epoch": 0.13127919996911078, + "grad_norm": 3.240792751312256, + "learning_rate": 0.0001912506274373528, + "loss": 0.441, + "step": 3400 + }, + { + "epoch": 0.13166531526313757, + "grad_norm": 2.146261215209961, + "learning_rate": 0.00019122488641775103, + "loss": 0.3394, + "step": 3410 + }, + { + "epoch": 0.13205143055716437, + "grad_norm": 2.259693145751953, + "learning_rate": 0.00019119914539814922, + "loss": 0.4348, + "step": 3420 + }, + { + "epoch": 0.13243754585119116, + "grad_norm": 1.8136098384857178, + "learning_rate": 0.00019117340437854746, + "loss": 0.4441, + "step": 3430 + }, + { + "epoch": 0.13282366114521796, + "grad_norm": 1.7324503660202026, + "learning_rate": 0.00019114766335894565, + "loss": 0.4725, + "step": 3440 + }, + { + "epoch": 0.13320977643924475, + "grad_norm": 4.709383487701416, + "learning_rate": 0.00019112192233934389, + "loss": 0.5383, + "step": 3450 + }, + { + "epoch": 0.13359589173327155, + "grad_norm": 0.3468118906021118, + "learning_rate": 0.00019109618131974207, + "loss": 0.3228, + "step": 3460 + }, + { + "epoch": 0.13398200702729834, + "grad_norm": 6.66448974609375, + "learning_rate": 0.00019107044030014029, + "loss": 0.4128, + "step": 3470 + }, + { + "epoch": 0.13436812232132514, + "grad_norm": 1.6971935033798218, + "learning_rate": 0.00019104469928053853, + "loss": 0.5061, + "step": 3480 + }, + { + "epoch": 0.13475423761535194, + "grad_norm": 0.9180198311805725, + "learning_rate": 0.0001910189582609367, + "loss": 0.4445, + "step": 3490 + }, + { + "epoch": 0.13514035290937873, + "grad_norm": 3.1244235038757324, + "learning_rate": 
0.00019099321724133495, + "loss": 0.4099, + "step": 3500 + }, + { + "epoch": 0.13552646820340553, + "grad_norm": 2.7192864418029785, + "learning_rate": 0.00019096747622173314, + "loss": 0.5004, + "step": 3510 + }, + { + "epoch": 0.13591258349743232, + "grad_norm": 2.0905699729919434, + "learning_rate": 0.00019094173520213138, + "loss": 0.4277, + "step": 3520 + }, + { + "epoch": 0.13629869879145912, + "grad_norm": 3.3753092288970947, + "learning_rate": 0.0001909159941825296, + "loss": 0.4129, + "step": 3530 + }, + { + "epoch": 0.13668481408548594, + "grad_norm": 4.199211120605469, + "learning_rate": 0.00019089025316292778, + "loss": 0.5734, + "step": 3540 + }, + { + "epoch": 0.13707092937951273, + "grad_norm": 2.0411245822906494, + "learning_rate": 0.00019086451214332602, + "loss": 0.3018, + "step": 3550 + }, + { + "epoch": 0.13745704467353953, + "grad_norm": 17.236717224121094, + "learning_rate": 0.0001908387711237242, + "loss": 0.4527, + "step": 3560 + }, + { + "epoch": 0.13784315996756633, + "grad_norm": 1.4575644731521606, + "learning_rate": 0.00019081303010412245, + "loss": 0.3773, + "step": 3570 + }, + { + "epoch": 0.13822927526159312, + "grad_norm": 3.926090717315674, + "learning_rate": 0.00019078728908452063, + "loss": 0.5316, + "step": 3580 + }, + { + "epoch": 0.13861539055561992, + "grad_norm": 3.1841864585876465, + "learning_rate": 0.00019076154806491887, + "loss": 0.3705, + "step": 3590 + }, + { + "epoch": 0.1390015058496467, + "grad_norm": 4.08506441116333, + "learning_rate": 0.00019073580704531709, + "loss": 0.4941, + "step": 3600 + }, + { + "epoch": 0.1393876211436735, + "grad_norm": 3.063154458999634, + "learning_rate": 0.00019071006602571527, + "loss": 0.4435, + "step": 3610 + }, + { + "epoch": 0.1397737364377003, + "grad_norm": 6.122230529785156, + "learning_rate": 0.0001906843250061135, + "loss": 0.5067, + "step": 3620 + }, + { + "epoch": 0.1401598517317271, + "grad_norm": 3.3089540004730225, + "learning_rate": 0.0001906585839865117, + "loss": 0.4329, + "step": 3630 + }, + { + "epoch": 0.1405459670257539, + "grad_norm": 1.7245008945465088, + "learning_rate": 0.00019063284296690994, + "loss": 0.4502, + "step": 3640 + }, + { + "epoch": 0.1409320823197807, + "grad_norm": 1.7759568691253662, + "learning_rate": 0.00019060710194730813, + "loss": 0.2379, + "step": 3650 + }, + { + "epoch": 0.14131819761380748, + "grad_norm": 0.432452529668808, + "learning_rate": 0.00019058136092770637, + "loss": 0.4277, + "step": 3660 + }, + { + "epoch": 0.14170431290783428, + "grad_norm": 3.311952829360962, + "learning_rate": 0.00019055561990810458, + "loss": 0.4558, + "step": 3670 + }, + { + "epoch": 0.14209042820186107, + "grad_norm": 1.9942964315414429, + "learning_rate": 0.00019052987888850277, + "loss": 0.3349, + "step": 3680 + }, + { + "epoch": 0.14247654349588787, + "grad_norm": 6.226424217224121, + "learning_rate": 0.000190504137868901, + "loss": 0.5809, + "step": 3690 + }, + { + "epoch": 0.14286265878991466, + "grad_norm": 6.223634719848633, + "learning_rate": 0.0001904783968492992, + "loss": 0.5788, + "step": 3700 + }, + { + "epoch": 0.14324877408394146, + "grad_norm": 0.7370914220809937, + "learning_rate": 0.00019045265582969743, + "loss": 0.4834, + "step": 3710 + }, + { + "epoch": 0.14363488937796826, + "grad_norm": 1.745880365371704, + "learning_rate": 0.00019042691481009565, + "loss": 0.6995, + "step": 3720 + }, + { + "epoch": 0.14402100467199505, + "grad_norm": 0.8839595913887024, + "learning_rate": 0.00019040117379049386, + "loss": 0.3526, + "step": 3730 + }, + { + "epoch": 
0.14440711996602185, + "grad_norm": 1.1224008798599243, + "learning_rate": 0.00019037543277089207, + "loss": 0.3558, + "step": 3740 + }, + { + "epoch": 0.14479323526004864, + "grad_norm": 1.0473041534423828, + "learning_rate": 0.00019034969175129026, + "loss": 0.2465, + "step": 3750 + }, + { + "epoch": 0.14517935055407544, + "grad_norm": 3.83192777633667, + "learning_rate": 0.0001903239507316885, + "loss": 0.4832, + "step": 3760 + }, + { + "epoch": 0.14556546584810223, + "grad_norm": 3.323885440826416, + "learning_rate": 0.00019029820971208669, + "loss": 0.4924, + "step": 3770 + }, + { + "epoch": 0.14595158114212903, + "grad_norm": 3.2334187030792236, + "learning_rate": 0.00019027246869248493, + "loss": 0.5053, + "step": 3780 + }, + { + "epoch": 0.14633769643615582, + "grad_norm": 2.280498743057251, + "learning_rate": 0.00019024672767288314, + "loss": 0.554, + "step": 3790 + }, + { + "epoch": 0.14672381173018265, + "grad_norm": 4.546648979187012, + "learning_rate": 0.00019022098665328135, + "loss": 0.3999, + "step": 3800 + }, + { + "epoch": 0.14710992702420944, + "grad_norm": 0.6303244829177856, + "learning_rate": 0.00019019524563367957, + "loss": 0.4481, + "step": 3810 + }, + { + "epoch": 0.14749604231823624, + "grad_norm": 2.605196475982666, + "learning_rate": 0.00019016950461407775, + "loss": 0.3561, + "step": 3820 + }, + { + "epoch": 0.14788215761226303, + "grad_norm": 3.0562639236450195, + "learning_rate": 0.000190143763594476, + "loss": 0.5903, + "step": 3830 + }, + { + "epoch": 0.14826827290628983, + "grad_norm": 11.164155006408691, + "learning_rate": 0.0001901180225748742, + "loss": 0.4299, + "step": 3840 + }, + { + "epoch": 0.14865438820031662, + "grad_norm": 4.996811866760254, + "learning_rate": 0.00019009228155527242, + "loss": 0.4423, + "step": 3850 + }, + { + "epoch": 0.14904050349434342, + "grad_norm": 2.627272844314575, + "learning_rate": 0.00019006654053567063, + "loss": 0.4875, + "step": 3860 + }, + { + "epoch": 0.1494266187883702, + "grad_norm": 2.6532809734344482, + "learning_rate": 0.00019004079951606885, + "loss": 0.5221, + "step": 3870 + }, + { + "epoch": 0.149812734082397, + "grad_norm": 5.821976661682129, + "learning_rate": 0.00019001505849646706, + "loss": 0.4793, + "step": 3880 + }, + { + "epoch": 0.1501988493764238, + "grad_norm": 2.888029098510742, + "learning_rate": 0.00018998931747686524, + "loss": 0.5784, + "step": 3890 + }, + { + "epoch": 0.1505849646704506, + "grad_norm": 0.9147624969482422, + "learning_rate": 0.00018996357645726349, + "loss": 0.5533, + "step": 3900 + }, + { + "epoch": 0.1509710799644774, + "grad_norm": 2.6088199615478516, + "learning_rate": 0.0001899378354376617, + "loss": 0.5028, + "step": 3910 + }, + { + "epoch": 0.1513571952585042, + "grad_norm": 3.8208296298980713, + "learning_rate": 0.0001899120944180599, + "loss": 0.4934, + "step": 3920 + }, + { + "epoch": 0.15174331055253099, + "grad_norm": 2.8711328506469727, + "learning_rate": 0.00018988635339845813, + "loss": 0.4417, + "step": 3930 + }, + { + "epoch": 0.15212942584655778, + "grad_norm": 2.922855854034424, + "learning_rate": 0.00018986061237885634, + "loss": 0.5303, + "step": 3940 + }, + { + "epoch": 0.15251554114058458, + "grad_norm": 2.52575945854187, + "learning_rate": 0.00018983487135925455, + "loss": 0.397, + "step": 3950 + }, + { + "epoch": 0.15290165643461137, + "grad_norm": 3.3369996547698975, + "learning_rate": 0.00018980913033965277, + "loss": 0.4172, + "step": 3960 + }, + { + "epoch": 0.15328777172863817, + "grad_norm": 1.7678214311599731, + "learning_rate": 
0.00018978338932005098, + "loss": 0.3122, + "step": 3970 + }, + { + "epoch": 0.15367388702266496, + "grad_norm": 3.3293211460113525, + "learning_rate": 0.0001897576483004492, + "loss": 0.6864, + "step": 3980 + }, + { + "epoch": 0.15406000231669176, + "grad_norm": 1.4911530017852783, + "learning_rate": 0.0001897319072808474, + "loss": 0.3888, + "step": 3990 + }, + { + "epoch": 0.15444611761071855, + "grad_norm": 1.4884055852890015, + "learning_rate": 0.00018970616626124562, + "loss": 0.3952, + "step": 4000 + }, + { + "epoch": 0.15483223290474535, + "grad_norm": 1.2745383977890015, + "learning_rate": 0.00018968042524164383, + "loss": 0.3647, + "step": 4010 + }, + { + "epoch": 0.15521834819877214, + "grad_norm": 7.799386024475098, + "learning_rate": 0.00018965468422204205, + "loss": 0.5554, + "step": 4020 + }, + { + "epoch": 0.15560446349279894, + "grad_norm": 2.4778294563293457, + "learning_rate": 0.00018962894320244026, + "loss": 0.662, + "step": 4030 + }, + { + "epoch": 0.15599057878682573, + "grad_norm": 0.8415629267692566, + "learning_rate": 0.00018960320218283847, + "loss": 0.4317, + "step": 4040 + }, + { + "epoch": 0.15637669408085253, + "grad_norm": 4.507715702056885, + "learning_rate": 0.00018957746116323669, + "loss": 0.4512, + "step": 4050 + }, + { + "epoch": 0.15676280937487935, + "grad_norm": 3.5790421962738037, + "learning_rate": 0.0001895517201436349, + "loss": 0.4022, + "step": 4060 + }, + { + "epoch": 0.15714892466890615, + "grad_norm": 3.7266156673431396, + "learning_rate": 0.0001895259791240331, + "loss": 0.3945, + "step": 4070 + }, + { + "epoch": 0.15753503996293294, + "grad_norm": 7.909580230712891, + "learning_rate": 0.00018950023810443133, + "loss": 0.3726, + "step": 4080 + }, + { + "epoch": 0.15792115525695974, + "grad_norm": 2.2439534664154053, + "learning_rate": 0.00018947449708482954, + "loss": 0.4157, + "step": 4090 + }, + { + "epoch": 0.15830727055098653, + "grad_norm": 1.6076972484588623, + "learning_rate": 0.00018944875606522775, + "loss": 0.2363, + "step": 4100 + }, + { + "epoch": 0.15869338584501333, + "grad_norm": 3.7495157718658447, + "learning_rate": 0.00018942301504562596, + "loss": 0.4908, + "step": 4110 + }, + { + "epoch": 0.15907950113904012, + "grad_norm": 0.2942291796207428, + "learning_rate": 0.00018939727402602418, + "loss": 0.4915, + "step": 4120 + }, + { + "epoch": 0.15946561643306692, + "grad_norm": 1.3951829671859741, + "learning_rate": 0.0001893715330064224, + "loss": 0.4585, + "step": 4130 + }, + { + "epoch": 0.15985173172709372, + "grad_norm": 0.4405671954154968, + "learning_rate": 0.0001893457919868206, + "loss": 0.2839, + "step": 4140 + }, + { + "epoch": 0.1602378470211205, + "grad_norm": 1.0917588472366333, + "learning_rate": 0.00018932005096721882, + "loss": 0.401, + "step": 4150 + }, + { + "epoch": 0.1606239623151473, + "grad_norm": 1.6183397769927979, + "learning_rate": 0.00018929430994761703, + "loss": 0.5555, + "step": 4160 + }, + { + "epoch": 0.1610100776091741, + "grad_norm": 2.0909583568573, + "learning_rate": 0.00018926856892801524, + "loss": 0.52, + "step": 4170 + }, + { + "epoch": 0.1613961929032009, + "grad_norm": 2.901456356048584, + "learning_rate": 0.00018924282790841346, + "loss": 0.603, + "step": 4180 + }, + { + "epoch": 0.1617823081972277, + "grad_norm": 7.230431079864502, + "learning_rate": 0.00018921708688881167, + "loss": 0.6189, + "step": 4190 + }, + { + "epoch": 0.1621684234912545, + "grad_norm": 6.773900508880615, + "learning_rate": 0.00018919134586920988, + "loss": 0.2494, + "step": 4200 + }, + { + "epoch": 
0.16255453878528128, + "grad_norm": 0.8557988405227661, + "learning_rate": 0.0001891656048496081, + "loss": 0.2617, + "step": 4210 + }, + { + "epoch": 0.16294065407930808, + "grad_norm": 1.3747268915176392, + "learning_rate": 0.00018913986383000634, + "loss": 0.4189, + "step": 4220 + }, + { + "epoch": 0.16332676937333487, + "grad_norm": 4.072261810302734, + "learning_rate": 0.00018911412281040452, + "loss": 0.5473, + "step": 4230 + }, + { + "epoch": 0.16371288466736167, + "grad_norm": 2.7210185527801514, + "learning_rate": 0.00018908838179080274, + "loss": 0.3501, + "step": 4240 + }, + { + "epoch": 0.16409899996138846, + "grad_norm": 2.276454448699951, + "learning_rate": 0.00018906264077120095, + "loss": 0.3078, + "step": 4250 + }, + { + "epoch": 0.16448511525541526, + "grad_norm": 3.586536169052124, + "learning_rate": 0.00018903689975159916, + "loss": 0.3856, + "step": 4260 + }, + { + "epoch": 0.16487123054944205, + "grad_norm": 2.199673891067505, + "learning_rate": 0.00018901115873199738, + "loss": 0.3677, + "step": 4270 + }, + { + "epoch": 0.16525734584346885, + "grad_norm": 2.8410561084747314, + "learning_rate": 0.0001889854177123956, + "loss": 0.6101, + "step": 4280 + }, + { + "epoch": 0.16564346113749565, + "grad_norm": 3.9638853073120117, + "learning_rate": 0.00018895967669279383, + "loss": 0.5066, + "step": 4290 + }, + { + "epoch": 0.16602957643152244, + "grad_norm": 1.2070738077163696, + "learning_rate": 0.00018893393567319202, + "loss": 0.385, + "step": 4300 + }, + { + "epoch": 0.16641569172554924, + "grad_norm": 1.0531187057495117, + "learning_rate": 0.00018890819465359023, + "loss": 0.3608, + "step": 4310 + }, + { + "epoch": 0.16680180701957603, + "grad_norm": 1.1998246908187866, + "learning_rate": 0.00018888245363398844, + "loss": 0.4624, + "step": 4320 + }, + { + "epoch": 0.16718792231360285, + "grad_norm": 2.126063346862793, + "learning_rate": 0.00018885671261438666, + "loss": 0.6076, + "step": 4330 + }, + { + "epoch": 0.16757403760762965, + "grad_norm": 1.5854765176773071, + "learning_rate": 0.0001888309715947849, + "loss": 0.4817, + "step": 4340 + }, + { + "epoch": 0.16796015290165645, + "grad_norm": 6.630712509155273, + "learning_rate": 0.00018880523057518308, + "loss": 0.4098, + "step": 4350 + }, + { + "epoch": 0.16834626819568324, + "grad_norm": 2.060789108276367, + "learning_rate": 0.00018877948955558132, + "loss": 0.3523, + "step": 4360 + }, + { + "epoch": 0.16873238348971004, + "grad_norm": 2.2551252841949463, + "learning_rate": 0.0001887537485359795, + "loss": 0.3095, + "step": 4370 + }, + { + "epoch": 0.16911849878373683, + "grad_norm": 3.736640453338623, + "learning_rate": 0.00018872800751637772, + "loss": 0.3812, + "step": 4380 + }, + { + "epoch": 0.16950461407776363, + "grad_norm": 1.9971100091934204, + "learning_rate": 0.00018870226649677594, + "loss": 0.3422, + "step": 4390 + }, + { + "epoch": 0.16989072937179042, + "grad_norm": 3.6577255725860596, + "learning_rate": 0.00018867652547717415, + "loss": 0.7857, + "step": 4400 + }, + { + "epoch": 0.17027684466581722, + "grad_norm": 2.166538715362549, + "learning_rate": 0.0001886507844575724, + "loss": 0.5596, + "step": 4410 + }, + { + "epoch": 0.170662959959844, + "grad_norm": 2.0177736282348633, + "learning_rate": 0.00018862504343797058, + "loss": 0.3197, + "step": 4420 + }, + { + "epoch": 0.1710490752538708, + "grad_norm": 0.29447808861732483, + "learning_rate": 0.00018859930241836882, + "loss": 0.5284, + "step": 4430 + }, + { + "epoch": 0.1714351905478976, + "grad_norm": 2.17985200881958, + "learning_rate": 
0.000188573561398767, + "loss": 0.5188, + "step": 4440 + }, + { + "epoch": 0.1718213058419244, + "grad_norm": 2.87449049949646, + "learning_rate": 0.00018854782037916522, + "loss": 0.554, + "step": 4450 + }, + { + "epoch": 0.1722074211359512, + "grad_norm": 1.8865265846252441, + "learning_rate": 0.00018852207935956343, + "loss": 0.4338, + "step": 4460 + }, + { + "epoch": 0.172593536429978, + "grad_norm": 2.042337417602539, + "learning_rate": 0.00018849633833996164, + "loss": 0.3924, + "step": 4470 + }, + { + "epoch": 0.17297965172400478, + "grad_norm": 1.4254354238510132, + "learning_rate": 0.00018847059732035988, + "loss": 0.2607, + "step": 4480 + }, + { + "epoch": 0.17336576701803158, + "grad_norm": 2.611560344696045, + "learning_rate": 0.00018844485630075807, + "loss": 0.4967, + "step": 4490 + }, + { + "epoch": 0.17375188231205838, + "grad_norm": 1.1008936166763306, + "learning_rate": 0.0001884191152811563, + "loss": 0.4109, + "step": 4500 + }, + { + "epoch": 0.17413799760608517, + "grad_norm": 0.8280178308486938, + "learning_rate": 0.0001883933742615545, + "loss": 0.6632, + "step": 4510 + }, + { + "epoch": 0.17452411290011197, + "grad_norm": 2.226020336151123, + "learning_rate": 0.0001883676332419527, + "loss": 0.4777, + "step": 4520 + }, + { + "epoch": 0.17491022819413876, + "grad_norm": 1.6062042713165283, + "learning_rate": 0.00018834189222235095, + "loss": 0.4671, + "step": 4530 + }, + { + "epoch": 0.17529634348816556, + "grad_norm": 3.9853012561798096, + "learning_rate": 0.00018831615120274914, + "loss": 0.4843, + "step": 4540 + }, + { + "epoch": 0.17568245878219235, + "grad_norm": 0.30268657207489014, + "learning_rate": 0.00018829041018314738, + "loss": 0.3922, + "step": 4550 + }, + { + "epoch": 0.17606857407621915, + "grad_norm": 6.283960342407227, + "learning_rate": 0.00018826466916354556, + "loss": 0.6106, + "step": 4560 + }, + { + "epoch": 0.17645468937024594, + "grad_norm": 1.4164658784866333, + "learning_rate": 0.0001882389281439438, + "loss": 0.3014, + "step": 4570 + }, + { + "epoch": 0.17684080466427274, + "grad_norm": 4.847668170928955, + "learning_rate": 0.000188213187124342, + "loss": 0.5216, + "step": 4580 + }, + { + "epoch": 0.17722691995829956, + "grad_norm": 3.683180332183838, + "learning_rate": 0.0001881874461047402, + "loss": 0.3268, + "step": 4590 + }, + { + "epoch": 0.17761303525232636, + "grad_norm": 1.053144097328186, + "learning_rate": 0.00018816170508513844, + "loss": 0.5229, + "step": 4600 + }, + { + "epoch": 0.17799915054635315, + "grad_norm": 0.29438719153404236, + "learning_rate": 0.00018813596406553663, + "loss": 0.4523, + "step": 4610 + }, + { + "epoch": 0.17838526584037995, + "grad_norm": 1.5682024955749512, + "learning_rate": 0.00018811022304593487, + "loss": 0.4367, + "step": 4620 + }, + { + "epoch": 0.17877138113440674, + "grad_norm": 1.462189793586731, + "learning_rate": 0.00018808448202633306, + "loss": 0.5086, + "step": 4630 + }, + { + "epoch": 0.17915749642843354, + "grad_norm": 0.7927210927009583, + "learning_rate": 0.0001880587410067313, + "loss": 0.4654, + "step": 4640 + }, + { + "epoch": 0.17954361172246033, + "grad_norm": 1.4543548822402954, + "learning_rate": 0.0001880329999871295, + "loss": 0.5005, + "step": 4650 + }, + { + "epoch": 0.17992972701648713, + "grad_norm": 1.5814868211746216, + "learning_rate": 0.00018800725896752772, + "loss": 0.4127, + "step": 4660 + }, + { + "epoch": 0.18031584231051392, + "grad_norm": 1.9244798421859741, + "learning_rate": 0.00018798151794792594, + "loss": 0.3796, + "step": 4670 + }, + { + "epoch": 
0.18070195760454072, + "grad_norm": 1.8725996017456055, + "learning_rate": 0.00018795577692832412, + "loss": 0.4112, + "step": 4680 + }, + { + "epoch": 0.18108807289856751, + "grad_norm": 2.8138442039489746, + "learning_rate": 0.00018793003590872236, + "loss": 0.6117, + "step": 4690 + }, + { + "epoch": 0.1814741881925943, + "grad_norm": 3.4465060234069824, + "learning_rate": 0.00018790429488912055, + "loss": 0.4223, + "step": 4700 + }, + { + "epoch": 0.1818603034866211, + "grad_norm": 4.431785583496094, + "learning_rate": 0.0001878785538695188, + "loss": 0.54, + "step": 4710 + }, + { + "epoch": 0.1822464187806479, + "grad_norm": 6.951846599578857, + "learning_rate": 0.000187852812849917, + "loss": 0.3702, + "step": 4720 + }, + { + "epoch": 0.1826325340746747, + "grad_norm": 1.0188024044036865, + "learning_rate": 0.00018782707183031522, + "loss": 0.2715, + "step": 4730 + }, + { + "epoch": 0.1830186493687015, + "grad_norm": 0.3875834047794342, + "learning_rate": 0.00018780133081071343, + "loss": 0.4208, + "step": 4740 + }, + { + "epoch": 0.1834047646627283, + "grad_norm": 2.7475740909576416, + "learning_rate": 0.00018777558979111162, + "loss": 0.3613, + "step": 4750 + }, + { + "epoch": 0.18379087995675508, + "grad_norm": 2.553227186203003, + "learning_rate": 0.00018774984877150986, + "loss": 0.4781, + "step": 4760 + }, + { + "epoch": 0.18417699525078188, + "grad_norm": 2.005154609680176, + "learning_rate": 0.00018772410775190804, + "loss": 0.3805, + "step": 4770 + }, + { + "epoch": 0.18456311054480867, + "grad_norm": 0.7380127310752869, + "learning_rate": 0.00018769836673230628, + "loss": 0.3679, + "step": 4780 + }, + { + "epoch": 0.18494922583883547, + "grad_norm": 3.6547505855560303, + "learning_rate": 0.0001876726257127045, + "loss": 0.4502, + "step": 4790 + }, + { + "epoch": 0.18533534113286226, + "grad_norm": 2.232980728149414, + "learning_rate": 0.0001876468846931027, + "loss": 0.4628, + "step": 4800 + }, + { + "epoch": 0.18572145642688906, + "grad_norm": 6.521275043487549, + "learning_rate": 0.00018762114367350092, + "loss": 0.4765, + "step": 4810 + }, + { + "epoch": 0.18610757172091585, + "grad_norm": 1.6310979127883911, + "learning_rate": 0.0001875954026538991, + "loss": 0.4039, + "step": 4820 + }, + { + "epoch": 0.18649368701494265, + "grad_norm": 1.1469775438308716, + "learning_rate": 0.00018756966163429735, + "loss": 0.4195, + "step": 4830 + }, + { + "epoch": 0.18687980230896944, + "grad_norm": 0.7688332200050354, + "learning_rate": 0.00018754392061469556, + "loss": 0.264, + "step": 4840 + }, + { + "epoch": 0.18726591760299627, + "grad_norm": 3.3422155380249023, + "learning_rate": 0.00018751817959509378, + "loss": 0.5275, + "step": 4850 + }, + { + "epoch": 0.18765203289702306, + "grad_norm": 1.517876386642456, + "learning_rate": 0.000187492438575492, + "loss": 0.4567, + "step": 4860 + }, + { + "epoch": 0.18803814819104986, + "grad_norm": 1.2196050882339478, + "learning_rate": 0.0001874666975558902, + "loss": 0.4231, + "step": 4870 + }, + { + "epoch": 0.18842426348507665, + "grad_norm": 1.3325402736663818, + "learning_rate": 0.00018744095653628842, + "loss": 0.6325, + "step": 4880 + }, + { + "epoch": 0.18881037877910345, + "grad_norm": 6.098769664764404, + "learning_rate": 0.0001874152155166866, + "loss": 0.576, + "step": 4890 + }, + { + "epoch": 0.18919649407313024, + "grad_norm": 2.602363348007202, + "learning_rate": 0.00018738947449708484, + "loss": 0.3237, + "step": 4900 + }, + { + "epoch": 0.18958260936715704, + "grad_norm": 0.970106303691864, + "learning_rate": 
0.00018736373347748306, + "loss": 0.409, + "step": 4910 + }, + { + "epoch": 0.18996872466118384, + "grad_norm": 3.2592012882232666, + "learning_rate": 0.00018733799245788127, + "loss": 0.408, + "step": 4920 + }, + { + "epoch": 0.19035483995521063, + "grad_norm": 0.31132128834724426, + "learning_rate": 0.00018731225143827948, + "loss": 0.2446, + "step": 4930 + }, + { + "epoch": 0.19074095524923743, + "grad_norm": 5.321741104125977, + "learning_rate": 0.0001872865104186777, + "loss": 0.4604, + "step": 4940 + }, + { + "epoch": 0.19112707054326422, + "grad_norm": 1.1165122985839844, + "learning_rate": 0.0001872607693990759, + "loss": 0.3605, + "step": 4950 + }, + { + "epoch": 0.19151318583729102, + "grad_norm": 0.8274110555648804, + "learning_rate": 0.0001872350283794741, + "loss": 0.2669, + "step": 4960 + }, + { + "epoch": 0.1918993011313178, + "grad_norm": 2.8668346405029297, + "learning_rate": 0.00018720928735987234, + "loss": 0.4055, + "step": 4970 + }, + { + "epoch": 0.1922854164253446, + "grad_norm": 3.411841630935669, + "learning_rate": 0.00018718354634027055, + "loss": 0.5989, + "step": 4980 + }, + { + "epoch": 0.1926715317193714, + "grad_norm": 0.18740829825401306, + "learning_rate": 0.00018715780532066876, + "loss": 0.3805, + "step": 4990 + }, + { + "epoch": 0.1930576470133982, + "grad_norm": 1.0823473930358887, + "learning_rate": 0.00018713206430106698, + "loss": 0.2854, + "step": 5000 + }, + { + "epoch": 0.193443762307425, + "grad_norm": 1.9816405773162842, + "learning_rate": 0.0001871063232814652, + "loss": 0.3771, + "step": 5010 + }, + { + "epoch": 0.1938298776014518, + "grad_norm": 5.267081260681152, + "learning_rate": 0.0001870805822618634, + "loss": 0.3085, + "step": 5020 + }, + { + "epoch": 0.19421599289547858, + "grad_norm": 5.706038475036621, + "learning_rate": 0.00018705484124226162, + "loss": 0.484, + "step": 5030 + }, + { + "epoch": 0.19460210818950538, + "grad_norm": 1.3357723951339722, + "learning_rate": 0.00018702910022265983, + "loss": 0.2161, + "step": 5040 + }, + { + "epoch": 0.19498822348353217, + "grad_norm": 1.0626447200775146, + "learning_rate": 0.00018700335920305804, + "loss": 0.3491, + "step": 5050 + }, + { + "epoch": 0.19537433877755897, + "grad_norm": 2.441228151321411, + "learning_rate": 0.00018697761818345626, + "loss": 0.3975, + "step": 5060 + }, + { + "epoch": 0.19576045407158577, + "grad_norm": 2.6739327907562256, + "learning_rate": 0.00018695187716385447, + "loss": 0.3418, + "step": 5070 + }, + { + "epoch": 0.19614656936561256, + "grad_norm": 2.3216919898986816, + "learning_rate": 0.00018692613614425268, + "loss": 0.6265, + "step": 5080 + }, + { + "epoch": 0.19653268465963936, + "grad_norm": 3.9119021892547607, + "learning_rate": 0.0001869003951246509, + "loss": 0.2982, + "step": 5090 + }, + { + "epoch": 0.19691879995366615, + "grad_norm": 5.744061470031738, + "learning_rate": 0.0001868746541050491, + "loss": 0.4048, + "step": 5100 + }, + { + "epoch": 0.19730491524769297, + "grad_norm": 8.512910842895508, + "learning_rate": 0.00018684891308544732, + "loss": 0.3598, + "step": 5110 + }, + { + "epoch": 0.19769103054171977, + "grad_norm": 1.6382296085357666, + "learning_rate": 0.00018682317206584554, + "loss": 0.2121, + "step": 5120 + }, + { + "epoch": 0.19807714583574657, + "grad_norm": 2.1593070030212402, + "learning_rate": 0.00018679743104624375, + "loss": 0.4914, + "step": 5130 + }, + { + "epoch": 0.19846326112977336, + "grad_norm": 3.067112445831299, + "learning_rate": 0.00018677169002664196, + "loss": 0.4171, + "step": 5140 + }, + { + "epoch": 
0.19884937642380016, + "grad_norm": 1.9954415559768677, + "learning_rate": 0.00018674594900704018, + "loss": 0.5161, + "step": 5150 + }, + { + "epoch": 0.19923549171782695, + "grad_norm": 2.793346643447876, + "learning_rate": 0.0001867202079874384, + "loss": 0.4159, + "step": 5160 + }, + { + "epoch": 0.19962160701185375, + "grad_norm": 4.087403774261475, + "learning_rate": 0.0001866944669678366, + "loss": 0.3339, + "step": 5170 + }, + { + "epoch": 0.20000772230588054, + "grad_norm": 2.10153865814209, + "learning_rate": 0.00018666872594823482, + "loss": 0.4352, + "step": 5180 + }, + { + "epoch": 0.20039383759990734, + "grad_norm": 2.947117805480957, + "learning_rate": 0.00018664298492863303, + "loss": 0.305, + "step": 5190 + }, + { + "epoch": 0.20077995289393413, + "grad_norm": 1.2496302127838135, + "learning_rate": 0.00018661724390903124, + "loss": 0.4578, + "step": 5200 + }, + { + "epoch": 0.20116606818796093, + "grad_norm": 0.5246118903160095, + "learning_rate": 0.00018659150288942946, + "loss": 0.7531, + "step": 5210 + }, + { + "epoch": 0.20155218348198772, + "grad_norm": 4.099668502807617, + "learning_rate": 0.00018656576186982767, + "loss": 0.3809, + "step": 5220 + }, + { + "epoch": 0.20193829877601452, + "grad_norm": 4.237419128417969, + "learning_rate": 0.00018654002085022588, + "loss": 0.3169, + "step": 5230 + }, + { + "epoch": 0.20232441407004131, + "grad_norm": 1.6228466033935547, + "learning_rate": 0.0001865142798306241, + "loss": 0.5832, + "step": 5240 + }, + { + "epoch": 0.2027105293640681, + "grad_norm": 4.567386627197266, + "learning_rate": 0.0001864885388110223, + "loss": 0.2177, + "step": 5250 + }, + { + "epoch": 0.2030966446580949, + "grad_norm": 1.4991040229797363, + "learning_rate": 0.00018646279779142052, + "loss": 0.3851, + "step": 5260 + }, + { + "epoch": 0.2034827599521217, + "grad_norm": 2.127082586288452, + "learning_rate": 0.00018643705677181874, + "loss": 0.4721, + "step": 5270 + }, + { + "epoch": 0.2038688752461485, + "grad_norm": 2.9149303436279297, + "learning_rate": 0.00018641131575221695, + "loss": 0.2556, + "step": 5280 + }, + { + "epoch": 0.2042549905401753, + "grad_norm": 0.06375914812088013, + "learning_rate": 0.00018638557473261516, + "loss": 0.3599, + "step": 5290 + }, + { + "epoch": 0.20464110583420209, + "grad_norm": 3.338331699371338, + "learning_rate": 0.00018635983371301338, + "loss": 0.4062, + "step": 5300 + }, + { + "epoch": 0.20502722112822888, + "grad_norm": 4.006681442260742, + "learning_rate": 0.0001863340926934116, + "loss": 0.4538, + "step": 5310 + }, + { + "epoch": 0.20541333642225568, + "grad_norm": 1.1406009197235107, + "learning_rate": 0.0001863083516738098, + "loss": 0.8432, + "step": 5320 + }, + { + "epoch": 0.20579945171628247, + "grad_norm": 9.281437873840332, + "learning_rate": 0.00018628261065420802, + "loss": 0.4538, + "step": 5330 + }, + { + "epoch": 0.20618556701030927, + "grad_norm": 3.1884214878082275, + "learning_rate": 0.00018625686963460626, + "loss": 0.3361, + "step": 5340 + }, + { + "epoch": 0.20657168230433606, + "grad_norm": 1.4311977624893188, + "learning_rate": 0.00018623112861500444, + "loss": 0.5519, + "step": 5350 + }, + { + "epoch": 0.20695779759836286, + "grad_norm": 3.574361801147461, + "learning_rate": 0.00018620538759540266, + "loss": 0.518, + "step": 5360 + }, + { + "epoch": 0.20734391289238968, + "grad_norm": 3.0186073780059814, + "learning_rate": 0.00018617964657580087, + "loss": 0.4204, + "step": 5370 + }, + { + "epoch": 0.20773002818641648, + "grad_norm": 2.832859754562378, + "learning_rate": 
0.00018615390555619908, + "loss": 0.5736, + "step": 5380 + }, + { + "epoch": 0.20811614348044327, + "grad_norm": 2.2258200645446777, + "learning_rate": 0.0001861281645365973, + "loss": 0.8194, + "step": 5390 + }, + { + "epoch": 0.20850225877447007, + "grad_norm": 1.0975148677825928, + "learning_rate": 0.0001861024235169955, + "loss": 0.5235, + "step": 5400 + }, + { + "epoch": 0.20888837406849686, + "grad_norm": 2.597329616546631, + "learning_rate": 0.00018607668249739375, + "loss": 0.2798, + "step": 5410 + }, + { + "epoch": 0.20927448936252366, + "grad_norm": 1.3780876398086548, + "learning_rate": 0.00018605094147779194, + "loss": 0.4046, + "step": 5420 + }, + { + "epoch": 0.20966060465655045, + "grad_norm": 2.409886598587036, + "learning_rate": 0.00018602520045819018, + "loss": 0.3243, + "step": 5430 + }, + { + "epoch": 0.21004671995057725, + "grad_norm": 1.0368077754974365, + "learning_rate": 0.00018599945943858836, + "loss": 0.4469, + "step": 5440 + }, + { + "epoch": 0.21043283524460404, + "grad_norm": 2.961658000946045, + "learning_rate": 0.00018597371841898658, + "loss": 0.5104, + "step": 5450 + }, + { + "epoch": 0.21081895053863084, + "grad_norm": 1.1599836349487305, + "learning_rate": 0.00018594797739938482, + "loss": 0.3422, + "step": 5460 + }, + { + "epoch": 0.21120506583265763, + "grad_norm": 3.293682336807251, + "learning_rate": 0.000185922236379783, + "loss": 0.3556, + "step": 5470 + }, + { + "epoch": 0.21159118112668443, + "grad_norm": 1.6923863887786865, + "learning_rate": 0.00018589649536018124, + "loss": 0.3084, + "step": 5480 + }, + { + "epoch": 0.21197729642071123, + "grad_norm": 3.7289531230926514, + "learning_rate": 0.00018587075434057943, + "loss": 0.4668, + "step": 5490 + }, + { + "epoch": 0.21236341171473802, + "grad_norm": 1.3744993209838867, + "learning_rate": 0.00018584501332097767, + "loss": 0.2984, + "step": 5500 + }, + { + "epoch": 0.21274952700876482, + "grad_norm": 1.4377775192260742, + "learning_rate": 0.00018581927230137586, + "loss": 0.2622, + "step": 5510 + }, + { + "epoch": 0.2131356423027916, + "grad_norm": 4.957859992980957, + "learning_rate": 0.00018579353128177407, + "loss": 0.5561, + "step": 5520 + }, + { + "epoch": 0.2135217575968184, + "grad_norm": 3.2645647525787354, + "learning_rate": 0.0001857677902621723, + "loss": 0.59, + "step": 5530 + }, + { + "epoch": 0.2139078728908452, + "grad_norm": 1.1365091800689697, + "learning_rate": 0.0001857420492425705, + "loss": 0.443, + "step": 5540 + }, + { + "epoch": 0.214293988184872, + "grad_norm": 3.187476396560669, + "learning_rate": 0.00018571630822296874, + "loss": 0.2612, + "step": 5550 + }, + { + "epoch": 0.2146801034788988, + "grad_norm": 2.6851940155029297, + "learning_rate": 0.00018569056720336692, + "loss": 0.4543, + "step": 5560 + }, + { + "epoch": 0.2150662187729256, + "grad_norm": 2.2613587379455566, + "learning_rate": 0.00018566482618376516, + "loss": 0.3185, + "step": 5570 + }, + { + "epoch": 0.21545233406695238, + "grad_norm": 1.292475700378418, + "learning_rate": 0.00018563908516416335, + "loss": 0.2794, + "step": 5580 + }, + { + "epoch": 0.21583844936097918, + "grad_norm": 2.0878446102142334, + "learning_rate": 0.00018561334414456156, + "loss": 0.3908, + "step": 5590 + }, + { + "epoch": 0.21622456465500597, + "grad_norm": 8.058819770812988, + "learning_rate": 0.0001855876031249598, + "loss": 0.6282, + "step": 5600 + }, + { + "epoch": 0.21661067994903277, + "grad_norm": 1.8231629133224487, + "learning_rate": 0.000185561862105358, + "loss": 0.4973, + "step": 5610 + }, + { + "epoch": 
0.21699679524305956, + "grad_norm": 3.947242259979248, + "learning_rate": 0.00018553612108575623, + "loss": 0.4598, + "step": 5620 + }, + { + "epoch": 0.21738291053708636, + "grad_norm": 3.3258073329925537, + "learning_rate": 0.00018551038006615442, + "loss": 0.5266, + "step": 5630 + }, + { + "epoch": 0.21776902583111318, + "grad_norm": 2.301485300064087, + "learning_rate": 0.00018548463904655266, + "loss": 0.4339, + "step": 5640 + }, + { + "epoch": 0.21815514112513998, + "grad_norm": 4.4706878662109375, + "learning_rate": 0.00018545889802695087, + "loss": 0.5233, + "step": 5650 + }, + { + "epoch": 0.21854125641916677, + "grad_norm": 1.1203399896621704, + "learning_rate": 0.00018543315700734906, + "loss": 0.4547, + "step": 5660 + }, + { + "epoch": 0.21892737171319357, + "grad_norm": 0.3744584918022156, + "learning_rate": 0.0001854074159877473, + "loss": 0.2524, + "step": 5670 + }, + { + "epoch": 0.21931348700722036, + "grad_norm": 2.7888870239257812, + "learning_rate": 0.00018538167496814548, + "loss": 0.411, + "step": 5680 + }, + { + "epoch": 0.21969960230124716, + "grad_norm": 4.9972429275512695, + "learning_rate": 0.00018535593394854372, + "loss": 0.6359, + "step": 5690 + }, + { + "epoch": 0.22008571759527396, + "grad_norm": 1.1321420669555664, + "learning_rate": 0.0001853301929289419, + "loss": 0.4068, + "step": 5700 + }, + { + "epoch": 0.22047183288930075, + "grad_norm": 1.9291785955429077, + "learning_rate": 0.00018530445190934015, + "loss": 0.5428, + "step": 5710 + }, + { + "epoch": 0.22085794818332755, + "grad_norm": 0.8663263916969299, + "learning_rate": 0.00018527871088973836, + "loss": 0.4662, + "step": 5720 + }, + { + "epoch": 0.22124406347735434, + "grad_norm": 3.039782762527466, + "learning_rate": 0.00018525296987013655, + "loss": 0.3045, + "step": 5730 + }, + { + "epoch": 0.22163017877138114, + "grad_norm": 1.3552179336547852, + "learning_rate": 0.0001852272288505348, + "loss": 0.3411, + "step": 5740 + }, + { + "epoch": 0.22201629406540793, + "grad_norm": 1.4136948585510254, + "learning_rate": 0.00018520148783093298, + "loss": 0.5517, + "step": 5750 + }, + { + "epoch": 0.22240240935943473, + "grad_norm": 2.463942766189575, + "learning_rate": 0.00018517574681133122, + "loss": 0.4681, + "step": 5760 + }, + { + "epoch": 0.22278852465346152, + "grad_norm": 0.9063917994499207, + "learning_rate": 0.0001851500057917294, + "loss": 0.4537, + "step": 5770 + }, + { + "epoch": 0.22317463994748832, + "grad_norm": 2.352678060531616, + "learning_rate": 0.00018512426477212764, + "loss": 0.4245, + "step": 5780 + }, + { + "epoch": 0.2235607552415151, + "grad_norm": 2.0424869060516357, + "learning_rate": 0.00018509852375252586, + "loss": 0.2892, + "step": 5790 + }, + { + "epoch": 0.2239468705355419, + "grad_norm": 2.7604904174804688, + "learning_rate": 0.00018507278273292404, + "loss": 0.3606, + "step": 5800 + }, + { + "epoch": 0.2243329858295687, + "grad_norm": 2.827798366546631, + "learning_rate": 0.00018504704171332228, + "loss": 0.3212, + "step": 5810 + }, + { + "epoch": 0.2247191011235955, + "grad_norm": 3.1988680362701416, + "learning_rate": 0.00018502130069372047, + "loss": 0.5649, + "step": 5820 + }, + { + "epoch": 0.2251052164176223, + "grad_norm": 1.8216092586517334, + "learning_rate": 0.0001849955596741187, + "loss": 0.2871, + "step": 5830 + }, + { + "epoch": 0.2254913317116491, + "grad_norm": 2.7595627307891846, + "learning_rate": 0.00018496981865451692, + "loss": 0.665, + "step": 5840 + }, + { + "epoch": 0.22587744700567589, + "grad_norm": 1.2395098209381104, + "learning_rate": 
0.00018494407763491514, + "loss": 0.2504, + "step": 5850 + }, + { + "epoch": 0.22626356229970268, + "grad_norm": 0.6991098523139954, + "learning_rate": 0.00018491833661531335, + "loss": 0.2263, + "step": 5860 + }, + { + "epoch": 0.22664967759372948, + "grad_norm": 11.053647994995117, + "learning_rate": 0.00018489259559571156, + "loss": 0.5919, + "step": 5870 + }, + { + "epoch": 0.22703579288775627, + "grad_norm": 2.8663880825042725, + "learning_rate": 0.00018486685457610978, + "loss": 0.3399, + "step": 5880 + }, + { + "epoch": 0.22742190818178307, + "grad_norm": 1.4995262622833252, + "learning_rate": 0.00018484111355650796, + "loss": 0.4474, + "step": 5890 + }, + { + "epoch": 0.2278080234758099, + "grad_norm": 3.275681972503662, + "learning_rate": 0.0001848153725369062, + "loss": 0.4347, + "step": 5900 + }, + { + "epoch": 0.22819413876983669, + "grad_norm": 14.772253036499023, + "learning_rate": 0.00018478963151730442, + "loss": 0.3705, + "step": 5910 + }, + { + "epoch": 0.22858025406386348, + "grad_norm": 3.184976816177368, + "learning_rate": 0.00018476389049770263, + "loss": 0.3866, + "step": 5920 + }, + { + "epoch": 0.22896636935789028, + "grad_norm": 2.310765504837036, + "learning_rate": 0.00018473814947810084, + "loss": 0.2717, + "step": 5930 + }, + { + "epoch": 0.22935248465191707, + "grad_norm": 2.061189889907837, + "learning_rate": 0.00018471240845849906, + "loss": 0.2054, + "step": 5940 + }, + { + "epoch": 0.22973859994594387, + "grad_norm": 10.815469741821289, + "learning_rate": 0.00018468666743889727, + "loss": 0.5868, + "step": 5950 + }, + { + "epoch": 0.23012471523997066, + "grad_norm": 1.7080497741699219, + "learning_rate": 0.00018466092641929548, + "loss": 0.236, + "step": 5960 + }, + { + "epoch": 0.23051083053399746, + "grad_norm": 7.389080047607422, + "learning_rate": 0.0001846351853996937, + "loss": 0.2752, + "step": 5970 + }, + { + "epoch": 0.23089694582802425, + "grad_norm": 2.9860422611236572, + "learning_rate": 0.0001846094443800919, + "loss": 0.3436, + "step": 5980 + }, + { + "epoch": 0.23128306112205105, + "grad_norm": 13.12328815460205, + "learning_rate": 0.00018458370336049012, + "loss": 0.3952, + "step": 5990 + }, + { + "epoch": 0.23166917641607784, + "grad_norm": 3.7130823135375977, + "learning_rate": 0.00018455796234088834, + "loss": 0.3658, + "step": 6000 + }, + { + "epoch": 0.23205529171010464, + "grad_norm": 1.8329843282699585, + "learning_rate": 0.00018453222132128655, + "loss": 0.4172, + "step": 6010 + }, + { + "epoch": 0.23244140700413143, + "grad_norm": 1.3583799600601196, + "learning_rate": 0.00018450648030168476, + "loss": 0.4005, + "step": 6020 + }, + { + "epoch": 0.23282752229815823, + "grad_norm": 3.1711816787719727, + "learning_rate": 0.00018448073928208297, + "loss": 0.3674, + "step": 6030 + }, + { + "epoch": 0.23321363759218502, + "grad_norm": 1.576937198638916, + "learning_rate": 0.0001844549982624812, + "loss": 0.3444, + "step": 6040 + }, + { + "epoch": 0.23359975288621182, + "grad_norm": 3.922267436981201, + "learning_rate": 0.0001844292572428794, + "loss": 0.5939, + "step": 6050 + }, + { + "epoch": 0.23398586818023862, + "grad_norm": 2.9851067066192627, + "learning_rate": 0.00018440351622327761, + "loss": 0.2387, + "step": 6060 + }, + { + "epoch": 0.2343719834742654, + "grad_norm": 2.1216888427734375, + "learning_rate": 0.00018437777520367583, + "loss": 0.3836, + "step": 6070 + }, + { + "epoch": 0.2347580987682922, + "grad_norm": 2.9788095951080322, + "learning_rate": 0.00018435203418407404, + "loss": 0.474, + "step": 6080 + }, + { + "epoch": 
0.235144214062319, + "grad_norm": 1.0204919576644897, + "learning_rate": 0.00018432629316447225, + "loss": 0.2837, + "step": 6090 + }, + { + "epoch": 0.2355303293563458, + "grad_norm": 0.9091696739196777, + "learning_rate": 0.00018430055214487047, + "loss": 0.6203, + "step": 6100 + }, + { + "epoch": 0.2359164446503726, + "grad_norm": 0.25899162888526917, + "learning_rate": 0.00018427481112526868, + "loss": 0.4759, + "step": 6110 + }, + { + "epoch": 0.2363025599443994, + "grad_norm": 1.8625538349151611, + "learning_rate": 0.0001842490701056669, + "loss": 0.2992, + "step": 6120 + }, + { + "epoch": 0.23668867523842618, + "grad_norm": 1.586521863937378, + "learning_rate": 0.0001842233290860651, + "loss": 0.6122, + "step": 6130 + }, + { + "epoch": 0.23707479053245298, + "grad_norm": 2.387650966644287, + "learning_rate": 0.00018419758806646332, + "loss": 0.3276, + "step": 6140 + }, + { + "epoch": 0.23746090582647977, + "grad_norm": 4.840515613555908, + "learning_rate": 0.00018417184704686153, + "loss": 0.6295, + "step": 6150 + }, + { + "epoch": 0.2378470211205066, + "grad_norm": 1.70024836063385, + "learning_rate": 0.00018414610602725975, + "loss": 0.2047, + "step": 6160 + }, + { + "epoch": 0.2382331364145334, + "grad_norm": 2.791619062423706, + "learning_rate": 0.00018412036500765796, + "loss": 0.4364, + "step": 6170 + }, + { + "epoch": 0.2386192517085602, + "grad_norm": 3.710066318511963, + "learning_rate": 0.00018409462398805617, + "loss": 0.4564, + "step": 6180 + }, + { + "epoch": 0.23900536700258698, + "grad_norm": 2.564347982406616, + "learning_rate": 0.0001840688829684544, + "loss": 0.3156, + "step": 6190 + }, + { + "epoch": 0.23939148229661378, + "grad_norm": 2.3921267986297607, + "learning_rate": 0.0001840431419488526, + "loss": 0.3483, + "step": 6200 + }, + { + "epoch": 0.23977759759064057, + "grad_norm": 1.4785810708999634, + "learning_rate": 0.00018401740092925081, + "loss": 0.4338, + "step": 6210 + }, + { + "epoch": 0.24016371288466737, + "grad_norm": 3.624790906906128, + "learning_rate": 0.00018399165990964903, + "loss": 0.7156, + "step": 6220 + }, + { + "epoch": 0.24054982817869416, + "grad_norm": 3.942161798477173, + "learning_rate": 0.00018396591889004724, + "loss": 0.3932, + "step": 6230 + }, + { + "epoch": 0.24093594347272096, + "grad_norm": 3.2236740589141846, + "learning_rate": 0.00018394017787044545, + "loss": 0.3933, + "step": 6240 + }, + { + "epoch": 0.24132205876674775, + "grad_norm": 2.5040500164031982, + "learning_rate": 0.00018391443685084367, + "loss": 0.5711, + "step": 6250 + }, + { + "epoch": 0.24170817406077455, + "grad_norm": 1.9934203624725342, + "learning_rate": 0.00018388869583124188, + "loss": 0.3074, + "step": 6260 + }, + { + "epoch": 0.24209428935480135, + "grad_norm": 3.702509641647339, + "learning_rate": 0.0001838629548116401, + "loss": 0.3454, + "step": 6270 + }, + { + "epoch": 0.24248040464882814, + "grad_norm": 2.076802968978882, + "learning_rate": 0.0001838372137920383, + "loss": 0.3044, + "step": 6280 + }, + { + "epoch": 0.24286651994285494, + "grad_norm": 5.798679351806641, + "learning_rate": 0.00018381147277243652, + "loss": 0.3396, + "step": 6290 + }, + { + "epoch": 0.24325263523688173, + "grad_norm": 4.698869705200195, + "learning_rate": 0.00018378573175283473, + "loss": 0.3735, + "step": 6300 + }, + { + "epoch": 0.24363875053090853, + "grad_norm": 3.029979705810547, + "learning_rate": 0.00018375999073323295, + "loss": 0.3891, + "step": 6310 + }, + { + "epoch": 0.24402486582493532, + "grad_norm": 2.5507185459136963, + "learning_rate": 
0.00018373424971363116, + "loss": 0.4854, + "step": 6320 + }, + { + "epoch": 0.24441098111896212, + "grad_norm": 3.2052571773529053, + "learning_rate": 0.00018370850869402937, + "loss": 0.6789, + "step": 6330 + }, + { + "epoch": 0.2447970964129889, + "grad_norm": 1.9265435934066772, + "learning_rate": 0.00018368276767442761, + "loss": 0.4505, + "step": 6340 + }, + { + "epoch": 0.2451832117070157, + "grad_norm": 0.8391959071159363, + "learning_rate": 0.0001836570266548258, + "loss": 0.3432, + "step": 6350 + }, + { + "epoch": 0.2455693270010425, + "grad_norm": 3.4653851985931396, + "learning_rate": 0.00018363128563522401, + "loss": 0.3571, + "step": 6360 + }, + { + "epoch": 0.2459554422950693, + "grad_norm": 2.3033368587493896, + "learning_rate": 0.00018360554461562223, + "loss": 0.3625, + "step": 6370 + }, + { + "epoch": 0.2463415575890961, + "grad_norm": 1.659408450126648, + "learning_rate": 0.00018357980359602044, + "loss": 0.5311, + "step": 6380 + }, + { + "epoch": 0.2467276728831229, + "grad_norm": 1.1839714050292969, + "learning_rate": 0.00018355406257641865, + "loss": 0.3905, + "step": 6390 + }, + { + "epoch": 0.24711378817714968, + "grad_norm": 0.49230822920799255, + "learning_rate": 0.00018352832155681687, + "loss": 0.4021, + "step": 6400 + }, + { + "epoch": 0.24749990347117648, + "grad_norm": 4.451594829559326, + "learning_rate": 0.0001835025805372151, + "loss": 0.4504, + "step": 6410 + }, + { + "epoch": 0.2478860187652033, + "grad_norm": 1.0058324337005615, + "learning_rate": 0.0001834768395176133, + "loss": 0.2636, + "step": 6420 + }, + { + "epoch": 0.2482721340592301, + "grad_norm": 2.7853894233703613, + "learning_rate": 0.0001834510984980115, + "loss": 0.47, + "step": 6430 + }, + { + "epoch": 0.2486582493532569, + "grad_norm": 2.730095148086548, + "learning_rate": 0.00018342535747840972, + "loss": 0.3941, + "step": 6440 + }, + { + "epoch": 0.2490443646472837, + "grad_norm": 2.4993178844451904, + "learning_rate": 0.00018339961645880793, + "loss": 0.5777, + "step": 6450 + }, + { + "epoch": 0.24943047994131048, + "grad_norm": 2.361525297164917, + "learning_rate": 0.00018337387543920617, + "loss": 0.3798, + "step": 6460 + }, + { + "epoch": 0.24981659523533728, + "grad_norm": 2.5558526515960693, + "learning_rate": 0.00018334813441960436, + "loss": 0.3113, + "step": 6470 + }, + { + "epoch": 0.25020271052936405, + "grad_norm": 0.8033503890037537, + "learning_rate": 0.0001833223934000026, + "loss": 0.5254, + "step": 6480 + }, + { + "epoch": 0.25058882582339087, + "grad_norm": 2.721090078353882, + "learning_rate": 0.0001832966523804008, + "loss": 0.393, + "step": 6490 + }, + { + "epoch": 0.25097494111741764, + "grad_norm": 1.7147916555404663, + "learning_rate": 0.000183270911360799, + "loss": 0.3225, + "step": 6500 + }, + { + "epoch": 0.25136105641144446, + "grad_norm": 2.388347864151001, + "learning_rate": 0.00018324517034119721, + "loss": 0.3519, + "step": 6510 + }, + { + "epoch": 0.25174717170547123, + "grad_norm": 2.470891237258911, + "learning_rate": 0.00018321942932159543, + "loss": 0.4384, + "step": 6520 + }, + { + "epoch": 0.25213328699949805, + "grad_norm": 1.4743351936340332, + "learning_rate": 0.00018319368830199367, + "loss": 0.2464, + "step": 6530 + }, + { + "epoch": 0.2525194022935248, + "grad_norm": 1.5889122486114502, + "learning_rate": 0.00018316794728239185, + "loss": 0.3149, + "step": 6540 + }, + { + "epoch": 0.25290551758755164, + "grad_norm": 4.900819778442383, + "learning_rate": 0.0001831422062627901, + "loss": 0.3978, + "step": 6550 + }, + { + "epoch": 
0.25329163288157847, + "grad_norm": 5.22566556930542, + "learning_rate": 0.00018311646524318828, + "loss": 0.4473, + "step": 6560 + }, + { + "epoch": 0.25367774817560523, + "grad_norm": 4.7480363845825195, + "learning_rate": 0.0001830907242235865, + "loss": 0.3976, + "step": 6570 + }, + { + "epoch": 0.25406386346963206, + "grad_norm": 1.4711374044418335, + "learning_rate": 0.0001830649832039847, + "loss": 0.5183, + "step": 6580 + }, + { + "epoch": 0.2544499787636588, + "grad_norm": 2.237309217453003, + "learning_rate": 0.00018303924218438292, + "loss": 0.2171, + "step": 6590 + }, + { + "epoch": 0.25483609405768565, + "grad_norm": 4.107303619384766, + "learning_rate": 0.00018301350116478116, + "loss": 0.3918, + "step": 6600 + }, + { + "epoch": 0.2552222093517124, + "grad_norm": 4.7285003662109375, + "learning_rate": 0.00018298776014517935, + "loss": 0.2042, + "step": 6610 + }, + { + "epoch": 0.25560832464573924, + "grad_norm": 2.1333792209625244, + "learning_rate": 0.0001829620191255776, + "loss": 0.3502, + "step": 6620 + }, + { + "epoch": 0.255994439939766, + "grad_norm": 3.062173843383789, + "learning_rate": 0.00018293627810597577, + "loss": 0.3949, + "step": 6630 + }, + { + "epoch": 0.25638055523379283, + "grad_norm": 1.538854956626892, + "learning_rate": 0.00018291053708637401, + "loss": 0.4613, + "step": 6640 + }, + { + "epoch": 0.2567666705278196, + "grad_norm": 2.546586751937866, + "learning_rate": 0.00018288479606677223, + "loss": 0.5868, + "step": 6650 + }, + { + "epoch": 0.2571527858218464, + "grad_norm": 2.7282049655914307, + "learning_rate": 0.00018285905504717041, + "loss": 0.4186, + "step": 6660 + }, + { + "epoch": 0.2575389011158732, + "grad_norm": 3.204634189605713, + "learning_rate": 0.00018283331402756865, + "loss": 0.4072, + "step": 6670 + }, + { + "epoch": 0.2579250164099, + "grad_norm": 2.421846866607666, + "learning_rate": 0.00018280757300796684, + "loss": 0.306, + "step": 6680 + }, + { + "epoch": 0.2583111317039268, + "grad_norm": 4.243416786193848, + "learning_rate": 0.00018278183198836508, + "loss": 0.2631, + "step": 6690 + }, + { + "epoch": 0.2586972469979536, + "grad_norm": 1.0495362281799316, + "learning_rate": 0.00018275609096876327, + "loss": 0.3488, + "step": 6700 + }, + { + "epoch": 0.25908336229198037, + "grad_norm": 1.915279746055603, + "learning_rate": 0.0001827303499491615, + "loss": 0.2589, + "step": 6710 + }, + { + "epoch": 0.2594694775860072, + "grad_norm": 3.724299192428589, + "learning_rate": 0.00018270460892955972, + "loss": 0.5118, + "step": 6720 + }, + { + "epoch": 0.25985559288003396, + "grad_norm": 2.832204580307007, + "learning_rate": 0.0001826788679099579, + "loss": 0.2508, + "step": 6730 + }, + { + "epoch": 0.2602417081740608, + "grad_norm": 1.1942508220672607, + "learning_rate": 0.00018265312689035615, + "loss": 0.4328, + "step": 6740 + }, + { + "epoch": 0.26062782346808755, + "grad_norm": 1.0741711854934692, + "learning_rate": 0.00018262738587075433, + "loss": 0.3514, + "step": 6750 + }, + { + "epoch": 0.2610139387621144, + "grad_norm": 2.9918277263641357, + "learning_rate": 0.00018260164485115257, + "loss": 0.3528, + "step": 6760 + }, + { + "epoch": 0.26140005405614114, + "grad_norm": 1.3773655891418457, + "learning_rate": 0.0001825759038315508, + "loss": 0.365, + "step": 6770 + }, + { + "epoch": 0.26178616935016796, + "grad_norm": 3.5288615226745605, + "learning_rate": 0.000182550162811949, + "loss": 0.3645, + "step": 6780 + }, + { + "epoch": 0.26217228464419473, + "grad_norm": 1.2178785800933838, + "learning_rate": 
0.00018252442179234721, + "loss": 0.3742, + "step": 6790 + }, + { + "epoch": 0.26255839993822155, + "grad_norm": 2.7981081008911133, + "learning_rate": 0.0001824986807727454, + "loss": 0.6174, + "step": 6800 + }, + { + "epoch": 0.2629445152322484, + "grad_norm": 1.6766215562820435, + "learning_rate": 0.00018247293975314364, + "loss": 0.3028, + "step": 6810 + }, + { + "epoch": 0.26333063052627514, + "grad_norm": 3.7797629833221436, + "learning_rate": 0.00018244719873354183, + "loss": 0.2633, + "step": 6820 + }, + { + "epoch": 0.26371674582030197, + "grad_norm": 7.794743537902832, + "learning_rate": 0.00018242145771394007, + "loss": 0.3586, + "step": 6830 + }, + { + "epoch": 0.26410286111432874, + "grad_norm": 0.5704814195632935, + "learning_rate": 0.00018239571669433828, + "loss": 0.3506, + "step": 6840 + }, + { + "epoch": 0.26448897640835556, + "grad_norm": 5.771059513092041, + "learning_rate": 0.0001823699756747365, + "loss": 0.3881, + "step": 6850 + }, + { + "epoch": 0.2648750917023823, + "grad_norm": 2.723592519760132, + "learning_rate": 0.0001823442346551347, + "loss": 0.3955, + "step": 6860 + }, + { + "epoch": 0.26526120699640915, + "grad_norm": 1.5448215007781982, + "learning_rate": 0.0001823184936355329, + "loss": 0.495, + "step": 6870 + }, + { + "epoch": 0.2656473222904359, + "grad_norm": 2.2980363368988037, + "learning_rate": 0.00018229275261593113, + "loss": 0.2695, + "step": 6880 + }, + { + "epoch": 0.26603343758446274, + "grad_norm": 1.959811806678772, + "learning_rate": 0.00018226701159632932, + "loss": 0.383, + "step": 6890 + }, + { + "epoch": 0.2664195528784895, + "grad_norm": 2.1491482257843018, + "learning_rate": 0.00018224127057672756, + "loss": 0.5655, + "step": 6900 + }, + { + "epoch": 0.26680566817251633, + "grad_norm": 6.472841262817383, + "learning_rate": 0.00018221552955712577, + "loss": 0.4757, + "step": 6910 + }, + { + "epoch": 0.2671917834665431, + "grad_norm": 7.878561496734619, + "learning_rate": 0.000182189788537524, + "loss": 0.3944, + "step": 6920 + }, + { + "epoch": 0.2675778987605699, + "grad_norm": 0.052701435983181, + "learning_rate": 0.0001821640475179222, + "loss": 0.382, + "step": 6930 + }, + { + "epoch": 0.2679640140545967, + "grad_norm": 2.294677972793579, + "learning_rate": 0.00018213830649832039, + "loss": 0.2932, + "step": 6940 + }, + { + "epoch": 0.2683501293486235, + "grad_norm": 1.6058757305145264, + "learning_rate": 0.00018211256547871863, + "loss": 0.4438, + "step": 6950 + }, + { + "epoch": 0.2687362446426503, + "grad_norm": 4.003495693206787, + "learning_rate": 0.00018208682445911684, + "loss": 0.5945, + "step": 6960 + }, + { + "epoch": 0.2691223599366771, + "grad_norm": 1.423017144203186, + "learning_rate": 0.00018206108343951505, + "loss": 0.4356, + "step": 6970 + }, + { + "epoch": 0.26950847523070387, + "grad_norm": 2.206341028213501, + "learning_rate": 0.00018203534241991327, + "loss": 0.344, + "step": 6980 + }, + { + "epoch": 0.2698945905247307, + "grad_norm": 0.6644784212112427, + "learning_rate": 0.00018200960140031148, + "loss": 0.4988, + "step": 6990 + }, + { + "epoch": 0.27028070581875746, + "grad_norm": 2.4569833278656006, + "learning_rate": 0.0001819838603807097, + "loss": 0.3689, + "step": 7000 + }, + { + "epoch": 0.2706668211127843, + "grad_norm": 1.554567575454712, + "learning_rate": 0.00018195811936110788, + "loss": 0.4684, + "step": 7010 + }, + { + "epoch": 0.27105293640681105, + "grad_norm": 3.2556328773498535, + "learning_rate": 0.00018193237834150612, + "loss": 0.611, + "step": 7020 + }, + { + "epoch": 
0.2714390517008379, + "grad_norm": 2.9123427867889404, + "learning_rate": 0.00018190663732190433, + "loss": 0.4278, + "step": 7030 + }, + { + "epoch": 0.27182516699486464, + "grad_norm": 2.159273862838745, + "learning_rate": 0.00018188089630230255, + "loss": 0.2384, + "step": 7040 + }, + { + "epoch": 0.27221128228889147, + "grad_norm": 3.4977822303771973, + "learning_rate": 0.00018185515528270076, + "loss": 0.5459, + "step": 7050 + }, + { + "epoch": 0.27259739758291823, + "grad_norm": 1.1822031736373901, + "learning_rate": 0.00018182941426309897, + "loss": 0.4364, + "step": 7060 + }, + { + "epoch": 0.27298351287694506, + "grad_norm": 2.4467339515686035, + "learning_rate": 0.00018180367324349719, + "loss": 0.5198, + "step": 7070 + }, + { + "epoch": 0.2733696281709719, + "grad_norm": 1.0406467914581299, + "learning_rate": 0.0001817779322238954, + "loss": 0.2797, + "step": 7080 + }, + { + "epoch": 0.27375574346499865, + "grad_norm": 1.925830602645874, + "learning_rate": 0.0001817521912042936, + "loss": 0.4898, + "step": 7090 + }, + { + "epoch": 0.27414185875902547, + "grad_norm": 3.0385682582855225, + "learning_rate": 0.00018172645018469183, + "loss": 0.3867, + "step": 7100 + }, + { + "epoch": 0.27452797405305224, + "grad_norm": 1.5285695791244507, + "learning_rate": 0.00018170070916509004, + "loss": 0.4233, + "step": 7110 + }, + { + "epoch": 0.27491408934707906, + "grad_norm": 1.266693115234375, + "learning_rate": 0.00018167496814548825, + "loss": 0.4724, + "step": 7120 + }, + { + "epoch": 0.27530020464110583, + "grad_norm": 3.371323585510254, + "learning_rate": 0.00018164922712588647, + "loss": 0.533, + "step": 7130 + }, + { + "epoch": 0.27568631993513265, + "grad_norm": 2.662691116333008, + "learning_rate": 0.00018162348610628468, + "loss": 0.3134, + "step": 7140 + }, + { + "epoch": 0.2760724352291594, + "grad_norm": 1.8977057933807373, + "learning_rate": 0.0001815977450866829, + "loss": 0.3038, + "step": 7150 + }, + { + "epoch": 0.27645855052318624, + "grad_norm": 3.1027894020080566, + "learning_rate": 0.0001815720040670811, + "loss": 0.5074, + "step": 7160 + }, + { + "epoch": 0.276844665817213, + "grad_norm": 1.2112785577774048, + "learning_rate": 0.00018154626304747932, + "loss": 0.324, + "step": 7170 + }, + { + "epoch": 0.27723078111123983, + "grad_norm": 1.6500996351242065, + "learning_rate": 0.00018152052202787753, + "loss": 0.2856, + "step": 7180 + }, + { + "epoch": 0.2776168964052666, + "grad_norm": 3.215747833251953, + "learning_rate": 0.00018149478100827575, + "loss": 0.4522, + "step": 7190 + }, + { + "epoch": 0.2780030116992934, + "grad_norm": 4.8541059494018555, + "learning_rate": 0.00018146903998867396, + "loss": 0.4106, + "step": 7200 + }, + { + "epoch": 0.2783891269933202, + "grad_norm": 2.3697152137756348, + "learning_rate": 0.00018144329896907217, + "loss": 0.2673, + "step": 7210 + }, + { + "epoch": 0.278775242287347, + "grad_norm": 2.9693639278411865, + "learning_rate": 0.00018141755794947039, + "loss": 0.3949, + "step": 7220 + }, + { + "epoch": 0.2791613575813738, + "grad_norm": 2.691817283630371, + "learning_rate": 0.0001813918169298686, + "loss": 0.3427, + "step": 7230 + }, + { + "epoch": 0.2795474728754006, + "grad_norm": 5.197331428527832, + "learning_rate": 0.0001813660759102668, + "loss": 0.4331, + "step": 7240 + }, + { + "epoch": 0.27993358816942737, + "grad_norm": 1.5799933671951294, + "learning_rate": 0.00018134033489066503, + "loss": 0.3543, + "step": 7250 + }, + { + "epoch": 0.2803197034634542, + "grad_norm": 1.3614271879196167, + "learning_rate": 
0.00018131459387106324, + "loss": 0.5289, + "step": 7260 + }, + { + "epoch": 0.28070581875748096, + "grad_norm": 2.2942802906036377, + "learning_rate": 0.00018128885285146145, + "loss": 0.4318, + "step": 7270 + }, + { + "epoch": 0.2810919340515078, + "grad_norm": 1.1805604696273804, + "learning_rate": 0.00018126311183185967, + "loss": 0.4754, + "step": 7280 + }, + { + "epoch": 0.28147804934553455, + "grad_norm": 0.5108867883682251, + "learning_rate": 0.00018123737081225788, + "loss": 0.4517, + "step": 7290 + }, + { + "epoch": 0.2818641646395614, + "grad_norm": 1.1736596822738647, + "learning_rate": 0.0001812116297926561, + "loss": 0.4538, + "step": 7300 + }, + { + "epoch": 0.28225027993358814, + "grad_norm": 5.497414588928223, + "learning_rate": 0.0001811858887730543, + "loss": 0.5116, + "step": 7310 + }, + { + "epoch": 0.28263639522761497, + "grad_norm": 1.1347368955612183, + "learning_rate": 0.00018116014775345252, + "loss": 0.3848, + "step": 7320 + }, + { + "epoch": 0.28302251052164173, + "grad_norm": 2.740715742111206, + "learning_rate": 0.00018113440673385073, + "loss": 0.3456, + "step": 7330 + }, + { + "epoch": 0.28340862581566856, + "grad_norm": 1.3853389024734497, + "learning_rate": 0.00018110866571424897, + "loss": 0.3398, + "step": 7340 + }, + { + "epoch": 0.2837947411096954, + "grad_norm": 7.493706703186035, + "learning_rate": 0.00018108292469464716, + "loss": 0.2726, + "step": 7350 + }, + { + "epoch": 0.28418085640372215, + "grad_norm": 1.81704843044281, + "learning_rate": 0.00018105718367504537, + "loss": 0.3818, + "step": 7360 + }, + { + "epoch": 0.28456697169774897, + "grad_norm": 2.4877755641937256, + "learning_rate": 0.00018103144265544359, + "loss": 0.3499, + "step": 7370 + }, + { + "epoch": 0.28495308699177574, + "grad_norm": 1.3704471588134766, + "learning_rate": 0.0001810057016358418, + "loss": 0.2346, + "step": 7380 + }, + { + "epoch": 0.28533920228580256, + "grad_norm": 2.664745569229126, + "learning_rate": 0.00018097996061624, + "loss": 0.4041, + "step": 7390 + }, + { + "epoch": 0.28572531757982933, + "grad_norm": 3.6539089679718018, + "learning_rate": 0.00018095421959663823, + "loss": 0.2885, + "step": 7400 + }, + { + "epoch": 0.28611143287385615, + "grad_norm": 0.8653857707977295, + "learning_rate": 0.00018092847857703647, + "loss": 0.3849, + "step": 7410 + }, + { + "epoch": 0.2864975481678829, + "grad_norm": 2.6319446563720703, + "learning_rate": 0.00018090273755743465, + "loss": 0.2728, + "step": 7420 + }, + { + "epoch": 0.28688366346190974, + "grad_norm": 2.3457818031311035, + "learning_rate": 0.00018087699653783287, + "loss": 0.446, + "step": 7430 + }, + { + "epoch": 0.2872697787559365, + "grad_norm": 0.8546158671379089, + "learning_rate": 0.00018085125551823108, + "loss": 0.2898, + "step": 7440 + }, + { + "epoch": 0.28765589404996333, + "grad_norm": 0.45937278866767883, + "learning_rate": 0.0001808255144986293, + "loss": 0.583, + "step": 7450 + }, + { + "epoch": 0.2880420093439901, + "grad_norm": 1.7129520177841187, + "learning_rate": 0.00018079977347902753, + "loss": 0.4908, + "step": 7460 + }, + { + "epoch": 0.2884281246380169, + "grad_norm": 4.106715679168701, + "learning_rate": 0.00018077403245942572, + "loss": 0.3373, + "step": 7470 + }, + { + "epoch": 0.2888142399320437, + "grad_norm": 3.8112800121307373, + "learning_rate": 0.00018074829143982396, + "loss": 0.392, + "step": 7480 + }, + { + "epoch": 0.2892003552260705, + "grad_norm": 0.5382593274116516, + "learning_rate": 0.00018072255042022215, + "loss": 0.2929, + "step": 7490 + }, + { + "epoch": 
0.2895864705200973, + "grad_norm": 2.50888991355896, + "learning_rate": 0.00018069680940062036, + "loss": 0.3361, + "step": 7500 + }, + { + "epoch": 0.2899725858141241, + "grad_norm": 3.3544275760650635, + "learning_rate": 0.00018067106838101857, + "loss": 0.388, + "step": 7510 + }, + { + "epoch": 0.2903587011081509, + "grad_norm": 1.192386507987976, + "learning_rate": 0.00018064532736141679, + "loss": 0.4427, + "step": 7520 + }, + { + "epoch": 0.2907448164021777, + "grad_norm": 1.5527079105377197, + "learning_rate": 0.00018061958634181503, + "loss": 0.4023, + "step": 7530 + }, + { + "epoch": 0.29113093169620446, + "grad_norm": 0.67446368932724, + "learning_rate": 0.0001805938453222132, + "loss": 0.4949, + "step": 7540 + }, + { + "epoch": 0.2915170469902313, + "grad_norm": 1.6349838972091675, + "learning_rate": 0.00018056810430261145, + "loss": 0.3811, + "step": 7550 + }, + { + "epoch": 0.29190316228425806, + "grad_norm": 1.4848904609680176, + "learning_rate": 0.00018054236328300964, + "loss": 0.3851, + "step": 7560 + }, + { + "epoch": 0.2922892775782849, + "grad_norm": 0.9933151006698608, + "learning_rate": 0.00018051662226340785, + "loss": 0.4699, + "step": 7570 + }, + { + "epoch": 0.29267539287231165, + "grad_norm": 1.1026233434677124, + "learning_rate": 0.00018049088124380607, + "loss": 0.3287, + "step": 7580 + }, + { + "epoch": 0.29306150816633847, + "grad_norm": 1.232954740524292, + "learning_rate": 0.00018046514022420428, + "loss": 0.3722, + "step": 7590 + }, + { + "epoch": 0.2934476234603653, + "grad_norm": 3.8303146362304688, + "learning_rate": 0.00018043939920460252, + "loss": 0.2985, + "step": 7600 + }, + { + "epoch": 0.29383373875439206, + "grad_norm": 1.9358845949172974, + "learning_rate": 0.0001804136581850007, + "loss": 0.4361, + "step": 7610 + }, + { + "epoch": 0.2942198540484189, + "grad_norm": 1.8905962705612183, + "learning_rate": 0.00018038791716539895, + "loss": 0.2835, + "step": 7620 + }, + { + "epoch": 0.29460596934244565, + "grad_norm": 1.9965651035308838, + "learning_rate": 0.00018036217614579713, + "loss": 0.5387, + "step": 7630 + }, + { + "epoch": 0.2949920846364725, + "grad_norm": 4.204270839691162, + "learning_rate": 0.00018033643512619535, + "loss": 0.3498, + "step": 7640 + }, + { + "epoch": 0.29537819993049924, + "grad_norm": 1.4732340574264526, + "learning_rate": 0.00018031069410659359, + "loss": 0.315, + "step": 7650 + }, + { + "epoch": 0.29576431522452606, + "grad_norm": 1.0233594179153442, + "learning_rate": 0.00018028495308699177, + "loss": 0.1536, + "step": 7660 + }, + { + "epoch": 0.29615043051855283, + "grad_norm": 3.1531457901000977, + "learning_rate": 0.00018025921206739, + "loss": 0.3793, + "step": 7670 + }, + { + "epoch": 0.29653654581257965, + "grad_norm": 0.8080945014953613, + "learning_rate": 0.0001802334710477882, + "loss": 0.5589, + "step": 7680 + }, + { + "epoch": 0.2969226611066064, + "grad_norm": 3.1202728748321533, + "learning_rate": 0.00018020773002818644, + "loss": 0.4652, + "step": 7690 + }, + { + "epoch": 0.29730877640063325, + "grad_norm": 2.5934784412384033, + "learning_rate": 0.00018018198900858463, + "loss": 0.4921, + "step": 7700 + }, + { + "epoch": 0.29769489169466, + "grad_norm": 2.858642101287842, + "learning_rate": 0.00018015624798898284, + "loss": 0.2732, + "step": 7710 + }, + { + "epoch": 0.29808100698868684, + "grad_norm": 3.621229887008667, + "learning_rate": 0.00018013050696938108, + "loss": 0.5639, + "step": 7720 + }, + { + "epoch": 0.2984671222827136, + "grad_norm": 3.7943220138549805, + "learning_rate": 
0.00018010476594977926, + "loss": 0.3177, + "step": 7730 + }, + { + "epoch": 0.2988532375767404, + "grad_norm": 1.6371623277664185, + "learning_rate": 0.0001800790249301775, + "loss": 0.4211, + "step": 7740 + }, + { + "epoch": 0.2992393528707672, + "grad_norm": 1.9557713270187378, + "learning_rate": 0.0001800532839105757, + "loss": 0.4351, + "step": 7750 + }, + { + "epoch": 0.299625468164794, + "grad_norm": 2.684964895248413, + "learning_rate": 0.00018002754289097393, + "loss": 0.39, + "step": 7760 + }, + { + "epoch": 0.3000115834588208, + "grad_norm": 1.7401316165924072, + "learning_rate": 0.00018000180187137215, + "loss": 0.2844, + "step": 7770 + }, + { + "epoch": 0.3003976987528476, + "grad_norm": 0.6305844187736511, + "learning_rate": 0.00017997606085177033, + "loss": 0.2472, + "step": 7780 + }, + { + "epoch": 0.3007838140468744, + "grad_norm": 2.2880289554595947, + "learning_rate": 0.00017995031983216857, + "loss": 0.3952, + "step": 7790 + }, + { + "epoch": 0.3011699293409012, + "grad_norm": 3.423980951309204, + "learning_rate": 0.00017992457881256676, + "loss": 0.4459, + "step": 7800 + }, + { + "epoch": 0.30155604463492797, + "grad_norm": 0.6920475363731384, + "learning_rate": 0.000179898837792965, + "loss": 0.2909, + "step": 7810 + }, + { + "epoch": 0.3019421599289548, + "grad_norm": 0.8905349373817444, + "learning_rate": 0.00017987309677336318, + "loss": 0.346, + "step": 7820 + }, + { + "epoch": 0.30232827522298156, + "grad_norm": 1.8836702108383179, + "learning_rate": 0.00017984735575376143, + "loss": 0.4038, + "step": 7830 + }, + { + "epoch": 0.3027143905170084, + "grad_norm": 2.6712753772735596, + "learning_rate": 0.00017982161473415964, + "loss": 0.3452, + "step": 7840 + }, + { + "epoch": 0.30310050581103515, + "grad_norm": 2.344122886657715, + "learning_rate": 0.00017979587371455785, + "loss": 0.5091, + "step": 7850 + }, + { + "epoch": 0.30348662110506197, + "grad_norm": 3.734415054321289, + "learning_rate": 0.00017977013269495607, + "loss": 0.3893, + "step": 7860 + }, + { + "epoch": 0.3038727363990888, + "grad_norm": 1.70572829246521, + "learning_rate": 0.00017974439167535425, + "loss": 0.4829, + "step": 7870 + }, + { + "epoch": 0.30425885169311556, + "grad_norm": 1.779189109802246, + "learning_rate": 0.0001797186506557525, + "loss": 0.5361, + "step": 7880 + }, + { + "epoch": 0.3046449669871424, + "grad_norm": 2.888803482055664, + "learning_rate": 0.00017969290963615068, + "loss": 0.4305, + "step": 7890 + }, + { + "epoch": 0.30503108228116915, + "grad_norm": 1.2247655391693115, + "learning_rate": 0.00017966716861654892, + "loss": 0.3817, + "step": 7900 + }, + { + "epoch": 0.305417197575196, + "grad_norm": 2.995152473449707, + "learning_rate": 0.00017964142759694713, + "loss": 0.4669, + "step": 7910 + }, + { + "epoch": 0.30580331286922274, + "grad_norm": 8.049060821533203, + "learning_rate": 0.00017961568657734535, + "loss": 0.6706, + "step": 7920 + }, + { + "epoch": 0.30618942816324957, + "grad_norm": 2.1181435585021973, + "learning_rate": 0.00017958994555774356, + "loss": 0.4353, + "step": 7930 + }, + { + "epoch": 0.30657554345727633, + "grad_norm": 8.394509315490723, + "learning_rate": 0.00017956420453814174, + "loss": 0.3497, + "step": 7940 + }, + { + "epoch": 0.30696165875130316, + "grad_norm": 2.5140750408172607, + "learning_rate": 0.00017953846351853998, + "loss": 0.5774, + "step": 7950 + }, + { + "epoch": 0.3073477740453299, + "grad_norm": 2.720942974090576, + "learning_rate": 0.0001795127224989382, + "loss": 0.4457, + "step": 7960 + }, + { + "epoch": 
0.30773388933935675, + "grad_norm": 1.8155667781829834, + "learning_rate": 0.0001794869814793364, + "loss": 0.4155, + "step": 7970 + }, + { + "epoch": 0.3081200046333835, + "grad_norm": 1.9989752769470215, + "learning_rate": 0.00017946124045973462, + "loss": 0.3233, + "step": 7980 + }, + { + "epoch": 0.30850611992741034, + "grad_norm": 0.7483557462692261, + "learning_rate": 0.00017943549944013284, + "loss": 0.2932, + "step": 7990 + }, + { + "epoch": 0.3088922352214371, + "grad_norm": 0.5750642418861389, + "learning_rate": 0.00017940975842053105, + "loss": 0.401, + "step": 8000 + }, + { + "epoch": 0.30927835051546393, + "grad_norm": 1.2084500789642334, + "learning_rate": 0.00017938401740092924, + "loss": 0.3705, + "step": 8010 + }, + { + "epoch": 0.3096644658094907, + "grad_norm": 1.833434820175171, + "learning_rate": 0.00017935827638132748, + "loss": 0.3507, + "step": 8020 + }, + { + "epoch": 0.3100505811035175, + "grad_norm": 3.147508382797241, + "learning_rate": 0.0001793325353617257, + "loss": 0.3255, + "step": 8030 + }, + { + "epoch": 0.3104366963975443, + "grad_norm": 2.150932788848877, + "learning_rate": 0.0001793067943421239, + "loss": 0.3401, + "step": 8040 + }, + { + "epoch": 0.3108228116915711, + "grad_norm": 3.3340635299682617, + "learning_rate": 0.00017928105332252212, + "loss": 0.3606, + "step": 8050 + }, + { + "epoch": 0.3112089269855979, + "grad_norm": 5.173205375671387, + "learning_rate": 0.00017925531230292033, + "loss": 0.1695, + "step": 8060 + }, + { + "epoch": 0.3115950422796247, + "grad_norm": 1.0863877534866333, + "learning_rate": 0.00017922957128331854, + "loss": 0.3038, + "step": 8070 + }, + { + "epoch": 0.31198115757365147, + "grad_norm": 1.5977118015289307, + "learning_rate": 0.00017920383026371676, + "loss": 0.2291, + "step": 8080 + }, + { + "epoch": 0.3123672728676783, + "grad_norm": 4.040243625640869, + "learning_rate": 0.00017917808924411497, + "loss": 0.8538, + "step": 8090 + }, + { + "epoch": 0.31275338816170506, + "grad_norm": 1.5926854610443115, + "learning_rate": 0.00017915234822451318, + "loss": 0.4733, + "step": 8100 + }, + { + "epoch": 0.3131395034557319, + "grad_norm": 1.0959421396255493, + "learning_rate": 0.0001791266072049114, + "loss": 0.6076, + "step": 8110 + }, + { + "epoch": 0.3135256187497587, + "grad_norm": 2.786085367202759, + "learning_rate": 0.0001791008661853096, + "loss": 0.3229, + "step": 8120 + }, + { + "epoch": 0.3139117340437855, + "grad_norm": 2.2573914527893066, + "learning_rate": 0.00017907512516570782, + "loss": 0.3676, + "step": 8130 + }, + { + "epoch": 0.3142978493378123, + "grad_norm": 2.271852493286133, + "learning_rate": 0.00017904938414610604, + "loss": 0.6275, + "step": 8140 + }, + { + "epoch": 0.31468396463183906, + "grad_norm": 1.9762821197509766, + "learning_rate": 0.00017902364312650425, + "loss": 0.2232, + "step": 8150 + }, + { + "epoch": 0.3150700799258659, + "grad_norm": 2.9960873126983643, + "learning_rate": 0.00017899790210690246, + "loss": 0.4739, + "step": 8160 + }, + { + "epoch": 0.31545619521989265, + "grad_norm": 1.142216682434082, + "learning_rate": 0.00017897216108730068, + "loss": 0.5983, + "step": 8170 + }, + { + "epoch": 0.3158423105139195, + "grad_norm": 1.7127768993377686, + "learning_rate": 0.0001789464200676989, + "loss": 0.4131, + "step": 8180 + }, + { + "epoch": 0.31622842580794625, + "grad_norm": 1.579793095588684, + "learning_rate": 0.0001789206790480971, + "loss": 0.3119, + "step": 8190 + }, + { + "epoch": 0.31661454110197307, + "grad_norm": 0.9647886157035828, + "learning_rate": 
0.00017889493802849532, + "loss": 0.398, + "step": 8200 + }, + { + "epoch": 0.31700065639599984, + "grad_norm": 3.435312032699585, + "learning_rate": 0.00017886919700889353, + "loss": 0.405, + "step": 8210 + }, + { + "epoch": 0.31738677169002666, + "grad_norm": 2.1500205993652344, + "learning_rate": 0.00017884345598929174, + "loss": 0.2519, + "step": 8220 + }, + { + "epoch": 0.3177728869840534, + "grad_norm": 1.3107216358184814, + "learning_rate": 0.00017881771496968996, + "loss": 0.2846, + "step": 8230 + }, + { + "epoch": 0.31815900227808025, + "grad_norm": 0.1899029165506363, + "learning_rate": 0.00017879197395008817, + "loss": 0.4597, + "step": 8240 + }, + { + "epoch": 0.318545117572107, + "grad_norm": 2.329299211502075, + "learning_rate": 0.00017876623293048638, + "loss": 0.6523, + "step": 8250 + }, + { + "epoch": 0.31893123286613384, + "grad_norm": 0.41523978114128113, + "learning_rate": 0.0001787404919108846, + "loss": 0.3266, + "step": 8260 + }, + { + "epoch": 0.3193173481601606, + "grad_norm": 0.7914639711380005, + "learning_rate": 0.0001787147508912828, + "loss": 0.4029, + "step": 8270 + }, + { + "epoch": 0.31970346345418743, + "grad_norm": 0.6159287691116333, + "learning_rate": 0.00017868900987168102, + "loss": 0.4426, + "step": 8280 + }, + { + "epoch": 0.3200895787482142, + "grad_norm": 1.3690640926361084, + "learning_rate": 0.00017866326885207924, + "loss": 0.2974, + "step": 8290 + }, + { + "epoch": 0.320475694042241, + "grad_norm": 0.8592869639396667, + "learning_rate": 0.00017863752783247745, + "loss": 0.232, + "step": 8300 + }, + { + "epoch": 0.3208618093362678, + "grad_norm": 0.43169018626213074, + "learning_rate": 0.00017861178681287566, + "loss": 0.4033, + "step": 8310 + }, + { + "epoch": 0.3212479246302946, + "grad_norm": 0.8405828475952148, + "learning_rate": 0.00017858604579327388, + "loss": 0.3339, + "step": 8320 + }, + { + "epoch": 0.3216340399243214, + "grad_norm": 2.3412604331970215, + "learning_rate": 0.0001785603047736721, + "loss": 0.2781, + "step": 8330 + }, + { + "epoch": 0.3220201552183482, + "grad_norm": 2.412045478820801, + "learning_rate": 0.0001785345637540703, + "loss": 0.4346, + "step": 8340 + }, + { + "epoch": 0.32240627051237497, + "grad_norm": 3.626305341720581, + "learning_rate": 0.00017850882273446852, + "loss": 0.327, + "step": 8350 + }, + { + "epoch": 0.3227923858064018, + "grad_norm": 0.5645825266838074, + "learning_rate": 0.00017848308171486673, + "loss": 0.234, + "step": 8360 + }, + { + "epoch": 0.32317850110042856, + "grad_norm": 4.27307653427124, + "learning_rate": 0.00017845734069526494, + "loss": 0.5493, + "step": 8370 + }, + { + "epoch": 0.3235646163944554, + "grad_norm": 0.4511154890060425, + "learning_rate": 0.00017843159967566316, + "loss": 0.3501, + "step": 8380 + }, + { + "epoch": 0.3239507316884822, + "grad_norm": 0.314996600151062, + "learning_rate": 0.00017840585865606137, + "loss": 0.3544, + "step": 8390 + }, + { + "epoch": 0.324336846982509, + "grad_norm": 1.6546530723571777, + "learning_rate": 0.00017838011763645958, + "loss": 0.2455, + "step": 8400 + }, + { + "epoch": 0.3247229622765358, + "grad_norm": 3.2812252044677734, + "learning_rate": 0.0001783543766168578, + "loss": 0.3333, + "step": 8410 + }, + { + "epoch": 0.32510907757056257, + "grad_norm": 3.5717616081237793, + "learning_rate": 0.000178328635597256, + "loss": 0.4679, + "step": 8420 + }, + { + "epoch": 0.3254951928645894, + "grad_norm": 1.12017023563385, + "learning_rate": 0.00017830289457765422, + "loss": 0.3481, + "step": 8430 + }, + { + "epoch": 
0.32588130815861616, + "grad_norm": 1.869462490081787, + "learning_rate": 0.00017827715355805244, + "loss": 0.4566, + "step": 8440 + }, + { + "epoch": 0.326267423452643, + "grad_norm": 1.4613149166107178, + "learning_rate": 0.00017825141253845065, + "loss": 0.5456, + "step": 8450 + }, + { + "epoch": 0.32665353874666975, + "grad_norm": 0.6842670440673828, + "learning_rate": 0.0001782256715188489, + "loss": 0.2776, + "step": 8460 + }, + { + "epoch": 0.32703965404069657, + "grad_norm": 2.9485504627227783, + "learning_rate": 0.00017819993049924708, + "loss": 0.3204, + "step": 8470 + }, + { + "epoch": 0.32742576933472334, + "grad_norm": 3.2084853649139404, + "learning_rate": 0.0001781741894796453, + "loss": 0.3391, + "step": 8480 + }, + { + "epoch": 0.32781188462875016, + "grad_norm": 1.9732774496078491, + "learning_rate": 0.0001781484484600435, + "loss": 0.3283, + "step": 8490 + }, + { + "epoch": 0.32819799992277693, + "grad_norm": 0.6378610134124756, + "learning_rate": 0.00017812270744044172, + "loss": 0.4519, + "step": 8500 + }, + { + "epoch": 0.32858411521680375, + "grad_norm": 4.108947277069092, + "learning_rate": 0.00017809696642083993, + "loss": 0.4933, + "step": 8510 + }, + { + "epoch": 0.3289702305108305, + "grad_norm": 2.7623212337493896, + "learning_rate": 0.00017807122540123814, + "loss": 0.4197, + "step": 8520 + }, + { + "epoch": 0.32935634580485734, + "grad_norm": 1.8904645442962646, + "learning_rate": 0.00017804548438163638, + "loss": 0.325, + "step": 8530 + }, + { + "epoch": 0.3297424610988841, + "grad_norm": 0.5131659507751465, + "learning_rate": 0.00017801974336203457, + "loss": 0.2403, + "step": 8540 + }, + { + "epoch": 0.33012857639291093, + "grad_norm": 2.965916633605957, + "learning_rate": 0.0001779940023424328, + "loss": 0.4597, + "step": 8550 + }, + { + "epoch": 0.3305146916869377, + "grad_norm": 1.5409698486328125, + "learning_rate": 0.000177968261322831, + "loss": 0.4594, + "step": 8560 + }, + { + "epoch": 0.3309008069809645, + "grad_norm": 1.1746805906295776, + "learning_rate": 0.0001779425203032292, + "loss": 0.4581, + "step": 8570 + }, + { + "epoch": 0.3312869222749913, + "grad_norm": 4.493356227874756, + "learning_rate": 0.00017791677928362745, + "loss": 0.4699, + "step": 8580 + }, + { + "epoch": 0.3316730375690181, + "grad_norm": 3.506526470184326, + "learning_rate": 0.00017789103826402564, + "loss": 0.3974, + "step": 8590 + }, + { + "epoch": 0.3320591528630449, + "grad_norm": 2.3893234729766846, + "learning_rate": 0.00017786529724442388, + "loss": 0.2823, + "step": 8600 + }, + { + "epoch": 0.3324452681570717, + "grad_norm": 1.6228163242340088, + "learning_rate": 0.00017783955622482206, + "loss": 0.4199, + "step": 8610 + }, + { + "epoch": 0.3328313834510985, + "grad_norm": 3.2869131565093994, + "learning_rate": 0.0001778138152052203, + "loss": 0.3173, + "step": 8620 + }, + { + "epoch": 0.3332174987451253, + "grad_norm": 5.547116279602051, + "learning_rate": 0.0001777880741856185, + "loss": 0.4584, + "step": 8630 + }, + { + "epoch": 0.33360361403915206, + "grad_norm": 1.3338594436645508, + "learning_rate": 0.0001777623331660167, + "loss": 0.4235, + "step": 8640 + }, + { + "epoch": 0.3339897293331789, + "grad_norm": 1.9165093898773193, + "learning_rate": 0.00017773659214641494, + "loss": 0.2989, + "step": 8650 + }, + { + "epoch": 0.3343758446272057, + "grad_norm": 1.968935251235962, + "learning_rate": 0.00017771085112681313, + "loss": 0.4194, + "step": 8660 + }, + { + "epoch": 0.3347619599212325, + "grad_norm": 9.66997241973877, + "learning_rate": 
0.00017768511010721137, + "loss": 0.5818, + "step": 8670 + }, + { + "epoch": 0.3351480752152593, + "grad_norm": 2.3636281490325928, + "learning_rate": 0.00017765936908760956, + "loss": 0.3317, + "step": 8680 + }, + { + "epoch": 0.33553419050928607, + "grad_norm": 3.3569977283477783, + "learning_rate": 0.0001776336280680078, + "loss": 0.4388, + "step": 8690 + }, + { + "epoch": 0.3359203058033129, + "grad_norm": 1.2452306747436523, + "learning_rate": 0.00017760788704840598, + "loss": 0.1368, + "step": 8700 + }, + { + "epoch": 0.33630642109733966, + "grad_norm": 0.0380173958837986, + "learning_rate": 0.0001775821460288042, + "loss": 0.3264, + "step": 8710 + }, + { + "epoch": 0.3366925363913665, + "grad_norm": 1.5271002054214478, + "learning_rate": 0.00017755640500920244, + "loss": 0.2943, + "step": 8720 + }, + { + "epoch": 0.33707865168539325, + "grad_norm": 0.9701687693595886, + "learning_rate": 0.00017753066398960062, + "loss": 0.353, + "step": 8730 + }, + { + "epoch": 0.33746476697942007, + "grad_norm": 1.9296154975891113, + "learning_rate": 0.00017750492296999886, + "loss": 0.3776, + "step": 8740 + }, + { + "epoch": 0.33785088227344684, + "grad_norm": 1.2136276960372925, + "learning_rate": 0.00017747918195039705, + "loss": 0.5126, + "step": 8750 + }, + { + "epoch": 0.33823699756747366, + "grad_norm": 1.7323212623596191, + "learning_rate": 0.0001774534409307953, + "loss": 0.3477, + "step": 8760 + }, + { + "epoch": 0.33862311286150043, + "grad_norm": 1.164534091949463, + "learning_rate": 0.0001774276999111935, + "loss": 0.4053, + "step": 8770 + }, + { + "epoch": 0.33900922815552725, + "grad_norm": 0.42989471554756165, + "learning_rate": 0.0001774019588915917, + "loss": 0.3026, + "step": 8780 + }, + { + "epoch": 0.339395343449554, + "grad_norm": 2.357590436935425, + "learning_rate": 0.00017737621787198993, + "loss": 0.3869, + "step": 8790 + }, + { + "epoch": 0.33978145874358084, + "grad_norm": 1.9374550580978394, + "learning_rate": 0.00017735047685238812, + "loss": 0.2975, + "step": 8800 + }, + { + "epoch": 0.3401675740376076, + "grad_norm": 4.8107428550720215, + "learning_rate": 0.00017732473583278636, + "loss": 0.3959, + "step": 8810 + }, + { + "epoch": 0.34055368933163443, + "grad_norm": 1.938700556755066, + "learning_rate": 0.00017729899481318454, + "loss": 0.3726, + "step": 8820 + }, + { + "epoch": 0.3409398046256612, + "grad_norm": 3.147167682647705, + "learning_rate": 0.00017727325379358278, + "loss": 0.1828, + "step": 8830 + }, + { + "epoch": 0.341325919919688, + "grad_norm": 1.8921313285827637, + "learning_rate": 0.000177247512773981, + "loss": 0.2038, + "step": 8840 + }, + { + "epoch": 0.3417120352137148, + "grad_norm": 0.9098349213600159, + "learning_rate": 0.00017722177175437918, + "loss": 0.2853, + "step": 8850 + }, + { + "epoch": 0.3420981505077416, + "grad_norm": 2.4006853103637695, + "learning_rate": 0.00017719603073477742, + "loss": 0.6054, + "step": 8860 + }, + { + "epoch": 0.3424842658017684, + "grad_norm": 1.9303867816925049, + "learning_rate": 0.0001771702897151756, + "loss": 0.3507, + "step": 8870 + }, + { + "epoch": 0.3428703810957952, + "grad_norm": 0.49361029267311096, + "learning_rate": 0.00017714454869557385, + "loss": 0.4661, + "step": 8880 + }, + { + "epoch": 0.343256496389822, + "grad_norm": 2.542618751525879, + "learning_rate": 0.00017711880767597204, + "loss": 0.6924, + "step": 8890 + }, + { + "epoch": 0.3436426116838488, + "grad_norm": 0.5868918895721436, + "learning_rate": 0.00017709306665637028, + "loss": 0.4507, + "step": 8900 + }, + { + "epoch": 
0.3440287269778756, + "grad_norm": 2.4685137271881104, + "learning_rate": 0.0001770673256367685, + "loss": 0.4538, + "step": 8910 + }, + { + "epoch": 0.3444148422719024, + "grad_norm": 2.6662702560424805, + "learning_rate": 0.00017704158461716668, + "loss": 0.6181, + "step": 8920 + }, + { + "epoch": 0.3448009575659292, + "grad_norm": 1.705103874206543, + "learning_rate": 0.00017701584359756492, + "loss": 0.481, + "step": 8930 + }, + { + "epoch": 0.345187072859956, + "grad_norm": 2.0710952281951904, + "learning_rate": 0.0001769901025779631, + "loss": 0.4357, + "step": 8940 + }, + { + "epoch": 0.3455731881539828, + "grad_norm": 3.487117290496826, + "learning_rate": 0.00017696436155836134, + "loss": 0.4572, + "step": 8950 + }, + { + "epoch": 0.34595930344800957, + "grad_norm": 3.03472900390625, + "learning_rate": 0.00017693862053875956, + "loss": 0.4437, + "step": 8960 + }, + { + "epoch": 0.3463454187420364, + "grad_norm": 1.310692548751831, + "learning_rate": 0.00017691287951915777, + "loss": 0.4218, + "step": 8970 + }, + { + "epoch": 0.34673153403606316, + "grad_norm": 4.131219387054443, + "learning_rate": 0.00017688713849955598, + "loss": 0.445, + "step": 8980 + }, + { + "epoch": 0.34711764933009, + "grad_norm": 1.4199285507202148, + "learning_rate": 0.00017686139747995417, + "loss": 0.3093, + "step": 8990 + }, + { + "epoch": 0.34750376462411675, + "grad_norm": 1.9338914155960083, + "learning_rate": 0.0001768356564603524, + "loss": 0.275, + "step": 9000 + }, + { + "epoch": 0.3478898799181436, + "grad_norm": 3.425877332687378, + "learning_rate": 0.0001768099154407506, + "loss": 0.4621, + "step": 9010 + }, + { + "epoch": 0.34827599521217034, + "grad_norm": 3.6350486278533936, + "learning_rate": 0.00017678417442114884, + "loss": 0.3313, + "step": 9020 + }, + { + "epoch": 0.34866211050619716, + "grad_norm": 3.339202880859375, + "learning_rate": 0.00017675843340154705, + "loss": 0.5958, + "step": 9030 + }, + { + "epoch": 0.34904822580022393, + "grad_norm": 4.148682117462158, + "learning_rate": 0.00017673269238194526, + "loss": 0.2384, + "step": 9040 + }, + { + "epoch": 0.34943434109425076, + "grad_norm": 0.9697182178497314, + "learning_rate": 0.00017670695136234348, + "loss": 0.3119, + "step": 9050 + }, + { + "epoch": 0.3498204563882775, + "grad_norm": 0.53201824426651, + "learning_rate": 0.0001766812103427417, + "loss": 0.4339, + "step": 9060 + }, + { + "epoch": 0.35020657168230435, + "grad_norm": 0.9727185368537903, + "learning_rate": 0.0001766554693231399, + "loss": 0.3289, + "step": 9070 + }, + { + "epoch": 0.3505926869763311, + "grad_norm": 4.32904052734375, + "learning_rate": 0.00017662972830353812, + "loss": 0.4673, + "step": 9080 + }, + { + "epoch": 0.35097880227035794, + "grad_norm": 2.511558771133423, + "learning_rate": 0.00017660398728393633, + "loss": 0.2257, + "step": 9090 + }, + { + "epoch": 0.3513649175643847, + "grad_norm": 1.8378714323043823, + "learning_rate": 0.00017657824626433454, + "loss": 0.3977, + "step": 9100 + }, + { + "epoch": 0.3517510328584115, + "grad_norm": 1.3297137022018433, + "learning_rate": 0.00017655250524473276, + "loss": 0.3541, + "step": 9110 + }, + { + "epoch": 0.3521371481524383, + "grad_norm": 3.253089666366577, + "learning_rate": 0.00017652676422513097, + "loss": 0.6326, + "step": 9120 + }, + { + "epoch": 0.3525232634464651, + "grad_norm": 0.9691923260688782, + "learning_rate": 0.00017650102320552918, + "loss": 0.2206, + "step": 9130 + }, + { + "epoch": 0.3529093787404919, + "grad_norm": 1.570204496383667, + "learning_rate": 0.0001764752821859274, + 
"loss": 0.2769, + "step": 9140 + }, + { + "epoch": 0.3532954940345187, + "grad_norm": 1.9307161569595337, + "learning_rate": 0.0001764495411663256, + "loss": 0.3149, + "step": 9150 + }, + { + "epoch": 0.3536816093285455, + "grad_norm": 2.783297300338745, + "learning_rate": 0.00017642380014672382, + "loss": 0.3912, + "step": 9160 + }, + { + "epoch": 0.3540677246225723, + "grad_norm": 2.193371057510376, + "learning_rate": 0.00017639805912712204, + "loss": 0.3782, + "step": 9170 + }, + { + "epoch": 0.3544538399165991, + "grad_norm": 2.3460335731506348, + "learning_rate": 0.00017637231810752025, + "loss": 0.5051, + "step": 9180 + }, + { + "epoch": 0.3548399552106259, + "grad_norm": 2.4668326377868652, + "learning_rate": 0.00017634657708791846, + "loss": 0.2899, + "step": 9190 + }, + { + "epoch": 0.3552260705046527, + "grad_norm": 2.004683017730713, + "learning_rate": 0.00017632083606831668, + "loss": 0.3137, + "step": 9200 + }, + { + "epoch": 0.3556121857986795, + "grad_norm": 6.333971977233887, + "learning_rate": 0.0001762950950487149, + "loss": 0.5027, + "step": 9210 + }, + { + "epoch": 0.3559983010927063, + "grad_norm": 1.7840352058410645, + "learning_rate": 0.0001762693540291131, + "loss": 0.3988, + "step": 9220 + }, + { + "epoch": 0.35638441638673307, + "grad_norm": 0.9257024526596069, + "learning_rate": 0.00017624361300951132, + "loss": 0.3662, + "step": 9230 + }, + { + "epoch": 0.3567705316807599, + "grad_norm": 2.582887887954712, + "learning_rate": 0.00017621787198990953, + "loss": 0.2863, + "step": 9240 + }, + { + "epoch": 0.35715664697478666, + "grad_norm": 3.119943380355835, + "learning_rate": 0.00017619213097030774, + "loss": 0.4041, + "step": 9250 + }, + { + "epoch": 0.3575427622688135, + "grad_norm": 2.2561371326446533, + "learning_rate": 0.00017616638995070596, + "loss": 0.3969, + "step": 9260 + }, + { + "epoch": 0.35792887756284025, + "grad_norm": 2.104891538619995, + "learning_rate": 0.00017614064893110417, + "loss": 0.3216, + "step": 9270 + }, + { + "epoch": 0.3583149928568671, + "grad_norm": 1.6922805309295654, + "learning_rate": 0.00017611490791150238, + "loss": 0.3828, + "step": 9280 + }, + { + "epoch": 0.35870110815089384, + "grad_norm": 1.0928469896316528, + "learning_rate": 0.0001760891668919006, + "loss": 0.3225, + "step": 9290 + }, + { + "epoch": 0.35908722344492067, + "grad_norm": 2.4089863300323486, + "learning_rate": 0.0001760634258722988, + "loss": 0.4143, + "step": 9300 + }, + { + "epoch": 0.35947333873894743, + "grad_norm": 0.5562119483947754, + "learning_rate": 0.00017603768485269702, + "loss": 0.4597, + "step": 9310 + }, + { + "epoch": 0.35985945403297426, + "grad_norm": 1.3904486894607544, + "learning_rate": 0.00017601194383309524, + "loss": 0.4462, + "step": 9320 + }, + { + "epoch": 0.360245569327001, + "grad_norm": 2.1393306255340576, + "learning_rate": 0.00017598620281349345, + "loss": 0.2613, + "step": 9330 + }, + { + "epoch": 0.36063168462102785, + "grad_norm": 1.3657029867172241, + "learning_rate": 0.00017596046179389166, + "loss": 0.4968, + "step": 9340 + }, + { + "epoch": 0.3610177999150546, + "grad_norm": 2.424880027770996, + "learning_rate": 0.00017593472077428988, + "loss": 0.5982, + "step": 9350 + }, + { + "epoch": 0.36140391520908144, + "grad_norm": 6.178807735443115, + "learning_rate": 0.0001759089797546881, + "loss": 0.5355, + "step": 9360 + }, + { + "epoch": 0.3617900305031082, + "grad_norm": 1.5572419166564941, + "learning_rate": 0.0001758832387350863, + "loss": 0.4435, + "step": 9370 + }, + { + "epoch": 0.36217614579713503, + "grad_norm": 
0.46649104356765747, + "learning_rate": 0.00017585749771548452, + "loss": 0.352, + "step": 9380 + }, + { + "epoch": 0.3625622610911618, + "grad_norm": 1.9611142873764038, + "learning_rate": 0.00017583175669588276, + "loss": 0.2684, + "step": 9390 + }, + { + "epoch": 0.3629483763851886, + "grad_norm": 1.7648595571517944, + "learning_rate": 0.00017580601567628094, + "loss": 0.3186, + "step": 9400 + }, + { + "epoch": 0.3633344916792154, + "grad_norm": 1.7970843315124512, + "learning_rate": 0.00017578027465667916, + "loss": 0.5339, + "step": 9410 + }, + { + "epoch": 0.3637206069732422, + "grad_norm": 3.084897994995117, + "learning_rate": 0.00017575453363707737, + "loss": 0.5143, + "step": 9420 + }, + { + "epoch": 0.36410672226726903, + "grad_norm": 1.440626621246338, + "learning_rate": 0.00017572879261747558, + "loss": 0.4067, + "step": 9430 + }, + { + "epoch": 0.3644928375612958, + "grad_norm": 0.44918450713157654, + "learning_rate": 0.0001757030515978738, + "loss": 0.2306, + "step": 9440 + }, + { + "epoch": 0.3648789528553226, + "grad_norm": 2.617272138595581, + "learning_rate": 0.000175677310578272, + "loss": 0.3166, + "step": 9450 + }, + { + "epoch": 0.3652650681493494, + "grad_norm": 2.575073719024658, + "learning_rate": 0.00017565156955867025, + "loss": 0.6645, + "step": 9460 + }, + { + "epoch": 0.3656511834433762, + "grad_norm": 0.9430664777755737, + "learning_rate": 0.00017562582853906844, + "loss": 0.2753, + "step": 9470 + }, + { + "epoch": 0.366037298737403, + "grad_norm": 1.9400445222854614, + "learning_rate": 0.00017560008751946665, + "loss": 0.4689, + "step": 9480 + }, + { + "epoch": 0.3664234140314298, + "grad_norm": 4.0443220138549805, + "learning_rate": 0.00017557434649986486, + "loss": 0.5373, + "step": 9490 + }, + { + "epoch": 0.3668095293254566, + "grad_norm": 3.4999184608459473, + "learning_rate": 0.00017554860548026308, + "loss": 0.3412, + "step": 9500 + }, + { + "epoch": 0.3671956446194834, + "grad_norm": 2.2023515701293945, + "learning_rate": 0.0001755228644606613, + "loss": 0.3385, + "step": 9510 + }, + { + "epoch": 0.36758175991351016, + "grad_norm": 1.213641881942749, + "learning_rate": 0.0001754971234410595, + "loss": 0.4785, + "step": 9520 + }, + { + "epoch": 0.367967875207537, + "grad_norm": 0.4178420603275299, + "learning_rate": 0.00017547138242145774, + "loss": 0.2605, + "step": 9530 + }, + { + "epoch": 0.36835399050156376, + "grad_norm": 2.676564931869507, + "learning_rate": 0.00017544564140185593, + "loss": 0.5297, + "step": 9540 + }, + { + "epoch": 0.3687401057955906, + "grad_norm": 0.8604353070259094, + "learning_rate": 0.00017541990038225414, + "loss": 0.3983, + "step": 9550 + }, + { + "epoch": 0.36912622108961735, + "grad_norm": 1.298893690109253, + "learning_rate": 0.00017539415936265236, + "loss": 0.3229, + "step": 9560 + }, + { + "epoch": 0.36951233638364417, + "grad_norm": 4.109025478363037, + "learning_rate": 0.00017536841834305057, + "loss": 0.519, + "step": 9570 + }, + { + "epoch": 0.36989845167767094, + "grad_norm": 3.440915584564209, + "learning_rate": 0.0001753426773234488, + "loss": 0.4061, + "step": 9580 + }, + { + "epoch": 0.37028456697169776, + "grad_norm": 0.1484186202287674, + "learning_rate": 0.000175316936303847, + "loss": 0.3443, + "step": 9590 + }, + { + "epoch": 0.3706706822657245, + "grad_norm": 3.114328145980835, + "learning_rate": 0.00017529119528424524, + "loss": 0.2451, + "step": 9600 + }, + { + "epoch": 0.37105679755975135, + "grad_norm": 1.8218796253204346, + "learning_rate": 0.00017526545426464342, + "loss": 0.2511, + "step": 
9610 + }, + { + "epoch": 0.3714429128537781, + "grad_norm": 1.0732795000076294, + "learning_rate": 0.00017523971324504164, + "loss": 0.1581, + "step": 9620 + }, + { + "epoch": 0.37182902814780494, + "grad_norm": 1.0567959547042847, + "learning_rate": 0.00017521397222543985, + "loss": 0.1924, + "step": 9630 + }, + { + "epoch": 0.3722151434418317, + "grad_norm": 0.3467637896537781, + "learning_rate": 0.00017518823120583806, + "loss": 0.3571, + "step": 9640 + }, + { + "epoch": 0.37260125873585853, + "grad_norm": 2.6293838024139404, + "learning_rate": 0.0001751624901862363, + "loss": 0.3282, + "step": 9650 + }, + { + "epoch": 0.3729873740298853, + "grad_norm": 1.159696102142334, + "learning_rate": 0.0001751367491666345, + "loss": 0.2636, + "step": 9660 + }, + { + "epoch": 0.3733734893239121, + "grad_norm": 0.6884826421737671, + "learning_rate": 0.00017511100814703273, + "loss": 0.2842, + "step": 9670 + }, + { + "epoch": 0.3737596046179389, + "grad_norm": 3.789825201034546, + "learning_rate": 0.00017508526712743091, + "loss": 0.599, + "step": 9680 + }, + { + "epoch": 0.3741457199119657, + "grad_norm": 1.0705493688583374, + "learning_rate": 0.00017505952610782913, + "loss": 0.1746, + "step": 9690 + }, + { + "epoch": 0.37453183520599254, + "grad_norm": 1.8735803365707397, + "learning_rate": 0.00017503378508822734, + "loss": 0.3259, + "step": 9700 + }, + { + "epoch": 0.3749179505000193, + "grad_norm": 1.2987112998962402, + "learning_rate": 0.00017500804406862555, + "loss": 0.5738, + "step": 9710 + }, + { + "epoch": 0.3753040657940461, + "grad_norm": 1.5362507104873657, + "learning_rate": 0.0001749823030490238, + "loss": 0.3815, + "step": 9720 + }, + { + "epoch": 0.3756901810880729, + "grad_norm": 0.1640123724937439, + "learning_rate": 0.00017495656202942198, + "loss": 0.3672, + "step": 9730 + }, + { + "epoch": 0.3760762963820997, + "grad_norm": 0.6714594960212708, + "learning_rate": 0.00017493082100982022, + "loss": 0.2849, + "step": 9740 + }, + { + "epoch": 0.3764624116761265, + "grad_norm": 4.330246448516846, + "learning_rate": 0.0001749050799902184, + "loss": 0.4023, + "step": 9750 + }, + { + "epoch": 0.3768485269701533, + "grad_norm": 0.8616659641265869, + "learning_rate": 0.00017487933897061665, + "loss": 0.4434, + "step": 9760 + }, + { + "epoch": 0.3772346422641801, + "grad_norm": 2.6581578254699707, + "learning_rate": 0.00017485359795101486, + "loss": 0.4854, + "step": 9770 + }, + { + "epoch": 0.3776207575582069, + "grad_norm": 1.8269850015640259, + "learning_rate": 0.00017482785693141305, + "loss": 0.6033, + "step": 9780 + }, + { + "epoch": 0.37800687285223367, + "grad_norm": 2.256073236465454, + "learning_rate": 0.0001748021159118113, + "loss": 0.5317, + "step": 9790 + }, + { + "epoch": 0.3783929881462605, + "grad_norm": 0.8793076872825623, + "learning_rate": 0.00017477637489220947, + "loss": 0.3883, + "step": 9800 + }, + { + "epoch": 0.37877910344028726, + "grad_norm": 1.71831214427948, + "learning_rate": 0.00017475063387260772, + "loss": 0.2473, + "step": 9810 + }, + { + "epoch": 0.3791652187343141, + "grad_norm": 3.4802069664001465, + "learning_rate": 0.0001747248928530059, + "loss": 0.4847, + "step": 9820 + }, + { + "epoch": 0.37955133402834085, + "grad_norm": 5.419053077697754, + "learning_rate": 0.00017469915183340414, + "loss": 0.3668, + "step": 9830 + }, + { + "epoch": 0.37993744932236767, + "grad_norm": 1.567060112953186, + "learning_rate": 0.00017467341081380236, + "loss": 0.3342, + "step": 9840 + }, + { + "epoch": 0.38032356461639444, + "grad_norm": 3.0100274085998535, + 
"learning_rate": 0.00017464766979420054, + "loss": 0.476, + "step": 9850 + }, + { + "epoch": 0.38070967991042126, + "grad_norm": 0.7659344673156738, + "learning_rate": 0.00017462192877459878, + "loss": 0.2608, + "step": 9860 + }, + { + "epoch": 0.38109579520444803, + "grad_norm": 3.9540984630584717, + "learning_rate": 0.00017459618775499697, + "loss": 0.763, + "step": 9870 + }, + { + "epoch": 0.38148191049847485, + "grad_norm": 0.8768689036369324, + "learning_rate": 0.0001745704467353952, + "loss": 0.3365, + "step": 9880 + }, + { + "epoch": 0.3818680257925016, + "grad_norm": 0.9985928535461426, + "learning_rate": 0.00017454470571579342, + "loss": 0.3116, + "step": 9890 + }, + { + "epoch": 0.38225414108652844, + "grad_norm": 2.0326671600341797, + "learning_rate": 0.00017451896469619163, + "loss": 0.289, + "step": 9900 + }, + { + "epoch": 0.3826402563805552, + "grad_norm": 3.2696290016174316, + "learning_rate": 0.00017449322367658985, + "loss": 0.4097, + "step": 9910 + }, + { + "epoch": 0.38302637167458203, + "grad_norm": 3.048860788345337, + "learning_rate": 0.00017446748265698803, + "loss": 0.5181, + "step": 9920 + }, + { + "epoch": 0.3834124869686088, + "grad_norm": 1.7899913787841797, + "learning_rate": 0.00017444174163738627, + "loss": 0.2166, + "step": 9930 + }, + { + "epoch": 0.3837986022626356, + "grad_norm": 3.6762959957122803, + "learning_rate": 0.00017441600061778446, + "loss": 0.4971, + "step": 9940 + }, + { + "epoch": 0.3841847175566624, + "grad_norm": 0.9108519554138184, + "learning_rate": 0.0001743902595981827, + "loss": 0.4974, + "step": 9950 + }, + { + "epoch": 0.3845708328506892, + "grad_norm": 4.062527656555176, + "learning_rate": 0.00017436451857858091, + "loss": 0.4448, + "step": 9960 + }, + { + "epoch": 0.38495694814471604, + "grad_norm": 3.230902671813965, + "learning_rate": 0.00017433877755897913, + "loss": 0.2977, + "step": 9970 + }, + { + "epoch": 0.3853430634387428, + "grad_norm": 3.8190758228302, + "learning_rate": 0.00017431303653937734, + "loss": 0.4887, + "step": 9980 + }, + { + "epoch": 0.38572917873276963, + "grad_norm": 0.9079695343971252, + "learning_rate": 0.00017428729551977553, + "loss": 0.271, + "step": 9990 + }, + { + "epoch": 0.3861152940267964, + "grad_norm": 3.3730807304382324, + "learning_rate": 0.00017426155450017377, + "loss": 0.3782, + "step": 10000 + }, + { + "epoch": 0.3865014093208232, + "grad_norm": 1.07533860206604, + "learning_rate": 0.00017423581348057195, + "loss": 0.3905, + "step": 10010 + }, + { + "epoch": 0.38688752461485, + "grad_norm": 1.3856415748596191, + "learning_rate": 0.0001742100724609702, + "loss": 0.3757, + "step": 10020 + }, + { + "epoch": 0.3872736399088768, + "grad_norm": 5.751671314239502, + "learning_rate": 0.0001741843314413684, + "loss": 0.6657, + "step": 10030 + }, + { + "epoch": 0.3876597552029036, + "grad_norm": 0.6837680339813232, + "learning_rate": 0.00017415859042176662, + "loss": 0.2318, + "step": 10040 + }, + { + "epoch": 0.3880458704969304, + "grad_norm": 2.770787239074707, + "learning_rate": 0.00017413284940216483, + "loss": 0.3706, + "step": 10050 + }, + { + "epoch": 0.38843198579095717, + "grad_norm": 2.3058855533599854, + "learning_rate": 0.00017410710838256302, + "loss": 0.1641, + "step": 10060 + }, + { + "epoch": 0.388818101084984, + "grad_norm": 1.894718885421753, + "learning_rate": 0.00017408136736296126, + "loss": 0.4752, + "step": 10070 + }, + { + "epoch": 0.38920421637901076, + "grad_norm": 1.8346868753433228, + "learning_rate": 0.00017405562634335947, + "loss": 0.5007, + "step": 10080 + }, + { + 
"epoch": 0.3895903316730376, + "grad_norm": 5.277680397033691, + "learning_rate": 0.0001740298853237577, + "loss": 0.4399, + "step": 10090 + }, + { + "epoch": 0.38997644696706435, + "grad_norm": 1.306093692779541, + "learning_rate": 0.0001740041443041559, + "loss": 0.371, + "step": 10100 + }, + { + "epoch": 0.3903625622610912, + "grad_norm": 3.0306456089019775, + "learning_rate": 0.00017397840328455411, + "loss": 0.2515, + "step": 10110 + }, + { + "epoch": 0.39074867755511794, + "grad_norm": 0.7951543927192688, + "learning_rate": 0.00017395266226495233, + "loss": 0.3775, + "step": 10120 + }, + { + "epoch": 0.39113479284914476, + "grad_norm": 5.185150146484375, + "learning_rate": 0.00017392692124535051, + "loss": 0.3591, + "step": 10130 + }, + { + "epoch": 0.39152090814317153, + "grad_norm": 1.1718593835830688, + "learning_rate": 0.00017390118022574875, + "loss": 0.5484, + "step": 10140 + }, + { + "epoch": 0.39190702343719835, + "grad_norm": 1.6352128982543945, + "learning_rate": 0.00017387543920614697, + "loss": 0.2817, + "step": 10150 + }, + { + "epoch": 0.3922931387312251, + "grad_norm": 2.4863786697387695, + "learning_rate": 0.00017384969818654518, + "loss": 0.4027, + "step": 10160 + }, + { + "epoch": 0.39267925402525194, + "grad_norm": 2.069805383682251, + "learning_rate": 0.0001738239571669434, + "loss": 0.3559, + "step": 10170 + }, + { + "epoch": 0.3930653693192787, + "grad_norm": 1.671980619430542, + "learning_rate": 0.0001737982161473416, + "loss": 0.4405, + "step": 10180 + }, + { + "epoch": 0.39345148461330554, + "grad_norm": 4.298947334289551, + "learning_rate": 0.00017377247512773982, + "loss": 0.3005, + "step": 10190 + }, + { + "epoch": 0.3938375999073323, + "grad_norm": 0.4142851233482361, + "learning_rate": 0.000173746734108138, + "loss": 0.4248, + "step": 10200 + }, + { + "epoch": 0.3942237152013591, + "grad_norm": 3.5962865352630615, + "learning_rate": 0.00017372099308853625, + "loss": 0.27, + "step": 10210 + }, + { + "epoch": 0.39460983049538595, + "grad_norm": 2.20154070854187, + "learning_rate": 0.00017369525206893446, + "loss": 0.2858, + "step": 10220 + }, + { + "epoch": 0.3949959457894127, + "grad_norm": 0.2400553673505783, + "learning_rate": 0.00017366951104933267, + "loss": 0.2806, + "step": 10230 + }, + { + "epoch": 0.39538206108343954, + "grad_norm": 1.817741870880127, + "learning_rate": 0.0001736437700297309, + "loss": 0.3647, + "step": 10240 + }, + { + "epoch": 0.3957681763774663, + "grad_norm": 4.890044689178467, + "learning_rate": 0.0001736180290101291, + "loss": 0.4435, + "step": 10250 + }, + { + "epoch": 0.39615429167149313, + "grad_norm": 0.3407624065876007, + "learning_rate": 0.00017359228799052731, + "loss": 0.4857, + "step": 10260 + }, + { + "epoch": 0.3965404069655199, + "grad_norm": 2.4883463382720947, + "learning_rate": 0.00017356654697092553, + "loss": 0.2667, + "step": 10270 + }, + { + "epoch": 0.3969265222595467, + "grad_norm": 2.343823194503784, + "learning_rate": 0.00017354080595132374, + "loss": 0.3711, + "step": 10280 + }, + { + "epoch": 0.3973126375535735, + "grad_norm": 0.2056214064359665, + "learning_rate": 0.00017351506493172195, + "loss": 0.2695, + "step": 10290 + }, + { + "epoch": 0.3976987528476003, + "grad_norm": 0.20321065187454224, + "learning_rate": 0.00017348932391212017, + "loss": 0.3079, + "step": 10300 + }, + { + "epoch": 0.3980848681416271, + "grad_norm": 0.7993821501731873, + "learning_rate": 0.00017346358289251838, + "loss": 0.3599, + "step": 10310 + }, + { + "epoch": 0.3984709834356539, + "grad_norm": 2.0987348556518555, + 
"learning_rate": 0.0001734378418729166, + "loss": 0.3259, + "step": 10320 + }, + { + "epoch": 0.39885709872968067, + "grad_norm": 2.474246025085449, + "learning_rate": 0.0001734121008533148, + "loss": 0.3398, + "step": 10330 + }, + { + "epoch": 0.3992432140237075, + "grad_norm": 2.341064214706421, + "learning_rate": 0.00017338635983371302, + "loss": 0.5264, + "step": 10340 + }, + { + "epoch": 0.39962932931773426, + "grad_norm": 1.587437629699707, + "learning_rate": 0.00017336061881411123, + "loss": 0.4228, + "step": 10350 + }, + { + "epoch": 0.4000154446117611, + "grad_norm": 0.6692029237747192, + "learning_rate": 0.00017333487779450945, + "loss": 0.3576, + "step": 10360 + }, + { + "epoch": 0.40040155990578785, + "grad_norm": 2.088212251663208, + "learning_rate": 0.00017330913677490766, + "loss": 0.3096, + "step": 10370 + }, + { + "epoch": 0.4007876751998147, + "grad_norm": 1.5051954984664917, + "learning_rate": 0.00017328339575530587, + "loss": 0.3753, + "step": 10380 + }, + { + "epoch": 0.40117379049384144, + "grad_norm": 2.02595591545105, + "learning_rate": 0.0001732576547357041, + "loss": 0.3339, + "step": 10390 + }, + { + "epoch": 0.40155990578786827, + "grad_norm": 1.3062909841537476, + "learning_rate": 0.0001732319137161023, + "loss": 0.4301, + "step": 10400 + }, + { + "epoch": 0.40194602108189503, + "grad_norm": 2.5890421867370605, + "learning_rate": 0.00017320617269650051, + "loss": 0.3047, + "step": 10410 + }, + { + "epoch": 0.40233213637592186, + "grad_norm": 1.5994844436645508, + "learning_rate": 0.00017318043167689873, + "loss": 0.4158, + "step": 10420 + }, + { + "epoch": 0.4027182516699486, + "grad_norm": 0.5470211505889893, + "learning_rate": 0.00017315469065729694, + "loss": 0.4513, + "step": 10430 + }, + { + "epoch": 0.40310436696397545, + "grad_norm": 2.216935634613037, + "learning_rate": 0.00017312894963769515, + "loss": 0.5123, + "step": 10440 + }, + { + "epoch": 0.4034904822580022, + "grad_norm": 2.354724645614624, + "learning_rate": 0.00017310320861809337, + "loss": 0.2804, + "step": 10450 + }, + { + "epoch": 0.40387659755202904, + "grad_norm": 4.514159202575684, + "learning_rate": 0.00017307746759849158, + "loss": 0.3317, + "step": 10460 + }, + { + "epoch": 0.4042627128460558, + "grad_norm": 0.9874318242073059, + "learning_rate": 0.0001730517265788898, + "loss": 0.1948, + "step": 10470 + }, + { + "epoch": 0.40464882814008263, + "grad_norm": 2.0725696086883545, + "learning_rate": 0.000173025985559288, + "loss": 0.3627, + "step": 10480 + }, + { + "epoch": 0.40503494343410945, + "grad_norm": 2.4061577320098877, + "learning_rate": 0.00017300024453968622, + "loss": 0.3074, + "step": 10490 + }, + { + "epoch": 0.4054210587281362, + "grad_norm": 1.3369660377502441, + "learning_rate": 0.00017297450352008443, + "loss": 0.533, + "step": 10500 + }, + { + "epoch": 0.40580717402216304, + "grad_norm": 1.2730306386947632, + "learning_rate": 0.00017294876250048265, + "loss": 0.4688, + "step": 10510 + }, + { + "epoch": 0.4061932893161898, + "grad_norm": 0.6753021478652954, + "learning_rate": 0.00017292302148088086, + "loss": 0.4427, + "step": 10520 + }, + { + "epoch": 0.40657940461021663, + "grad_norm": 1.7279945611953735, + "learning_rate": 0.0001728972804612791, + "loss": 0.4921, + "step": 10530 + }, + { + "epoch": 0.4069655199042434, + "grad_norm": 0.9288708567619324, + "learning_rate": 0.0001728715394416773, + "loss": 0.363, + "step": 10540 + }, + { + "epoch": 0.4073516351982702, + "grad_norm": 0.5325084924697876, + "learning_rate": 0.0001728457984220755, + "loss": 0.4095, + 
"step": 10550 + }, + { + "epoch": 0.407737750492297, + "grad_norm": 1.2030489444732666, + "learning_rate": 0.0001728200574024737, + "loss": 0.3499, + "step": 10560 + }, + { + "epoch": 0.4081238657863238, + "grad_norm": 3.8157269954681396, + "learning_rate": 0.00017279431638287193, + "loss": 0.1622, + "step": 10570 + }, + { + "epoch": 0.4085099810803506, + "grad_norm": 0.6373336911201477, + "learning_rate": 0.00017276857536327017, + "loss": 0.4657, + "step": 10580 + }, + { + "epoch": 0.4088960963743774, + "grad_norm": 2.2850074768066406, + "learning_rate": 0.00017274283434366835, + "loss": 0.3585, + "step": 10590 + }, + { + "epoch": 0.40928221166840417, + "grad_norm": 0.8831659555435181, + "learning_rate": 0.0001727170933240666, + "loss": 0.293, + "step": 10600 + }, + { + "epoch": 0.409668326962431, + "grad_norm": 5.1165995597839355, + "learning_rate": 0.00017269135230446478, + "loss": 0.6539, + "step": 10610 + }, + { + "epoch": 0.41005444225645776, + "grad_norm": 4.901204586029053, + "learning_rate": 0.000172665611284863, + "loss": 0.4628, + "step": 10620 + }, + { + "epoch": 0.4104405575504846, + "grad_norm": 2.1492419242858887, + "learning_rate": 0.0001726398702652612, + "loss": 0.277, + "step": 10630 + }, + { + "epoch": 0.41082667284451135, + "grad_norm": 3.56510853767395, + "learning_rate": 0.00017261412924565942, + "loss": 0.4696, + "step": 10640 + }, + { + "epoch": 0.4112127881385382, + "grad_norm": 2.054769992828369, + "learning_rate": 0.00017258838822605766, + "loss": 0.4093, + "step": 10650 + }, + { + "epoch": 0.41159890343256494, + "grad_norm": 2.133474826812744, + "learning_rate": 0.00017256264720645585, + "loss": 0.3604, + "step": 10660 + }, + { + "epoch": 0.41198501872659177, + "grad_norm": 2.5062367916107178, + "learning_rate": 0.0001725369061868541, + "loss": 0.3916, + "step": 10670 + }, + { + "epoch": 0.41237113402061853, + "grad_norm": 0.431570827960968, + "learning_rate": 0.00017251116516725227, + "loss": 0.4048, + "step": 10680 + }, + { + "epoch": 0.41275724931464536, + "grad_norm": 1.2092580795288086, + "learning_rate": 0.0001724854241476505, + "loss": 0.602, + "step": 10690 + }, + { + "epoch": 0.4131433646086721, + "grad_norm": 2.712398052215576, + "learning_rate": 0.00017245968312804873, + "loss": 0.4172, + "step": 10700 + }, + { + "epoch": 0.41352947990269895, + "grad_norm": 3.914670467376709, + "learning_rate": 0.0001724339421084469, + "loss": 0.3843, + "step": 10710 + }, + { + "epoch": 0.4139155951967257, + "grad_norm": 1.7062132358551025, + "learning_rate": 0.00017240820108884515, + "loss": 0.343, + "step": 10720 + }, + { + "epoch": 0.41430171049075254, + "grad_norm": 0.5837095379829407, + "learning_rate": 0.00017238246006924334, + "loss": 0.3872, + "step": 10730 + }, + { + "epoch": 0.41468782578477936, + "grad_norm": 1.098900556564331, + "learning_rate": 0.00017235671904964158, + "loss": 0.2062, + "step": 10740 + }, + { + "epoch": 0.41507394107880613, + "grad_norm": 1.2533438205718994, + "learning_rate": 0.00017233097803003977, + "loss": 0.141, + "step": 10750 + }, + { + "epoch": 0.41546005637283295, + "grad_norm": 0.8688085079193115, + "learning_rate": 0.00017230523701043798, + "loss": 0.3686, + "step": 10760 + }, + { + "epoch": 0.4158461716668597, + "grad_norm": 1.868402361869812, + "learning_rate": 0.00017227949599083622, + "loss": 0.449, + "step": 10770 + }, + { + "epoch": 0.41623228696088654, + "grad_norm": 0.7168850898742676, + "learning_rate": 0.0001722537549712344, + "loss": 0.2317, + "step": 10780 + }, + { + "epoch": 0.4166184022549133, + "grad_norm": 
3.1062309741973877, + "learning_rate": 0.00017222801395163265, + "loss": 0.4655, + "step": 10790 + }, + { + "epoch": 0.41700451754894013, + "grad_norm": 2.7296605110168457, + "learning_rate": 0.00017220227293203083, + "loss": 0.3934, + "step": 10800 + }, + { + "epoch": 0.4173906328429669, + "grad_norm": 2.3148224353790283, + "learning_rate": 0.00017217653191242907, + "loss": 0.2367, + "step": 10810 + }, + { + "epoch": 0.4177767481369937, + "grad_norm": 0.7049677968025208, + "learning_rate": 0.00017215079089282726, + "loss": 0.3157, + "step": 10820 + }, + { + "epoch": 0.4181628634310205, + "grad_norm": 3.3960344791412354, + "learning_rate": 0.00017212504987322547, + "loss": 0.4945, + "step": 10830 + }, + { + "epoch": 0.4185489787250473, + "grad_norm": 2.606316566467285, + "learning_rate": 0.0001720993088536237, + "loss": 0.4056, + "step": 10840 + }, + { + "epoch": 0.4189350940190741, + "grad_norm": 1.7469319105148315, + "learning_rate": 0.0001720735678340219, + "loss": 0.4176, + "step": 10850 + }, + { + "epoch": 0.4193212093131009, + "grad_norm": 0.8538552522659302, + "learning_rate": 0.00017204782681442014, + "loss": 0.3025, + "step": 10860 + }, + { + "epoch": 0.4197073246071277, + "grad_norm": 1.9576159715652466, + "learning_rate": 0.00017202208579481833, + "loss": 0.5626, + "step": 10870 + }, + { + "epoch": 0.4200934399011545, + "grad_norm": 0.8435356616973877, + "learning_rate": 0.00017199634477521657, + "loss": 0.2397, + "step": 10880 + }, + { + "epoch": 0.42047955519518126, + "grad_norm": 1.3026552200317383, + "learning_rate": 0.00017197060375561478, + "loss": 0.4793, + "step": 10890 + }, + { + "epoch": 0.4208656704892081, + "grad_norm": 1.8935116529464722, + "learning_rate": 0.00017194486273601297, + "loss": 0.2459, + "step": 10900 + }, + { + "epoch": 0.42125178578323486, + "grad_norm": 0.7297415137290955, + "learning_rate": 0.0001719191217164112, + "loss": 0.4115, + "step": 10910 + }, + { + "epoch": 0.4216379010772617, + "grad_norm": 2.730445146560669, + "learning_rate": 0.0001718933806968094, + "loss": 0.3467, + "step": 10920 + }, + { + "epoch": 0.42202401637128845, + "grad_norm": 1.5462249517440796, + "learning_rate": 0.00017186763967720763, + "loss": 0.2319, + "step": 10930 + }, + { + "epoch": 0.42241013166531527, + "grad_norm": 2.173388957977295, + "learning_rate": 0.00017184189865760582, + "loss": 0.3664, + "step": 10940 + }, + { + "epoch": 0.42279624695934204, + "grad_norm": 0.9086957573890686, + "learning_rate": 0.00017181615763800406, + "loss": 0.3928, + "step": 10950 + }, + { + "epoch": 0.42318236225336886, + "grad_norm": 1.6344754695892334, + "learning_rate": 0.00017179041661840227, + "loss": 0.32, + "step": 10960 + }, + { + "epoch": 0.4235684775473956, + "grad_norm": 3.7620887756347656, + "learning_rate": 0.00017176467559880049, + "loss": 0.3998, + "step": 10970 + }, + { + "epoch": 0.42395459284142245, + "grad_norm": 2.3914058208465576, + "learning_rate": 0.0001717389345791987, + "loss": 0.3003, + "step": 10980 + }, + { + "epoch": 0.4243407081354492, + "grad_norm": 1.1183325052261353, + "learning_rate": 0.00017171319355959689, + "loss": 0.2408, + "step": 10990 + }, + { + "epoch": 0.42472682342947604, + "grad_norm": 1.5570834875106812, + "learning_rate": 0.00017168745253999513, + "loss": 0.2638, + "step": 11000 + }, + { + "epoch": 0.42511293872350286, + "grad_norm": 1.4825866222381592, + "learning_rate": 0.0001716617115203933, + "loss": 0.2774, + "step": 11010 + }, + { + "epoch": 0.42549905401752963, + "grad_norm": 1.5424071550369263, + "learning_rate": 
0.00017163597050079155, + "loss": 0.3636, + "step": 11020 + }, + { + "epoch": 0.42588516931155646, + "grad_norm": 3.9182989597320557, + "learning_rate": 0.00017161022948118977, + "loss": 0.5319, + "step": 11030 + }, + { + "epoch": 0.4262712846055832, + "grad_norm": 3.7870359420776367, + "learning_rate": 0.00017158448846158798, + "loss": 0.316, + "step": 11040 + }, + { + "epoch": 0.42665739989961005, + "grad_norm": 3.6943869590759277, + "learning_rate": 0.0001715587474419862, + "loss": 0.4336, + "step": 11050 + }, + { + "epoch": 0.4270435151936368, + "grad_norm": 1.1681898832321167, + "learning_rate": 0.00017153300642238438, + "loss": 0.328, + "step": 11060 + }, + { + "epoch": 0.42742963048766364, + "grad_norm": 3.6428277492523193, + "learning_rate": 0.00017150726540278262, + "loss": 0.2609, + "step": 11070 + }, + { + "epoch": 0.4278157457816904, + "grad_norm": 0.8946434259414673, + "learning_rate": 0.00017148152438318083, + "loss": 0.5445, + "step": 11080 + }, + { + "epoch": 0.4282018610757172, + "grad_norm": 1.8038333654403687, + "learning_rate": 0.00017145578336357905, + "loss": 0.3548, + "step": 11090 + }, + { + "epoch": 0.428587976369744, + "grad_norm": 2.3430778980255127, + "learning_rate": 0.00017143004234397726, + "loss": 0.4831, + "step": 11100 + }, + { + "epoch": 0.4289740916637708, + "grad_norm": 1.3243132829666138, + "learning_rate": 0.00017140430132437547, + "loss": 0.5759, + "step": 11110 + }, + { + "epoch": 0.4293602069577976, + "grad_norm": 1.2575668096542358, + "learning_rate": 0.00017137856030477369, + "loss": 0.3289, + "step": 11120 + }, + { + "epoch": 0.4297463222518244, + "grad_norm": 1.5581884384155273, + "learning_rate": 0.00017135281928517187, + "loss": 0.309, + "step": 11130 + }, + { + "epoch": 0.4301324375458512, + "grad_norm": 2.736063241958618, + "learning_rate": 0.0001713270782655701, + "loss": 0.3964, + "step": 11140 + }, + { + "epoch": 0.430518552839878, + "grad_norm": 1.3930561542510986, + "learning_rate": 0.00017130133724596833, + "loss": 0.2277, + "step": 11150 + }, + { + "epoch": 0.43090466813390477, + "grad_norm": 1.7932826280593872, + "learning_rate": 0.00017127559622636654, + "loss": 0.2907, + "step": 11160 + }, + { + "epoch": 0.4312907834279316, + "grad_norm": 1.7017295360565186, + "learning_rate": 0.00017124985520676475, + "loss": 0.2986, + "step": 11170 + }, + { + "epoch": 0.43167689872195836, + "grad_norm": 1.3543587923049927, + "learning_rate": 0.00017122411418716297, + "loss": 0.4872, + "step": 11180 + }, + { + "epoch": 0.4320630140159852, + "grad_norm": 2.3927829265594482, + "learning_rate": 0.00017119837316756118, + "loss": 0.5281, + "step": 11190 + }, + { + "epoch": 0.43244912931001195, + "grad_norm": 2.916257619857788, + "learning_rate": 0.0001711726321479594, + "loss": 0.5364, + "step": 11200 + }, + { + "epoch": 0.43283524460403877, + "grad_norm": 4.717250823974609, + "learning_rate": 0.0001711468911283576, + "loss": 0.3744, + "step": 11210 + }, + { + "epoch": 0.43322135989806554, + "grad_norm": 1.806577444076538, + "learning_rate": 0.00017112115010875582, + "loss": 0.3377, + "step": 11220 + }, + { + "epoch": 0.43360747519209236, + "grad_norm": 3.186603546142578, + "learning_rate": 0.00017109540908915403, + "loss": 0.3421, + "step": 11230 + }, + { + "epoch": 0.43399359048611913, + "grad_norm": 5.388319969177246, + "learning_rate": 0.00017106966806955225, + "loss": 0.3589, + "step": 11240 + }, + { + "epoch": 0.43437970578014595, + "grad_norm": 1.368312954902649, + "learning_rate": 0.00017104392704995046, + "loss": 0.2677, + "step": 11250 + }, 
+ { + "epoch": 0.4347658210741727, + "grad_norm": 0.9010117053985596, + "learning_rate": 0.00017101818603034867, + "loss": 0.3411, + "step": 11260 + }, + { + "epoch": 0.43515193636819954, + "grad_norm": 0.46370139718055725, + "learning_rate": 0.00017099244501074689, + "loss": 0.3531, + "step": 11270 + }, + { + "epoch": 0.43553805166222637, + "grad_norm": 2.778857469558716, + "learning_rate": 0.0001709667039911451, + "loss": 0.3953, + "step": 11280 + }, + { + "epoch": 0.43592416695625313, + "grad_norm": 0.45829036831855774, + "learning_rate": 0.0001709409629715433, + "loss": 0.3117, + "step": 11290 + }, + { + "epoch": 0.43631028225027996, + "grad_norm": 2.2053589820861816, + "learning_rate": 0.00017091522195194153, + "loss": 0.5104, + "step": 11300 + }, + { + "epoch": 0.4366963975443067, + "grad_norm": 5.166933059692383, + "learning_rate": 0.00017088948093233974, + "loss": 0.6913, + "step": 11310 + }, + { + "epoch": 0.43708251283833355, + "grad_norm": 1.5593189001083374, + "learning_rate": 0.00017086373991273795, + "loss": 0.305, + "step": 11320 + }, + { + "epoch": 0.4374686281323603, + "grad_norm": 2.01481556892395, + "learning_rate": 0.00017083799889313617, + "loss": 0.4576, + "step": 11330 + }, + { + "epoch": 0.43785474342638714, + "grad_norm": 2.200463056564331, + "learning_rate": 0.00017081225787353438, + "loss": 0.6491, + "step": 11340 + }, + { + "epoch": 0.4382408587204139, + "grad_norm": 0.8904009461402893, + "learning_rate": 0.0001707865168539326, + "loss": 0.134, + "step": 11350 + }, + { + "epoch": 0.43862697401444073, + "grad_norm": 0.7481307983398438, + "learning_rate": 0.0001707607758343308, + "loss": 0.5552, + "step": 11360 + }, + { + "epoch": 0.4390130893084675, + "grad_norm": 1.0893138647079468, + "learning_rate": 0.00017073503481472902, + "loss": 0.2369, + "step": 11370 + }, + { + "epoch": 0.4393992046024943, + "grad_norm": 3.3567726612091064, + "learning_rate": 0.00017070929379512723, + "loss": 0.1916, + "step": 11380 + }, + { + "epoch": 0.4397853198965211, + "grad_norm": 0.5970168709754944, + "learning_rate": 0.00017068355277552545, + "loss": 0.6096, + "step": 11390 + }, + { + "epoch": 0.4401714351905479, + "grad_norm": 2.880949020385742, + "learning_rate": 0.00017065781175592366, + "loss": 0.3566, + "step": 11400 + }, + { + "epoch": 0.4405575504845747, + "grad_norm": 0.5541375279426575, + "learning_rate": 0.00017063207073632187, + "loss": 0.4147, + "step": 11410 + }, + { + "epoch": 0.4409436657786015, + "grad_norm": 4.519477844238281, + "learning_rate": 0.00017060632971672009, + "loss": 0.4912, + "step": 11420 + }, + { + "epoch": 0.44132978107262827, + "grad_norm": 1.9959009885787964, + "learning_rate": 0.0001705805886971183, + "loss": 0.295, + "step": 11430 + }, + { + "epoch": 0.4417158963666551, + "grad_norm": 3.843033790588379, + "learning_rate": 0.0001705548476775165, + "loss": 0.2451, + "step": 11440 + }, + { + "epoch": 0.44210201166068186, + "grad_norm": 1.0480101108551025, + "learning_rate": 0.00017052910665791473, + "loss": 0.3113, + "step": 11450 + }, + { + "epoch": 0.4424881269547087, + "grad_norm": 2.0960068702697754, + "learning_rate": 0.00017050336563831294, + "loss": 0.5959, + "step": 11460 + }, + { + "epoch": 0.44287424224873545, + "grad_norm": 1.3062267303466797, + "learning_rate": 0.00017047762461871115, + "loss": 0.3812, + "step": 11470 + }, + { + "epoch": 0.4432603575427623, + "grad_norm": 2.2563138008117676, + "learning_rate": 0.00017045188359910937, + "loss": 0.303, + "step": 11480 + }, + { + "epoch": 0.44364647283678904, + "grad_norm": 
1.518556833267212, + "learning_rate": 0.00017042614257950758, + "loss": 0.4349, + "step": 11490 + }, + { + "epoch": 0.44403258813081586, + "grad_norm": 5.704294681549072, + "learning_rate": 0.0001704004015599058, + "loss": 0.3162, + "step": 11500 + }, + { + "epoch": 0.44441870342484263, + "grad_norm": 3.3146274089813232, + "learning_rate": 0.000170374660540304, + "loss": 0.644, + "step": 11510 + }, + { + "epoch": 0.44480481871886945, + "grad_norm": 2.285374879837036, + "learning_rate": 0.00017034891952070222, + "loss": 0.3718, + "step": 11520 + }, + { + "epoch": 0.4451909340128963, + "grad_norm": 0.07299748063087463, + "learning_rate": 0.00017032317850110043, + "loss": 0.3093, + "step": 11530 + }, + { + "epoch": 0.44557704930692305, + "grad_norm": 4.159457683563232, + "learning_rate": 0.00017029743748149865, + "loss": 0.4074, + "step": 11540 + }, + { + "epoch": 0.44596316460094987, + "grad_norm": 2.2241604328155518, + "learning_rate": 0.00017027169646189686, + "loss": 0.2908, + "step": 11550 + }, + { + "epoch": 0.44634927989497664, + "grad_norm": 2.2086968421936035, + "learning_rate": 0.00017024595544229507, + "loss": 0.3352, + "step": 11560 + }, + { + "epoch": 0.44673539518900346, + "grad_norm": 0.2843379080295563, + "learning_rate": 0.00017022021442269329, + "loss": 0.2548, + "step": 11570 + }, + { + "epoch": 0.4471215104830302, + "grad_norm": 0.9805948734283447, + "learning_rate": 0.00017019447340309153, + "loss": 0.4134, + "step": 11580 + }, + { + "epoch": 0.44750762577705705, + "grad_norm": 2.3669955730438232, + "learning_rate": 0.0001701687323834897, + "loss": 0.3803, + "step": 11590 + }, + { + "epoch": 0.4478937410710838, + "grad_norm": 2.4062774181365967, + "learning_rate": 0.00017014299136388792, + "loss": 0.4391, + "step": 11600 + }, + { + "epoch": 0.44827985636511064, + "grad_norm": 0.4205828607082367, + "learning_rate": 0.00017011725034428614, + "loss": 0.2793, + "step": 11610 + }, + { + "epoch": 0.4486659716591374, + "grad_norm": 1.3258132934570312, + "learning_rate": 0.00017009150932468435, + "loss": 0.4219, + "step": 11620 + }, + { + "epoch": 0.44905208695316423, + "grad_norm": 3.250332832336426, + "learning_rate": 0.00017006576830508256, + "loss": 0.3638, + "step": 11630 + }, + { + "epoch": 0.449438202247191, + "grad_norm": 1.9546891450881958, + "learning_rate": 0.00017004002728548078, + "loss": 0.5473, + "step": 11640 + }, + { + "epoch": 0.4498243175412178, + "grad_norm": 0.6501532793045044, + "learning_rate": 0.00017001428626587902, + "loss": 0.197, + "step": 11650 + }, + { + "epoch": 0.4502104328352446, + "grad_norm": 0.7169322967529297, + "learning_rate": 0.0001699885452462772, + "loss": 0.2846, + "step": 11660 + }, + { + "epoch": 0.4505965481292714, + "grad_norm": 1.4476008415222168, + "learning_rate": 0.00016996280422667545, + "loss": 0.3601, + "step": 11670 + }, + { + "epoch": 0.4509826634232982, + "grad_norm": 1.562485933303833, + "learning_rate": 0.00016993706320707363, + "loss": 0.2876, + "step": 11680 + }, + { + "epoch": 0.451368778717325, + "grad_norm": 4.490612983703613, + "learning_rate": 0.00016991132218747184, + "loss": 0.5445, + "step": 11690 + }, + { + "epoch": 0.45175489401135177, + "grad_norm": 0.9045882225036621, + "learning_rate": 0.00016988558116787009, + "loss": 0.149, + "step": 11700 + }, + { + "epoch": 0.4521410093053786, + "grad_norm": 2.2920546531677246, + "learning_rate": 0.00016985984014826827, + "loss": 0.3922, + "step": 11710 + }, + { + "epoch": 0.45252712459940536, + "grad_norm": 4.459114074707031, + "learning_rate": 0.0001698340991286665, + 
"loss": 0.3386, + "step": 11720 + }, + { + "epoch": 0.4529132398934322, + "grad_norm": 3.0614171028137207, + "learning_rate": 0.0001698083581090647, + "loss": 0.3618, + "step": 11730 + }, + { + "epoch": 0.45329935518745895, + "grad_norm": 1.0597162246704102, + "learning_rate": 0.00016978261708946294, + "loss": 0.2012, + "step": 11740 + }, + { + "epoch": 0.4536854704814858, + "grad_norm": 0.9142243266105652, + "learning_rate": 0.00016975687606986112, + "loss": 0.3716, + "step": 11750 + }, + { + "epoch": 0.45407158577551254, + "grad_norm": 1.7402280569076538, + "learning_rate": 0.00016973113505025934, + "loss": 0.3046, + "step": 11760 + }, + { + "epoch": 0.45445770106953937, + "grad_norm": 0.7389079928398132, + "learning_rate": 0.00016970539403065758, + "loss": 0.287, + "step": 11770 + }, + { + "epoch": 0.45484381636356613, + "grad_norm": 2.0596721172332764, + "learning_rate": 0.00016967965301105576, + "loss": 0.5124, + "step": 11780 + }, + { + "epoch": 0.45522993165759296, + "grad_norm": 0.26743578910827637, + "learning_rate": 0.000169653911991454, + "loss": 0.6725, + "step": 11790 + }, + { + "epoch": 0.4556160469516198, + "grad_norm": 2.812441825866699, + "learning_rate": 0.0001696281709718522, + "loss": 0.4545, + "step": 11800 + }, + { + "epoch": 0.45600216224564655, + "grad_norm": 2.9857542514801025, + "learning_rate": 0.00016960242995225043, + "loss": 0.2865, + "step": 11810 + }, + { + "epoch": 0.45638827753967337, + "grad_norm": 0.4560181200504303, + "learning_rate": 0.00016957668893264862, + "loss": 0.1623, + "step": 11820 + }, + { + "epoch": 0.45677439283370014, + "grad_norm": 2.2492222785949707, + "learning_rate": 0.00016955094791304683, + "loss": 0.2813, + "step": 11830 + }, + { + "epoch": 0.45716050812772696, + "grad_norm": 1.0125524997711182, + "learning_rate": 0.00016952520689344507, + "loss": 0.2224, + "step": 11840 + }, + { + "epoch": 0.45754662342175373, + "grad_norm": 0.4849410057067871, + "learning_rate": 0.00016949946587384326, + "loss": 0.4838, + "step": 11850 + }, + { + "epoch": 0.45793273871578055, + "grad_norm": 2.313490390777588, + "learning_rate": 0.0001694737248542415, + "loss": 0.3855, + "step": 11860 + }, + { + "epoch": 0.4583188540098073, + "grad_norm": 2.3526558876037598, + "learning_rate": 0.00016944798383463968, + "loss": 0.3081, + "step": 11870 + }, + { + "epoch": 0.45870496930383414, + "grad_norm": 0.5723626613616943, + "learning_rate": 0.00016942224281503792, + "loss": 0.3126, + "step": 11880 + }, + { + "epoch": 0.4590910845978609, + "grad_norm": 4.130553722381592, + "learning_rate": 0.00016939650179543614, + "loss": 0.2295, + "step": 11890 + }, + { + "epoch": 0.45947719989188773, + "grad_norm": 1.3537687063217163, + "learning_rate": 0.00016937076077583432, + "loss": 0.3488, + "step": 11900 + }, + { + "epoch": 0.4598633151859145, + "grad_norm": 1.843441128730774, + "learning_rate": 0.00016934501975623256, + "loss": 0.6298, + "step": 11910 + }, + { + "epoch": 0.4602494304799413, + "grad_norm": 4.685494422912598, + "learning_rate": 0.00016931927873663075, + "loss": 0.4929, + "step": 11920 + }, + { + "epoch": 0.4606355457739681, + "grad_norm": 1.5470925569534302, + "learning_rate": 0.000169293537717029, + "loss": 0.3214, + "step": 11930 + }, + { + "epoch": 0.4610216610679949, + "grad_norm": 2.0928761959075928, + "learning_rate": 0.00016926779669742718, + "loss": 0.317, + "step": 11940 + }, + { + "epoch": 0.4614077763620217, + "grad_norm": 1.0828526020050049, + "learning_rate": 0.00016924205567782542, + "loss": 0.4204, + "step": 11950 + }, + { + "epoch": 
0.4617938916560485, + "grad_norm": 2.2898383140563965, + "learning_rate": 0.00016921631465822363, + "loss": 0.4149, + "step": 11960 + }, + { + "epoch": 0.4621800069500753, + "grad_norm": 1.1590880155563354, + "learning_rate": 0.00016919057363862182, + "loss": 0.2372, + "step": 11970 + }, + { + "epoch": 0.4625661222441021, + "grad_norm": 0.6889861226081848, + "learning_rate": 0.00016916483261902006, + "loss": 0.4905, + "step": 11980 + }, + { + "epoch": 0.46295223753812886, + "grad_norm": 0.7974410653114319, + "learning_rate": 0.00016913909159941824, + "loss": 0.3451, + "step": 11990 + }, + { + "epoch": 0.4633383528321557, + "grad_norm": 3.254754066467285, + "learning_rate": 0.00016911335057981648, + "loss": 0.4018, + "step": 12000 + }, + { + "epoch": 0.46372446812618245, + "grad_norm": 2.660983085632324, + "learning_rate": 0.0001690876095602147, + "loss": 0.2112, + "step": 12010 + }, + { + "epoch": 0.4641105834202093, + "grad_norm": 1.1055381298065186, + "learning_rate": 0.0001690618685406129, + "loss": 0.2994, + "step": 12020 + }, + { + "epoch": 0.46449669871423604, + "grad_norm": 0.26787269115448, + "learning_rate": 0.00016903612752101112, + "loss": 0.2744, + "step": 12030 + }, + { + "epoch": 0.46488281400826287, + "grad_norm": 0.8832791447639465, + "learning_rate": 0.0001690103865014093, + "loss": 0.4357, + "step": 12040 + }, + { + "epoch": 0.4652689293022897, + "grad_norm": 1.0007046461105347, + "learning_rate": 0.00016898464548180755, + "loss": 0.2873, + "step": 12050 + }, + { + "epoch": 0.46565504459631646, + "grad_norm": 2.0366733074188232, + "learning_rate": 0.00016895890446220574, + "loss": 0.3016, + "step": 12060 + }, + { + "epoch": 0.4660411598903433, + "grad_norm": 2.557812213897705, + "learning_rate": 0.00016893316344260398, + "loss": 0.6375, + "step": 12070 + }, + { + "epoch": 0.46642727518437005, + "grad_norm": 1.9911783933639526, + "learning_rate": 0.0001689074224230022, + "loss": 0.5568, + "step": 12080 + }, + { + "epoch": 0.4668133904783969, + "grad_norm": 0.6905809044837952, + "learning_rate": 0.0001688816814034004, + "loss": 0.3237, + "step": 12090 + }, + { + "epoch": 0.46719950577242364, + "grad_norm": 2.3139610290527344, + "learning_rate": 0.00016885594038379862, + "loss": 0.3656, + "step": 12100 + }, + { + "epoch": 0.46758562106645046, + "grad_norm": 1.580295443534851, + "learning_rate": 0.0001688301993641968, + "loss": 0.3134, + "step": 12110 + }, + { + "epoch": 0.46797173636047723, + "grad_norm": 1.1115995645523071, + "learning_rate": 0.00016880445834459504, + "loss": 0.6167, + "step": 12120 + }, + { + "epoch": 0.46835785165450405, + "grad_norm": 3.0928075313568115, + "learning_rate": 0.00016877871732499323, + "loss": 0.3922, + "step": 12130 + }, + { + "epoch": 0.4687439669485308, + "grad_norm": 2.277163028717041, + "learning_rate": 0.00016875297630539147, + "loss": 0.2395, + "step": 12140 + }, + { + "epoch": 0.46913008224255764, + "grad_norm": 1.6578807830810547, + "learning_rate": 0.00016872723528578968, + "loss": 0.3502, + "step": 12150 + }, + { + "epoch": 0.4695161975365844, + "grad_norm": 2.9669971466064453, + "learning_rate": 0.0001687014942661879, + "loss": 0.3278, + "step": 12160 + }, + { + "epoch": 0.46990231283061124, + "grad_norm": 3.344914674758911, + "learning_rate": 0.0001686757532465861, + "loss": 0.4856, + "step": 12170 + }, + { + "epoch": 0.470288428124638, + "grad_norm": 1.512459635734558, + "learning_rate": 0.00016865001222698432, + "loss": 0.3113, + "step": 12180 + }, + { + "epoch": 0.4706745434186648, + "grad_norm": 1.8372429609298706, + 
"learning_rate": 0.00016862427120738254, + "loss": 0.3534, + "step": 12190 + }, + { + "epoch": 0.4710606587126916, + "grad_norm": 2.5016984939575195, + "learning_rate": 0.00016859853018778075, + "loss": 0.2129, + "step": 12200 + }, + { + "epoch": 0.4714467740067184, + "grad_norm": 2.083526134490967, + "learning_rate": 0.00016857278916817896, + "loss": 0.3011, + "step": 12210 + }, + { + "epoch": 0.4718328893007452, + "grad_norm": 3.3518013954162598, + "learning_rate": 0.00016854704814857718, + "loss": 0.2652, + "step": 12220 + }, + { + "epoch": 0.472219004594772, + "grad_norm": 0.34668633341789246, + "learning_rate": 0.0001685213071289754, + "loss": 0.3655, + "step": 12230 + }, + { + "epoch": 0.4726051198887988, + "grad_norm": 1.0674203634262085, + "learning_rate": 0.0001684955661093736, + "loss": 0.3217, + "step": 12240 + }, + { + "epoch": 0.4729912351828256, + "grad_norm": 2.9859087467193604, + "learning_rate": 0.00016846982508977182, + "loss": 0.447, + "step": 12250 + }, + { + "epoch": 0.47337735047685237, + "grad_norm": 0.33996835350990295, + "learning_rate": 0.00016844408407017003, + "loss": 0.1915, + "step": 12260 + }, + { + "epoch": 0.4737634657708792, + "grad_norm": 0.10328155755996704, + "learning_rate": 0.00016841834305056824, + "loss": 0.2588, + "step": 12270 + }, + { + "epoch": 0.47414958106490596, + "grad_norm": 2.0321199893951416, + "learning_rate": 0.00016839260203096646, + "loss": 0.4788, + "step": 12280 + }, + { + "epoch": 0.4745356963589328, + "grad_norm": 1.5690089464187622, + "learning_rate": 0.00016836686101136467, + "loss": 0.5198, + "step": 12290 + }, + { + "epoch": 0.47492181165295955, + "grad_norm": 1.989465355873108, + "learning_rate": 0.00016834111999176288, + "loss": 0.2554, + "step": 12300 + }, + { + "epoch": 0.47530792694698637, + "grad_norm": 1.574174404144287, + "learning_rate": 0.0001683153789721611, + "loss": 0.3703, + "step": 12310 + }, + { + "epoch": 0.4756940422410132, + "grad_norm": 2.726776599884033, + "learning_rate": 0.0001682896379525593, + "loss": 0.7426, + "step": 12320 + }, + { + "epoch": 0.47608015753503996, + "grad_norm": 0.3101334571838379, + "learning_rate": 0.00016826389693295752, + "loss": 0.1861, + "step": 12330 + }, + { + "epoch": 0.4764662728290668, + "grad_norm": 5.30327033996582, + "learning_rate": 0.00016823815591335574, + "loss": 0.4261, + "step": 12340 + }, + { + "epoch": 0.47685238812309355, + "grad_norm": 0.5553661584854126, + "learning_rate": 0.00016821241489375395, + "loss": 0.7326, + "step": 12350 + }, + { + "epoch": 0.4772385034171204, + "grad_norm": 2.2244138717651367, + "learning_rate": 0.00016818667387415216, + "loss": 0.5406, + "step": 12360 + }, + { + "epoch": 0.47762461871114714, + "grad_norm": 0.5314281582832336, + "learning_rate": 0.00016816093285455038, + "loss": 0.3765, + "step": 12370 + }, + { + "epoch": 0.47801073400517397, + "grad_norm": 3.644477128982544, + "learning_rate": 0.0001681351918349486, + "loss": 0.5519, + "step": 12380 + }, + { + "epoch": 0.47839684929920073, + "grad_norm": 1.0556128025054932, + "learning_rate": 0.0001681094508153468, + "loss": 0.2128, + "step": 12390 + }, + { + "epoch": 0.47878296459322756, + "grad_norm": 2.7807135581970215, + "learning_rate": 0.00016808370979574502, + "loss": 0.1564, + "step": 12400 + }, + { + "epoch": 0.4791690798872543, + "grad_norm": 0.7862847447395325, + "learning_rate": 0.00016805796877614323, + "loss": 0.35, + "step": 12410 + }, + { + "epoch": 0.47955519518128115, + "grad_norm": 2.7445156574249268, + "learning_rate": 0.00016803222775654144, + "loss": 0.4167, + 
"step": 12420 + }, + { + "epoch": 0.4799413104753079, + "grad_norm": 0.6173526644706726, + "learning_rate": 0.00016800648673693966, + "loss": 0.3669, + "step": 12430 + }, + { + "epoch": 0.48032742576933474, + "grad_norm": 0.3762228488922119, + "learning_rate": 0.00016798074571733787, + "loss": 0.179, + "step": 12440 + }, + { + "epoch": 0.4807135410633615, + "grad_norm": 1.2806499004364014, + "learning_rate": 0.00016795500469773608, + "loss": 0.3926, + "step": 12450 + }, + { + "epoch": 0.48109965635738833, + "grad_norm": 1.0579105615615845, + "learning_rate": 0.0001679292636781343, + "loss": 0.3329, + "step": 12460 + }, + { + "epoch": 0.4814857716514151, + "grad_norm": 1.6502699851989746, + "learning_rate": 0.0001679035226585325, + "loss": 0.3149, + "step": 12470 + }, + { + "epoch": 0.4818718869454419, + "grad_norm": 0.8876189589500427, + "learning_rate": 0.00016787778163893072, + "loss": 0.3262, + "step": 12480 + }, + { + "epoch": 0.4822580022394687, + "grad_norm": 2.4398770332336426, + "learning_rate": 0.00016785204061932894, + "loss": 0.3834, + "step": 12490 + }, + { + "epoch": 0.4826441175334955, + "grad_norm": 1.275148868560791, + "learning_rate": 0.00016782629959972715, + "loss": 0.3749, + "step": 12500 + }, + { + "epoch": 0.4830302328275223, + "grad_norm": 2.1397783756256104, + "learning_rate": 0.0001678005585801254, + "loss": 0.4726, + "step": 12510 + }, + { + "epoch": 0.4834163481215491, + "grad_norm": 1.6431002616882324, + "learning_rate": 0.00016777481756052358, + "loss": 0.3537, + "step": 12520 + }, + { + "epoch": 0.48380246341557587, + "grad_norm": 1.1193108558654785, + "learning_rate": 0.0001677490765409218, + "loss": 0.3182, + "step": 12530 + }, + { + "epoch": 0.4841885787096027, + "grad_norm": 1.365897297859192, + "learning_rate": 0.00016772333552132, + "loss": 0.3455, + "step": 12540 + }, + { + "epoch": 0.48457469400362946, + "grad_norm": 0.9035172462463379, + "learning_rate": 0.00016769759450171822, + "loss": 0.2268, + "step": 12550 + }, + { + "epoch": 0.4849608092976563, + "grad_norm": 0.15492293238639832, + "learning_rate": 0.00016767185348211643, + "loss": 0.319, + "step": 12560 + }, + { + "epoch": 0.4853469245916831, + "grad_norm": 0.19394727051258087, + "learning_rate": 0.00016764611246251464, + "loss": 0.2073, + "step": 12570 + }, + { + "epoch": 0.48573303988570987, + "grad_norm": 1.4406816959381104, + "learning_rate": 0.00016762037144291288, + "loss": 0.4538, + "step": 12580 + }, + { + "epoch": 0.4861191551797367, + "grad_norm": 2.741548538208008, + "learning_rate": 0.00016759463042331107, + "loss": 0.536, + "step": 12590 + }, + { + "epoch": 0.48650527047376346, + "grad_norm": 0.5203917622566223, + "learning_rate": 0.00016756888940370928, + "loss": 0.2484, + "step": 12600 + }, + { + "epoch": 0.4868913857677903, + "grad_norm": 0.9666195511817932, + "learning_rate": 0.0001675431483841075, + "loss": 0.3834, + "step": 12610 + }, + { + "epoch": 0.48727750106181705, + "grad_norm": 1.7210304737091064, + "learning_rate": 0.0001675174073645057, + "loss": 0.3223, + "step": 12620 + }, + { + "epoch": 0.4876636163558439, + "grad_norm": 0.6125622987747192, + "learning_rate": 0.00016749166634490392, + "loss": 0.394, + "step": 12630 + }, + { + "epoch": 0.48804973164987064, + "grad_norm": 1.9612951278686523, + "learning_rate": 0.00016746592532530214, + "loss": 0.3367, + "step": 12640 + }, + { + "epoch": 0.48843584694389747, + "grad_norm": 1.5395468473434448, + "learning_rate": 0.00016744018430570038, + "loss": 0.5441, + "step": 12650 + }, + { + "epoch": 0.48882196223792423, + 
"grad_norm": 0.8706358671188354, + "learning_rate": 0.00016741444328609856, + "loss": 0.422, + "step": 12660 + }, + { + "epoch": 0.48920807753195106, + "grad_norm": 4.338102340698242, + "learning_rate": 0.00016738870226649678, + "loss": 0.3932, + "step": 12670 + }, + { + "epoch": 0.4895941928259778, + "grad_norm": 0.9501354694366455, + "learning_rate": 0.000167362961246895, + "loss": 0.563, + "step": 12680 + }, + { + "epoch": 0.48998030812000465, + "grad_norm": 1.3146884441375732, + "learning_rate": 0.0001673372202272932, + "loss": 0.5116, + "step": 12690 + }, + { + "epoch": 0.4903664234140314, + "grad_norm": 2.144622564315796, + "learning_rate": 0.00016731147920769144, + "loss": 0.4792, + "step": 12700 + }, + { + "epoch": 0.49075253870805824, + "grad_norm": 0.9871418476104736, + "learning_rate": 0.00016728573818808963, + "loss": 0.3802, + "step": 12710 + }, + { + "epoch": 0.491138654002085, + "grad_norm": 1.710766315460205, + "learning_rate": 0.00016725999716848787, + "loss": 0.2859, + "step": 12720 + }, + { + "epoch": 0.49152476929611183, + "grad_norm": 2.528146505355835, + "learning_rate": 0.00016723425614888606, + "loss": 0.4565, + "step": 12730 + }, + { + "epoch": 0.4919108845901386, + "grad_norm": 0.8471786379814148, + "learning_rate": 0.00016720851512928427, + "loss": 0.3455, + "step": 12740 + }, + { + "epoch": 0.4922969998841654, + "grad_norm": 2.6623692512512207, + "learning_rate": 0.00016718277410968248, + "loss": 0.3687, + "step": 12750 + }, + { + "epoch": 0.4926831151781922, + "grad_norm": 2.9324758052825928, + "learning_rate": 0.0001671570330900807, + "loss": 0.2652, + "step": 12760 + }, + { + "epoch": 0.493069230472219, + "grad_norm": 1.7955294847488403, + "learning_rate": 0.00016713129207047894, + "loss": 0.3156, + "step": 12770 + }, + { + "epoch": 0.4934553457662458, + "grad_norm": 0.9923033118247986, + "learning_rate": 0.00016710555105087712, + "loss": 0.3042, + "step": 12780 + }, + { + "epoch": 0.4938414610602726, + "grad_norm": 0.9309022426605225, + "learning_rate": 0.00016707981003127536, + "loss": 0.2742, + "step": 12790 + }, + { + "epoch": 0.49422757635429937, + "grad_norm": 4.864802360534668, + "learning_rate": 0.00016705406901167355, + "loss": 0.6145, + "step": 12800 + }, + { + "epoch": 0.4946136916483262, + "grad_norm": 2.4508230686187744, + "learning_rate": 0.00016702832799207176, + "loss": 0.4218, + "step": 12810 + }, + { + "epoch": 0.49499980694235296, + "grad_norm": 2.0317444801330566, + "learning_rate": 0.00016700258697246998, + "loss": 0.5682, + "step": 12820 + }, + { + "epoch": 0.4953859222363798, + "grad_norm": 4.89669942855835, + "learning_rate": 0.0001669768459528682, + "loss": 0.5654, + "step": 12830 + }, + { + "epoch": 0.4957720375304066, + "grad_norm": 0.602165162563324, + "learning_rate": 0.00016695110493326643, + "loss": 0.2719, + "step": 12840 + }, + { + "epoch": 0.4961581528244334, + "grad_norm": 1.1574476957321167, + "learning_rate": 0.00016692536391366462, + "loss": 0.3872, + "step": 12850 + }, + { + "epoch": 0.4965442681184602, + "grad_norm": 0.4792019724845886, + "learning_rate": 0.00016689962289406286, + "loss": 0.2671, + "step": 12860 + }, + { + "epoch": 0.49693038341248696, + "grad_norm": 1.4611676931381226, + "learning_rate": 0.00016687388187446104, + "loss": 0.3378, + "step": 12870 + }, + { + "epoch": 0.4973164987065138, + "grad_norm": 1.183975338935852, + "learning_rate": 0.00016684814085485928, + "loss": 0.2645, + "step": 12880 + }, + { + "epoch": 0.49770261400054056, + "grad_norm": 2.1447482109069824, + "learning_rate": 
0.0001668223998352575, + "loss": 0.1678, + "step": 12890 + }, + { + "epoch": 0.4980887292945674, + "grad_norm": 1.5829964876174927, + "learning_rate": 0.00016679665881565568, + "loss": 0.4113, + "step": 12900 + }, + { + "epoch": 0.49847484458859415, + "grad_norm": 1.329871654510498, + "learning_rate": 0.00016677091779605392, + "loss": 0.2639, + "step": 12910 + }, + { + "epoch": 0.49886095988262097, + "grad_norm": 4.797327518463135, + "learning_rate": 0.0001667451767764521, + "loss": 0.3396, + "step": 12920 + }, + { + "epoch": 0.49924707517664774, + "grad_norm": 2.5864250659942627, + "learning_rate": 0.00016671943575685035, + "loss": 0.4143, + "step": 12930 + }, + { + "epoch": 0.49963319047067456, + "grad_norm": 0.05073557794094086, + "learning_rate": 0.00016669369473724854, + "loss": 0.231, + "step": 12940 + }, + { + "epoch": 0.5000193057647013, + "grad_norm": 0.9530317783355713, + "learning_rate": 0.00016666795371764678, + "loss": 0.3702, + "step": 12950 + }, + { + "epoch": 0.5004054210587281, + "grad_norm": 1.3573989868164062, + "learning_rate": 0.000166642212698045, + "loss": 0.2095, + "step": 12960 + }, + { + "epoch": 0.500791536352755, + "grad_norm": 3.2758514881134033, + "learning_rate": 0.00016661647167844318, + "loss": 0.3901, + "step": 12970 + }, + { + "epoch": 0.5011776516467817, + "grad_norm": 2.359602212905884, + "learning_rate": 0.00016659073065884142, + "loss": 0.6232, + "step": 12980 + }, + { + "epoch": 0.5015637669408085, + "grad_norm": 0.5743809938430786, + "learning_rate": 0.0001665649896392396, + "loss": 0.431, + "step": 12990 + }, + { + "epoch": 0.5019498822348353, + "grad_norm": 2.229215621948242, + "learning_rate": 0.00016653924861963784, + "loss": 0.33, + "step": 13000 + }, + { + "epoch": 0.5023359975288622, + "grad_norm": 3.1233408451080322, + "learning_rate": 0.00016651350760003606, + "loss": 0.3527, + "step": 13010 + }, + { + "epoch": 0.5027221128228889, + "grad_norm": 0.4846508204936981, + "learning_rate": 0.00016648776658043427, + "loss": 0.4514, + "step": 13020 + }, + { + "epoch": 0.5031082281169157, + "grad_norm": 0.807447612285614, + "learning_rate": 0.00016646202556083248, + "loss": 0.3643, + "step": 13030 + }, + { + "epoch": 0.5034943434109425, + "grad_norm": 3.226194143295288, + "learning_rate": 0.00016643628454123067, + "loss": 0.4803, + "step": 13040 + }, + { + "epoch": 0.5038804587049693, + "grad_norm": 6.127805233001709, + "learning_rate": 0.0001664105435216289, + "loss": 0.3501, + "step": 13050 + }, + { + "epoch": 0.5042665739989961, + "grad_norm": 1.3367782831192017, + "learning_rate": 0.0001663848025020271, + "loss": 0.2259, + "step": 13060 + }, + { + "epoch": 0.5046526892930229, + "grad_norm": 1.112602710723877, + "learning_rate": 0.00016635906148242534, + "loss": 0.3868, + "step": 13070 + }, + { + "epoch": 0.5050388045870496, + "grad_norm": 3.188282012939453, + "learning_rate": 0.00016633332046282355, + "loss": 0.2783, + "step": 13080 + }, + { + "epoch": 0.5054249198810765, + "grad_norm": 1.5972063541412354, + "learning_rate": 0.00016630757944322176, + "loss": 0.4661, + "step": 13090 + }, + { + "epoch": 0.5058110351751033, + "grad_norm": 0.8210055232048035, + "learning_rate": 0.00016628183842361998, + "loss": 0.2536, + "step": 13100 + }, + { + "epoch": 0.50619715046913, + "grad_norm": 2.279244899749756, + "learning_rate": 0.00016625609740401816, + "loss": 0.3158, + "step": 13110 + }, + { + "epoch": 0.5065832657631569, + "grad_norm": 2.9017488956451416, + "learning_rate": 0.0001662303563844164, + "loss": 0.4209, + "step": 13120 + }, + { + "epoch": 
0.5069693810571837, + "grad_norm": 0.9567920565605164, + "learning_rate": 0.0001662046153648146, + "loss": 0.3869, + "step": 13130 + }, + { + "epoch": 0.5073554963512105, + "grad_norm": 1.3605408668518066, + "learning_rate": 0.00016617887434521283, + "loss": 0.3529, + "step": 13140 + }, + { + "epoch": 0.5077416116452372, + "grad_norm": 0.502921998500824, + "learning_rate": 0.00016615313332561104, + "loss": 0.2584, + "step": 13150 + }, + { + "epoch": 0.5081277269392641, + "grad_norm": 2.9092366695404053, + "learning_rate": 0.00016612739230600926, + "loss": 0.3494, + "step": 13160 + }, + { + "epoch": 0.5085138422332909, + "grad_norm": 0.28300145268440247, + "learning_rate": 0.00016610165128640747, + "loss": 0.3514, + "step": 13170 + }, + { + "epoch": 0.5088999575273176, + "grad_norm": 1.326134204864502, + "learning_rate": 0.00016607591026680566, + "loss": 0.1593, + "step": 13180 + }, + { + "epoch": 0.5092860728213444, + "grad_norm": 2.1484436988830566, + "learning_rate": 0.0001660501692472039, + "loss": 0.4468, + "step": 13190 + }, + { + "epoch": 0.5096721881153713, + "grad_norm": 1.9255646467208862, + "learning_rate": 0.0001660244282276021, + "loss": 0.3937, + "step": 13200 + }, + { + "epoch": 0.5100583034093981, + "grad_norm": 0.01497764140367508, + "learning_rate": 0.00016599868720800032, + "loss": 0.2795, + "step": 13210 + }, + { + "epoch": 0.5104444187034248, + "grad_norm": 0.992023766040802, + "learning_rate": 0.00016597294618839854, + "loss": 0.2389, + "step": 13220 + }, + { + "epoch": 0.5108305339974516, + "grad_norm": 1.517337441444397, + "learning_rate": 0.00016594720516879675, + "loss": 0.2821, + "step": 13230 + }, + { + "epoch": 0.5112166492914785, + "grad_norm": 2.355637311935425, + "learning_rate": 0.00016592146414919496, + "loss": 0.5727, + "step": 13240 + }, + { + "epoch": 0.5116027645855052, + "grad_norm": 2.450536012649536, + "learning_rate": 0.00016589572312959315, + "loss": 0.4887, + "step": 13250 + }, + { + "epoch": 0.511988879879532, + "grad_norm": 1.2966598272323608, + "learning_rate": 0.0001658699821099914, + "loss": 0.3605, + "step": 13260 + }, + { + "epoch": 0.5123749951735588, + "grad_norm": 1.3981765508651733, + "learning_rate": 0.0001658442410903896, + "loss": 0.5329, + "step": 13270 + }, + { + "epoch": 0.5127611104675857, + "grad_norm": 0.7260739803314209, + "learning_rate": 0.00016581850007078782, + "loss": 0.3447, + "step": 13280 + }, + { + "epoch": 0.5131472257616124, + "grad_norm": 1.1348093748092651, + "learning_rate": 0.00016579275905118603, + "loss": 0.3086, + "step": 13290 + }, + { + "epoch": 0.5135333410556392, + "grad_norm": 1.3015291690826416, + "learning_rate": 0.00016576701803158424, + "loss": 0.3411, + "step": 13300 + }, + { + "epoch": 0.513919456349666, + "grad_norm": 2.352766990661621, + "learning_rate": 0.00016574127701198246, + "loss": 0.1617, + "step": 13310 + }, + { + "epoch": 0.5143055716436928, + "grad_norm": 2.0096113681793213, + "learning_rate": 0.00016571553599238067, + "loss": 0.3168, + "step": 13320 + }, + { + "epoch": 0.5146916869377196, + "grad_norm": 0.8163488507270813, + "learning_rate": 0.00016568979497277888, + "loss": 0.3318, + "step": 13330 + }, + { + "epoch": 0.5150778022317464, + "grad_norm": 0.8218249082565308, + "learning_rate": 0.0001656640539531771, + "loss": 0.2303, + "step": 13340 + }, + { + "epoch": 0.5154639175257731, + "grad_norm": 3.5414462089538574, + "learning_rate": 0.0001656383129335753, + "loss": 0.2295, + "step": 13350 + }, + { + "epoch": 0.5158500328198, + "grad_norm": 2.176178216934204, + "learning_rate": 
0.00016561257191397352, + "loss": 0.5767, + "step": 13360 + }, + { + "epoch": 0.5162361481138268, + "grad_norm": 1.4649319648742676, + "learning_rate": 0.00016558683089437174, + "loss": 0.174, + "step": 13370 + }, + { + "epoch": 0.5166222634078536, + "grad_norm": 2.107895851135254, + "learning_rate": 0.00016556108987476995, + "loss": 0.3444, + "step": 13380 + }, + { + "epoch": 0.5170083787018804, + "grad_norm": 1.3116638660430908, + "learning_rate": 0.00016553534885516816, + "loss": 0.2462, + "step": 13390 + }, + { + "epoch": 0.5173944939959072, + "grad_norm": 2.867553949356079, + "learning_rate": 0.00016550960783556638, + "loss": 0.3106, + "step": 13400 + }, + { + "epoch": 0.517780609289934, + "grad_norm": 3.4331533908843994, + "learning_rate": 0.0001654838668159646, + "loss": 0.5359, + "step": 13410 + }, + { + "epoch": 0.5181667245839607, + "grad_norm": 1.8306528329849243, + "learning_rate": 0.0001654581257963628, + "loss": 0.4225, + "step": 13420 + }, + { + "epoch": 0.5185528398779876, + "grad_norm": 1.242026448249817, + "learning_rate": 0.00016543238477676102, + "loss": 0.2679, + "step": 13430 + }, + { + "epoch": 0.5189389551720144, + "grad_norm": 0.5788571834564209, + "learning_rate": 0.00016540664375715923, + "loss": 0.369, + "step": 13440 + }, + { + "epoch": 0.5193250704660411, + "grad_norm": 0.22553350031375885, + "learning_rate": 0.00016538090273755744, + "loss": 0.422, + "step": 13450 + }, + { + "epoch": 0.5197111857600679, + "grad_norm": 1.6932384967803955, + "learning_rate": 0.00016535516171795566, + "loss": 0.3127, + "step": 13460 + }, + { + "epoch": 0.5200973010540948, + "grad_norm": 1.3747683763504028, + "learning_rate": 0.00016532942069835387, + "loss": 0.2751, + "step": 13470 + }, + { + "epoch": 0.5204834163481216, + "grad_norm": 2.0508596897125244, + "learning_rate": 0.00016530367967875208, + "loss": 0.4951, + "step": 13480 + }, + { + "epoch": 0.5208695316421483, + "grad_norm": 1.044503092765808, + "learning_rate": 0.0001652779386591503, + "loss": 0.2329, + "step": 13490 + }, + { + "epoch": 0.5212556469361751, + "grad_norm": 1.6400004625320435, + "learning_rate": 0.0001652521976395485, + "loss": 0.1846, + "step": 13500 + }, + { + "epoch": 0.521641762230202, + "grad_norm": 2.234170913696289, + "learning_rate": 0.00016522645661994672, + "loss": 0.5005, + "step": 13510 + }, + { + "epoch": 0.5220278775242287, + "grad_norm": 1.8167870044708252, + "learning_rate": 0.00016520071560034493, + "loss": 0.4629, + "step": 13520 + }, + { + "epoch": 0.5224139928182555, + "grad_norm": 1.7200794219970703, + "learning_rate": 0.00016517497458074315, + "loss": 0.4546, + "step": 13530 + }, + { + "epoch": 0.5228001081122823, + "grad_norm": 2.845244884490967, + "learning_rate": 0.00016514923356114136, + "loss": 0.2211, + "step": 13540 + }, + { + "epoch": 0.5231862234063092, + "grad_norm": 5.3078389167785645, + "learning_rate": 0.00016512349254153957, + "loss": 0.4903, + "step": 13550 + }, + { + "epoch": 0.5235723387003359, + "grad_norm": 8.109561920166016, + "learning_rate": 0.0001650977515219378, + "loss": 0.3624, + "step": 13560 + }, + { + "epoch": 0.5239584539943627, + "grad_norm": 0.537749171257019, + "learning_rate": 0.000165072010502336, + "loss": 0.2262, + "step": 13570 + }, + { + "epoch": 0.5243445692883895, + "grad_norm": 2.8182802200317383, + "learning_rate": 0.00016504626948273421, + "loss": 0.4173, + "step": 13580 + }, + { + "epoch": 0.5247306845824163, + "grad_norm": 0.6623479127883911, + "learning_rate": 0.00016502052846313243, + "loss": 0.2955, + "step": 13590 + }, + { + "epoch": 
0.5251167998764431, + "grad_norm": 3.027707099914551, + "learning_rate": 0.00016499478744353064, + "loss": 0.5272, + "step": 13600 + }, + { + "epoch": 0.5255029151704699, + "grad_norm": 1.6830018758773804, + "learning_rate": 0.00016496904642392885, + "loss": 0.2169, + "step": 13610 + }, + { + "epoch": 0.5258890304644968, + "grad_norm": 3.0182113647460938, + "learning_rate": 0.00016494330540432707, + "loss": 0.5747, + "step": 13620 + }, + { + "epoch": 0.5262751457585235, + "grad_norm": 1.5487585067749023, + "learning_rate": 0.00016491756438472528, + "loss": 0.2585, + "step": 13630 + }, + { + "epoch": 0.5266612610525503, + "grad_norm": 1.2525122165679932, + "learning_rate": 0.0001648918233651235, + "loss": 0.3622, + "step": 13640 + }, + { + "epoch": 0.5270473763465771, + "grad_norm": 1.9910658597946167, + "learning_rate": 0.00016486608234552174, + "loss": 0.2049, + "step": 13650 + }, + { + "epoch": 0.5274334916406039, + "grad_norm": 0.7254251837730408, + "learning_rate": 0.00016484034132591992, + "loss": 0.3101, + "step": 13660 + }, + { + "epoch": 0.5278196069346307, + "grad_norm": 0.9839001893997192, + "learning_rate": 0.00016481460030631813, + "loss": 0.4723, + "step": 13670 + }, + { + "epoch": 0.5282057222286575, + "grad_norm": 2.176529884338379, + "learning_rate": 0.00016478885928671635, + "loss": 0.3669, + "step": 13680 + }, + { + "epoch": 0.5285918375226842, + "grad_norm": 2.534996509552002, + "learning_rate": 0.00016476311826711456, + "loss": 0.551, + "step": 13690 + }, + { + "epoch": 0.5289779528167111, + "grad_norm": 2.0100669860839844, + "learning_rate": 0.0001647373772475128, + "loss": 0.445, + "step": 13700 + }, + { + "epoch": 0.5293640681107379, + "grad_norm": 0.7759265899658203, + "learning_rate": 0.000164711636227911, + "loss": 0.2839, + "step": 13710 + }, + { + "epoch": 0.5297501834047647, + "grad_norm": 3.398287057876587, + "learning_rate": 0.00016468589520830923, + "loss": 0.3497, + "step": 13720 + }, + { + "epoch": 0.5301362986987914, + "grad_norm": 2.6792221069335938, + "learning_rate": 0.00016466015418870741, + "loss": 0.3435, + "step": 13730 + }, + { + "epoch": 0.5305224139928183, + "grad_norm": 0.7382081747055054, + "learning_rate": 0.00016463441316910563, + "loss": 0.367, + "step": 13740 + }, + { + "epoch": 0.5309085292868451, + "grad_norm": 0.9496407508850098, + "learning_rate": 0.00016460867214950384, + "loss": 0.305, + "step": 13750 + }, + { + "epoch": 0.5312946445808718, + "grad_norm": 1.2950342893600464, + "learning_rate": 0.00016458293112990205, + "loss": 0.2769, + "step": 13760 + }, + { + "epoch": 0.5316807598748986, + "grad_norm": 1.1744359731674194, + "learning_rate": 0.0001645571901103003, + "loss": 0.4776, + "step": 13770 + }, + { + "epoch": 0.5320668751689255, + "grad_norm": 1.1507617235183716, + "learning_rate": 0.00016453144909069848, + "loss": 0.4486, + "step": 13780 + }, + { + "epoch": 0.5324529904629522, + "grad_norm": 3.200432300567627, + "learning_rate": 0.00016450570807109672, + "loss": 0.4144, + "step": 13790 + }, + { + "epoch": 0.532839105756979, + "grad_norm": 0.991581916809082, + "learning_rate": 0.0001644799670514949, + "loss": 0.2314, + "step": 13800 + }, + { + "epoch": 0.5332252210510058, + "grad_norm": 4.800248622894287, + "learning_rate": 0.00016445422603189312, + "loss": 0.4601, + "step": 13810 + }, + { + "epoch": 0.5336113363450327, + "grad_norm": 1.2141329050064087, + "learning_rate": 0.00016442848501229136, + "loss": 0.257, + "step": 13820 + }, + { + "epoch": 0.5339974516390594, + "grad_norm": 0.8803738951683044, + "learning_rate": 
0.00016440274399268955, + "loss": 0.4645, + "step": 13830 + }, + { + "epoch": 0.5343835669330862, + "grad_norm": 1.2020646333694458, + "learning_rate": 0.0001643770029730878, + "loss": 0.3751, + "step": 13840 + }, + { + "epoch": 0.534769682227113, + "grad_norm": 0.9887505173683167, + "learning_rate": 0.00016435126195348597, + "loss": 0.2266, + "step": 13850 + }, + { + "epoch": 0.5351557975211398, + "grad_norm": 0.38067731261253357, + "learning_rate": 0.00016432552093388421, + "loss": 0.3482, + "step": 13860 + }, + { + "epoch": 0.5355419128151666, + "grad_norm": 5.429462909698486, + "learning_rate": 0.0001642997799142824, + "loss": 0.4055, + "step": 13870 + }, + { + "epoch": 0.5359280281091934, + "grad_norm": 2.197861909866333, + "learning_rate": 0.00016427403889468061, + "loss": 0.1701, + "step": 13880 + }, + { + "epoch": 0.5363141434032203, + "grad_norm": 1.467132568359375, + "learning_rate": 0.00016424829787507885, + "loss": 0.3536, + "step": 13890 + }, + { + "epoch": 0.536700258697247, + "grad_norm": 2.0702550411224365, + "learning_rate": 0.00016422255685547704, + "loss": 0.5644, + "step": 13900 + }, + { + "epoch": 0.5370863739912738, + "grad_norm": 1.4855132102966309, + "learning_rate": 0.00016419681583587528, + "loss": 0.4068, + "step": 13910 + }, + { + "epoch": 0.5374724892853006, + "grad_norm": 0.6586676239967346, + "learning_rate": 0.00016417107481627347, + "loss": 0.3709, + "step": 13920 + }, + { + "epoch": 0.5378586045793274, + "grad_norm": 0.026774466037750244, + "learning_rate": 0.0001641453337966717, + "loss": 0.2635, + "step": 13930 + }, + { + "epoch": 0.5382447198733542, + "grad_norm": 0.9848103523254395, + "learning_rate": 0.0001641195927770699, + "loss": 0.249, + "step": 13940 + }, + { + "epoch": 0.538630835167381, + "grad_norm": 3.77512526512146, + "learning_rate": 0.0001640938517574681, + "loss": 0.5356, + "step": 13950 + }, + { + "epoch": 0.5390169504614077, + "grad_norm": 5.543573379516602, + "learning_rate": 0.00016406811073786635, + "loss": 0.6471, + "step": 13960 + }, + { + "epoch": 0.5394030657554346, + "grad_norm": 0.8161652684211731, + "learning_rate": 0.00016404236971826453, + "loss": 0.5691, + "step": 13970 + }, + { + "epoch": 0.5397891810494614, + "grad_norm": 1.5539859533309937, + "learning_rate": 0.00016401662869866277, + "loss": 0.4406, + "step": 13980 + }, + { + "epoch": 0.5401752963434882, + "grad_norm": 1.3035658597946167, + "learning_rate": 0.00016399088767906096, + "loss": 0.3086, + "step": 13990 + }, + { + "epoch": 0.5405614116375149, + "grad_norm": 0.9168418645858765, + "learning_rate": 0.0001639651466594592, + "loss": 0.1581, + "step": 14000 + }, + { + "epoch": 0.5409475269315418, + "grad_norm": 1.0382287502288818, + "learning_rate": 0.00016393940563985741, + "loss": 0.4723, + "step": 14010 + }, + { + "epoch": 0.5413336422255686, + "grad_norm": 2.896981716156006, + "learning_rate": 0.0001639136646202556, + "loss": 0.2999, + "step": 14020 + }, + { + "epoch": 0.5417197575195953, + "grad_norm": 0.7354179620742798, + "learning_rate": 0.00016388792360065384, + "loss": 0.4853, + "step": 14030 + }, + { + "epoch": 0.5421058728136221, + "grad_norm": 3.221067190170288, + "learning_rate": 0.00016386218258105203, + "loss": 0.3622, + "step": 14040 + }, + { + "epoch": 0.542491988107649, + "grad_norm": 6.591146469116211, + "learning_rate": 0.00016383644156145027, + "loss": 0.5803, + "step": 14050 + }, + { + "epoch": 0.5428781034016757, + "grad_norm": 3.1521377563476562, + "learning_rate": 0.00016381070054184845, + "loss": 0.3267, + "step": 14060 + }, + { + 
"epoch": 0.5432642186957025, + "grad_norm": 1.7890762090682983, + "learning_rate": 0.0001637849595222467, + "loss": 0.4584, + "step": 14070 + }, + { + "epoch": 0.5436503339897293, + "grad_norm": 1.6599558591842651, + "learning_rate": 0.0001637592185026449, + "loss": 0.298, + "step": 14080 + }, + { + "epoch": 0.5440364492837562, + "grad_norm": 3.521927833557129, + "learning_rate": 0.00016373347748304312, + "loss": 0.3743, + "step": 14090 + }, + { + "epoch": 0.5444225645777829, + "grad_norm": 3.8942599296569824, + "learning_rate": 0.00016370773646344133, + "loss": 0.3254, + "step": 14100 + }, + { + "epoch": 0.5448086798718097, + "grad_norm": 2.8547496795654297, + "learning_rate": 0.00016368199544383952, + "loss": 0.4073, + "step": 14110 + }, + { + "epoch": 0.5451947951658365, + "grad_norm": 1.0060430765151978, + "learning_rate": 0.00016365625442423776, + "loss": 0.1631, + "step": 14120 + }, + { + "epoch": 0.5455809104598633, + "grad_norm": 2.2001001834869385, + "learning_rate": 0.00016363051340463595, + "loss": 0.2854, + "step": 14130 + }, + { + "epoch": 0.5459670257538901, + "grad_norm": 1.3699944019317627, + "learning_rate": 0.0001636047723850342, + "loss": 0.456, + "step": 14140 + }, + { + "epoch": 0.5463531410479169, + "grad_norm": 2.1481733322143555, + "learning_rate": 0.0001635790313654324, + "loss": 0.4085, + "step": 14150 + }, + { + "epoch": 0.5467392563419438, + "grad_norm": 0.40439683198928833, + "learning_rate": 0.00016355329034583061, + "loss": 0.3932, + "step": 14160 + }, + { + "epoch": 0.5471253716359705, + "grad_norm": 2.2773404121398926, + "learning_rate": 0.00016352754932622883, + "loss": 0.2926, + "step": 14170 + }, + { + "epoch": 0.5475114869299973, + "grad_norm": 2.2974839210510254, + "learning_rate": 0.000163501808306627, + "loss": 0.2419, + "step": 14180 + }, + { + "epoch": 0.5478976022240241, + "grad_norm": 1.0429989099502563, + "learning_rate": 0.00016347606728702525, + "loss": 0.2559, + "step": 14190 + }, + { + "epoch": 0.5482837175180509, + "grad_norm": 0.8988879919052124, + "learning_rate": 0.00016345032626742347, + "loss": 0.1677, + "step": 14200 + }, + { + "epoch": 0.5486698328120777, + "grad_norm": 1.2740018367767334, + "learning_rate": 0.00016342458524782168, + "loss": 0.2452, + "step": 14210 + }, + { + "epoch": 0.5490559481061045, + "grad_norm": 1.789467692375183, + "learning_rate": 0.0001633988442282199, + "loss": 0.2272, + "step": 14220 + }, + { + "epoch": 0.5494420634001312, + "grad_norm": 3.2136781215667725, + "learning_rate": 0.0001633731032086181, + "loss": 0.4026, + "step": 14230 + }, + { + "epoch": 0.5498281786941581, + "grad_norm": 2.4747092723846436, + "learning_rate": 0.00016334736218901632, + "loss": 0.2371, + "step": 14240 + }, + { + "epoch": 0.5502142939881849, + "grad_norm": 1.5639567375183105, + "learning_rate": 0.0001633216211694145, + "loss": 0.2801, + "step": 14250 + }, + { + "epoch": 0.5506004092822117, + "grad_norm": 3.9598312377929688, + "learning_rate": 0.00016329588014981275, + "loss": 0.2583, + "step": 14260 + }, + { + "epoch": 0.5509865245762384, + "grad_norm": 1.5873563289642334, + "learning_rate": 0.00016327013913021096, + "loss": 0.2729, + "step": 14270 + }, + { + "epoch": 0.5513726398702653, + "grad_norm": 2.2313668727874756, + "learning_rate": 0.00016324439811060917, + "loss": 0.191, + "step": 14280 + }, + { + "epoch": 0.5517587551642921, + "grad_norm": 1.6087117195129395, + "learning_rate": 0.0001632186570910074, + "loss": 0.2698, + "step": 14290 + }, + { + "epoch": 0.5521448704583188, + "grad_norm": 6.5459675788879395, + 
"learning_rate": 0.0001631929160714056, + "loss": 0.3632, + "step": 14300 + }, + { + "epoch": 0.5525309857523456, + "grad_norm": 1.2121779918670654, + "learning_rate": 0.00016316717505180381, + "loss": 0.4541, + "step": 14310 + }, + { + "epoch": 0.5529171010463725, + "grad_norm": 2.7277257442474365, + "learning_rate": 0.00016314143403220203, + "loss": 0.1489, + "step": 14320 + }, + { + "epoch": 0.5533032163403993, + "grad_norm": 2.2566685676574707, + "learning_rate": 0.00016311569301260024, + "loss": 0.1838, + "step": 14330 + }, + { + "epoch": 0.553689331634426, + "grad_norm": 0.44783294200897217, + "learning_rate": 0.00016308995199299845, + "loss": 0.4745, + "step": 14340 + }, + { + "epoch": 0.5540754469284528, + "grad_norm": 1.0200363397598267, + "learning_rate": 0.00016306421097339667, + "loss": 0.1251, + "step": 14350 + }, + { + "epoch": 0.5544615622224797, + "grad_norm": 1.1761879920959473, + "learning_rate": 0.00016303846995379488, + "loss": 0.6837, + "step": 14360 + }, + { + "epoch": 0.5548476775165064, + "grad_norm": 1.8275704383850098, + "learning_rate": 0.0001630127289341931, + "loss": 0.3968, + "step": 14370 + }, + { + "epoch": 0.5552337928105332, + "grad_norm": 0.7219232320785522, + "learning_rate": 0.0001629869879145913, + "loss": 0.3278, + "step": 14380 + }, + { + "epoch": 0.55561990810456, + "grad_norm": 1.9161540269851685, + "learning_rate": 0.00016296124689498952, + "loss": 0.5071, + "step": 14390 + }, + { + "epoch": 0.5560060233985868, + "grad_norm": 2.4773502349853516, + "learning_rate": 0.00016293550587538773, + "loss": 0.3268, + "step": 14400 + }, + { + "epoch": 0.5563921386926136, + "grad_norm": 1.526877760887146, + "learning_rate": 0.00016290976485578595, + "loss": 0.284, + "step": 14410 + }, + { + "epoch": 0.5567782539866404, + "grad_norm": 2.082036018371582, + "learning_rate": 0.00016288402383618416, + "loss": 0.4194, + "step": 14420 + }, + { + "epoch": 0.5571643692806673, + "grad_norm": 4.1033477783203125, + "learning_rate": 0.00016285828281658237, + "loss": 0.329, + "step": 14430 + }, + { + "epoch": 0.557550484574694, + "grad_norm": 3.344879388809204, + "learning_rate": 0.0001628325417969806, + "loss": 0.3599, + "step": 14440 + }, + { + "epoch": 0.5579365998687208, + "grad_norm": 2.6200602054595947, + "learning_rate": 0.0001628068007773788, + "loss": 0.5152, + "step": 14450 + }, + { + "epoch": 0.5583227151627476, + "grad_norm": 1.05362868309021, + "learning_rate": 0.000162781059757777, + "loss": 0.4454, + "step": 14460 + }, + { + "epoch": 0.5587088304567744, + "grad_norm": 2.557406187057495, + "learning_rate": 0.00016275531873817523, + "loss": 0.3779, + "step": 14470 + }, + { + "epoch": 0.5590949457508012, + "grad_norm": 0.8478209376335144, + "learning_rate": 0.00016272957771857344, + "loss": 0.4289, + "step": 14480 + }, + { + "epoch": 0.559481061044828, + "grad_norm": 3.543574094772339, + "learning_rate": 0.00016270383669897165, + "loss": 0.37, + "step": 14490 + }, + { + "epoch": 0.5598671763388547, + "grad_norm": 0.21068768203258514, + "learning_rate": 0.00016267809567936987, + "loss": 0.2602, + "step": 14500 + }, + { + "epoch": 0.5602532916328816, + "grad_norm": 1.1703628301620483, + "learning_rate": 0.00016265235465976808, + "loss": 0.3684, + "step": 14510 + }, + { + "epoch": 0.5606394069269084, + "grad_norm": 1.4498575925827026, + "learning_rate": 0.0001626266136401663, + "loss": 0.4089, + "step": 14520 + }, + { + "epoch": 0.5610255222209352, + "grad_norm": 1.617297887802124, + "learning_rate": 0.0001626008726205645, + "loss": 0.2759, + "step": 14530 + }, + 
{ + "epoch": 0.5614116375149619, + "grad_norm": 0.8708978891372681, + "learning_rate": 0.00016257513160096272, + "loss": 0.2523, + "step": 14540 + }, + { + "epoch": 0.5617977528089888, + "grad_norm": 1.098026156425476, + "learning_rate": 0.00016254939058136093, + "loss": 0.3507, + "step": 14550 + }, + { + "epoch": 0.5621838681030156, + "grad_norm": 3.0867936611175537, + "learning_rate": 0.00016252364956175915, + "loss": 0.3563, + "step": 14560 + }, + { + "epoch": 0.5625699833970423, + "grad_norm": 1.4829964637756348, + "learning_rate": 0.00016249790854215736, + "loss": 0.3586, + "step": 14570 + }, + { + "epoch": 0.5629560986910691, + "grad_norm": 4.029405117034912, + "learning_rate": 0.00016247216752255557, + "loss": 0.5198, + "step": 14580 + }, + { + "epoch": 0.563342213985096, + "grad_norm": 2.5473573207855225, + "learning_rate": 0.00016244642650295379, + "loss": 0.3818, + "step": 14590 + }, + { + "epoch": 0.5637283292791228, + "grad_norm": 1.387529730796814, + "learning_rate": 0.000162420685483352, + "loss": 0.3453, + "step": 14600 + }, + { + "epoch": 0.5641144445731495, + "grad_norm": 3.2525246143341064, + "learning_rate": 0.0001623949444637502, + "loss": 0.6096, + "step": 14610 + }, + { + "epoch": 0.5645005598671763, + "grad_norm": 1.23868989944458, + "learning_rate": 0.00016236920344414843, + "loss": 0.2785, + "step": 14620 + }, + { + "epoch": 0.5648866751612032, + "grad_norm": 1.763416051864624, + "learning_rate": 0.00016234346242454667, + "loss": 0.2313, + "step": 14630 + }, + { + "epoch": 0.5652727904552299, + "grad_norm": 2.6030027866363525, + "learning_rate": 0.00016231772140494485, + "loss": 0.5002, + "step": 14640 + }, + { + "epoch": 0.5656589057492567, + "grad_norm": 4.345195770263672, + "learning_rate": 0.00016229198038534307, + "loss": 0.3433, + "step": 14650 + }, + { + "epoch": 0.5660450210432835, + "grad_norm": 1.8660558462142944, + "learning_rate": 0.00016226623936574128, + "loss": 0.325, + "step": 14660 + }, + { + "epoch": 0.5664311363373103, + "grad_norm": 2.504354953765869, + "learning_rate": 0.0001622404983461395, + "loss": 0.3013, + "step": 14670 + }, + { + "epoch": 0.5668172516313371, + "grad_norm": 1.715135097503662, + "learning_rate": 0.0001622147573265377, + "loss": 0.2396, + "step": 14680 + }, + { + "epoch": 0.5672033669253639, + "grad_norm": 0.5195931792259216, + "learning_rate": 0.00016218901630693592, + "loss": 0.4992, + "step": 14690 + }, + { + "epoch": 0.5675894822193908, + "grad_norm": 1.076095461845398, + "learning_rate": 0.00016216327528733416, + "loss": 0.4488, + "step": 14700 + }, + { + "epoch": 0.5679755975134175, + "grad_norm": 0.42350637912750244, + "learning_rate": 0.00016213753426773235, + "loss": 0.2798, + "step": 14710 + }, + { + "epoch": 0.5683617128074443, + "grad_norm": 2.8514647483825684, + "learning_rate": 0.00016211179324813056, + "loss": 0.3108, + "step": 14720 + }, + { + "epoch": 0.5687478281014711, + "grad_norm": 1.4496532678604126, + "learning_rate": 0.00016208605222852877, + "loss": 0.4197, + "step": 14730 + }, + { + "epoch": 0.5691339433954979, + "grad_norm": 2.513998031616211, + "learning_rate": 0.00016206031120892699, + "loss": 0.4931, + "step": 14740 + }, + { + "epoch": 0.5695200586895247, + "grad_norm": 1.5905802249908447, + "learning_rate": 0.0001620345701893252, + "loss": 0.2175, + "step": 14750 + }, + { + "epoch": 0.5699061739835515, + "grad_norm": 0.4657856523990631, + "learning_rate": 0.0001620088291697234, + "loss": 0.4753, + "step": 14760 + }, + { + "epoch": 0.5702922892775782, + "grad_norm": 1.4188594818115234, + 
"learning_rate": 0.00016198308815012165, + "loss": 0.4849, + "step": 14770 + }, + { + "epoch": 0.5706784045716051, + "grad_norm": 0.6742203235626221, + "learning_rate": 0.00016195734713051984, + "loss": 0.3451, + "step": 14780 + }, + { + "epoch": 0.5710645198656319, + "grad_norm": 1.521262526512146, + "learning_rate": 0.00016193160611091805, + "loss": 0.6253, + "step": 14790 + }, + { + "epoch": 0.5714506351596587, + "grad_norm": 0.8657771348953247, + "learning_rate": 0.00016190586509131627, + "loss": 0.3664, + "step": 14800 + }, + { + "epoch": 0.5718367504536854, + "grad_norm": 1.0131505727767944, + "learning_rate": 0.00016188012407171448, + "loss": 0.3395, + "step": 14810 + }, + { + "epoch": 0.5722228657477123, + "grad_norm": 0.8506319522857666, + "learning_rate": 0.00016185438305211272, + "loss": 0.2769, + "step": 14820 + }, + { + "epoch": 0.5726089810417391, + "grad_norm": 3.1080141067504883, + "learning_rate": 0.0001618286420325109, + "loss": 0.3185, + "step": 14830 + }, + { + "epoch": 0.5729950963357658, + "grad_norm": 0.8805003762245178, + "learning_rate": 0.00016180290101290915, + "loss": 0.3089, + "step": 14840 + }, + { + "epoch": 0.5733812116297926, + "grad_norm": 3.9470136165618896, + "learning_rate": 0.00016177715999330733, + "loss": 0.4552, + "step": 14850 + }, + { + "epoch": 0.5737673269238195, + "grad_norm": 1.10677969455719, + "learning_rate": 0.00016175141897370557, + "loss": 0.4624, + "step": 14860 + }, + { + "epoch": 0.5741534422178463, + "grad_norm": 1.185539960861206, + "learning_rate": 0.00016172567795410376, + "loss": 0.3144, + "step": 14870 + }, + { + "epoch": 0.574539557511873, + "grad_norm": 2.484386920928955, + "learning_rate": 0.00016169993693450197, + "loss": 0.3409, + "step": 14880 + }, + { + "epoch": 0.5749256728058998, + "grad_norm": 2.1621437072753906, + "learning_rate": 0.0001616741959149002, + "loss": 0.4421, + "step": 14890 + }, + { + "epoch": 0.5753117880999267, + "grad_norm": 1.3359025716781616, + "learning_rate": 0.0001616484548952984, + "loss": 0.2826, + "step": 14900 + }, + { + "epoch": 0.5756979033939534, + "grad_norm": 2.828157901763916, + "learning_rate": 0.00016162271387569664, + "loss": 0.4615, + "step": 14910 + }, + { + "epoch": 0.5760840186879802, + "grad_norm": 2.0543019771575928, + "learning_rate": 0.00016159697285609483, + "loss": 0.4307, + "step": 14920 + }, + { + "epoch": 0.5764701339820071, + "grad_norm": 0.3085225522518158, + "learning_rate": 0.00016157123183649307, + "loss": 0.317, + "step": 14930 + }, + { + "epoch": 0.5768562492760339, + "grad_norm": 1.459349274635315, + "learning_rate": 0.00016154549081689125, + "loss": 0.3928, + "step": 14940 + }, + { + "epoch": 0.5772423645700606, + "grad_norm": 0.6684612035751343, + "learning_rate": 0.00016151974979728947, + "loss": 0.4129, + "step": 14950 + }, + { + "epoch": 0.5776284798640874, + "grad_norm": 2.428311586380005, + "learning_rate": 0.0001614940087776877, + "loss": 0.4163, + "step": 14960 + }, + { + "epoch": 0.5780145951581143, + "grad_norm": 1.8885403871536255, + "learning_rate": 0.0001614682677580859, + "loss": 0.4311, + "step": 14970 + }, + { + "epoch": 0.578400710452141, + "grad_norm": 3.9598031044006348, + "learning_rate": 0.00016144252673848413, + "loss": 0.3103, + "step": 14980 + }, + { + "epoch": 0.5787868257461678, + "grad_norm": 1.872383713722229, + "learning_rate": 0.00016141678571888232, + "loss": 0.3592, + "step": 14990 + }, + { + "epoch": 0.5791729410401946, + "grad_norm": 1.023526668548584, + "learning_rate": 0.00016139104469928056, + "loss": 0.4185, + "step": 15000 + 
}, + { + "epoch": 0.5795590563342214, + "grad_norm": 1.5721429586410522, + "learning_rate": 0.00016136530367967877, + "loss": 0.4017, + "step": 15010 + }, + { + "epoch": 0.5799451716282482, + "grad_norm": 3.502350091934204, + "learning_rate": 0.00016133956266007696, + "loss": 0.397, + "step": 15020 + }, + { + "epoch": 0.580331286922275, + "grad_norm": 2.415985345840454, + "learning_rate": 0.0001613138216404752, + "loss": 0.4041, + "step": 15030 + }, + { + "epoch": 0.5807174022163017, + "grad_norm": 0.5441868901252747, + "learning_rate": 0.00016128808062087339, + "loss": 0.2395, + "step": 15040 + }, + { + "epoch": 0.5811035175103286, + "grad_norm": 2.453216552734375, + "learning_rate": 0.00016126233960127163, + "loss": 0.1586, + "step": 15050 + }, + { + "epoch": 0.5814896328043554, + "grad_norm": 3.108646869659424, + "learning_rate": 0.0001612365985816698, + "loss": 0.3996, + "step": 15060 + }, + { + "epoch": 0.5818757480983822, + "grad_norm": 0.7707905173301697, + "learning_rate": 0.00016121085756206805, + "loss": 0.1756, + "step": 15070 + }, + { + "epoch": 0.5822618633924089, + "grad_norm": 0.42857447266578674, + "learning_rate": 0.00016118511654246627, + "loss": 0.258, + "step": 15080 + }, + { + "epoch": 0.5826479786864358, + "grad_norm": 0.7148373126983643, + "learning_rate": 0.00016115937552286445, + "loss": 0.3755, + "step": 15090 + }, + { + "epoch": 0.5830340939804626, + "grad_norm": 0.04789021611213684, + "learning_rate": 0.0001611336345032627, + "loss": 0.2087, + "step": 15100 + }, + { + "epoch": 0.5834202092744893, + "grad_norm": 5.012516975402832, + "learning_rate": 0.00016110789348366088, + "loss": 0.5406, + "step": 15110 + }, + { + "epoch": 0.5838063245685161, + "grad_norm": 1.4139299392700195, + "learning_rate": 0.00016108215246405912, + "loss": 0.407, + "step": 15120 + }, + { + "epoch": 0.584192439862543, + "grad_norm": 0.8637074828147888, + "learning_rate": 0.00016105641144445733, + "loss": 0.2987, + "step": 15130 + }, + { + "epoch": 0.5845785551565698, + "grad_norm": 0.9556403160095215, + "learning_rate": 0.00016103067042485555, + "loss": 0.4319, + "step": 15140 + }, + { + "epoch": 0.5849646704505965, + "grad_norm": 2.071455955505371, + "learning_rate": 0.00016100492940525376, + "loss": 0.4824, + "step": 15150 + }, + { + "epoch": 0.5853507857446233, + "grad_norm": 3.8130764961242676, + "learning_rate": 0.00016097918838565195, + "loss": 0.4749, + "step": 15160 + }, + { + "epoch": 0.5857369010386502, + "grad_norm": 1.290738582611084, + "learning_rate": 0.00016095344736605019, + "loss": 0.4486, + "step": 15170 + }, + { + "epoch": 0.5861230163326769, + "grad_norm": 0.9964671730995178, + "learning_rate": 0.00016092770634644837, + "loss": 0.1514, + "step": 15180 + }, + { + "epoch": 0.5865091316267037, + "grad_norm": 0.5267524123191833, + "learning_rate": 0.0001609019653268466, + "loss": 0.2298, + "step": 15190 + }, + { + "epoch": 0.5868952469207306, + "grad_norm": 2.028960704803467, + "learning_rate": 0.00016087622430724483, + "loss": 0.2925, + "step": 15200 + }, + { + "epoch": 0.5872813622147574, + "grad_norm": 0.8423904776573181, + "learning_rate": 0.00016085048328764304, + "loss": 0.4221, + "step": 15210 + }, + { + "epoch": 0.5876674775087841, + "grad_norm": 1.9663047790527344, + "learning_rate": 0.00016082474226804125, + "loss": 0.3595, + "step": 15220 + }, + { + "epoch": 0.5880535928028109, + "grad_norm": 1.2221906185150146, + "learning_rate": 0.00016079900124843944, + "loss": 0.3174, + "step": 15230 + }, + { + "epoch": 0.5884397080968378, + "grad_norm": 2.138437032699585, + 
"learning_rate": 0.00016077326022883768, + "loss": 0.2993, + "step": 15240 + }, + { + "epoch": 0.5888258233908645, + "grad_norm": 1.8036224842071533, + "learning_rate": 0.00016074751920923586, + "loss": 0.3897, + "step": 15250 + }, + { + "epoch": 0.5892119386848913, + "grad_norm": 2.3102879524230957, + "learning_rate": 0.0001607217781896341, + "loss": 0.3713, + "step": 15260 + }, + { + "epoch": 0.5895980539789181, + "grad_norm": 1.40048348903656, + "learning_rate": 0.00016069603717003232, + "loss": 0.2701, + "step": 15270 + }, + { + "epoch": 0.589984169272945, + "grad_norm": 1.0581787824630737, + "learning_rate": 0.00016067029615043053, + "loss": 0.2529, + "step": 15280 + }, + { + "epoch": 0.5903702845669717, + "grad_norm": 0.668211042881012, + "learning_rate": 0.00016064455513082875, + "loss": 0.221, + "step": 15290 + }, + { + "epoch": 0.5907563998609985, + "grad_norm": 0.7950372099876404, + "learning_rate": 0.00016061881411122696, + "loss": 0.2405, + "step": 15300 + }, + { + "epoch": 0.5911425151550252, + "grad_norm": 1.8531723022460938, + "learning_rate": 0.00016059307309162517, + "loss": 0.3423, + "step": 15310 + }, + { + "epoch": 0.5915286304490521, + "grad_norm": 0.2071121335029602, + "learning_rate": 0.00016056733207202339, + "loss": 0.2923, + "step": 15320 + }, + { + "epoch": 0.5919147457430789, + "grad_norm": 2.4298369884490967, + "learning_rate": 0.0001605415910524216, + "loss": 0.531, + "step": 15330 + }, + { + "epoch": 0.5923008610371057, + "grad_norm": 3.2297933101654053, + "learning_rate": 0.0001605158500328198, + "loss": 0.2563, + "step": 15340 + }, + { + "epoch": 0.5926869763311324, + "grad_norm": 1.533225178718567, + "learning_rate": 0.00016049010901321803, + "loss": 0.2712, + "step": 15350 + }, + { + "epoch": 0.5930730916251593, + "grad_norm": 3.6168954372406006, + "learning_rate": 0.00016046436799361624, + "loss": 0.6428, + "step": 15360 + }, + { + "epoch": 0.5934592069191861, + "grad_norm": 0.8912101984024048, + "learning_rate": 0.00016043862697401445, + "loss": 0.2882, + "step": 15370 + }, + { + "epoch": 0.5938453222132128, + "grad_norm": 0.6782923936843872, + "learning_rate": 0.00016041288595441267, + "loss": 0.3451, + "step": 15380 + }, + { + "epoch": 0.5942314375072396, + "grad_norm": 2.7575254440307617, + "learning_rate": 0.00016038714493481088, + "loss": 0.272, + "step": 15390 + }, + { + "epoch": 0.5946175528012665, + "grad_norm": 1.8348017930984497, + "learning_rate": 0.0001603614039152091, + "loss": 0.286, + "step": 15400 + }, + { + "epoch": 0.5950036680952933, + "grad_norm": 3.1459157466888428, + "learning_rate": 0.0001603356628956073, + "loss": 0.2986, + "step": 15410 + }, + { + "epoch": 0.59538978338932, + "grad_norm": 2.0769810676574707, + "learning_rate": 0.00016030992187600552, + "loss": 0.5512, + "step": 15420 + }, + { + "epoch": 0.5957758986833468, + "grad_norm": 0.5503840446472168, + "learning_rate": 0.00016028418085640373, + "loss": 0.4117, + "step": 15430 + }, + { + "epoch": 0.5961620139773737, + "grad_norm": 1.9759409427642822, + "learning_rate": 0.00016025843983680194, + "loss": 0.3619, + "step": 15440 + }, + { + "epoch": 0.5965481292714004, + "grad_norm": 2.2451424598693848, + "learning_rate": 0.00016023269881720016, + "loss": 0.2798, + "step": 15450 + }, + { + "epoch": 0.5969342445654272, + "grad_norm": 1.8537431955337524, + "learning_rate": 0.00016020695779759837, + "loss": 0.3739, + "step": 15460 + }, + { + "epoch": 0.5973203598594541, + "grad_norm": 1.7667044401168823, + "learning_rate": 0.00016018121677799658, + "loss": 0.3694, + "step": 15470 
+ }, + { + "epoch": 0.5977064751534809, + "grad_norm": 0.8955661654472351, + "learning_rate": 0.0001601554757583948, + "loss": 0.2036, + "step": 15480 + }, + { + "epoch": 0.5980925904475076, + "grad_norm": 0.9526143074035645, + "learning_rate": 0.000160129734738793, + "loss": 0.3728, + "step": 15490 + }, + { + "epoch": 0.5984787057415344, + "grad_norm": 0.5816594958305359, + "learning_rate": 0.00016010399371919122, + "loss": 0.3263, + "step": 15500 + }, + { + "epoch": 0.5988648210355613, + "grad_norm": 0.6841669678688049, + "learning_rate": 0.00016007825269958944, + "loss": 0.3252, + "step": 15510 + }, + { + "epoch": 0.599250936329588, + "grad_norm": 0.9375134706497192, + "learning_rate": 0.00016005251167998765, + "loss": 0.2207, + "step": 15520 + }, + { + "epoch": 0.5996370516236148, + "grad_norm": 0.39535248279571533, + "learning_rate": 0.00016002677066038586, + "loss": 0.3566, + "step": 15530 + }, + { + "epoch": 0.6000231669176416, + "grad_norm": 0.5440202951431274, + "learning_rate": 0.00016000102964078408, + "loss": 0.2886, + "step": 15540 + }, + { + "epoch": 0.6004092822116684, + "grad_norm": 0.45111024379730225, + "learning_rate": 0.0001599752886211823, + "loss": 0.2697, + "step": 15550 + }, + { + "epoch": 0.6007953975056952, + "grad_norm": 2.372063398361206, + "learning_rate": 0.0001599495476015805, + "loss": 0.4716, + "step": 15560 + }, + { + "epoch": 0.601181512799722, + "grad_norm": 0.5841318368911743, + "learning_rate": 0.00015992380658197872, + "loss": 0.5611, + "step": 15570 + }, + { + "epoch": 0.6015676280937488, + "grad_norm": 0.323010116815567, + "learning_rate": 0.00015989806556237693, + "loss": 0.2733, + "step": 15580 + }, + { + "epoch": 0.6019537433877756, + "grad_norm": 1.4498323202133179, + "learning_rate": 0.00015987232454277514, + "loss": 0.4063, + "step": 15590 + }, + { + "epoch": 0.6023398586818024, + "grad_norm": 0.47180086374282837, + "learning_rate": 0.00015984658352317336, + "loss": 0.3322, + "step": 15600 + }, + { + "epoch": 0.6027259739758292, + "grad_norm": 1.2303547859191895, + "learning_rate": 0.00015982084250357157, + "loss": 0.2985, + "step": 15610 + }, + { + "epoch": 0.6031120892698559, + "grad_norm": 2.5056209564208984, + "learning_rate": 0.00015979510148396978, + "loss": 0.4903, + "step": 15620 + }, + { + "epoch": 0.6034982045638828, + "grad_norm": 1.344814419746399, + "learning_rate": 0.00015976936046436802, + "loss": 0.3806, + "step": 15630 + }, + { + "epoch": 0.6038843198579096, + "grad_norm": 3.2931411266326904, + "learning_rate": 0.0001597436194447662, + "loss": 0.3291, + "step": 15640 + }, + { + "epoch": 0.6042704351519363, + "grad_norm": 0.3108818829059601, + "learning_rate": 0.00015971787842516442, + "loss": 0.2866, + "step": 15650 + }, + { + "epoch": 0.6046565504459631, + "grad_norm": 2.6437489986419678, + "learning_rate": 0.00015969213740556264, + "loss": 0.2853, + "step": 15660 + }, + { + "epoch": 0.60504266573999, + "grad_norm": 0.6996239423751831, + "learning_rate": 0.00015966639638596085, + "loss": 0.2947, + "step": 15670 + }, + { + "epoch": 0.6054287810340168, + "grad_norm": 0.9377492070198059, + "learning_rate": 0.00015964065536635906, + "loss": 0.5502, + "step": 15680 + }, + { + "epoch": 0.6058148963280435, + "grad_norm": 0.3067781627178192, + "learning_rate": 0.00015961491434675728, + "loss": 0.2916, + "step": 15690 + }, + { + "epoch": 0.6062010116220703, + "grad_norm": 1.6191383600234985, + "learning_rate": 0.00015958917332715552, + "loss": 0.2536, + "step": 15700 + }, + { + "epoch": 0.6065871269160972, + "grad_norm": 
0.5139639973640442, + "learning_rate": 0.0001595634323075537, + "loss": 0.2809, + "step": 15710 + }, + { + "epoch": 0.6069732422101239, + "grad_norm": 1.6476198434829712, + "learning_rate": 0.00015953769128795192, + "loss": 0.2859, + "step": 15720 + }, + { + "epoch": 0.6073593575041507, + "grad_norm": 3.895970106124878, + "learning_rate": 0.00015951195026835013, + "loss": 0.5254, + "step": 15730 + }, + { + "epoch": 0.6077454727981776, + "grad_norm": 1.1022089719772339, + "learning_rate": 0.00015948620924874834, + "loss": 0.3254, + "step": 15740 + }, + { + "epoch": 0.6081315880922044, + "grad_norm": 1.3811163902282715, + "learning_rate": 0.00015946046822914656, + "loss": 0.3259, + "step": 15750 + }, + { + "epoch": 0.6085177033862311, + "grad_norm": 1.0810881853103638, + "learning_rate": 0.00015943472720954477, + "loss": 0.3689, + "step": 15760 + }, + { + "epoch": 0.6089038186802579, + "grad_norm": 2.1972954273223877, + "learning_rate": 0.000159408986189943, + "loss": 0.3255, + "step": 15770 + }, + { + "epoch": 0.6092899339742848, + "grad_norm": 3.945215940475464, + "learning_rate": 0.0001593832451703412, + "loss": 0.4317, + "step": 15780 + }, + { + "epoch": 0.6096760492683115, + "grad_norm": 0.5246737599372864, + "learning_rate": 0.0001593575041507394, + "loss": 0.5895, + "step": 15790 + }, + { + "epoch": 0.6100621645623383, + "grad_norm": 0.0722908228635788, + "learning_rate": 0.00015933176313113762, + "loss": 0.2078, + "step": 15800 + }, + { + "epoch": 0.6104482798563651, + "grad_norm": 1.888529658317566, + "learning_rate": 0.00015930602211153584, + "loss": 0.3629, + "step": 15810 + }, + { + "epoch": 0.610834395150392, + "grad_norm": 0.7190912365913391, + "learning_rate": 0.00015928028109193408, + "loss": 0.4715, + "step": 15820 + }, + { + "epoch": 0.6112205104444187, + "grad_norm": 1.1624583005905151, + "learning_rate": 0.00015925454007233226, + "loss": 0.3678, + "step": 15830 + }, + { + "epoch": 0.6116066257384455, + "grad_norm": 1.4570399522781372, + "learning_rate": 0.0001592287990527305, + "loss": 0.3008, + "step": 15840 + }, + { + "epoch": 0.6119927410324723, + "grad_norm": 0.45386505126953125, + "learning_rate": 0.0001592030580331287, + "loss": 0.2919, + "step": 15850 + }, + { + "epoch": 0.6123788563264991, + "grad_norm": 1.9031087160110474, + "learning_rate": 0.0001591773170135269, + "loss": 0.4336, + "step": 15860 + }, + { + "epoch": 0.6127649716205259, + "grad_norm": 0.9156181216239929, + "learning_rate": 0.00015915157599392512, + "loss": 0.2451, + "step": 15870 + }, + { + "epoch": 0.6131510869145527, + "grad_norm": 1.8031158447265625, + "learning_rate": 0.00015912583497432333, + "loss": 0.259, + "step": 15880 + }, + { + "epoch": 0.6135372022085794, + "grad_norm": 1.4521692991256714, + "learning_rate": 0.00015910009395472157, + "loss": 0.279, + "step": 15890 + }, + { + "epoch": 0.6139233175026063, + "grad_norm": 1.5064165592193604, + "learning_rate": 0.00015907435293511976, + "loss": 0.34, + "step": 15900 + }, + { + "epoch": 0.6143094327966331, + "grad_norm": 0.588637113571167, + "learning_rate": 0.000159048611915518, + "loss": 0.4886, + "step": 15910 + }, + { + "epoch": 0.6146955480906598, + "grad_norm": 0.3159797191619873, + "learning_rate": 0.00015902287089591618, + "loss": 0.3265, + "step": 15920 + }, + { + "epoch": 0.6150816633846866, + "grad_norm": 3.3988165855407715, + "learning_rate": 0.0001589971298763144, + "loss": 0.4917, + "step": 15930 + }, + { + "epoch": 0.6154677786787135, + "grad_norm": 0.5139709711074829, + "learning_rate": 0.00015897138885671264, + "loss": 
0.2175, + "step": 15940 + }, + { + "epoch": 0.6158538939727403, + "grad_norm": 3.6877944469451904, + "learning_rate": 0.00015894564783711082, + "loss": 0.4674, + "step": 15950 + }, + { + "epoch": 0.616240009266767, + "grad_norm": 1.6468040943145752, + "learning_rate": 0.00015891990681750906, + "loss": 0.4375, + "step": 15960 + }, + { + "epoch": 0.6166261245607938, + "grad_norm": 0.47542962431907654, + "learning_rate": 0.00015889416579790725, + "loss": 0.3593, + "step": 15970 + }, + { + "epoch": 0.6170122398548207, + "grad_norm": 2.210597038269043, + "learning_rate": 0.0001588684247783055, + "loss": 0.3356, + "step": 15980 + }, + { + "epoch": 0.6173983551488474, + "grad_norm": 2.0030908584594727, + "learning_rate": 0.00015884268375870368, + "loss": 0.3367, + "step": 15990 + }, + { + "epoch": 0.6177844704428742, + "grad_norm": 3.438887119293213, + "learning_rate": 0.0001588169427391019, + "loss": 0.4583, + "step": 16000 + }, + { + "epoch": 0.6181705857369011, + "grad_norm": 2.88147234916687, + "learning_rate": 0.00015879120171950013, + "loss": 0.3837, + "step": 16010 + }, + { + "epoch": 0.6185567010309279, + "grad_norm": 0.9327366948127747, + "learning_rate": 0.00015876546069989832, + "loss": 0.236, + "step": 16020 + }, + { + "epoch": 0.6189428163249546, + "grad_norm": 2.205355405807495, + "learning_rate": 0.00015873971968029656, + "loss": 0.2957, + "step": 16030 + }, + { + "epoch": 0.6193289316189814, + "grad_norm": 2.3861300945281982, + "learning_rate": 0.00015871397866069474, + "loss": 0.427, + "step": 16040 + }, + { + "epoch": 0.6197150469130083, + "grad_norm": 0.3276061415672302, + "learning_rate": 0.00015868823764109298, + "loss": 0.2301, + "step": 16050 + }, + { + "epoch": 0.620101162207035, + "grad_norm": 0.6325292587280273, + "learning_rate": 0.00015866249662149117, + "loss": 0.3915, + "step": 16060 + }, + { + "epoch": 0.6204872775010618, + "grad_norm": 1.1546003818511963, + "learning_rate": 0.0001586367556018894, + "loss": 0.4081, + "step": 16070 + }, + { + "epoch": 0.6208733927950886, + "grad_norm": 1.7260868549346924, + "learning_rate": 0.00015861101458228762, + "loss": 0.4642, + "step": 16080 + }, + { + "epoch": 0.6212595080891155, + "grad_norm": 0.30561816692352295, + "learning_rate": 0.0001585852735626858, + "loss": 0.2065, + "step": 16090 + }, + { + "epoch": 0.6216456233831422, + "grad_norm": 2.440007448196411, + "learning_rate": 0.00015855953254308405, + "loss": 0.3851, + "step": 16100 + }, + { + "epoch": 0.622031738677169, + "grad_norm": 4.06764554977417, + "learning_rate": 0.00015853379152348224, + "loss": 0.3796, + "step": 16110 + }, + { + "epoch": 0.6224178539711958, + "grad_norm": 4.970936298370361, + "learning_rate": 0.00015850805050388048, + "loss": 0.3393, + "step": 16120 + }, + { + "epoch": 0.6228039692652226, + "grad_norm": 2.0721356868743896, + "learning_rate": 0.0001584823094842787, + "loss": 0.2991, + "step": 16130 + }, + { + "epoch": 0.6231900845592494, + "grad_norm": 1.8130602836608887, + "learning_rate": 0.0001584565684646769, + "loss": 0.3132, + "step": 16140 + }, + { + "epoch": 0.6235761998532762, + "grad_norm": 1.1093038320541382, + "learning_rate": 0.00015843082744507512, + "loss": 0.3144, + "step": 16150 + }, + { + "epoch": 0.6239623151473029, + "grad_norm": 2.227937936782837, + "learning_rate": 0.0001584050864254733, + "loss": 0.4955, + "step": 16160 + }, + { + "epoch": 0.6243484304413298, + "grad_norm": 1.2541782855987549, + "learning_rate": 0.00015837934540587154, + "loss": 0.225, + "step": 16170 + }, + { + "epoch": 0.6247345457353566, + "grad_norm": 
1.8113441467285156, + "learning_rate": 0.00015835360438626973, + "loss": 0.5393, + "step": 16180 + }, + { + "epoch": 0.6251206610293834, + "grad_norm": 2.198061466217041, + "learning_rate": 0.00015832786336666797, + "loss": 0.2225, + "step": 16190 + }, + { + "epoch": 0.6255067763234101, + "grad_norm": 2.4241714477539062, + "learning_rate": 0.00015830212234706618, + "loss": 0.3294, + "step": 16200 + }, + { + "epoch": 0.625892891617437, + "grad_norm": 3.0632903575897217, + "learning_rate": 0.0001582763813274644, + "loss": 0.3776, + "step": 16210 + }, + { + "epoch": 0.6262790069114638, + "grad_norm": 1.9433149099349976, + "learning_rate": 0.0001582506403078626, + "loss": 0.2964, + "step": 16220 + }, + { + "epoch": 0.6266651222054905, + "grad_norm": 1.6328935623168945, + "learning_rate": 0.0001582248992882608, + "loss": 0.2169, + "step": 16230 + }, + { + "epoch": 0.6270512374995174, + "grad_norm": 0.926477313041687, + "learning_rate": 0.00015819915826865904, + "loss": 0.271, + "step": 16240 + }, + { + "epoch": 0.6274373527935442, + "grad_norm": 3.4526686668395996, + "learning_rate": 0.00015817341724905722, + "loss": 0.1836, + "step": 16250 + }, + { + "epoch": 0.627823468087571, + "grad_norm": 0.8149943947792053, + "learning_rate": 0.00015814767622945546, + "loss": 0.2494, + "step": 16260 + }, + { + "epoch": 0.6282095833815977, + "grad_norm": 0.8609616756439209, + "learning_rate": 0.00015812193520985368, + "loss": 0.279, + "step": 16270 + }, + { + "epoch": 0.6285956986756246, + "grad_norm": 0.9432594180107117, + "learning_rate": 0.0001580961941902519, + "loss": 0.3485, + "step": 16280 + }, + { + "epoch": 0.6289818139696514, + "grad_norm": 2.8844246864318848, + "learning_rate": 0.0001580704531706501, + "loss": 0.6163, + "step": 16290 + }, + { + "epoch": 0.6293679292636781, + "grad_norm": 3.4325804710388184, + "learning_rate": 0.0001580447121510483, + "loss": 0.3392, + "step": 16300 + }, + { + "epoch": 0.6297540445577049, + "grad_norm": 0.5320155620574951, + "learning_rate": 0.00015801897113144653, + "loss": 0.4664, + "step": 16310 + }, + { + "epoch": 0.6301401598517318, + "grad_norm": 1.075914740562439, + "learning_rate": 0.00015799323011184474, + "loss": 0.3023, + "step": 16320 + }, + { + "epoch": 0.6305262751457585, + "grad_norm": 1.3115136623382568, + "learning_rate": 0.00015796748909224296, + "loss": 0.1741, + "step": 16330 + }, + { + "epoch": 0.6309123904397853, + "grad_norm": 0.18413056433200836, + "learning_rate": 0.00015794174807264117, + "loss": 0.1966, + "step": 16340 + }, + { + "epoch": 0.6312985057338121, + "grad_norm": 1.8707069158554077, + "learning_rate": 0.00015791600705303938, + "loss": 0.3175, + "step": 16350 + }, + { + "epoch": 0.631684621027839, + "grad_norm": 2.2436699867248535, + "learning_rate": 0.0001578902660334376, + "loss": 0.2619, + "step": 16360 + }, + { + "epoch": 0.6320707363218657, + "grad_norm": 2.6100945472717285, + "learning_rate": 0.00015786452501383578, + "loss": 0.2874, + "step": 16370 + }, + { + "epoch": 0.6324568516158925, + "grad_norm": 1.455538034439087, + "learning_rate": 0.00015783878399423402, + "loss": 0.3956, + "step": 16380 + }, + { + "epoch": 0.6328429669099193, + "grad_norm": 1.1950361728668213, + "learning_rate": 0.00015781304297463224, + "loss": 0.3406, + "step": 16390 + }, + { + "epoch": 0.6332290822039461, + "grad_norm": 0.6905789971351624, + "learning_rate": 0.00015778730195503045, + "loss": 0.2788, + "step": 16400 + }, + { + "epoch": 0.6336151974979729, + "grad_norm": 1.8803350925445557, + "learning_rate": 0.00015776156093542866, + "loss": 
0.5509, + "step": 16410 + }, + { + "epoch": 0.6340013127919997, + "grad_norm": 4.088913440704346, + "learning_rate": 0.00015773581991582688, + "loss": 0.5238, + "step": 16420 + }, + { + "epoch": 0.6343874280860264, + "grad_norm": 2.9464988708496094, + "learning_rate": 0.0001577100788962251, + "loss": 0.4721, + "step": 16430 + }, + { + "epoch": 0.6347735433800533, + "grad_norm": 2.005481719970703, + "learning_rate": 0.0001576843378766233, + "loss": 0.323, + "step": 16440 + }, + { + "epoch": 0.6351596586740801, + "grad_norm": 0.1693512350320816, + "learning_rate": 0.00015765859685702152, + "loss": 0.3459, + "step": 16450 + }, + { + "epoch": 0.6355457739681069, + "grad_norm": 1.6552183628082275, + "learning_rate": 0.00015763285583741973, + "loss": 0.4299, + "step": 16460 + }, + { + "epoch": 0.6359318892621336, + "grad_norm": 0.8498923182487488, + "learning_rate": 0.00015760711481781794, + "loss": 0.3665, + "step": 16470 + }, + { + "epoch": 0.6363180045561605, + "grad_norm": 1.098840594291687, + "learning_rate": 0.00015758137379821616, + "loss": 0.318, + "step": 16480 + }, + { + "epoch": 0.6367041198501873, + "grad_norm": 2.69606876373291, + "learning_rate": 0.00015755563277861437, + "loss": 0.3566, + "step": 16490 + }, + { + "epoch": 0.637090235144214, + "grad_norm": 1.4099249839782715, + "learning_rate": 0.00015752989175901258, + "loss": 0.2658, + "step": 16500 + }, + { + "epoch": 0.6374763504382409, + "grad_norm": 0.10336513817310333, + "learning_rate": 0.0001575041507394108, + "loss": 0.4608, + "step": 16510 + }, + { + "epoch": 0.6378624657322677, + "grad_norm": 2.224609136581421, + "learning_rate": 0.000157478409719809, + "loss": 0.2875, + "step": 16520 + }, + { + "epoch": 0.6382485810262944, + "grad_norm": 1.3182893991470337, + "learning_rate": 0.00015745266870020722, + "loss": 0.2972, + "step": 16530 + }, + { + "epoch": 0.6386346963203212, + "grad_norm": 0.8028392195701599, + "learning_rate": 0.00015742692768060544, + "loss": 0.3553, + "step": 16540 + }, + { + "epoch": 0.6390208116143481, + "grad_norm": 2.6714046001434326, + "learning_rate": 0.00015740118666100365, + "loss": 0.3965, + "step": 16550 + }, + { + "epoch": 0.6394069269083749, + "grad_norm": 0.6173273921012878, + "learning_rate": 0.00015737544564140186, + "loss": 0.4278, + "step": 16560 + }, + { + "epoch": 0.6397930422024016, + "grad_norm": 0.9547831416130066, + "learning_rate": 0.00015734970462180008, + "loss": 0.3854, + "step": 16570 + }, + { + "epoch": 0.6401791574964284, + "grad_norm": 1.1336010694503784, + "learning_rate": 0.0001573239636021983, + "loss": 0.3505, + "step": 16580 + }, + { + "epoch": 0.6405652727904553, + "grad_norm": 4.911902904510498, + "learning_rate": 0.0001572982225825965, + "loss": 0.6624, + "step": 16590 + }, + { + "epoch": 0.640951388084482, + "grad_norm": 2.472303628921509, + "learning_rate": 0.00015727248156299472, + "loss": 0.4479, + "step": 16600 + }, + { + "epoch": 0.6413375033785088, + "grad_norm": 1.7077019214630127, + "learning_rate": 0.00015724674054339293, + "loss": 0.3985, + "step": 16610 + }, + { + "epoch": 0.6417236186725356, + "grad_norm": 4.096541404724121, + "learning_rate": 0.00015722099952379114, + "loss": 0.4763, + "step": 16620 + }, + { + "epoch": 0.6421097339665625, + "grad_norm": 1.5275769233703613, + "learning_rate": 0.00015719525850418936, + "loss": 0.3792, + "step": 16630 + }, + { + "epoch": 0.6424958492605892, + "grad_norm": 0.33548179268836975, + "learning_rate": 0.00015716951748458757, + "loss": 0.3276, + "step": 16640 + }, + { + "epoch": 0.642881964554616, + 
"grad_norm": 4.142831325531006, + "learning_rate": 0.00015714377646498578, + "loss": 0.4906, + "step": 16650 + }, + { + "epoch": 0.6432680798486428, + "grad_norm": 1.5129270553588867, + "learning_rate": 0.000157118035445384, + "loss": 0.3077, + "step": 16660 + }, + { + "epoch": 0.6436541951426696, + "grad_norm": 2.0287647247314453, + "learning_rate": 0.0001570922944257822, + "loss": 0.43, + "step": 16670 + }, + { + "epoch": 0.6440403104366964, + "grad_norm": 2.1278579235076904, + "learning_rate": 0.00015706655340618042, + "loss": 0.3822, + "step": 16680 + }, + { + "epoch": 0.6444264257307232, + "grad_norm": 0.7621383666992188, + "learning_rate": 0.00015704081238657864, + "loss": 0.2556, + "step": 16690 + }, + { + "epoch": 0.6448125410247499, + "grad_norm": 1.888422966003418, + "learning_rate": 0.00015701507136697685, + "loss": 0.4687, + "step": 16700 + }, + { + "epoch": 0.6451986563187768, + "grad_norm": 2.09405517578125, + "learning_rate": 0.00015698933034737506, + "loss": 0.4447, + "step": 16710 + }, + { + "epoch": 0.6455847716128036, + "grad_norm": 0.8639073967933655, + "learning_rate": 0.00015696358932777328, + "loss": 0.224, + "step": 16720 + }, + { + "epoch": 0.6459708869068304, + "grad_norm": 1.1472411155700684, + "learning_rate": 0.0001569378483081715, + "loss": 0.315, + "step": 16730 + }, + { + "epoch": 0.6463570022008571, + "grad_norm": 2.4987192153930664, + "learning_rate": 0.0001569121072885697, + "loss": 0.3707, + "step": 16740 + }, + { + "epoch": 0.646743117494884, + "grad_norm": 1.661458134651184, + "learning_rate": 0.00015688636626896792, + "loss": 0.4027, + "step": 16750 + }, + { + "epoch": 0.6471292327889108, + "grad_norm": 1.5494720935821533, + "learning_rate": 0.00015686062524936613, + "loss": 0.3737, + "step": 16760 + }, + { + "epoch": 0.6475153480829375, + "grad_norm": 0.3376433253288269, + "learning_rate": 0.00015683488422976437, + "loss": 0.3468, + "step": 16770 + }, + { + "epoch": 0.6479014633769644, + "grad_norm": 0.6496528387069702, + "learning_rate": 0.00015680914321016256, + "loss": 0.2857, + "step": 16780 + }, + { + "epoch": 0.6482875786709912, + "grad_norm": 1.957741618156433, + "learning_rate": 0.00015678340219056077, + "loss": 0.5089, + "step": 16790 + }, + { + "epoch": 0.648673693965018, + "grad_norm": 3.980466842651367, + "learning_rate": 0.00015675766117095898, + "loss": 0.3558, + "step": 16800 + }, + { + "epoch": 0.6490598092590447, + "grad_norm": 3.2516696453094482, + "learning_rate": 0.0001567319201513572, + "loss": 0.6214, + "step": 16810 + }, + { + "epoch": 0.6494459245530716, + "grad_norm": 0.6847260594367981, + "learning_rate": 0.00015670617913175544, + "loss": 0.3681, + "step": 16820 + }, + { + "epoch": 0.6498320398470984, + "grad_norm": 3.0918118953704834, + "learning_rate": 0.00015668043811215362, + "loss": 0.3608, + "step": 16830 + }, + { + "epoch": 0.6502181551411251, + "grad_norm": 1.1295204162597656, + "learning_rate": 0.00015665469709255186, + "loss": 0.3189, + "step": 16840 + }, + { + "epoch": 0.6506042704351519, + "grad_norm": 1.803222417831421, + "learning_rate": 0.00015662895607295005, + "loss": 0.3138, + "step": 16850 + }, + { + "epoch": 0.6509903857291788, + "grad_norm": 3.157122850418091, + "learning_rate": 0.00015660321505334826, + "loss": 0.4342, + "step": 16860 + }, + { + "epoch": 0.6513765010232055, + "grad_norm": 2.6584184169769287, + "learning_rate": 0.00015657747403374648, + "loss": 0.211, + "step": 16870 + }, + { + "epoch": 0.6517626163172323, + "grad_norm": 0.763903021812439, + "learning_rate": 0.0001565517330141447, + 
"loss": 0.2873, + "step": 16880 + }, + { + "epoch": 0.6521487316112591, + "grad_norm": 2.5033602714538574, + "learning_rate": 0.00015652599199454293, + "loss": 0.4475, + "step": 16890 + }, + { + "epoch": 0.652534846905286, + "grad_norm": 2.088690757751465, + "learning_rate": 0.00015650025097494112, + "loss": 0.3022, + "step": 16900 + }, + { + "epoch": 0.6529209621993127, + "grad_norm": 1.596064567565918, + "learning_rate": 0.00015647450995533936, + "loss": 0.3771, + "step": 16910 + }, + { + "epoch": 0.6533070774933395, + "grad_norm": 1.2658660411834717, + "learning_rate": 0.00015644876893573754, + "loss": 0.4793, + "step": 16920 + }, + { + "epoch": 0.6536931927873663, + "grad_norm": 1.5343844890594482, + "learning_rate": 0.00015642302791613576, + "loss": 0.5026, + "step": 16930 + }, + { + "epoch": 0.6540793080813931, + "grad_norm": 0.4736674129962921, + "learning_rate": 0.000156397286896534, + "loss": 0.2269, + "step": 16940 + }, + { + "epoch": 0.6544654233754199, + "grad_norm": 0.05510171130299568, + "learning_rate": 0.00015637154587693218, + "loss": 0.2398, + "step": 16950 + }, + { + "epoch": 0.6548515386694467, + "grad_norm": 0.641941249370575, + "learning_rate": 0.00015634580485733042, + "loss": 0.3862, + "step": 16960 + }, + { + "epoch": 0.6552376539634734, + "grad_norm": 1.5418890714645386, + "learning_rate": 0.0001563200638377286, + "loss": 0.2688, + "step": 16970 + }, + { + "epoch": 0.6556237692575003, + "grad_norm": 3.46284818649292, + "learning_rate": 0.00015629432281812685, + "loss": 0.5199, + "step": 16980 + }, + { + "epoch": 0.6560098845515271, + "grad_norm": 0.3225530683994293, + "learning_rate": 0.00015626858179852504, + "loss": 0.5035, + "step": 16990 + }, + { + "epoch": 0.6563959998455539, + "grad_norm": 0.9385218620300293, + "learning_rate": 0.00015624284077892325, + "loss": 0.2618, + "step": 17000 + }, + { + "epoch": 0.6567821151395806, + "grad_norm": 0.8849124312400818, + "learning_rate": 0.0001562170997593215, + "loss": 0.3711, + "step": 17010 + }, + { + "epoch": 0.6571682304336075, + "grad_norm": 2.2706375122070312, + "learning_rate": 0.00015619135873971968, + "loss": 0.2666, + "step": 17020 + }, + { + "epoch": 0.6575543457276343, + "grad_norm": 2.1923744678497314, + "learning_rate": 0.00015616561772011792, + "loss": 0.2038, + "step": 17030 + }, + { + "epoch": 0.657940461021661, + "grad_norm": 1.2356051206588745, + "learning_rate": 0.0001561398767005161, + "loss": 0.4103, + "step": 17040 + }, + { + "epoch": 0.6583265763156879, + "grad_norm": 1.583095669746399, + "learning_rate": 0.00015611413568091434, + "loss": 0.4164, + "step": 17050 + }, + { + "epoch": 0.6587126916097147, + "grad_norm": 1.0823155641555786, + "learning_rate": 0.00015608839466131253, + "loss": 0.3898, + "step": 17060 + }, + { + "epoch": 0.6590988069037415, + "grad_norm": 1.9568531513214111, + "learning_rate": 0.00015606265364171074, + "loss": 0.3553, + "step": 17070 + }, + { + "epoch": 0.6594849221977682, + "grad_norm": 3.576362371444702, + "learning_rate": 0.00015603691262210898, + "loss": 0.2693, + "step": 17080 + }, + { + "epoch": 0.6598710374917951, + "grad_norm": 0.2432270646095276, + "learning_rate": 0.00015601117160250717, + "loss": 0.2993, + "step": 17090 + }, + { + "epoch": 0.6602571527858219, + "grad_norm": 1.5935213565826416, + "learning_rate": 0.0001559854305829054, + "loss": 0.3295, + "step": 17100 + }, + { + "epoch": 0.6606432680798486, + "grad_norm": 0.09780561178922653, + "learning_rate": 0.0001559596895633036, + "loss": 0.2891, + "step": 17110 + }, + { + "epoch": 0.6610293833738754, 
+ "grad_norm": 0.5332283973693848, + "learning_rate": 0.00015593394854370184, + "loss": 0.372, + "step": 17120 + }, + { + "epoch": 0.6614154986679023, + "grad_norm": 1.1921123266220093, + "learning_rate": 0.00015590820752410005, + "loss": 0.3155, + "step": 17130 + }, + { + "epoch": 0.661801613961929, + "grad_norm": 0.35267120599746704, + "learning_rate": 0.00015588246650449823, + "loss": 0.3795, + "step": 17140 + }, + { + "epoch": 0.6621877292559558, + "grad_norm": 0.4876207411289215, + "learning_rate": 0.00015585672548489648, + "loss": 0.2717, + "step": 17150 + }, + { + "epoch": 0.6625738445499826, + "grad_norm": 0.9866208434104919, + "learning_rate": 0.00015583098446529466, + "loss": 0.4121, + "step": 17160 + }, + { + "epoch": 0.6629599598440095, + "grad_norm": 3.0264835357666016, + "learning_rate": 0.0001558052434456929, + "loss": 0.5356, + "step": 17170 + }, + { + "epoch": 0.6633460751380362, + "grad_norm": 2.4786953926086426, + "learning_rate": 0.0001557795024260911, + "loss": 0.2086, + "step": 17180 + }, + { + "epoch": 0.663732190432063, + "grad_norm": 2.3706555366516113, + "learning_rate": 0.00015575376140648933, + "loss": 0.5224, + "step": 17190 + }, + { + "epoch": 0.6641183057260898, + "grad_norm": 2.6375296115875244, + "learning_rate": 0.00015572802038688754, + "loss": 0.3625, + "step": 17200 + }, + { + "epoch": 0.6645044210201166, + "grad_norm": 0.5592703819274902, + "learning_rate": 0.00015570227936728573, + "loss": 0.3831, + "step": 17210 + }, + { + "epoch": 0.6648905363141434, + "grad_norm": 2.309683322906494, + "learning_rate": 0.00015567653834768397, + "loss": 0.4039, + "step": 17220 + }, + { + "epoch": 0.6652766516081702, + "grad_norm": 2.3134100437164307, + "learning_rate": 0.00015565079732808215, + "loss": 0.1904, + "step": 17230 + }, + { + "epoch": 0.665662766902197, + "grad_norm": 2.232910633087158, + "learning_rate": 0.0001556250563084804, + "loss": 0.34, + "step": 17240 + }, + { + "epoch": 0.6660488821962238, + "grad_norm": 0.4798373579978943, + "learning_rate": 0.0001555993152888786, + "loss": 0.3143, + "step": 17250 + }, + { + "epoch": 0.6664349974902506, + "grad_norm": 2.071753740310669, + "learning_rate": 0.00015557357426927682, + "loss": 0.2645, + "step": 17260 + }, + { + "epoch": 0.6668211127842774, + "grad_norm": 2.1930956840515137, + "learning_rate": 0.00015554783324967504, + "loss": 0.4144, + "step": 17270 + }, + { + "epoch": 0.6672072280783041, + "grad_norm": 1.7874137163162231, + "learning_rate": 0.00015552209223007325, + "loss": 0.273, + "step": 17280 + }, + { + "epoch": 0.667593343372331, + "grad_norm": 1.264596939086914, + "learning_rate": 0.00015549635121047146, + "loss": 0.4186, + "step": 17290 + }, + { + "epoch": 0.6679794586663578, + "grad_norm": 0.5612212419509888, + "learning_rate": 0.00015547061019086965, + "loss": 0.2802, + "step": 17300 + }, + { + "epoch": 0.6683655739603845, + "grad_norm": 1.3782585859298706, + "learning_rate": 0.0001554448691712679, + "loss": 0.3712, + "step": 17310 + }, + { + "epoch": 0.6687516892544114, + "grad_norm": 1.5178605318069458, + "learning_rate": 0.0001554191281516661, + "loss": 0.1694, + "step": 17320 + }, + { + "epoch": 0.6691378045484382, + "grad_norm": 2.1221604347229004, + "learning_rate": 0.00015539338713206432, + "loss": 0.4418, + "step": 17330 + }, + { + "epoch": 0.669523919842465, + "grad_norm": 1.570734977722168, + "learning_rate": 0.00015536764611246253, + "loss": 0.4037, + "step": 17340 + }, + { + "epoch": 0.6699100351364917, + "grad_norm": 0.6928157806396484, + "learning_rate": 0.00015534190509286074, 
+ "loss": 0.5293, + "step": 17350 + }, + { + "epoch": 0.6702961504305186, + "grad_norm": 0.8526401519775391, + "learning_rate": 0.00015531616407325895, + "loss": 0.348, + "step": 17360 + }, + { + "epoch": 0.6706822657245454, + "grad_norm": 1.7482202053070068, + "learning_rate": 0.00015529042305365714, + "loss": 0.352, + "step": 17370 + }, + { + "epoch": 0.6710683810185721, + "grad_norm": 1.724870204925537, + "learning_rate": 0.00015526468203405538, + "loss": 0.3589, + "step": 17380 + }, + { + "epoch": 0.6714544963125989, + "grad_norm": 3.125180721282959, + "learning_rate": 0.0001552389410144536, + "loss": 0.3063, + "step": 17390 + }, + { + "epoch": 0.6718406116066258, + "grad_norm": 2.0817360877990723, + "learning_rate": 0.0001552131999948518, + "loss": 0.2217, + "step": 17400 + }, + { + "epoch": 0.6722267269006525, + "grad_norm": 0.12367000430822372, + "learning_rate": 0.00015518745897525002, + "loss": 0.1691, + "step": 17410 + }, + { + "epoch": 0.6726128421946793, + "grad_norm": 0.23093344271183014, + "learning_rate": 0.00015516171795564823, + "loss": 0.2496, + "step": 17420 + }, + { + "epoch": 0.6729989574887061, + "grad_norm": 3.1588997840881348, + "learning_rate": 0.00015513597693604645, + "loss": 0.2868, + "step": 17430 + }, + { + "epoch": 0.673385072782733, + "grad_norm": 1.471999168395996, + "learning_rate": 0.00015511023591644466, + "loss": 0.2785, + "step": 17440 + }, + { + "epoch": 0.6737711880767597, + "grad_norm": 0.4500691294670105, + "learning_rate": 0.00015508449489684287, + "loss": 0.3218, + "step": 17450 + }, + { + "epoch": 0.6741573033707865, + "grad_norm": 2.65533709526062, + "learning_rate": 0.0001550587538772411, + "loss": 0.3194, + "step": 17460 + }, + { + "epoch": 0.6745434186648133, + "grad_norm": 0.45398348569869995, + "learning_rate": 0.0001550330128576393, + "loss": 0.199, + "step": 17470 + }, + { + "epoch": 0.6749295339588401, + "grad_norm": 0.21518200635910034, + "learning_rate": 0.00015500727183803751, + "loss": 0.3043, + "step": 17480 + }, + { + "epoch": 0.6753156492528669, + "grad_norm": 0.13117246329784393, + "learning_rate": 0.00015498153081843573, + "loss": 0.1872, + "step": 17490 + }, + { + "epoch": 0.6757017645468937, + "grad_norm": 0.4857695996761322, + "learning_rate": 0.00015495578979883394, + "loss": 0.5992, + "step": 17500 + }, + { + "epoch": 0.6760878798409204, + "grad_norm": 2.4992752075195312, + "learning_rate": 0.00015493004877923215, + "loss": 0.5057, + "step": 17510 + }, + { + "epoch": 0.6764739951349473, + "grad_norm": 1.9614732265472412, + "learning_rate": 0.00015490430775963037, + "loss": 0.3169, + "step": 17520 + }, + { + "epoch": 0.6768601104289741, + "grad_norm": 0.14168275892734528, + "learning_rate": 0.00015487856674002858, + "loss": 0.271, + "step": 17530 + }, + { + "epoch": 0.6772462257230009, + "grad_norm": 4.064804553985596, + "learning_rate": 0.0001548528257204268, + "loss": 0.3316, + "step": 17540 + }, + { + "epoch": 0.6776323410170277, + "grad_norm": 3.2959964275360107, + "learning_rate": 0.000154827084700825, + "loss": 0.5148, + "step": 17550 + }, + { + "epoch": 0.6780184563110545, + "grad_norm": 0.6234021186828613, + "learning_rate": 0.00015480134368122322, + "loss": 0.362, + "step": 17560 + }, + { + "epoch": 0.6784045716050813, + "grad_norm": 0.642573356628418, + "learning_rate": 0.00015477560266162143, + "loss": 0.2982, + "step": 17570 + }, + { + "epoch": 0.678790686899108, + "grad_norm": 1.098667025566101, + "learning_rate": 0.00015474986164201965, + "loss": 0.1875, + "step": 17580 + }, + { + "epoch": 0.6791768021931349, 
+ "grad_norm": 2.378192186355591, + "learning_rate": 0.00015472412062241786, + "loss": 0.2533, + "step": 17590 + }, + { + "epoch": 0.6795629174871617, + "grad_norm": 1.1783161163330078, + "learning_rate": 0.00015469837960281607, + "loss": 0.402, + "step": 17600 + }, + { + "epoch": 0.6799490327811885, + "grad_norm": 0.1861846148967743, + "learning_rate": 0.0001546726385832143, + "loss": 0.2481, + "step": 17610 + }, + { + "epoch": 0.6803351480752152, + "grad_norm": 0.5785403847694397, + "learning_rate": 0.0001546468975636125, + "loss": 0.375, + "step": 17620 + }, + { + "epoch": 0.6807212633692421, + "grad_norm": 1.9201544523239136, + "learning_rate": 0.00015462115654401071, + "loss": 0.2757, + "step": 17630 + }, + { + "epoch": 0.6811073786632689, + "grad_norm": 2.461735963821411, + "learning_rate": 0.00015459541552440893, + "loss": 0.2193, + "step": 17640 + }, + { + "epoch": 0.6814934939572956, + "grad_norm": 2.007638454437256, + "learning_rate": 0.00015456967450480714, + "loss": 0.3561, + "step": 17650 + }, + { + "epoch": 0.6818796092513224, + "grad_norm": 1.3581938743591309, + "learning_rate": 0.00015454393348520535, + "loss": 0.3736, + "step": 17660 + }, + { + "epoch": 0.6822657245453493, + "grad_norm": 0.5637246966362, + "learning_rate": 0.00015451819246560357, + "loss": 0.3116, + "step": 17670 + }, + { + "epoch": 0.682651839839376, + "grad_norm": 1.409740924835205, + "learning_rate": 0.00015449245144600178, + "loss": 0.3281, + "step": 17680 + }, + { + "epoch": 0.6830379551334028, + "grad_norm": 3.2064149379730225, + "learning_rate": 0.0001544667104264, + "loss": 0.4427, + "step": 17690 + }, + { + "epoch": 0.6834240704274296, + "grad_norm": 0.9369992613792419, + "learning_rate": 0.0001544409694067982, + "loss": 0.3424, + "step": 17700 + }, + { + "epoch": 0.6838101857214565, + "grad_norm": 2.4149889945983887, + "learning_rate": 0.00015441522838719642, + "loss": 0.4074, + "step": 17710 + }, + { + "epoch": 0.6841963010154832, + "grad_norm": 0.688360869884491, + "learning_rate": 0.00015438948736759463, + "loss": 0.2205, + "step": 17720 + }, + { + "epoch": 0.68458241630951, + "grad_norm": 2.1444098949432373, + "learning_rate": 0.00015436374634799285, + "loss": 0.5942, + "step": 17730 + }, + { + "epoch": 0.6849685316035368, + "grad_norm": 1.8053444623947144, + "learning_rate": 0.00015433800532839106, + "loss": 0.1665, + "step": 17740 + }, + { + "epoch": 0.6853546468975636, + "grad_norm": 3.5637879371643066, + "learning_rate": 0.0001543122643087893, + "loss": 0.4972, + "step": 17750 + }, + { + "epoch": 0.6857407621915904, + "grad_norm": 1.846845030784607, + "learning_rate": 0.0001542865232891875, + "loss": 0.4105, + "step": 17760 + }, + { + "epoch": 0.6861268774856172, + "grad_norm": 2.2459189891815186, + "learning_rate": 0.0001542607822695857, + "loss": 0.253, + "step": 17770 + }, + { + "epoch": 0.686512992779644, + "grad_norm": 2.3160414695739746, + "learning_rate": 0.00015423504124998391, + "loss": 0.1905, + "step": 17780 + }, + { + "epoch": 0.6868991080736708, + "grad_norm": 1.2804152965545654, + "learning_rate": 0.00015420930023038213, + "loss": 0.2283, + "step": 17790 + }, + { + "epoch": 0.6872852233676976, + "grad_norm": 1.7174758911132812, + "learning_rate": 0.00015418355921078034, + "loss": 0.49, + "step": 17800 + }, + { + "epoch": 0.6876713386617244, + "grad_norm": 3.057098627090454, + "learning_rate": 0.00015415781819117855, + "loss": 0.4398, + "step": 17810 + }, + { + "epoch": 0.6880574539557512, + "grad_norm": 0.9112808704376221, + "learning_rate": 0.0001541320771715768, + "loss": 
0.203, + "step": 17820 + }, + { + "epoch": 0.688443569249778, + "grad_norm": 2.899599313735962, + "learning_rate": 0.00015410633615197498, + "loss": 0.4212, + "step": 17830 + }, + { + "epoch": 0.6888296845438048, + "grad_norm": 1.8084157705307007, + "learning_rate": 0.0001540805951323732, + "loss": 0.4948, + "step": 17840 + }, + { + "epoch": 0.6892157998378315, + "grad_norm": 1.2151083946228027, + "learning_rate": 0.0001540548541127714, + "loss": 0.4254, + "step": 17850 + }, + { + "epoch": 0.6896019151318584, + "grad_norm": 1.20271897315979, + "learning_rate": 0.00015402911309316962, + "loss": 0.1753, + "step": 17860 + }, + { + "epoch": 0.6899880304258852, + "grad_norm": 0.7688419222831726, + "learning_rate": 0.00015400337207356783, + "loss": 0.4066, + "step": 17870 + }, + { + "epoch": 0.690374145719912, + "grad_norm": 0.8648087978363037, + "learning_rate": 0.00015397763105396605, + "loss": 0.5405, + "step": 17880 + }, + { + "epoch": 0.6907602610139387, + "grad_norm": 1.5501036643981934, + "learning_rate": 0.0001539518900343643, + "loss": 0.3761, + "step": 17890 + }, + { + "epoch": 0.6911463763079656, + "grad_norm": 0.5476267337799072, + "learning_rate": 0.00015392614901476247, + "loss": 0.2256, + "step": 17900 + }, + { + "epoch": 0.6915324916019924, + "grad_norm": 2.0248584747314453, + "learning_rate": 0.0001539004079951607, + "loss": 0.5449, + "step": 17910 + }, + { + "epoch": 0.6919186068960191, + "grad_norm": 1.688596248626709, + "learning_rate": 0.0001538746669755589, + "loss": 0.262, + "step": 17920 + }, + { + "epoch": 0.6923047221900459, + "grad_norm": 0.42646175622940063, + "learning_rate": 0.00015384892595595711, + "loss": 0.3714, + "step": 17930 + }, + { + "epoch": 0.6926908374840728, + "grad_norm": 0.9620506167411804, + "learning_rate": 0.00015382318493635535, + "loss": 0.272, + "step": 17940 + }, + { + "epoch": 0.6930769527780996, + "grad_norm": 1.6859287023544312, + "learning_rate": 0.00015379744391675354, + "loss": 0.3123, + "step": 17950 + }, + { + "epoch": 0.6934630680721263, + "grad_norm": 0.6281775832176208, + "learning_rate": 0.00015377170289715178, + "loss": 0.2808, + "step": 17960 + }, + { + "epoch": 0.6938491833661531, + "grad_norm": 3.756242036819458, + "learning_rate": 0.00015374596187754997, + "loss": 0.4971, + "step": 17970 + }, + { + "epoch": 0.69423529866018, + "grad_norm": 0.6022955775260925, + "learning_rate": 0.0001537202208579482, + "loss": 0.2918, + "step": 17980 + }, + { + "epoch": 0.6946214139542067, + "grad_norm": 0.6843704581260681, + "learning_rate": 0.0001536944798383464, + "loss": 0.476, + "step": 17990 + }, + { + "epoch": 0.6950075292482335, + "grad_norm": 1.0234850645065308, + "learning_rate": 0.0001536687388187446, + "loss": 0.2101, + "step": 18000 + }, + { + "epoch": 0.6953936445422603, + "grad_norm": 1.0228936672210693, + "learning_rate": 0.00015364299779914285, + "loss": 0.3958, + "step": 18010 + }, + { + "epoch": 0.6957797598362871, + "grad_norm": 1.1152328252792358, + "learning_rate": 0.00015361725677954103, + "loss": 0.2967, + "step": 18020 + }, + { + "epoch": 0.6961658751303139, + "grad_norm": 1.7190260887145996, + "learning_rate": 0.00015359151575993927, + "loss": 0.5281, + "step": 18030 + }, + { + "epoch": 0.6965519904243407, + "grad_norm": 0.6654171943664551, + "learning_rate": 0.00015356577474033746, + "loss": 0.3467, + "step": 18040 + }, + { + "epoch": 0.6969381057183675, + "grad_norm": 0.3305549621582031, + "learning_rate": 0.0001535400337207357, + "loss": 0.3576, + "step": 18050 + }, + { + "epoch": 0.6973242210123943, + "grad_norm": 
0.3116997480392456, + "learning_rate": 0.0001535142927011339, + "loss": 0.5372, + "step": 18060 + }, + { + "epoch": 0.6977103363064211, + "grad_norm": 0.4224954843521118, + "learning_rate": 0.0001534885516815321, + "loss": 0.3866, + "step": 18070 + }, + { + "epoch": 0.6980964516004479, + "grad_norm": 4.249162197113037, + "learning_rate": 0.00015346281066193034, + "loss": 0.4236, + "step": 18080 + }, + { + "epoch": 0.6984825668944747, + "grad_norm": 1.109113335609436, + "learning_rate": 0.00015343706964232853, + "loss": 0.2882, + "step": 18090 + }, + { + "epoch": 0.6988686821885015, + "grad_norm": 1.3546028137207031, + "learning_rate": 0.00015341132862272677, + "loss": 0.3432, + "step": 18100 + }, + { + "epoch": 0.6992547974825283, + "grad_norm": 2.943016290664673, + "learning_rate": 0.00015338558760312495, + "loss": 0.3287, + "step": 18110 + }, + { + "epoch": 0.699640912776555, + "grad_norm": 1.1259021759033203, + "learning_rate": 0.0001533598465835232, + "loss": 0.4613, + "step": 18120 + }, + { + "epoch": 0.7000270280705819, + "grad_norm": 1.4867910146713257, + "learning_rate": 0.0001533341055639214, + "loss": 0.4309, + "step": 18130 + }, + { + "epoch": 0.7004131433646087, + "grad_norm": 2.6913414001464844, + "learning_rate": 0.0001533083645443196, + "loss": 0.2154, + "step": 18140 + }, + { + "epoch": 0.7007992586586355, + "grad_norm": 1.495466947555542, + "learning_rate": 0.00015328262352471783, + "loss": 0.3207, + "step": 18150 + }, + { + "epoch": 0.7011853739526622, + "grad_norm": 1.023193120956421, + "learning_rate": 0.00015325688250511602, + "loss": 0.2067, + "step": 18160 + }, + { + "epoch": 0.7015714892466891, + "grad_norm": 1.603235125541687, + "learning_rate": 0.00015323114148551426, + "loss": 0.4577, + "step": 18170 + }, + { + "epoch": 0.7019576045407159, + "grad_norm": 0.5976241230964661, + "learning_rate": 0.00015320540046591245, + "loss": 0.2282, + "step": 18180 + }, + { + "epoch": 0.7023437198347426, + "grad_norm": 2.561659574508667, + "learning_rate": 0.0001531796594463107, + "loss": 0.4045, + "step": 18190 + }, + { + "epoch": 0.7027298351287694, + "grad_norm": 1.3893495798110962, + "learning_rate": 0.0001531539184267089, + "loss": 0.2419, + "step": 18200 + }, + { + "epoch": 0.7031159504227963, + "grad_norm": 0.7786352038383484, + "learning_rate": 0.00015312817740710709, + "loss": 0.1653, + "step": 18210 + }, + { + "epoch": 0.703502065716823, + "grad_norm": 0.6525956988334656, + "learning_rate": 0.00015310243638750533, + "loss": 0.5418, + "step": 18220 + }, + { + "epoch": 0.7038881810108498, + "grad_norm": 0.38933584094047546, + "learning_rate": 0.0001530766953679035, + "loss": 0.2952, + "step": 18230 + }, + { + "epoch": 0.7042742963048766, + "grad_norm": 2.0752692222595215, + "learning_rate": 0.00015305095434830175, + "loss": 0.211, + "step": 18240 + }, + { + "epoch": 0.7046604115989035, + "grad_norm": 0.9095730781555176, + "learning_rate": 0.00015302521332869997, + "loss": 0.2723, + "step": 18250 + }, + { + "epoch": 0.7050465268929302, + "grad_norm": 1.6840119361877441, + "learning_rate": 0.00015299947230909818, + "loss": 0.3362, + "step": 18260 + }, + { + "epoch": 0.705432642186957, + "grad_norm": 2.0353269577026367, + "learning_rate": 0.0001529737312894964, + "loss": 0.2407, + "step": 18270 + }, + { + "epoch": 0.7058187574809838, + "grad_norm": 3.0865590572357178, + "learning_rate": 0.00015294799026989458, + "loss": 0.3426, + "step": 18280 + }, + { + "epoch": 0.7062048727750107, + "grad_norm": 1.6488090753555298, + "learning_rate": 0.00015292224925029282, + "loss": 
0.4275, + "step": 18290 + }, + { + "epoch": 0.7065909880690374, + "grad_norm": 0.5494143962860107, + "learning_rate": 0.000152896508230691, + "loss": 0.412, + "step": 18300 + }, + { + "epoch": 0.7069771033630642, + "grad_norm": 3.111301898956299, + "learning_rate": 0.00015287076721108925, + "loss": 0.4615, + "step": 18310 + }, + { + "epoch": 0.707363218657091, + "grad_norm": 1.74229097366333, + "learning_rate": 0.00015284502619148746, + "loss": 0.3194, + "step": 18320 + }, + { + "epoch": 0.7077493339511178, + "grad_norm": 1.8455474376678467, + "learning_rate": 0.00015281928517188567, + "loss": 0.2817, + "step": 18330 + }, + { + "epoch": 0.7081354492451446, + "grad_norm": 1.778723120689392, + "learning_rate": 0.0001527935441522839, + "loss": 0.441, + "step": 18340 + }, + { + "epoch": 0.7085215645391714, + "grad_norm": 0.7885593771934509, + "learning_rate": 0.00015276780313268207, + "loss": 0.3121, + "step": 18350 + }, + { + "epoch": 0.7089076798331982, + "grad_norm": 1.9262609481811523, + "learning_rate": 0.0001527420621130803, + "loss": 0.3271, + "step": 18360 + }, + { + "epoch": 0.709293795127225, + "grad_norm": 0.10278096795082092, + "learning_rate": 0.0001527163210934785, + "loss": 0.2602, + "step": 18370 + }, + { + "epoch": 0.7096799104212518, + "grad_norm": 1.2394765615463257, + "learning_rate": 0.00015269058007387674, + "loss": 0.2641, + "step": 18380 + }, + { + "epoch": 0.7100660257152785, + "grad_norm": 2.0335285663604736, + "learning_rate": 0.00015266483905427495, + "loss": 0.1926, + "step": 18390 + }, + { + "epoch": 0.7104521410093054, + "grad_norm": 7.205105781555176, + "learning_rate": 0.00015263909803467317, + "loss": 0.4772, + "step": 18400 + }, + { + "epoch": 0.7108382563033322, + "grad_norm": 1.1549599170684814, + "learning_rate": 0.00015261335701507138, + "loss": 0.2775, + "step": 18410 + }, + { + "epoch": 0.711224371597359, + "grad_norm": 2.8363780975341797, + "learning_rate": 0.00015258761599546957, + "loss": 0.3533, + "step": 18420 + }, + { + "epoch": 0.7116104868913857, + "grad_norm": 0.2606666684150696, + "learning_rate": 0.0001525618749758678, + "loss": 0.1421, + "step": 18430 + }, + { + "epoch": 0.7119966021854126, + "grad_norm": 1.2934225797653198, + "learning_rate": 0.00015253613395626602, + "loss": 0.3066, + "step": 18440 + }, + { + "epoch": 0.7123827174794394, + "grad_norm": 3.8246026039123535, + "learning_rate": 0.00015251039293666423, + "loss": 0.534, + "step": 18450 + }, + { + "epoch": 0.7127688327734661, + "grad_norm": 2.2535433769226074, + "learning_rate": 0.00015248465191706245, + "loss": 0.4795, + "step": 18460 + }, + { + "epoch": 0.7131549480674929, + "grad_norm": 1.6749187707901, + "learning_rate": 0.00015245891089746066, + "loss": 0.4091, + "step": 18470 + }, + { + "epoch": 0.7135410633615198, + "grad_norm": 1.1795039176940918, + "learning_rate": 0.00015243316987785887, + "loss": 0.2612, + "step": 18480 + }, + { + "epoch": 0.7139271786555466, + "grad_norm": 1.9280221462249756, + "learning_rate": 0.00015240742885825709, + "loss": 0.2068, + "step": 18490 + }, + { + "epoch": 0.7143132939495733, + "grad_norm": 1.1188548803329468, + "learning_rate": 0.0001523816878386553, + "loss": 0.3193, + "step": 18500 + }, + { + "epoch": 0.7146994092436001, + "grad_norm": 0.2429720014333725, + "learning_rate": 0.0001523559468190535, + "loss": 0.3129, + "step": 18510 + }, + { + "epoch": 0.715085524537627, + "grad_norm": 4.09410285949707, + "learning_rate": 0.00015233020579945173, + "loss": 0.2437, + "step": 18520 + }, + { + "epoch": 0.7154716398316537, + "grad_norm": 
2.3252813816070557, + "learning_rate": 0.00015230446477984994, + "loss": 0.4649, + "step": 18530 + }, + { + "epoch": 0.7158577551256805, + "grad_norm": 0.5725727677345276, + "learning_rate": 0.00015227872376024815, + "loss": 0.3291, + "step": 18540 + }, + { + "epoch": 0.7162438704197073, + "grad_norm": 0.9253637194633484, + "learning_rate": 0.00015225298274064637, + "loss": 0.3486, + "step": 18550 + }, + { + "epoch": 0.7166299857137342, + "grad_norm": 2.3353309631347656, + "learning_rate": 0.00015222724172104458, + "loss": 0.253, + "step": 18560 + }, + { + "epoch": 0.7170161010077609, + "grad_norm": 0.7312389016151428, + "learning_rate": 0.0001522015007014428, + "loss": 0.2817, + "step": 18570 + }, + { + "epoch": 0.7174022163017877, + "grad_norm": 0.6564128994941711, + "learning_rate": 0.000152175759681841, + "loss": 0.2896, + "step": 18580 + }, + { + "epoch": 0.7177883315958145, + "grad_norm": 3.4619979858398438, + "learning_rate": 0.00015215001866223922, + "loss": 0.5028, + "step": 18590 + }, + { + "epoch": 0.7181744468898413, + "grad_norm": 6.910060882568359, + "learning_rate": 0.00015212427764263743, + "loss": 0.2467, + "step": 18600 + }, + { + "epoch": 0.7185605621838681, + "grad_norm": 2.022186279296875, + "learning_rate": 0.00015209853662303565, + "loss": 0.3406, + "step": 18610 + }, + { + "epoch": 0.7189466774778949, + "grad_norm": 1.2240760326385498, + "learning_rate": 0.00015207279560343386, + "loss": 0.3391, + "step": 18620 + }, + { + "epoch": 0.7193327927719217, + "grad_norm": 0.7356148958206177, + "learning_rate": 0.00015204705458383207, + "loss": 0.2679, + "step": 18630 + }, + { + "epoch": 0.7197189080659485, + "grad_norm": 0.963387131690979, + "learning_rate": 0.00015202131356423029, + "loss": 0.4088, + "step": 18640 + }, + { + "epoch": 0.7201050233599753, + "grad_norm": 3.0437800884246826, + "learning_rate": 0.0001519955725446285, + "loss": 0.2591, + "step": 18650 + }, + { + "epoch": 0.720491138654002, + "grad_norm": 2.5874569416046143, + "learning_rate": 0.0001519698315250267, + "loss": 0.2377, + "step": 18660 + }, + { + "epoch": 0.7208772539480289, + "grad_norm": 2.3215808868408203, + "learning_rate": 0.00015194409050542493, + "loss": 0.5335, + "step": 18670 + }, + { + "epoch": 0.7212633692420557, + "grad_norm": 1.9501638412475586, + "learning_rate": 0.00015191834948582314, + "loss": 0.4657, + "step": 18680 + }, + { + "epoch": 0.7216494845360825, + "grad_norm": 1.8396021127700806, + "learning_rate": 0.00015189260846622135, + "loss": 0.2719, + "step": 18690 + }, + { + "epoch": 0.7220355998301092, + "grad_norm": 0.9337745904922485, + "learning_rate": 0.00015186686744661957, + "loss": 0.3672, + "step": 18700 + }, + { + "epoch": 0.7224217151241361, + "grad_norm": 1.892098069190979, + "learning_rate": 0.00015184112642701778, + "loss": 0.6085, + "step": 18710 + }, + { + "epoch": 0.7228078304181629, + "grad_norm": 1.051630973815918, + "learning_rate": 0.000151815385407416, + "loss": 0.2422, + "step": 18720 + }, + { + "epoch": 0.7231939457121896, + "grad_norm": 0.8714147210121155, + "learning_rate": 0.0001517896443878142, + "loss": 0.4046, + "step": 18730 + }, + { + "epoch": 0.7235800610062164, + "grad_norm": 0.5002617835998535, + "learning_rate": 0.00015176390336821242, + "loss": 0.3708, + "step": 18740 + }, + { + "epoch": 0.7239661763002433, + "grad_norm": 1.3960262537002563, + "learning_rate": 0.00015173816234861066, + "loss": 0.3206, + "step": 18750 + }, + { + "epoch": 0.7243522915942701, + "grad_norm": 0.7899012565612793, + "learning_rate": 0.00015171242132900885, + "loss": 
0.2875, + "step": 18760 + }, + { + "epoch": 0.7247384068882968, + "grad_norm": 0.7216291427612305, + "learning_rate": 0.00015168668030940706, + "loss": 0.3885, + "step": 18770 + }, + { + "epoch": 0.7251245221823236, + "grad_norm": 0.36028966307640076, + "learning_rate": 0.00015166093928980527, + "loss": 0.3542, + "step": 18780 + }, + { + "epoch": 0.7255106374763505, + "grad_norm": 1.378724455833435, + "learning_rate": 0.00015163519827020349, + "loss": 0.2149, + "step": 18790 + }, + { + "epoch": 0.7258967527703772, + "grad_norm": 1.544819712638855, + "learning_rate": 0.0001516094572506017, + "loss": 0.278, + "step": 18800 + }, + { + "epoch": 0.726282868064404, + "grad_norm": 0.5819025039672852, + "learning_rate": 0.0001515837162309999, + "loss": 0.2161, + "step": 18810 + }, + { + "epoch": 0.7266689833584308, + "grad_norm": 0.8350955843925476, + "learning_rate": 0.00015155797521139815, + "loss": 0.4134, + "step": 18820 + }, + { + "epoch": 0.7270550986524577, + "grad_norm": 2.4110195636749268, + "learning_rate": 0.00015153223419179634, + "loss": 0.3928, + "step": 18830 + }, + { + "epoch": 0.7274412139464844, + "grad_norm": 0.7913835048675537, + "learning_rate": 0.00015150649317219455, + "loss": 0.2336, + "step": 18840 + }, + { + "epoch": 0.7278273292405112, + "grad_norm": 2.13431978225708, + "learning_rate": 0.00015148075215259277, + "loss": 0.4758, + "step": 18850 + }, + { + "epoch": 0.7282134445345381, + "grad_norm": 1.2756295204162598, + "learning_rate": 0.00015145501113299098, + "loss": 0.3139, + "step": 18860 + }, + { + "epoch": 0.7285995598285648, + "grad_norm": 0.3661370277404785, + "learning_rate": 0.0001514292701133892, + "loss": 0.3913, + "step": 18870 + }, + { + "epoch": 0.7289856751225916, + "grad_norm": 1.2238267660140991, + "learning_rate": 0.0001514035290937874, + "loss": 0.1893, + "step": 18880 + }, + { + "epoch": 0.7293717904166184, + "grad_norm": 2.8876595497131348, + "learning_rate": 0.00015137778807418565, + "loss": 0.2567, + "step": 18890 + }, + { + "epoch": 0.7297579057106452, + "grad_norm": 1.248967170715332, + "learning_rate": 0.00015135204705458383, + "loss": 0.2004, + "step": 18900 + }, + { + "epoch": 0.730144021004672, + "grad_norm": 0.9446873068809509, + "learning_rate": 0.00015132630603498205, + "loss": 0.386, + "step": 18910 + }, + { + "epoch": 0.7305301362986988, + "grad_norm": 4.592974662780762, + "learning_rate": 0.00015130056501538026, + "loss": 0.4549, + "step": 18920 + }, + { + "epoch": 0.7309162515927256, + "grad_norm": 0.8000105619430542, + "learning_rate": 0.00015127482399577847, + "loss": 0.2795, + "step": 18930 + }, + { + "epoch": 0.7313023668867524, + "grad_norm": 0.5600059628486633, + "learning_rate": 0.0001512490829761767, + "loss": 0.3682, + "step": 18940 + }, + { + "epoch": 0.7316884821807792, + "grad_norm": 0.4072086811065674, + "learning_rate": 0.0001512233419565749, + "loss": 0.3186, + "step": 18950 + }, + { + "epoch": 0.732074597474806, + "grad_norm": 3.387422561645508, + "learning_rate": 0.00015119760093697314, + "loss": 0.3195, + "step": 18960 + }, + { + "epoch": 0.7324607127688327, + "grad_norm": 0.5224191546440125, + "learning_rate": 0.00015117185991737133, + "loss": 0.1792, + "step": 18970 + }, + { + "epoch": 0.7328468280628596, + "grad_norm": 0.5431543588638306, + "learning_rate": 0.00015114611889776954, + "loss": 0.2775, + "step": 18980 + }, + { + "epoch": 0.7332329433568864, + "grad_norm": 0.13088488578796387, + "learning_rate": 0.00015112037787816775, + "loss": 0.3511, + "step": 18990 + }, + { + "epoch": 0.7336190586509131, + 
"grad_norm": 0.6414417624473572, + "learning_rate": 0.00015109463685856597, + "loss": 0.4042, + "step": 19000 + }, + { + "epoch": 0.7340051739449399, + "grad_norm": 0.2358855903148651, + "learning_rate": 0.0001510688958389642, + "loss": 0.2567, + "step": 19010 + }, + { + "epoch": 0.7343912892389668, + "grad_norm": 1.343703031539917, + "learning_rate": 0.0001510431548193624, + "loss": 0.3658, + "step": 19020 + }, + { + "epoch": 0.7347774045329936, + "grad_norm": 2.5982301235198975, + "learning_rate": 0.00015101741379976063, + "loss": 0.3664, + "step": 19030 + }, + { + "epoch": 0.7351635198270203, + "grad_norm": 1.3639850616455078, + "learning_rate": 0.00015099167278015882, + "loss": 0.1809, + "step": 19040 + }, + { + "epoch": 0.7355496351210471, + "grad_norm": 1.322572946548462, + "learning_rate": 0.00015096593176055703, + "loss": 0.213, + "step": 19050 + }, + { + "epoch": 0.735935750415074, + "grad_norm": 0.6858059763908386, + "learning_rate": 0.00015094019074095527, + "loss": 0.1494, + "step": 19060 + }, + { + "epoch": 0.7363218657091007, + "grad_norm": 3.9815866947174072, + "learning_rate": 0.00015091444972135346, + "loss": 0.4879, + "step": 19070 + }, + { + "epoch": 0.7367079810031275, + "grad_norm": 0.5155348777770996, + "learning_rate": 0.0001508887087017517, + "loss": 0.1951, + "step": 19080 + }, + { + "epoch": 0.7370940962971543, + "grad_norm": 1.1120082139968872, + "learning_rate": 0.00015086296768214988, + "loss": 0.3156, + "step": 19090 + }, + { + "epoch": 0.7374802115911812, + "grad_norm": 2.1396732330322266, + "learning_rate": 0.00015083722666254813, + "loss": 0.3528, + "step": 19100 + }, + { + "epoch": 0.7378663268852079, + "grad_norm": 1.8543074131011963, + "learning_rate": 0.0001508114856429463, + "loss": 0.2778, + "step": 19110 + }, + { + "epoch": 0.7382524421792347, + "grad_norm": 0.2955397963523865, + "learning_rate": 0.00015078574462334452, + "loss": 0.3267, + "step": 19120 + }, + { + "epoch": 0.7386385574732616, + "grad_norm": 0.6277685165405273, + "learning_rate": 0.00015076000360374277, + "loss": 0.3316, + "step": 19130 + }, + { + "epoch": 0.7390246727672883, + "grad_norm": 1.4339113235473633, + "learning_rate": 0.00015073426258414095, + "loss": 0.3227, + "step": 19140 + }, + { + "epoch": 0.7394107880613151, + "grad_norm": 2.74206805229187, + "learning_rate": 0.0001507085215645392, + "loss": 0.2086, + "step": 19150 + }, + { + "epoch": 0.7397969033553419, + "grad_norm": 1.0108954906463623, + "learning_rate": 0.00015068278054493738, + "loss": 0.2697, + "step": 19160 + }, + { + "epoch": 0.7401830186493688, + "grad_norm": 1.726650357246399, + "learning_rate": 0.00015065703952533562, + "loss": 0.1312, + "step": 19170 + }, + { + "epoch": 0.7405691339433955, + "grad_norm": 1.8226735591888428, + "learning_rate": 0.0001506312985057338, + "loss": 0.4488, + "step": 19180 + }, + { + "epoch": 0.7409552492374223, + "grad_norm": 1.4812517166137695, + "learning_rate": 0.00015060555748613205, + "loss": 0.472, + "step": 19190 + }, + { + "epoch": 0.741341364531449, + "grad_norm": 1.184036374092102, + "learning_rate": 0.00015057981646653026, + "loss": 0.1342, + "step": 19200 + }, + { + "epoch": 0.7417274798254759, + "grad_norm": 2.925368309020996, + "learning_rate": 0.00015055407544692844, + "loss": 0.4114, + "step": 19210 + }, + { + "epoch": 0.7421135951195027, + "grad_norm": 1.4891862869262695, + "learning_rate": 0.00015052833442732669, + "loss": 0.3454, + "step": 19220 + }, + { + "epoch": 0.7424997104135295, + "grad_norm": 2.9221529960632324, + "learning_rate": 0.00015050259340772487, 
+ "loss": 0.4622, + "step": 19230 + }, + { + "epoch": 0.7428858257075562, + "grad_norm": 1.3214635848999023, + "learning_rate": 0.0001504768523881231, + "loss": 0.2783, + "step": 19240 + }, + { + "epoch": 0.7432719410015831, + "grad_norm": 1.1919734477996826, + "learning_rate": 0.00015045111136852133, + "loss": 0.3667, + "step": 19250 + }, + { + "epoch": 0.7436580562956099, + "grad_norm": 1.7075424194335938, + "learning_rate": 0.00015042537034891954, + "loss": 0.1867, + "step": 19260 + }, + { + "epoch": 0.7440441715896366, + "grad_norm": 0.6810876727104187, + "learning_rate": 0.00015039962932931775, + "loss": 0.2231, + "step": 19270 + }, + { + "epoch": 0.7444302868836634, + "grad_norm": 1.3421598672866821, + "learning_rate": 0.00015037388830971594, + "loss": 0.2138, + "step": 19280 + }, + { + "epoch": 0.7448164021776903, + "grad_norm": 1.5983080863952637, + "learning_rate": 0.00015034814729011418, + "loss": 0.2799, + "step": 19290 + }, + { + "epoch": 0.7452025174717171, + "grad_norm": 2.0319182872772217, + "learning_rate": 0.00015032240627051236, + "loss": 0.213, + "step": 19300 + }, + { + "epoch": 0.7455886327657438, + "grad_norm": 0.6433222889900208, + "learning_rate": 0.0001502966652509106, + "loss": 0.3604, + "step": 19310 + }, + { + "epoch": 0.7459747480597706, + "grad_norm": 1.1373825073242188, + "learning_rate": 0.00015027092423130882, + "loss": 0.3007, + "step": 19320 + }, + { + "epoch": 0.7463608633537975, + "grad_norm": 1.7230875492095947, + "learning_rate": 0.00015024518321170703, + "loss": 0.2471, + "step": 19330 + }, + { + "epoch": 0.7467469786478242, + "grad_norm": 1.73224937915802, + "learning_rate": 0.00015021944219210524, + "loss": 0.4877, + "step": 19340 + }, + { + "epoch": 0.747133093941851, + "grad_norm": 0.9023095369338989, + "learning_rate": 0.00015019370117250343, + "loss": 0.352, + "step": 19350 + }, + { + "epoch": 0.7475192092358778, + "grad_norm": 1.898983120918274, + "learning_rate": 0.00015016796015290167, + "loss": 0.3981, + "step": 19360 + }, + { + "epoch": 0.7479053245299047, + "grad_norm": 1.8544923067092896, + "learning_rate": 0.00015014221913329986, + "loss": 0.3015, + "step": 19370 + }, + { + "epoch": 0.7482914398239314, + "grad_norm": 1.1380795240402222, + "learning_rate": 0.0001501164781136981, + "loss": 0.3173, + "step": 19380 + }, + { + "epoch": 0.7486775551179582, + "grad_norm": 0.5071800351142883, + "learning_rate": 0.0001500907370940963, + "loss": 0.46, + "step": 19390 + }, + { + "epoch": 0.7490636704119851, + "grad_norm": 0.30739355087280273, + "learning_rate": 0.00015006499607449452, + "loss": 0.4942, + "step": 19400 + }, + { + "epoch": 0.7494497857060118, + "grad_norm": 1.1223585605621338, + "learning_rate": 0.00015003925505489274, + "loss": 0.3118, + "step": 19410 + }, + { + "epoch": 0.7498359010000386, + "grad_norm": 1.019545555114746, + "learning_rate": 0.00015001351403529092, + "loss": 0.3115, + "step": 19420 + }, + { + "epoch": 0.7502220162940654, + "grad_norm": 0.4567502439022064, + "learning_rate": 0.00014998777301568916, + "loss": 0.2932, + "step": 19430 + }, + { + "epoch": 0.7506081315880923, + "grad_norm": 1.669258952140808, + "learning_rate": 0.00014996203199608738, + "loss": 0.2889, + "step": 19440 + }, + { + "epoch": 0.750994246882119, + "grad_norm": 0.2787584364414215, + "learning_rate": 0.0001499362909764856, + "loss": 0.3482, + "step": 19450 + }, + { + "epoch": 0.7513803621761458, + "grad_norm": 1.6648303270339966, + "learning_rate": 0.0001499105499568838, + "loss": 0.3136, + "step": 19460 + }, + { + "epoch": 0.7517664774701726, 
+ "grad_norm": 2.6357266902923584, + "learning_rate": 0.00014988480893728202, + "loss": 0.4643, + "step": 19470 + }, + { + "epoch": 0.7521525927641994, + "grad_norm": 1.8017394542694092, + "learning_rate": 0.00014985906791768023, + "loss": 0.469, + "step": 19480 + }, + { + "epoch": 0.7525387080582262, + "grad_norm": 1.8467847108840942, + "learning_rate": 0.00014983332689807842, + "loss": 0.1495, + "step": 19490 + }, + { + "epoch": 0.752924823352253, + "grad_norm": 2.9446980953216553, + "learning_rate": 0.00014980758587847666, + "loss": 0.3162, + "step": 19500 + }, + { + "epoch": 0.7533109386462797, + "grad_norm": 1.4076721668243408, + "learning_rate": 0.00014978184485887487, + "loss": 0.2356, + "step": 19510 + }, + { + "epoch": 0.7536970539403066, + "grad_norm": 1.2669463157653809, + "learning_rate": 0.00014975610383927308, + "loss": 0.3056, + "step": 19520 + }, + { + "epoch": 0.7540831692343334, + "grad_norm": 1.1390401124954224, + "learning_rate": 0.0001497303628196713, + "loss": 0.2785, + "step": 19530 + }, + { + "epoch": 0.7544692845283602, + "grad_norm": 3.095099925994873, + "learning_rate": 0.0001497046218000695, + "loss": 0.489, + "step": 19540 + }, + { + "epoch": 0.7548553998223869, + "grad_norm": 1.5737907886505127, + "learning_rate": 0.00014967888078046772, + "loss": 0.3981, + "step": 19550 + }, + { + "epoch": 0.7552415151164138, + "grad_norm": 1.4953045845031738, + "learning_rate": 0.00014965313976086594, + "loss": 0.3153, + "step": 19560 + }, + { + "epoch": 0.7556276304104406, + "grad_norm": 0.7709154486656189, + "learning_rate": 0.00014962739874126415, + "loss": 0.2904, + "step": 19570 + }, + { + "epoch": 0.7560137457044673, + "grad_norm": 2.118950366973877, + "learning_rate": 0.00014960165772166236, + "loss": 0.3736, + "step": 19580 + }, + { + "epoch": 0.7563998609984941, + "grad_norm": 2.9463138580322266, + "learning_rate": 0.00014957591670206058, + "loss": 0.3186, + "step": 19590 + }, + { + "epoch": 0.756785976292521, + "grad_norm": 1.3784689903259277, + "learning_rate": 0.0001495501756824588, + "loss": 0.3589, + "step": 19600 + }, + { + "epoch": 0.7571720915865477, + "grad_norm": 2.35467267036438, + "learning_rate": 0.000149524434662857, + "loss": 0.1282, + "step": 19610 + }, + { + "epoch": 0.7575582068805745, + "grad_norm": 0.7167999148368835, + "learning_rate": 0.00014949869364325522, + "loss": 0.3775, + "step": 19620 + }, + { + "epoch": 0.7579443221746013, + "grad_norm": 1.242785096168518, + "learning_rate": 0.00014947295262365343, + "loss": 0.3814, + "step": 19630 + }, + { + "epoch": 0.7583304374686282, + "grad_norm": 2.7875797748565674, + "learning_rate": 0.00014944721160405164, + "loss": 0.4764, + "step": 19640 + }, + { + "epoch": 0.7587165527626549, + "grad_norm": 1.2169462442398071, + "learning_rate": 0.00014942147058444986, + "loss": 0.3132, + "step": 19650 + }, + { + "epoch": 0.7591026680566817, + "grad_norm": 3.146204948425293, + "learning_rate": 0.00014939572956484807, + "loss": 0.248, + "step": 19660 + }, + { + "epoch": 0.7594887833507086, + "grad_norm": 2.2177276611328125, + "learning_rate": 0.00014936998854524628, + "loss": 0.3755, + "step": 19670 + }, + { + "epoch": 0.7598748986447353, + "grad_norm": 0.8063843250274658, + "learning_rate": 0.0001493442475256445, + "loss": 0.6341, + "step": 19680 + }, + { + "epoch": 0.7602610139387621, + "grad_norm": 0.18064215779304504, + "learning_rate": 0.0001493185065060427, + "loss": 0.3565, + "step": 19690 + }, + { + "epoch": 0.7606471292327889, + "grad_norm": 1.4401954412460327, + "learning_rate": 
0.00014929276548644092, + "loss": 0.1873, + "step": 19700 + }, + { + "epoch": 0.7610332445268158, + "grad_norm": 0.7269515991210938, + "learning_rate": 0.00014926702446683914, + "loss": 0.2765, + "step": 19710 + }, + { + "epoch": 0.7614193598208425, + "grad_norm": 1.2779995203018188, + "learning_rate": 0.00014924128344723735, + "loss": 0.3633, + "step": 19720 + }, + { + "epoch": 0.7618054751148693, + "grad_norm": 1.3330426216125488, + "learning_rate": 0.00014921554242763556, + "loss": 0.3736, + "step": 19730 + }, + { + "epoch": 0.7621915904088961, + "grad_norm": 1.4269347190856934, + "learning_rate": 0.00014918980140803378, + "loss": 0.2572, + "step": 19740 + }, + { + "epoch": 0.7625777057029229, + "grad_norm": 0.9075976610183716, + "learning_rate": 0.000149164060388432, + "loss": 0.3609, + "step": 19750 + }, + { + "epoch": 0.7629638209969497, + "grad_norm": 1.7566559314727783, + "learning_rate": 0.0001491383193688302, + "loss": 0.3134, + "step": 19760 + }, + { + "epoch": 0.7633499362909765, + "grad_norm": 2.4620914459228516, + "learning_rate": 0.00014911257834922842, + "loss": 0.3559, + "step": 19770 + }, + { + "epoch": 0.7637360515850032, + "grad_norm": 0.9474597573280334, + "learning_rate": 0.00014908683732962663, + "loss": 0.3821, + "step": 19780 + }, + { + "epoch": 0.7641221668790301, + "grad_norm": 0.6298363208770752, + "learning_rate": 0.00014906109631002484, + "loss": 0.2801, + "step": 19790 + }, + { + "epoch": 0.7645082821730569, + "grad_norm": 0.213288351893425, + "learning_rate": 0.00014903535529042306, + "loss": 0.368, + "step": 19800 + }, + { + "epoch": 0.7648943974670837, + "grad_norm": 1.4412375688552856, + "learning_rate": 0.00014900961427082127, + "loss": 0.3972, + "step": 19810 + }, + { + "epoch": 0.7652805127611104, + "grad_norm": 1.6808812618255615, + "learning_rate": 0.00014898387325121948, + "loss": 0.5302, + "step": 19820 + }, + { + "epoch": 0.7656666280551373, + "grad_norm": 0.922726571559906, + "learning_rate": 0.0001489581322316177, + "loss": 0.3299, + "step": 19830 + }, + { + "epoch": 0.7660527433491641, + "grad_norm": 0.5058152079582214, + "learning_rate": 0.0001489323912120159, + "loss": 0.4485, + "step": 19840 + }, + { + "epoch": 0.7664388586431908, + "grad_norm": 1.3025776147842407, + "learning_rate": 0.00014890665019241412, + "loss": 0.266, + "step": 19850 + }, + { + "epoch": 0.7668249739372176, + "grad_norm": 2.0282516479492188, + "learning_rate": 0.00014888090917281234, + "loss": 0.7752, + "step": 19860 + }, + { + "epoch": 0.7672110892312445, + "grad_norm": 0.6557582020759583, + "learning_rate": 0.00014885516815321058, + "loss": 0.256, + "step": 19870 + }, + { + "epoch": 0.7675972045252712, + "grad_norm": 0.13688494265079498, + "learning_rate": 0.00014882942713360876, + "loss": 0.263, + "step": 19880 + }, + { + "epoch": 0.767983319819298, + "grad_norm": 0.04091642051935196, + "learning_rate": 0.00014880368611400698, + "loss": 0.276, + "step": 19890 + }, + { + "epoch": 0.7683694351133248, + "grad_norm": 3.054969072341919, + "learning_rate": 0.0001487779450944052, + "loss": 0.5198, + "step": 19900 + }, + { + "epoch": 0.7687555504073517, + "grad_norm": 0.5707372426986694, + "learning_rate": 0.0001487522040748034, + "loss": 0.2715, + "step": 19910 + }, + { + "epoch": 0.7691416657013784, + "grad_norm": 0.477830708026886, + "learning_rate": 0.00014872646305520162, + "loss": 0.2517, + "step": 19920 + }, + { + "epoch": 0.7695277809954052, + "grad_norm": 1.0550785064697266, + "learning_rate": 0.00014870072203559983, + "loss": 0.2552, + "step": 19930 + }, + { + 
"epoch": 0.7699138962894321, + "grad_norm": 1.9678715467453003, + "learning_rate": 0.00014867498101599807, + "loss": 0.4487, + "step": 19940 + }, + { + "epoch": 0.7703000115834588, + "grad_norm": 0.4506283104419708, + "learning_rate": 0.00014864923999639626, + "loss": 0.2632, + "step": 19950 + }, + { + "epoch": 0.7706861268774856, + "grad_norm": 1.8445035219192505, + "learning_rate": 0.0001486234989767945, + "loss": 0.3315, + "step": 19960 + }, + { + "epoch": 0.7710722421715124, + "grad_norm": 2.6394076347351074, + "learning_rate": 0.00014859775795719268, + "loss": 0.3216, + "step": 19970 + }, + { + "epoch": 0.7714583574655393, + "grad_norm": 0.6717782020568848, + "learning_rate": 0.0001485720169375909, + "loss": 0.3531, + "step": 19980 + }, + { + "epoch": 0.771844472759566, + "grad_norm": 0.5959204435348511, + "learning_rate": 0.0001485462759179891, + "loss": 0.2095, + "step": 19990 + }, + { + "epoch": 0.7722305880535928, + "grad_norm": 1.874375581741333, + "learning_rate": 0.00014852053489838732, + "loss": 0.4845, + "step": 20000 + }, + { + "epoch": 0.7726167033476196, + "grad_norm": 2.6810474395751953, + "learning_rate": 0.00014849479387878556, + "loss": 0.471, + "step": 20010 + }, + { + "epoch": 0.7730028186416464, + "grad_norm": 0.5498594045639038, + "learning_rate": 0.00014846905285918375, + "loss": 0.2265, + "step": 20020 + }, + { + "epoch": 0.7733889339356732, + "grad_norm": 0.7658601999282837, + "learning_rate": 0.000148443311839582, + "loss": 0.4459, + "step": 20030 + }, + { + "epoch": 0.7737750492297, + "grad_norm": 0.5597706437110901, + "learning_rate": 0.00014841757081998018, + "loss": 0.4317, + "step": 20040 + }, + { + "epoch": 0.7741611645237267, + "grad_norm": 0.740342915058136, + "learning_rate": 0.0001483918298003784, + "loss": 0.4158, + "step": 20050 + }, + { + "epoch": 0.7745472798177536, + "grad_norm": 0.6069484949111938, + "learning_rate": 0.00014836608878077663, + "loss": 0.2363, + "step": 20060 + }, + { + "epoch": 0.7749333951117804, + "grad_norm": 0.9331381916999817, + "learning_rate": 0.00014834034776117482, + "loss": 0.2063, + "step": 20070 + }, + { + "epoch": 0.7753195104058072, + "grad_norm": 1.298399806022644, + "learning_rate": 0.00014831460674157306, + "loss": 0.319, + "step": 20080 + }, + { + "epoch": 0.7757056256998339, + "grad_norm": 1.887229323387146, + "learning_rate": 0.00014828886572197124, + "loss": 0.3805, + "step": 20090 + }, + { + "epoch": 0.7760917409938608, + "grad_norm": 0.5734463930130005, + "learning_rate": 0.00014826312470236948, + "loss": 0.392, + "step": 20100 + }, + { + "epoch": 0.7764778562878876, + "grad_norm": 1.10410475730896, + "learning_rate": 0.00014823738368276767, + "loss": 0.3222, + "step": 20110 + }, + { + "epoch": 0.7768639715819143, + "grad_norm": 3.1354923248291016, + "learning_rate": 0.00014821164266316588, + "loss": 0.3434, + "step": 20120 + }, + { + "epoch": 0.7772500868759411, + "grad_norm": 1.9921913146972656, + "learning_rate": 0.00014818590164356412, + "loss": 0.222, + "step": 20130 + }, + { + "epoch": 0.777636202169968, + "grad_norm": 0.38615912199020386, + "learning_rate": 0.0001481601606239623, + "loss": 0.1859, + "step": 20140 + }, + { + "epoch": 0.7780223174639948, + "grad_norm": 0.919252336025238, + "learning_rate": 0.00014813441960436055, + "loss": 0.3889, + "step": 20150 + }, + { + "epoch": 0.7784084327580215, + "grad_norm": 0.9474624991416931, + "learning_rate": 0.00014810867858475874, + "loss": 0.3086, + "step": 20160 + }, + { + "epoch": 0.7787945480520484, + "grad_norm": 1.1992007493972778, + 
"learning_rate": 0.00014808293756515698, + "loss": 0.3949, + "step": 20170 + }, + { + "epoch": 0.7791806633460752, + "grad_norm": 2.2405364513397217, + "learning_rate": 0.00014805719654555516, + "loss": 0.4963, + "step": 20180 + }, + { + "epoch": 0.7795667786401019, + "grad_norm": 0.9163286685943604, + "learning_rate": 0.00014803145552595338, + "loss": 0.4022, + "step": 20190 + }, + { + "epoch": 0.7799528939341287, + "grad_norm": 0.995309591293335, + "learning_rate": 0.00014800571450635162, + "loss": 0.244, + "step": 20200 + }, + { + "epoch": 0.7803390092281556, + "grad_norm": 3.0966508388519287, + "learning_rate": 0.0001479799734867498, + "loss": 0.4165, + "step": 20210 + }, + { + "epoch": 0.7807251245221823, + "grad_norm": 1.7679264545440674, + "learning_rate": 0.00014795423246714804, + "loss": 0.3264, + "step": 20220 + }, + { + "epoch": 0.7811112398162091, + "grad_norm": 3.405724048614502, + "learning_rate": 0.00014792849144754623, + "loss": 0.5292, + "step": 20230 + }, + { + "epoch": 0.7814973551102359, + "grad_norm": 0.5335774421691895, + "learning_rate": 0.00014790275042794447, + "loss": 0.2901, + "step": 20240 + }, + { + "epoch": 0.7818834704042628, + "grad_norm": 2.332176446914673, + "learning_rate": 0.00014787700940834268, + "loss": 0.3116, + "step": 20250 + }, + { + "epoch": 0.7822695856982895, + "grad_norm": 1.226844072341919, + "learning_rate": 0.00014785126838874087, + "loss": 0.332, + "step": 20260 + }, + { + "epoch": 0.7826557009923163, + "grad_norm": 0.8310544490814209, + "learning_rate": 0.0001478255273691391, + "loss": 0.3929, + "step": 20270 + }, + { + "epoch": 0.7830418162863431, + "grad_norm": 4.065925121307373, + "learning_rate": 0.0001477997863495373, + "loss": 0.3044, + "step": 20280 + }, + { + "epoch": 0.7834279315803699, + "grad_norm": 1.3661054372787476, + "learning_rate": 0.00014777404532993554, + "loss": 0.2396, + "step": 20290 + }, + { + "epoch": 0.7838140468743967, + "grad_norm": 2.047938346862793, + "learning_rate": 0.00014774830431033372, + "loss": 0.2954, + "step": 20300 + }, + { + "epoch": 0.7842001621684235, + "grad_norm": 2.099759817123413, + "learning_rate": 0.00014772256329073196, + "loss": 0.1987, + "step": 20310 + }, + { + "epoch": 0.7845862774624502, + "grad_norm": 4.439823627471924, + "learning_rate": 0.00014769682227113018, + "loss": 0.2449, + "step": 20320 + }, + { + "epoch": 0.7849723927564771, + "grad_norm": 1.894250750541687, + "learning_rate": 0.00014767108125152836, + "loss": 0.3335, + "step": 20330 + }, + { + "epoch": 0.7853585080505039, + "grad_norm": 2.2916924953460693, + "learning_rate": 0.0001476453402319266, + "loss": 0.3828, + "step": 20340 + }, + { + "epoch": 0.7857446233445307, + "grad_norm": 3.7847397327423096, + "learning_rate": 0.0001476195992123248, + "loss": 0.2142, + "step": 20350 + }, + { + "epoch": 0.7861307386385574, + "grad_norm": 2.7507593631744385, + "learning_rate": 0.00014759385819272303, + "loss": 0.39, + "step": 20360 + }, + { + "epoch": 0.7865168539325843, + "grad_norm": 2.500195264816284, + "learning_rate": 0.00014756811717312124, + "loss": 0.4163, + "step": 20370 + }, + { + "epoch": 0.7869029692266111, + "grad_norm": 1.3699760437011719, + "learning_rate": 0.00014754237615351946, + "loss": 0.2407, + "step": 20380 + }, + { + "epoch": 0.7872890845206378, + "grad_norm": 0.9736176133155823, + "learning_rate": 0.00014751663513391767, + "loss": 0.3644, + "step": 20390 + }, + { + "epoch": 0.7876751998146646, + "grad_norm": 0.9103217720985413, + "learning_rate": 0.00014749089411431588, + "loss": 0.1304, + "step": 20400 + 
}, + { + "epoch": 0.7880613151086915, + "grad_norm": 0.7736025452613831, + "learning_rate": 0.0001474651530947141, + "loss": 0.3848, + "step": 20410 + }, + { + "epoch": 0.7884474304027183, + "grad_norm": 1.6167576313018799, + "learning_rate": 0.00014743941207511228, + "loss": 0.3469, + "step": 20420 + }, + { + "epoch": 0.788833545696745, + "grad_norm": 0.1359117031097412, + "learning_rate": 0.00014741367105551052, + "loss": 0.28, + "step": 20430 + }, + { + "epoch": 0.7892196609907719, + "grad_norm": 0.20857305824756622, + "learning_rate": 0.00014738793003590874, + "loss": 0.2406, + "step": 20440 + }, + { + "epoch": 0.7896057762847987, + "grad_norm": 0.9618992805480957, + "learning_rate": 0.00014736218901630695, + "loss": 0.243, + "step": 20450 + }, + { + "epoch": 0.7899918915788254, + "grad_norm": 0.4663112163543701, + "learning_rate": 0.00014733644799670516, + "loss": 0.214, + "step": 20460 + }, + { + "epoch": 0.7903780068728522, + "grad_norm": 1.3922615051269531, + "learning_rate": 0.00014731070697710338, + "loss": 0.4601, + "step": 20470 + }, + { + "epoch": 0.7907641221668791, + "grad_norm": 1.5706521272659302, + "learning_rate": 0.0001472849659575016, + "loss": 0.2187, + "step": 20480 + }, + { + "epoch": 0.7911502374609058, + "grad_norm": 1.9315848350524902, + "learning_rate": 0.00014725922493789978, + "loss": 0.2691, + "step": 20490 + }, + { + "epoch": 0.7915363527549326, + "grad_norm": 2.6131045818328857, + "learning_rate": 0.00014723348391829802, + "loss": 0.335, + "step": 20500 + }, + { + "epoch": 0.7919224680489594, + "grad_norm": 2.6561343669891357, + "learning_rate": 0.00014720774289869623, + "loss": 0.2166, + "step": 20510 + }, + { + "epoch": 0.7923085833429863, + "grad_norm": 1.247574806213379, + "learning_rate": 0.00014718200187909444, + "loss": 0.4276, + "step": 20520 + }, + { + "epoch": 0.792694698637013, + "grad_norm": 0.7353020310401917, + "learning_rate": 0.00014715626085949266, + "loss": 0.3442, + "step": 20530 + }, + { + "epoch": 0.7930808139310398, + "grad_norm": 0.1100919172167778, + "learning_rate": 0.00014713051983989087, + "loss": 0.21, + "step": 20540 + }, + { + "epoch": 0.7934669292250666, + "grad_norm": 0.6608699560165405, + "learning_rate": 0.00014710477882028908, + "loss": 0.2593, + "step": 20550 + }, + { + "epoch": 0.7938530445190934, + "grad_norm": 1.0959700345993042, + "learning_rate": 0.0001470790378006873, + "loss": 0.1864, + "step": 20560 + }, + { + "epoch": 0.7942391598131202, + "grad_norm": 1.469408392906189, + "learning_rate": 0.0001470532967810855, + "loss": 0.251, + "step": 20570 + }, + { + "epoch": 0.794625275107147, + "grad_norm": 1.4239304065704346, + "learning_rate": 0.00014702755576148372, + "loss": 0.1782, + "step": 20580 + }, + { + "epoch": 0.7950113904011737, + "grad_norm": 0.5389681458473206, + "learning_rate": 0.00014700181474188194, + "loss": 0.3228, + "step": 20590 + }, + { + "epoch": 0.7953975056952006, + "grad_norm": 0.4744633436203003, + "learning_rate": 0.00014697607372228015, + "loss": 0.5448, + "step": 20600 + }, + { + "epoch": 0.7957836209892274, + "grad_norm": 1.1155211925506592, + "learning_rate": 0.00014695033270267836, + "loss": 0.3341, + "step": 20610 + }, + { + "epoch": 0.7961697362832542, + "grad_norm": 1.4301745891571045, + "learning_rate": 0.00014692459168307658, + "loss": 0.2376, + "step": 20620 + }, + { + "epoch": 0.7965558515772809, + "grad_norm": 2.0889878273010254, + "learning_rate": 0.0001468988506634748, + "loss": 0.252, + "step": 20630 + }, + { + "epoch": 0.7969419668713078, + "grad_norm": 1.420873761177063, + 
"learning_rate": 0.000146873109643873, + "loss": 0.4033, + "step": 20640 + }, + { + "epoch": 0.7973280821653346, + "grad_norm": 1.9909567832946777, + "learning_rate": 0.00014684736862427122, + "loss": 0.5206, + "step": 20650 + }, + { + "epoch": 0.7977141974593613, + "grad_norm": 1.3584216833114624, + "learning_rate": 0.00014682162760466943, + "loss": 0.2643, + "step": 20660 + }, + { + "epoch": 0.7981003127533881, + "grad_norm": 1.5488578081130981, + "learning_rate": 0.00014679588658506764, + "loss": 0.4098, + "step": 20670 + }, + { + "epoch": 0.798486428047415, + "grad_norm": 2.0128777027130127, + "learning_rate": 0.00014677014556546586, + "loss": 0.2498, + "step": 20680 + }, + { + "epoch": 0.7988725433414418, + "grad_norm": 0.6591671705245972, + "learning_rate": 0.00014674440454586407, + "loss": 0.3131, + "step": 20690 + }, + { + "epoch": 0.7992586586354685, + "grad_norm": 0.8321843147277832, + "learning_rate": 0.00014671866352626228, + "loss": 0.3849, + "step": 20700 + }, + { + "epoch": 0.7996447739294954, + "grad_norm": 1.4672768115997314, + "learning_rate": 0.0001466929225066605, + "loss": 0.3056, + "step": 20710 + }, + { + "epoch": 0.8000308892235222, + "grad_norm": 1.5972867012023926, + "learning_rate": 0.0001466671814870587, + "loss": 0.2699, + "step": 20720 + }, + { + "epoch": 0.8004170045175489, + "grad_norm": 0.685972273349762, + "learning_rate": 0.00014664144046745692, + "loss": 0.2598, + "step": 20730 + }, + { + "epoch": 0.8008031198115757, + "grad_norm": 1.2639611959457397, + "learning_rate": 0.00014661569944785514, + "loss": 0.2158, + "step": 20740 + }, + { + "epoch": 0.8011892351056026, + "grad_norm": 2.0752620697021484, + "learning_rate": 0.00014658995842825335, + "loss": 0.2782, + "step": 20750 + }, + { + "epoch": 0.8015753503996293, + "grad_norm": 4.334362983703613, + "learning_rate": 0.00014656421740865156, + "loss": 0.5083, + "step": 20760 + }, + { + "epoch": 0.8019614656936561, + "grad_norm": 2.233436107635498, + "learning_rate": 0.00014653847638904978, + "loss": 0.4378, + "step": 20770 + }, + { + "epoch": 0.8023475809876829, + "grad_norm": 0.5392621755599976, + "learning_rate": 0.000146512735369448, + "loss": 0.4467, + "step": 20780 + }, + { + "epoch": 0.8027336962817098, + "grad_norm": 0.3768058121204376, + "learning_rate": 0.0001464869943498462, + "loss": 0.3542, + "step": 20790 + }, + { + "epoch": 0.8031198115757365, + "grad_norm": 0.5712292194366455, + "learning_rate": 0.00014646125333024442, + "loss": 0.281, + "step": 20800 + }, + { + "epoch": 0.8035059268697633, + "grad_norm": 0.8674315214157104, + "learning_rate": 0.00014643551231064263, + "loss": 0.2945, + "step": 20810 + }, + { + "epoch": 0.8038920421637901, + "grad_norm": 1.6206015348434448, + "learning_rate": 0.00014640977129104084, + "loss": 0.2627, + "step": 20820 + }, + { + "epoch": 0.804278157457817, + "grad_norm": 1.8807138204574585, + "learning_rate": 0.00014638403027143906, + "loss": 0.3768, + "step": 20830 + }, + { + "epoch": 0.8046642727518437, + "grad_norm": 0.9721212387084961, + "learning_rate": 0.00014635828925183727, + "loss": 0.4511, + "step": 20840 + }, + { + "epoch": 0.8050503880458705, + "grad_norm": 0.569038987159729, + "learning_rate": 0.00014633254823223548, + "loss": 0.2235, + "step": 20850 + }, + { + "epoch": 0.8054365033398972, + "grad_norm": 0.5981199741363525, + "learning_rate": 0.0001463068072126337, + "loss": 0.1413, + "step": 20860 + }, + { + "epoch": 0.8058226186339241, + "grad_norm": 3.696936845779419, + "learning_rate": 0.00014628106619303194, + "loss": 0.3779, + "step": 20870 
+ }, + { + "epoch": 0.8062087339279509, + "grad_norm": 1.5039314031600952, + "learning_rate": 0.00014625532517343012, + "loss": 0.2117, + "step": 20880 + }, + { + "epoch": 0.8065948492219777, + "grad_norm": 1.7800476551055908, + "learning_rate": 0.00014622958415382834, + "loss": 0.2343, + "step": 20890 + }, + { + "epoch": 0.8069809645160044, + "grad_norm": 0.18587611615657806, + "learning_rate": 0.00014620384313422655, + "loss": 0.2229, + "step": 20900 + }, + { + "epoch": 0.8073670798100313, + "grad_norm": 3.5351600646972656, + "learning_rate": 0.00014617810211462476, + "loss": 0.3632, + "step": 20910 + }, + { + "epoch": 0.8077531951040581, + "grad_norm": 3.9036381244659424, + "learning_rate": 0.00014615236109502298, + "loss": 0.2941, + "step": 20920 + }, + { + "epoch": 0.8081393103980848, + "grad_norm": 1.9554537534713745, + "learning_rate": 0.0001461266200754212, + "loss": 0.2685, + "step": 20930 + }, + { + "epoch": 0.8085254256921116, + "grad_norm": 1.0424940586090088, + "learning_rate": 0.00014610087905581943, + "loss": 0.2617, + "step": 20940 + }, + { + "epoch": 0.8089115409861385, + "grad_norm": 6.593061923980713, + "learning_rate": 0.00014607513803621762, + "loss": 0.5034, + "step": 20950 + }, + { + "epoch": 0.8092976562801653, + "grad_norm": 11.373255729675293, + "learning_rate": 0.00014604939701661583, + "loss": 0.2694, + "step": 20960 + }, + { + "epoch": 0.809683771574192, + "grad_norm": 1.4843833446502686, + "learning_rate": 0.00014602365599701404, + "loss": 0.4526, + "step": 20970 + }, + { + "epoch": 0.8100698868682189, + "grad_norm": 3.6086366176605225, + "learning_rate": 0.00014599791497741225, + "loss": 0.4536, + "step": 20980 + }, + { + "epoch": 0.8104560021622457, + "grad_norm": 0.6381124258041382, + "learning_rate": 0.00014597217395781047, + "loss": 0.2918, + "step": 20990 + }, + { + "epoch": 0.8108421174562724, + "grad_norm": 1.9507087469100952, + "learning_rate": 0.00014594643293820868, + "loss": 0.1625, + "step": 21000 + }, + { + "epoch": 0.8112282327502992, + "grad_norm": 0.954914391040802, + "learning_rate": 0.00014592069191860692, + "loss": 0.4204, + "step": 21010 + }, + { + "epoch": 0.8116143480443261, + "grad_norm": 2.633601427078247, + "learning_rate": 0.0001458949508990051, + "loss": 0.4017, + "step": 21020 + }, + { + "epoch": 0.8120004633383529, + "grad_norm": 0.7492280602455139, + "learning_rate": 0.00014586920987940332, + "loss": 0.3209, + "step": 21030 + }, + { + "epoch": 0.8123865786323796, + "grad_norm": 1.1670303344726562, + "learning_rate": 0.00014584346885980153, + "loss": 0.3361, + "step": 21040 + }, + { + "epoch": 0.8127726939264064, + "grad_norm": 0.8462283611297607, + "learning_rate": 0.00014581772784019975, + "loss": 0.3156, + "step": 21050 + }, + { + "epoch": 0.8131588092204333, + "grad_norm": 2.151671886444092, + "learning_rate": 0.000145791986820598, + "loss": 0.2081, + "step": 21060 + }, + { + "epoch": 0.81354492451446, + "grad_norm": 1.0742170810699463, + "learning_rate": 0.00014576624580099617, + "loss": 0.2247, + "step": 21070 + }, + { + "epoch": 0.8139310398084868, + "grad_norm": 1.2256931066513062, + "learning_rate": 0.00014574050478139442, + "loss": 0.3542, + "step": 21080 + }, + { + "epoch": 0.8143171551025136, + "grad_norm": 3.740055561065674, + "learning_rate": 0.0001457147637617926, + "loss": 0.1856, + "step": 21090 + }, + { + "epoch": 0.8147032703965404, + "grad_norm": 0.03290783613920212, + "learning_rate": 0.00014568902274219081, + "loss": 0.2616, + "step": 21100 + }, + { + "epoch": 0.8150893856905672, + "grad_norm": 
0.13995541632175446, + "learning_rate": 0.00014566328172258903, + "loss": 0.2107, + "step": 21110 + }, + { + "epoch": 0.815475500984594, + "grad_norm": 0.44371533393859863, + "learning_rate": 0.00014563754070298724, + "loss": 0.3091, + "step": 21120 + }, + { + "epoch": 0.8158616162786207, + "grad_norm": 2.7269155979156494, + "learning_rate": 0.00014561179968338548, + "loss": 0.5674, + "step": 21130 + }, + { + "epoch": 0.8162477315726476, + "grad_norm": 0.7148515582084656, + "learning_rate": 0.00014558605866378367, + "loss": 0.4721, + "step": 21140 + }, + { + "epoch": 0.8166338468666744, + "grad_norm": 1.0939961671829224, + "learning_rate": 0.0001455603176441819, + "loss": 0.2666, + "step": 21150 + }, + { + "epoch": 0.8170199621607012, + "grad_norm": 1.7923939228057861, + "learning_rate": 0.0001455345766245801, + "loss": 0.2634, + "step": 21160 + }, + { + "epoch": 0.8174060774547279, + "grad_norm": 0.725130021572113, + "learning_rate": 0.00014550883560497834, + "loss": 0.1968, + "step": 21170 + }, + { + "epoch": 0.8177921927487548, + "grad_norm": 0.443892240524292, + "learning_rate": 0.00014548309458537655, + "loss": 0.556, + "step": 21180 + }, + { + "epoch": 0.8181783080427816, + "grad_norm": 1.3551362752914429, + "learning_rate": 0.00014545735356577473, + "loss": 0.4115, + "step": 21190 + }, + { + "epoch": 0.8185644233368083, + "grad_norm": 0.6360037922859192, + "learning_rate": 0.00014543161254617297, + "loss": 0.3176, + "step": 21200 + }, + { + "epoch": 0.8189505386308351, + "grad_norm": 2.634549140930176, + "learning_rate": 0.00014540587152657116, + "loss": 0.3662, + "step": 21210 + }, + { + "epoch": 0.819336653924862, + "grad_norm": 3.267479181289673, + "learning_rate": 0.0001453801305069694, + "loss": 0.2925, + "step": 21220 + }, + { + "epoch": 0.8197227692188888, + "grad_norm": 1.3607991933822632, + "learning_rate": 0.0001453543894873676, + "loss": 0.1795, + "step": 21230 + }, + { + "epoch": 0.8201088845129155, + "grad_norm": 0.6499636769294739, + "learning_rate": 0.00014532864846776583, + "loss": 0.4451, + "step": 21240 + }, + { + "epoch": 0.8204949998069424, + "grad_norm": 0.7486141920089722, + "learning_rate": 0.00014530290744816404, + "loss": 0.3982, + "step": 21250 + }, + { + "epoch": 0.8208811151009692, + "grad_norm": 0.6481244564056396, + "learning_rate": 0.00014527716642856223, + "loss": 0.3358, + "step": 21260 + }, + { + "epoch": 0.8212672303949959, + "grad_norm": 1.0736982822418213, + "learning_rate": 0.00014525142540896047, + "loss": 0.5264, + "step": 21270 + }, + { + "epoch": 0.8216533456890227, + "grad_norm": 2.0467801094055176, + "learning_rate": 0.00014522568438935865, + "loss": 0.3645, + "step": 21280 + }, + { + "epoch": 0.8220394609830496, + "grad_norm": 2.671499013900757, + "learning_rate": 0.0001451999433697569, + "loss": 0.4779, + "step": 21290 + }, + { + "epoch": 0.8224255762770764, + "grad_norm": 1.4449695348739624, + "learning_rate": 0.00014517420235015508, + "loss": 0.3555, + "step": 21300 + }, + { + "epoch": 0.8228116915711031, + "grad_norm": 1.7484570741653442, + "learning_rate": 0.00014514846133055332, + "loss": 0.2921, + "step": 21310 + }, + { + "epoch": 0.8231978068651299, + "grad_norm": 0.9985783100128174, + "learning_rate": 0.00014512272031095153, + "loss": 0.1861, + "step": 21320 + }, + { + "epoch": 0.8235839221591568, + "grad_norm": 2.0824766159057617, + "learning_rate": 0.00014509697929134972, + "loss": 0.3582, + "step": 21330 + }, + { + "epoch": 0.8239700374531835, + "grad_norm": 0.8448216915130615, + "learning_rate": 0.00014507123827174796, + 
"loss": 0.3674, + "step": 21340 + }, + { + "epoch": 0.8243561527472103, + "grad_norm": 2.027111053466797, + "learning_rate": 0.00014504549725214615, + "loss": 0.3297, + "step": 21350 + }, + { + "epoch": 0.8247422680412371, + "grad_norm": 1.560604214668274, + "learning_rate": 0.0001450197562325444, + "loss": 0.3303, + "step": 21360 + }, + { + "epoch": 0.825128383335264, + "grad_norm": 2.179563045501709, + "learning_rate": 0.0001449940152129426, + "loss": 0.1704, + "step": 21370 + }, + { + "epoch": 0.8255144986292907, + "grad_norm": 1.6268993616104126, + "learning_rate": 0.00014496827419334081, + "loss": 0.3316, + "step": 21380 + }, + { + "epoch": 0.8259006139233175, + "grad_norm": 0.8986232280731201, + "learning_rate": 0.00014494253317373903, + "loss": 0.3361, + "step": 21390 + }, + { + "epoch": 0.8262867292173443, + "grad_norm": 0.8017566204071045, + "learning_rate": 0.00014491679215413721, + "loss": 0.3992, + "step": 21400 + }, + { + "epoch": 0.8266728445113711, + "grad_norm": 0.879162073135376, + "learning_rate": 0.00014489105113453545, + "loss": 0.3747, + "step": 21410 + }, + { + "epoch": 0.8270589598053979, + "grad_norm": 1.948309302330017, + "learning_rate": 0.00014486531011493364, + "loss": 0.2323, + "step": 21420 + }, + { + "epoch": 0.8274450750994247, + "grad_norm": 1.34186851978302, + "learning_rate": 0.00014483956909533188, + "loss": 0.3856, + "step": 21430 + }, + { + "epoch": 0.8278311903934514, + "grad_norm": 1.3884105682373047, + "learning_rate": 0.0001448138280757301, + "loss": 0.3044, + "step": 21440 + }, + { + "epoch": 0.8282173056874783, + "grad_norm": 1.3283358812332153, + "learning_rate": 0.0001447880870561283, + "loss": 0.3624, + "step": 21450 + }, + { + "epoch": 0.8286034209815051, + "grad_norm": 13.829493522644043, + "learning_rate": 0.00014476234603652652, + "loss": 0.2217, + "step": 21460 + }, + { + "epoch": 0.8289895362755318, + "grad_norm": 3.4602255821228027, + "learning_rate": 0.0001447366050169247, + "loss": 0.4964, + "step": 21470 + }, + { + "epoch": 0.8293756515695587, + "grad_norm": 0.42417749762535095, + "learning_rate": 0.00014471086399732295, + "loss": 0.2262, + "step": 21480 + }, + { + "epoch": 0.8297617668635855, + "grad_norm": 3.1674726009368896, + "learning_rate": 0.00014468512297772113, + "loss": 0.3327, + "step": 21490 + }, + { + "epoch": 0.8301478821576123, + "grad_norm": 0.7226410508155823, + "learning_rate": 0.00014465938195811937, + "loss": 0.3152, + "step": 21500 + }, + { + "epoch": 0.830533997451639, + "grad_norm": 0.7477544546127319, + "learning_rate": 0.0001446336409385176, + "loss": 0.3676, + "step": 21510 + }, + { + "epoch": 0.8309201127456659, + "grad_norm": 1.6237748861312866, + "learning_rate": 0.0001446078999189158, + "loss": 0.336, + "step": 21520 + }, + { + "epoch": 0.8313062280396927, + "grad_norm": 2.8118655681610107, + "learning_rate": 0.00014458215889931401, + "loss": 0.3513, + "step": 21530 + }, + { + "epoch": 0.8316923433337194, + "grad_norm": 2.6571335792541504, + "learning_rate": 0.0001445564178797122, + "loss": 0.4008, + "step": 21540 + }, + { + "epoch": 0.8320784586277462, + "grad_norm": 0.7042214870452881, + "learning_rate": 0.00014453067686011044, + "loss": 0.3433, + "step": 21550 + }, + { + "epoch": 0.8324645739217731, + "grad_norm": 0.1551884561777115, + "learning_rate": 0.00014450493584050865, + "loss": 0.1748, + "step": 21560 + }, + { + "epoch": 0.8328506892157999, + "grad_norm": 1.2595586776733398, + "learning_rate": 0.00014447919482090687, + "loss": 0.2567, + "step": 21570 + }, + { + "epoch": 0.8332368045098266, + 
"grad_norm": 3.800837516784668, + "learning_rate": 0.00014445345380130508, + "loss": 0.31, + "step": 21580 + }, + { + "epoch": 0.8336229198038534, + "grad_norm": 2.2269585132598877, + "learning_rate": 0.0001444277127817033, + "loss": 0.5146, + "step": 21590 + }, + { + "epoch": 0.8340090350978803, + "grad_norm": 0.5263709425926208, + "learning_rate": 0.0001444019717621015, + "loss": 0.2369, + "step": 21600 + }, + { + "epoch": 0.834395150391907, + "grad_norm": 0.27564361691474915, + "learning_rate": 0.00014437623074249972, + "loss": 0.308, + "step": 21610 + }, + { + "epoch": 0.8347812656859338, + "grad_norm": 4.639162540435791, + "learning_rate": 0.00014435048972289793, + "loss": 0.3806, + "step": 21620 + }, + { + "epoch": 0.8351673809799606, + "grad_norm": 0.9607310891151428, + "learning_rate": 0.00014432474870329615, + "loss": 0.2245, + "step": 21630 + }, + { + "epoch": 0.8355534962739875, + "grad_norm": 4.01082706451416, + "learning_rate": 0.00014429900768369436, + "loss": 0.3934, + "step": 21640 + }, + { + "epoch": 0.8359396115680142, + "grad_norm": 0.9401382803916931, + "learning_rate": 0.00014427326666409257, + "loss": 0.4726, + "step": 21650 + }, + { + "epoch": 0.836325726862041, + "grad_norm": 2.1189887523651123, + "learning_rate": 0.0001442475256444908, + "loss": 0.5434, + "step": 21660 + }, + { + "epoch": 0.8367118421560678, + "grad_norm": 2.370849132537842, + "learning_rate": 0.000144221784624889, + "loss": 0.3885, + "step": 21670 + }, + { + "epoch": 0.8370979574500946, + "grad_norm": 0.595461368560791, + "learning_rate": 0.00014419604360528721, + "loss": 0.3811, + "step": 21680 + }, + { + "epoch": 0.8374840727441214, + "grad_norm": 0.9013121128082275, + "learning_rate": 0.00014417030258568543, + "loss": 0.2406, + "step": 21690 + }, + { + "epoch": 0.8378701880381482, + "grad_norm": 1.3803203105926514, + "learning_rate": 0.00014414456156608364, + "loss": 0.2197, + "step": 21700 + }, + { + "epoch": 0.8382563033321749, + "grad_norm": 1.6163750886917114, + "learning_rate": 0.00014411882054648185, + "loss": 0.2622, + "step": 21710 + }, + { + "epoch": 0.8386424186262018, + "grad_norm": 3.604384660720825, + "learning_rate": 0.00014409307952688007, + "loss": 0.3688, + "step": 21720 + }, + { + "epoch": 0.8390285339202286, + "grad_norm": 1.4415024518966675, + "learning_rate": 0.00014406733850727828, + "loss": 0.2375, + "step": 21730 + }, + { + "epoch": 0.8394146492142553, + "grad_norm": 1.4819844961166382, + "learning_rate": 0.0001440415974876765, + "loss": 0.4065, + "step": 21740 + }, + { + "epoch": 0.8398007645082822, + "grad_norm": 1.3991562128067017, + "learning_rate": 0.0001440158564680747, + "loss": 0.2942, + "step": 21750 + }, + { + "epoch": 0.840186879802309, + "grad_norm": 2.022538185119629, + "learning_rate": 0.00014399011544847292, + "loss": 0.2699, + "step": 21760 + }, + { + "epoch": 0.8405729950963358, + "grad_norm": 2.418179512023926, + "learning_rate": 0.00014396437442887113, + "loss": 0.3481, + "step": 21770 + }, + { + "epoch": 0.8409591103903625, + "grad_norm": 0.930482029914856, + "learning_rate": 0.00014393863340926935, + "loss": 0.3257, + "step": 21780 + }, + { + "epoch": 0.8413452256843894, + "grad_norm": 3.616676092147827, + "learning_rate": 0.00014391289238966756, + "loss": 0.3844, + "step": 21790 + }, + { + "epoch": 0.8417313409784162, + "grad_norm": 1.7993167638778687, + "learning_rate": 0.00014388715137006577, + "loss": 0.4569, + "step": 21800 + }, + { + "epoch": 0.8421174562724429, + "grad_norm": 1.9243824481964111, + "learning_rate": 0.000143861410350464, + 
"loss": 0.282, + "step": 21810 + }, + { + "epoch": 0.8425035715664697, + "grad_norm": 1.6578466892242432, + "learning_rate": 0.0001438356693308622, + "loss": 0.314, + "step": 21820 + }, + { + "epoch": 0.8428896868604966, + "grad_norm": 1.4833110570907593, + "learning_rate": 0.00014380992831126041, + "loss": 0.2698, + "step": 21830 + }, + { + "epoch": 0.8432758021545234, + "grad_norm": 1.9081813097000122, + "learning_rate": 0.00014378418729165863, + "loss": 0.2392, + "step": 21840 + }, + { + "epoch": 0.8436619174485501, + "grad_norm": 1.8436548709869385, + "learning_rate": 0.00014375844627205684, + "loss": 0.1906, + "step": 21850 + }, + { + "epoch": 0.8440480327425769, + "grad_norm": 4.679655075073242, + "learning_rate": 0.00014373270525245505, + "loss": 0.6446, + "step": 21860 + }, + { + "epoch": 0.8444341480366038, + "grad_norm": 1.8216800689697266, + "learning_rate": 0.0001437069642328533, + "loss": 0.3272, + "step": 21870 + }, + { + "epoch": 0.8448202633306305, + "grad_norm": 1.0107386112213135, + "learning_rate": 0.00014368122321325148, + "loss": 0.3003, + "step": 21880 + }, + { + "epoch": 0.8452063786246573, + "grad_norm": 0.9573041796684265, + "learning_rate": 0.0001436554821936497, + "loss": 0.1757, + "step": 21890 + }, + { + "epoch": 0.8455924939186841, + "grad_norm": 0.9367936253547668, + "learning_rate": 0.0001436297411740479, + "loss": 0.2166, + "step": 21900 + }, + { + "epoch": 0.845978609212711, + "grad_norm": 3.1247951984405518, + "learning_rate": 0.00014360400015444612, + "loss": 0.3488, + "step": 21910 + }, + { + "epoch": 0.8463647245067377, + "grad_norm": 3.9438281059265137, + "learning_rate": 0.00014357825913484433, + "loss": 0.4498, + "step": 21920 + }, + { + "epoch": 0.8467508398007645, + "grad_norm": 0.909572958946228, + "learning_rate": 0.00014355251811524255, + "loss": 0.2698, + "step": 21930 + }, + { + "epoch": 0.8471369550947913, + "grad_norm": 2.6619715690612793, + "learning_rate": 0.0001435267770956408, + "loss": 0.4204, + "step": 21940 + }, + { + "epoch": 0.8475230703888181, + "grad_norm": 0.6143421530723572, + "learning_rate": 0.00014350103607603897, + "loss": 0.3573, + "step": 21950 + }, + { + "epoch": 0.8479091856828449, + "grad_norm": 0.3222682476043701, + "learning_rate": 0.0001434752950564372, + "loss": 0.2172, + "step": 21960 + }, + { + "epoch": 0.8482953009768717, + "grad_norm": 1.772538185119629, + "learning_rate": 0.0001434495540368354, + "loss": 0.4203, + "step": 21970 + }, + { + "epoch": 0.8486814162708984, + "grad_norm": 1.6327133178710938, + "learning_rate": 0.0001434238130172336, + "loss": 0.2153, + "step": 21980 + }, + { + "epoch": 0.8490675315649253, + "grad_norm": 1.0445518493652344, + "learning_rate": 0.00014339807199763183, + "loss": 0.3392, + "step": 21990 + }, + { + "epoch": 0.8494536468589521, + "grad_norm": 3.6096575260162354, + "learning_rate": 0.00014337233097803004, + "loss": 0.2691, + "step": 22000 + }, + { + "epoch": 0.8498397621529789, + "grad_norm": 1.4343204498291016, + "learning_rate": 0.00014334658995842828, + "loss": 0.3118, + "step": 22010 + }, + { + "epoch": 0.8502258774470057, + "grad_norm": 1.0348806381225586, + "learning_rate": 0.00014332084893882647, + "loss": 0.2886, + "step": 22020 + }, + { + "epoch": 0.8506119927410325, + "grad_norm": 0.5164201855659485, + "learning_rate": 0.00014329510791922468, + "loss": 0.2943, + "step": 22030 + }, + { + "epoch": 0.8509981080350593, + "grad_norm": 1.8109897375106812, + "learning_rate": 0.0001432693668996229, + "loss": 0.4021, + "step": 22040 + }, + { + "epoch": 0.851384223329086, + 
"grad_norm": 2.7065579891204834, + "learning_rate": 0.0001432436258800211, + "loss": 0.371, + "step": 22050 + }, + { + "epoch": 0.8517703386231129, + "grad_norm": 2.3028764724731445, + "learning_rate": 0.00014321788486041935, + "loss": 0.5026, + "step": 22060 + }, + { + "epoch": 0.8521564539171397, + "grad_norm": 1.3945609331130981, + "learning_rate": 0.00014319214384081753, + "loss": 0.4444, + "step": 22070 + }, + { + "epoch": 0.8525425692111664, + "grad_norm": 2.407951593399048, + "learning_rate": 0.00014316640282121577, + "loss": 0.4465, + "step": 22080 + }, + { + "epoch": 0.8529286845051932, + "grad_norm": 4.120944976806641, + "learning_rate": 0.00014314066180161396, + "loss": 0.3142, + "step": 22090 + }, + { + "epoch": 0.8533147997992201, + "grad_norm": 1.8841919898986816, + "learning_rate": 0.00014311492078201217, + "loss": 0.3609, + "step": 22100 + }, + { + "epoch": 0.8537009150932469, + "grad_norm": 5.1519951820373535, + "learning_rate": 0.00014308917976241039, + "loss": 0.3062, + "step": 22110 + }, + { + "epoch": 0.8540870303872736, + "grad_norm": 2.7280924320220947, + "learning_rate": 0.0001430634387428086, + "loss": 0.3678, + "step": 22120 + }, + { + "epoch": 0.8544731456813004, + "grad_norm": 0.23237809538841248, + "learning_rate": 0.00014303769772320684, + "loss": 0.2979, + "step": 22130 + }, + { + "epoch": 0.8548592609753273, + "grad_norm": 1.0587934255599976, + "learning_rate": 0.00014301195670360503, + "loss": 0.5672, + "step": 22140 + }, + { + "epoch": 0.855245376269354, + "grad_norm": 1.854447603225708, + "learning_rate": 0.00014298621568400327, + "loss": 0.3657, + "step": 22150 + }, + { + "epoch": 0.8556314915633808, + "grad_norm": 0.9766449332237244, + "learning_rate": 0.00014296047466440145, + "loss": 0.3219, + "step": 22160 + }, + { + "epoch": 0.8560176068574076, + "grad_norm": 1.7281047105789185, + "learning_rate": 0.00014293473364479967, + "loss": 0.3485, + "step": 22170 + }, + { + "epoch": 0.8564037221514345, + "grad_norm": 1.8366886377334595, + "learning_rate": 0.0001429089926251979, + "loss": 0.2929, + "step": 22180 + }, + { + "epoch": 0.8567898374454612, + "grad_norm": 0.8708136677742004, + "learning_rate": 0.0001428832516055961, + "loss": 0.3633, + "step": 22190 + }, + { + "epoch": 0.857175952739488, + "grad_norm": 1.5010342597961426, + "learning_rate": 0.00014285751058599433, + "loss": 0.3646, + "step": 22200 + }, + { + "epoch": 0.8575620680335148, + "grad_norm": 1.7438324689865112, + "learning_rate": 0.00014283176956639252, + "loss": 0.3093, + "step": 22210 + }, + { + "epoch": 0.8579481833275416, + "grad_norm": 1.5954341888427734, + "learning_rate": 0.00014280602854679076, + "loss": 0.2435, + "step": 22220 + }, + { + "epoch": 0.8583342986215684, + "grad_norm": 2.5279555320739746, + "learning_rate": 0.00014278028752718895, + "loss": 0.2526, + "step": 22230 + }, + { + "epoch": 0.8587204139155952, + "grad_norm": 3.4773006439208984, + "learning_rate": 0.00014275454650758716, + "loss": 0.3763, + "step": 22240 + }, + { + "epoch": 0.8591065292096219, + "grad_norm": 0.25110548734664917, + "learning_rate": 0.0001427288054879854, + "loss": 0.2265, + "step": 22250 + }, + { + "epoch": 0.8594926445036488, + "grad_norm": 2.3060946464538574, + "learning_rate": 0.00014270306446838359, + "loss": 0.3756, + "step": 22260 + }, + { + "epoch": 0.8598787597976756, + "grad_norm": 2.206308364868164, + "learning_rate": 0.00014267732344878183, + "loss": 0.295, + "step": 22270 + }, + { + "epoch": 0.8602648750917024, + "grad_norm": 1.1059858798980713, + "learning_rate": 
0.00014265158242918, + "loss": 0.2382, + "step": 22280 + }, + { + "epoch": 0.8606509903857292, + "grad_norm": 0.045407798141241074, + "learning_rate": 0.00014262584140957825, + "loss": 0.1725, + "step": 22290 + }, + { + "epoch": 0.861037105679756, + "grad_norm": 0.2532581686973572, + "learning_rate": 0.00014260010038997644, + "loss": 0.3089, + "step": 22300 + }, + { + "epoch": 0.8614232209737828, + "grad_norm": 0.8851459622383118, + "learning_rate": 0.00014257435937037468, + "loss": 0.2721, + "step": 22310 + }, + { + "epoch": 0.8618093362678095, + "grad_norm": 2.9988598823547363, + "learning_rate": 0.0001425486183507729, + "loss": 0.3854, + "step": 22320 + }, + { + "epoch": 0.8621954515618364, + "grad_norm": 1.888629674911499, + "learning_rate": 0.00014252287733117108, + "loss": 0.4472, + "step": 22330 + }, + { + "epoch": 0.8625815668558632, + "grad_norm": 0.9517232179641724, + "learning_rate": 0.00014249713631156932, + "loss": 0.3094, + "step": 22340 + }, + { + "epoch": 0.86296768214989, + "grad_norm": 1.4752097129821777, + "learning_rate": 0.0001424713952919675, + "loss": 0.2649, + "step": 22350 + }, + { + "epoch": 0.8633537974439167, + "grad_norm": 1.642285704612732, + "learning_rate": 0.00014244565427236575, + "loss": 0.4418, + "step": 22360 + }, + { + "epoch": 0.8637399127379436, + "grad_norm": 2.2177469730377197, + "learning_rate": 0.00014241991325276396, + "loss": 0.6865, + "step": 22370 + }, + { + "epoch": 0.8641260280319704, + "grad_norm": 0.9089158773422241, + "learning_rate": 0.00014239417223316217, + "loss": 0.1842, + "step": 22380 + }, + { + "epoch": 0.8645121433259971, + "grad_norm": 1.108091115951538, + "learning_rate": 0.00014236843121356039, + "loss": 0.3181, + "step": 22390 + }, + { + "epoch": 0.8648982586200239, + "grad_norm": 3.1317670345306396, + "learning_rate": 0.00014234269019395857, + "loss": 0.4798, + "step": 22400 + }, + { + "epoch": 0.8652843739140508, + "grad_norm": 1.0352108478546143, + "learning_rate": 0.0001423169491743568, + "loss": 0.3473, + "step": 22410 + }, + { + "epoch": 0.8656704892080775, + "grad_norm": 0.48546215891838074, + "learning_rate": 0.000142291208154755, + "loss": 0.2731, + "step": 22420 + }, + { + "epoch": 0.8660566045021043, + "grad_norm": 1.1608140468597412, + "learning_rate": 0.00014226546713515324, + "loss": 0.3202, + "step": 22430 + }, + { + "epoch": 0.8664427197961311, + "grad_norm": 0.19237665832042694, + "learning_rate": 0.00014223972611555145, + "loss": 0.4985, + "step": 22440 + }, + { + "epoch": 0.866828835090158, + "grad_norm": 0.12056539207696915, + "learning_rate": 0.00014221398509594967, + "loss": 0.2071, + "step": 22450 + }, + { + "epoch": 0.8672149503841847, + "grad_norm": 1.416548252105713, + "learning_rate": 0.00014218824407634788, + "loss": 0.2572, + "step": 22460 + }, + { + "epoch": 0.8676010656782115, + "grad_norm": 0.816148042678833, + "learning_rate": 0.00014216250305674607, + "loss": 0.2368, + "step": 22470 + }, + { + "epoch": 0.8679871809722383, + "grad_norm": 3.2394118309020996, + "learning_rate": 0.0001421367620371443, + "loss": 0.3768, + "step": 22480 + }, + { + "epoch": 0.8683732962662651, + "grad_norm": 0.7187336087226868, + "learning_rate": 0.00014211102101754252, + "loss": 0.3297, + "step": 22490 + }, + { + "epoch": 0.8687594115602919, + "grad_norm": 0.5154927372932434, + "learning_rate": 0.00014208527999794073, + "loss": 0.3301, + "step": 22500 + }, + { + "epoch": 0.8691455268543187, + "grad_norm": 1.0461368560791016, + "learning_rate": 0.00014205953897833895, + "loss": 0.335, + "step": 22510 + }, + { + 
"epoch": 0.8695316421483454, + "grad_norm": 0.9720495343208313, + "learning_rate": 0.00014203379795873716, + "loss": 0.2405, + "step": 22520 + }, + { + "epoch": 0.8699177574423723, + "grad_norm": 2.147216558456421, + "learning_rate": 0.00014200805693913537, + "loss": 0.3291, + "step": 22530 + }, + { + "epoch": 0.8703038727363991, + "grad_norm": 1.162614345550537, + "learning_rate": 0.00014198231591953356, + "loss": 0.2705, + "step": 22540 + }, + { + "epoch": 0.8706899880304259, + "grad_norm": 0.5020268559455872, + "learning_rate": 0.0001419565748999318, + "loss": 0.4403, + "step": 22550 + }, + { + "epoch": 0.8710761033244527, + "grad_norm": 3.198425531387329, + "learning_rate": 0.00014193083388033, + "loss": 0.2244, + "step": 22560 + }, + { + "epoch": 0.8714622186184795, + "grad_norm": 2.4578161239624023, + "learning_rate": 0.00014190509286072823, + "loss": 0.3993, + "step": 22570 + }, + { + "epoch": 0.8718483339125063, + "grad_norm": 1.4559924602508545, + "learning_rate": 0.00014187935184112644, + "loss": 0.2631, + "step": 22580 + }, + { + "epoch": 0.872234449206533, + "grad_norm": 0.6834856271743774, + "learning_rate": 0.00014185361082152465, + "loss": 0.3183, + "step": 22590 + }, + { + "epoch": 0.8726205645005599, + "grad_norm": 3.9718177318573, + "learning_rate": 0.00014182786980192287, + "loss": 0.439, + "step": 22600 + }, + { + "epoch": 0.8730066797945867, + "grad_norm": 1.7797685861587524, + "learning_rate": 0.00014180212878232105, + "loss": 0.2938, + "step": 22610 + }, + { + "epoch": 0.8733927950886134, + "grad_norm": 2.137479543685913, + "learning_rate": 0.0001417763877627193, + "loss": 0.4733, + "step": 22620 + }, + { + "epoch": 0.8737789103826402, + "grad_norm": 0.7577596306800842, + "learning_rate": 0.0001417506467431175, + "loss": 0.3599, + "step": 22630 + }, + { + "epoch": 0.8741650256766671, + "grad_norm": 0.8944536447525024, + "learning_rate": 0.00014172490572351572, + "loss": 0.3393, + "step": 22640 + }, + { + "epoch": 0.8745511409706939, + "grad_norm": 1.59170663356781, + "learning_rate": 0.00014169916470391393, + "loss": 0.3578, + "step": 22650 + }, + { + "epoch": 0.8749372562647206, + "grad_norm": 0.9852517247200012, + "learning_rate": 0.00014167342368431215, + "loss": 0.4203, + "step": 22660 + }, + { + "epoch": 0.8753233715587474, + "grad_norm": 1.0319880247116089, + "learning_rate": 0.00014164768266471036, + "loss": 0.3332, + "step": 22670 + }, + { + "epoch": 0.8757094868527743, + "grad_norm": 3.713357925415039, + "learning_rate": 0.00014162194164510857, + "loss": 0.412, + "step": 22680 + }, + { + "epoch": 0.876095602146801, + "grad_norm": 1.3197567462921143, + "learning_rate": 0.00014159620062550679, + "loss": 0.286, + "step": 22690 + }, + { + "epoch": 0.8764817174408278, + "grad_norm": 3.604928493499756, + "learning_rate": 0.000141570459605905, + "loss": 0.4206, + "step": 22700 + }, + { + "epoch": 0.8768678327348546, + "grad_norm": 3.1074795722961426, + "learning_rate": 0.0001415447185863032, + "loss": 0.2261, + "step": 22710 + }, + { + "epoch": 0.8772539480288815, + "grad_norm": 2.855581760406494, + "learning_rate": 0.00014151897756670143, + "loss": 0.6825, + "step": 22720 + }, + { + "epoch": 0.8776400633229082, + "grad_norm": 3.5756995677948, + "learning_rate": 0.00014149323654709964, + "loss": 0.4283, + "step": 22730 + }, + { + "epoch": 0.878026178616935, + "grad_norm": 1.4255709648132324, + "learning_rate": 0.00014146749552749785, + "loss": 0.3243, + "step": 22740 + }, + { + "epoch": 0.8784122939109618, + "grad_norm": 0.3095746338367462, + "learning_rate": 
0.00014144175450789607, + "loss": 0.1052, + "step": 22750 + }, + { + "epoch": 0.8787984092049886, + "grad_norm": 1.4129611253738403, + "learning_rate": 0.00014141601348829428, + "loss": 0.2746, + "step": 22760 + }, + { + "epoch": 0.8791845244990154, + "grad_norm": 0.6448315382003784, + "learning_rate": 0.0001413902724686925, + "loss": 0.3312, + "step": 22770 + }, + { + "epoch": 0.8795706397930422, + "grad_norm": 1.6328849792480469, + "learning_rate": 0.0001413645314490907, + "loss": 0.2346, + "step": 22780 + }, + { + "epoch": 0.879956755087069, + "grad_norm": 1.0974128246307373, + "learning_rate": 0.00014133879042948892, + "loss": 0.2446, + "step": 22790 + }, + { + "epoch": 0.8803428703810958, + "grad_norm": 2.3657541275024414, + "learning_rate": 0.00014131304940988713, + "loss": 0.3439, + "step": 22800 + }, + { + "epoch": 0.8807289856751226, + "grad_norm": 0.8959445953369141, + "learning_rate": 0.00014128730839028535, + "loss": 0.2896, + "step": 22810 + }, + { + "epoch": 0.8811151009691494, + "grad_norm": 1.5202107429504395, + "learning_rate": 0.00014126156737068356, + "loss": 0.1951, + "step": 22820 + }, + { + "epoch": 0.8815012162631762, + "grad_norm": 1.3710687160491943, + "learning_rate": 0.00014123582635108177, + "loss": 0.3193, + "step": 22830 + }, + { + "epoch": 0.881887331557203, + "grad_norm": 2.18868088722229, + "learning_rate": 0.00014121008533147999, + "loss": 0.3535, + "step": 22840 + }, + { + "epoch": 0.8822734468512298, + "grad_norm": 0.8251023888587952, + "learning_rate": 0.0001411843443118782, + "loss": 0.2536, + "step": 22850 + }, + { + "epoch": 0.8826595621452565, + "grad_norm": 1.0674525499343872, + "learning_rate": 0.0001411586032922764, + "loss": 0.3482, + "step": 22860 + }, + { + "epoch": 0.8830456774392834, + "grad_norm": 2.1199145317077637, + "learning_rate": 0.00014113286227267463, + "loss": 0.201, + "step": 22870 + }, + { + "epoch": 0.8834317927333102, + "grad_norm": 1.1958723068237305, + "learning_rate": 0.00014110712125307284, + "loss": 0.2813, + "step": 22880 + }, + { + "epoch": 0.883817908027337, + "grad_norm": 1.7805982828140259, + "learning_rate": 0.00014108138023347105, + "loss": 0.5317, + "step": 22890 + }, + { + "epoch": 0.8842040233213637, + "grad_norm": 1.9648222923278809, + "learning_rate": 0.00014105563921386926, + "loss": 0.3022, + "step": 22900 + }, + { + "epoch": 0.8845901386153906, + "grad_norm": 0.9053369164466858, + "learning_rate": 0.00014102989819426748, + "loss": 0.2886, + "step": 22910 + }, + { + "epoch": 0.8849762539094174, + "grad_norm": 1.251861333847046, + "learning_rate": 0.0001410041571746657, + "loss": 0.2632, + "step": 22920 + }, + { + "epoch": 0.8853623692034441, + "grad_norm": 3.8411691188812256, + "learning_rate": 0.0001409784161550639, + "loss": 0.3056, + "step": 22930 + }, + { + "epoch": 0.8857484844974709, + "grad_norm": 0.5969072580337524, + "learning_rate": 0.00014095267513546212, + "loss": 0.3806, + "step": 22940 + }, + { + "epoch": 0.8861345997914978, + "grad_norm": 4.688140392303467, + "learning_rate": 0.00014092693411586033, + "loss": 0.3318, + "step": 22950 + }, + { + "epoch": 0.8865207150855245, + "grad_norm": 4.2694993019104, + "learning_rate": 0.00014090119309625854, + "loss": 0.34, + "step": 22960 + }, + { + "epoch": 0.8869068303795513, + "grad_norm": 2.4169955253601074, + "learning_rate": 0.00014087545207665676, + "loss": 0.466, + "step": 22970 + }, + { + "epoch": 0.8872929456735781, + "grad_norm": 0.044384077191352844, + "learning_rate": 0.00014084971105705497, + "loss": 0.3196, + "step": 22980 + }, + { + 
"epoch": 0.887679060967605, + "grad_norm": 0.6931707262992859, + "learning_rate": 0.0001408239700374532, + "loss": 0.2166, + "step": 22990 + }, + { + "epoch": 0.8880651762616317, + "grad_norm": 3.195596933364868, + "learning_rate": 0.0001407982290178514, + "loss": 0.404, + "step": 23000 + }, + { + "epoch": 0.8884512915556585, + "grad_norm": 2.055058002471924, + "learning_rate": 0.0001407724879982496, + "loss": 0.3685, + "step": 23010 + }, + { + "epoch": 0.8888374068496853, + "grad_norm": 0.7302665114402771, + "learning_rate": 0.00014074674697864782, + "loss": 0.2247, + "step": 23020 + }, + { + "epoch": 0.8892235221437121, + "grad_norm": 1.1183364391326904, + "learning_rate": 0.00014072100595904604, + "loss": 0.3699, + "step": 23030 + }, + { + "epoch": 0.8896096374377389, + "grad_norm": 1.040553092956543, + "learning_rate": 0.00014069526493944425, + "loss": 0.1791, + "step": 23040 + }, + { + "epoch": 0.8899957527317657, + "grad_norm": 1.5321402549743652, + "learning_rate": 0.00014066952391984246, + "loss": 0.2533, + "step": 23050 + }, + { + "epoch": 0.8903818680257926, + "grad_norm": 0.298433780670166, + "learning_rate": 0.0001406437829002407, + "loss": 0.2291, + "step": 23060 + }, + { + "epoch": 0.8907679833198193, + "grad_norm": 2.563689947128296, + "learning_rate": 0.0001406180418806389, + "loss": 0.4175, + "step": 23070 + }, + { + "epoch": 0.8911540986138461, + "grad_norm": 3.0614495277404785, + "learning_rate": 0.00014059230086103713, + "loss": 0.2875, + "step": 23080 + }, + { + "epoch": 0.8915402139078729, + "grad_norm": 0.4387970268726349, + "learning_rate": 0.00014056655984143532, + "loss": 0.2982, + "step": 23090 + }, + { + "epoch": 0.8919263292018997, + "grad_norm": 2.5590367317199707, + "learning_rate": 0.00014054081882183353, + "loss": 0.2858, + "step": 23100 + }, + { + "epoch": 0.8923124444959265, + "grad_norm": 0.6369298696517944, + "learning_rate": 0.00014051507780223174, + "loss": 0.2889, + "step": 23110 + }, + { + "epoch": 0.8926985597899533, + "grad_norm": 0.5210187435150146, + "learning_rate": 0.00014048933678262996, + "loss": 0.2442, + "step": 23120 + }, + { + "epoch": 0.89308467508398, + "grad_norm": 3.5073516368865967, + "learning_rate": 0.0001404635957630282, + "loss": 0.1733, + "step": 23130 + }, + { + "epoch": 0.8934707903780069, + "grad_norm": 2.1915247440338135, + "learning_rate": 0.00014043785474342638, + "loss": 0.4282, + "step": 23140 + }, + { + "epoch": 0.8938569056720337, + "grad_norm": 0.8182128071784973, + "learning_rate": 0.00014041211372382462, + "loss": 0.5501, + "step": 23150 + }, + { + "epoch": 0.8942430209660605, + "grad_norm": 0.4261817932128906, + "learning_rate": 0.0001403863727042228, + "loss": 0.2192, + "step": 23160 + }, + { + "epoch": 0.8946291362600872, + "grad_norm": 1.244523525238037, + "learning_rate": 0.00014036063168462102, + "loss": 0.2909, + "step": 23170 + }, + { + "epoch": 0.8950152515541141, + "grad_norm": 1.1659152507781982, + "learning_rate": 0.00014033489066501926, + "loss": 0.3119, + "step": 23180 + }, + { + "epoch": 0.8954013668481409, + "grad_norm": 0.4217310845851898, + "learning_rate": 0.00014030914964541745, + "loss": 0.2454, + "step": 23190 + }, + { + "epoch": 0.8957874821421676, + "grad_norm": 1.5685316324234009, + "learning_rate": 0.0001402834086258157, + "loss": 0.3609, + "step": 23200 + }, + { + "epoch": 0.8961735974361944, + "grad_norm": 2.6524040699005127, + "learning_rate": 0.00014025766760621388, + "loss": 0.2508, + "step": 23210 + }, + { + "epoch": 0.8965597127302213, + "grad_norm": 2.4932234287261963, + 
"learning_rate": 0.00014023192658661212, + "loss": 0.4047, + "step": 23220 + }, + { + "epoch": 0.896945828024248, + "grad_norm": 0.5093832015991211, + "learning_rate": 0.0001402061855670103, + "loss": 0.1639, + "step": 23230 + }, + { + "epoch": 0.8973319433182748, + "grad_norm": 1.1632994413375854, + "learning_rate": 0.00014018044454740852, + "loss": 0.2457, + "step": 23240 + }, + { + "epoch": 0.8977180586123016, + "grad_norm": 2.181727647781372, + "learning_rate": 0.00014015470352780676, + "loss": 0.3784, + "step": 23250 + }, + { + "epoch": 0.8981041739063285, + "grad_norm": 3.8110599517822266, + "learning_rate": 0.00014012896250820494, + "loss": 0.282, + "step": 23260 + }, + { + "epoch": 0.8984902892003552, + "grad_norm": 2.8994619846343994, + "learning_rate": 0.00014010322148860318, + "loss": 0.3801, + "step": 23270 + }, + { + "epoch": 0.898876404494382, + "grad_norm": 1.2624458074569702, + "learning_rate": 0.00014007748046900137, + "loss": 0.3718, + "step": 23280 + }, + { + "epoch": 0.8992625197884088, + "grad_norm": 1.5995053052902222, + "learning_rate": 0.0001400517394493996, + "loss": 0.1828, + "step": 23290 + }, + { + "epoch": 0.8996486350824356, + "grad_norm": 2.251941680908203, + "learning_rate": 0.0001400259984297978, + "loss": 0.4434, + "step": 23300 + }, + { + "epoch": 0.9000347503764624, + "grad_norm": 1.1319392919540405, + "learning_rate": 0.000140000257410196, + "loss": 0.2479, + "step": 23310 + }, + { + "epoch": 0.9004208656704892, + "grad_norm": 2.075227737426758, + "learning_rate": 0.00013997451639059425, + "loss": 0.3079, + "step": 23320 + }, + { + "epoch": 0.9008069809645161, + "grad_norm": 0.6504748463630676, + "learning_rate": 0.00013994877537099244, + "loss": 0.2899, + "step": 23330 + }, + { + "epoch": 0.9011930962585428, + "grad_norm": 2.1623177528381348, + "learning_rate": 0.00013992303435139068, + "loss": 0.2651, + "step": 23340 + }, + { + "epoch": 0.9015792115525696, + "grad_norm": 2.159290313720703, + "learning_rate": 0.00013989729333178886, + "loss": 0.2975, + "step": 23350 + }, + { + "epoch": 0.9019653268465964, + "grad_norm": 0.7650458216667175, + "learning_rate": 0.0001398715523121871, + "loss": 0.2699, + "step": 23360 + }, + { + "epoch": 0.9023514421406232, + "grad_norm": 4.838365077972412, + "learning_rate": 0.00013984581129258532, + "loss": 0.4633, + "step": 23370 + }, + { + "epoch": 0.90273755743465, + "grad_norm": 4.598055362701416, + "learning_rate": 0.0001398200702729835, + "loss": 0.5255, + "step": 23380 + }, + { + "epoch": 0.9031236727286768, + "grad_norm": 0.9883280396461487, + "learning_rate": 0.00013979432925338174, + "loss": 0.5096, + "step": 23390 + }, + { + "epoch": 0.9035097880227035, + "grad_norm": 2.1574087142944336, + "learning_rate": 0.00013976858823377993, + "loss": 0.2999, + "step": 23400 + }, + { + "epoch": 0.9038959033167304, + "grad_norm": 1.7071588039398193, + "learning_rate": 0.00013974284721417817, + "loss": 0.4066, + "step": 23410 + }, + { + "epoch": 0.9042820186107572, + "grad_norm": 1.5206272602081299, + "learning_rate": 0.00013971710619457636, + "loss": 0.2841, + "step": 23420 + }, + { + "epoch": 0.904668133904784, + "grad_norm": 0.8649633526802063, + "learning_rate": 0.0001396913651749746, + "loss": 0.2263, + "step": 23430 + }, + { + "epoch": 0.9050542491988107, + "grad_norm": 0.35130754113197327, + "learning_rate": 0.0001396656241553728, + "loss": 0.3575, + "step": 23440 + }, + { + "epoch": 0.9054403644928376, + "grad_norm": 0.6659330725669861, + "learning_rate": 0.000139639883135771, + "loss": 0.2895, + "step": 23450 + }, 
+ { + "epoch": 0.9058264797868644, + "grad_norm": 1.1387370824813843, + "learning_rate": 0.00013961414211616924, + "loss": 0.3574, + "step": 23460 + }, + { + "epoch": 0.9062125950808911, + "grad_norm": 1.8786828517913818, + "learning_rate": 0.00013958840109656742, + "loss": 0.5127, + "step": 23470 + }, + { + "epoch": 0.9065987103749179, + "grad_norm": 1.1299179792404175, + "learning_rate": 0.00013956266007696566, + "loss": 0.2215, + "step": 23480 + }, + { + "epoch": 0.9069848256689448, + "grad_norm": 1.1256846189498901, + "learning_rate": 0.00013953691905736388, + "loss": 0.302, + "step": 23490 + }, + { + "epoch": 0.9073709409629716, + "grad_norm": 0.8697860836982727, + "learning_rate": 0.0001395111780377621, + "loss": 0.2846, + "step": 23500 + }, + { + "epoch": 0.9077570562569983, + "grad_norm": 1.4939324855804443, + "learning_rate": 0.0001394854370181603, + "loss": 0.2376, + "step": 23510 + }, + { + "epoch": 0.9081431715510251, + "grad_norm": 0.5483170747756958, + "learning_rate": 0.00013945969599855852, + "loss": 0.2546, + "step": 23520 + }, + { + "epoch": 0.908529286845052, + "grad_norm": 1.0681931972503662, + "learning_rate": 0.00013943395497895673, + "loss": 0.2236, + "step": 23530 + }, + { + "epoch": 0.9089154021390787, + "grad_norm": 1.9246234893798828, + "learning_rate": 0.00013940821395935492, + "loss": 0.3332, + "step": 23540 + }, + { + "epoch": 0.9093015174331055, + "grad_norm": 6.114970684051514, + "learning_rate": 0.00013938247293975316, + "loss": 0.3265, + "step": 23550 + }, + { + "epoch": 0.9096876327271323, + "grad_norm": 2.369112968444824, + "learning_rate": 0.00013935673192015137, + "loss": 0.3105, + "step": 23560 + }, + { + "epoch": 0.9100737480211591, + "grad_norm": 4.402872562408447, + "learning_rate": 0.00013933099090054958, + "loss": 0.3496, + "step": 23570 + }, + { + "epoch": 0.9104598633151859, + "grad_norm": 0.6064890027046204, + "learning_rate": 0.0001393052498809478, + "loss": 0.1322, + "step": 23580 + }, + { + "epoch": 0.9108459786092127, + "grad_norm": 0.41702714562416077, + "learning_rate": 0.000139279508861346, + "loss": 0.0886, + "step": 23590 + }, + { + "epoch": 0.9112320939032396, + "grad_norm": 1.1597472429275513, + "learning_rate": 0.00013925376784174422, + "loss": 0.1967, + "step": 23600 + }, + { + "epoch": 0.9116182091972663, + "grad_norm": 1.1049001216888428, + "learning_rate": 0.0001392280268221424, + "loss": 0.2802, + "step": 23610 + }, + { + "epoch": 0.9120043244912931, + "grad_norm": 0.7986807227134705, + "learning_rate": 0.00013920228580254065, + "loss": 0.1872, + "step": 23620 + }, + { + "epoch": 0.9123904397853199, + "grad_norm": 0.548693060874939, + "learning_rate": 0.00013917654478293886, + "loss": 0.3561, + "step": 23630 + }, + { + "epoch": 0.9127765550793467, + "grad_norm": 1.5944240093231201, + "learning_rate": 0.00013915080376333708, + "loss": 0.4619, + "step": 23640 + }, + { + "epoch": 0.9131626703733735, + "grad_norm": 1.9891632795333862, + "learning_rate": 0.0001391250627437353, + "loss": 0.3883, + "step": 23650 + }, + { + "epoch": 0.9135487856674003, + "grad_norm": 0.4564145803451538, + "learning_rate": 0.0001390993217241335, + "loss": 0.233, + "step": 23660 + }, + { + "epoch": 0.913934900961427, + "grad_norm": 1.1683684587478638, + "learning_rate": 0.00013907358070453172, + "loss": 0.4692, + "step": 23670 + }, + { + "epoch": 0.9143210162554539, + "grad_norm": 5.883500099182129, + "learning_rate": 0.00013904783968492993, + "loss": 0.2134, + "step": 23680 + }, + { + "epoch": 0.9147071315494807, + "grad_norm": 0.7426010370254517, + 
"learning_rate": 0.00013902209866532814, + "loss": 0.3608, + "step": 23690 + }, + { + "epoch": 0.9150932468435075, + "grad_norm": 1.5476068258285522, + "learning_rate": 0.00013899635764572636, + "loss": 0.2194, + "step": 23700 + }, + { + "epoch": 0.9154793621375342, + "grad_norm": 1.5702605247497559, + "learning_rate": 0.00013897061662612457, + "loss": 0.1731, + "step": 23710 + }, + { + "epoch": 0.9158654774315611, + "grad_norm": 2.336073637008667, + "learning_rate": 0.00013894487560652278, + "loss": 0.2703, + "step": 23720 + }, + { + "epoch": 0.9162515927255879, + "grad_norm": 0.4154629111289978, + "learning_rate": 0.000138919134586921, + "loss": 0.2601, + "step": 23730 + }, + { + "epoch": 0.9166377080196146, + "grad_norm": 1.9994091987609863, + "learning_rate": 0.0001388933935673192, + "loss": 0.4536, + "step": 23740 + }, + { + "epoch": 0.9170238233136414, + "grad_norm": 0.4610597491264343, + "learning_rate": 0.00013886765254771742, + "loss": 0.2726, + "step": 23750 + }, + { + "epoch": 0.9174099386076683, + "grad_norm": 2.19671893119812, + "learning_rate": 0.00013884191152811564, + "loss": 0.3418, + "step": 23760 + }, + { + "epoch": 0.917796053901695, + "grad_norm": 0.619023323059082, + "learning_rate": 0.00013881617050851385, + "loss": 0.2761, + "step": 23770 + }, + { + "epoch": 0.9181821691957218, + "grad_norm": 1.667083978652954, + "learning_rate": 0.00013879042948891206, + "loss": 0.2341, + "step": 23780 + }, + { + "epoch": 0.9185682844897486, + "grad_norm": 0.349020391702652, + "learning_rate": 0.00013876468846931028, + "loss": 0.244, + "step": 23790 + }, + { + "epoch": 0.9189543997837755, + "grad_norm": 3.2495415210723877, + "learning_rate": 0.0001387389474497085, + "loss": 0.2454, + "step": 23800 + }, + { + "epoch": 0.9193405150778022, + "grad_norm": 0.7900146842002869, + "learning_rate": 0.0001387132064301067, + "loss": 0.3209, + "step": 23810 + }, + { + "epoch": 0.919726630371829, + "grad_norm": 1.2435237169265747, + "learning_rate": 0.00013868746541050492, + "loss": 0.3719, + "step": 23820 + }, + { + "epoch": 0.9201127456658558, + "grad_norm": 0.7372536659240723, + "learning_rate": 0.00013866172439090313, + "loss": 0.2984, + "step": 23830 + }, + { + "epoch": 0.9204988609598826, + "grad_norm": 2.814180374145508, + "learning_rate": 0.00013863598337130134, + "loss": 0.5081, + "step": 23840 + }, + { + "epoch": 0.9208849762539094, + "grad_norm": 3.5411558151245117, + "learning_rate": 0.00013861024235169956, + "loss": 0.4475, + "step": 23850 + }, + { + "epoch": 0.9212710915479362, + "grad_norm": 0.41628485918045044, + "learning_rate": 0.00013858450133209777, + "loss": 0.1679, + "step": 23860 + }, + { + "epoch": 0.9216572068419631, + "grad_norm": 0.7951272130012512, + "learning_rate": 0.00013855876031249598, + "loss": 0.4346, + "step": 23870 + }, + { + "epoch": 0.9220433221359898, + "grad_norm": 0.6857497692108154, + "learning_rate": 0.0001385330192928942, + "loss": 0.2262, + "step": 23880 + }, + { + "epoch": 0.9224294374300166, + "grad_norm": 2.732487678527832, + "learning_rate": 0.0001385072782732924, + "loss": 0.3198, + "step": 23890 + }, + { + "epoch": 0.9228155527240434, + "grad_norm": 0.18741728365421295, + "learning_rate": 0.00013848153725369062, + "loss": 0.2409, + "step": 23900 + }, + { + "epoch": 0.9232016680180702, + "grad_norm": 0.46343281865119934, + "learning_rate": 0.00013845579623408884, + "loss": 0.2379, + "step": 23910 + }, + { + "epoch": 0.923587783312097, + "grad_norm": 1.7090940475463867, + "learning_rate": 0.00013843005521448705, + "loss": 0.2274, + "step": 23920 
+ }, + { + "epoch": 0.9239738986061238, + "grad_norm": 0.2000303715467453, + "learning_rate": 0.00013840431419488526, + "loss": 0.5171, + "step": 23930 + }, + { + "epoch": 0.9243600139001505, + "grad_norm": 1.6152868270874023, + "learning_rate": 0.00013837857317528348, + "loss": 0.1348, + "step": 23940 + }, + { + "epoch": 0.9247461291941774, + "grad_norm": 1.5346245765686035, + "learning_rate": 0.0001383528321556817, + "loss": 0.3708, + "step": 23950 + }, + { + "epoch": 0.9251322444882042, + "grad_norm": 2.1073787212371826, + "learning_rate": 0.0001383270911360799, + "loss": 0.3694, + "step": 23960 + }, + { + "epoch": 0.925518359782231, + "grad_norm": 12.8298921585083, + "learning_rate": 0.00013830135011647812, + "loss": 0.269, + "step": 23970 + }, + { + "epoch": 0.9259044750762577, + "grad_norm": 0.43689021468162537, + "learning_rate": 0.00013827560909687633, + "loss": 0.3099, + "step": 23980 + }, + { + "epoch": 0.9262905903702846, + "grad_norm": 2.084096908569336, + "learning_rate": 0.00013824986807727457, + "loss": 0.4423, + "step": 23990 + }, + { + "epoch": 0.9266767056643114, + "grad_norm": 0.9367966651916504, + "learning_rate": 0.00013822412705767276, + "loss": 0.2202, + "step": 24000 + }, + { + "epoch": 0.9270628209583381, + "grad_norm": 0.14286178350448608, + "learning_rate": 0.00013819838603807097, + "loss": 0.2302, + "step": 24010 + }, + { + "epoch": 0.9274489362523649, + "grad_norm": 0.7110779285430908, + "learning_rate": 0.00013817264501846918, + "loss": 0.3598, + "step": 24020 + }, + { + "epoch": 0.9278350515463918, + "grad_norm": 2.352980136871338, + "learning_rate": 0.0001381469039988674, + "loss": 0.4493, + "step": 24030 + }, + { + "epoch": 0.9282211668404186, + "grad_norm": 2.2235450744628906, + "learning_rate": 0.0001381211629792656, + "loss": 0.547, + "step": 24040 + }, + { + "epoch": 0.9286072821344453, + "grad_norm": 2.4419260025024414, + "learning_rate": 0.00013809542195966382, + "loss": 0.4612, + "step": 24050 + }, + { + "epoch": 0.9289933974284721, + "grad_norm": 1.3784935474395752, + "learning_rate": 0.00013806968094006206, + "loss": 0.3996, + "step": 24060 + }, + { + "epoch": 0.929379512722499, + "grad_norm": 1.019810676574707, + "learning_rate": 0.00013804393992046025, + "loss": 0.5654, + "step": 24070 + }, + { + "epoch": 0.9297656280165257, + "grad_norm": 2.399096965789795, + "learning_rate": 0.00013801819890085846, + "loss": 0.3335, + "step": 24080 + }, + { + "epoch": 0.9301517433105525, + "grad_norm": 0.28834161162376404, + "learning_rate": 0.00013799245788125668, + "loss": 0.4946, + "step": 24090 + }, + { + "epoch": 0.9305378586045794, + "grad_norm": 0.10426662117242813, + "learning_rate": 0.0001379667168616549, + "loss": 0.3275, + "step": 24100 + }, + { + "epoch": 0.9309239738986061, + "grad_norm": 2.7809340953826904, + "learning_rate": 0.0001379409758420531, + "loss": 0.3549, + "step": 24110 + }, + { + "epoch": 0.9313100891926329, + "grad_norm": 1.8708065748214722, + "learning_rate": 0.00013791523482245132, + "loss": 0.1429, + "step": 24120 + }, + { + "epoch": 0.9316962044866597, + "grad_norm": 1.9992274045944214, + "learning_rate": 0.00013788949380284956, + "loss": 0.2818, + "step": 24130 + }, + { + "epoch": 0.9320823197806866, + "grad_norm": 1.3665111064910889, + "learning_rate": 0.00013786375278324774, + "loss": 0.3565, + "step": 24140 + }, + { + "epoch": 0.9324684350747133, + "grad_norm": 3.106234550476074, + "learning_rate": 0.00013783801176364596, + "loss": 0.3706, + "step": 24150 + }, + { + "epoch": 0.9328545503687401, + "grad_norm": 
1.2186559438705444, + "learning_rate": 0.00013781227074404417, + "loss": 0.1616, + "step": 24160 + }, + { + "epoch": 0.9332406656627669, + "grad_norm": 0.41551148891448975, + "learning_rate": 0.00013778652972444238, + "loss": 0.3073, + "step": 24170 + }, + { + "epoch": 0.9336267809567937, + "grad_norm": 2.015069007873535, + "learning_rate": 0.00013776078870484062, + "loss": 0.3317, + "step": 24180 + }, + { + "epoch": 0.9340128962508205, + "grad_norm": 1.9990328550338745, + "learning_rate": 0.0001377350476852388, + "loss": 0.4937, + "step": 24190 + }, + { + "epoch": 0.9343990115448473, + "grad_norm": 2.351898670196533, + "learning_rate": 0.00013770930666563705, + "loss": 0.3994, + "step": 24200 + }, + { + "epoch": 0.934785126838874, + "grad_norm": 1.4670008420944214, + "learning_rate": 0.00013768356564603524, + "loss": 0.2905, + "step": 24210 + }, + { + "epoch": 0.9351712421329009, + "grad_norm": 0.9890618920326233, + "learning_rate": 0.00013765782462643345, + "loss": 0.2512, + "step": 24220 + }, + { + "epoch": 0.9355573574269277, + "grad_norm": 0.3020402491092682, + "learning_rate": 0.00013763208360683166, + "loss": 0.3701, + "step": 24230 + }, + { + "epoch": 0.9359434727209545, + "grad_norm": 0.42218661308288574, + "learning_rate": 0.00013760634258722988, + "loss": 0.3395, + "step": 24240 + }, + { + "epoch": 0.9363295880149812, + "grad_norm": 1.5767306089401245, + "learning_rate": 0.00013758060156762812, + "loss": 0.3941, + "step": 24250 + }, + { + "epoch": 0.9367157033090081, + "grad_norm": 0.5416197180747986, + "learning_rate": 0.0001375548605480263, + "loss": 0.2915, + "step": 24260 + }, + { + "epoch": 0.9371018186030349, + "grad_norm": 1.175347924232483, + "learning_rate": 0.00013752911952842454, + "loss": 0.1284, + "step": 24270 + }, + { + "epoch": 0.9374879338970616, + "grad_norm": 0.8719255924224854, + "learning_rate": 0.00013750337850882273, + "loss": 0.3144, + "step": 24280 + }, + { + "epoch": 0.9378740491910884, + "grad_norm": 0.2711631655693054, + "learning_rate": 0.00013747763748922097, + "loss": 0.1561, + "step": 24290 + }, + { + "epoch": 0.9382601644851153, + "grad_norm": 3.2228004932403564, + "learning_rate": 0.00013745189646961918, + "loss": 0.5494, + "step": 24300 + }, + { + "epoch": 0.9386462797791421, + "grad_norm": 0.05647150054574013, + "learning_rate": 0.00013742615545001737, + "loss": 0.3571, + "step": 24310 + }, + { + "epoch": 0.9390323950731688, + "grad_norm": 0.8459005951881409, + "learning_rate": 0.0001374004144304156, + "loss": 0.1948, + "step": 24320 + }, + { + "epoch": 0.9394185103671956, + "grad_norm": 0.7044252157211304, + "learning_rate": 0.0001373746734108138, + "loss": 0.2511, + "step": 24330 + }, + { + "epoch": 0.9398046256612225, + "grad_norm": 0.979590654373169, + "learning_rate": 0.00013734893239121204, + "loss": 0.2158, + "step": 24340 + }, + { + "epoch": 0.9401907409552492, + "grad_norm": 1.0585628747940063, + "learning_rate": 0.00013732319137161022, + "loss": 0.1339, + "step": 24350 + }, + { + "epoch": 0.940576856249276, + "grad_norm": 0.6700488328933716, + "learning_rate": 0.00013729745035200846, + "loss": 0.2056, + "step": 24360 + }, + { + "epoch": 0.9409629715433029, + "grad_norm": 1.6656709909439087, + "learning_rate": 0.00013727170933240668, + "loss": 0.2957, + "step": 24370 + }, + { + "epoch": 0.9413490868373297, + "grad_norm": 2.963427782058716, + "learning_rate": 0.00013724596831280486, + "loss": 0.2673, + "step": 24380 + }, + { + "epoch": 0.9417352021313564, + "grad_norm": 0.8103615641593933, + "learning_rate": 0.0001372202272932031, + 
"loss": 0.372, + "step": 24390 + }, + { + "epoch": 0.9421213174253832, + "grad_norm": 2.3593873977661133, + "learning_rate": 0.0001371944862736013, + "loss": 0.431, + "step": 24400 + }, + { + "epoch": 0.9425074327194101, + "grad_norm": 2.8472931385040283, + "learning_rate": 0.00013716874525399953, + "loss": 0.4584, + "step": 24410 + }, + { + "epoch": 0.9428935480134368, + "grad_norm": 1.2765402793884277, + "learning_rate": 0.00013714300423439772, + "loss": 0.2543, + "step": 24420 + }, + { + "epoch": 0.9432796633074636, + "grad_norm": 1.4226797819137573, + "learning_rate": 0.00013711726321479596, + "loss": 0.2914, + "step": 24430 + }, + { + "epoch": 0.9436657786014904, + "grad_norm": 6.906572341918945, + "learning_rate": 0.00013709152219519417, + "loss": 0.4415, + "step": 24440 + }, + { + "epoch": 0.9440518938955172, + "grad_norm": 1.8387972116470337, + "learning_rate": 0.00013706578117559236, + "loss": 0.3018, + "step": 24450 + }, + { + "epoch": 0.944438009189544, + "grad_norm": 0.7259104251861572, + "learning_rate": 0.0001370400401559906, + "loss": 0.2356, + "step": 24460 + }, + { + "epoch": 0.9448241244835708, + "grad_norm": 0.6452949643135071, + "learning_rate": 0.00013701429913638878, + "loss": 0.2382, + "step": 24470 + }, + { + "epoch": 0.9452102397775975, + "grad_norm": 4.259208679199219, + "learning_rate": 0.00013698855811678702, + "loss": 0.4736, + "step": 24480 + }, + { + "epoch": 0.9455963550716244, + "grad_norm": 2.7305455207824707, + "learning_rate": 0.00013696281709718524, + "loss": 0.5034, + "step": 24490 + }, + { + "epoch": 0.9459824703656512, + "grad_norm": 0.6123724579811096, + "learning_rate": 0.00013693707607758345, + "loss": 0.3638, + "step": 24500 + }, + { + "epoch": 0.946368585659678, + "grad_norm": 0.9821889400482178, + "learning_rate": 0.00013691133505798166, + "loss": 0.3081, + "step": 24510 + }, + { + "epoch": 0.9467547009537047, + "grad_norm": 2.0870277881622314, + "learning_rate": 0.00013688559403837985, + "loss": 0.3116, + "step": 24520 + }, + { + "epoch": 0.9471408162477316, + "grad_norm": 2.495162010192871, + "learning_rate": 0.0001368598530187781, + "loss": 0.3879, + "step": 24530 + }, + { + "epoch": 0.9475269315417584, + "grad_norm": 1.4834142923355103, + "learning_rate": 0.00013683411199917628, + "loss": 0.1975, + "step": 24540 + }, + { + "epoch": 0.9479130468357851, + "grad_norm": 0.10831606388092041, + "learning_rate": 0.00013680837097957452, + "loss": 0.3504, + "step": 24550 + }, + { + "epoch": 0.9482991621298119, + "grad_norm": 1.6975635290145874, + "learning_rate": 0.00013678262995997273, + "loss": 0.3305, + "step": 24560 + }, + { + "epoch": 0.9486852774238388, + "grad_norm": 1.982422947883606, + "learning_rate": 0.00013675688894037094, + "loss": 0.3693, + "step": 24570 + }, + { + "epoch": 0.9490713927178656, + "grad_norm": 0.3487630784511566, + "learning_rate": 0.00013673114792076916, + "loss": 0.1912, + "step": 24580 + }, + { + "epoch": 0.9494575080118923, + "grad_norm": 0.4546245336532593, + "learning_rate": 0.00013670540690116734, + "loss": 0.2519, + "step": 24590 + }, + { + "epoch": 0.9498436233059191, + "grad_norm": 0.42953622341156006, + "learning_rate": 0.00013667966588156558, + "loss": 0.1967, + "step": 24600 + }, + { + "epoch": 0.950229738599946, + "grad_norm": 0.8197507858276367, + "learning_rate": 0.00013665392486196377, + "loss": 0.3057, + "step": 24610 + }, + { + "epoch": 0.9506158538939727, + "grad_norm": 0.8627083897590637, + "learning_rate": 0.000136628183842362, + "loss": 0.1382, + "step": 24620 + }, + { + "epoch": 
0.9510019691879995, + "grad_norm": 1.0003200769424438, + "learning_rate": 0.00013660244282276022, + "loss": 0.1608, + "step": 24630 + }, + { + "epoch": 0.9513880844820264, + "grad_norm": 0.4473998546600342, + "learning_rate": 0.00013657670180315844, + "loss": 0.2226, + "step": 24640 + }, + { + "epoch": 0.9517741997760532, + "grad_norm": 1.9413338899612427, + "learning_rate": 0.00013655096078355665, + "loss": 0.2323, + "step": 24650 + }, + { + "epoch": 0.9521603150700799, + "grad_norm": 1.3250267505645752, + "learning_rate": 0.00013652521976395483, + "loss": 0.1617, + "step": 24660 + }, + { + "epoch": 0.9525464303641067, + "grad_norm": 1.756535530090332, + "learning_rate": 0.00013649947874435308, + "loss": 0.2727, + "step": 24670 + }, + { + "epoch": 0.9529325456581336, + "grad_norm": 1.1905356645584106, + "learning_rate": 0.0001364737377247513, + "loss": 0.3169, + "step": 24680 + }, + { + "epoch": 0.9533186609521603, + "grad_norm": 0.8116361498832703, + "learning_rate": 0.0001364479967051495, + "loss": 0.3999, + "step": 24690 + }, + { + "epoch": 0.9537047762461871, + "grad_norm": 0.8906353712081909, + "learning_rate": 0.00013642225568554772, + "loss": 0.3499, + "step": 24700 + }, + { + "epoch": 0.9540908915402139, + "grad_norm": 0.7422589659690857, + "learning_rate": 0.00013639651466594593, + "loss": 0.3544, + "step": 24710 + }, + { + "epoch": 0.9544770068342407, + "grad_norm": 0.21925519406795502, + "learning_rate": 0.00013637077364634414, + "loss": 0.1957, + "step": 24720 + }, + { + "epoch": 0.9548631221282675, + "grad_norm": 3.2993857860565186, + "learning_rate": 0.00013634503262674236, + "loss": 0.2625, + "step": 24730 + }, + { + "epoch": 0.9552492374222943, + "grad_norm": 1.4352943897247314, + "learning_rate": 0.00013631929160714057, + "loss": 0.1655, + "step": 24740 + }, + { + "epoch": 0.955635352716321, + "grad_norm": 1.4417182207107544, + "learning_rate": 0.00013629355058753878, + "loss": 0.2442, + "step": 24750 + }, + { + "epoch": 0.9560214680103479, + "grad_norm": 0.5024278163909912, + "learning_rate": 0.000136267809567937, + "loss": 0.2035, + "step": 24760 + }, + { + "epoch": 0.9564075833043747, + "grad_norm": 1.553472638130188, + "learning_rate": 0.0001362420685483352, + "loss": 0.22, + "step": 24770 + }, + { + "epoch": 0.9567936985984015, + "grad_norm": 1.655155897140503, + "learning_rate": 0.00013621632752873342, + "loss": 0.4051, + "step": 24780 + }, + { + "epoch": 0.9571798138924282, + "grad_norm": 0.4435586631298065, + "learning_rate": 0.00013619058650913164, + "loss": 0.3032, + "step": 24790 + }, + { + "epoch": 0.9575659291864551, + "grad_norm": 0.38589465618133545, + "learning_rate": 0.00013616484548952985, + "loss": 0.2062, + "step": 24800 + }, + { + "epoch": 0.9579520444804819, + "grad_norm": 0.6986583471298218, + "learning_rate": 0.00013613910446992806, + "loss": 0.6371, + "step": 24810 + }, + { + "epoch": 0.9583381597745086, + "grad_norm": 1.69257652759552, + "learning_rate": 0.00013611336345032627, + "loss": 0.2425, + "step": 24820 + }, + { + "epoch": 0.9587242750685354, + "grad_norm": 0.6136781573295593, + "learning_rate": 0.0001360876224307245, + "loss": 0.313, + "step": 24830 + }, + { + "epoch": 0.9591103903625623, + "grad_norm": 0.9019533395767212, + "learning_rate": 0.0001360618814111227, + "loss": 0.3723, + "step": 24840 + }, + { + "epoch": 0.9594965056565891, + "grad_norm": 4.601387977600098, + "learning_rate": 0.00013603614039152091, + "loss": 0.4228, + "step": 24850 + }, + { + "epoch": 0.9598826209506158, + "grad_norm": 0.35048994421958923, + 
"learning_rate": 0.00013601039937191913, + "loss": 0.2423, + "step": 24860 + }, + { + "epoch": 0.9602687362446426, + "grad_norm": 1.7177300453186035, + "learning_rate": 0.00013598465835231734, + "loss": 0.3995, + "step": 24870 + }, + { + "epoch": 0.9606548515386695, + "grad_norm": 1.3341178894042969, + "learning_rate": 0.00013595891733271555, + "loss": 0.3422, + "step": 24880 + }, + { + "epoch": 0.9610409668326962, + "grad_norm": 1.1859056949615479, + "learning_rate": 0.00013593317631311377, + "loss": 0.4204, + "step": 24890 + }, + { + "epoch": 0.961427082126723, + "grad_norm": 1.9831022024154663, + "learning_rate": 0.00013590743529351198, + "loss": 0.3623, + "step": 24900 + }, + { + "epoch": 0.9618131974207499, + "grad_norm": 0.8704162240028381, + "learning_rate": 0.0001358816942739102, + "loss": 0.2664, + "step": 24910 + }, + { + "epoch": 0.9621993127147767, + "grad_norm": 2.2464160919189453, + "learning_rate": 0.0001358559532543084, + "loss": 0.3628, + "step": 24920 + }, + { + "epoch": 0.9625854280088034, + "grad_norm": 2.5081570148468018, + "learning_rate": 0.00013583021223470662, + "loss": 0.314, + "step": 24930 + }, + { + "epoch": 0.9629715433028302, + "grad_norm": 4.44802713394165, + "learning_rate": 0.00013580447121510483, + "loss": 0.4593, + "step": 24940 + }, + { + "epoch": 0.9633576585968571, + "grad_norm": 2.0449907779693604, + "learning_rate": 0.00013577873019550305, + "loss": 0.3941, + "step": 24950 + }, + { + "epoch": 0.9637437738908838, + "grad_norm": 2.090315818786621, + "learning_rate": 0.00013575298917590126, + "loss": 0.3713, + "step": 24960 + }, + { + "epoch": 0.9641298891849106, + "grad_norm": 1.0789872407913208, + "learning_rate": 0.00013572724815629947, + "loss": 0.2411, + "step": 24970 + }, + { + "epoch": 0.9645160044789374, + "grad_norm": 2.0463109016418457, + "learning_rate": 0.0001357015071366977, + "loss": 0.2935, + "step": 24980 + }, + { + "epoch": 0.9649021197729643, + "grad_norm": 1.331299901008606, + "learning_rate": 0.00013567576611709593, + "loss": 0.2162, + "step": 24990 + }, + { + "epoch": 0.965288235066991, + "grad_norm": 3.3949038982391357, + "learning_rate": 0.00013565002509749411, + "loss": 0.4593, + "step": 25000 + }, + { + "epoch": 0.9656743503610178, + "grad_norm": 2.4510934352874756, + "learning_rate": 0.00013562428407789233, + "loss": 0.2476, + "step": 25010 + }, + { + "epoch": 0.9660604656550446, + "grad_norm": 1.1333917379379272, + "learning_rate": 0.00013559854305829054, + "loss": 0.3035, + "step": 25020 + }, + { + "epoch": 0.9664465809490714, + "grad_norm": 2.5525829792022705, + "learning_rate": 0.00013557280203868875, + "loss": 0.4498, + "step": 25030 + }, + { + "epoch": 0.9668326962430982, + "grad_norm": 1.4862838983535767, + "learning_rate": 0.00013554706101908697, + "loss": 0.3109, + "step": 25040 + }, + { + "epoch": 0.967218811537125, + "grad_norm": 1.0053727626800537, + "learning_rate": 0.00013552131999948518, + "loss": 0.3029, + "step": 25050 + }, + { + "epoch": 0.9676049268311517, + "grad_norm": 1.4033957719802856, + "learning_rate": 0.00013549557897988342, + "loss": 0.3096, + "step": 25060 + }, + { + "epoch": 0.9679910421251786, + "grad_norm": 2.1944756507873535, + "learning_rate": 0.0001354698379602816, + "loss": 0.3428, + "step": 25070 + }, + { + "epoch": 0.9683771574192054, + "grad_norm": 3.330143928527832, + "learning_rate": 0.00013544409694067982, + "loss": 0.4026, + "step": 25080 + }, + { + "epoch": 0.9687632727132321, + "grad_norm": 1.9337730407714844, + "learning_rate": 0.00013541835592107803, + "loss": 0.5343, + "step": 
25090 + }, + { + "epoch": 0.9691493880072589, + "grad_norm": 4.203855514526367, + "learning_rate": 0.00013539261490147625, + "loss": 0.4461, + "step": 25100 + }, + { + "epoch": 0.9695355033012858, + "grad_norm": 0.6582885980606079, + "learning_rate": 0.0001353668738818745, + "loss": 0.2719, + "step": 25110 + }, + { + "epoch": 0.9699216185953126, + "grad_norm": 0.8788600564002991, + "learning_rate": 0.00013534113286227267, + "loss": 0.4335, + "step": 25120 + }, + { + "epoch": 0.9703077338893393, + "grad_norm": 1.3793160915374756, + "learning_rate": 0.00013531539184267091, + "loss": 0.3126, + "step": 25130 + }, + { + "epoch": 0.9706938491833662, + "grad_norm": 3.5996806621551514, + "learning_rate": 0.0001352896508230691, + "loss": 0.5171, + "step": 25140 + }, + { + "epoch": 0.971079964477393, + "grad_norm": 1.6220872402191162, + "learning_rate": 0.00013526390980346731, + "loss": 0.3992, + "step": 25150 + }, + { + "epoch": 0.9714660797714197, + "grad_norm": 1.8351634740829468, + "learning_rate": 0.00013523816878386553, + "loss": 0.3159, + "step": 25160 + }, + { + "epoch": 0.9718521950654465, + "grad_norm": 0.6400974988937378, + "learning_rate": 0.00013521242776426374, + "loss": 0.3187, + "step": 25170 + }, + { + "epoch": 0.9722383103594734, + "grad_norm": 1.3507485389709473, + "learning_rate": 0.00013518668674466198, + "loss": 0.3626, + "step": 25180 + }, + { + "epoch": 0.9726244256535002, + "grad_norm": 1.4778717756271362, + "learning_rate": 0.00013516094572506017, + "loss": 0.3467, + "step": 25190 + }, + { + "epoch": 0.9730105409475269, + "grad_norm": 0.4346179664134979, + "learning_rate": 0.0001351352047054584, + "loss": 0.1995, + "step": 25200 + }, + { + "epoch": 0.9733966562415537, + "grad_norm": 2.8404130935668945, + "learning_rate": 0.0001351094636858566, + "loss": 0.4765, + "step": 25210 + }, + { + "epoch": 0.9737827715355806, + "grad_norm": 0.044492240995168686, + "learning_rate": 0.0001350837226662548, + "loss": 0.3047, + "step": 25220 + }, + { + "epoch": 0.9741688868296073, + "grad_norm": 2.258355140686035, + "learning_rate": 0.00013505798164665302, + "loss": 0.234, + "step": 25230 + }, + { + "epoch": 0.9745550021236341, + "grad_norm": 0.23794110119342804, + "learning_rate": 0.00013503224062705123, + "loss": 0.5178, + "step": 25240 + }, + { + "epoch": 0.9749411174176609, + "grad_norm": 0.5849624872207642, + "learning_rate": 0.00013500649960744947, + "loss": 0.3419, + "step": 25250 + }, + { + "epoch": 0.9753272327116878, + "grad_norm": 1.535228967666626, + "learning_rate": 0.00013498075858784766, + "loss": 0.275, + "step": 25260 + }, + { + "epoch": 0.9757133480057145, + "grad_norm": 4.943759441375732, + "learning_rate": 0.0001349550175682459, + "loss": 0.241, + "step": 25270 + }, + { + "epoch": 0.9760994632997413, + "grad_norm": 1.3046916723251343, + "learning_rate": 0.0001349292765486441, + "loss": 0.2453, + "step": 25280 + }, + { + "epoch": 0.976485578593768, + "grad_norm": 0.20991156995296478, + "learning_rate": 0.0001349035355290423, + "loss": 0.392, + "step": 25290 + }, + { + "epoch": 0.9768716938877949, + "grad_norm": 1.2106267213821411, + "learning_rate": 0.00013487779450944054, + "loss": 0.284, + "step": 25300 + }, + { + "epoch": 0.9772578091818217, + "grad_norm": 0.28197771310806274, + "learning_rate": 0.00013485205348983873, + "loss": 0.2596, + "step": 25310 + }, + { + "epoch": 0.9776439244758485, + "grad_norm": 0.8538393378257751, + "learning_rate": 0.00013482631247023697, + "loss": 0.2519, + "step": 25320 + }, + { + "epoch": 0.9780300397698752, + "grad_norm": 
1.9520586729049683, + "learning_rate": 0.00013480057145063515, + "loss": 0.2581, + "step": 25330 + }, + { + "epoch": 0.9784161550639021, + "grad_norm": 0.7613987326622009, + "learning_rate": 0.0001347748304310334, + "loss": 0.1549, + "step": 25340 + }, + { + "epoch": 0.9788022703579289, + "grad_norm": 0.2045626938343048, + "learning_rate": 0.00013474908941143158, + "loss": 0.2587, + "step": 25350 + }, + { + "epoch": 0.9791883856519557, + "grad_norm": 1.634488821029663, + "learning_rate": 0.0001347233483918298, + "loss": 0.2266, + "step": 25360 + }, + { + "epoch": 0.9795745009459824, + "grad_norm": 0.6475266814231873, + "learning_rate": 0.00013469760737222803, + "loss": 0.2548, + "step": 25370 + }, + { + "epoch": 0.9799606162400093, + "grad_norm": 0.25982800126075745, + "learning_rate": 0.00013467186635262622, + "loss": 0.3398, + "step": 25380 + }, + { + "epoch": 0.9803467315340361, + "grad_norm": 2.3229706287384033, + "learning_rate": 0.00013464612533302446, + "loss": 0.3468, + "step": 25390 + }, + { + "epoch": 0.9807328468280628, + "grad_norm": 0.6465128660202026, + "learning_rate": 0.00013462038431342265, + "loss": 0.2716, + "step": 25400 + }, + { + "epoch": 0.9811189621220897, + "grad_norm": 1.413368821144104, + "learning_rate": 0.0001345946432938209, + "loss": 0.2508, + "step": 25410 + }, + { + "epoch": 0.9815050774161165, + "grad_norm": 0.33577996492385864, + "learning_rate": 0.00013456890227421907, + "loss": 0.3323, + "step": 25420 + }, + { + "epoch": 0.9818911927101432, + "grad_norm": 0.7601230144500732, + "learning_rate": 0.0001345431612546173, + "loss": 0.2682, + "step": 25430 + }, + { + "epoch": 0.98227730800417, + "grad_norm": 4.296112060546875, + "learning_rate": 0.00013451742023501553, + "loss": 0.2886, + "step": 25440 + }, + { + "epoch": 0.9826634232981969, + "grad_norm": 1.2557302713394165, + "learning_rate": 0.00013449167921541371, + "loss": 0.2863, + "step": 25450 + }, + { + "epoch": 0.9830495385922237, + "grad_norm": 0.6168705821037292, + "learning_rate": 0.00013446593819581195, + "loss": 0.195, + "step": 25460 + }, + { + "epoch": 0.9834356538862504, + "grad_norm": 1.9064022302627563, + "learning_rate": 0.00013444019717621014, + "loss": 0.2898, + "step": 25470 + }, + { + "epoch": 0.9838217691802772, + "grad_norm": 0.3549353778362274, + "learning_rate": 0.00013441445615660838, + "loss": 0.1761, + "step": 25480 + }, + { + "epoch": 0.9842078844743041, + "grad_norm": 0.6308786869049072, + "learning_rate": 0.0001343887151370066, + "loss": 0.2169, + "step": 25490 + }, + { + "epoch": 0.9845939997683308, + "grad_norm": 0.9449920058250427, + "learning_rate": 0.0001343629741174048, + "loss": 0.2952, + "step": 25500 + }, + { + "epoch": 0.9849801150623576, + "grad_norm": 1.6993355751037598, + "learning_rate": 0.00013433723309780302, + "loss": 0.3745, + "step": 25510 + }, + { + "epoch": 0.9853662303563844, + "grad_norm": 0.8093920350074768, + "learning_rate": 0.0001343114920782012, + "loss": 0.1725, + "step": 25520 + }, + { + "epoch": 0.9857523456504113, + "grad_norm": 1.4968618154525757, + "learning_rate": 0.00013428575105859945, + "loss": 0.2843, + "step": 25530 + }, + { + "epoch": 0.986138460944438, + "grad_norm": 0.37341028451919556, + "learning_rate": 0.00013426001003899763, + "loss": 0.2462, + "step": 25540 + }, + { + "epoch": 0.9865245762384648, + "grad_norm": 2.0109541416168213, + "learning_rate": 0.00013423426901939587, + "loss": 0.4046, + "step": 25550 + }, + { + "epoch": 0.9869106915324916, + "grad_norm": 2.540151357650757, + "learning_rate": 0.0001342085279997941, + 
"loss": 0.4331, + "step": 25560 + }, + { + "epoch": 0.9872968068265184, + "grad_norm": 0.9178367257118225, + "learning_rate": 0.0001341827869801923, + "loss": 0.3849, + "step": 25570 + }, + { + "epoch": 0.9876829221205452, + "grad_norm": 1.4229514598846436, + "learning_rate": 0.00013415704596059051, + "loss": 0.326, + "step": 25580 + }, + { + "epoch": 0.988069037414572, + "grad_norm": 0.7699927091598511, + "learning_rate": 0.0001341313049409887, + "loss": 0.237, + "step": 25590 + }, + { + "epoch": 0.9884551527085987, + "grad_norm": 0.3460877239704132, + "learning_rate": 0.00013410556392138694, + "loss": 0.2679, + "step": 25600 + }, + { + "epoch": 0.9888412680026256, + "grad_norm": 0.25363796949386597, + "learning_rate": 0.00013407982290178515, + "loss": 0.1746, + "step": 25610 + }, + { + "epoch": 0.9892273832966524, + "grad_norm": 1.5607961416244507, + "learning_rate": 0.00013405408188218337, + "loss": 0.6328, + "step": 25620 + }, + { + "epoch": 0.9896134985906792, + "grad_norm": 2.3751626014709473, + "learning_rate": 0.00013402834086258158, + "loss": 0.3989, + "step": 25630 + }, + { + "epoch": 0.9899996138847059, + "grad_norm": 0.20423173904418945, + "learning_rate": 0.0001340025998429798, + "loss": 0.2836, + "step": 25640 + }, + { + "epoch": 0.9903857291787328, + "grad_norm": 1.4207524061203003, + "learning_rate": 0.000133976858823378, + "loss": 0.348, + "step": 25650 + }, + { + "epoch": 0.9907718444727596, + "grad_norm": 0.12217597663402557, + "learning_rate": 0.0001339511178037762, + "loss": 0.3515, + "step": 25660 + }, + { + "epoch": 0.9911579597667863, + "grad_norm": 0.9259626269340515, + "learning_rate": 0.00013392537678417443, + "loss": 0.3039, + "step": 25670 + }, + { + "epoch": 0.9915440750608132, + "grad_norm": 0.5700181722640991, + "learning_rate": 0.00013389963576457265, + "loss": 0.3266, + "step": 25680 + }, + { + "epoch": 0.99193019035484, + "grad_norm": 1.9731560945510864, + "learning_rate": 0.00013387389474497086, + "loss": 0.2534, + "step": 25690 + }, + { + "epoch": 0.9923163056488667, + "grad_norm": 1.3714967966079712, + "learning_rate": 0.00013384815372536907, + "loss": 0.298, + "step": 25700 + }, + { + "epoch": 0.9927024209428935, + "grad_norm": 1.2377171516418457, + "learning_rate": 0.0001338224127057673, + "loss": 0.2948, + "step": 25710 + }, + { + "epoch": 0.9930885362369204, + "grad_norm": 0.4711095690727234, + "learning_rate": 0.0001337966716861655, + "loss": 0.2295, + "step": 25720 + }, + { + "epoch": 0.9934746515309472, + "grad_norm": 0.46715909242630005, + "learning_rate": 0.00013377093066656369, + "loss": 0.3331, + "step": 25730 + }, + { + "epoch": 0.9938607668249739, + "grad_norm": 1.1080710887908936, + "learning_rate": 0.00013374518964696193, + "loss": 0.4324, + "step": 25740 + }, + { + "epoch": 0.9942468821190007, + "grad_norm": 2.3581650257110596, + "learning_rate": 0.00013371944862736014, + "loss": 0.372, + "step": 25750 + }, + { + "epoch": 0.9946329974130276, + "grad_norm": 2.448678970336914, + "learning_rate": 0.00013369370760775835, + "loss": 0.3396, + "step": 25760 + }, + { + "epoch": 0.9950191127070543, + "grad_norm": 1.4270198345184326, + "learning_rate": 0.00013366796658815657, + "loss": 0.4511, + "step": 25770 + }, + { + "epoch": 0.9954052280010811, + "grad_norm": 0.6360304951667786, + "learning_rate": 0.00013364222556855478, + "loss": 0.2427, + "step": 25780 + }, + { + "epoch": 0.9957913432951079, + "grad_norm": 2.1653332710266113, + "learning_rate": 0.000133616484548953, + "loss": 0.3057, + "step": 25790 + }, + { + "epoch": 0.9961774585891348, + 
"grad_norm": 2.952923536300659, + "learning_rate": 0.0001335907435293512, + "loss": 0.3076, + "step": 25800 + }, + { + "epoch": 0.9965635738831615, + "grad_norm": 0.5913527607917786, + "learning_rate": 0.00013356500250974942, + "loss": 0.399, + "step": 25810 + }, + { + "epoch": 0.9969496891771883, + "grad_norm": 0.4653400480747223, + "learning_rate": 0.00013353926149014763, + "loss": 0.2452, + "step": 25820 + }, + { + "epoch": 0.9973358044712151, + "grad_norm": 6.321722984313965, + "learning_rate": 0.00013351352047054585, + "loss": 0.3644, + "step": 25830 + }, + { + "epoch": 0.9977219197652419, + "grad_norm": 1.9944865703582764, + "learning_rate": 0.00013348777945094406, + "loss": 0.2915, + "step": 25840 + }, + { + "epoch": 0.9981080350592687, + "grad_norm": 1.1486843824386597, + "learning_rate": 0.00013346203843134227, + "loss": 0.3787, + "step": 25850 + }, + { + "epoch": 0.9984941503532955, + "grad_norm": 0.11532440781593323, + "learning_rate": 0.0001334362974117405, + "loss": 0.3053, + "step": 25860 + }, + { + "epoch": 0.9988802656473222, + "grad_norm": 1.3178479671478271, + "learning_rate": 0.0001334105563921387, + "loss": 0.2917, + "step": 25870 + }, + { + "epoch": 0.9992663809413491, + "grad_norm": 0.3511134088039398, + "learning_rate": 0.0001333848153725369, + "loss": 0.2657, + "step": 25880 + }, + { + "epoch": 0.9996524962353759, + "grad_norm": 1.2761729955673218, + "learning_rate": 0.00013335907435293513, + "loss": 0.3913, + "step": 25890 + }, + { + "epoch": 1.0000386115294027, + "grad_norm": 2.540947437286377, + "learning_rate": 0.00013333333333333334, + "loss": 0.3016, + "step": 25900 + }, + { + "epoch": 1.0004247268234294, + "grad_norm": 0.5865538120269775, + "learning_rate": 0.00013330759231373155, + "loss": 0.4184, + "step": 25910 + }, + { + "epoch": 1.0008108421174562, + "grad_norm": 2.7305166721343994, + "learning_rate": 0.00013328185129412977, + "loss": 0.3529, + "step": 25920 + }, + { + "epoch": 1.0011969574114832, + "grad_norm": 1.4203829765319824, + "learning_rate": 0.00013325611027452798, + "loss": 0.3345, + "step": 25930 + }, + { + "epoch": 1.00158307270551, + "grad_norm": 0.7687380313873291, + "learning_rate": 0.0001332303692549262, + "loss": 0.2622, + "step": 25940 + }, + { + "epoch": 1.0019691879995367, + "grad_norm": 0.4958217144012451, + "learning_rate": 0.0001332046282353244, + "loss": 0.3935, + "step": 25950 + }, + { + "epoch": 1.0023553032935635, + "grad_norm": 0.27102500200271606, + "learning_rate": 0.00013317888721572262, + "loss": 0.2642, + "step": 25960 + }, + { + "epoch": 1.0027414185875902, + "grad_norm": 0.6760912537574768, + "learning_rate": 0.00013315314619612083, + "loss": 0.1937, + "step": 25970 + }, + { + "epoch": 1.003127533881617, + "grad_norm": 2.5647270679473877, + "learning_rate": 0.00013312740517651905, + "loss": 0.3344, + "step": 25980 + }, + { + "epoch": 1.0035136491756438, + "grad_norm": 0.4810403883457184, + "learning_rate": 0.00013310166415691726, + "loss": 0.1783, + "step": 25990 + }, + { + "epoch": 1.0038997644696706, + "grad_norm": 2.5404248237609863, + "learning_rate": 0.00013307592313731547, + "loss": 0.3979, + "step": 26000 + }, + { + "epoch": 1.0042858797636975, + "grad_norm": 1.1135408878326416, + "learning_rate": 0.00013305018211771369, + "loss": 0.3681, + "step": 26010 + }, + { + "epoch": 1.0046719950577243, + "grad_norm": 1.2810723781585693, + "learning_rate": 0.0001330244410981119, + "loss": 0.183, + "step": 26020 + }, + { + "epoch": 1.005058110351751, + "grad_norm": 3.3486454486846924, + "learning_rate": 
0.0001329987000785101, + "loss": 0.2489, + "step": 26030 + }, + { + "epoch": 1.0054442256457778, + "grad_norm": 0.7915325164794922, + "learning_rate": 0.00013297295905890833, + "loss": 0.3577, + "step": 26040 + }, + { + "epoch": 1.0058303409398046, + "grad_norm": 0.969727098941803, + "learning_rate": 0.00013294721803930654, + "loss": 0.2243, + "step": 26050 + }, + { + "epoch": 1.0062164562338314, + "grad_norm": 1.8932983875274658, + "learning_rate": 0.00013292147701970475, + "loss": 0.245, + "step": 26060 + }, + { + "epoch": 1.0066025715278581, + "grad_norm": 1.2421804666519165, + "learning_rate": 0.00013289573600010297, + "loss": 0.2087, + "step": 26070 + }, + { + "epoch": 1.006988686821885, + "grad_norm": 1.316405177116394, + "learning_rate": 0.00013286999498050118, + "loss": 0.2864, + "step": 26080 + }, + { + "epoch": 1.007374802115912, + "grad_norm": 1.9196691513061523, + "learning_rate": 0.0001328442539608994, + "loss": 0.2098, + "step": 26090 + }, + { + "epoch": 1.0077609174099387, + "grad_norm": 0.7522671222686768, + "learning_rate": 0.0001328185129412976, + "loss": 0.3092, + "step": 26100 + }, + { + "epoch": 1.0081470327039654, + "grad_norm": 0.322963684797287, + "learning_rate": 0.00013279277192169585, + "loss": 0.2452, + "step": 26110 + }, + { + "epoch": 1.0085331479979922, + "grad_norm": 2.243734121322632, + "learning_rate": 0.00013276703090209403, + "loss": 0.4387, + "step": 26120 + }, + { + "epoch": 1.008919263292019, + "grad_norm": 1.0588726997375488, + "learning_rate": 0.00013274128988249225, + "loss": 0.3326, + "step": 26130 + }, + { + "epoch": 1.0093053785860457, + "grad_norm": 0.7018478512763977, + "learning_rate": 0.00013271554886289046, + "loss": 0.4248, + "step": 26140 + }, + { + "epoch": 1.0096914938800725, + "grad_norm": 3.692521810531616, + "learning_rate": 0.00013268980784328867, + "loss": 0.2452, + "step": 26150 + }, + { + "epoch": 1.0100776091740993, + "grad_norm": 2.21842885017395, + "learning_rate": 0.00013266406682368689, + "loss": 0.3315, + "step": 26160 + }, + { + "epoch": 1.0104637244681263, + "grad_norm": 1.2321841716766357, + "learning_rate": 0.0001326383258040851, + "loss": 0.1042, + "step": 26170 + }, + { + "epoch": 1.010849839762153, + "grad_norm": 1.6151124238967896, + "learning_rate": 0.00013261258478448334, + "loss": 0.1934, + "step": 26180 + }, + { + "epoch": 1.0112359550561798, + "grad_norm": 0.0949881374835968, + "learning_rate": 0.00013258684376488153, + "loss": 0.1628, + "step": 26190 + }, + { + "epoch": 1.0116220703502066, + "grad_norm": 1.7337597608566284, + "learning_rate": 0.00013256110274527977, + "loss": 0.3275, + "step": 26200 + }, + { + "epoch": 1.0120081856442333, + "grad_norm": 2.1338372230529785, + "learning_rate": 0.00013253536172567795, + "loss": 0.3677, + "step": 26210 + }, + { + "epoch": 1.01239430093826, + "grad_norm": 1.803187370300293, + "learning_rate": 0.00013250962070607617, + "loss": 0.2886, + "step": 26220 + }, + { + "epoch": 1.0127804162322869, + "grad_norm": 2.022825002670288, + "learning_rate": 0.00013248387968647438, + "loss": 0.2618, + "step": 26230 + }, + { + "epoch": 1.0131665315263139, + "grad_norm": 0.48369279503822327, + "learning_rate": 0.0001324581386668726, + "loss": 0.4638, + "step": 26240 + }, + { + "epoch": 1.0135526468203406, + "grad_norm": 3.2530572414398193, + "learning_rate": 0.00013243239764727083, + "loss": 0.3824, + "step": 26250 + }, + { + "epoch": 1.0139387621143674, + "grad_norm": 1.0877609252929688, + "learning_rate": 0.00013240665662766902, + "loss": 0.1732, + "step": 26260 + }, + { + "epoch": 
1.0143248774083942, + "grad_norm": 1.5585906505584717, + "learning_rate": 0.00013238091560806726, + "loss": 0.2044, + "step": 26270 + }, + { + "epoch": 1.014710992702421, + "grad_norm": 1.5326491594314575, + "learning_rate": 0.00013235517458846545, + "loss": 0.1759, + "step": 26280 + }, + { + "epoch": 1.0150971079964477, + "grad_norm": 4.376593589782715, + "learning_rate": 0.00013232943356886366, + "loss": 0.6032, + "step": 26290 + }, + { + "epoch": 1.0154832232904745, + "grad_norm": 0.8953253030776978, + "learning_rate": 0.0001323036925492619, + "loss": 0.4057, + "step": 26300 + }, + { + "epoch": 1.0158693385845012, + "grad_norm": 1.271932601928711, + "learning_rate": 0.00013227795152966009, + "loss": 0.1802, + "step": 26310 + }, + { + "epoch": 1.0162554538785282, + "grad_norm": 2.311713457107544, + "learning_rate": 0.00013225221051005833, + "loss": 0.3368, + "step": 26320 + }, + { + "epoch": 1.016641569172555, + "grad_norm": 1.386100172996521, + "learning_rate": 0.0001322264694904565, + "loss": 0.3101, + "step": 26330 + }, + { + "epoch": 1.0170276844665818, + "grad_norm": 1.371382236480713, + "learning_rate": 0.00013220072847085475, + "loss": 0.3804, + "step": 26340 + }, + { + "epoch": 1.0174137997606085, + "grad_norm": 0.7098391652107239, + "learning_rate": 0.00013217498745125294, + "loss": 0.2513, + "step": 26350 + }, + { + "epoch": 1.0177999150546353, + "grad_norm": 3.5874531269073486, + "learning_rate": 0.00013214924643165115, + "loss": 0.2467, + "step": 26360 + }, + { + "epoch": 1.018186030348662, + "grad_norm": 1.9853413105010986, + "learning_rate": 0.0001321235054120494, + "loss": 0.2255, + "step": 26370 + }, + { + "epoch": 1.0185721456426888, + "grad_norm": 0.5550156831741333, + "learning_rate": 0.00013209776439244758, + "loss": 0.1558, + "step": 26380 + }, + { + "epoch": 1.0189582609367156, + "grad_norm": 1.92972731590271, + "learning_rate": 0.00013207202337284582, + "loss": 0.4843, + "step": 26390 + }, + { + "epoch": 1.0193443762307426, + "grad_norm": 2.331674814224243, + "learning_rate": 0.000132046282353244, + "loss": 0.2652, + "step": 26400 + }, + { + "epoch": 1.0197304915247694, + "grad_norm": 0.809916615486145, + "learning_rate": 0.00013202054133364225, + "loss": 0.1824, + "step": 26410 + }, + { + "epoch": 1.0201166068187961, + "grad_norm": 2.6432926654815674, + "learning_rate": 0.00013199480031404046, + "loss": 0.2712, + "step": 26420 + }, + { + "epoch": 1.020502722112823, + "grad_norm": 1.6016955375671387, + "learning_rate": 0.00013196905929443865, + "loss": 0.1629, + "step": 26430 + }, + { + "epoch": 1.0208888374068497, + "grad_norm": 0.22881706058979034, + "learning_rate": 0.00013194331827483689, + "loss": 0.2977, + "step": 26440 + }, + { + "epoch": 1.0212749527008764, + "grad_norm": 1.3551994562149048, + "learning_rate": 0.00013191757725523507, + "loss": 0.1207, + "step": 26450 + }, + { + "epoch": 1.0216610679949032, + "grad_norm": 0.19062986969947815, + "learning_rate": 0.0001318918362356333, + "loss": 0.2201, + "step": 26460 + }, + { + "epoch": 1.0220471832889302, + "grad_norm": 2.014047145843506, + "learning_rate": 0.0001318660952160315, + "loss": 0.4991, + "step": 26470 + }, + { + "epoch": 1.022433298582957, + "grad_norm": 0.39503228664398193, + "learning_rate": 0.00013184035419642974, + "loss": 0.3509, + "step": 26480 + }, + { + "epoch": 1.0228194138769837, + "grad_norm": 4.807271957397461, + "learning_rate": 0.00013181461317682795, + "loss": 0.3324, + "step": 26490 + }, + { + "epoch": 1.0232055291710105, + "grad_norm": 1.4785593748092651, + "learning_rate": 
0.00013178887215722614, + "loss": 0.269, + "step": 26500 + }, + { + "epoch": 1.0235916444650373, + "grad_norm": 1.851137399673462, + "learning_rate": 0.00013176313113762438, + "loss": 0.2739, + "step": 26510 + }, + { + "epoch": 1.023977759759064, + "grad_norm": 0.6200979351997375, + "learning_rate": 0.00013173739011802257, + "loss": 0.3135, + "step": 26520 + }, + { + "epoch": 1.0243638750530908, + "grad_norm": 2.411592960357666, + "learning_rate": 0.0001317116490984208, + "loss": 0.1675, + "step": 26530 + }, + { + "epoch": 1.0247499903471176, + "grad_norm": 0.6965230703353882, + "learning_rate": 0.000131685908078819, + "loss": 0.2762, + "step": 26540 + }, + { + "epoch": 1.0251361056411445, + "grad_norm": 1.3620637655258179, + "learning_rate": 0.00013166016705921723, + "loss": 0.319, + "step": 26550 + }, + { + "epoch": 1.0255222209351713, + "grad_norm": 1.4016404151916504, + "learning_rate": 0.00013163442603961545, + "loss": 0.1871, + "step": 26560 + }, + { + "epoch": 1.025908336229198, + "grad_norm": 1.234718680381775, + "learning_rate": 0.00013160868502001363, + "loss": 0.1301, + "step": 26570 + }, + { + "epoch": 1.0262944515232248, + "grad_norm": 1.4657102823257446, + "learning_rate": 0.00013158294400041187, + "loss": 0.3814, + "step": 26580 + }, + { + "epoch": 1.0266805668172516, + "grad_norm": 1.606948733329773, + "learning_rate": 0.00013155720298081006, + "loss": 0.2226, + "step": 26590 + }, + { + "epoch": 1.0270666821112784, + "grad_norm": 0.4280283749103546, + "learning_rate": 0.0001315314619612083, + "loss": 0.1506, + "step": 26600 + }, + { + "epoch": 1.0274527974053052, + "grad_norm": 0.141262486577034, + "learning_rate": 0.0001315057209416065, + "loss": 0.1476, + "step": 26610 + }, + { + "epoch": 1.027838912699332, + "grad_norm": 2.7536983489990234, + "learning_rate": 0.00013147997992200473, + "loss": 0.2558, + "step": 26620 + }, + { + "epoch": 1.028225027993359, + "grad_norm": 1.052965760231018, + "learning_rate": 0.00013145423890240294, + "loss": 0.3989, + "step": 26630 + }, + { + "epoch": 1.0286111432873857, + "grad_norm": 0.3157159388065338, + "learning_rate": 0.00013142849788280112, + "loss": 0.2656, + "step": 26640 + }, + { + "epoch": 1.0289972585814124, + "grad_norm": 2.080801248550415, + "learning_rate": 0.00013140275686319937, + "loss": 0.3183, + "step": 26650 + }, + { + "epoch": 1.0293833738754392, + "grad_norm": 1.255540370941162, + "learning_rate": 0.00013137701584359755, + "loss": 0.4555, + "step": 26660 + }, + { + "epoch": 1.029769489169466, + "grad_norm": 1.0504742860794067, + "learning_rate": 0.0001313512748239958, + "loss": 0.17, + "step": 26670 + }, + { + "epoch": 1.0301556044634927, + "grad_norm": 1.8295503854751587, + "learning_rate": 0.000131325533804394, + "loss": 0.2202, + "step": 26680 + }, + { + "epoch": 1.0305417197575195, + "grad_norm": 0.9758415222167969, + "learning_rate": 0.00013129979278479222, + "loss": 0.1187, + "step": 26690 + }, + { + "epoch": 1.0309278350515463, + "grad_norm": 0.6076366901397705, + "learning_rate": 0.00013127405176519043, + "loss": 0.0668, + "step": 26700 + }, + { + "epoch": 1.0313139503455733, + "grad_norm": 0.7663784027099609, + "learning_rate": 0.00013124831074558865, + "loss": 0.2115, + "step": 26710 + }, + { + "epoch": 1.0317000656396, + "grad_norm": 1.814332365989685, + "learning_rate": 0.00013122256972598686, + "loss": 0.0901, + "step": 26720 + }, + { + "epoch": 1.0320861809336268, + "grad_norm": 2.80830454826355, + "learning_rate": 0.00013119682870638504, + "loss": 0.213, + "step": 26730 + }, + { + "epoch": 
1.0324722962276536, + "grad_norm": 1.324601411819458, + "learning_rate": 0.00013117108768678328, + "loss": 0.6125, + "step": 26740 + }, + { + "epoch": 1.0328584115216803, + "grad_norm": 1.3301643133163452, + "learning_rate": 0.0001311453466671815, + "loss": 0.1986, + "step": 26750 + }, + { + "epoch": 1.033244526815707, + "grad_norm": 5.361929893493652, + "learning_rate": 0.0001311196056475797, + "loss": 0.5023, + "step": 26760 + }, + { + "epoch": 1.0336306421097339, + "grad_norm": 0.7855739593505859, + "learning_rate": 0.00013109386462797792, + "loss": 0.2549, + "step": 26770 + }, + { + "epoch": 1.0340167574037609, + "grad_norm": 0.05219104886054993, + "learning_rate": 0.00013106812360837614, + "loss": 0.2942, + "step": 26780 + }, + { + "epoch": 1.0344028726977876, + "grad_norm": 0.7680227160453796, + "learning_rate": 0.00013104238258877435, + "loss": 0.2909, + "step": 26790 + }, + { + "epoch": 1.0347889879918144, + "grad_norm": 0.559930682182312, + "learning_rate": 0.00013101664156917256, + "loss": 0.2876, + "step": 26800 + }, + { + "epoch": 1.0351751032858412, + "grad_norm": 0.8780495524406433, + "learning_rate": 0.00013099090054957078, + "loss": 0.2837, + "step": 26810 + }, + { + "epoch": 1.035561218579868, + "grad_norm": 2.0212693214416504, + "learning_rate": 0.000130965159529969, + "loss": 0.3008, + "step": 26820 + }, + { + "epoch": 1.0359473338738947, + "grad_norm": 2.2967641353607178, + "learning_rate": 0.0001309394185103672, + "loss": 0.2797, + "step": 26830 + }, + { + "epoch": 1.0363334491679215, + "grad_norm": 0.33352091908454895, + "learning_rate": 0.00013091367749076542, + "loss": 0.4785, + "step": 26840 + }, + { + "epoch": 1.0367195644619482, + "grad_norm": 1.5413645505905151, + "learning_rate": 0.00013088793647116363, + "loss": 0.3248, + "step": 26850 + }, + { + "epoch": 1.0371056797559752, + "grad_norm": 0.36845988035202026, + "learning_rate": 0.00013086219545156184, + "loss": 0.1723, + "step": 26860 + }, + { + "epoch": 1.037491795050002, + "grad_norm": 1.196103811264038, + "learning_rate": 0.00013083645443196006, + "loss": 0.3623, + "step": 26870 + }, + { + "epoch": 1.0378779103440288, + "grad_norm": 2.4172215461730957, + "learning_rate": 0.00013081071341235827, + "loss": 0.3428, + "step": 26880 + }, + { + "epoch": 1.0382640256380555, + "grad_norm": 1.585368275642395, + "learning_rate": 0.00013078497239275648, + "loss": 0.2791, + "step": 26890 + }, + { + "epoch": 1.0386501409320823, + "grad_norm": 2.8708138465881348, + "learning_rate": 0.0001307592313731547, + "loss": 0.3386, + "step": 26900 + }, + { + "epoch": 1.039036256226109, + "grad_norm": 1.1256765127182007, + "learning_rate": 0.0001307334903535529, + "loss": 0.2992, + "step": 26910 + }, + { + "epoch": 1.0394223715201358, + "grad_norm": 0.9846695065498352, + "learning_rate": 0.00013070774933395112, + "loss": 0.2659, + "step": 26920 + }, + { + "epoch": 1.0398084868141626, + "grad_norm": 1.5781525373458862, + "learning_rate": 0.00013068200831434934, + "loss": 0.2825, + "step": 26930 + }, + { + "epoch": 1.0401946021081896, + "grad_norm": 0.6615175008773804, + "learning_rate": 0.00013065626729474755, + "loss": 0.3608, + "step": 26940 + }, + { + "epoch": 1.0405807174022164, + "grad_norm": 1.2707170248031616, + "learning_rate": 0.00013063052627514576, + "loss": 0.3183, + "step": 26950 + }, + { + "epoch": 1.0409668326962431, + "grad_norm": 2.6396381855010986, + "learning_rate": 0.00013060478525554398, + "loss": 0.284, + "step": 26960 + }, + { + "epoch": 1.04135294799027, + "grad_norm": 1.5508041381835938, + "learning_rate": 
0.0001305790442359422, + "loss": 0.2861, + "step": 26970 + }, + { + "epoch": 1.0417390632842967, + "grad_norm": 0.32720935344696045, + "learning_rate": 0.0001305533032163404, + "loss": 0.2784, + "step": 26980 + }, + { + "epoch": 1.0421251785783234, + "grad_norm": 2.3695240020751953, + "learning_rate": 0.00013052756219673862, + "loss": 0.3058, + "step": 26990 + }, + { + "epoch": 1.0425112938723502, + "grad_norm": 1.5404917001724243, + "learning_rate": 0.00013050182117713683, + "loss": 0.3636, + "step": 27000 + }, + { + "epoch": 1.0428974091663772, + "grad_norm": 1.3176417350769043, + "learning_rate": 0.00013047608015753504, + "loss": 0.2995, + "step": 27010 + }, + { + "epoch": 1.043283524460404, + "grad_norm": 1.5777894258499146, + "learning_rate": 0.00013045033913793326, + "loss": 0.3244, + "step": 27020 + }, + { + "epoch": 1.0436696397544307, + "grad_norm": 2.209652900695801, + "learning_rate": 0.00013042459811833147, + "loss": 0.2716, + "step": 27030 + }, + { + "epoch": 1.0440557550484575, + "grad_norm": 1.7991529703140259, + "learning_rate": 0.00013039885709872968, + "loss": 0.2022, + "step": 27040 + }, + { + "epoch": 1.0444418703424843, + "grad_norm": 0.3741607069969177, + "learning_rate": 0.0001303731160791279, + "loss": 0.3808, + "step": 27050 + }, + { + "epoch": 1.044827985636511, + "grad_norm": 0.023122821003198624, + "learning_rate": 0.0001303473750595261, + "loss": 0.3209, + "step": 27060 + }, + { + "epoch": 1.0452141009305378, + "grad_norm": 2.0373151302337646, + "learning_rate": 0.00013032163403992432, + "loss": 0.3105, + "step": 27070 + }, + { + "epoch": 1.0456002162245646, + "grad_norm": 0.5242247581481934, + "learning_rate": 0.00013029589302032254, + "loss": 0.2589, + "step": 27080 + }, + { + "epoch": 1.0459863315185915, + "grad_norm": 0.8598466515541077, + "learning_rate": 0.00013027015200072075, + "loss": 0.3025, + "step": 27090 + }, + { + "epoch": 1.0463724468126183, + "grad_norm": 0.19895502924919128, + "learning_rate": 0.00013024441098111896, + "loss": 0.3278, + "step": 27100 + }, + { + "epoch": 1.046758562106645, + "grad_norm": 1.9250338077545166, + "learning_rate": 0.0001302186699615172, + "loss": 0.3272, + "step": 27110 + }, + { + "epoch": 1.0471446774006719, + "grad_norm": 0.6927512288093567, + "learning_rate": 0.0001301929289419154, + "loss": 0.3256, + "step": 27120 + }, + { + "epoch": 1.0475307926946986, + "grad_norm": 0.5463778376579285, + "learning_rate": 0.0001301671879223136, + "loss": 0.2726, + "step": 27130 + }, + { + "epoch": 1.0479169079887254, + "grad_norm": 0.24690699577331543, + "learning_rate": 0.00013014144690271182, + "loss": 0.1247, + "step": 27140 + }, + { + "epoch": 1.0483030232827522, + "grad_norm": 7.875885009765625, + "learning_rate": 0.00013011570588311003, + "loss": 0.24, + "step": 27150 + }, + { + "epoch": 1.048689138576779, + "grad_norm": 2.0821642875671387, + "learning_rate": 0.00013008996486350824, + "loss": 0.2695, + "step": 27160 + }, + { + "epoch": 1.049075253870806, + "grad_norm": 1.0324410200119019, + "learning_rate": 0.00013006422384390646, + "loss": 0.3156, + "step": 27170 + }, + { + "epoch": 1.0494613691648327, + "grad_norm": 1.3675347566604614, + "learning_rate": 0.0001300384828243047, + "loss": 0.2366, + "step": 27180 + }, + { + "epoch": 1.0498474844588594, + "grad_norm": 0.4514729082584381, + "learning_rate": 0.00013001274180470288, + "loss": 0.1541, + "step": 27190 + }, + { + "epoch": 1.0502335997528862, + "grad_norm": 1.0098782777786255, + "learning_rate": 0.0001299870007851011, + "loss": 0.3059, + "step": 27200 + }, + { + 
"epoch": 1.050619715046913, + "grad_norm": 0.07944436371326447, + "learning_rate": 0.0001299612597654993, + "loss": 0.1882, + "step": 27210 + }, + { + "epoch": 1.0510058303409398, + "grad_norm": 0.8160178661346436, + "learning_rate": 0.00012993551874589752, + "loss": 0.2657, + "step": 27220 + }, + { + "epoch": 1.0513919456349665, + "grad_norm": 3.881469964981079, + "learning_rate": 0.00012990977772629574, + "loss": 0.4586, + "step": 27230 + }, + { + "epoch": 1.0517780609289935, + "grad_norm": 1.7851450443267822, + "learning_rate": 0.00012988403670669395, + "loss": 0.2945, + "step": 27240 + }, + { + "epoch": 1.0521641762230203, + "grad_norm": 4.176274299621582, + "learning_rate": 0.0001298582956870922, + "loss": 0.3819, + "step": 27250 + }, + { + "epoch": 1.052550291517047, + "grad_norm": 1.43479585647583, + "learning_rate": 0.00012983255466749038, + "loss": 0.3651, + "step": 27260 + }, + { + "epoch": 1.0529364068110738, + "grad_norm": 0.4839598536491394, + "learning_rate": 0.0001298068136478886, + "loss": 0.186, + "step": 27270 + }, + { + "epoch": 1.0533225221051006, + "grad_norm": 0.8487644791603088, + "learning_rate": 0.0001297810726282868, + "loss": 0.0851, + "step": 27280 + }, + { + "epoch": 1.0537086373991273, + "grad_norm": 3.5912392139434814, + "learning_rate": 0.00012975533160868502, + "loss": 0.3962, + "step": 27290 + }, + { + "epoch": 1.0540947526931541, + "grad_norm": 2.4018168449401855, + "learning_rate": 0.00012972959058908326, + "loss": 0.4, + "step": 27300 + }, + { + "epoch": 1.0544808679871809, + "grad_norm": 3.8987746238708496, + "learning_rate": 0.00012970384956948144, + "loss": 0.3081, + "step": 27310 + }, + { + "epoch": 1.0548669832812079, + "grad_norm": 0.4471427798271179, + "learning_rate": 0.00012967810854987968, + "loss": 0.2227, + "step": 27320 + }, + { + "epoch": 1.0552530985752346, + "grad_norm": 0.6207703351974487, + "learning_rate": 0.00012965236753027787, + "loss": 0.2066, + "step": 27330 + }, + { + "epoch": 1.0556392138692614, + "grad_norm": 1.4381637573242188, + "learning_rate": 0.00012962662651067608, + "loss": 0.2598, + "step": 27340 + }, + { + "epoch": 1.0560253291632882, + "grad_norm": 1.6057437658309937, + "learning_rate": 0.0001296008854910743, + "loss": 0.4003, + "step": 27350 + }, + { + "epoch": 1.056411444457315, + "grad_norm": 1.583121418952942, + "learning_rate": 0.0001295751444714725, + "loss": 0.2231, + "step": 27360 + }, + { + "epoch": 1.0567975597513417, + "grad_norm": 0.9260556101799011, + "learning_rate": 0.00012954940345187075, + "loss": 0.3066, + "step": 27370 + }, + { + "epoch": 1.0571836750453685, + "grad_norm": 0.530800461769104, + "learning_rate": 0.00012952366243226894, + "loss": 0.2063, + "step": 27380 + }, + { + "epoch": 1.0575697903393952, + "grad_norm": 0.3256929814815521, + "learning_rate": 0.00012949792141266718, + "loss": 0.2279, + "step": 27390 + }, + { + "epoch": 1.0579559056334222, + "grad_norm": 1.2628639936447144, + "learning_rate": 0.00012947218039306536, + "loss": 0.179, + "step": 27400 + }, + { + "epoch": 1.058342020927449, + "grad_norm": 1.168662190437317, + "learning_rate": 0.0001294464393734636, + "loss": 0.2862, + "step": 27410 + }, + { + "epoch": 1.0587281362214758, + "grad_norm": 3.8283517360687256, + "learning_rate": 0.00012942069835386182, + "loss": 0.3933, + "step": 27420 + }, + { + "epoch": 1.0591142515155025, + "grad_norm": 0.10550712049007416, + "learning_rate": 0.00012939495733426, + "loss": 0.2015, + "step": 27430 + }, + { + "epoch": 1.0595003668095293, + "grad_norm": 0.7177254557609558, + "learning_rate": 
0.00012936921631465824, + "loss": 0.2126, + "step": 27440 + }, + { + "epoch": 1.059886482103556, + "grad_norm": 3.3265602588653564, + "learning_rate": 0.00012934347529505643, + "loss": 0.3734, + "step": 27450 + }, + { + "epoch": 1.0602725973975828, + "grad_norm": 0.9886051416397095, + "learning_rate": 0.00012931773427545467, + "loss": 0.2643, + "step": 27460 + }, + { + "epoch": 1.0606587126916098, + "grad_norm": 0.8335347175598145, + "learning_rate": 0.00012929199325585286, + "loss": 0.2044, + "step": 27470 + }, + { + "epoch": 1.0610448279856366, + "grad_norm": 1.16574227809906, + "learning_rate": 0.0001292662522362511, + "loss": 0.2316, + "step": 27480 + }, + { + "epoch": 1.0614309432796634, + "grad_norm": 0.1635606586933136, + "learning_rate": 0.0001292405112166493, + "loss": 0.2477, + "step": 27490 + }, + { + "epoch": 1.0618170585736901, + "grad_norm": 0.6800632476806641, + "learning_rate": 0.0001292147701970475, + "loss": 0.2831, + "step": 27500 + }, + { + "epoch": 1.062203173867717, + "grad_norm": 0.5231989622116089, + "learning_rate": 0.00012918902917744574, + "loss": 0.1621, + "step": 27510 + }, + { + "epoch": 1.0625892891617437, + "grad_norm": 6.668003559112549, + "learning_rate": 0.00012916328815784392, + "loss": 0.2736, + "step": 27520 + }, + { + "epoch": 1.0629754044557704, + "grad_norm": 0.2980963885784149, + "learning_rate": 0.00012913754713824216, + "loss": 0.2563, + "step": 27530 + }, + { + "epoch": 1.0633615197497972, + "grad_norm": 0.8144646883010864, + "learning_rate": 0.00012911180611864035, + "loss": 0.3821, + "step": 27540 + }, + { + "epoch": 1.063747635043824, + "grad_norm": 0.9781578183174133, + "learning_rate": 0.0001290860650990386, + "loss": 0.3653, + "step": 27550 + }, + { + "epoch": 1.064133750337851, + "grad_norm": 1.5652499198913574, + "learning_rate": 0.0001290603240794368, + "loss": 0.325, + "step": 27560 + }, + { + "epoch": 1.0645198656318777, + "grad_norm": 2.707165002822876, + "learning_rate": 0.000129034583059835, + "loss": 0.3129, + "step": 27570 + }, + { + "epoch": 1.0649059809259045, + "grad_norm": 0.35952532291412354, + "learning_rate": 0.00012900884204023323, + "loss": 0.2093, + "step": 27580 + }, + { + "epoch": 1.0652920962199313, + "grad_norm": 1.2863729000091553, + "learning_rate": 0.00012898310102063142, + "loss": 0.1299, + "step": 27590 + }, + { + "epoch": 1.065678211513958, + "grad_norm": 0.8279618620872498, + "learning_rate": 0.00012895736000102966, + "loss": 0.2072, + "step": 27600 + }, + { + "epoch": 1.0660643268079848, + "grad_norm": 0.4006168246269226, + "learning_rate": 0.00012893161898142787, + "loss": 0.3613, + "step": 27610 + }, + { + "epoch": 1.0664504421020116, + "grad_norm": 1.2714260816574097, + "learning_rate": 0.00012890587796182608, + "loss": 0.2013, + "step": 27620 + }, + { + "epoch": 1.0668365573960386, + "grad_norm": 1.9552396535873413, + "learning_rate": 0.0001288801369422243, + "loss": 0.2837, + "step": 27630 + }, + { + "epoch": 1.0672226726900653, + "grad_norm": 2.179871082305908, + "learning_rate": 0.00012885439592262248, + "loss": 0.1521, + "step": 27640 + }, + { + "epoch": 1.067608787984092, + "grad_norm": 1.1471878290176392, + "learning_rate": 0.00012882865490302072, + "loss": 0.2532, + "step": 27650 + }, + { + "epoch": 1.0679949032781189, + "grad_norm": 3.0032637119293213, + "learning_rate": 0.0001288029138834189, + "loss": 0.467, + "step": 27660 + }, + { + "epoch": 1.0683810185721456, + "grad_norm": 2.480180025100708, + "learning_rate": 0.00012877717286381715, + "loss": 0.3601, + "step": 27670 + }, + { + "epoch": 
1.0687671338661724, + "grad_norm": 1.86027991771698, + "learning_rate": 0.00012875143184421536, + "loss": 0.3689, + "step": 27680 + }, + { + "epoch": 1.0691532491601992, + "grad_norm": 0.38396087288856506, + "learning_rate": 0.00012872569082461358, + "loss": 0.1401, + "step": 27690 + }, + { + "epoch": 1.0695393644542261, + "grad_norm": 0.8882033824920654, + "learning_rate": 0.0001286999498050118, + "loss": 0.6004, + "step": 27700 + }, + { + "epoch": 1.069925479748253, + "grad_norm": 1.2111278772354126, + "learning_rate": 0.00012867420878540998, + "loss": 0.2371, + "step": 27710 + }, + { + "epoch": 1.0703115950422797, + "grad_norm": 1.7789413928985596, + "learning_rate": 0.00012864846776580822, + "loss": 0.3142, + "step": 27720 + }, + { + "epoch": 1.0706977103363065, + "grad_norm": 1.6160372495651245, + "learning_rate": 0.00012862272674620643, + "loss": 0.3444, + "step": 27730 + }, + { + "epoch": 1.0710838256303332, + "grad_norm": 3.336289167404175, + "learning_rate": 0.00012859698572660464, + "loss": 0.1214, + "step": 27740 + }, + { + "epoch": 1.07146994092436, + "grad_norm": 4.104520320892334, + "learning_rate": 0.00012857124470700286, + "loss": 0.1752, + "step": 27750 + }, + { + "epoch": 1.0718560562183868, + "grad_norm": 0.4429762363433838, + "learning_rate": 0.00012854550368740107, + "loss": 0.5001, + "step": 27760 + }, + { + "epoch": 1.0722421715124135, + "grad_norm": 0.557033121585846, + "learning_rate": 0.00012851976266779928, + "loss": 0.3267, + "step": 27770 + }, + { + "epoch": 1.0726282868064403, + "grad_norm": 1.6847301721572876, + "learning_rate": 0.00012849402164819747, + "loss": 0.2305, + "step": 27780 + }, + { + "epoch": 1.0730144021004673, + "grad_norm": 1.0504320859909058, + "learning_rate": 0.0001284682806285957, + "loss": 0.3443, + "step": 27790 + }, + { + "epoch": 1.073400517394494, + "grad_norm": 2.728804349899292, + "learning_rate": 0.00012844253960899392, + "loss": 0.4083, + "step": 27800 + }, + { + "epoch": 1.0737866326885208, + "grad_norm": 0.6164497137069702, + "learning_rate": 0.00012841679858939214, + "loss": 0.2049, + "step": 27810 + }, + { + "epoch": 1.0741727479825476, + "grad_norm": 2.1831917762756348, + "learning_rate": 0.00012839105756979035, + "loss": 0.1674, + "step": 27820 + }, + { + "epoch": 1.0745588632765743, + "grad_norm": 0.5254467129707336, + "learning_rate": 0.00012836531655018856, + "loss": 0.3456, + "step": 27830 + }, + { + "epoch": 1.0749449785706011, + "grad_norm": 2.920846700668335, + "learning_rate": 0.00012833957553058678, + "loss": 0.4071, + "step": 27840 + }, + { + "epoch": 1.0753310938646279, + "grad_norm": 0.6006580591201782, + "learning_rate": 0.00012831383451098496, + "loss": 0.16, + "step": 27850 + }, + { + "epoch": 1.0757172091586549, + "grad_norm": 1.7163684368133545, + "learning_rate": 0.0001282880934913832, + "loss": 0.1821, + "step": 27860 + }, + { + "epoch": 1.0761033244526816, + "grad_norm": 1.8286449909210205, + "learning_rate": 0.00012826235247178142, + "loss": 0.3079, + "step": 27870 + }, + { + "epoch": 1.0764894397467084, + "grad_norm": 2.5178558826446533, + "learning_rate": 0.00012823661145217963, + "loss": 0.2158, + "step": 27880 + }, + { + "epoch": 1.0768755550407352, + "grad_norm": 0.8985245227813721, + "learning_rate": 0.00012821087043257784, + "loss": 0.198, + "step": 27890 + }, + { + "epoch": 1.077261670334762, + "grad_norm": 0.11768722534179688, + "learning_rate": 0.00012818512941297606, + "loss": 0.1661, + "step": 27900 + }, + { + "epoch": 1.0776477856287887, + "grad_norm": 1.0070226192474365, + "learning_rate": 
0.00012815938839337427, + "loss": 0.2843, + "step": 27910 + }, + { + "epoch": 1.0780339009228155, + "grad_norm": 1.6243773698806763, + "learning_rate": 0.00012813364737377248, + "loss": 0.3449, + "step": 27920 + }, + { + "epoch": 1.0784200162168422, + "grad_norm": 2.8661181926727295, + "learning_rate": 0.0001281079063541707, + "loss": 0.4921, + "step": 27930 + }, + { + "epoch": 1.0788061315108692, + "grad_norm": 1.5015594959259033, + "learning_rate": 0.0001280821653345689, + "loss": 0.3347, + "step": 27940 + }, + { + "epoch": 1.079192246804896, + "grad_norm": 1.7244246006011963, + "learning_rate": 0.00012805642431496712, + "loss": 0.3342, + "step": 27950 + }, + { + "epoch": 1.0795783620989228, + "grad_norm": 0.17871785163879395, + "learning_rate": 0.00012803068329536534, + "loss": 0.051, + "step": 27960 + }, + { + "epoch": 1.0799644773929495, + "grad_norm": 1.093429684638977, + "learning_rate": 0.00012800494227576355, + "loss": 0.2461, + "step": 27970 + }, + { + "epoch": 1.0803505926869763, + "grad_norm": 1.0775126218795776, + "learning_rate": 0.00012797920125616176, + "loss": 0.293, + "step": 27980 + }, + { + "epoch": 1.080736707981003, + "grad_norm": 2.0808680057525635, + "learning_rate": 0.00012795346023655998, + "loss": 0.2125, + "step": 27990 + }, + { + "epoch": 1.0811228232750298, + "grad_norm": 3.172473907470703, + "learning_rate": 0.0001279277192169582, + "loss": 0.3289, + "step": 28000 + }, + { + "epoch": 1.0815089385690566, + "grad_norm": 0.8227205872535706, + "learning_rate": 0.0001279019781973564, + "loss": 0.4457, + "step": 28010 + }, + { + "epoch": 1.0818950538630836, + "grad_norm": 0.4987971782684326, + "learning_rate": 0.00012787623717775462, + "loss": 0.2397, + "step": 28020 + }, + { + "epoch": 1.0822811691571104, + "grad_norm": 0.6923367381095886, + "learning_rate": 0.00012785049615815283, + "loss": 0.1833, + "step": 28030 + }, + { + "epoch": 1.0826672844511371, + "grad_norm": 0.3719552457332611, + "learning_rate": 0.00012782475513855104, + "loss": 0.4045, + "step": 28040 + }, + { + "epoch": 1.083053399745164, + "grad_norm": 0.798744261264801, + "learning_rate": 0.00012779901411894926, + "loss": 0.2218, + "step": 28050 + }, + { + "epoch": 1.0834395150391907, + "grad_norm": 0.6289515495300293, + "learning_rate": 0.00012777327309934747, + "loss": 0.2423, + "step": 28060 + }, + { + "epoch": 1.0838256303332174, + "grad_norm": 0.6853532195091248, + "learning_rate": 0.00012774753207974568, + "loss": 0.2759, + "step": 28070 + }, + { + "epoch": 1.0842117456272442, + "grad_norm": 1.3380333185195923, + "learning_rate": 0.0001277217910601439, + "loss": 0.2457, + "step": 28080 + }, + { + "epoch": 1.0845978609212712, + "grad_norm": 1.4076060056686401, + "learning_rate": 0.0001276960500405421, + "loss": 0.3065, + "step": 28090 + }, + { + "epoch": 1.084983976215298, + "grad_norm": 1.790323257446289, + "learning_rate": 0.00012767030902094032, + "loss": 0.182, + "step": 28100 + }, + { + "epoch": 1.0853700915093247, + "grad_norm": 1.9291974306106567, + "learning_rate": 0.00012764456800133854, + "loss": 0.3798, + "step": 28110 + }, + { + "epoch": 1.0857562068033515, + "grad_norm": 1.36685049533844, + "learning_rate": 0.00012761882698173675, + "loss": 0.1893, + "step": 28120 + }, + { + "epoch": 1.0861423220973783, + "grad_norm": 2.52441668510437, + "learning_rate": 0.00012759308596213496, + "loss": 0.2389, + "step": 28130 + }, + { + "epoch": 1.086528437391405, + "grad_norm": 0.9578754901885986, + "learning_rate": 0.00012756734494253318, + "loss": 0.3133, + "step": 28140 + }, + { + "epoch": 
1.0869145526854318, + "grad_norm": 0.17957572638988495, + "learning_rate": 0.0001275416039229314, + "loss": 0.1708, + "step": 28150 + }, + { + "epoch": 1.0873006679794586, + "grad_norm": 1.7213740348815918, + "learning_rate": 0.0001275158629033296, + "loss": 0.3831, + "step": 28160 + }, + { + "epoch": 1.0876867832734856, + "grad_norm": 1.2460767030715942, + "learning_rate": 0.00012749012188372782, + "loss": 0.266, + "step": 28170 + }, + { + "epoch": 1.0880728985675123, + "grad_norm": 0.33691835403442383, + "learning_rate": 0.00012746438086412606, + "loss": 0.216, + "step": 28180 + }, + { + "epoch": 1.088459013861539, + "grad_norm": 0.23887981474399567, + "learning_rate": 0.00012743863984452424, + "loss": 0.2331, + "step": 28190 + }, + { + "epoch": 1.0888451291555659, + "grad_norm": 4.753499984741211, + "learning_rate": 0.00012741289882492246, + "loss": 0.1845, + "step": 28200 + }, + { + "epoch": 1.0892312444495926, + "grad_norm": 1.055097222328186, + "learning_rate": 0.00012738715780532067, + "loss": 0.2978, + "step": 28210 + }, + { + "epoch": 1.0896173597436194, + "grad_norm": 2.3080852031707764, + "learning_rate": 0.00012736141678571888, + "loss": 0.1838, + "step": 28220 + }, + { + "epoch": 1.0900034750376462, + "grad_norm": 0.3733162581920624, + "learning_rate": 0.00012733567576611712, + "loss": 0.1941, + "step": 28230 + }, + { + "epoch": 1.090389590331673, + "grad_norm": 2.247748613357544, + "learning_rate": 0.0001273099347465153, + "loss": 0.2594, + "step": 28240 + }, + { + "epoch": 1.0907757056257, + "grad_norm": 1.644177794456482, + "learning_rate": 0.00012728419372691355, + "loss": 0.392, + "step": 28250 + }, + { + "epoch": 1.0911618209197267, + "grad_norm": 2.3522965908050537, + "learning_rate": 0.00012725845270731174, + "loss": 0.2198, + "step": 28260 + }, + { + "epoch": 1.0915479362137535, + "grad_norm": 0.2335210144519806, + "learning_rate": 0.00012723271168770995, + "loss": 0.3363, + "step": 28270 + }, + { + "epoch": 1.0919340515077802, + "grad_norm": 0.476607084274292, + "learning_rate": 0.00012720697066810816, + "loss": 0.1803, + "step": 28280 + }, + { + "epoch": 1.092320166801807, + "grad_norm": 2.1482882499694824, + "learning_rate": 0.00012718122964850638, + "loss": 0.2567, + "step": 28290 + }, + { + "epoch": 1.0927062820958338, + "grad_norm": 1.6457593441009521, + "learning_rate": 0.00012715548862890462, + "loss": 0.196, + "step": 28300 + }, + { + "epoch": 1.0930923973898605, + "grad_norm": 2.0742087364196777, + "learning_rate": 0.0001271297476093028, + "loss": 0.3955, + "step": 28310 + }, + { + "epoch": 1.0934785126838875, + "grad_norm": 0.1588711142539978, + "learning_rate": 0.00012710400658970104, + "loss": 0.2113, + "step": 28320 + }, + { + "epoch": 1.0938646279779143, + "grad_norm": 4.303687572479248, + "learning_rate": 0.00012707826557009923, + "loss": 0.2941, + "step": 28330 + }, + { + "epoch": 1.094250743271941, + "grad_norm": 2.0096209049224854, + "learning_rate": 0.00012705252455049744, + "loss": 0.3092, + "step": 28340 + }, + { + "epoch": 1.0946368585659678, + "grad_norm": 0.49071142077445984, + "learning_rate": 0.00012702678353089566, + "loss": 0.2387, + "step": 28350 + }, + { + "epoch": 1.0950229738599946, + "grad_norm": 0.9084739089012146, + "learning_rate": 0.00012700104251129387, + "loss": 0.2199, + "step": 28360 + }, + { + "epoch": 1.0954090891540214, + "grad_norm": 2.076706647872925, + "learning_rate": 0.0001269753014916921, + "loss": 0.3652, + "step": 28370 + }, + { + "epoch": 1.0957952044480481, + "grad_norm": 0.13036206364631653, + "learning_rate": 
0.0001269495604720903, + "loss": 0.2514, + "step": 28380 + }, + { + "epoch": 1.0961813197420749, + "grad_norm": 0.146321102976799, + "learning_rate": 0.00012692381945248854, + "loss": 0.2721, + "step": 28390 + }, + { + "epoch": 1.0965674350361019, + "grad_norm": 0.8172006607055664, + "learning_rate": 0.00012689807843288672, + "loss": 0.16, + "step": 28400 + }, + { + "epoch": 1.0969535503301286, + "grad_norm": 1.099068522453308, + "learning_rate": 0.00012687233741328494, + "loss": 0.2489, + "step": 28410 + }, + { + "epoch": 1.0973396656241554, + "grad_norm": 0.6757088899612427, + "learning_rate": 0.00012684659639368318, + "loss": 0.1449, + "step": 28420 + }, + { + "epoch": 1.0977257809181822, + "grad_norm": 0.11124458909034729, + "learning_rate": 0.00012682085537408136, + "loss": 0.2202, + "step": 28430 + }, + { + "epoch": 1.098111896212209, + "grad_norm": 2.357466220855713, + "learning_rate": 0.0001267951143544796, + "loss": 0.2669, + "step": 28440 + }, + { + "epoch": 1.0984980115062357, + "grad_norm": 4.569977760314941, + "learning_rate": 0.0001267693733348778, + "loss": 0.3054, + "step": 28450 + }, + { + "epoch": 1.0988841268002625, + "grad_norm": 1.150667667388916, + "learning_rate": 0.00012674363231527603, + "loss": 0.1519, + "step": 28460 + }, + { + "epoch": 1.0992702420942893, + "grad_norm": 2.016101360321045, + "learning_rate": 0.00012671789129567421, + "loss": 0.2307, + "step": 28470 + }, + { + "epoch": 1.0996563573883162, + "grad_norm": 1.2213127613067627, + "learning_rate": 0.00012669215027607243, + "loss": 0.2847, + "step": 28480 + }, + { + "epoch": 1.100042472682343, + "grad_norm": 2.8080902099609375, + "learning_rate": 0.00012666640925647067, + "loss": 0.2295, + "step": 28490 + }, + { + "epoch": 1.1004285879763698, + "grad_norm": 1.4878045320510864, + "learning_rate": 0.00012664066823686885, + "loss": 0.1497, + "step": 28500 + }, + { + "epoch": 1.1008147032703965, + "grad_norm": 0.7453703880310059, + "learning_rate": 0.0001266149272172671, + "loss": 0.2052, + "step": 28510 + }, + { + "epoch": 1.1012008185644233, + "grad_norm": 0.2775499224662781, + "learning_rate": 0.00012658918619766528, + "loss": 0.1576, + "step": 28520 + }, + { + "epoch": 1.10158693385845, + "grad_norm": 1.0527644157409668, + "learning_rate": 0.00012656344517806352, + "loss": 0.2957, + "step": 28530 + }, + { + "epoch": 1.1019730491524768, + "grad_norm": 0.6511454582214355, + "learning_rate": 0.0001265377041584617, + "loss": 0.2336, + "step": 28540 + }, + { + "epoch": 1.1023591644465038, + "grad_norm": 0.32867324352264404, + "learning_rate": 0.00012651196313885992, + "loss": 0.1745, + "step": 28550 + }, + { + "epoch": 1.1027452797405306, + "grad_norm": 2.4408578872680664, + "learning_rate": 0.00012648622211925816, + "loss": 0.2195, + "step": 28560 + }, + { + "epoch": 1.1031313950345574, + "grad_norm": 1.4876518249511719, + "learning_rate": 0.00012646048109965635, + "loss": 0.5123, + "step": 28570 + }, + { + "epoch": 1.1035175103285841, + "grad_norm": 1.9403778314590454, + "learning_rate": 0.0001264347400800546, + "loss": 0.2783, + "step": 28580 + }, + { + "epoch": 1.103903625622611, + "grad_norm": 1.215280532836914, + "learning_rate": 0.00012640899906045277, + "loss": 0.2661, + "step": 28590 + }, + { + "epoch": 1.1042897409166377, + "grad_norm": 0.7179967164993286, + "learning_rate": 0.00012638325804085102, + "loss": 0.1756, + "step": 28600 + }, + { + "epoch": 1.1046758562106644, + "grad_norm": 1.9051718711853027, + "learning_rate": 0.00012635751702124923, + "loss": 0.2724, + "step": 28610 + }, + { + 
"epoch": 1.1050619715046912, + "grad_norm": 1.5659642219543457, + "learning_rate": 0.00012633177600164744, + "loss": 0.3891, + "step": 28620 + }, + { + "epoch": 1.1054480867987182, + "grad_norm": 0.9210501313209534, + "learning_rate": 0.00012630603498204566, + "loss": 0.1369, + "step": 28630 + }, + { + "epoch": 1.105834202092745, + "grad_norm": 0.5428475141525269, + "learning_rate": 0.00012628029396244384, + "loss": 0.1173, + "step": 28640 + }, + { + "epoch": 1.1062203173867717, + "grad_norm": 1.7399749755859375, + "learning_rate": 0.00012625455294284208, + "loss": 0.3005, + "step": 28650 + }, + { + "epoch": 1.1066064326807985, + "grad_norm": 0.09703828394412994, + "learning_rate": 0.00012622881192324027, + "loss": 0.3346, + "step": 28660 + }, + { + "epoch": 1.1069925479748253, + "grad_norm": 0.5834600329399109, + "learning_rate": 0.0001262030709036385, + "loss": 0.2689, + "step": 28670 + }, + { + "epoch": 1.107378663268852, + "grad_norm": 1.0677303075790405, + "learning_rate": 0.00012617732988403672, + "loss": 0.5609, + "step": 28680 + }, + { + "epoch": 1.1077647785628788, + "grad_norm": 1.6462419033050537, + "learning_rate": 0.00012615158886443493, + "loss": 0.1991, + "step": 28690 + }, + { + "epoch": 1.1081508938569056, + "grad_norm": 0.933779776096344, + "learning_rate": 0.00012612584784483315, + "loss": 0.2757, + "step": 28700 + }, + { + "epoch": 1.1085370091509326, + "grad_norm": 1.3413206338882446, + "learning_rate": 0.00012610010682523133, + "loss": 0.1798, + "step": 28710 + }, + { + "epoch": 1.1089231244449593, + "grad_norm": 4.479143142700195, + "learning_rate": 0.00012607436580562957, + "loss": 0.1777, + "step": 28720 + }, + { + "epoch": 1.109309239738986, + "grad_norm": 1.5768260955810547, + "learning_rate": 0.0001260486247860278, + "loss": 0.1967, + "step": 28730 + }, + { + "epoch": 1.1096953550330129, + "grad_norm": 2.1206741333007812, + "learning_rate": 0.000126022883766426, + "loss": 0.2399, + "step": 28740 + }, + { + "epoch": 1.1100814703270396, + "grad_norm": 1.4531667232513428, + "learning_rate": 0.00012599714274682421, + "loss": 0.3464, + "step": 28750 + }, + { + "epoch": 1.1104675856210664, + "grad_norm": 1.7988258600234985, + "learning_rate": 0.00012597140172722243, + "loss": 0.285, + "step": 28760 + }, + { + "epoch": 1.1108537009150932, + "grad_norm": 1.094808578491211, + "learning_rate": 0.00012594566070762064, + "loss": 0.2194, + "step": 28770 + }, + { + "epoch": 1.1112398162091202, + "grad_norm": 1.3884358406066895, + "learning_rate": 0.00012591991968801883, + "loss": 0.4267, + "step": 28780 + }, + { + "epoch": 1.111625931503147, + "grad_norm": 2.743480920791626, + "learning_rate": 0.00012589417866841707, + "loss": 0.3333, + "step": 28790 + }, + { + "epoch": 1.1120120467971737, + "grad_norm": 1.0373203754425049, + "learning_rate": 0.00012586843764881528, + "loss": 0.3941, + "step": 28800 + }, + { + "epoch": 1.1123981620912005, + "grad_norm": 2.018101692199707, + "learning_rate": 0.0001258426966292135, + "loss": 0.2928, + "step": 28810 + }, + { + "epoch": 1.1127842773852272, + "grad_norm": 2.567119836807251, + "learning_rate": 0.0001258169556096117, + "loss": 0.3597, + "step": 28820 + }, + { + "epoch": 1.113170392679254, + "grad_norm": 1.1235183477401733, + "learning_rate": 0.00012579121459000992, + "loss": 0.1807, + "step": 28830 + }, + { + "epoch": 1.1135565079732808, + "grad_norm": 1.3740451335906982, + "learning_rate": 0.00012576547357040813, + "loss": 0.2425, + "step": 28840 + }, + { + "epoch": 1.1139426232673075, + "grad_norm": 1.3751258850097656, + 
"learning_rate": 0.00012573973255080632, + "loss": 0.2116, + "step": 28850 + }, + { + "epoch": 1.1143287385613343, + "grad_norm": 0.9605401158332825, + "learning_rate": 0.00012571399153120456, + "loss": 0.2144, + "step": 28860 + }, + { + "epoch": 1.1147148538553613, + "grad_norm": 0.5127251148223877, + "learning_rate": 0.00012568825051160277, + "loss": 0.1902, + "step": 28870 + }, + { + "epoch": 1.115100969149388, + "grad_norm": 2.6720705032348633, + "learning_rate": 0.000125662509492001, + "loss": 0.3286, + "step": 28880 + }, + { + "epoch": 1.1154870844434148, + "grad_norm": 3.7196574211120605, + "learning_rate": 0.0001256367684723992, + "loss": 0.211, + "step": 28890 + }, + { + "epoch": 1.1158731997374416, + "grad_norm": 0.18796740472316742, + "learning_rate": 0.00012561102745279741, + "loss": 0.3351, + "step": 28900 + }, + { + "epoch": 1.1162593150314684, + "grad_norm": 1.3164410591125488, + "learning_rate": 0.00012558528643319563, + "loss": 0.1771, + "step": 28910 + }, + { + "epoch": 1.1166454303254951, + "grad_norm": 0.9552701115608215, + "learning_rate": 0.00012555954541359384, + "loss": 0.3302, + "step": 28920 + }, + { + "epoch": 1.117031545619522, + "grad_norm": 1.0072277784347534, + "learning_rate": 0.00012553380439399205, + "loss": 0.2936, + "step": 28930 + }, + { + "epoch": 1.1174176609135489, + "grad_norm": 0.24307872354984283, + "learning_rate": 0.00012550806337439027, + "loss": 0.2034, + "step": 28940 + }, + { + "epoch": 1.1178037762075757, + "grad_norm": 2.985166311264038, + "learning_rate": 0.00012548232235478848, + "loss": 0.2628, + "step": 28950 + }, + { + "epoch": 1.1181898915016024, + "grad_norm": 0.46555295586586, + "learning_rate": 0.0001254565813351867, + "loss": 0.1904, + "step": 28960 + }, + { + "epoch": 1.1185760067956292, + "grad_norm": 0.7813409566879272, + "learning_rate": 0.0001254308403155849, + "loss": 0.33, + "step": 28970 + }, + { + "epoch": 1.118962122089656, + "grad_norm": 2.4459455013275146, + "learning_rate": 0.00012540509929598312, + "loss": 0.3183, + "step": 28980 + }, + { + "epoch": 1.1193482373836827, + "grad_norm": 0.5164415240287781, + "learning_rate": 0.00012537935827638133, + "loss": 0.3959, + "step": 28990 + }, + { + "epoch": 1.1197343526777095, + "grad_norm": 0.3853105306625366, + "learning_rate": 0.00012535361725677955, + "loss": 0.1426, + "step": 29000 + }, + { + "epoch": 1.1201204679717365, + "grad_norm": 0.5817530751228333, + "learning_rate": 0.00012532787623717776, + "loss": 0.2218, + "step": 29010 + }, + { + "epoch": 1.1205065832657632, + "grad_norm": 1.264248251914978, + "learning_rate": 0.00012530213521757597, + "loss": 0.3002, + "step": 29020 + }, + { + "epoch": 1.12089269855979, + "grad_norm": 1.999251127243042, + "learning_rate": 0.0001252763941979742, + "loss": 0.3573, + "step": 29030 + }, + { + "epoch": 1.1212788138538168, + "grad_norm": 1.760797381401062, + "learning_rate": 0.0001252506531783724, + "loss": 0.3829, + "step": 29040 + }, + { + "epoch": 1.1216649291478435, + "grad_norm": 1.4757565259933472, + "learning_rate": 0.00012522491215877061, + "loss": 0.3407, + "step": 29050 + }, + { + "epoch": 1.1220510444418703, + "grad_norm": 0.08838029205799103, + "learning_rate": 0.00012519917113916883, + "loss": 0.1899, + "step": 29060 + }, + { + "epoch": 1.122437159735897, + "grad_norm": 0.6416037678718567, + "learning_rate": 0.00012517343011956704, + "loss": 0.277, + "step": 29070 + }, + { + "epoch": 1.1228232750299239, + "grad_norm": 2.9282822608947754, + "learning_rate": 0.00012514768909996525, + "loss": 0.3001, + "step": 29080 + 
}, + { + "epoch": 1.1232093903239506, + "grad_norm": 2.5984582901000977, + "learning_rate": 0.00012512194808036347, + "loss": 0.2326, + "step": 29090 + }, + { + "epoch": 1.1235955056179776, + "grad_norm": 1.0622142553329468, + "learning_rate": 0.00012509620706076168, + "loss": 0.1494, + "step": 29100 + }, + { + "epoch": 1.1239816209120044, + "grad_norm": 1.5386018753051758, + "learning_rate": 0.0001250704660411599, + "loss": 0.3392, + "step": 29110 + }, + { + "epoch": 1.1243677362060311, + "grad_norm": 0.8901385068893433, + "learning_rate": 0.0001250447250215581, + "loss": 0.2651, + "step": 29120 + }, + { + "epoch": 1.124753851500058, + "grad_norm": 2.0237483978271484, + "learning_rate": 0.00012501898400195632, + "loss": 0.3764, + "step": 29130 + }, + { + "epoch": 1.1251399667940847, + "grad_norm": 1.8989384174346924, + "learning_rate": 0.00012499324298235453, + "loss": 0.2713, + "step": 29140 + }, + { + "epoch": 1.1255260820881114, + "grad_norm": 2.704643487930298, + "learning_rate": 0.00012496750196275275, + "loss": 0.1362, + "step": 29150 + }, + { + "epoch": 1.1259121973821382, + "grad_norm": 1.2598273754119873, + "learning_rate": 0.00012494176094315096, + "loss": 0.331, + "step": 29160 + }, + { + "epoch": 1.1262983126761652, + "grad_norm": 2.2073826789855957, + "learning_rate": 0.00012491601992354917, + "loss": 0.209, + "step": 29170 + }, + { + "epoch": 1.126684427970192, + "grad_norm": 0.8338522911071777, + "learning_rate": 0.0001248902789039474, + "loss": 0.2583, + "step": 29180 + }, + { + "epoch": 1.1270705432642187, + "grad_norm": 0.49807825684547424, + "learning_rate": 0.0001248645378843456, + "loss": 0.2702, + "step": 29190 + }, + { + "epoch": 1.1274566585582455, + "grad_norm": 2.2561802864074707, + "learning_rate": 0.00012483879686474381, + "loss": 0.2667, + "step": 29200 + }, + { + "epoch": 1.1278427738522723, + "grad_norm": 0.6450731754302979, + "learning_rate": 0.00012481305584514203, + "loss": 0.2592, + "step": 29210 + }, + { + "epoch": 1.128228889146299, + "grad_norm": 1.214436650276184, + "learning_rate": 0.00012478731482554024, + "loss": 0.2285, + "step": 29220 + }, + { + "epoch": 1.1286150044403258, + "grad_norm": 1.2677173614501953, + "learning_rate": 0.00012476157380593848, + "loss": 0.1732, + "step": 29230 + }, + { + "epoch": 1.1290011197343528, + "grad_norm": 2.034266471862793, + "learning_rate": 0.00012473583278633667, + "loss": 0.3679, + "step": 29240 + }, + { + "epoch": 1.1293872350283796, + "grad_norm": 2.005913496017456, + "learning_rate": 0.00012471009176673488, + "loss": 0.2703, + "step": 29250 + }, + { + "epoch": 1.1297733503224063, + "grad_norm": 1.818928599357605, + "learning_rate": 0.0001246843507471331, + "loss": 0.3355, + "step": 29260 + }, + { + "epoch": 1.130159465616433, + "grad_norm": 0.5393241047859192, + "learning_rate": 0.0001246586097275313, + "loss": 0.1043, + "step": 29270 + }, + { + "epoch": 1.1305455809104599, + "grad_norm": 0.5508402585983276, + "learning_rate": 0.00012463286870792952, + "loss": 0.2538, + "step": 29280 + }, + { + "epoch": 1.1309316962044866, + "grad_norm": 1.1734035015106201, + "learning_rate": 0.00012460712768832773, + "loss": 0.3082, + "step": 29290 + }, + { + "epoch": 1.1313178114985134, + "grad_norm": 0.977611243724823, + "learning_rate": 0.00012458138666872597, + "loss": 0.1962, + "step": 29300 + }, + { + "epoch": 1.1317039267925402, + "grad_norm": 0.9720492362976074, + "learning_rate": 0.00012455564564912416, + "loss": 0.3462, + "step": 29310 + }, + { + "epoch": 1.132090042086567, + "grad_norm": 1.20888352394104, + 
"learning_rate": 0.00012452990462952237, + "loss": 0.1229, + "step": 29320 + }, + { + "epoch": 1.132476157380594, + "grad_norm": 0.7969954609870911, + "learning_rate": 0.0001245041636099206, + "loss": 0.4213, + "step": 29330 + }, + { + "epoch": 1.1328622726746207, + "grad_norm": 0.07595942914485931, + "learning_rate": 0.0001244784225903188, + "loss": 0.2148, + "step": 29340 + }, + { + "epoch": 1.1332483879686475, + "grad_norm": 0.15456156432628632, + "learning_rate": 0.000124452681570717, + "loss": 0.4713, + "step": 29350 + }, + { + "epoch": 1.1336345032626742, + "grad_norm": 1.232366681098938, + "learning_rate": 0.00012442694055111523, + "loss": 0.2446, + "step": 29360 + }, + { + "epoch": 1.134020618556701, + "grad_norm": 2.3669209480285645, + "learning_rate": 0.00012440119953151347, + "loss": 0.3025, + "step": 29370 + }, + { + "epoch": 1.1344067338507278, + "grad_norm": 4.639179229736328, + "learning_rate": 0.00012437545851191165, + "loss": 0.337, + "step": 29380 + }, + { + "epoch": 1.1347928491447545, + "grad_norm": 0.700533926486969, + "learning_rate": 0.0001243497174923099, + "loss": 0.1747, + "step": 29390 + }, + { + "epoch": 1.1351789644387815, + "grad_norm": 0.5738794803619385, + "learning_rate": 0.00012432397647270808, + "loss": 0.357, + "step": 29400 + }, + { + "epoch": 1.1355650797328083, + "grad_norm": 2.620095729827881, + "learning_rate": 0.0001242982354531063, + "loss": 0.2885, + "step": 29410 + }, + { + "epoch": 1.135951195026835, + "grad_norm": 1.5040203332901, + "learning_rate": 0.00012427249443350453, + "loss": 0.2481, + "step": 29420 + }, + { + "epoch": 1.1363373103208618, + "grad_norm": 0.7409051060676575, + "learning_rate": 0.00012424675341390272, + "loss": 0.3365, + "step": 29430 + }, + { + "epoch": 1.1367234256148886, + "grad_norm": 0.6730226874351501, + "learning_rate": 0.00012422101239430096, + "loss": 0.1508, + "step": 29440 + }, + { + "epoch": 1.1371095409089154, + "grad_norm": 2.1389102935791016, + "learning_rate": 0.00012419527137469915, + "loss": 0.3832, + "step": 29450 + }, + { + "epoch": 1.1374956562029421, + "grad_norm": 0.5423761606216431, + "learning_rate": 0.0001241695303550974, + "loss": 0.2942, + "step": 29460 + }, + { + "epoch": 1.1378817714969691, + "grad_norm": 2.6076724529266357, + "learning_rate": 0.00012414378933549557, + "loss": 0.2291, + "step": 29470 + }, + { + "epoch": 1.1382678867909959, + "grad_norm": 1.0197224617004395, + "learning_rate": 0.0001241180483158938, + "loss": 0.2309, + "step": 29480 + }, + { + "epoch": 1.1386540020850227, + "grad_norm": 1.4430413246154785, + "learning_rate": 0.00012409230729629203, + "loss": 0.258, + "step": 29490 + }, + { + "epoch": 1.1390401173790494, + "grad_norm": 1.43483304977417, + "learning_rate": 0.0001240665662766902, + "loss": 0.1931, + "step": 29500 + }, + { + "epoch": 1.1394262326730762, + "grad_norm": 0.7181301116943359, + "learning_rate": 0.00012404082525708845, + "loss": 0.2675, + "step": 29510 + }, + { + "epoch": 1.139812347967103, + "grad_norm": 2.2020421028137207, + "learning_rate": 0.00012401508423748664, + "loss": 0.5094, + "step": 29520 + }, + { + "epoch": 1.1401984632611297, + "grad_norm": 0.35750746726989746, + "learning_rate": 0.00012398934321788488, + "loss": 0.3825, + "step": 29530 + }, + { + "epoch": 1.1405845785551565, + "grad_norm": 2.1792123317718506, + "learning_rate": 0.0001239636021982831, + "loss": 0.4259, + "step": 29540 + }, + { + "epoch": 1.1409706938491833, + "grad_norm": 1.2699453830718994, + "learning_rate": 0.00012393786117868128, + "loss": 0.2524, + "step": 29550 + }, + 
{ + "epoch": 1.1413568091432102, + "grad_norm": 4.232237339019775, + "learning_rate": 0.00012391212015907952, + "loss": 0.4191, + "step": 29560 + }, + { + "epoch": 1.141742924437237, + "grad_norm": 1.5009098052978516, + "learning_rate": 0.0001238863791394777, + "loss": 0.5748, + "step": 29570 + }, + { + "epoch": 1.1421290397312638, + "grad_norm": 0.8117336630821228, + "learning_rate": 0.00012386063811987595, + "loss": 0.2309, + "step": 29580 + }, + { + "epoch": 1.1425151550252906, + "grad_norm": 0.6417378187179565, + "learning_rate": 0.00012383489710027413, + "loss": 0.3011, + "step": 29590 + }, + { + "epoch": 1.1429012703193173, + "grad_norm": 0.19958554208278656, + "learning_rate": 0.00012380915608067237, + "loss": 0.4943, + "step": 29600 + }, + { + "epoch": 1.143287385613344, + "grad_norm": 1.7980111837387085, + "learning_rate": 0.0001237834150610706, + "loss": 0.2498, + "step": 29610 + }, + { + "epoch": 1.1436735009073709, + "grad_norm": 0.7506774663925171, + "learning_rate": 0.00012375767404146877, + "loss": 0.1622, + "step": 29620 + }, + { + "epoch": 1.1440596162013978, + "grad_norm": 0.4346953332424164, + "learning_rate": 0.000123731933021867, + "loss": 0.2608, + "step": 29630 + }, + { + "epoch": 1.1444457314954246, + "grad_norm": 1.4491907358169556, + "learning_rate": 0.0001237061920022652, + "loss": 0.4674, + "step": 29640 + }, + { + "epoch": 1.1448318467894514, + "grad_norm": 0.26328304409980774, + "learning_rate": 0.00012368045098266344, + "loss": 0.4287, + "step": 29650 + }, + { + "epoch": 1.1452179620834781, + "grad_norm": 0.6601302027702332, + "learning_rate": 0.00012365470996306163, + "loss": 0.3235, + "step": 29660 + }, + { + "epoch": 1.145604077377505, + "grad_norm": 3.9714503288269043, + "learning_rate": 0.00012362896894345987, + "loss": 0.2969, + "step": 29670 + }, + { + "epoch": 1.1459901926715317, + "grad_norm": 2.6341910362243652, + "learning_rate": 0.00012360322792385808, + "loss": 0.2771, + "step": 29680 + }, + { + "epoch": 1.1463763079655584, + "grad_norm": 0.04610513150691986, + "learning_rate": 0.00012357748690425627, + "loss": 0.2286, + "step": 29690 + }, + { + "epoch": 1.1467624232595852, + "grad_norm": 0.18892113864421844, + "learning_rate": 0.0001235517458846545, + "loss": 0.1821, + "step": 29700 + }, + { + "epoch": 1.147148538553612, + "grad_norm": 2.186973810195923, + "learning_rate": 0.0001235260048650527, + "loss": 0.2582, + "step": 29710 + }, + { + "epoch": 1.147534653847639, + "grad_norm": 0.23074299097061157, + "learning_rate": 0.00012350026384545093, + "loss": 0.3134, + "step": 29720 + }, + { + "epoch": 1.1479207691416657, + "grad_norm": 4.021410942077637, + "learning_rate": 0.00012347452282584915, + "loss": 0.3161, + "step": 29730 + }, + { + "epoch": 1.1483068844356925, + "grad_norm": 1.0262142419815063, + "learning_rate": 0.00012344878180624736, + "loss": 0.2034, + "step": 29740 + }, + { + "epoch": 1.1486929997297193, + "grad_norm": 1.4075149297714233, + "learning_rate": 0.00012342304078664557, + "loss": 0.2799, + "step": 29750 + }, + { + "epoch": 1.149079115023746, + "grad_norm": 0.9609633684158325, + "learning_rate": 0.00012339729976704376, + "loss": 0.1358, + "step": 29760 + }, + { + "epoch": 1.1494652303177728, + "grad_norm": 2.0923006534576416, + "learning_rate": 0.000123371558747442, + "loss": 0.224, + "step": 29770 + }, + { + "epoch": 1.1498513456117996, + "grad_norm": 0.4723201096057892, + "learning_rate": 0.00012334581772784019, + "loss": 0.1231, + "step": 29780 + }, + { + "epoch": 1.1502374609058266, + "grad_norm": 0.2070016860961914, + 
"learning_rate": 0.00012332007670823843, + "loss": 0.2998, + "step": 29790 + }, + { + "epoch": 1.1506235761998533, + "grad_norm": 0.3871285021305084, + "learning_rate": 0.00012329433568863664, + "loss": 0.2776, + "step": 29800 + }, + { + "epoch": 1.15100969149388, + "grad_norm": 0.6765030026435852, + "learning_rate": 0.00012326859466903485, + "loss": 0.6047, + "step": 29810 + }, + { + "epoch": 1.1513958067879069, + "grad_norm": 0.22907795011997223, + "learning_rate": 0.00012324285364943307, + "loss": 0.1573, + "step": 29820 + }, + { + "epoch": 1.1517819220819336, + "grad_norm": 0.3838706612586975, + "learning_rate": 0.00012321711262983128, + "loss": 0.1877, + "step": 29830 + }, + { + "epoch": 1.1521680373759604, + "grad_norm": 1.6967968940734863, + "learning_rate": 0.0001231913716102295, + "loss": 0.2314, + "step": 29840 + }, + { + "epoch": 1.1525541526699872, + "grad_norm": 0.6754477024078369, + "learning_rate": 0.00012316563059062768, + "loss": 0.1997, + "step": 29850 + }, + { + "epoch": 1.1529402679640142, + "grad_norm": 0.8980739712715149, + "learning_rate": 0.00012313988957102592, + "loss": 0.2729, + "step": 29860 + }, + { + "epoch": 1.153326383258041, + "grad_norm": 2.7968523502349854, + "learning_rate": 0.00012311414855142413, + "loss": 0.2805, + "step": 29870 + }, + { + "epoch": 1.1537124985520677, + "grad_norm": 1.5867468118667603, + "learning_rate": 0.00012308840753182235, + "loss": 0.3489, + "step": 29880 + }, + { + "epoch": 1.1540986138460945, + "grad_norm": 3.077193021774292, + "learning_rate": 0.00012306266651222056, + "loss": 0.3397, + "step": 29890 + }, + { + "epoch": 1.1544847291401212, + "grad_norm": 0.49896860122680664, + "learning_rate": 0.00012303692549261877, + "loss": 0.1215, + "step": 29900 + }, + { + "epoch": 1.154870844434148, + "grad_norm": 0.9598873257637024, + "learning_rate": 0.00012301118447301699, + "loss": 0.1618, + "step": 29910 + }, + { + "epoch": 1.1552569597281748, + "grad_norm": 1.2659916877746582, + "learning_rate": 0.0001229854434534152, + "loss": 0.1623, + "step": 29920 + }, + { + "epoch": 1.1556430750222015, + "grad_norm": 1.4009841680526733, + "learning_rate": 0.0001229597024338134, + "loss": 0.2205, + "step": 29930 + }, + { + "epoch": 1.1560291903162283, + "grad_norm": 0.5847800970077515, + "learning_rate": 0.00012293396141421163, + "loss": 0.3173, + "step": 29940 + }, + { + "epoch": 1.1564153056102553, + "grad_norm": 1.2930784225463867, + "learning_rate": 0.00012290822039460984, + "loss": 0.3052, + "step": 29950 + }, + { + "epoch": 1.156801420904282, + "grad_norm": 0.7307919859886169, + "learning_rate": 0.00012288247937500805, + "loss": 0.2293, + "step": 29960 + }, + { + "epoch": 1.1571875361983088, + "grad_norm": 0.2120385468006134, + "learning_rate": 0.00012285673835540627, + "loss": 0.2006, + "step": 29970 + }, + { + "epoch": 1.1575736514923356, + "grad_norm": 1.0932674407958984, + "learning_rate": 0.00012283099733580448, + "loss": 0.1716, + "step": 29980 + }, + { + "epoch": 1.1579597667863624, + "grad_norm": 0.7607210874557495, + "learning_rate": 0.0001228052563162027, + "loss": 0.346, + "step": 29990 + }, + { + "epoch": 1.1583458820803891, + "grad_norm": 4.455061435699463, + "learning_rate": 0.0001227795152966009, + "loss": 0.2588, + "step": 30000 + }, + { + "epoch": 1.158731997374416, + "grad_norm": 2.0812416076660156, + "learning_rate": 0.00012275377427699912, + "loss": 0.2958, + "step": 30010 + }, + { + "epoch": 1.159118112668443, + "grad_norm": 1.8883334398269653, + "learning_rate": 0.00012272803325739733, + "loss": 0.1687, + "step": 
30020 + }, + { + "epoch": 1.1595042279624697, + "grad_norm": 1.3935341835021973, + "learning_rate": 0.00012270229223779555, + "loss": 0.2276, + "step": 30030 + }, + { + "epoch": 1.1598903432564964, + "grad_norm": 2.5193631649017334, + "learning_rate": 0.00012267655121819376, + "loss": 0.3598, + "step": 30040 + }, + { + "epoch": 1.1602764585505232, + "grad_norm": 1.9172290563583374, + "learning_rate": 0.00012265081019859197, + "loss": 0.3294, + "step": 30050 + }, + { + "epoch": 1.16066257384455, + "grad_norm": 1.2220836877822876, + "learning_rate": 0.00012262506917899019, + "loss": 0.2053, + "step": 30060 + }, + { + "epoch": 1.1610486891385767, + "grad_norm": 1.9804691076278687, + "learning_rate": 0.0001225993281593884, + "loss": 0.2091, + "step": 30070 + }, + { + "epoch": 1.1614348044326035, + "grad_norm": 1.2194398641586304, + "learning_rate": 0.0001225735871397866, + "loss": 0.2535, + "step": 30080 + }, + { + "epoch": 1.1618209197266305, + "grad_norm": 0.7828000783920288, + "learning_rate": 0.00012254784612018483, + "loss": 0.436, + "step": 30090 + }, + { + "epoch": 1.1622070350206573, + "grad_norm": 0.26130637526512146, + "learning_rate": 0.00012252210510058304, + "loss": 0.3351, + "step": 30100 + }, + { + "epoch": 1.162593150314684, + "grad_norm": 0.9175068736076355, + "learning_rate": 0.00012249636408098125, + "loss": 0.0682, + "step": 30110 + }, + { + "epoch": 1.1629792656087108, + "grad_norm": 0.35828933119773865, + "learning_rate": 0.00012247062306137947, + "loss": 0.3081, + "step": 30120 + }, + { + "epoch": 1.1633653809027376, + "grad_norm": 1.2477439641952515, + "learning_rate": 0.00012244488204177768, + "loss": 0.2522, + "step": 30130 + }, + { + "epoch": 1.1637514961967643, + "grad_norm": 1.0479830503463745, + "learning_rate": 0.0001224191410221759, + "loss": 0.3183, + "step": 30140 + }, + { + "epoch": 1.164137611490791, + "grad_norm": 0.9295257329940796, + "learning_rate": 0.0001223934000025741, + "loss": 0.149, + "step": 30150 + }, + { + "epoch": 1.1645237267848179, + "grad_norm": 1.4081065654754639, + "learning_rate": 0.00012236765898297232, + "loss": 0.2403, + "step": 30160 + }, + { + "epoch": 1.1649098420788446, + "grad_norm": 0.6170324087142944, + "learning_rate": 0.00012234191796337053, + "loss": 0.3526, + "step": 30170 + }, + { + "epoch": 1.1652959573728716, + "grad_norm": 3.095670461654663, + "learning_rate": 0.00012231617694376875, + "loss": 0.2873, + "step": 30180 + }, + { + "epoch": 1.1656820726668984, + "grad_norm": 1.453447937965393, + "learning_rate": 0.00012229043592416696, + "loss": 0.2753, + "step": 30190 + }, + { + "epoch": 1.1660681879609252, + "grad_norm": 1.008033275604248, + "learning_rate": 0.00012226469490456517, + "loss": 0.2926, + "step": 30200 + }, + { + "epoch": 1.166454303254952, + "grad_norm": 2.122175693511963, + "learning_rate": 0.00012223895388496339, + "loss": 0.2432, + "step": 30210 + }, + { + "epoch": 1.1668404185489787, + "grad_norm": 1.4835058450698853, + "learning_rate": 0.0001222132128653616, + "loss": 0.2891, + "step": 30220 + }, + { + "epoch": 1.1672265338430055, + "grad_norm": 1.7386225461959839, + "learning_rate": 0.00012218747184575984, + "loss": 0.2371, + "step": 30230 + }, + { + "epoch": 1.1676126491370322, + "grad_norm": 2.1769731044769287, + "learning_rate": 0.00012216173082615803, + "loss": 0.2798, + "step": 30240 + }, + { + "epoch": 1.1679987644310592, + "grad_norm": 3.3941900730133057, + "learning_rate": 0.00012213598980655624, + "loss": 0.4021, + "step": 30250 + }, + { + "epoch": 1.168384879725086, + "grad_norm": 
1.099238395690918, + "learning_rate": 0.00012211024878695445, + "loss": 0.2908, + "step": 30260 + }, + { + "epoch": 1.1687709950191127, + "grad_norm": 0.6718109250068665, + "learning_rate": 0.00012208450776735267, + "loss": 0.1791, + "step": 30270 + }, + { + "epoch": 1.1691571103131395, + "grad_norm": 0.25414201617240906, + "learning_rate": 0.00012205876674775088, + "loss": 0.1322, + "step": 30280 + }, + { + "epoch": 1.1695432256071663, + "grad_norm": 2.1115262508392334, + "learning_rate": 0.00012203302572814909, + "loss": 0.3538, + "step": 30290 + }, + { + "epoch": 1.169929340901193, + "grad_norm": 2.3652501106262207, + "learning_rate": 0.00012200728470854732, + "loss": 0.3229, + "step": 30300 + }, + { + "epoch": 1.1703154561952198, + "grad_norm": 1.4749270677566528, + "learning_rate": 0.00012198154368894552, + "loss": 0.1251, + "step": 30310 + }, + { + "epoch": 1.1707015714892468, + "grad_norm": 0.6566292643547058, + "learning_rate": 0.00012195580266934375, + "loss": 0.2527, + "step": 30320 + }, + { + "epoch": 1.1710876867832736, + "grad_norm": 1.9602152109146118, + "learning_rate": 0.00012193006164974195, + "loss": 0.1851, + "step": 30330 + }, + { + "epoch": 1.1714738020773003, + "grad_norm": 1.6631299257278442, + "learning_rate": 0.00012190432063014017, + "loss": 0.4715, + "step": 30340 + }, + { + "epoch": 1.171859917371327, + "grad_norm": 1.1554430723190308, + "learning_rate": 0.00012187857961053839, + "loss": 0.361, + "step": 30350 + }, + { + "epoch": 1.1722460326653539, + "grad_norm": 2.5738513469696045, + "learning_rate": 0.00012185283859093659, + "loss": 0.3661, + "step": 30360 + }, + { + "epoch": 1.1726321479593806, + "grad_norm": 0.5713154077529907, + "learning_rate": 0.00012182709757133481, + "loss": 0.2468, + "step": 30370 + }, + { + "epoch": 1.1730182632534074, + "grad_norm": 0.7371454834938049, + "learning_rate": 0.00012180135655173301, + "loss": 0.2949, + "step": 30380 + }, + { + "epoch": 1.1734043785474342, + "grad_norm": 2.5442118644714355, + "learning_rate": 0.00012177561553213124, + "loss": 0.2305, + "step": 30390 + }, + { + "epoch": 1.173790493841461, + "grad_norm": 1.684951663017273, + "learning_rate": 0.00012174987451252944, + "loss": 0.2535, + "step": 30400 + }, + { + "epoch": 1.174176609135488, + "grad_norm": 1.6874382495880127, + "learning_rate": 0.00012172413349292767, + "loss": 0.4069, + "step": 30410 + }, + { + "epoch": 1.1745627244295147, + "grad_norm": 0.46226370334625244, + "learning_rate": 0.00012169839247332588, + "loss": 0.221, + "step": 30420 + }, + { + "epoch": 1.1749488397235415, + "grad_norm": 3.5687646865844727, + "learning_rate": 0.00012167265145372408, + "loss": 0.2749, + "step": 30430 + }, + { + "epoch": 1.1753349550175682, + "grad_norm": 2.9427647590637207, + "learning_rate": 0.0001216469104341223, + "loss": 0.2621, + "step": 30440 + }, + { + "epoch": 1.175721070311595, + "grad_norm": 1.3569320440292358, + "learning_rate": 0.0001216211694145205, + "loss": 0.1711, + "step": 30450 + }, + { + "epoch": 1.1761071856056218, + "grad_norm": 0.5906672477722168, + "learning_rate": 0.00012159542839491873, + "loss": 0.3111, + "step": 30460 + }, + { + "epoch": 1.1764933008996485, + "grad_norm": 1.7809525728225708, + "learning_rate": 0.00012156968737531693, + "loss": 0.198, + "step": 30470 + }, + { + "epoch": 1.1768794161936755, + "grad_norm": 1.5865052938461304, + "learning_rate": 0.00012154394635571516, + "loss": 0.3589, + "step": 30480 + }, + { + "epoch": 1.1772655314877023, + "grad_norm": 0.4852294623851776, + "learning_rate": 0.00012151820533611337, + 
"loss": 0.203, + "step": 30490 + }, + { + "epoch": 1.177651646781729, + "grad_norm": 4.437458515167236, + "learning_rate": 0.00012149246431651158, + "loss": 0.2886, + "step": 30500 + }, + { + "epoch": 1.1780377620757558, + "grad_norm": 2.204751491546631, + "learning_rate": 0.0001214667232969098, + "loss": 0.313, + "step": 30510 + }, + { + "epoch": 1.1784238773697826, + "grad_norm": 0.9356504678726196, + "learning_rate": 0.000121440982277308, + "loss": 0.2813, + "step": 30520 + }, + { + "epoch": 1.1788099926638094, + "grad_norm": 0.06744952499866486, + "learning_rate": 0.00012141524125770622, + "loss": 0.2009, + "step": 30530 + }, + { + "epoch": 1.1791961079578361, + "grad_norm": 0.49779242277145386, + "learning_rate": 0.00012138950023810445, + "loss": 0.1822, + "step": 30540 + }, + { + "epoch": 1.1795822232518631, + "grad_norm": 1.1115593910217285, + "learning_rate": 0.00012136375921850265, + "loss": 0.4164, + "step": 30550 + }, + { + "epoch": 1.17996833854589, + "grad_norm": 0.2939944267272949, + "learning_rate": 0.00012133801819890086, + "loss": 0.1356, + "step": 30560 + }, + { + "epoch": 1.1803544538399167, + "grad_norm": 0.15905381739139557, + "learning_rate": 0.00012131227717929908, + "loss": 0.3479, + "step": 30570 + }, + { + "epoch": 1.1807405691339434, + "grad_norm": 3.146277666091919, + "learning_rate": 0.00012128653615969729, + "loss": 0.2722, + "step": 30580 + }, + { + "epoch": 1.1811266844279702, + "grad_norm": 0.60884690284729, + "learning_rate": 0.00012126079514009549, + "loss": 0.1778, + "step": 30590 + }, + { + "epoch": 1.181512799721997, + "grad_norm": 0.053204573690891266, + "learning_rate": 0.00012123505412049372, + "loss": 0.1397, + "step": 30600 + }, + { + "epoch": 1.1818989150160237, + "grad_norm": 1.2908227443695068, + "learning_rate": 0.00012120931310089194, + "loss": 0.1552, + "step": 30610 + }, + { + "epoch": 1.1822850303100505, + "grad_norm": 1.489575743675232, + "learning_rate": 0.00012118357208129014, + "loss": 0.2965, + "step": 30620 + }, + { + "epoch": 1.1826711456040773, + "grad_norm": 2.4103262424468994, + "learning_rate": 0.00012115783106168837, + "loss": 0.2947, + "step": 30630 + }, + { + "epoch": 1.1830572608981043, + "grad_norm": 0.3685878813266754, + "learning_rate": 0.00012113209004208657, + "loss": 0.2144, + "step": 30640 + }, + { + "epoch": 1.183443376192131, + "grad_norm": 0.29953858256340027, + "learning_rate": 0.00012110634902248478, + "loss": 0.2281, + "step": 30650 + }, + { + "epoch": 1.1838294914861578, + "grad_norm": 0.4792311191558838, + "learning_rate": 0.00012108060800288298, + "loss": 0.2727, + "step": 30660 + }, + { + "epoch": 1.1842156067801846, + "grad_norm": 0.17143972218036652, + "learning_rate": 0.00012105486698328121, + "loss": 0.2096, + "step": 30670 + }, + { + "epoch": 1.1846017220742113, + "grad_norm": 0.43678683042526245, + "learning_rate": 0.00012102912596367944, + "loss": 0.3818, + "step": 30680 + }, + { + "epoch": 1.184987837368238, + "grad_norm": 2.0908610820770264, + "learning_rate": 0.00012100338494407764, + "loss": 0.2496, + "step": 30690 + }, + { + "epoch": 1.1853739526622649, + "grad_norm": 1.5331153869628906, + "learning_rate": 0.00012097764392447586, + "loss": 0.2772, + "step": 30700 + }, + { + "epoch": 1.1857600679562919, + "grad_norm": 0.6948639154434204, + "learning_rate": 0.00012095190290487406, + "loss": 0.3896, + "step": 30710 + }, + { + "epoch": 1.1861461832503186, + "grad_norm": 2.3802030086517334, + "learning_rate": 0.00012092616188527228, + "loss": 0.1998, + "step": 30720 + }, + { + "epoch": 
1.1865322985443454, + "grad_norm": 2.1358511447906494, + "learning_rate": 0.0001209004208656705, + "loss": 0.2865, + "step": 30730 + }, + { + "epoch": 1.1869184138383722, + "grad_norm": 0.8761110305786133, + "learning_rate": 0.0001208746798460687, + "loss": 0.2574, + "step": 30740 + }, + { + "epoch": 1.187304529132399, + "grad_norm": 0.6293401718139648, + "learning_rate": 0.00012084893882646693, + "loss": 0.4006, + "step": 30750 + }, + { + "epoch": 1.1876906444264257, + "grad_norm": 2.4534378051757812, + "learning_rate": 0.00012082319780686513, + "loss": 0.2281, + "step": 30760 + }, + { + "epoch": 1.1880767597204525, + "grad_norm": 0.38392508029937744, + "learning_rate": 0.00012079745678726336, + "loss": 0.3055, + "step": 30770 + }, + { + "epoch": 1.1884628750144794, + "grad_norm": 1.278108835220337, + "learning_rate": 0.00012077171576766156, + "loss": 0.2376, + "step": 30780 + }, + { + "epoch": 1.1888489903085062, + "grad_norm": 0.2575186789035797, + "learning_rate": 0.00012074597474805977, + "loss": 0.256, + "step": 30790 + }, + { + "epoch": 1.189235105602533, + "grad_norm": 4.043684959411621, + "learning_rate": 0.000120720233728458, + "loss": 0.3601, + "step": 30800 + }, + { + "epoch": 1.1896212208965598, + "grad_norm": 0.2561960220336914, + "learning_rate": 0.0001206944927088562, + "loss": 0.2698, + "step": 30810 + }, + { + "epoch": 1.1900073361905865, + "grad_norm": 0.3477579355239868, + "learning_rate": 0.00012066875168925442, + "loss": 0.3442, + "step": 30820 + }, + { + "epoch": 1.1903934514846133, + "grad_norm": 1.0357879400253296, + "learning_rate": 0.00012064301066965262, + "loss": 0.2561, + "step": 30830 + }, + { + "epoch": 1.19077956677864, + "grad_norm": 0.14653460681438446, + "learning_rate": 0.00012061726965005085, + "loss": 0.3072, + "step": 30840 + }, + { + "epoch": 1.1911656820726668, + "grad_norm": 1.2516767978668213, + "learning_rate": 0.00012059152863044906, + "loss": 0.4203, + "step": 30850 + }, + { + "epoch": 1.1915517973666936, + "grad_norm": 0.41542065143585205, + "learning_rate": 0.00012056578761084726, + "loss": 0.2018, + "step": 30860 + }, + { + "epoch": 1.1919379126607206, + "grad_norm": 1.7187318801879883, + "learning_rate": 0.00012054004659124549, + "loss": 0.097, + "step": 30870 + }, + { + "epoch": 1.1923240279547473, + "grad_norm": 0.3913117051124573, + "learning_rate": 0.00012051430557164369, + "loss": 0.1655, + "step": 30880 + }, + { + "epoch": 1.1927101432487741, + "grad_norm": 3.708519697189331, + "learning_rate": 0.00012048856455204192, + "loss": 0.2315, + "step": 30890 + }, + { + "epoch": 1.1930962585428009, + "grad_norm": 1.506726861000061, + "learning_rate": 0.00012046282353244012, + "loss": 0.1712, + "step": 30900 + }, + { + "epoch": 1.1934823738368276, + "grad_norm": 1.5069276094436646, + "learning_rate": 0.00012043708251283834, + "loss": 0.399, + "step": 30910 + }, + { + "epoch": 1.1938684891308544, + "grad_norm": 0.09606973081827164, + "learning_rate": 0.00012041134149323656, + "loss": 0.1226, + "step": 30920 + }, + { + "epoch": 1.1942546044248812, + "grad_norm": 1.7927659749984741, + "learning_rate": 0.00012038560047363476, + "loss": 0.0975, + "step": 30930 + }, + { + "epoch": 1.1946407197189082, + "grad_norm": 2.2988364696502686, + "learning_rate": 0.00012035985945403298, + "loss": 0.2742, + "step": 30940 + }, + { + "epoch": 1.195026835012935, + "grad_norm": 0.4412599205970764, + "learning_rate": 0.00012033411843443118, + "loss": 0.2008, + "step": 30950 + }, + { + "epoch": 1.1954129503069617, + "grad_norm": 3.227698564529419, + 
"learning_rate": 0.00012030837741482941, + "loss": 0.2797, + "step": 30960 + }, + { + "epoch": 1.1957990656009885, + "grad_norm": 1.3904643058776855, + "learning_rate": 0.00012028263639522761, + "loss": 0.1636, + "step": 30970 + }, + { + "epoch": 1.1961851808950152, + "grad_norm": 1.6495708227157593, + "learning_rate": 0.00012025689537562584, + "loss": 0.2768, + "step": 30980 + }, + { + "epoch": 1.196571296189042, + "grad_norm": 0.3142000436782837, + "learning_rate": 0.00012023115435602405, + "loss": 0.1797, + "step": 30990 + }, + { + "epoch": 1.1969574114830688, + "grad_norm": 1.562090516090393, + "learning_rate": 0.00012020541333642225, + "loss": 0.3175, + "step": 31000 + }, + { + "epoch": 1.1973435267770955, + "grad_norm": 1.3837881088256836, + "learning_rate": 0.00012017967231682048, + "loss": 0.3137, + "step": 31010 + }, + { + "epoch": 1.1977296420711223, + "grad_norm": 1.1495468616485596, + "learning_rate": 0.00012015393129721868, + "loss": 0.232, + "step": 31020 + }, + { + "epoch": 1.1981157573651493, + "grad_norm": 1.562514305114746, + "learning_rate": 0.0001201281902776169, + "loss": 0.3455, + "step": 31030 + }, + { + "epoch": 1.198501872659176, + "grad_norm": 1.626610517501831, + "learning_rate": 0.00012010244925801513, + "loss": 0.2382, + "step": 31040 + }, + { + "epoch": 1.1988879879532028, + "grad_norm": 2.5863585472106934, + "learning_rate": 0.00012007670823841333, + "loss": 0.288, + "step": 31050 + }, + { + "epoch": 1.1992741032472296, + "grad_norm": 0.9751160740852356, + "learning_rate": 0.00012005096721881154, + "loss": 0.3236, + "step": 31060 + }, + { + "epoch": 1.1996602185412564, + "grad_norm": 1.7428686618804932, + "learning_rate": 0.00012002522619920974, + "loss": 0.3707, + "step": 31070 + }, + { + "epoch": 1.2000463338352831, + "grad_norm": 1.0963342189788818, + "learning_rate": 0.00011999948517960797, + "loss": 0.2991, + "step": 31080 + }, + { + "epoch": 1.20043244912931, + "grad_norm": 2.0942695140838623, + "learning_rate": 0.00011997374416000617, + "loss": 0.2611, + "step": 31090 + }, + { + "epoch": 1.200818564423337, + "grad_norm": 0.11989244073629379, + "learning_rate": 0.0001199480031404044, + "loss": 0.2071, + "step": 31100 + }, + { + "epoch": 1.2012046797173637, + "grad_norm": 0.44892773032188416, + "learning_rate": 0.00011992226212080262, + "loss": 0.2105, + "step": 31110 + }, + { + "epoch": 1.2015907950113904, + "grad_norm": 0.0861232578754425, + "learning_rate": 0.00011989652110120082, + "loss": 0.1906, + "step": 31120 + }, + { + "epoch": 1.2019769103054172, + "grad_norm": 1.9162683486938477, + "learning_rate": 0.00011987078008159904, + "loss": 0.1929, + "step": 31130 + }, + { + "epoch": 1.202363025599444, + "grad_norm": 1.3750224113464355, + "learning_rate": 0.00011984503906199725, + "loss": 0.3072, + "step": 31140 + }, + { + "epoch": 1.2027491408934707, + "grad_norm": 0.49282577633857727, + "learning_rate": 0.00011981929804239546, + "loss": 0.3082, + "step": 31150 + }, + { + "epoch": 1.2031352561874975, + "grad_norm": 1.0578473806381226, + "learning_rate": 0.00011979355702279366, + "loss": 0.2498, + "step": 31160 + }, + { + "epoch": 1.2035213714815245, + "grad_norm": 0.6070756316184998, + "learning_rate": 0.00011976781600319189, + "loss": 0.2324, + "step": 31170 + }, + { + "epoch": 1.2039074867755513, + "grad_norm": 3.2918875217437744, + "learning_rate": 0.00011974207498359012, + "loss": 0.4218, + "step": 31180 + }, + { + "epoch": 1.204293602069578, + "grad_norm": 0.3284684419631958, + "learning_rate": 0.00011971633396398832, + "loss": 0.2713, + "step": 
31190 + }, + { + "epoch": 1.2046797173636048, + "grad_norm": 0.8733110427856445, + "learning_rate": 0.00011969059294438653, + "loss": 0.328, + "step": 31200 + }, + { + "epoch": 1.2050658326576316, + "grad_norm": 1.787016749382019, + "learning_rate": 0.00011966485192478474, + "loss": 0.2115, + "step": 31210 + }, + { + "epoch": 1.2054519479516583, + "grad_norm": 0.7539357542991638, + "learning_rate": 0.00011963911090518296, + "loss": 0.3585, + "step": 31220 + }, + { + "epoch": 1.205838063245685, + "grad_norm": 0.32271450757980347, + "learning_rate": 0.00011961336988558118, + "loss": 0.2472, + "step": 31230 + }, + { + "epoch": 1.2062241785397119, + "grad_norm": 0.85898756980896, + "learning_rate": 0.00011958762886597938, + "loss": 0.2367, + "step": 31240 + }, + { + "epoch": 1.2066102938337386, + "grad_norm": 0.6451173424720764, + "learning_rate": 0.00011956188784637761, + "loss": 0.2309, + "step": 31250 + }, + { + "epoch": 1.2069964091277656, + "grad_norm": 2.1043314933776855, + "learning_rate": 0.00011953614682677581, + "loss": 0.316, + "step": 31260 + }, + { + "epoch": 1.2073825244217924, + "grad_norm": 2.9022254943847656, + "learning_rate": 0.00011951040580717404, + "loss": 0.3161, + "step": 31270 + }, + { + "epoch": 1.2077686397158192, + "grad_norm": 0.7839256525039673, + "learning_rate": 0.00011948466478757224, + "loss": 0.4014, + "step": 31280 + }, + { + "epoch": 1.208154755009846, + "grad_norm": 1.024190902709961, + "learning_rate": 0.00011945892376797045, + "loss": 0.2333, + "step": 31290 + }, + { + "epoch": 1.2085408703038727, + "grad_norm": 1.433605670928955, + "learning_rate": 0.00011943318274836868, + "loss": 0.2014, + "step": 31300 + }, + { + "epoch": 1.2089269855978995, + "grad_norm": 0.7208861112594604, + "learning_rate": 0.00011940744172876688, + "loss": 0.1786, + "step": 31310 + }, + { + "epoch": 1.2093131008919262, + "grad_norm": 2.5464839935302734, + "learning_rate": 0.0001193817007091651, + "loss": 0.3646, + "step": 31320 + }, + { + "epoch": 1.2096992161859532, + "grad_norm": 1.6425135135650635, + "learning_rate": 0.0001193559596895633, + "loss": 0.2603, + "step": 31330 + }, + { + "epoch": 1.21008533147998, + "grad_norm": 1.7573461532592773, + "learning_rate": 0.00011933021866996153, + "loss": 0.315, + "step": 31340 + }, + { + "epoch": 1.2104714467740068, + "grad_norm": 1.884445071220398, + "learning_rate": 0.00011930447765035974, + "loss": 0.2326, + "step": 31350 + }, + { + "epoch": 1.2108575620680335, + "grad_norm": 1.2781214714050293, + "learning_rate": 0.00011927873663075794, + "loss": 0.2248, + "step": 31360 + }, + { + "epoch": 1.2112436773620603, + "grad_norm": 0.5210689902305603, + "learning_rate": 0.00011925299561115617, + "loss": 0.2517, + "step": 31370 + }, + { + "epoch": 1.211629792656087, + "grad_norm": 0.4265996515750885, + "learning_rate": 0.00011922725459155437, + "loss": 0.257, + "step": 31380 + }, + { + "epoch": 1.2120159079501138, + "grad_norm": 0.7833511233329773, + "learning_rate": 0.0001192015135719526, + "loss": 0.3117, + "step": 31390 + }, + { + "epoch": 1.2124020232441408, + "grad_norm": 5.922067165374756, + "learning_rate": 0.0001191757725523508, + "loss": 0.2949, + "step": 31400 + }, + { + "epoch": 1.2127881385381676, + "grad_norm": 3.2208797931671143, + "learning_rate": 0.00011915003153274902, + "loss": 0.3616, + "step": 31410 + }, + { + "epoch": 1.2131742538321943, + "grad_norm": 1.6283681392669678, + "learning_rate": 0.00011912429051314724, + "loss": 0.1951, + "step": 31420 + }, + { + "epoch": 1.2135603691262211, + "grad_norm": 
1.2450298070907593, + "learning_rate": 0.00011909854949354544, + "loss": 0.2313, + "step": 31430 + }, + { + "epoch": 1.2139464844202479, + "grad_norm": 0.8394368290901184, + "learning_rate": 0.00011907280847394366, + "loss": 0.2993, + "step": 31440 + }, + { + "epoch": 1.2143325997142747, + "grad_norm": 0.6099762320518494, + "learning_rate": 0.00011904706745434186, + "loss": 0.1527, + "step": 31450 + }, + { + "epoch": 1.2147187150083014, + "grad_norm": 0.7743226885795593, + "learning_rate": 0.00011902132643474009, + "loss": 0.2608, + "step": 31460 + }, + { + "epoch": 1.2151048303023282, + "grad_norm": 3.343604326248169, + "learning_rate": 0.00011899558541513829, + "loss": 0.3031, + "step": 31470 + }, + { + "epoch": 1.215490945596355, + "grad_norm": 1.016830563545227, + "learning_rate": 0.00011896984439553652, + "loss": 0.2146, + "step": 31480 + }, + { + "epoch": 1.215877060890382, + "grad_norm": 1.0264688730239868, + "learning_rate": 0.00011894410337593473, + "loss": 0.2905, + "step": 31490 + }, + { + "epoch": 1.2162631761844087, + "grad_norm": 1.5718705654144287, + "learning_rate": 0.00011891836235633293, + "loss": 0.2928, + "step": 31500 + }, + { + "epoch": 1.2166492914784355, + "grad_norm": 0.9886181950569153, + "learning_rate": 0.00011889262133673116, + "loss": 0.2129, + "step": 31510 + }, + { + "epoch": 1.2170354067724622, + "grad_norm": 0.6496497392654419, + "learning_rate": 0.00011886688031712936, + "loss": 0.2388, + "step": 31520 + }, + { + "epoch": 1.217421522066489, + "grad_norm": 2.0419809818267822, + "learning_rate": 0.00011884113929752758, + "loss": 0.1835, + "step": 31530 + }, + { + "epoch": 1.2178076373605158, + "grad_norm": 0.41370299458503723, + "learning_rate": 0.00011881539827792581, + "loss": 0.3099, + "step": 31540 + }, + { + "epoch": 1.2181937526545425, + "grad_norm": 0.6608594655990601, + "learning_rate": 0.00011878965725832401, + "loss": 0.4235, + "step": 31550 + }, + { + "epoch": 1.2185798679485695, + "grad_norm": 0.4206163287162781, + "learning_rate": 0.00011876391623872222, + "loss": 0.2384, + "step": 31560 + }, + { + "epoch": 1.2189659832425963, + "grad_norm": 0.2976207137107849, + "learning_rate": 0.00011873817521912042, + "loss": 0.4792, + "step": 31570 + }, + { + "epoch": 1.219352098536623, + "grad_norm": 2.237607002258301, + "learning_rate": 0.00011871243419951865, + "loss": 0.4694, + "step": 31580 + }, + { + "epoch": 1.2197382138306498, + "grad_norm": 2.1085827350616455, + "learning_rate": 0.00011868669317991685, + "loss": 0.3475, + "step": 31590 + }, + { + "epoch": 1.2201243291246766, + "grad_norm": 1.5770317316055298, + "learning_rate": 0.00011866095216031508, + "loss": 0.3676, + "step": 31600 + }, + { + "epoch": 1.2205104444187034, + "grad_norm": 1.7337925434112549, + "learning_rate": 0.0001186352111407133, + "loss": 0.2663, + "step": 31610 + }, + { + "epoch": 1.2208965597127301, + "grad_norm": 0.23846319317817688, + "learning_rate": 0.0001186094701211115, + "loss": 0.1916, + "step": 31620 + }, + { + "epoch": 1.2212826750067571, + "grad_norm": 1.1316229104995728, + "learning_rate": 0.00011858372910150972, + "loss": 0.1924, + "step": 31630 + }, + { + "epoch": 1.221668790300784, + "grad_norm": 0.7642451524734497, + "learning_rate": 0.00011855798808190792, + "loss": 0.3786, + "step": 31640 + }, + { + "epoch": 1.2220549055948107, + "grad_norm": 1.975182056427002, + "learning_rate": 0.00011853224706230614, + "loss": 0.2646, + "step": 31650 + }, + { + "epoch": 1.2224410208888374, + "grad_norm": 1.5172406435012817, + "learning_rate": 0.00011850650604270437, + 
"loss": 0.2824, + "step": 31660 + }, + { + "epoch": 1.2228271361828642, + "grad_norm": 1.5113869905471802, + "learning_rate": 0.00011848076502310257, + "loss": 0.2959, + "step": 31670 + }, + { + "epoch": 1.223213251476891, + "grad_norm": 2.380364418029785, + "learning_rate": 0.0001184550240035008, + "loss": 0.2425, + "step": 31680 + }, + { + "epoch": 1.2235993667709177, + "grad_norm": 0.11588902771472931, + "learning_rate": 0.000118429282983899, + "loss": 0.1665, + "step": 31690 + }, + { + "epoch": 1.2239854820649445, + "grad_norm": 2.045466184616089, + "learning_rate": 0.00011840354196429721, + "loss": 0.2478, + "step": 31700 + }, + { + "epoch": 1.2243715973589713, + "grad_norm": 4.201779365539551, + "learning_rate": 0.00011837780094469542, + "loss": 0.3295, + "step": 31710 + }, + { + "epoch": 1.2247577126529983, + "grad_norm": 1.1080553531646729, + "learning_rate": 0.00011835205992509364, + "loss": 0.2102, + "step": 31720 + }, + { + "epoch": 1.225143827947025, + "grad_norm": 0.3263179659843445, + "learning_rate": 0.00011832631890549186, + "loss": 0.2822, + "step": 31730 + }, + { + "epoch": 1.2255299432410518, + "grad_norm": 0.7899855971336365, + "learning_rate": 0.00011830057788589006, + "loss": 0.1462, + "step": 31740 + }, + { + "epoch": 1.2259160585350786, + "grad_norm": 1.330854058265686, + "learning_rate": 0.00011827483686628829, + "loss": 0.3883, + "step": 31750 + }, + { + "epoch": 1.2263021738291053, + "grad_norm": 0.40446966886520386, + "learning_rate": 0.00011824909584668649, + "loss": 0.1592, + "step": 31760 + }, + { + "epoch": 1.226688289123132, + "grad_norm": 0.49187901616096497, + "learning_rate": 0.0001182233548270847, + "loss": 0.0698, + "step": 31770 + }, + { + "epoch": 1.2270744044171589, + "grad_norm": 4.227303504943848, + "learning_rate": 0.00011819761380748292, + "loss": 0.3945, + "step": 31780 + }, + { + "epoch": 1.2274605197111859, + "grad_norm": 2.520578622817993, + "learning_rate": 0.00011817187278788113, + "loss": 0.4552, + "step": 31790 + }, + { + "epoch": 1.2278466350052126, + "grad_norm": 0.9486772418022156, + "learning_rate": 0.00011814613176827936, + "loss": 0.232, + "step": 31800 + }, + { + "epoch": 1.2282327502992394, + "grad_norm": 0.8336694836616516, + "learning_rate": 0.00011812039074867756, + "loss": 0.183, + "step": 31810 + }, + { + "epoch": 1.2286188655932662, + "grad_norm": 1.576710820198059, + "learning_rate": 0.00011809464972907578, + "loss": 0.4116, + "step": 31820 + }, + { + "epoch": 1.229004980887293, + "grad_norm": 0.2388358861207962, + "learning_rate": 0.00011806890870947398, + "loss": 0.3509, + "step": 31830 + }, + { + "epoch": 1.2293910961813197, + "grad_norm": 1.054880142211914, + "learning_rate": 0.00011804316768987221, + "loss": 0.1669, + "step": 31840 + }, + { + "epoch": 1.2297772114753465, + "grad_norm": 0.7959414720535278, + "learning_rate": 0.00011801742667027042, + "loss": 0.2073, + "step": 31850 + }, + { + "epoch": 1.2301633267693735, + "grad_norm": 2.480940103530884, + "learning_rate": 0.00011799168565066862, + "loss": 0.3589, + "step": 31860 + }, + { + "epoch": 1.2305494420634002, + "grad_norm": 0.9381177425384521, + "learning_rate": 0.00011796594463106685, + "loss": 0.2576, + "step": 31870 + }, + { + "epoch": 1.230935557357427, + "grad_norm": 9.548282623291016, + "learning_rate": 0.00011794020361146505, + "loss": 0.1755, + "step": 31880 + }, + { + "epoch": 1.2313216726514538, + "grad_norm": 0.4922927916049957, + "learning_rate": 0.00011791446259186328, + "loss": 0.2173, + "step": 31890 + }, + { + "epoch": 1.2317077879454805, + 
"grad_norm": 1.2846627235412598, + "learning_rate": 0.00011788872157226148, + "loss": 0.2039, + "step": 31900 + }, + { + "epoch": 1.2320939032395073, + "grad_norm": 0.9321876764297485, + "learning_rate": 0.0001178629805526597, + "loss": 0.2418, + "step": 31910 + }, + { + "epoch": 1.232480018533534, + "grad_norm": 1.373787760734558, + "learning_rate": 0.00011783723953305792, + "loss": 0.3065, + "step": 31920 + }, + { + "epoch": 1.2328661338275608, + "grad_norm": 1.915208339691162, + "learning_rate": 0.00011781149851345612, + "loss": 0.292, + "step": 31930 + }, + { + "epoch": 1.2332522491215876, + "grad_norm": 1.533010482788086, + "learning_rate": 0.00011778575749385434, + "loss": 0.2234, + "step": 31940 + }, + { + "epoch": 1.2336383644156146, + "grad_norm": 0.5740505456924438, + "learning_rate": 0.00011776001647425254, + "loss": 0.2513, + "step": 31950 + }, + { + "epoch": 1.2340244797096414, + "grad_norm": 3.164320230484009, + "learning_rate": 0.00011773427545465077, + "loss": 0.4233, + "step": 31960 + }, + { + "epoch": 1.2344105950036681, + "grad_norm": 0.8309218287467957, + "learning_rate": 0.00011770853443504897, + "loss": 0.4629, + "step": 31970 + }, + { + "epoch": 1.2347967102976949, + "grad_norm": 2.0942423343658447, + "learning_rate": 0.0001176827934154472, + "loss": 0.3381, + "step": 31980 + }, + { + "epoch": 1.2351828255917217, + "grad_norm": 0.4961155354976654, + "learning_rate": 0.00011765705239584541, + "loss": 0.1265, + "step": 31990 + }, + { + "epoch": 1.2355689408857484, + "grad_norm": 1.6777870655059814, + "learning_rate": 0.00011763131137624361, + "loss": 0.2503, + "step": 32000 + }, + { + "epoch": 1.2359550561797752, + "grad_norm": 0.6016990542411804, + "learning_rate": 0.00011760557035664184, + "loss": 0.3944, + "step": 32010 + }, + { + "epoch": 1.2363411714738022, + "grad_norm": 1.9123533964157104, + "learning_rate": 0.00011757982933704004, + "loss": 0.463, + "step": 32020 + }, + { + "epoch": 1.236727286767829, + "grad_norm": 1.5328630208969116, + "learning_rate": 0.00011755408831743826, + "loss": 0.3368, + "step": 32030 + }, + { + "epoch": 1.2371134020618557, + "grad_norm": 1.0984220504760742, + "learning_rate": 0.00011752834729783649, + "loss": 0.1987, + "step": 32040 + }, + { + "epoch": 1.2374995173558825, + "grad_norm": 3.1570327281951904, + "learning_rate": 0.00011750260627823469, + "loss": 0.454, + "step": 32050 + }, + { + "epoch": 1.2378856326499093, + "grad_norm": 3.7589874267578125, + "learning_rate": 0.0001174768652586329, + "loss": 0.2953, + "step": 32060 + }, + { + "epoch": 1.238271747943936, + "grad_norm": 1.3289847373962402, + "learning_rate": 0.0001174511242390311, + "loss": 0.3058, + "step": 32070 + }, + { + "epoch": 1.2386578632379628, + "grad_norm": 0.9872431755065918, + "learning_rate": 0.00011742538321942933, + "loss": 0.1312, + "step": 32080 + }, + { + "epoch": 1.2390439785319898, + "grad_norm": 1.799133062362671, + "learning_rate": 0.00011739964219982753, + "loss": 0.3271, + "step": 32090 + }, + { + "epoch": 1.2394300938260165, + "grad_norm": 0.13501858711242676, + "learning_rate": 0.00011737390118022576, + "loss": 0.4462, + "step": 32100 + }, + { + "epoch": 1.2398162091200433, + "grad_norm": 1.53009033203125, + "learning_rate": 0.00011734816016062398, + "loss": 0.1311, + "step": 32110 + }, + { + "epoch": 1.24020232441407, + "grad_norm": 0.5737781524658203, + "learning_rate": 0.00011732241914102218, + "loss": 0.1705, + "step": 32120 + }, + { + "epoch": 1.2405884397080968, + "grad_norm": 2.095947027206421, + "learning_rate": 0.0001172966781214204, + 
"loss": 0.2195, + "step": 32130 + }, + { + "epoch": 1.2409745550021236, + "grad_norm": 2.2158639430999756, + "learning_rate": 0.0001172709371018186, + "loss": 0.2964, + "step": 32140 + }, + { + "epoch": 1.2413606702961504, + "grad_norm": 0.3154670000076294, + "learning_rate": 0.00011724519608221682, + "loss": 0.245, + "step": 32150 + }, + { + "epoch": 1.2417467855901771, + "grad_norm": 1.4467942714691162, + "learning_rate": 0.00011721945506261505, + "loss": 0.3469, + "step": 32160 + }, + { + "epoch": 1.242132900884204, + "grad_norm": 0.24252896010875702, + "learning_rate": 0.00011719371404301325, + "loss": 0.2217, + "step": 32170 + }, + { + "epoch": 1.242519016178231, + "grad_norm": 2.4256465435028076, + "learning_rate": 0.00011716797302341148, + "loss": 0.4707, + "step": 32180 + }, + { + "epoch": 1.2429051314722577, + "grad_norm": 0.5917278528213501, + "learning_rate": 0.00011714223200380968, + "loss": 0.2284, + "step": 32190 + }, + { + "epoch": 1.2432912467662844, + "grad_norm": 1.2977256774902344, + "learning_rate": 0.00011711649098420789, + "loss": 0.3612, + "step": 32200 + }, + { + "epoch": 1.2436773620603112, + "grad_norm": 1.296558141708374, + "learning_rate": 0.00011709074996460609, + "loss": 0.2231, + "step": 32210 + }, + { + "epoch": 1.244063477354338, + "grad_norm": 1.6559596061706543, + "learning_rate": 0.00011706500894500432, + "loss": 0.2636, + "step": 32220 + }, + { + "epoch": 1.2444495926483647, + "grad_norm": 1.6342560052871704, + "learning_rate": 0.00011703926792540254, + "loss": 0.1276, + "step": 32230 + }, + { + "epoch": 1.2448357079423915, + "grad_norm": 1.1173146963119507, + "learning_rate": 0.00011701352690580074, + "loss": 0.1719, + "step": 32240 + }, + { + "epoch": 1.2452218232364185, + "grad_norm": 0.29269275069236755, + "learning_rate": 0.00011698778588619897, + "loss": 0.3323, + "step": 32250 + }, + { + "epoch": 1.2456079385304453, + "grad_norm": 2.518568515777588, + "learning_rate": 0.00011696204486659717, + "loss": 0.3302, + "step": 32260 + }, + { + "epoch": 1.245994053824472, + "grad_norm": 2.535940647125244, + "learning_rate": 0.00011693630384699538, + "loss": 0.1488, + "step": 32270 + }, + { + "epoch": 1.2463801691184988, + "grad_norm": 0.384199321269989, + "learning_rate": 0.00011691056282739358, + "loss": 0.0957, + "step": 32280 + }, + { + "epoch": 1.2467662844125256, + "grad_norm": 3.7460570335388184, + "learning_rate": 0.00011688482180779181, + "loss": 0.311, + "step": 32290 + }, + { + "epoch": 1.2471523997065523, + "grad_norm": 1.285152792930603, + "learning_rate": 0.00011685908078819004, + "loss": 0.1872, + "step": 32300 + }, + { + "epoch": 1.247538515000579, + "grad_norm": 0.6118050217628479, + "learning_rate": 0.00011683333976858823, + "loss": 0.1635, + "step": 32310 + }, + { + "epoch": 1.2479246302946059, + "grad_norm": 2.9890856742858887, + "learning_rate": 0.00011680759874898646, + "loss": 0.2773, + "step": 32320 + }, + { + "epoch": 1.2483107455886326, + "grad_norm": 0.15415504574775696, + "learning_rate": 0.00011678185772938466, + "loss": 0.3014, + "step": 32330 + }, + { + "epoch": 1.2486968608826596, + "grad_norm": 1.2717432975769043, + "learning_rate": 0.00011675611670978287, + "loss": 0.2442, + "step": 32340 + }, + { + "epoch": 1.2490829761766864, + "grad_norm": 0.19885335862636566, + "learning_rate": 0.0001167303756901811, + "loss": 0.1894, + "step": 32350 + }, + { + "epoch": 1.2494690914707132, + "grad_norm": 1.425176978111267, + "learning_rate": 0.0001167046346705793, + "loss": 0.3008, + "step": 32360 + }, + { + "epoch": 1.24985520676474, 
+ "grad_norm": 1.9033544063568115, + "learning_rate": 0.00011667889365097753, + "loss": 0.3072, + "step": 32370 + }, + { + "epoch": 1.2502413220587667, + "grad_norm": 0.6677396297454834, + "learning_rate": 0.00011665315263137573, + "loss": 0.2378, + "step": 32380 + }, + { + "epoch": 1.2506274373527935, + "grad_norm": 0.4491410255432129, + "learning_rate": 0.00011662741161177396, + "loss": 0.1071, + "step": 32390 + }, + { + "epoch": 1.2510135526468202, + "grad_norm": 2.7697031497955322, + "learning_rate": 0.00011660167059217215, + "loss": 0.3276, + "step": 32400 + }, + { + "epoch": 1.2513996679408472, + "grad_norm": 1.4374775886535645, + "learning_rate": 0.00011657592957257037, + "loss": 0.2459, + "step": 32410 + }, + { + "epoch": 1.251785783234874, + "grad_norm": 1.5245740413665771, + "learning_rate": 0.0001165501885529686, + "loss": 0.3841, + "step": 32420 + }, + { + "epoch": 1.2521718985289008, + "grad_norm": 0.5069687366485596, + "learning_rate": 0.0001165244475333668, + "loss": 0.1425, + "step": 32430 + }, + { + "epoch": 1.2525580138229275, + "grad_norm": 0.6181765198707581, + "learning_rate": 0.00011649870651376502, + "loss": 0.4184, + "step": 32440 + }, + { + "epoch": 1.2529441291169543, + "grad_norm": 2.007375717163086, + "learning_rate": 0.00011647296549416322, + "loss": 0.1754, + "step": 32450 + }, + { + "epoch": 1.253330244410981, + "grad_norm": 0.27261993288993835, + "learning_rate": 0.00011644722447456145, + "loss": 0.3608, + "step": 32460 + }, + { + "epoch": 1.2537163597050078, + "grad_norm": 0.3452116549015045, + "learning_rate": 0.00011642148345495965, + "loss": 0.1913, + "step": 32470 + }, + { + "epoch": 1.2541024749990348, + "grad_norm": 2.553738594055176, + "learning_rate": 0.00011639574243535787, + "loss": 0.2221, + "step": 32480 + }, + { + "epoch": 1.2544885902930616, + "grad_norm": 0.795706033706665, + "learning_rate": 0.00011637000141575609, + "loss": 0.2348, + "step": 32490 + }, + { + "epoch": 1.2548747055870884, + "grad_norm": 0.519059956073761, + "learning_rate": 0.00011634426039615429, + "loss": 0.2874, + "step": 32500 + }, + { + "epoch": 1.2552608208811151, + "grad_norm": 2.7050392627716064, + "learning_rate": 0.00011631851937655251, + "loss": 0.3821, + "step": 32510 + }, + { + "epoch": 1.255646936175142, + "grad_norm": 2.09036922454834, + "learning_rate": 0.00011629277835695071, + "loss": 0.2562, + "step": 32520 + }, + { + "epoch": 1.2560330514691687, + "grad_norm": 1.156534194946289, + "learning_rate": 0.00011626703733734894, + "loss": 0.1373, + "step": 32530 + }, + { + "epoch": 1.2564191667631954, + "grad_norm": 2.5214719772338867, + "learning_rate": 0.00011624129631774715, + "loss": 0.2955, + "step": 32540 + }, + { + "epoch": 1.2568052820572224, + "grad_norm": 2.71368408203125, + "learning_rate": 0.00011621555529814537, + "loss": 0.2832, + "step": 32550 + }, + { + "epoch": 1.257191397351249, + "grad_norm": 1.488390564918518, + "learning_rate": 0.00011618981427854358, + "loss": 0.2369, + "step": 32560 + }, + { + "epoch": 1.257577512645276, + "grad_norm": 1.7705353498458862, + "learning_rate": 0.00011616407325894178, + "loss": 0.4351, + "step": 32570 + }, + { + "epoch": 1.2579636279393027, + "grad_norm": 4.3224406242370605, + "learning_rate": 0.00011613833223934001, + "loss": 0.3365, + "step": 32580 + }, + { + "epoch": 1.2583497432333295, + "grad_norm": 0.9157351851463318, + "learning_rate": 0.00011611259121973821, + "loss": 0.2882, + "step": 32590 + }, + { + "epoch": 1.2587358585273563, + "grad_norm": 2.9030823707580566, + "learning_rate": 
0.00011608685020013643, + "loss": 0.343, + "step": 32600 + }, + { + "epoch": 1.259121973821383, + "grad_norm": 0.807424783706665, + "learning_rate": 0.00011606110918053466, + "loss": 0.2413, + "step": 32610 + }, + { + "epoch": 1.2595080891154098, + "grad_norm": 1.8564451932907104, + "learning_rate": 0.00011603536816093286, + "loss": 0.5403, + "step": 32620 + }, + { + "epoch": 1.2598942044094366, + "grad_norm": 0.8169263005256653, + "learning_rate": 0.00011600962714133107, + "loss": 0.3522, + "step": 32630 + }, + { + "epoch": 1.2602803197034635, + "grad_norm": 0.9638017416000366, + "learning_rate": 0.00011598388612172927, + "loss": 0.3295, + "step": 32640 + }, + { + "epoch": 1.2606664349974903, + "grad_norm": 1.8613898754119873, + "learning_rate": 0.0001159581451021275, + "loss": 0.3107, + "step": 32650 + }, + { + "epoch": 1.261052550291517, + "grad_norm": 2.582638740539551, + "learning_rate": 0.00011593240408252573, + "loss": 0.2767, + "step": 32660 + }, + { + "epoch": 1.2614386655855439, + "grad_norm": 1.8227603435516357, + "learning_rate": 0.00011590666306292393, + "loss": 0.2324, + "step": 32670 + }, + { + "epoch": 1.2618247808795706, + "grad_norm": 0.3827721178531647, + "learning_rate": 0.00011588092204332215, + "loss": 0.4926, + "step": 32680 + }, + { + "epoch": 1.2622108961735974, + "grad_norm": 1.5523405075073242, + "learning_rate": 0.00011585518102372035, + "loss": 0.4475, + "step": 32690 + }, + { + "epoch": 1.2625970114676242, + "grad_norm": 0.25254619121551514, + "learning_rate": 0.00011582944000411857, + "loss": 0.2333, + "step": 32700 + }, + { + "epoch": 1.2629831267616511, + "grad_norm": 0.5530809164047241, + "learning_rate": 0.00011580369898451677, + "loss": 0.2781, + "step": 32710 + }, + { + "epoch": 1.2633692420556777, + "grad_norm": 0.3202857971191406, + "learning_rate": 0.000115777957964915, + "loss": 0.2596, + "step": 32720 + }, + { + "epoch": 1.2637553573497047, + "grad_norm": 0.25663653016090393, + "learning_rate": 0.00011575221694531322, + "loss": 0.2096, + "step": 32730 + }, + { + "epoch": 1.2641414726437314, + "grad_norm": 2.423585891723633, + "learning_rate": 0.00011572647592571142, + "loss": 0.3064, + "step": 32740 + }, + { + "epoch": 1.2645275879377582, + "grad_norm": 0.2529740631580353, + "learning_rate": 0.00011570073490610965, + "loss": 0.1426, + "step": 32750 + }, + { + "epoch": 1.264913703231785, + "grad_norm": 0.5238136053085327, + "learning_rate": 0.00011567499388650785, + "loss": 0.1598, + "step": 32760 + }, + { + "epoch": 1.2652998185258117, + "grad_norm": 0.4495049715042114, + "learning_rate": 0.00011564925286690606, + "loss": 0.1712, + "step": 32770 + }, + { + "epoch": 1.2656859338198387, + "grad_norm": 0.3863711953163147, + "learning_rate": 0.00011562351184730426, + "loss": 0.1015, + "step": 32780 + }, + { + "epoch": 1.2660720491138653, + "grad_norm": 1.3139948844909668, + "learning_rate": 0.00011559777082770249, + "loss": 0.3982, + "step": 32790 + }, + { + "epoch": 1.2664581644078923, + "grad_norm": 0.40500277280807495, + "learning_rate": 0.00011557202980810071, + "loss": 0.4211, + "step": 32800 + }, + { + "epoch": 1.266844279701919, + "grad_norm": 0.9749487042427063, + "learning_rate": 0.00011554628878849891, + "loss": 0.2773, + "step": 32810 + }, + { + "epoch": 1.2672303949959458, + "grad_norm": 1.1194144487380981, + "learning_rate": 0.00011552054776889714, + "loss": 0.2771, + "step": 32820 + }, + { + "epoch": 1.2676165102899726, + "grad_norm": 1.4098531007766724, + "learning_rate": 0.00011549480674929534, + "loss": 0.2044, + "step": 32830 + }, + { 
+ "epoch": 1.2680026255839993, + "grad_norm": 1.3184692859649658, + "learning_rate": 0.00011546906572969355, + "loss": 0.2633, + "step": 32840 + }, + { + "epoch": 1.268388740878026, + "grad_norm": 0.9776495099067688, + "learning_rate": 0.00011544332471009178, + "loss": 0.3212, + "step": 32850 + }, + { + "epoch": 1.2687748561720529, + "grad_norm": 1.1577869653701782, + "learning_rate": 0.00011541758369048998, + "loss": 0.4271, + "step": 32860 + }, + { + "epoch": 1.2691609714660799, + "grad_norm": 2.2742063999176025, + "learning_rate": 0.00011539184267088821, + "loss": 0.2044, + "step": 32870 + }, + { + "epoch": 1.2695470867601066, + "grad_norm": 1.2156758308410645, + "learning_rate": 0.00011536610165128641, + "loss": 0.2394, + "step": 32880 + }, + { + "epoch": 1.2699332020541334, + "grad_norm": 1.9160290956497192, + "learning_rate": 0.00011534036063168463, + "loss": 0.3733, + "step": 32890 + }, + { + "epoch": 1.2703193173481602, + "grad_norm": 1.3484338521957397, + "learning_rate": 0.00011531461961208283, + "loss": 0.3946, + "step": 32900 + }, + { + "epoch": 1.270705432642187, + "grad_norm": 1.0480244159698486, + "learning_rate": 0.00011528887859248105, + "loss": 0.166, + "step": 32910 + }, + { + "epoch": 1.2710915479362137, + "grad_norm": 1.1980483531951904, + "learning_rate": 0.00011526313757287927, + "loss": 0.3134, + "step": 32920 + }, + { + "epoch": 1.2714776632302405, + "grad_norm": 1.2777379751205444, + "learning_rate": 0.00011523739655327747, + "loss": 0.2615, + "step": 32930 + }, + { + "epoch": 1.2718637785242675, + "grad_norm": 1.559792399406433, + "learning_rate": 0.0001152116555336757, + "loss": 0.2996, + "step": 32940 + }, + { + "epoch": 1.272249893818294, + "grad_norm": 1.9589471817016602, + "learning_rate": 0.0001151859145140739, + "loss": 0.2683, + "step": 32950 + }, + { + "epoch": 1.272636009112321, + "grad_norm": 0.23336051404476166, + "learning_rate": 0.00011516017349447213, + "loss": 0.3306, + "step": 32960 + }, + { + "epoch": 1.2730221244063478, + "grad_norm": 1.5898360013961792, + "learning_rate": 0.00011513443247487034, + "loss": 0.2891, + "step": 32970 + }, + { + "epoch": 1.2734082397003745, + "grad_norm": 0.5046207904815674, + "learning_rate": 0.00011510869145526854, + "loss": 0.1843, + "step": 32980 + }, + { + "epoch": 1.2737943549944013, + "grad_norm": 0.16588473320007324, + "learning_rate": 0.00011508295043566677, + "loss": 0.1065, + "step": 32990 + }, + { + "epoch": 1.274180470288428, + "grad_norm": 1.5499800443649292, + "learning_rate": 0.00011505720941606497, + "loss": 0.2795, + "step": 33000 + }, + { + "epoch": 1.274566585582455, + "grad_norm": 0.5019121766090393, + "learning_rate": 0.0001150314683964632, + "loss": 0.1857, + "step": 33010 + }, + { + "epoch": 1.2749527008764816, + "grad_norm": 0.258952260017395, + "learning_rate": 0.0001150057273768614, + "loss": 0.11, + "step": 33020 + }, + { + "epoch": 1.2753388161705086, + "grad_norm": 0.8540348410606384, + "learning_rate": 0.00011497998635725962, + "loss": 0.1852, + "step": 33030 + }, + { + "epoch": 1.2757249314645354, + "grad_norm": 0.08689398318529129, + "learning_rate": 0.00011495424533765783, + "loss": 0.2225, + "step": 33040 + }, + { + "epoch": 1.2761110467585621, + "grad_norm": 0.42253822088241577, + "learning_rate": 0.00011492850431805605, + "loss": 0.0751, + "step": 33050 + }, + { + "epoch": 1.276497162052589, + "grad_norm": 1.2964017391204834, + "learning_rate": 0.00011490276329845426, + "loss": 0.2384, + "step": 33060 + }, + { + "epoch": 1.2768832773466157, + "grad_norm": 0.5337836146354675, + 
"learning_rate": 0.00011487702227885246, + "loss": 0.1415, + "step": 33070 + }, + { + "epoch": 1.2772693926406424, + "grad_norm": 2.7771682739257812, + "learning_rate": 0.00011485128125925069, + "loss": 0.378, + "step": 33080 + }, + { + "epoch": 1.2776555079346692, + "grad_norm": 1.5107232332229614, + "learning_rate": 0.00011482554023964889, + "loss": 0.2482, + "step": 33090 + }, + { + "epoch": 1.2780416232286962, + "grad_norm": 0.6885499358177185, + "learning_rate": 0.00011479979922004711, + "loss": 0.2122, + "step": 33100 + }, + { + "epoch": 1.278427738522723, + "grad_norm": 0.9016557335853577, + "learning_rate": 0.00011477405820044533, + "loss": 0.2841, + "step": 33110 + }, + { + "epoch": 1.2788138538167497, + "grad_norm": 1.9532525539398193, + "learning_rate": 0.00011474831718084354, + "loss": 0.2281, + "step": 33120 + }, + { + "epoch": 1.2791999691107765, + "grad_norm": 2.1078782081604004, + "learning_rate": 0.00011472257616124175, + "loss": 0.2836, + "step": 33130 + }, + { + "epoch": 1.2795860844048033, + "grad_norm": 0.19830390810966492, + "learning_rate": 0.00011469683514163995, + "loss": 0.388, + "step": 33140 + }, + { + "epoch": 1.27997219969883, + "grad_norm": 0.17538850009441376, + "learning_rate": 0.00011467109412203818, + "loss": 0.3274, + "step": 33150 + }, + { + "epoch": 1.2803583149928568, + "grad_norm": 0.7402139902114868, + "learning_rate": 0.00011464535310243641, + "loss": 0.1979, + "step": 33160 + }, + { + "epoch": 1.2807444302868838, + "grad_norm": 0.2097146362066269, + "learning_rate": 0.00011461961208283461, + "loss": 0.2464, + "step": 33170 + }, + { + "epoch": 1.2811305455809103, + "grad_norm": 1.2441083192825317, + "learning_rate": 0.00011459387106323283, + "loss": 0.266, + "step": 33180 + }, + { + "epoch": 1.2815166608749373, + "grad_norm": 2.518852710723877, + "learning_rate": 0.00011456813004363103, + "loss": 0.253, + "step": 33190 + }, + { + "epoch": 1.281902776168964, + "grad_norm": 0.8078998327255249, + "learning_rate": 0.00011454238902402925, + "loss": 0.2361, + "step": 33200 + }, + { + "epoch": 1.2822888914629909, + "grad_norm": 1.2297371625900269, + "learning_rate": 0.00011451664800442745, + "loss": 0.1974, + "step": 33210 + }, + { + "epoch": 1.2826750067570176, + "grad_norm": 0.4303855895996094, + "learning_rate": 0.00011449090698482567, + "loss": 0.3563, + "step": 33220 + }, + { + "epoch": 1.2830611220510444, + "grad_norm": 1.3215210437774658, + "learning_rate": 0.0001144651659652239, + "loss": 0.2818, + "step": 33230 + }, + { + "epoch": 1.2834472373450712, + "grad_norm": 1.546265959739685, + "learning_rate": 0.0001144394249456221, + "loss": 0.5778, + "step": 33240 + }, + { + "epoch": 1.283833352639098, + "grad_norm": 0.8895953297615051, + "learning_rate": 0.00011441368392602033, + "loss": 0.2485, + "step": 33250 + }, + { + "epoch": 1.284219467933125, + "grad_norm": 0.7534870505332947, + "learning_rate": 0.00011438794290641853, + "loss": 0.2825, + "step": 33260 + }, + { + "epoch": 1.2846055832271517, + "grad_norm": 0.052820973098278046, + "learning_rate": 0.00011436220188681674, + "loss": 0.2191, + "step": 33270 + }, + { + "epoch": 1.2849916985211784, + "grad_norm": 0.9264475107192993, + "learning_rate": 0.00011433646086721494, + "loss": 0.181, + "step": 33280 + }, + { + "epoch": 1.2853778138152052, + "grad_norm": 0.2128441333770752, + "learning_rate": 0.00011431071984761317, + "loss": 0.1819, + "step": 33290 + }, + { + "epoch": 1.285763929109232, + "grad_norm": 0.5400950312614441, + "learning_rate": 0.0001142849788280114, + "loss": 0.4316, + "step": 
33300 + }, + { + "epoch": 1.2861500444032588, + "grad_norm": 0.8033271431922913, + "learning_rate": 0.00011425923780840959, + "loss": 0.2146, + "step": 33310 + }, + { + "epoch": 1.2865361596972855, + "grad_norm": 2.012575149536133, + "learning_rate": 0.00011423349678880782, + "loss": 0.4335, + "step": 33320 + }, + { + "epoch": 1.2869222749913125, + "grad_norm": 0.7352376580238342, + "learning_rate": 0.00011420775576920602, + "loss": 0.2124, + "step": 33330 + }, + { + "epoch": 1.2873083902853393, + "grad_norm": 0.769036591053009, + "learning_rate": 0.00011418201474960423, + "loss": 0.3602, + "step": 33340 + }, + { + "epoch": 1.287694505579366, + "grad_norm": 0.250592976808548, + "learning_rate": 0.00011415627373000246, + "loss": 0.1692, + "step": 33350 + }, + { + "epoch": 1.2880806208733928, + "grad_norm": 2.43820858001709, + "learning_rate": 0.00011413053271040066, + "loss": 0.2777, + "step": 33360 + }, + { + "epoch": 1.2884667361674196, + "grad_norm": 1.3179954290390015, + "learning_rate": 0.00011410479169079889, + "loss": 0.1794, + "step": 33370 + }, + { + "epoch": 1.2888528514614463, + "grad_norm": 1.0040466785430908, + "learning_rate": 0.00011407905067119709, + "loss": 0.3037, + "step": 33380 + }, + { + "epoch": 1.2892389667554731, + "grad_norm": 5.296288013458252, + "learning_rate": 0.00011405330965159531, + "loss": 0.2904, + "step": 33390 + }, + { + "epoch": 1.2896250820495, + "grad_norm": 0.4267273247241974, + "learning_rate": 0.00011402756863199351, + "loss": 0.2263, + "step": 33400 + }, + { + "epoch": 1.2900111973435266, + "grad_norm": 0.8817713260650635, + "learning_rate": 0.00011400182761239173, + "loss": 0.2715, + "step": 33410 + }, + { + "epoch": 1.2903973126375536, + "grad_norm": 2.7891275882720947, + "learning_rate": 0.00011397608659278995, + "loss": 0.2781, + "step": 33420 + }, + { + "epoch": 1.2907834279315804, + "grad_norm": 0.3013952374458313, + "learning_rate": 0.00011395034557318815, + "loss": 0.2563, + "step": 33430 + }, + { + "epoch": 1.2911695432256072, + "grad_norm": 1.766413927078247, + "learning_rate": 0.00011392460455358638, + "loss": 0.1854, + "step": 33440 + }, + { + "epoch": 1.291555658519634, + "grad_norm": 0.25331103801727295, + "learning_rate": 0.00011389886353398458, + "loss": 0.1206, + "step": 33450 + }, + { + "epoch": 1.2919417738136607, + "grad_norm": 0.036400288343429565, + "learning_rate": 0.0001138731225143828, + "loss": 0.4707, + "step": 33460 + }, + { + "epoch": 1.2923278891076875, + "grad_norm": 1.5620888471603394, + "learning_rate": 0.00011384738149478102, + "loss": 0.3312, + "step": 33470 + }, + { + "epoch": 1.2927140044017142, + "grad_norm": 0.6670392155647278, + "learning_rate": 0.00011382164047517922, + "loss": 0.2341, + "step": 33480 + }, + { + "epoch": 1.2931001196957412, + "grad_norm": 2.3108737468719482, + "learning_rate": 0.00011379589945557745, + "loss": 0.3843, + "step": 33490 + }, + { + "epoch": 1.293486234989768, + "grad_norm": 0.8025147318840027, + "learning_rate": 0.00011377015843597565, + "loss": 0.1982, + "step": 33500 + }, + { + "epoch": 1.2938723502837948, + "grad_norm": 1.7835719585418701, + "learning_rate": 0.00011374441741637387, + "loss": 0.3285, + "step": 33510 + }, + { + "epoch": 1.2942584655778215, + "grad_norm": 2.041508913040161, + "learning_rate": 0.00011371867639677207, + "loss": 0.2044, + "step": 33520 + }, + { + "epoch": 1.2946445808718483, + "grad_norm": 1.103378415107727, + "learning_rate": 0.0001136929353771703, + "loss": 0.1682, + "step": 33530 + }, + { + "epoch": 1.295030696165875, + "grad_norm": 
0.057376351207494736, + "learning_rate": 0.00011366719435756851, + "loss": 0.1642, + "step": 33540 + }, + { + "epoch": 1.2954168114599018, + "grad_norm": 0.6539410948753357, + "learning_rate": 0.00011364145333796671, + "loss": 0.1549, + "step": 33550 + }, + { + "epoch": 1.2958029267539288, + "grad_norm": 1.250543236732483, + "learning_rate": 0.00011361571231836494, + "loss": 0.3764, + "step": 33560 + }, + { + "epoch": 1.2961890420479556, + "grad_norm": 0.23697887361049652, + "learning_rate": 0.00011358997129876314, + "loss": 0.3999, + "step": 33570 + }, + { + "epoch": 1.2965751573419824, + "grad_norm": 0.9318505525588989, + "learning_rate": 0.00011356423027916137, + "loss": 0.4156, + "step": 33580 + }, + { + "epoch": 1.2969612726360091, + "grad_norm": 1.3910777568817139, + "learning_rate": 0.00011353848925955957, + "loss": 0.3455, + "step": 33590 + }, + { + "epoch": 1.297347387930036, + "grad_norm": 1.6764451265335083, + "learning_rate": 0.00011351274823995779, + "loss": 0.1884, + "step": 33600 + }, + { + "epoch": 1.2977335032240627, + "grad_norm": 0.9300051927566528, + "learning_rate": 0.000113487007220356, + "loss": 0.119, + "step": 33610 + }, + { + "epoch": 1.2981196185180894, + "grad_norm": 2.447462558746338, + "learning_rate": 0.00011346126620075422, + "loss": 0.4403, + "step": 33620 + }, + { + "epoch": 1.2985057338121164, + "grad_norm": 1.216407060623169, + "learning_rate": 0.00011343552518115243, + "loss": 0.2415, + "step": 33630 + }, + { + "epoch": 1.298891849106143, + "grad_norm": 2.968648910522461, + "learning_rate": 0.00011340978416155063, + "loss": 0.2899, + "step": 33640 + }, + { + "epoch": 1.29927796440017, + "grad_norm": 0.6649970412254333, + "learning_rate": 0.00011338404314194886, + "loss": 0.3809, + "step": 33650 + }, + { + "epoch": 1.2996640796941967, + "grad_norm": 1.7277917861938477, + "learning_rate": 0.00011335830212234709, + "loss": 0.3308, + "step": 33660 + }, + { + "epoch": 1.3000501949882235, + "grad_norm": 1.3269709348678589, + "learning_rate": 0.00011333256110274529, + "loss": 0.3682, + "step": 33670 + }, + { + "epoch": 1.3004363102822503, + "grad_norm": 0.20609407126903534, + "learning_rate": 0.0001133068200831435, + "loss": 0.1379, + "step": 33680 + }, + { + "epoch": 1.300822425576277, + "grad_norm": 0.6592215299606323, + "learning_rate": 0.00011328107906354171, + "loss": 0.2746, + "step": 33690 + }, + { + "epoch": 1.3012085408703038, + "grad_norm": 1.903635859489441, + "learning_rate": 0.00011325533804393993, + "loss": 0.4729, + "step": 33700 + }, + { + "epoch": 1.3015946561643306, + "grad_norm": 0.8432504534721375, + "learning_rate": 0.00011322959702433813, + "loss": 0.2835, + "step": 33710 + }, + { + "epoch": 1.3019807714583576, + "grad_norm": 0.9862542152404785, + "learning_rate": 0.00011320385600473635, + "loss": 0.1823, + "step": 33720 + }, + { + "epoch": 1.3023668867523843, + "grad_norm": 3.845738649368286, + "learning_rate": 0.00011317811498513458, + "loss": 0.2542, + "step": 33730 + }, + { + "epoch": 1.302753002046411, + "grad_norm": 0.6317747235298157, + "learning_rate": 0.00011315237396553278, + "loss": 0.22, + "step": 33740 + }, + { + "epoch": 1.3031391173404379, + "grad_norm": 2.5221354961395264, + "learning_rate": 0.000113126632945931, + "loss": 0.2253, + "step": 33750 + }, + { + "epoch": 1.3035252326344646, + "grad_norm": 1.3326247930526733, + "learning_rate": 0.0001131008919263292, + "loss": 0.2021, + "step": 33760 + }, + { + "epoch": 1.3039113479284914, + "grad_norm": 1.148047685623169, + "learning_rate": 0.00011307515090672742, + "loss": 
0.3987, + "step": 33770 + }, + { + "epoch": 1.3042974632225182, + "grad_norm": 0.19721268117427826, + "learning_rate": 0.00011304940988712562, + "loss": 0.2642, + "step": 33780 + }, + { + "epoch": 1.3046835785165452, + "grad_norm": 1.4060617685317993, + "learning_rate": 0.00011302366886752385, + "loss": 0.2736, + "step": 33790 + }, + { + "epoch": 1.305069693810572, + "grad_norm": 1.0736548900604248, + "learning_rate": 0.00011299792784792207, + "loss": 0.2448, + "step": 33800 + }, + { + "epoch": 1.3054558091045987, + "grad_norm": 4.352476119995117, + "learning_rate": 0.00011297218682832027, + "loss": 0.383, + "step": 33810 + }, + { + "epoch": 1.3058419243986255, + "grad_norm": 0.2249228060245514, + "learning_rate": 0.0001129464458087185, + "loss": 0.14, + "step": 33820 + }, + { + "epoch": 1.3062280396926522, + "grad_norm": 0.4820781946182251, + "learning_rate": 0.0001129207047891167, + "loss": 0.248, + "step": 33830 + }, + { + "epoch": 1.306614154986679, + "grad_norm": 2.2983391284942627, + "learning_rate": 0.00011289496376951491, + "loss": 0.2608, + "step": 33840 + }, + { + "epoch": 1.3070002702807058, + "grad_norm": 1.3315671682357788, + "learning_rate": 0.00011286922274991314, + "loss": 0.1727, + "step": 33850 + }, + { + "epoch": 1.3073863855747327, + "grad_norm": 2.060299873352051, + "learning_rate": 0.00011284348173031134, + "loss": 0.3097, + "step": 33860 + }, + { + "epoch": 1.3077725008687593, + "grad_norm": 2.096285581588745, + "learning_rate": 0.00011281774071070957, + "loss": 0.2305, + "step": 33870 + }, + { + "epoch": 1.3081586161627863, + "grad_norm": 0.4997636675834656, + "learning_rate": 0.00011279199969110777, + "loss": 0.1993, + "step": 33880 + }, + { + "epoch": 1.308544731456813, + "grad_norm": 0.58636474609375, + "learning_rate": 0.00011276625867150599, + "loss": 0.1873, + "step": 33890 + }, + { + "epoch": 1.3089308467508398, + "grad_norm": 0.9128592610359192, + "learning_rate": 0.00011274051765190419, + "loss": 0.1885, + "step": 33900 + }, + { + "epoch": 1.3093169620448666, + "grad_norm": 2.228043794631958, + "learning_rate": 0.0001127147766323024, + "loss": 0.3649, + "step": 33910 + }, + { + "epoch": 1.3097030773388934, + "grad_norm": 1.069002389907837, + "learning_rate": 0.00011268903561270063, + "loss": 0.5454, + "step": 33920 + }, + { + "epoch": 1.3100891926329201, + "grad_norm": 0.6207597851753235, + "learning_rate": 0.00011266329459309883, + "loss": 0.2329, + "step": 33930 + }, + { + "epoch": 1.3104753079269469, + "grad_norm": 1.262247920036316, + "learning_rate": 0.00011263755357349706, + "loss": 0.3437, + "step": 33940 + }, + { + "epoch": 1.3108614232209739, + "grad_norm": 1.7429994344711304, + "learning_rate": 0.00011261181255389526, + "loss": 0.228, + "step": 33950 + }, + { + "epoch": 1.3112475385150006, + "grad_norm": 0.646900475025177, + "learning_rate": 0.00011258607153429349, + "loss": 0.3739, + "step": 33960 + }, + { + "epoch": 1.3116336538090274, + "grad_norm": 1.8228782415390015, + "learning_rate": 0.0001125603305146917, + "loss": 0.2325, + "step": 33970 + }, + { + "epoch": 1.3120197691030542, + "grad_norm": 3.539228916168213, + "learning_rate": 0.0001125345894950899, + "loss": 0.194, + "step": 33980 + }, + { + "epoch": 1.312405884397081, + "grad_norm": 1.2801135778427124, + "learning_rate": 0.00011250884847548813, + "loss": 0.3069, + "step": 33990 + }, + { + "epoch": 1.3127919996911077, + "grad_norm": 3.6265695095062256, + "learning_rate": 0.00011248310745588633, + "loss": 0.3113, + "step": 34000 + }, + { + "epoch": 1.3131781149851345, + "grad_norm": 
0.07370063662528992, + "learning_rate": 0.00011245736643628455, + "loss": 0.1449, + "step": 34010 + }, + { + "epoch": 1.3135642302791615, + "grad_norm": 1.0295637845993042, + "learning_rate": 0.00011243162541668275, + "loss": 0.22, + "step": 34020 + }, + { + "epoch": 1.313950345573188, + "grad_norm": 0.8803662061691284, + "learning_rate": 0.00011240588439708098, + "loss": 0.1368, + "step": 34030 + }, + { + "epoch": 1.314336460867215, + "grad_norm": 1.6597707271575928, + "learning_rate": 0.00011238014337747919, + "loss": 0.3038, + "step": 34040 + }, + { + "epoch": 1.3147225761612418, + "grad_norm": 2.115492343902588, + "learning_rate": 0.00011235440235787739, + "loss": 0.1754, + "step": 34050 + }, + { + "epoch": 1.3151086914552685, + "grad_norm": 0.8143919706344604, + "learning_rate": 0.00011232866133827562, + "loss": 0.3764, + "step": 34060 + }, + { + "epoch": 1.3154948067492953, + "grad_norm": 0.14369767904281616, + "learning_rate": 0.00011230292031867382, + "loss": 0.1742, + "step": 34070 + }, + { + "epoch": 1.315880922043322, + "grad_norm": 1.0129845142364502, + "learning_rate": 0.00011227717929907205, + "loss": 0.1458, + "step": 34080 + }, + { + "epoch": 1.316267037337349, + "grad_norm": 2.7300291061401367, + "learning_rate": 0.00011225143827947025, + "loss": 0.3939, + "step": 34090 + }, + { + "epoch": 1.3166531526313756, + "grad_norm": 0.20205609500408173, + "learning_rate": 0.00011222569725986847, + "loss": 0.29, + "step": 34100 + }, + { + "epoch": 1.3170392679254026, + "grad_norm": 1.8928464651107788, + "learning_rate": 0.00011219995624026669, + "loss": 0.1742, + "step": 34110 + }, + { + "epoch": 1.3174253832194294, + "grad_norm": 0.2639687955379486, + "learning_rate": 0.00011217421522066488, + "loss": 0.1745, + "step": 34120 + }, + { + "epoch": 1.3178114985134561, + "grad_norm": 0.5906389355659485, + "learning_rate": 0.00011214847420106311, + "loss": 0.2134, + "step": 34130 + }, + { + "epoch": 1.318197613807483, + "grad_norm": 0.9190629720687866, + "learning_rate": 0.00011212273318146131, + "loss": 0.2547, + "step": 34140 + }, + { + "epoch": 1.3185837291015097, + "grad_norm": 0.5749151110649109, + "learning_rate": 0.00011209699216185954, + "loss": 0.1688, + "step": 34150 + }, + { + "epoch": 1.3189698443955364, + "grad_norm": 0.82295823097229, + "learning_rate": 0.00011207125114225777, + "loss": 0.2884, + "step": 34160 + }, + { + "epoch": 1.3193559596895632, + "grad_norm": 0.07816460728645325, + "learning_rate": 0.00011204551012265597, + "loss": 0.2418, + "step": 34170 + }, + { + "epoch": 1.3197420749835902, + "grad_norm": 0.6417407393455505, + "learning_rate": 0.00011201976910305418, + "loss": 0.2557, + "step": 34180 + }, + { + "epoch": 1.320128190277617, + "grad_norm": 6.093267440795898, + "learning_rate": 0.00011199402808345238, + "loss": 0.3088, + "step": 34190 + }, + { + "epoch": 1.3205143055716437, + "grad_norm": 1.8861887454986572, + "learning_rate": 0.0001119682870638506, + "loss": 0.2204, + "step": 34200 + }, + { + "epoch": 1.3209004208656705, + "grad_norm": 2.3272714614868164, + "learning_rate": 0.0001119425460442488, + "loss": 0.2236, + "step": 34210 + }, + { + "epoch": 1.3212865361596973, + "grad_norm": 0.9608810544013977, + "learning_rate": 0.00011191680502464703, + "loss": 0.1897, + "step": 34220 + }, + { + "epoch": 1.321672651453724, + "grad_norm": 1.2157350778579712, + "learning_rate": 0.00011189106400504526, + "loss": 0.1526, + "step": 34230 + }, + { + "epoch": 1.3220587667477508, + "grad_norm": 1.6684671640396118, + "learning_rate": 0.00011186532298544346, + 
"loss": 0.3394, + "step": 34240 + }, + { + "epoch": 1.3224448820417778, + "grad_norm": 2.0432374477386475, + "learning_rate": 0.00011183958196584167, + "loss": 0.2183, + "step": 34250 + }, + { + "epoch": 1.3228309973358043, + "grad_norm": 0.9436892867088318, + "learning_rate": 0.00011181384094623988, + "loss": 0.2947, + "step": 34260 + }, + { + "epoch": 1.3232171126298313, + "grad_norm": 0.23260092735290527, + "learning_rate": 0.0001117880999266381, + "loss": 0.114, + "step": 34270 + }, + { + "epoch": 1.323603227923858, + "grad_norm": 1.2291594743728638, + "learning_rate": 0.0001117623589070363, + "loss": 0.3145, + "step": 34280 + }, + { + "epoch": 1.3239893432178849, + "grad_norm": 0.41411107778549194, + "learning_rate": 0.00011173661788743452, + "loss": 0.2937, + "step": 34290 + }, + { + "epoch": 1.3243754585119116, + "grad_norm": 2.354405164718628, + "learning_rate": 0.00011171087686783275, + "loss": 0.3933, + "step": 34300 + }, + { + "epoch": 1.3247615738059384, + "grad_norm": 2.6997978687286377, + "learning_rate": 0.00011168513584823095, + "loss": 0.1494, + "step": 34310 + }, + { + "epoch": 1.3251476890999654, + "grad_norm": 2.8430919647216797, + "learning_rate": 0.00011165939482862916, + "loss": 0.2869, + "step": 34320 + }, + { + "epoch": 1.325533804393992, + "grad_norm": 1.1737356185913086, + "learning_rate": 0.00011163365380902738, + "loss": 0.2792, + "step": 34330 + }, + { + "epoch": 1.325919919688019, + "grad_norm": 4.123973846435547, + "learning_rate": 0.00011160791278942559, + "loss": 0.5211, + "step": 34340 + }, + { + "epoch": 1.3263060349820457, + "grad_norm": 0.8862038850784302, + "learning_rate": 0.00011158217176982382, + "loss": 0.2976, + "step": 34350 + }, + { + "epoch": 1.3266921502760725, + "grad_norm": 1.8690590858459473, + "learning_rate": 0.00011155643075022202, + "loss": 0.2485, + "step": 34360 + }, + { + "epoch": 1.3270782655700992, + "grad_norm": 2.885589599609375, + "learning_rate": 0.00011153068973062024, + "loss": 0.3084, + "step": 34370 + }, + { + "epoch": 1.327464380864126, + "grad_norm": 0.9898788928985596, + "learning_rate": 0.00011150494871101844, + "loss": 0.261, + "step": 34380 + }, + { + "epoch": 1.3278504961581528, + "grad_norm": 0.6879653930664062, + "learning_rate": 0.00011147920769141667, + "loss": 0.2082, + "step": 34390 + }, + { + "epoch": 1.3282366114521795, + "grad_norm": 1.2619003057479858, + "learning_rate": 0.00011145346667181487, + "loss": 0.2402, + "step": 34400 + }, + { + "epoch": 1.3286227267462065, + "grad_norm": 1.1212007999420166, + "learning_rate": 0.00011142772565221308, + "loss": 0.3525, + "step": 34410 + }, + { + "epoch": 1.3290088420402333, + "grad_norm": 1.8431956768035889, + "learning_rate": 0.00011140198463261131, + "loss": 0.2212, + "step": 34420 + }, + { + "epoch": 1.32939495733426, + "grad_norm": 0.6185423731803894, + "learning_rate": 0.00011137624361300951, + "loss": 0.2455, + "step": 34430 + }, + { + "epoch": 1.3297810726282868, + "grad_norm": 2.3791301250457764, + "learning_rate": 0.00011135050259340774, + "loss": 0.1763, + "step": 34440 + }, + { + "epoch": 1.3301671879223136, + "grad_norm": 0.4928603768348694, + "learning_rate": 0.00011132476157380594, + "loss": 0.2381, + "step": 34450 + }, + { + "epoch": 1.3305533032163404, + "grad_norm": 1.5636029243469238, + "learning_rate": 0.00011129902055420416, + "loss": 0.1368, + "step": 34460 + }, + { + "epoch": 1.3309394185103671, + "grad_norm": 0.9425283074378967, + "learning_rate": 0.00011127327953460238, + "loss": 0.2825, + "step": 34470 + }, + { + "epoch": 
1.3313255338043941, + "grad_norm": 1.2257115840911865, + "learning_rate": 0.00011124753851500058, + "loss": 0.2547, + "step": 34480 + }, + { + "epoch": 1.3317116490984207, + "grad_norm": 0.9416170716285706, + "learning_rate": 0.0001112217974953988, + "loss": 0.2766, + "step": 34490 + }, + { + "epoch": 1.3320977643924476, + "grad_norm": 0.5123847126960754, + "learning_rate": 0.000111196056475797, + "loss": 0.4733, + "step": 34500 + }, + { + "epoch": 1.3324838796864744, + "grad_norm": 1.5581384897232056, + "learning_rate": 0.00011117031545619523, + "loss": 0.1597, + "step": 34510 + }, + { + "epoch": 1.3328699949805012, + "grad_norm": 2.377333879470825, + "learning_rate": 0.00011114457443659343, + "loss": 0.209, + "step": 34520 + }, + { + "epoch": 1.333256110274528, + "grad_norm": 1.7840913534164429, + "learning_rate": 0.00011111883341699166, + "loss": 0.1759, + "step": 34530 + }, + { + "epoch": 1.3336422255685547, + "grad_norm": 1.1825993061065674, + "learning_rate": 0.00011109309239738987, + "loss": 0.2464, + "step": 34540 + }, + { + "epoch": 1.3340283408625815, + "grad_norm": 1.8859659433364868, + "learning_rate": 0.00011106735137778807, + "loss": 0.3539, + "step": 34550 + }, + { + "epoch": 1.3344144561566083, + "grad_norm": 1.9698175191879272, + "learning_rate": 0.0001110416103581863, + "loss": 0.3301, + "step": 34560 + }, + { + "epoch": 1.3348005714506352, + "grad_norm": 0.7649385333061218, + "learning_rate": 0.0001110158693385845, + "loss": 0.232, + "step": 34570 + }, + { + "epoch": 1.335186686744662, + "grad_norm": 0.56386399269104, + "learning_rate": 0.00011099012831898272, + "loss": 0.3425, + "step": 34580 + }, + { + "epoch": 1.3355728020386888, + "grad_norm": 2.956003189086914, + "learning_rate": 0.00011096438729938092, + "loss": 0.1518, + "step": 34590 + }, + { + "epoch": 1.3359589173327155, + "grad_norm": 2.612029552459717, + "learning_rate": 0.00011093864627977915, + "loss": 0.2765, + "step": 34600 + }, + { + "epoch": 1.3363450326267423, + "grad_norm": 0.9674397706985474, + "learning_rate": 0.00011091290526017736, + "loss": 0.303, + "step": 34610 + }, + { + "epoch": 1.336731147920769, + "grad_norm": 0.9578921794891357, + "learning_rate": 0.00011088716424057556, + "loss": 0.1405, + "step": 34620 + }, + { + "epoch": 1.3371172632147958, + "grad_norm": 2.168065071105957, + "learning_rate": 0.00011086142322097379, + "loss": 0.1914, + "step": 34630 + }, + { + "epoch": 1.3375033785088228, + "grad_norm": 1.3166526556015015, + "learning_rate": 0.00011083568220137199, + "loss": 0.4134, + "step": 34640 + }, + { + "epoch": 1.3378894938028496, + "grad_norm": 0.9082283973693848, + "learning_rate": 0.00011080994118177022, + "loss": 0.2693, + "step": 34650 + }, + { + "epoch": 1.3382756090968764, + "grad_norm": 2.203007698059082, + "learning_rate": 0.00011078420016216844, + "loss": 0.1847, + "step": 34660 + }, + { + "epoch": 1.3386617243909031, + "grad_norm": 0.8101674914360046, + "learning_rate": 0.00011075845914256664, + "loss": 0.3111, + "step": 34670 + }, + { + "epoch": 1.33904783968493, + "grad_norm": 1.9545695781707764, + "learning_rate": 0.00011073271812296486, + "loss": 0.3761, + "step": 34680 + }, + { + "epoch": 1.3394339549789567, + "grad_norm": 1.547581672668457, + "learning_rate": 0.00011070697710336306, + "loss": 0.2374, + "step": 34690 + }, + { + "epoch": 1.3398200702729834, + "grad_norm": 3.3519034385681152, + "learning_rate": 0.00011068123608376128, + "loss": 0.1957, + "step": 34700 + }, + { + "epoch": 1.3402061855670104, + "grad_norm": 1.5508599281311035, + "learning_rate": 
0.00011065549506415948, + "loss": 0.4171, + "step": 34710 + }, + { + "epoch": 1.340592300861037, + "grad_norm": 1.8547546863555908, + "learning_rate": 0.00011062975404455771, + "loss": 0.1872, + "step": 34720 + }, + { + "epoch": 1.340978416155064, + "grad_norm": 1.4600756168365479, + "learning_rate": 0.00011060401302495594, + "loss": 0.3515, + "step": 34730 + }, + { + "epoch": 1.3413645314490907, + "grad_norm": 0.05774044618010521, + "learning_rate": 0.00011057827200535414, + "loss": 0.1604, + "step": 34740 + }, + { + "epoch": 1.3417506467431175, + "grad_norm": 2.8793342113494873, + "learning_rate": 0.00011055253098575235, + "loss": 0.3095, + "step": 34750 + }, + { + "epoch": 1.3421367620371443, + "grad_norm": 2.241042375564575, + "learning_rate": 0.00011052678996615055, + "loss": 0.2511, + "step": 34760 + }, + { + "epoch": 1.342522877331171, + "grad_norm": 1.9320632219314575, + "learning_rate": 0.00011050104894654878, + "loss": 0.4493, + "step": 34770 + }, + { + "epoch": 1.3429089926251978, + "grad_norm": 1.6483882665634155, + "learning_rate": 0.000110475307926947, + "loss": 0.217, + "step": 34780 + }, + { + "epoch": 1.3432951079192246, + "grad_norm": 0.9635765552520752, + "learning_rate": 0.0001104495669073452, + "loss": 0.5458, + "step": 34790 + }, + { + "epoch": 1.3436812232132516, + "grad_norm": 1.2436567544937134, + "learning_rate": 0.00011042382588774343, + "loss": 0.2857, + "step": 34800 + }, + { + "epoch": 1.3440673385072783, + "grad_norm": 2.8082425594329834, + "learning_rate": 0.00011039808486814163, + "loss": 0.3439, + "step": 34810 + }, + { + "epoch": 1.344453453801305, + "grad_norm": 1.0430901050567627, + "learning_rate": 0.00011037234384853984, + "loss": 0.1404, + "step": 34820 + }, + { + "epoch": 1.3448395690953319, + "grad_norm": 1.7387149333953857, + "learning_rate": 0.00011034660282893806, + "loss": 0.395, + "step": 34830 + }, + { + "epoch": 1.3452256843893586, + "grad_norm": 1.2713748216629028, + "learning_rate": 0.00011032086180933627, + "loss": 0.29, + "step": 34840 + }, + { + "epoch": 1.3456117996833854, + "grad_norm": 0.26068204641342163, + "learning_rate": 0.0001102951207897345, + "loss": 0.1814, + "step": 34850 + }, + { + "epoch": 1.3459979149774122, + "grad_norm": 2.4163243770599365, + "learning_rate": 0.0001102693797701327, + "loss": 0.2445, + "step": 34860 + }, + { + "epoch": 1.3463840302714392, + "grad_norm": 2.2439687252044678, + "learning_rate": 0.00011024363875053092, + "loss": 0.3338, + "step": 34870 + }, + { + "epoch": 1.346770145565466, + "grad_norm": 0.2822403609752655, + "learning_rate": 0.00011021789773092912, + "loss": 0.1648, + "step": 34880 + }, + { + "epoch": 1.3471562608594927, + "grad_norm": 0.07319017499685287, + "learning_rate": 0.00011019215671132734, + "loss": 0.107, + "step": 34890 + }, + { + "epoch": 1.3475423761535195, + "grad_norm": 0.9809044003486633, + "learning_rate": 0.00011016641569172555, + "loss": 0.256, + "step": 34900 + }, + { + "epoch": 1.3479284914475462, + "grad_norm": 0.5016226768493652, + "learning_rate": 0.00011014067467212376, + "loss": 0.3229, + "step": 34910 + }, + { + "epoch": 1.348314606741573, + "grad_norm": 1.3026005029678345, + "learning_rate": 0.00011011493365252199, + "loss": 0.2145, + "step": 34920 + }, + { + "epoch": 1.3487007220355998, + "grad_norm": 1.0752215385437012, + "learning_rate": 0.00011008919263292019, + "loss": 0.2355, + "step": 34930 + }, + { + "epoch": 1.3490868373296268, + "grad_norm": 2.2703003883361816, + "learning_rate": 0.00011006345161331842, + "loss": 0.2079, + "step": 34940 + }, + { + 
"epoch": 1.3494729526236533, + "grad_norm": 1.1323810815811157, + "learning_rate": 0.00011003771059371662, + "loss": 0.1015, + "step": 34950 + }, + { + "epoch": 1.3498590679176803, + "grad_norm": 0.10813555121421814, + "learning_rate": 0.00011001196957411484, + "loss": 0.4214, + "step": 34960 + }, + { + "epoch": 1.350245183211707, + "grad_norm": 0.07815568149089813, + "learning_rate": 0.00010998622855451306, + "loss": 0.1211, + "step": 34970 + }, + { + "epoch": 1.3506312985057338, + "grad_norm": 0.6748234629631042, + "learning_rate": 0.00010996048753491126, + "loss": 0.3508, + "step": 34980 + }, + { + "epoch": 1.3510174137997606, + "grad_norm": 1.8556997776031494, + "learning_rate": 0.00010993474651530948, + "loss": 0.2268, + "step": 34990 + }, + { + "epoch": 1.3514035290937874, + "grad_norm": 0.8696061372756958, + "learning_rate": 0.00010990900549570768, + "loss": 0.4321, + "step": 35000 + }, + { + "epoch": 1.3517896443878141, + "grad_norm": 0.42442765831947327, + "learning_rate": 0.00010988326447610591, + "loss": 0.1944, + "step": 35010 + }, + { + "epoch": 1.352175759681841, + "grad_norm": 1.0474554300308228, + "learning_rate": 0.00010985752345650411, + "loss": 0.1342, + "step": 35020 + }, + { + "epoch": 1.3525618749758679, + "grad_norm": 0.607037365436554, + "learning_rate": 0.00010983178243690234, + "loss": 0.2965, + "step": 35030 + }, + { + "epoch": 1.3529479902698947, + "grad_norm": 1.8160990476608276, + "learning_rate": 0.00010980604141730055, + "loss": 0.3192, + "step": 35040 + }, + { + "epoch": 1.3533341055639214, + "grad_norm": 2.0026509761810303, + "learning_rate": 0.00010978030039769875, + "loss": 0.3054, + "step": 35050 + }, + { + "epoch": 1.3537202208579482, + "grad_norm": 0.9203600883483887, + "learning_rate": 0.00010975455937809698, + "loss": 0.253, + "step": 35060 + }, + { + "epoch": 1.354106336151975, + "grad_norm": 0.33198195695877075, + "learning_rate": 0.00010972881835849518, + "loss": 0.3885, + "step": 35070 + }, + { + "epoch": 1.3544924514460017, + "grad_norm": 0.3201223611831665, + "learning_rate": 0.0001097030773388934, + "loss": 0.3029, + "step": 35080 + }, + { + "epoch": 1.3548785667400285, + "grad_norm": 1.2589943408966064, + "learning_rate": 0.0001096773363192916, + "loss": 0.4243, + "step": 35090 + }, + { + "epoch": 1.3552646820340555, + "grad_norm": 1.5106219053268433, + "learning_rate": 0.00010965159529968983, + "loss": 0.2585, + "step": 35100 + }, + { + "epoch": 1.3556507973280822, + "grad_norm": 1.429799199104309, + "learning_rate": 0.00010962585428008804, + "loss": 0.1961, + "step": 35110 + }, + { + "epoch": 1.356036912622109, + "grad_norm": 2.1211297512054443, + "learning_rate": 0.00010960011326048624, + "loss": 0.4057, + "step": 35120 + }, + { + "epoch": 1.3564230279161358, + "grad_norm": 2.5154731273651123, + "learning_rate": 0.00010957437224088447, + "loss": 0.3787, + "step": 35130 + }, + { + "epoch": 1.3568091432101625, + "grad_norm": 0.4914834201335907, + "learning_rate": 0.00010954863122128267, + "loss": 0.234, + "step": 35140 + }, + { + "epoch": 1.3571952585041893, + "grad_norm": 0.26685893535614014, + "learning_rate": 0.0001095228902016809, + "loss": 0.2841, + "step": 35150 + }, + { + "epoch": 1.357581373798216, + "grad_norm": 0.15462155640125275, + "learning_rate": 0.00010949714918207912, + "loss": 0.2269, + "step": 35160 + }, + { + "epoch": 1.357967489092243, + "grad_norm": 1.3887063264846802, + "learning_rate": 0.00010947140816247732, + "loss": 0.3455, + "step": 35170 + }, + { + "epoch": 1.3583536043862696, + "grad_norm": 0.786374032497406, + 
"learning_rate": 0.00010944566714287554, + "loss": 0.2897, + "step": 35180 + }, + { + "epoch": 1.3587397196802966, + "grad_norm": 1.100475549697876, + "learning_rate": 0.00010941992612327374, + "loss": 0.2892, + "step": 35190 + }, + { + "epoch": 1.3591258349743234, + "grad_norm": 0.7676102519035339, + "learning_rate": 0.00010939418510367196, + "loss": 0.1942, + "step": 35200 + }, + { + "epoch": 1.3595119502683501, + "grad_norm": 0.33462053537368774, + "learning_rate": 0.00010936844408407016, + "loss": 0.2872, + "step": 35210 + }, + { + "epoch": 1.359898065562377, + "grad_norm": 0.9294387698173523, + "learning_rate": 0.00010934270306446839, + "loss": 0.2617, + "step": 35220 + }, + { + "epoch": 1.3602841808564037, + "grad_norm": 0.3169979453086853, + "learning_rate": 0.00010931696204486662, + "loss": 0.2942, + "step": 35230 + }, + { + "epoch": 1.3606702961504304, + "grad_norm": 2.1339616775512695, + "learning_rate": 0.00010929122102526482, + "loss": 0.4448, + "step": 35240 + }, + { + "epoch": 1.3610564114444572, + "grad_norm": 0.9430062770843506, + "learning_rate": 0.00010926548000566303, + "loss": 0.2051, + "step": 35250 + }, + { + "epoch": 1.3614425267384842, + "grad_norm": 3.1187360286712646, + "learning_rate": 0.00010923973898606123, + "loss": 0.2274, + "step": 35260 + }, + { + "epoch": 1.361828642032511, + "grad_norm": 1.4727579355239868, + "learning_rate": 0.00010921399796645946, + "loss": 0.3757, + "step": 35270 + }, + { + "epoch": 1.3622147573265377, + "grad_norm": 2.157560348510742, + "learning_rate": 0.00010918825694685768, + "loss": 0.3096, + "step": 35280 + }, + { + "epoch": 1.3626008726205645, + "grad_norm": 0.33457377552986145, + "learning_rate": 0.00010916251592725588, + "loss": 0.1489, + "step": 35290 + }, + { + "epoch": 1.3629869879145913, + "grad_norm": 0.9005904197692871, + "learning_rate": 0.00010913677490765411, + "loss": 0.1826, + "step": 35300 + }, + { + "epoch": 1.363373103208618, + "grad_norm": 2.1222829818725586, + "learning_rate": 0.00010911103388805231, + "loss": 0.1965, + "step": 35310 + }, + { + "epoch": 1.3637592185026448, + "grad_norm": 1.3881357908248901, + "learning_rate": 0.00010908529286845052, + "loss": 0.1791, + "step": 35320 + }, + { + "epoch": 1.3641453337966718, + "grad_norm": 1.7574503421783447, + "learning_rate": 0.00010905955184884872, + "loss": 0.3316, + "step": 35330 + }, + { + "epoch": 1.3645314490906983, + "grad_norm": 0.1967727392911911, + "learning_rate": 0.00010903381082924695, + "loss": 0.2331, + "step": 35340 + }, + { + "epoch": 1.3649175643847253, + "grad_norm": 0.8974360823631287, + "learning_rate": 0.00010900806980964518, + "loss": 0.2589, + "step": 35350 + }, + { + "epoch": 1.365303679678752, + "grad_norm": 2.0996744632720947, + "learning_rate": 0.00010898232879004338, + "loss": 0.3663, + "step": 35360 + }, + { + "epoch": 1.3656897949727789, + "grad_norm": 0.5678316354751587, + "learning_rate": 0.0001089565877704416, + "loss": 0.1729, + "step": 35370 + }, + { + "epoch": 1.3660759102668056, + "grad_norm": 2.3381874561309814, + "learning_rate": 0.0001089308467508398, + "loss": 0.1615, + "step": 35380 + }, + { + "epoch": 1.3664620255608324, + "grad_norm": 1.0276836156845093, + "learning_rate": 0.00010890510573123802, + "loss": 0.3359, + "step": 35390 + }, + { + "epoch": 1.3668481408548594, + "grad_norm": 2.4374940395355225, + "learning_rate": 0.00010887936471163622, + "loss": 0.2435, + "step": 35400 + }, + { + "epoch": 1.367234256148886, + "grad_norm": 0.45221665501594543, + "learning_rate": 0.00010885362369203444, + "loss": 0.2555, + 
"step": 35410 + }, + { + "epoch": 1.367620371442913, + "grad_norm": 2.608090400695801, + "learning_rate": 0.00010882788267243267, + "loss": 0.2465, + "step": 35420 + }, + { + "epoch": 1.3680064867369397, + "grad_norm": 1.4186642169952393, + "learning_rate": 0.00010880214165283087, + "loss": 0.1674, + "step": 35430 + }, + { + "epoch": 1.3683926020309665, + "grad_norm": 0.659479022026062, + "learning_rate": 0.0001087764006332291, + "loss": 0.2926, + "step": 35440 + }, + { + "epoch": 1.3687787173249932, + "grad_norm": 0.9219567179679871, + "learning_rate": 0.0001087506596136273, + "loss": 0.2001, + "step": 35450 + }, + { + "epoch": 1.36916483261902, + "grad_norm": 0.8070804476737976, + "learning_rate": 0.00010872491859402551, + "loss": 0.2178, + "step": 35460 + }, + { + "epoch": 1.3695509479130468, + "grad_norm": 2.9981069564819336, + "learning_rate": 0.00010869917757442374, + "loss": 0.3079, + "step": 35470 + }, + { + "epoch": 1.3699370632070735, + "grad_norm": 0.7891242504119873, + "learning_rate": 0.00010867343655482194, + "loss": 0.2765, + "step": 35480 + }, + { + "epoch": 1.3703231785011005, + "grad_norm": 1.448637843132019, + "learning_rate": 0.00010864769553522016, + "loss": 0.3521, + "step": 35490 + }, + { + "epoch": 1.3707092937951273, + "grad_norm": 0.07628043740987778, + "learning_rate": 0.00010862195451561836, + "loss": 0.2083, + "step": 35500 + }, + { + "epoch": 1.371095409089154, + "grad_norm": 0.7549735307693481, + "learning_rate": 0.00010859621349601659, + "loss": 0.2536, + "step": 35510 + }, + { + "epoch": 1.3714815243831808, + "grad_norm": 1.3548041582107544, + "learning_rate": 0.00010857047247641479, + "loss": 0.251, + "step": 35520 + }, + { + "epoch": 1.3718676396772076, + "grad_norm": 0.530010998249054, + "learning_rate": 0.000108544731456813, + "loss": 0.1917, + "step": 35530 + }, + { + "epoch": 1.3722537549712344, + "grad_norm": 0.4148992896080017, + "learning_rate": 0.00010851899043721123, + "loss": 0.335, + "step": 35540 + }, + { + "epoch": 1.3726398702652611, + "grad_norm": 1.5118776559829712, + "learning_rate": 0.00010849324941760943, + "loss": 0.2159, + "step": 35550 + }, + { + "epoch": 1.3730259855592881, + "grad_norm": 1.036889910697937, + "learning_rate": 0.00010846750839800766, + "loss": 0.2975, + "step": 35560 + }, + { + "epoch": 1.3734121008533147, + "grad_norm": 1.724263072013855, + "learning_rate": 0.00010844176737840586, + "loss": 0.1476, + "step": 35570 + }, + { + "epoch": 1.3737982161473417, + "grad_norm": 1.599007487297058, + "learning_rate": 0.00010841602635880408, + "loss": 0.2539, + "step": 35580 + }, + { + "epoch": 1.3741843314413684, + "grad_norm": 2.9119279384613037, + "learning_rate": 0.00010839028533920228, + "loss": 0.2688, + "step": 35590 + }, + { + "epoch": 1.3745704467353952, + "grad_norm": 1.8647874593734741, + "learning_rate": 0.00010836454431960051, + "loss": 0.4158, + "step": 35600 + }, + { + "epoch": 1.374956562029422, + "grad_norm": 3.925290822982788, + "learning_rate": 0.00010833880329999872, + "loss": 0.3333, + "step": 35610 + }, + { + "epoch": 1.3753426773234487, + "grad_norm": 0.7124634385108948, + "learning_rate": 0.00010831306228039692, + "loss": 0.1069, + "step": 35620 + }, + { + "epoch": 1.3757287926174757, + "grad_norm": 1.303579330444336, + "learning_rate": 0.00010828732126079515, + "loss": 0.2898, + "step": 35630 + }, + { + "epoch": 1.3761149079115023, + "grad_norm": 3.921804189682007, + "learning_rate": 0.00010826158024119335, + "loss": 0.4212, + "step": 35640 + }, + { + "epoch": 1.3765010232055293, + "grad_norm": 
1.3194564580917358, + "learning_rate": 0.00010823583922159158, + "loss": 0.2771, + "step": 35650 + }, + { + "epoch": 1.376887138499556, + "grad_norm": 1.4237637519836426, + "learning_rate": 0.00010821009820198979, + "loss": 0.2463, + "step": 35660 + }, + { + "epoch": 1.3772732537935828, + "grad_norm": 1.8165888786315918, + "learning_rate": 0.000108184357182388, + "loss": 0.291, + "step": 35670 + }, + { + "epoch": 1.3776593690876096, + "grad_norm": 1.1056426763534546, + "learning_rate": 0.00010815861616278622, + "loss": 0.2525, + "step": 35680 + }, + { + "epoch": 1.3780454843816363, + "grad_norm": 1.483189582824707, + "learning_rate": 0.00010813287514318442, + "loss": 0.1569, + "step": 35690 + }, + { + "epoch": 1.378431599675663, + "grad_norm": 1.0666841268539429, + "learning_rate": 0.00010810713412358264, + "loss": 0.235, + "step": 35700 + }, + { + "epoch": 1.3788177149696899, + "grad_norm": 1.0299845933914185, + "learning_rate": 0.00010808139310398084, + "loss": 0.3892, + "step": 35710 + }, + { + "epoch": 1.3792038302637168, + "grad_norm": 2.3474409580230713, + "learning_rate": 0.00010805565208437907, + "loss": 0.3417, + "step": 35720 + }, + { + "epoch": 1.3795899455577436, + "grad_norm": 1.7456315755844116, + "learning_rate": 0.0001080299110647773, + "loss": 0.2538, + "step": 35730 + }, + { + "epoch": 1.3799760608517704, + "grad_norm": 2.866103410720825, + "learning_rate": 0.0001080041700451755, + "loss": 0.1619, + "step": 35740 + }, + { + "epoch": 1.3803621761457971, + "grad_norm": 0.29136407375335693, + "learning_rate": 0.00010797842902557371, + "loss": 0.2692, + "step": 35750 + }, + { + "epoch": 1.380748291439824, + "grad_norm": 0.8046161532402039, + "learning_rate": 0.00010795268800597191, + "loss": 0.1575, + "step": 35760 + }, + { + "epoch": 1.3811344067338507, + "grad_norm": 0.6451787352561951, + "learning_rate": 0.00010792694698637014, + "loss": 0.4914, + "step": 35770 + }, + { + "epoch": 1.3815205220278775, + "grad_norm": 0.7289161086082458, + "learning_rate": 0.00010790120596676836, + "loss": 0.1895, + "step": 35780 + }, + { + "epoch": 1.3819066373219044, + "grad_norm": 0.8300430178642273, + "learning_rate": 0.00010787546494716656, + "loss": 0.3663, + "step": 35790 + }, + { + "epoch": 1.382292752615931, + "grad_norm": 0.17713364958763123, + "learning_rate": 0.00010784972392756479, + "loss": 0.3189, + "step": 35800 + }, + { + "epoch": 1.382678867909958, + "grad_norm": 0.903222918510437, + "learning_rate": 0.00010782398290796299, + "loss": 0.1577, + "step": 35810 + }, + { + "epoch": 1.3830649832039847, + "grad_norm": 0.08617932349443436, + "learning_rate": 0.0001077982418883612, + "loss": 0.2872, + "step": 35820 + }, + { + "epoch": 1.3834510984980115, + "grad_norm": 1.9590895175933838, + "learning_rate": 0.0001077725008687594, + "loss": 0.2907, + "step": 35830 + }, + { + "epoch": 1.3838372137920383, + "grad_norm": 1.2515161037445068, + "learning_rate": 0.00010774675984915763, + "loss": 0.177, + "step": 35840 + }, + { + "epoch": 1.384223329086065, + "grad_norm": 1.6171292066574097, + "learning_rate": 0.00010772101882955586, + "loss": 0.2321, + "step": 35850 + }, + { + "epoch": 1.3846094443800918, + "grad_norm": 0.13681405782699585, + "learning_rate": 0.00010769527780995406, + "loss": 0.247, + "step": 35860 + }, + { + "epoch": 1.3849955596741186, + "grad_norm": 1.1949968338012695, + "learning_rate": 0.00010766953679035228, + "loss": 0.2978, + "step": 35870 + }, + { + "epoch": 1.3853816749681456, + "grad_norm": 0.17001692950725555, + "learning_rate": 0.00010764379577075048, + "loss": 
0.3241, + "step": 35880 + }, + { + "epoch": 1.3857677902621723, + "grad_norm": 0.8227952122688293, + "learning_rate": 0.0001076180547511487, + "loss": 0.3499, + "step": 35890 + }, + { + "epoch": 1.386153905556199, + "grad_norm": 1.4185482263565063, + "learning_rate": 0.0001075923137315469, + "loss": 0.3109, + "step": 35900 + }, + { + "epoch": 1.3865400208502259, + "grad_norm": 0.9533351063728333, + "learning_rate": 0.00010756657271194512, + "loss": 0.24, + "step": 35910 + }, + { + "epoch": 1.3869261361442526, + "grad_norm": 1.077789306640625, + "learning_rate": 0.00010754083169234335, + "loss": 0.2662, + "step": 35920 + }, + { + "epoch": 1.3873122514382794, + "grad_norm": 1.3528363704681396, + "learning_rate": 0.00010751509067274155, + "loss": 0.1623, + "step": 35930 + }, + { + "epoch": 1.3876983667323062, + "grad_norm": 0.25122806429862976, + "learning_rate": 0.00010748934965313978, + "loss": 0.1639, + "step": 35940 + }, + { + "epoch": 1.3880844820263332, + "grad_norm": 0.9446159601211548, + "learning_rate": 0.00010746360863353798, + "loss": 0.2035, + "step": 35950 + }, + { + "epoch": 1.38847059732036, + "grad_norm": 1.2258719205856323, + "learning_rate": 0.00010743786761393619, + "loss": 0.4247, + "step": 35960 + }, + { + "epoch": 1.3888567126143867, + "grad_norm": 3.117729663848877, + "learning_rate": 0.00010741212659433442, + "loss": 0.2569, + "step": 35970 + }, + { + "epoch": 1.3892428279084135, + "grad_norm": 0.6333123445510864, + "learning_rate": 0.00010738638557473262, + "loss": 0.1843, + "step": 35980 + }, + { + "epoch": 1.3896289432024402, + "grad_norm": 1.49360191822052, + "learning_rate": 0.00010736064455513084, + "loss": 0.2318, + "step": 35990 + }, + { + "epoch": 1.390015058496467, + "grad_norm": 3.9082753658294678, + "learning_rate": 0.00010733490353552904, + "loss": 0.2037, + "step": 36000 + }, + { + "epoch": 1.3904011737904938, + "grad_norm": 0.5687323808670044, + "learning_rate": 0.00010730916251592727, + "loss": 0.0871, + "step": 36010 + }, + { + "epoch": 1.3907872890845208, + "grad_norm": 0.3480868339538574, + "learning_rate": 0.00010728342149632547, + "loss": 0.4009, + "step": 36020 + }, + { + "epoch": 1.3911734043785473, + "grad_norm": 1.325042486190796, + "learning_rate": 0.00010725768047672368, + "loss": 0.5143, + "step": 36030 + }, + { + "epoch": 1.3915595196725743, + "grad_norm": 2.114786386489868, + "learning_rate": 0.00010723193945712191, + "loss": 0.2944, + "step": 36040 + }, + { + "epoch": 1.391945634966601, + "grad_norm": 1.716272234916687, + "learning_rate": 0.00010720619843752011, + "loss": 0.3793, + "step": 36050 + }, + { + "epoch": 1.3923317502606278, + "grad_norm": 2.057535171508789, + "learning_rate": 0.00010718045741791834, + "loss": 0.199, + "step": 36060 + }, + { + "epoch": 1.3927178655546546, + "grad_norm": 2.371248483657837, + "learning_rate": 0.00010715471639831653, + "loss": 0.3276, + "step": 36070 + }, + { + "epoch": 1.3931039808486814, + "grad_norm": 0.1784186065196991, + "learning_rate": 0.00010712897537871476, + "loss": 0.0922, + "step": 36080 + }, + { + "epoch": 1.3934900961427081, + "grad_norm": 0.7923040390014648, + "learning_rate": 0.00010710323435911298, + "loss": 0.1599, + "step": 36090 + }, + { + "epoch": 1.393876211436735, + "grad_norm": 1.9013831615447998, + "learning_rate": 0.00010707749333951117, + "loss": 0.4997, + "step": 36100 + }, + { + "epoch": 1.394262326730762, + "grad_norm": 1.8659415245056152, + "learning_rate": 0.0001070517523199094, + "loss": 0.1851, + "step": 36110 + }, + { + "epoch": 1.3946484420247887, + "grad_norm": 
2.5775375366210938, + "learning_rate": 0.0001070260113003076, + "loss": 0.2384, + "step": 36120 + }, + { + "epoch": 1.3950345573188154, + "grad_norm": 0.21943879127502441, + "learning_rate": 0.00010700027028070583, + "loss": 0.3996, + "step": 36130 + }, + { + "epoch": 1.3954206726128422, + "grad_norm": 1.1734743118286133, + "learning_rate": 0.00010697452926110403, + "loss": 0.1737, + "step": 36140 + }, + { + "epoch": 1.395806787906869, + "grad_norm": 0.697695791721344, + "learning_rate": 0.00010694878824150225, + "loss": 0.2778, + "step": 36150 + }, + { + "epoch": 1.3961929032008957, + "grad_norm": 3.2881579399108887, + "learning_rate": 0.00010692304722190047, + "loss": 0.3198, + "step": 36160 + }, + { + "epoch": 1.3965790184949225, + "grad_norm": 0.1592467725276947, + "learning_rate": 0.00010689730620229868, + "loss": 0.2591, + "step": 36170 + }, + { + "epoch": 1.3969651337889495, + "grad_norm": 0.4579029083251953, + "learning_rate": 0.0001068715651826969, + "loss": 0.2984, + "step": 36180 + }, + { + "epoch": 1.3973512490829763, + "grad_norm": 0.4286015033721924, + "learning_rate": 0.0001068458241630951, + "loss": 0.4098, + "step": 36190 + }, + { + "epoch": 1.397737364377003, + "grad_norm": 1.7824127674102783, + "learning_rate": 0.00010682008314349332, + "loss": 0.2446, + "step": 36200 + }, + { + "epoch": 1.3981234796710298, + "grad_norm": 0.8584449887275696, + "learning_rate": 0.00010679434212389152, + "loss": 0.1415, + "step": 36210 + }, + { + "epoch": 1.3985095949650566, + "grad_norm": 1.1699339151382446, + "learning_rate": 0.00010676860110428975, + "loss": 0.1706, + "step": 36220 + }, + { + "epoch": 1.3988957102590833, + "grad_norm": 2.615877389907837, + "learning_rate": 0.00010674286008468796, + "loss": 0.2605, + "step": 36230 + }, + { + "epoch": 1.39928182555311, + "grad_norm": 2.182037591934204, + "learning_rate": 0.00010671711906508617, + "loss": 0.2067, + "step": 36240 + }, + { + "epoch": 1.399667940847137, + "grad_norm": 2.183263063430786, + "learning_rate": 0.00010669137804548439, + "loss": 0.4704, + "step": 36250 + }, + { + "epoch": 1.4000540561411636, + "grad_norm": 3.505791187286377, + "learning_rate": 0.00010666563702588259, + "loss": 0.3385, + "step": 36260 + }, + { + "epoch": 1.4004401714351906, + "grad_norm": 1.2262030839920044, + "learning_rate": 0.00010663989600628081, + "loss": 0.3999, + "step": 36270 + }, + { + "epoch": 1.4008262867292174, + "grad_norm": 2.4024577140808105, + "learning_rate": 0.00010661415498667904, + "loss": 0.1984, + "step": 36280 + }, + { + "epoch": 1.4012124020232442, + "grad_norm": 0.4166090786457062, + "learning_rate": 0.00010658841396707724, + "loss": 0.2448, + "step": 36290 + }, + { + "epoch": 1.401598517317271, + "grad_norm": 0.422590047121048, + "learning_rate": 0.00010656267294747547, + "loss": 0.2464, + "step": 36300 + }, + { + "epoch": 1.4019846326112977, + "grad_norm": 2.287503480911255, + "learning_rate": 0.00010653693192787367, + "loss": 0.1621, + "step": 36310 + }, + { + "epoch": 1.4023707479053245, + "grad_norm": 0.8126110434532166, + "learning_rate": 0.00010651119090827188, + "loss": 0.2097, + "step": 36320 + }, + { + "epoch": 1.4027568631993512, + "grad_norm": 0.683016836643219, + "learning_rate": 0.00010648544988867008, + "loss": 0.1512, + "step": 36330 + }, + { + "epoch": 1.4031429784933782, + "grad_norm": 1.5477893352508545, + "learning_rate": 0.00010645970886906831, + "loss": 0.2169, + "step": 36340 + }, + { + "epoch": 1.403529093787405, + "grad_norm": 2.183166265487671, + "learning_rate": 0.00010643396784946653, + "loss": 
0.4307, + "step": 36350 + }, + { + "epoch": 1.4039152090814317, + "grad_norm": 1.5782747268676758, + "learning_rate": 0.00010640822682986473, + "loss": 0.291, + "step": 36360 + }, + { + "epoch": 1.4043013243754585, + "grad_norm": 1.2823392152786255, + "learning_rate": 0.00010638248581026296, + "loss": 0.315, + "step": 36370 + }, + { + "epoch": 1.4046874396694853, + "grad_norm": 2.0126500129699707, + "learning_rate": 0.00010635674479066116, + "loss": 0.2565, + "step": 36380 + }, + { + "epoch": 1.405073554963512, + "grad_norm": 2.490217447280884, + "learning_rate": 0.00010633100377105937, + "loss": 0.2665, + "step": 36390 + }, + { + "epoch": 1.4054596702575388, + "grad_norm": 1.2054855823516846, + "learning_rate": 0.00010630526275145757, + "loss": 0.5455, + "step": 36400 + }, + { + "epoch": 1.4058457855515658, + "grad_norm": 0.7968757748603821, + "learning_rate": 0.0001062795217318558, + "loss": 0.333, + "step": 36410 + }, + { + "epoch": 1.4062319008455926, + "grad_norm": 1.1027718782424927, + "learning_rate": 0.00010625378071225403, + "loss": 0.2019, + "step": 36420 + }, + { + "epoch": 1.4066180161396193, + "grad_norm": 1.987302541732788, + "learning_rate": 0.00010622803969265223, + "loss": 0.3159, + "step": 36430 + }, + { + "epoch": 1.407004131433646, + "grad_norm": 1.5426512956619263, + "learning_rate": 0.00010620229867305045, + "loss": 0.3759, + "step": 36440 + }, + { + "epoch": 1.4073902467276729, + "grad_norm": 0.5228156447410583, + "learning_rate": 0.00010617655765344865, + "loss": 0.2465, + "step": 36450 + }, + { + "epoch": 1.4077763620216996, + "grad_norm": 0.46890121698379517, + "learning_rate": 0.00010615081663384687, + "loss": 0.302, + "step": 36460 + }, + { + "epoch": 1.4081624773157264, + "grad_norm": 2.1506495475769043, + "learning_rate": 0.0001061250756142451, + "loss": 0.2569, + "step": 36470 + }, + { + "epoch": 1.4085485926097534, + "grad_norm": 2.307468891143799, + "learning_rate": 0.0001060993345946433, + "loss": 0.2009, + "step": 36480 + }, + { + "epoch": 1.40893470790378, + "grad_norm": 0.07033026963472366, + "learning_rate": 0.00010607359357504152, + "loss": 0.1728, + "step": 36490 + }, + { + "epoch": 1.409320823197807, + "grad_norm": 0.3262972831726074, + "learning_rate": 0.00010604785255543972, + "loss": 0.2905, + "step": 36500 + }, + { + "epoch": 1.4097069384918337, + "grad_norm": 0.755646824836731, + "learning_rate": 0.00010602211153583795, + "loss": 0.3287, + "step": 36510 + }, + { + "epoch": 1.4100930537858605, + "grad_norm": 1.1180161237716675, + "learning_rate": 0.00010599637051623615, + "loss": 0.2743, + "step": 36520 + }, + { + "epoch": 1.4104791690798872, + "grad_norm": 1.2358392477035522, + "learning_rate": 0.00010597062949663436, + "loss": 0.2672, + "step": 36530 + }, + { + "epoch": 1.410865284373914, + "grad_norm": 1.4188988208770752, + "learning_rate": 0.00010594488847703259, + "loss": 0.2552, + "step": 36540 + }, + { + "epoch": 1.4112513996679408, + "grad_norm": 1.4727978706359863, + "learning_rate": 0.00010591914745743079, + "loss": 0.2256, + "step": 36550 + }, + { + "epoch": 1.4116375149619675, + "grad_norm": 0.08973213285207748, + "learning_rate": 0.00010589340643782901, + "loss": 0.224, + "step": 36560 + }, + { + "epoch": 1.4120236302559945, + "grad_norm": 0.9915102124214172, + "learning_rate": 0.00010586766541822721, + "loss": 0.195, + "step": 36570 + }, + { + "epoch": 1.4124097455500213, + "grad_norm": 0.8524800539016724, + "learning_rate": 0.00010584192439862544, + "loss": 0.1492, + "step": 36580 + }, + { + "epoch": 1.412795860844048, + 
"grad_norm": 1.8414466381072998, + "learning_rate": 0.00010581618337902365, + "loss": 0.3412, + "step": 36590 + }, + { + "epoch": 1.4131819761380748, + "grad_norm": 2.596547842025757, + "learning_rate": 0.00010579044235942185, + "loss": 0.3469, + "step": 36600 + }, + { + "epoch": 1.4135680914321016, + "grad_norm": 0.5367813110351562, + "learning_rate": 0.00010576470133982008, + "loss": 0.2274, + "step": 36610 + }, + { + "epoch": 1.4139542067261284, + "grad_norm": 2.70858097076416, + "learning_rate": 0.00010573896032021828, + "loss": 0.171, + "step": 36620 + }, + { + "epoch": 1.4143403220201551, + "grad_norm": 1.7077667713165283, + "learning_rate": 0.00010571321930061651, + "loss": 0.2638, + "step": 36630 + }, + { + "epoch": 1.4147264373141821, + "grad_norm": 0.9189953804016113, + "learning_rate": 0.00010568747828101471, + "loss": 0.2283, + "step": 36640 + }, + { + "epoch": 1.4151125526082087, + "grad_norm": 2.0240087509155273, + "learning_rate": 0.00010566173726141293, + "loss": 0.3166, + "step": 36650 + }, + { + "epoch": 1.4154986679022357, + "grad_norm": 3.3304507732391357, + "learning_rate": 0.00010563599624181115, + "loss": 0.3046, + "step": 36660 + }, + { + "epoch": 1.4158847831962624, + "grad_norm": 1.955929160118103, + "learning_rate": 0.00010561025522220935, + "loss": 0.2653, + "step": 36670 + }, + { + "epoch": 1.4162708984902892, + "grad_norm": 2.2465381622314453, + "learning_rate": 0.00010558451420260757, + "loss": 0.3119, + "step": 36680 + }, + { + "epoch": 1.416657013784316, + "grad_norm": 2.108614921569824, + "learning_rate": 0.00010555877318300577, + "loss": 0.2872, + "step": 36690 + }, + { + "epoch": 1.4170431290783427, + "grad_norm": 1.35493004322052, + "learning_rate": 0.000105533032163404, + "loss": 0.1491, + "step": 36700 + }, + { + "epoch": 1.4174292443723697, + "grad_norm": 0.9102393984794617, + "learning_rate": 0.0001055072911438022, + "loss": 0.1565, + "step": 36710 + }, + { + "epoch": 1.4178153596663963, + "grad_norm": 1.7878345251083374, + "learning_rate": 0.00010548155012420043, + "loss": 0.4864, + "step": 36720 + }, + { + "epoch": 1.4182014749604233, + "grad_norm": 1.4333637952804565, + "learning_rate": 0.00010545580910459864, + "loss": 0.3246, + "step": 36730 + }, + { + "epoch": 1.41858759025445, + "grad_norm": 0.0966360941529274, + "learning_rate": 0.00010543006808499684, + "loss": 0.1758, + "step": 36740 + }, + { + "epoch": 1.4189737055484768, + "grad_norm": 0.30043545365333557, + "learning_rate": 0.00010540432706539507, + "loss": 0.1811, + "step": 36750 + }, + { + "epoch": 1.4193598208425036, + "grad_norm": 1.7705951929092407, + "learning_rate": 0.00010537858604579327, + "loss": 0.1824, + "step": 36760 + }, + { + "epoch": 1.4197459361365303, + "grad_norm": 1.2025195360183716, + "learning_rate": 0.0001053528450261915, + "loss": 0.1463, + "step": 36770 + }, + { + "epoch": 1.420132051430557, + "grad_norm": 0.3154304325580597, + "learning_rate": 0.00010532710400658972, + "loss": 0.1817, + "step": 36780 + }, + { + "epoch": 1.4205181667245839, + "grad_norm": 3.392331838607788, + "learning_rate": 0.00010530136298698792, + "loss": 0.2938, + "step": 36790 + }, + { + "epoch": 1.4209042820186109, + "grad_norm": 0.7256132364273071, + "learning_rate": 0.00010527562196738613, + "loss": 0.28, + "step": 36800 + }, + { + "epoch": 1.4212903973126376, + "grad_norm": 1.9007991552352905, + "learning_rate": 0.00010524988094778435, + "loss": 0.2306, + "step": 36810 + }, + { + "epoch": 1.4216765126066644, + "grad_norm": 3.21189546585083, + "learning_rate": 0.00010522413992818256, + 
"loss": 0.2945, + "step": 36820 + }, + { + "epoch": 1.4220626279006912, + "grad_norm": 0.20476600527763367, + "learning_rate": 0.00010519839890858076, + "loss": 0.3129, + "step": 36830 + }, + { + "epoch": 1.422448743194718, + "grad_norm": 1.4225107431411743, + "learning_rate": 0.00010517265788897899, + "loss": 0.2262, + "step": 36840 + }, + { + "epoch": 1.4228348584887447, + "grad_norm": 1.203728437423706, + "learning_rate": 0.00010514691686937721, + "loss": 0.176, + "step": 36850 + }, + { + "epoch": 1.4232209737827715, + "grad_norm": 0.3001759648323059, + "learning_rate": 0.00010512117584977541, + "loss": 0.218, + "step": 36860 + }, + { + "epoch": 1.4236070890767984, + "grad_norm": 2.3143389225006104, + "learning_rate": 0.00010509543483017363, + "loss": 0.1562, + "step": 36870 + }, + { + "epoch": 1.423993204370825, + "grad_norm": 0.5087364912033081, + "learning_rate": 0.00010506969381057184, + "loss": 0.1207, + "step": 36880 + }, + { + "epoch": 1.424379319664852, + "grad_norm": 1.6521960496902466, + "learning_rate": 0.00010504395279097005, + "loss": 0.4756, + "step": 36890 + }, + { + "epoch": 1.4247654349588788, + "grad_norm": 0.09236706793308258, + "learning_rate": 0.00010501821177136825, + "loss": 0.1755, + "step": 36900 + }, + { + "epoch": 1.4251515502529055, + "grad_norm": 0.2143094390630722, + "learning_rate": 0.00010499247075176648, + "loss": 0.2126, + "step": 36910 + }, + { + "epoch": 1.4255376655469323, + "grad_norm": 1.210170865058899, + "learning_rate": 0.00010496672973216471, + "loss": 0.1391, + "step": 36920 + }, + { + "epoch": 1.425923780840959, + "grad_norm": 0.6248244047164917, + "learning_rate": 0.00010494098871256291, + "loss": 0.2898, + "step": 36930 + }, + { + "epoch": 1.426309896134986, + "grad_norm": 2.1100337505340576, + "learning_rate": 0.00010491524769296113, + "loss": 0.1198, + "step": 36940 + }, + { + "epoch": 1.4266960114290126, + "grad_norm": 2.5673348903656006, + "learning_rate": 0.00010488950667335933, + "loss": 0.2284, + "step": 36950 + }, + { + "epoch": 1.4270821267230396, + "grad_norm": 1.868195652961731, + "learning_rate": 0.00010486376565375755, + "loss": 0.3738, + "step": 36960 + }, + { + "epoch": 1.4274682420170663, + "grad_norm": 1.0951671600341797, + "learning_rate": 0.00010483802463415577, + "loss": 0.1424, + "step": 36970 + }, + { + "epoch": 1.4278543573110931, + "grad_norm": 0.4791143536567688, + "learning_rate": 0.00010481228361455397, + "loss": 0.1445, + "step": 36980 + }, + { + "epoch": 1.4282404726051199, + "grad_norm": 3.2477540969848633, + "learning_rate": 0.0001047865425949522, + "loss": 0.2676, + "step": 36990 + }, + { + "epoch": 1.4286265878991466, + "grad_norm": 0.8082342147827148, + "learning_rate": 0.0001047608015753504, + "loss": 0.2295, + "step": 37000 + }, + { + "epoch": 1.4290127031931734, + "grad_norm": 2.3048954010009766, + "learning_rate": 0.00010473506055574863, + "loss": 0.1996, + "step": 37010 + }, + { + "epoch": 1.4293988184872002, + "grad_norm": 0.42648783326148987, + "learning_rate": 0.00010470931953614683, + "loss": 0.2549, + "step": 37020 + }, + { + "epoch": 1.4297849337812272, + "grad_norm": 1.8794362545013428, + "learning_rate": 0.00010468357851654504, + "loss": 0.1929, + "step": 37030 + }, + { + "epoch": 1.430171049075254, + "grad_norm": 0.9679039120674133, + "learning_rate": 0.00010465783749694327, + "loss": 0.1544, + "step": 37040 + }, + { + "epoch": 1.4305571643692807, + "grad_norm": 0.7789367437362671, + "learning_rate": 0.00010463209647734147, + "loss": 0.2251, + "step": 37050 + }, + { + "epoch": 
1.4309432796633075, + "grad_norm": 1.1705437898635864, + "learning_rate": 0.0001046063554577397, + "loss": 0.1567, + "step": 37060 + }, + { + "epoch": 1.4313293949573342, + "grad_norm": 0.13087430596351624, + "learning_rate": 0.00010458061443813789, + "loss": 0.3237, + "step": 37070 + }, + { + "epoch": 1.431715510251361, + "grad_norm": 1.0658761262893677, + "learning_rate": 0.00010455487341853612, + "loss": 0.24, + "step": 37080 + }, + { + "epoch": 1.4321016255453878, + "grad_norm": 1.686922311782837, + "learning_rate": 0.00010452913239893433, + "loss": 0.1911, + "step": 37090 + }, + { + "epoch": 1.4324877408394148, + "grad_norm": 2.010221481323242, + "learning_rate": 0.00010450339137933253, + "loss": 0.4569, + "step": 37100 + }, + { + "epoch": 1.4328738561334413, + "grad_norm": 0.8007562160491943, + "learning_rate": 0.00010447765035973076, + "loss": 0.1198, + "step": 37110 + }, + { + "epoch": 1.4332599714274683, + "grad_norm": 0.5455211997032166, + "learning_rate": 0.00010445190934012896, + "loss": 0.2179, + "step": 37120 + }, + { + "epoch": 1.433646086721495, + "grad_norm": 1.4133542776107788, + "learning_rate": 0.00010442616832052719, + "loss": 0.45, + "step": 37130 + }, + { + "epoch": 1.4340322020155218, + "grad_norm": 1.6218222379684448, + "learning_rate": 0.00010440042730092539, + "loss": 0.19, + "step": 37140 + }, + { + "epoch": 1.4344183173095486, + "grad_norm": 0.6425970196723938, + "learning_rate": 0.00010437468628132361, + "loss": 0.2345, + "step": 37150 + }, + { + "epoch": 1.4348044326035754, + "grad_norm": 0.4344918131828308, + "learning_rate": 0.00010434894526172183, + "loss": 0.2532, + "step": 37160 + }, + { + "epoch": 1.4351905478976021, + "grad_norm": 0.6628998517990112, + "learning_rate": 0.00010432320424212003, + "loss": 0.2281, + "step": 37170 + }, + { + "epoch": 1.435576663191629, + "grad_norm": 0.8695842027664185, + "learning_rate": 0.00010429746322251825, + "loss": 0.2796, + "step": 37180 + }, + { + "epoch": 1.435962778485656, + "grad_norm": 0.16481854021549225, + "learning_rate": 0.00010427172220291645, + "loss": 0.2333, + "step": 37190 + }, + { + "epoch": 1.4363488937796827, + "grad_norm": 0.7194454073905945, + "learning_rate": 0.00010424598118331468, + "loss": 0.1413, + "step": 37200 + }, + { + "epoch": 1.4367350090737094, + "grad_norm": 4.845508575439453, + "learning_rate": 0.00010422024016371288, + "loss": 0.3944, + "step": 37210 + }, + { + "epoch": 1.4371211243677362, + "grad_norm": 2.6386618614196777, + "learning_rate": 0.0001041944991441111, + "loss": 0.3037, + "step": 37220 + }, + { + "epoch": 1.437507239661763, + "grad_norm": 0.4089922308921814, + "learning_rate": 0.00010416875812450932, + "loss": 0.2985, + "step": 37230 + }, + { + "epoch": 1.4378933549557897, + "grad_norm": 1.456944465637207, + "learning_rate": 0.00010414301710490752, + "loss": 0.3031, + "step": 37240 + }, + { + "epoch": 1.4382794702498165, + "grad_norm": 1.301829218864441, + "learning_rate": 0.00010411727608530575, + "loss": 0.2578, + "step": 37250 + }, + { + "epoch": 1.4386655855438435, + "grad_norm": 1.2072703838348389, + "learning_rate": 0.00010409153506570395, + "loss": 0.391, + "step": 37260 + }, + { + "epoch": 1.4390517008378703, + "grad_norm": 0.5538531541824341, + "learning_rate": 0.00010406579404610217, + "loss": 0.1867, + "step": 37270 + }, + { + "epoch": 1.439437816131897, + "grad_norm": 1.0898678302764893, + "learning_rate": 0.0001040400530265004, + "loss": 0.2112, + "step": 37280 + }, + { + "epoch": 1.4398239314259238, + "grad_norm": 1.5769239664077759, + "learning_rate": 
0.0001040143120068986, + "loss": 0.3121, + "step": 37290 + }, + { + "epoch": 1.4402100467199506, + "grad_norm": 0.3964422345161438, + "learning_rate": 0.00010398857098729681, + "loss": 0.196, + "step": 37300 + }, + { + "epoch": 1.4405961620139773, + "grad_norm": 1.0268182754516602, + "learning_rate": 0.00010396282996769501, + "loss": 0.1785, + "step": 37310 + }, + { + "epoch": 1.440982277308004, + "grad_norm": 1.750826358795166, + "learning_rate": 0.00010393708894809324, + "loss": 0.3654, + "step": 37320 + }, + { + "epoch": 1.441368392602031, + "grad_norm": 1.1231745481491089, + "learning_rate": 0.00010391134792849144, + "loss": 0.2594, + "step": 37330 + }, + { + "epoch": 1.4417545078960576, + "grad_norm": 0.2897786498069763, + "learning_rate": 0.00010388560690888967, + "loss": 0.3483, + "step": 37340 + }, + { + "epoch": 1.4421406231900846, + "grad_norm": 0.07170752435922623, + "learning_rate": 0.00010385986588928789, + "loss": 0.1462, + "step": 37350 + }, + { + "epoch": 1.4425267384841114, + "grad_norm": 3.326099395751953, + "learning_rate": 0.00010383412486968609, + "loss": 0.3785, + "step": 37360 + }, + { + "epoch": 1.4429128537781382, + "grad_norm": 0.5063263773918152, + "learning_rate": 0.0001038083838500843, + "loss": 0.2974, + "step": 37370 + }, + { + "epoch": 1.443298969072165, + "grad_norm": 1.160088062286377, + "learning_rate": 0.00010378264283048252, + "loss": 0.3003, + "step": 37380 + }, + { + "epoch": 1.4436850843661917, + "grad_norm": 0.7043284177780151, + "learning_rate": 0.00010375690181088073, + "loss": 0.3193, + "step": 37390 + }, + { + "epoch": 1.4440711996602185, + "grad_norm": 0.2916620373725891, + "learning_rate": 0.00010373116079127896, + "loss": 0.235, + "step": 37400 + }, + { + "epoch": 1.4444573149542452, + "grad_norm": 2.1940219402313232, + "learning_rate": 0.00010370541977167716, + "loss": 0.2657, + "step": 37410 + }, + { + "epoch": 1.4448434302482722, + "grad_norm": 1.849794626235962, + "learning_rate": 0.00010367967875207539, + "loss": 0.3121, + "step": 37420 + }, + { + "epoch": 1.445229545542299, + "grad_norm": 1.1139589548110962, + "learning_rate": 0.00010365393773247359, + "loss": 0.2179, + "step": 37430 + }, + { + "epoch": 1.4456156608363258, + "grad_norm": 0.917142927646637, + "learning_rate": 0.0001036281967128718, + "loss": 0.2406, + "step": 37440 + }, + { + "epoch": 1.4460017761303525, + "grad_norm": 1.6973673105239868, + "learning_rate": 0.00010360245569327001, + "loss": 0.3236, + "step": 37450 + }, + { + "epoch": 1.4463878914243793, + "grad_norm": 0.3979933559894562, + "learning_rate": 0.00010357671467366823, + "loss": 0.1477, + "step": 37460 + }, + { + "epoch": 1.446774006718406, + "grad_norm": 0.8938451409339905, + "learning_rate": 0.00010355097365406645, + "loss": 0.116, + "step": 37470 + }, + { + "epoch": 1.4471601220124328, + "grad_norm": 1.068787693977356, + "learning_rate": 0.00010352523263446465, + "loss": 0.2342, + "step": 37480 + }, + { + "epoch": 1.4475462373064598, + "grad_norm": 1.1985591650009155, + "learning_rate": 0.00010349949161486288, + "loss": 0.2423, + "step": 37490 + }, + { + "epoch": 1.4479323526004866, + "grad_norm": 1.0383973121643066, + "learning_rate": 0.00010347375059526108, + "loss": 0.3059, + "step": 37500 + }, + { + "epoch": 1.4483184678945134, + "grad_norm": 0.26652297377586365, + "learning_rate": 0.0001034480095756593, + "loss": 0.262, + "step": 37510 + }, + { + "epoch": 1.4487045831885401, + "grad_norm": 1.8498083353042603, + "learning_rate": 0.0001034222685560575, + "loss": 0.2122, + "step": 37520 + }, + { + 
"epoch": 1.4490906984825669, + "grad_norm": 1.2896068096160889, + "learning_rate": 0.00010339652753645572, + "loss": 0.2939, + "step": 37530 + }, + { + "epoch": 1.4494768137765937, + "grad_norm": 1.423343300819397, + "learning_rate": 0.00010337078651685395, + "loss": 0.3375, + "step": 37540 + }, + { + "epoch": 1.4498629290706204, + "grad_norm": 1.8248246908187866, + "learning_rate": 0.00010334504549725215, + "loss": 0.3534, + "step": 37550 + }, + { + "epoch": 1.4502490443646474, + "grad_norm": 2.3713393211364746, + "learning_rate": 0.00010331930447765037, + "loss": 0.2795, + "step": 37560 + }, + { + "epoch": 1.450635159658674, + "grad_norm": 2.1431849002838135, + "learning_rate": 0.00010329356345804857, + "loss": 0.2503, + "step": 37570 + }, + { + "epoch": 1.451021274952701, + "grad_norm": 1.6521297693252563, + "learning_rate": 0.0001032678224384468, + "loss": 0.1789, + "step": 37580 + }, + { + "epoch": 1.4514073902467277, + "grad_norm": 1.5589754581451416, + "learning_rate": 0.00010324208141884501, + "loss": 0.3201, + "step": 37590 + }, + { + "epoch": 1.4517935055407545, + "grad_norm": 0.4339803159236908, + "learning_rate": 0.00010321634039924321, + "loss": 0.2964, + "step": 37600 + }, + { + "epoch": 1.4521796208347812, + "grad_norm": 0.30054792761802673, + "learning_rate": 0.00010319059937964144, + "loss": 0.3477, + "step": 37610 + }, + { + "epoch": 1.452565736128808, + "grad_norm": 2.2864038944244385, + "learning_rate": 0.00010316485836003964, + "loss": 0.2849, + "step": 37620 + }, + { + "epoch": 1.4529518514228348, + "grad_norm": 1.8392651081085205, + "learning_rate": 0.00010313911734043787, + "loss": 0.2089, + "step": 37630 + }, + { + "epoch": 1.4533379667168616, + "grad_norm": 1.0444347858428955, + "learning_rate": 0.00010311337632083607, + "loss": 0.2306, + "step": 37640 + }, + { + "epoch": 1.4537240820108885, + "grad_norm": 1.265647053718567, + "learning_rate": 0.00010308763530123429, + "loss": 0.2192, + "step": 37650 + }, + { + "epoch": 1.4541101973049153, + "grad_norm": 1.0464913845062256, + "learning_rate": 0.0001030618942816325, + "loss": 0.2857, + "step": 37660 + }, + { + "epoch": 1.454496312598942, + "grad_norm": 0.5931240320205688, + "learning_rate": 0.0001030361532620307, + "loss": 0.1751, + "step": 37670 + }, + { + "epoch": 1.4548824278929688, + "grad_norm": 2.0155346393585205, + "learning_rate": 0.00010301041224242893, + "loss": 0.4124, + "step": 37680 + }, + { + "epoch": 1.4552685431869956, + "grad_norm": 2.843345880508423, + "learning_rate": 0.00010298467122282713, + "loss": 0.2119, + "step": 37690 + }, + { + "epoch": 1.4556546584810224, + "grad_norm": 1.977612018585205, + "learning_rate": 0.00010295893020322536, + "loss": 0.271, + "step": 37700 + }, + { + "epoch": 1.4560407737750491, + "grad_norm": 1.2862237691879272, + "learning_rate": 0.00010293318918362356, + "loss": 0.4147, + "step": 37710 + }, + { + "epoch": 1.4564268890690761, + "grad_norm": 0.8235340118408203, + "learning_rate": 0.00010290744816402179, + "loss": 0.1918, + "step": 37720 + }, + { + "epoch": 1.456813004363103, + "grad_norm": 1.0351753234863281, + "learning_rate": 0.00010288170714442, + "loss": 0.2343, + "step": 37730 + }, + { + "epoch": 1.4571991196571297, + "grad_norm": 0.2567160725593567, + "learning_rate": 0.0001028559661248182, + "loss": 0.2664, + "step": 37740 + }, + { + "epoch": 1.4575852349511564, + "grad_norm": 2.7498669624328613, + "learning_rate": 0.00010283022510521643, + "loss": 0.2839, + "step": 37750 + }, + { + "epoch": 1.4579713502451832, + "grad_norm": 0.7125422358512878, + 
"learning_rate": 0.00010280448408561463, + "loss": 0.2411, + "step": 37760 + }, + { + "epoch": 1.45835746553921, + "grad_norm": 0.720761239528656, + "learning_rate": 0.00010277874306601285, + "loss": 0.315, + "step": 37770 + }, + { + "epoch": 1.4587435808332367, + "grad_norm": 3.188563346862793, + "learning_rate": 0.00010275300204641108, + "loss": 0.3945, + "step": 37780 + }, + { + "epoch": 1.4591296961272637, + "grad_norm": 1.23422110080719, + "learning_rate": 0.00010272726102680928, + "loss": 0.1828, + "step": 37790 + }, + { + "epoch": 1.4595158114212903, + "grad_norm": 1.1572456359863281, + "learning_rate": 0.00010270152000720749, + "loss": 0.1687, + "step": 37800 + }, + { + "epoch": 1.4599019267153173, + "grad_norm": 0.6565262079238892, + "learning_rate": 0.00010267577898760569, + "loss": 0.3145, + "step": 37810 + }, + { + "epoch": 1.460288042009344, + "grad_norm": 0.6239646673202515, + "learning_rate": 0.00010265003796800392, + "loss": 0.2381, + "step": 37820 + }, + { + "epoch": 1.4606741573033708, + "grad_norm": 0.7578912377357483, + "learning_rate": 0.00010262429694840212, + "loss": 0.2019, + "step": 37830 + }, + { + "epoch": 1.4610602725973976, + "grad_norm": 1.0978549718856812, + "learning_rate": 0.00010259855592880035, + "loss": 0.2477, + "step": 37840 + }, + { + "epoch": 1.4614463878914243, + "grad_norm": 0.3615519404411316, + "learning_rate": 0.00010257281490919857, + "loss": 0.4241, + "step": 37850 + }, + { + "epoch": 1.461832503185451, + "grad_norm": 0.10355047881603241, + "learning_rate": 0.00010254707388959677, + "loss": 0.1181, + "step": 37860 + }, + { + "epoch": 1.4622186184794779, + "grad_norm": 0.23957425355911255, + "learning_rate": 0.00010252133286999499, + "loss": 0.1675, + "step": 37870 + }, + { + "epoch": 1.4626047337735049, + "grad_norm": 1.7581062316894531, + "learning_rate": 0.00010249559185039318, + "loss": 0.241, + "step": 37880 + }, + { + "epoch": 1.4629908490675316, + "grad_norm": 1.660989761352539, + "learning_rate": 0.00010246985083079141, + "loss": 0.2099, + "step": 37890 + }, + { + "epoch": 1.4633769643615584, + "grad_norm": 1.1359142065048218, + "learning_rate": 0.00010244410981118964, + "loss": 0.3686, + "step": 37900 + }, + { + "epoch": 1.4637630796555852, + "grad_norm": 0.9489149451255798, + "learning_rate": 0.00010241836879158784, + "loss": 0.3616, + "step": 37910 + }, + { + "epoch": 1.464149194949612, + "grad_norm": 1.5209956169128418, + "learning_rate": 0.00010239262777198607, + "loss": 0.3159, + "step": 37920 + }, + { + "epoch": 1.4645353102436387, + "grad_norm": 0.848943293094635, + "learning_rate": 0.00010236688675238427, + "loss": 0.3228, + "step": 37930 + }, + { + "epoch": 1.4649214255376655, + "grad_norm": 1.1476777791976929, + "learning_rate": 0.00010234114573278248, + "loss": 0.1873, + "step": 37940 + }, + { + "epoch": 1.4653075408316925, + "grad_norm": 1.4093862771987915, + "learning_rate": 0.00010231540471318068, + "loss": 0.5554, + "step": 37950 + }, + { + "epoch": 1.465693656125719, + "grad_norm": 0.17349161207675934, + "learning_rate": 0.0001022896636935789, + "loss": 0.2992, + "step": 37960 + }, + { + "epoch": 1.466079771419746, + "grad_norm": 0.28885993361473083, + "learning_rate": 0.00010226392267397713, + "loss": 0.2618, + "step": 37970 + }, + { + "epoch": 1.4664658867137728, + "grad_norm": 1.1087830066680908, + "learning_rate": 0.00010223818165437533, + "loss": 0.2647, + "step": 37980 + }, + { + "epoch": 1.4668520020077995, + "grad_norm": 0.08338876068592072, + "learning_rate": 0.00010221244063477356, + "loss": 0.3057, + "step": 
37990 + }, + { + "epoch": 1.4672381173018263, + "grad_norm": 2.159362316131592, + "learning_rate": 0.00010218669961517176, + "loss": 0.1499, + "step": 38000 + }, + { + "epoch": 1.467624232595853, + "grad_norm": 0.8207988142967224, + "learning_rate": 0.00010216095859556997, + "loss": 0.1482, + "step": 38010 + }, + { + "epoch": 1.46801034788988, + "grad_norm": 0.5458611845970154, + "learning_rate": 0.00010213521757596818, + "loss": 0.2794, + "step": 38020 + }, + { + "epoch": 1.4683964631839066, + "grad_norm": 1.6955047845840454, + "learning_rate": 0.0001021094765563664, + "loss": 0.2627, + "step": 38030 + }, + { + "epoch": 1.4687825784779336, + "grad_norm": 0.9796440601348877, + "learning_rate": 0.00010208373553676463, + "loss": 0.1924, + "step": 38040 + }, + { + "epoch": 1.4691686937719604, + "grad_norm": 0.9906508326530457, + "learning_rate": 0.00010205799451716282, + "loss": 0.1597, + "step": 38050 + }, + { + "epoch": 1.4695548090659871, + "grad_norm": 1.8590656518936157, + "learning_rate": 0.00010203225349756105, + "loss": 0.2874, + "step": 38060 + }, + { + "epoch": 1.469940924360014, + "grad_norm": 1.6403672695159912, + "learning_rate": 0.00010200651247795925, + "loss": 0.2926, + "step": 38070 + }, + { + "epoch": 1.4703270396540407, + "grad_norm": 0.4410895109176636, + "learning_rate": 0.00010198077145835746, + "loss": 0.1368, + "step": 38080 + }, + { + "epoch": 1.4707131549480674, + "grad_norm": 2.4955286979675293, + "learning_rate": 0.00010195503043875569, + "loss": 0.3195, + "step": 38090 + }, + { + "epoch": 1.4710992702420942, + "grad_norm": 1.1799029111862183, + "learning_rate": 0.00010192928941915389, + "loss": 0.1867, + "step": 38100 + }, + { + "epoch": 1.4714853855361212, + "grad_norm": 0.7959389090538025, + "learning_rate": 0.00010190354839955212, + "loss": 0.2988, + "step": 38110 + }, + { + "epoch": 1.471871500830148, + "grad_norm": 2.7750720977783203, + "learning_rate": 0.00010187780737995032, + "loss": 0.1886, + "step": 38120 + }, + { + "epoch": 1.4722576161241747, + "grad_norm": 1.5834373235702515, + "learning_rate": 0.00010185206636034854, + "loss": 0.3542, + "step": 38130 + }, + { + "epoch": 1.4726437314182015, + "grad_norm": 1.9757747650146484, + "learning_rate": 0.00010182632534074674, + "loss": 0.302, + "step": 38140 + }, + { + "epoch": 1.4730298467122283, + "grad_norm": 1.1752204895019531, + "learning_rate": 0.00010180058432114497, + "loss": 0.3535, + "step": 38150 + }, + { + "epoch": 1.473415962006255, + "grad_norm": 0.3877789378166199, + "learning_rate": 0.00010177484330154318, + "loss": 0.2508, + "step": 38160 + }, + { + "epoch": 1.4738020773002818, + "grad_norm": 0.13749545812606812, + "learning_rate": 0.00010174910228194138, + "loss": 0.2141, + "step": 38170 + }, + { + "epoch": 1.4741881925943088, + "grad_norm": 1.3663641214370728, + "learning_rate": 0.00010172336126233961, + "loss": 0.3231, + "step": 38180 + }, + { + "epoch": 1.4745743078883353, + "grad_norm": 1.6267393827438354, + "learning_rate": 0.00010169762024273781, + "loss": 0.3233, + "step": 38190 + }, + { + "epoch": 1.4749604231823623, + "grad_norm": 0.2993789315223694, + "learning_rate": 0.00010167187922313604, + "loss": 0.28, + "step": 38200 + }, + { + "epoch": 1.475346538476389, + "grad_norm": 0.16693222522735596, + "learning_rate": 0.00010164613820353424, + "loss": 0.188, + "step": 38210 + }, + { + "epoch": 1.4757326537704158, + "grad_norm": 0.6939979791641235, + "learning_rate": 0.00010162039718393246, + "loss": 0.263, + "step": 38220 + }, + { + "epoch": 1.4761187690644426, + "grad_norm": 
0.37910985946655273, + "learning_rate": 0.00010159465616433068, + "loss": 0.1963, + "step": 38230 + }, + { + "epoch": 1.4765048843584694, + "grad_norm": 1.782188892364502, + "learning_rate": 0.00010156891514472888, + "loss": 0.3814, + "step": 38240 + }, + { + "epoch": 1.4768909996524964, + "grad_norm": 1.159278392791748, + "learning_rate": 0.0001015431741251271, + "loss": 0.2043, + "step": 38250 + }, + { + "epoch": 1.477277114946523, + "grad_norm": 1.09486985206604, + "learning_rate": 0.0001015174331055253, + "loss": 0.2128, + "step": 38260 + }, + { + "epoch": 1.47766323024055, + "grad_norm": 0.36655205488204956, + "learning_rate": 0.00010149169208592353, + "loss": 0.298, + "step": 38270 + }, + { + "epoch": 1.4780493455345767, + "grad_norm": 0.8908851742744446, + "learning_rate": 0.00010146595106632176, + "loss": 0.3707, + "step": 38280 + }, + { + "epoch": 1.4784354608286034, + "grad_norm": 0.251338928937912, + "learning_rate": 0.00010144021004671996, + "loss": 0.2495, + "step": 38290 + }, + { + "epoch": 1.4788215761226302, + "grad_norm": 1.0613712072372437, + "learning_rate": 0.00010141446902711817, + "loss": 0.2112, + "step": 38300 + }, + { + "epoch": 1.479207691416657, + "grad_norm": 1.459799885749817, + "learning_rate": 0.00010138872800751637, + "loss": 0.2595, + "step": 38310 + }, + { + "epoch": 1.4795938067106837, + "grad_norm": 2.6898603439331055, + "learning_rate": 0.0001013629869879146, + "loss": 0.2758, + "step": 38320 + }, + { + "epoch": 1.4799799220047105, + "grad_norm": 0.19628773629665375, + "learning_rate": 0.0001013372459683128, + "loss": 0.1843, + "step": 38330 + }, + { + "epoch": 1.4803660372987375, + "grad_norm": 2.0871078968048096, + "learning_rate": 0.00010131150494871102, + "loss": 0.1661, + "step": 38340 + }, + { + "epoch": 1.4807521525927643, + "grad_norm": 0.7689336538314819, + "learning_rate": 0.00010128576392910925, + "loss": 0.157, + "step": 38350 + }, + { + "epoch": 1.481138267886791, + "grad_norm": 1.4471644163131714, + "learning_rate": 0.00010126002290950745, + "loss": 0.2159, + "step": 38360 + }, + { + "epoch": 1.4815243831808178, + "grad_norm": 2.198559522628784, + "learning_rate": 0.00010123428188990566, + "loss": 0.5609, + "step": 38370 + }, + { + "epoch": 1.4819104984748446, + "grad_norm": 0.16012130677700043, + "learning_rate": 0.00010120854087030386, + "loss": 0.1979, + "step": 38380 + }, + { + "epoch": 1.4822966137688713, + "grad_norm": 1.0222225189208984, + "learning_rate": 0.00010118279985070209, + "loss": 0.1547, + "step": 38390 + }, + { + "epoch": 1.482682729062898, + "grad_norm": 2.7192416191101074, + "learning_rate": 0.00010115705883110032, + "loss": 0.4942, + "step": 38400 + }, + { + "epoch": 1.483068844356925, + "grad_norm": 1.887128472328186, + "learning_rate": 0.00010113131781149852, + "loss": 0.177, + "step": 38410 + }, + { + "epoch": 1.4834549596509516, + "grad_norm": 2.7628560066223145, + "learning_rate": 0.00010110557679189674, + "loss": 0.2931, + "step": 38420 + }, + { + "epoch": 1.4838410749449786, + "grad_norm": 0.3852572739124298, + "learning_rate": 0.00010107983577229494, + "loss": 0.3392, + "step": 38430 + }, + { + "epoch": 1.4842271902390054, + "grad_norm": 1.047448992729187, + "learning_rate": 0.00010105409475269316, + "loss": 0.3741, + "step": 38440 + }, + { + "epoch": 1.4846133055330322, + "grad_norm": 1.4930602312088013, + "learning_rate": 0.00010102835373309136, + "loss": 0.2564, + "step": 38450 + }, + { + "epoch": 1.484999420827059, + "grad_norm": 1.3012608289718628, + "learning_rate": 0.00010100261271348958, + "loss": 
0.3376, + "step": 38460 + }, + { + "epoch": 1.4853855361210857, + "grad_norm": 2.163942337036133, + "learning_rate": 0.00010097687169388781, + "loss": 0.3548, + "step": 38470 + }, + { + "epoch": 1.4857716514151125, + "grad_norm": 1.864189624786377, + "learning_rate": 0.00010095113067428601, + "loss": 0.165, + "step": 38480 + }, + { + "epoch": 1.4861577667091392, + "grad_norm": 0.5661312341690063, + "learning_rate": 0.00010092538965468424, + "loss": 0.1764, + "step": 38490 + }, + { + "epoch": 1.4865438820031662, + "grad_norm": 0.13517481088638306, + "learning_rate": 0.00010089964863508244, + "loss": 0.5223, + "step": 38500 + }, + { + "epoch": 1.486929997297193, + "grad_norm": 0.665143609046936, + "learning_rate": 0.00010087390761548065, + "loss": 0.1943, + "step": 38510 + }, + { + "epoch": 1.4873161125912198, + "grad_norm": 1.2759610414505005, + "learning_rate": 0.00010084816659587885, + "loss": 0.3023, + "step": 38520 + }, + { + "epoch": 1.4877022278852465, + "grad_norm": 1.3209573030471802, + "learning_rate": 0.00010082242557627708, + "loss": 0.1101, + "step": 38530 + }, + { + "epoch": 1.4880883431792733, + "grad_norm": 1.2501552104949951, + "learning_rate": 0.0001007966845566753, + "loss": 0.3931, + "step": 38540 + }, + { + "epoch": 1.4884744584733, + "grad_norm": 0.6862074732780457, + "learning_rate": 0.0001007709435370735, + "loss": 0.4093, + "step": 38550 + }, + { + "epoch": 1.4888605737673268, + "grad_norm": 1.90501070022583, + "learning_rate": 0.00010074520251747173, + "loss": 0.2553, + "step": 38560 + }, + { + "epoch": 1.4892466890613538, + "grad_norm": 1.6547000408172607, + "learning_rate": 0.00010071946149786993, + "loss": 0.1558, + "step": 38570 + }, + { + "epoch": 1.4896328043553806, + "grad_norm": 0.35097751021385193, + "learning_rate": 0.00010069372047826814, + "loss": 0.2253, + "step": 38580 + }, + { + "epoch": 1.4900189196494074, + "grad_norm": 0.15141837298870087, + "learning_rate": 0.00010066797945866637, + "loss": 0.2124, + "step": 38590 + }, + { + "epoch": 1.4904050349434341, + "grad_norm": 1.7070786952972412, + "learning_rate": 0.00010064223843906457, + "loss": 0.3181, + "step": 38600 + }, + { + "epoch": 1.490791150237461, + "grad_norm": 0.5400305390357971, + "learning_rate": 0.0001006164974194628, + "loss": 0.3203, + "step": 38610 + }, + { + "epoch": 1.4911772655314877, + "grad_norm": 1.6475050449371338, + "learning_rate": 0.000100590756399861, + "loss": 0.2965, + "step": 38620 + }, + { + "epoch": 1.4915633808255144, + "grad_norm": 0.21372176706790924, + "learning_rate": 0.00010056501538025922, + "loss": 0.1456, + "step": 38630 + }, + { + "epoch": 1.4919494961195414, + "grad_norm": 0.24179309606552124, + "learning_rate": 0.00010053927436065742, + "loss": 0.2077, + "step": 38640 + }, + { + "epoch": 1.492335611413568, + "grad_norm": 1.2079945802688599, + "learning_rate": 0.00010051353334105564, + "loss": 0.1554, + "step": 38650 + }, + { + "epoch": 1.492721726707595, + "grad_norm": 1.8915836811065674, + "learning_rate": 0.00010048779232145386, + "loss": 0.3815, + "step": 38660 + }, + { + "epoch": 1.4931078420016217, + "grad_norm": 1.8128750324249268, + "learning_rate": 0.00010046205130185206, + "loss": 0.1944, + "step": 38670 + }, + { + "epoch": 1.4934939572956485, + "grad_norm": 2.0955018997192383, + "learning_rate": 0.00010043631028225029, + "loss": 0.2468, + "step": 38680 + }, + { + "epoch": 1.4938800725896753, + "grad_norm": 3.0578064918518066, + "learning_rate": 0.00010041056926264849, + "loss": 0.2186, + "step": 38690 + }, + { + "epoch": 1.494266187883702, + 
"grad_norm": 0.7817699909210205, + "learning_rate": 0.00010038482824304672, + "loss": 0.1154, + "step": 38700 + }, + { + "epoch": 1.4946523031777288, + "grad_norm": 6.556485176086426, + "learning_rate": 0.00010035908722344493, + "loss": 0.4752, + "step": 38710 + }, + { + "epoch": 1.4950384184717556, + "grad_norm": 2.1970055103302, + "learning_rate": 0.00010033334620384314, + "loss": 0.3234, + "step": 38720 + }, + { + "epoch": 1.4954245337657825, + "grad_norm": 1.8929531574249268, + "learning_rate": 0.00010030760518424136, + "loss": 0.1605, + "step": 38730 + }, + { + "epoch": 1.4958106490598093, + "grad_norm": 0.3992670178413391, + "learning_rate": 0.00010028186416463956, + "loss": 0.2417, + "step": 38740 + }, + { + "epoch": 1.496196764353836, + "grad_norm": 1.7508872747421265, + "learning_rate": 0.00010025612314503778, + "loss": 0.329, + "step": 38750 + }, + { + "epoch": 1.4965828796478629, + "grad_norm": 0.24950659275054932, + "learning_rate": 0.00010023038212543598, + "loss": 0.1159, + "step": 38760 + }, + { + "epoch": 1.4969689949418896, + "grad_norm": 1.5187748670578003, + "learning_rate": 0.00010020464110583421, + "loss": 0.1827, + "step": 38770 + }, + { + "epoch": 1.4973551102359164, + "grad_norm": 1.1223959922790527, + "learning_rate": 0.00010017890008623242, + "loss": 0.1935, + "step": 38780 + }, + { + "epoch": 1.4977412255299432, + "grad_norm": 0.9355156421661377, + "learning_rate": 0.00010015315906663064, + "loss": 0.2225, + "step": 38790 + }, + { + "epoch": 1.4981273408239701, + "grad_norm": 0.4834296405315399, + "learning_rate": 0.00010012741804702885, + "loss": 0.3077, + "step": 38800 + }, + { + "epoch": 1.498513456117997, + "grad_norm": 1.0983386039733887, + "learning_rate": 0.00010010167702742705, + "loss": 0.183, + "step": 38810 + }, + { + "epoch": 1.4988995714120237, + "grad_norm": 0.8350847959518433, + "learning_rate": 0.00010007593600782528, + "loss": 0.3972, + "step": 38820 + }, + { + "epoch": 1.4992856867060504, + "grad_norm": 0.8200152516365051, + "learning_rate": 0.00010005019498822348, + "loss": 0.2043, + "step": 38830 + }, + { + "epoch": 1.4996718020000772, + "grad_norm": 0.9136185050010681, + "learning_rate": 0.0001000244539686217, + "loss": 0.1727, + "step": 38840 + }, + { + "epoch": 1.500057917294104, + "grad_norm": 0.8466988205909729, + "learning_rate": 9.999871294901992e-05, + "loss": 0.2119, + "step": 38850 + }, + { + "epoch": 1.5004440325881307, + "grad_norm": 0.4185144305229187, + "learning_rate": 9.997297192941813e-05, + "loss": 0.4046, + "step": 38860 + }, + { + "epoch": 1.5008301478821577, + "grad_norm": 2.232264518737793, + "learning_rate": 9.994723090981634e-05, + "loss": 0.304, + "step": 38870 + }, + { + "epoch": 1.5012162631761843, + "grad_norm": 0.13785889744758606, + "learning_rate": 9.992148989021456e-05, + "loss": 0.1045, + "step": 38880 + }, + { + "epoch": 1.5016023784702113, + "grad_norm": 1.6270711421966553, + "learning_rate": 9.989574887061277e-05, + "loss": 0.2791, + "step": 38890 + }, + { + "epoch": 1.501988493764238, + "grad_norm": 0.08486157655715942, + "learning_rate": 9.987000785101098e-05, + "loss": 0.178, + "step": 38900 + }, + { + "epoch": 1.5023746090582648, + "grad_norm": 1.862197995185852, + "learning_rate": 9.98442668314092e-05, + "loss": 0.2461, + "step": 38910 + }, + { + "epoch": 1.5027607243522916, + "grad_norm": 2.752070903778076, + "learning_rate": 9.981852581180741e-05, + "loss": 0.4299, + "step": 38920 + }, + { + "epoch": 1.5031468396463183, + "grad_norm": 2.08542537689209, + "learning_rate": 9.979278479220562e-05, + 
"loss": 0.2898, + "step": 38930 + }, + { + "epoch": 1.5035329549403453, + "grad_norm": 0.8629382848739624, + "learning_rate": 9.976704377260384e-05, + "loss": 0.226, + "step": 38940 + }, + { + "epoch": 1.5039190702343719, + "grad_norm": 0.5178211331367493, + "learning_rate": 9.974130275300205e-05, + "loss": 0.2444, + "step": 38950 + }, + { + "epoch": 1.5043051855283989, + "grad_norm": 0.25908491015434265, + "learning_rate": 9.971556173340026e-05, + "loss": 0.1643, + "step": 38960 + }, + { + "epoch": 1.5046913008224254, + "grad_norm": 1.1818209886550903, + "learning_rate": 9.968982071379848e-05, + "loss": 0.3187, + "step": 38970 + }, + { + "epoch": 1.5050774161164524, + "grad_norm": 0.13186976313591003, + "learning_rate": 9.966407969419669e-05, + "loss": 0.1982, + "step": 38980 + }, + { + "epoch": 1.5054635314104792, + "grad_norm": 0.18049825727939606, + "learning_rate": 9.963833867459492e-05, + "loss": 0.1288, + "step": 38990 + }, + { + "epoch": 1.505849646704506, + "grad_norm": 0.30261853337287903, + "learning_rate": 9.961259765499312e-05, + "loss": 0.1704, + "step": 39000 + }, + { + "epoch": 1.5062357619985327, + "grad_norm": 2.1437973976135254, + "learning_rate": 9.958685663539133e-05, + "loss": 0.1272, + "step": 39010 + }, + { + "epoch": 1.5066218772925595, + "grad_norm": 2.2844271659851074, + "learning_rate": 9.956111561578954e-05, + "loss": 0.1314, + "step": 39020 + }, + { + "epoch": 1.5070079925865865, + "grad_norm": 1.5845297574996948, + "learning_rate": 9.953537459618776e-05, + "loss": 0.2023, + "step": 39030 + }, + { + "epoch": 1.507394107880613, + "grad_norm": 0.7256748676300049, + "learning_rate": 9.950963357658597e-05, + "loss": 0.4165, + "step": 39040 + }, + { + "epoch": 1.50778022317464, + "grad_norm": 1.7597005367279053, + "learning_rate": 9.948389255698418e-05, + "loss": 0.4209, + "step": 39050 + }, + { + "epoch": 1.5081663384686668, + "grad_norm": 1.5487171411514282, + "learning_rate": 9.945815153738241e-05, + "loss": 0.1918, + "step": 39060 + }, + { + "epoch": 1.5085524537626935, + "grad_norm": 0.1656871736049652, + "learning_rate": 9.943241051778062e-05, + "loss": 0.3513, + "step": 39070 + }, + { + "epoch": 1.5089385690567203, + "grad_norm": 0.1451992392539978, + "learning_rate": 9.940666949817882e-05, + "loss": 0.1922, + "step": 39080 + }, + { + "epoch": 1.509324684350747, + "grad_norm": 1.1572967767715454, + "learning_rate": 9.938092847857704e-05, + "loss": 0.4263, + "step": 39090 + }, + { + "epoch": 1.509710799644774, + "grad_norm": 0.8721522092819214, + "learning_rate": 9.935518745897525e-05, + "loss": 0.3499, + "step": 39100 + }, + { + "epoch": 1.5100969149388006, + "grad_norm": 2.8486688137054443, + "learning_rate": 9.932944643937346e-05, + "loss": 0.3567, + "step": 39110 + }, + { + "epoch": 1.5104830302328276, + "grad_norm": 1.1864535808563232, + "learning_rate": 9.930370541977169e-05, + "loss": 0.2546, + "step": 39120 + }, + { + "epoch": 1.5108691455268544, + "grad_norm": 0.4636247158050537, + "learning_rate": 9.92779644001699e-05, + "loss": 0.2761, + "step": 39130 + }, + { + "epoch": 1.5112552608208811, + "grad_norm": 0.6326389908790588, + "learning_rate": 9.925222338056812e-05, + "loss": 0.252, + "step": 39140 + }, + { + "epoch": 1.511641376114908, + "grad_norm": 2.1110761165618896, + "learning_rate": 9.922648236096632e-05, + "loss": 0.5427, + "step": 39150 + }, + { + "epoch": 1.5120274914089347, + "grad_norm": 1.0927456617355347, + "learning_rate": 9.920074134136453e-05, + "loss": 0.3336, + "step": 39160 + }, + { + "epoch": 1.5124136067029617, + "grad_norm": 
3.6224989891052246, + "learning_rate": 9.917500032176274e-05, + "loss": 0.3689, + "step": 39170 + }, + { + "epoch": 1.5127997219969882, + "grad_norm": 2.422492265701294, + "learning_rate": 9.914925930216097e-05, + "loss": 0.3924, + "step": 39180 + }, + { + "epoch": 1.5131858372910152, + "grad_norm": 0.22901678085327148, + "learning_rate": 9.912351828255918e-05, + "loss": 0.3082, + "step": 39190 + }, + { + "epoch": 1.5135719525850417, + "grad_norm": 0.581598162651062, + "learning_rate": 9.90977772629574e-05, + "loss": 0.3007, + "step": 39200 + }, + { + "epoch": 1.5139580678790687, + "grad_norm": 0.7196664214134216, + "learning_rate": 9.907203624335561e-05, + "loss": 0.2425, + "step": 39210 + }, + { + "epoch": 1.5143441831730955, + "grad_norm": 2.5246760845184326, + "learning_rate": 9.904629522375381e-05, + "loss": 0.3085, + "step": 39220 + }, + { + "epoch": 1.5147302984671223, + "grad_norm": 1.8397210836410522, + "learning_rate": 9.902055420415202e-05, + "loss": 0.2388, + "step": 39230 + }, + { + "epoch": 1.515116413761149, + "grad_norm": 0.5940410494804382, + "learning_rate": 9.899481318455025e-05, + "loss": 0.1184, + "step": 39240 + }, + { + "epoch": 1.5155025290551758, + "grad_norm": 1.5379250049591064, + "learning_rate": 9.896907216494846e-05, + "loss": 0.3253, + "step": 39250 + }, + { + "epoch": 1.5158886443492028, + "grad_norm": 0.8007088303565979, + "learning_rate": 9.894333114534668e-05, + "loss": 0.3057, + "step": 39260 + }, + { + "epoch": 1.5162747596432293, + "grad_norm": 0.9321600198745728, + "learning_rate": 9.891759012574489e-05, + "loss": 0.3249, + "step": 39270 + }, + { + "epoch": 1.5166608749372563, + "grad_norm": 1.5519977807998657, + "learning_rate": 9.88918491061431e-05, + "loss": 0.3368, + "step": 39280 + }, + { + "epoch": 1.517046990231283, + "grad_norm": 0.37695613503456116, + "learning_rate": 9.88661080865413e-05, + "loss": 0.1305, + "step": 39290 + }, + { + "epoch": 1.5174331055253099, + "grad_norm": 1.9956984519958496, + "learning_rate": 9.884036706693952e-05, + "loss": 0.4854, + "step": 39300 + }, + { + "epoch": 1.5178192208193366, + "grad_norm": 1.6110823154449463, + "learning_rate": 9.881462604733774e-05, + "loss": 0.2504, + "step": 39310 + }, + { + "epoch": 1.5182053361133634, + "grad_norm": 0.41702982783317566, + "learning_rate": 9.878888502773596e-05, + "loss": 0.1532, + "step": 39320 + }, + { + "epoch": 1.5185914514073904, + "grad_norm": 2.3595950603485107, + "learning_rate": 9.876314400813417e-05, + "loss": 0.309, + "step": 39330 + }, + { + "epoch": 1.518977566701417, + "grad_norm": 1.1045889854431152, + "learning_rate": 9.873740298853238e-05, + "loss": 0.2858, + "step": 39340 + }, + { + "epoch": 1.519363681995444, + "grad_norm": 1.4641762971878052, + "learning_rate": 9.87116619689306e-05, + "loss": 0.3159, + "step": 39350 + }, + { + "epoch": 1.5197497972894707, + "grad_norm": 1.0977380275726318, + "learning_rate": 9.868592094932881e-05, + "loss": 0.229, + "step": 39360 + }, + { + "epoch": 1.5201359125834975, + "grad_norm": 0.5620018839836121, + "learning_rate": 9.866017992972702e-05, + "loss": 0.2642, + "step": 39370 + }, + { + "epoch": 1.5205220278775242, + "grad_norm": 0.36996108293533325, + "learning_rate": 9.863443891012524e-05, + "loss": 0.2314, + "step": 39380 + }, + { + "epoch": 1.520908143171551, + "grad_norm": 0.9804339408874512, + "learning_rate": 9.860869789052345e-05, + "loss": 0.2399, + "step": 39390 + }, + { + "epoch": 1.521294258465578, + "grad_norm": 0.4157778024673462, + "learning_rate": 9.858295687092166e-05, + "loss": 0.3006, + "step": 
39400 + }, + { + "epoch": 1.5216803737596045, + "grad_norm": 0.5548539161682129, + "learning_rate": 9.855721585131988e-05, + "loss": 0.1816, + "step": 39410 + }, + { + "epoch": 1.5220664890536315, + "grad_norm": 0.9476989507675171, + "learning_rate": 9.853147483171809e-05, + "loss": 0.3943, + "step": 39420 + }, + { + "epoch": 1.522452604347658, + "grad_norm": 0.5183500647544861, + "learning_rate": 9.85057338121163e-05, + "loss": 0.1629, + "step": 39430 + }, + { + "epoch": 1.522838719641685, + "grad_norm": 1.4146567583084106, + "learning_rate": 9.847999279251452e-05, + "loss": 0.3829, + "step": 39440 + }, + { + "epoch": 1.5232248349357118, + "grad_norm": 2.4880552291870117, + "learning_rate": 9.845425177291273e-05, + "loss": 0.3052, + "step": 39450 + }, + { + "epoch": 1.5236109502297386, + "grad_norm": 0.43657195568084717, + "learning_rate": 9.842851075331094e-05, + "loss": 0.1691, + "step": 39460 + }, + { + "epoch": 1.5239970655237653, + "grad_norm": 0.13798825442790985, + "learning_rate": 9.840276973370916e-05, + "loss": 0.3217, + "step": 39470 + }, + { + "epoch": 1.5243831808177921, + "grad_norm": 0.8712138533592224, + "learning_rate": 9.837702871410737e-05, + "loss": 0.2702, + "step": 39480 + }, + { + "epoch": 1.524769296111819, + "grad_norm": 1.155957579612732, + "learning_rate": 9.83512876945056e-05, + "loss": 0.2693, + "step": 39490 + }, + { + "epoch": 1.5251554114058457, + "grad_norm": 1.194615364074707, + "learning_rate": 9.83255466749038e-05, + "loss": 0.1541, + "step": 39500 + }, + { + "epoch": 1.5255415266998726, + "grad_norm": 1.8287533521652222, + "learning_rate": 9.829980565530201e-05, + "loss": 0.222, + "step": 39510 + }, + { + "epoch": 1.5259276419938994, + "grad_norm": 0.5741322636604309, + "learning_rate": 9.827406463570022e-05, + "loss": 0.2348, + "step": 39520 + }, + { + "epoch": 1.5263137572879262, + "grad_norm": 0.21659214794635773, + "learning_rate": 9.824832361609844e-05, + "loss": 0.2698, + "step": 39530 + }, + { + "epoch": 1.526699872581953, + "grad_norm": 0.953101396560669, + "learning_rate": 9.822258259649665e-05, + "loss": 0.1222, + "step": 39540 + }, + { + "epoch": 1.5270859878759797, + "grad_norm": 2.764655113220215, + "learning_rate": 9.819684157689486e-05, + "loss": 0.4735, + "step": 39550 + }, + { + "epoch": 1.5274721031700067, + "grad_norm": 0.8794540166854858, + "learning_rate": 9.817110055729309e-05, + "loss": 0.1588, + "step": 39560 + }, + { + "epoch": 1.5278582184640332, + "grad_norm": 1.0971317291259766, + "learning_rate": 9.814535953769129e-05, + "loss": 0.302, + "step": 39570 + }, + { + "epoch": 1.5282443337580602, + "grad_norm": 0.6677056550979614, + "learning_rate": 9.81196185180895e-05, + "loss": 0.3318, + "step": 39580 + }, + { + "epoch": 1.528630449052087, + "grad_norm": 1.276684045791626, + "learning_rate": 9.809387749848772e-05, + "loss": 0.2329, + "step": 39590 + }, + { + "epoch": 1.5290165643461138, + "grad_norm": 0.9192230701446533, + "learning_rate": 9.806813647888593e-05, + "loss": 0.3657, + "step": 39600 + }, + { + "epoch": 1.5294026796401405, + "grad_norm": 1.15361750125885, + "learning_rate": 9.804239545928414e-05, + "loss": 0.2462, + "step": 39610 + }, + { + "epoch": 1.5297887949341673, + "grad_norm": 1.0608477592468262, + "learning_rate": 9.801665443968237e-05, + "loss": 0.1601, + "step": 39620 + }, + { + "epoch": 1.5301749102281943, + "grad_norm": 0.07210031896829605, + "learning_rate": 9.799091342008058e-05, + "loss": 0.2454, + "step": 39630 + }, + { + "epoch": 1.5305610255222208, + "grad_norm": 0.9941250681877136, + 
"learning_rate": 9.796517240047878e-05, + "loss": 0.2783, + "step": 39640 + }, + { + "epoch": 1.5309471408162478, + "grad_norm": 1.3414831161499023, + "learning_rate": 9.7939431380877e-05, + "loss": 0.2342, + "step": 39650 + }, + { + "epoch": 1.5313332561102744, + "grad_norm": 1.5854885578155518, + "learning_rate": 9.791369036127521e-05, + "loss": 0.3444, + "step": 39660 + }, + { + "epoch": 1.5317193714043014, + "grad_norm": 1.068955659866333, + "learning_rate": 9.788794934167342e-05, + "loss": 0.2587, + "step": 39670 + }, + { + "epoch": 1.5321054866983281, + "grad_norm": 4.630382537841797, + "learning_rate": 9.786220832207165e-05, + "loss": 0.2821, + "step": 39680 + }, + { + "epoch": 1.532491601992355, + "grad_norm": 1.2920769453048706, + "learning_rate": 9.783646730246986e-05, + "loss": 0.2169, + "step": 39690 + }, + { + "epoch": 1.5328777172863817, + "grad_norm": 1.6936739683151245, + "learning_rate": 9.781072628286808e-05, + "loss": 0.2538, + "step": 39700 + }, + { + "epoch": 1.5332638325804084, + "grad_norm": 4.246237277984619, + "learning_rate": 9.778498526326629e-05, + "loss": 0.305, + "step": 39710 + }, + { + "epoch": 1.5336499478744354, + "grad_norm": 2.638601779937744, + "learning_rate": 9.775924424366449e-05, + "loss": 0.2168, + "step": 39720 + }, + { + "epoch": 1.534036063168462, + "grad_norm": 1.4180443286895752, + "learning_rate": 9.77335032240627e-05, + "loss": 0.2139, + "step": 39730 + }, + { + "epoch": 1.534422178462489, + "grad_norm": 1.251378059387207, + "learning_rate": 9.770776220446093e-05, + "loss": 0.2714, + "step": 39740 + }, + { + "epoch": 1.5348082937565157, + "grad_norm": 0.5204187035560608, + "learning_rate": 9.768202118485914e-05, + "loss": 0.2375, + "step": 39750 + }, + { + "epoch": 1.5351944090505425, + "grad_norm": 0.4135078489780426, + "learning_rate": 9.765628016525736e-05, + "loss": 0.442, + "step": 39760 + }, + { + "epoch": 1.5355805243445693, + "grad_norm": 1.9822927713394165, + "learning_rate": 9.763053914565557e-05, + "loss": 0.2238, + "step": 39770 + }, + { + "epoch": 1.535966639638596, + "grad_norm": 1.6851792335510254, + "learning_rate": 9.760479812605378e-05, + "loss": 0.4886, + "step": 39780 + }, + { + "epoch": 1.536352754932623, + "grad_norm": 1.9185349941253662, + "learning_rate": 9.757905710645198e-05, + "loss": 0.4167, + "step": 39790 + }, + { + "epoch": 1.5367388702266496, + "grad_norm": 0.2348870486021042, + "learning_rate": 9.75533160868502e-05, + "loss": 0.1849, + "step": 39800 + }, + { + "epoch": 1.5371249855206766, + "grad_norm": 0.2750287652015686, + "learning_rate": 9.752757506724842e-05, + "loss": 0.4298, + "step": 39810 + }, + { + "epoch": 1.5375111008147033, + "grad_norm": 0.28703558444976807, + "learning_rate": 9.750183404764664e-05, + "loss": 0.1431, + "step": 39820 + }, + { + "epoch": 1.53789721610873, + "grad_norm": 0.743290364742279, + "learning_rate": 9.747609302804485e-05, + "loss": 0.1993, + "step": 39830 + }, + { + "epoch": 1.5382833314027569, + "grad_norm": 0.8678677082061768, + "learning_rate": 9.745035200844306e-05, + "loss": 0.1695, + "step": 39840 + }, + { + "epoch": 1.5386694466967836, + "grad_norm": 0.18160143494606018, + "learning_rate": 9.742461098884128e-05, + "loss": 0.2615, + "step": 39850 + }, + { + "epoch": 1.5390555619908106, + "grad_norm": 0.08404600620269775, + "learning_rate": 9.739886996923947e-05, + "loss": 0.2523, + "step": 39860 + }, + { + "epoch": 1.5394416772848372, + "grad_norm": 3.393118381500244, + "learning_rate": 9.73731289496377e-05, + "loss": 0.3292, + "step": 39870 + }, + { + "epoch": 
1.5398277925788642, + "grad_norm": 0.3148004412651062, + "learning_rate": 9.734738793003591e-05, + "loss": 0.0808, + "step": 39880 + }, + { + "epoch": 1.5402139078728907, + "grad_norm": 0.8423801064491272, + "learning_rate": 9.732164691043413e-05, + "loss": 0.1088, + "step": 39890 + }, + { + "epoch": 1.5406000231669177, + "grad_norm": 2.407147169113159, + "learning_rate": 9.729590589083234e-05, + "loss": 0.286, + "step": 39900 + }, + { + "epoch": 1.5409861384609445, + "grad_norm": 0.8560749292373657, + "learning_rate": 9.727016487123055e-05, + "loss": 0.1674, + "step": 39910 + }, + { + "epoch": 1.5413722537549712, + "grad_norm": 0.9724945425987244, + "learning_rate": 9.724442385162877e-05, + "loss": 0.232, + "step": 39920 + }, + { + "epoch": 1.541758369048998, + "grad_norm": 1.6911234855651855, + "learning_rate": 9.721868283202698e-05, + "loss": 0.3182, + "step": 39930 + }, + { + "epoch": 1.5421444843430248, + "grad_norm": 0.8703460693359375, + "learning_rate": 9.71929418124252e-05, + "loss": 0.3565, + "step": 39940 + }, + { + "epoch": 1.5425305996370517, + "grad_norm": 1.4052613973617554, + "learning_rate": 9.716720079282341e-05, + "loss": 0.2037, + "step": 39950 + }, + { + "epoch": 1.5429167149310783, + "grad_norm": 0.2802957594394684, + "learning_rate": 9.714145977322162e-05, + "loss": 0.2596, + "step": 39960 + }, + { + "epoch": 1.5433028302251053, + "grad_norm": 0.22114449739456177, + "learning_rate": 9.711571875361983e-05, + "loss": 0.2302, + "step": 39970 + }, + { + "epoch": 1.543688945519132, + "grad_norm": 0.8095982074737549, + "learning_rate": 9.708997773401805e-05, + "loss": 0.2428, + "step": 39980 + }, + { + "epoch": 1.5440750608131588, + "grad_norm": 1.0990866422653198, + "learning_rate": 9.706423671441626e-05, + "loss": 0.3669, + "step": 39990 + }, + { + "epoch": 1.5444611761071856, + "grad_norm": 0.8309730291366577, + "learning_rate": 9.703849569481447e-05, + "loss": 0.2038, + "step": 40000 + }, + { + "epoch": 1.5448472914012124, + "grad_norm": 2.09492826461792, + "learning_rate": 9.701275467521269e-05, + "loss": 0.2934, + "step": 40010 + }, + { + "epoch": 1.5452334066952393, + "grad_norm": 0.33550217747688293, + "learning_rate": 9.69870136556109e-05, + "loss": 0.1582, + "step": 40020 + }, + { + "epoch": 1.5456195219892659, + "grad_norm": 1.6839581727981567, + "learning_rate": 9.696127263600911e-05, + "loss": 0.2084, + "step": 40030 + }, + { + "epoch": 1.5460056372832929, + "grad_norm": 1.293013095855713, + "learning_rate": 9.693553161640733e-05, + "loss": 0.1871, + "step": 40040 + }, + { + "epoch": 1.5463917525773194, + "grad_norm": 0.08080088347196579, + "learning_rate": 9.690979059680554e-05, + "loss": 0.4942, + "step": 40050 + }, + { + "epoch": 1.5467778678713464, + "grad_norm": 1.7121747732162476, + "learning_rate": 9.688404957720377e-05, + "loss": 0.4819, + "step": 40060 + }, + { + "epoch": 1.5471639831653732, + "grad_norm": 0.6426690816879272, + "learning_rate": 9.685830855760197e-05, + "loss": 0.2393, + "step": 40070 + }, + { + "epoch": 1.5475500984594, + "grad_norm": 1.5436782836914062, + "learning_rate": 9.683256753800018e-05, + "loss": 0.2666, + "step": 40080 + }, + { + "epoch": 1.547936213753427, + "grad_norm": 1.9316864013671875, + "learning_rate": 9.68068265183984e-05, + "loss": 0.1629, + "step": 40090 + }, + { + "epoch": 1.5483223290474535, + "grad_norm": 0.7503604292869568, + "learning_rate": 9.678108549879661e-05, + "loss": 0.2038, + "step": 40100 + }, + { + "epoch": 1.5487084443414805, + "grad_norm": 0.9826010465621948, + "learning_rate": 
9.675534447919482e-05, + "loss": 0.1219, + "step": 40110 + }, + { + "epoch": 1.549094559635507, + "grad_norm": 1.6828583478927612, + "learning_rate": 9.672960345959305e-05, + "loss": 0.2283, + "step": 40120 + }, + { + "epoch": 1.549480674929534, + "grad_norm": 1.688306212425232, + "learning_rate": 9.670386243999126e-05, + "loss": 0.1785, + "step": 40130 + }, + { + "epoch": 1.5498667902235608, + "grad_norm": 1.6113176345825195, + "learning_rate": 9.667812142038946e-05, + "loss": 0.4171, + "step": 40140 + }, + { + "epoch": 1.5502529055175875, + "grad_norm": 0.33999955654144287, + "learning_rate": 9.665238040078767e-05, + "loss": 0.1528, + "step": 40150 + }, + { + "epoch": 1.5506390208116143, + "grad_norm": 0.7666870355606079, + "learning_rate": 9.662663938118589e-05, + "loss": 0.2258, + "step": 40160 + }, + { + "epoch": 1.551025136105641, + "grad_norm": 1.123090386390686, + "learning_rate": 9.66008983615841e-05, + "loss": 0.207, + "step": 40170 + }, + { + "epoch": 1.551411251399668, + "grad_norm": 1.4472588300704956, + "learning_rate": 9.657515734198233e-05, + "loss": 0.2083, + "step": 40180 + }, + { + "epoch": 1.5517973666936946, + "grad_norm": 0.6818589568138123, + "learning_rate": 9.654941632238054e-05, + "loss": 0.1821, + "step": 40190 + }, + { + "epoch": 1.5521834819877216, + "grad_norm": 2.31847882270813, + "learning_rate": 9.652367530277875e-05, + "loss": 0.5128, + "step": 40200 + }, + { + "epoch": 1.5525695972817484, + "grad_norm": 2.492560386657715, + "learning_rate": 9.649793428317695e-05, + "loss": 0.2461, + "step": 40210 + }, + { + "epoch": 1.5529557125757751, + "grad_norm": 0.6917121410369873, + "learning_rate": 9.647219326357517e-05, + "loss": 0.2829, + "step": 40220 + }, + { + "epoch": 1.553341827869802, + "grad_norm": 1.1944900751113892, + "learning_rate": 9.644645224397338e-05, + "loss": 0.2324, + "step": 40230 + }, + { + "epoch": 1.5537279431638287, + "grad_norm": 0.12343896180391312, + "learning_rate": 9.642071122437161e-05, + "loss": 0.1258, + "step": 40240 + }, + { + "epoch": 1.5541140584578557, + "grad_norm": 1.8493744134902954, + "learning_rate": 9.639497020476982e-05, + "loss": 0.2707, + "step": 40250 + }, + { + "epoch": 1.5545001737518822, + "grad_norm": 1.4696533679962158, + "learning_rate": 9.636922918516803e-05, + "loss": 0.199, + "step": 40260 + }, + { + "epoch": 1.5548862890459092, + "grad_norm": 0.7716092467308044, + "learning_rate": 9.634348816556625e-05, + "loss": 0.2869, + "step": 40270 + }, + { + "epoch": 1.5552724043399357, + "grad_norm": 1.3153057098388672, + "learning_rate": 9.631774714596446e-05, + "loss": 0.2164, + "step": 40280 + }, + { + "epoch": 1.5556585196339627, + "grad_norm": 1.2615609169006348, + "learning_rate": 9.629200612636266e-05, + "loss": 0.3418, + "step": 40290 + }, + { + "epoch": 1.5560446349279895, + "grad_norm": 1.7999435663223267, + "learning_rate": 9.626626510676089e-05, + "loss": 0.3062, + "step": 40300 + }, + { + "epoch": 1.5564307502220163, + "grad_norm": 1.5992902517318726, + "learning_rate": 9.62405240871591e-05, + "loss": 0.3037, + "step": 40310 + }, + { + "epoch": 1.556816865516043, + "grad_norm": 0.7515442967414856, + "learning_rate": 9.621478306755731e-05, + "loss": 0.2071, + "step": 40320 + }, + { + "epoch": 1.5572029808100698, + "grad_norm": 2.5413167476654053, + "learning_rate": 9.618904204795553e-05, + "loss": 0.569, + "step": 40330 + }, + { + "epoch": 1.5575890961040968, + "grad_norm": 2.303255319595337, + "learning_rate": 9.616330102835374e-05, + "loss": 0.201, + "step": 40340 + }, + { + "epoch": 1.5579752113981233, 
+ "grad_norm": 1.0700573921203613, + "learning_rate": 9.613756000875195e-05, + "loss": 0.1675, + "step": 40350 + }, + { + "epoch": 1.5583613266921503, + "grad_norm": 1.3028898239135742, + "learning_rate": 9.611181898915015e-05, + "loss": 0.3423, + "step": 40360 + }, + { + "epoch": 1.558747441986177, + "grad_norm": 2.0213449001312256, + "learning_rate": 9.608607796954838e-05, + "loss": 0.161, + "step": 40370 + }, + { + "epoch": 1.5591335572802039, + "grad_norm": 0.15942837297916412, + "learning_rate": 9.60603369499466e-05, + "loss": 0.2067, + "step": 40380 + }, + { + "epoch": 1.5595196725742306, + "grad_norm": 0.8567324280738831, + "learning_rate": 9.603459593034481e-05, + "loss": 0.1156, + "step": 40390 + }, + { + "epoch": 1.5599057878682574, + "grad_norm": 0.598947286605835, + "learning_rate": 9.600885491074302e-05, + "loss": 0.2741, + "step": 40400 + }, + { + "epoch": 1.5602919031622844, + "grad_norm": 0.41864535212516785, + "learning_rate": 9.598311389114123e-05, + "loss": 0.0979, + "step": 40410 + }, + { + "epoch": 1.560678018456311, + "grad_norm": 1.304883599281311, + "learning_rate": 9.595737287153945e-05, + "loss": 0.1525, + "step": 40420 + }, + { + "epoch": 1.561064133750338, + "grad_norm": 1.666935682296753, + "learning_rate": 9.593163185193766e-05, + "loss": 0.3392, + "step": 40430 + }, + { + "epoch": 1.5614502490443647, + "grad_norm": 0.44640687108039856, + "learning_rate": 9.590589083233587e-05, + "loss": 0.1946, + "step": 40440 + }, + { + "epoch": 1.5618363643383915, + "grad_norm": 0.8123475313186646, + "learning_rate": 9.588014981273409e-05, + "loss": 0.2985, + "step": 40450 + }, + { + "epoch": 1.5622224796324182, + "grad_norm": 1.7682442665100098, + "learning_rate": 9.58544087931323e-05, + "loss": 0.215, + "step": 40460 + }, + { + "epoch": 1.562608594926445, + "grad_norm": 0.8545176982879639, + "learning_rate": 9.582866777353051e-05, + "loss": 0.2513, + "step": 40470 + }, + { + "epoch": 1.562994710220472, + "grad_norm": 0.10042224079370499, + "learning_rate": 9.580292675392873e-05, + "loss": 0.1792, + "step": 40480 + }, + { + "epoch": 1.5633808255144985, + "grad_norm": 1.0059372186660767, + "learning_rate": 9.577718573432694e-05, + "loss": 0.2236, + "step": 40490 + }, + { + "epoch": 1.5637669408085255, + "grad_norm": 1.2795478105545044, + "learning_rate": 9.575144471472515e-05, + "loss": 0.2728, + "step": 40500 + }, + { + "epoch": 1.564153056102552, + "grad_norm": 1.099183201789856, + "learning_rate": 9.572570369512337e-05, + "loss": 0.2855, + "step": 40510 + }, + { + "epoch": 1.564539171396579, + "grad_norm": 1.0743390321731567, + "learning_rate": 9.569996267552158e-05, + "loss": 0.2439, + "step": 40520 + }, + { + "epoch": 1.5649252866906058, + "grad_norm": 1.1840991973876953, + "learning_rate": 9.56742216559198e-05, + "loss": 0.1416, + "step": 40530 + }, + { + "epoch": 1.5653114019846326, + "grad_norm": 0.4638634920120239, + "learning_rate": 9.564848063631801e-05, + "loss": 0.254, + "step": 40540 + }, + { + "epoch": 1.5656975172786594, + "grad_norm": 3.084916830062866, + "learning_rate": 9.562273961671623e-05, + "loss": 0.2098, + "step": 40550 + }, + { + "epoch": 1.5660836325726861, + "grad_norm": 0.666347324848175, + "learning_rate": 9.559699859711443e-05, + "loss": 0.3649, + "step": 40560 + }, + { + "epoch": 1.5664697478667131, + "grad_norm": 1.1770634651184082, + "learning_rate": 9.557125757751265e-05, + "loss": 0.2069, + "step": 40570 + }, + { + "epoch": 1.5668558631607397, + "grad_norm": 0.9030371308326721, + "learning_rate": 9.554551655791086e-05, + "loss": 0.1174, + 
"step": 40580 + }, + { + "epoch": 1.5672419784547666, + "grad_norm": 0.053270868957042694, + "learning_rate": 9.551977553830907e-05, + "loss": 0.1304, + "step": 40590 + }, + { + "epoch": 1.5676280937487934, + "grad_norm": 2.4098777770996094, + "learning_rate": 9.549403451870729e-05, + "loss": 0.2654, + "step": 40600 + }, + { + "epoch": 1.5680142090428202, + "grad_norm": 0.7078404426574707, + "learning_rate": 9.54682934991055e-05, + "loss": 0.2203, + "step": 40610 + }, + { + "epoch": 1.568400324336847, + "grad_norm": 1.1814978122711182, + "learning_rate": 9.544255247950373e-05, + "loss": 0.2986, + "step": 40620 + }, + { + "epoch": 1.5687864396308737, + "grad_norm": 2.29439377784729, + "learning_rate": 9.541681145990193e-05, + "loss": 0.3143, + "step": 40630 + }, + { + "epoch": 1.5691725549249007, + "grad_norm": 0.07921203970909119, + "learning_rate": 9.539107044030014e-05, + "loss": 0.151, + "step": 40640 + }, + { + "epoch": 1.5695586702189273, + "grad_norm": 0.2058558166027069, + "learning_rate": 9.536532942069835e-05, + "loss": 0.2169, + "step": 40650 + }, + { + "epoch": 1.5699447855129542, + "grad_norm": 0.4650769531726837, + "learning_rate": 9.533958840109657e-05, + "loss": 0.136, + "step": 40660 + }, + { + "epoch": 1.570330900806981, + "grad_norm": 1.0603867769241333, + "learning_rate": 9.531384738149478e-05, + "loss": 0.2396, + "step": 40670 + }, + { + "epoch": 1.5707170161010078, + "grad_norm": 2.279155731201172, + "learning_rate": 9.528810636189301e-05, + "loss": 0.2679, + "step": 40680 + }, + { + "epoch": 1.5711031313950345, + "grad_norm": 1.8186841011047363, + "learning_rate": 9.526236534229122e-05, + "loss": 0.2592, + "step": 40690 + }, + { + "epoch": 1.5714892466890613, + "grad_norm": 1.9804840087890625, + "learning_rate": 9.523662432268943e-05, + "loss": 0.3184, + "step": 40700 + }, + { + "epoch": 1.5718753619830883, + "grad_norm": 2.3474323749542236, + "learning_rate": 9.521088330308763e-05, + "loss": 0.1892, + "step": 40710 + }, + { + "epoch": 1.5722614772771148, + "grad_norm": 0.5251644253730774, + "learning_rate": 9.518514228348585e-05, + "loss": 0.1306, + "step": 40720 + }, + { + "epoch": 1.5726475925711418, + "grad_norm": 0.9444900155067444, + "learning_rate": 9.515940126388406e-05, + "loss": 0.1888, + "step": 40730 + }, + { + "epoch": 1.5730337078651684, + "grad_norm": 1.1808644533157349, + "learning_rate": 9.513366024428229e-05, + "loss": 0.1249, + "step": 40740 + }, + { + "epoch": 1.5734198231591954, + "grad_norm": 2.716831684112549, + "learning_rate": 9.51079192246805e-05, + "loss": 0.2544, + "step": 40750 + }, + { + "epoch": 1.5738059384532221, + "grad_norm": 1.257308006286621, + "learning_rate": 9.508217820507871e-05, + "loss": 0.1292, + "step": 40760 + }, + { + "epoch": 1.574192053747249, + "grad_norm": 2.080162525177002, + "learning_rate": 9.505643718547693e-05, + "loss": 0.3814, + "step": 40770 + }, + { + "epoch": 1.5745781690412757, + "grad_norm": 1.107872486114502, + "learning_rate": 9.503069616587513e-05, + "loss": 0.2074, + "step": 40780 + }, + { + "epoch": 1.5749642843353024, + "grad_norm": 1.0006227493286133, + "learning_rate": 9.500495514627334e-05, + "loss": 0.2384, + "step": 40790 + }, + { + "epoch": 1.5753503996293294, + "grad_norm": 0.3345160484313965, + "learning_rate": 9.497921412667157e-05, + "loss": 0.181, + "step": 40800 + }, + { + "epoch": 1.575736514923356, + "grad_norm": 1.2941926717758179, + "learning_rate": 9.495347310706978e-05, + "loss": 0.2312, + "step": 40810 + }, + { + "epoch": 1.576122630217383, + "grad_norm": 1.3869580030441284, + 
"learning_rate": 9.4927732087468e-05, + "loss": 0.2107, + "step": 40820 + }, + { + "epoch": 1.5765087455114097, + "grad_norm": 1.9948787689208984, + "learning_rate": 9.49019910678662e-05, + "loss": 0.2736, + "step": 40830 + }, + { + "epoch": 1.5768948608054365, + "grad_norm": 0.22069145739078522, + "learning_rate": 9.487625004826442e-05, + "loss": 0.3173, + "step": 40840 + }, + { + "epoch": 1.5772809760994633, + "grad_norm": 0.61549311876297, + "learning_rate": 9.485050902866262e-05, + "loss": 0.1437, + "step": 40850 + }, + { + "epoch": 1.57766709139349, + "grad_norm": 0.20520836114883423, + "learning_rate": 9.482476800906083e-05, + "loss": 0.1247, + "step": 40860 + }, + { + "epoch": 1.578053206687517, + "grad_norm": 2.1745588779449463, + "learning_rate": 9.479902698945906e-05, + "loss": 0.366, + "step": 40870 + }, + { + "epoch": 1.5784393219815436, + "grad_norm": 1.1127387285232544, + "learning_rate": 9.477328596985727e-05, + "loss": 0.1202, + "step": 40880 + }, + { + "epoch": 1.5788254372755706, + "grad_norm": 0.598258376121521, + "learning_rate": 9.474754495025549e-05, + "loss": 0.3917, + "step": 40890 + }, + { + "epoch": 1.5792115525695973, + "grad_norm": 1.0376511812210083, + "learning_rate": 9.47218039306537e-05, + "loss": 0.1372, + "step": 40900 + }, + { + "epoch": 1.579597667863624, + "grad_norm": 1.7164186239242554, + "learning_rate": 9.469606291105191e-05, + "loss": 0.3143, + "step": 40910 + }, + { + "epoch": 1.5799837831576509, + "grad_norm": 1.7929835319519043, + "learning_rate": 9.467032189145013e-05, + "loss": 0.2864, + "step": 40920 + }, + { + "epoch": 1.5803698984516776, + "grad_norm": 0.551222026348114, + "learning_rate": 9.464458087184834e-05, + "loss": 0.1173, + "step": 40930 + }, + { + "epoch": 1.5807560137457046, + "grad_norm": 1.5236003398895264, + "learning_rate": 9.461883985224655e-05, + "loss": 0.2031, + "step": 40940 + }, + { + "epoch": 1.5811421290397312, + "grad_norm": 0.690719723701477, + "learning_rate": 9.459309883264477e-05, + "loss": 0.1913, + "step": 40950 + }, + { + "epoch": 1.5815282443337582, + "grad_norm": 0.4199884533882141, + "learning_rate": 9.456735781304298e-05, + "loss": 0.279, + "step": 40960 + }, + { + "epoch": 1.5819143596277847, + "grad_norm": 0.7393648028373718, + "learning_rate": 9.454161679344119e-05, + "loss": 0.2315, + "step": 40970 + }, + { + "epoch": 1.5823004749218117, + "grad_norm": 0.09372472018003464, + "learning_rate": 9.45158757738394e-05, + "loss": 0.2585, + "step": 40980 + }, + { + "epoch": 1.5826865902158385, + "grad_norm": 0.37245264649391174, + "learning_rate": 9.449013475423762e-05, + "loss": 0.239, + "step": 40990 + }, + { + "epoch": 1.5830727055098652, + "grad_norm": 1.8006244897842407, + "learning_rate": 9.446439373463583e-05, + "loss": 0.3497, + "step": 41000 + }, + { + "epoch": 1.583458820803892, + "grad_norm": 1.4421888589859009, + "learning_rate": 9.443865271503405e-05, + "loss": 0.2014, + "step": 41010 + }, + { + "epoch": 1.5838449360979188, + "grad_norm": 1.8858803510665894, + "learning_rate": 9.441291169543226e-05, + "loss": 0.1816, + "step": 41020 + }, + { + "epoch": 1.5842310513919458, + "grad_norm": 1.108573317527771, + "learning_rate": 9.438717067583047e-05, + "loss": 0.1869, + "step": 41030 + }, + { + "epoch": 1.5846171666859723, + "grad_norm": 0.34841394424438477, + "learning_rate": 9.436142965622869e-05, + "loss": 0.3034, + "step": 41040 + }, + { + "epoch": 1.5850032819799993, + "grad_norm": 7.2845964431762695, + "learning_rate": 9.433568863662691e-05, + "loss": 0.2659, + "step": 41050 + }, + { + "epoch": 
1.585389397274026, + "grad_norm": 0.4753032922744751, + "learning_rate": 9.430994761702511e-05, + "loss": 0.2539, + "step": 41060 + }, + { + "epoch": 1.5857755125680528, + "grad_norm": 1.590050458908081, + "learning_rate": 9.428420659742333e-05, + "loss": 0.2373, + "step": 41070 + }, + { + "epoch": 1.5861616278620796, + "grad_norm": 0.6979599595069885, + "learning_rate": 9.425846557782154e-05, + "loss": 0.1931, + "step": 41080 + }, + { + "epoch": 1.5865477431561064, + "grad_norm": 0.6384108066558838, + "learning_rate": 9.423272455821975e-05, + "loss": 0.2922, + "step": 41090 + }, + { + "epoch": 1.5869338584501334, + "grad_norm": 0.4752826392650604, + "learning_rate": 9.420698353861797e-05, + "loss": 0.2616, + "step": 41100 + }, + { + "epoch": 1.58731997374416, + "grad_norm": 0.7333683371543884, + "learning_rate": 9.418124251901618e-05, + "loss": 0.1568, + "step": 41110 + }, + { + "epoch": 1.5877060890381869, + "grad_norm": 0.09763200581073761, + "learning_rate": 9.41555014994144e-05, + "loss": 0.1936, + "step": 41120 + }, + { + "epoch": 1.5880922043322137, + "grad_norm": 2.4087512493133545, + "learning_rate": 9.41297604798126e-05, + "loss": 0.2408, + "step": 41130 + }, + { + "epoch": 1.5884783196262404, + "grad_norm": 2.014822244644165, + "learning_rate": 9.410401946021082e-05, + "loss": 0.2883, + "step": 41140 + }, + { + "epoch": 1.5888644349202672, + "grad_norm": 1.1764864921569824, + "learning_rate": 9.407827844060903e-05, + "loss": 0.3198, + "step": 41150 + }, + { + "epoch": 1.589250550214294, + "grad_norm": 0.807905912399292, + "learning_rate": 9.405253742100725e-05, + "loss": 0.3724, + "step": 41160 + }, + { + "epoch": 1.589636665508321, + "grad_norm": 0.9179816842079163, + "learning_rate": 9.402679640140546e-05, + "loss": 0.1429, + "step": 41170 + }, + { + "epoch": 1.5900227808023475, + "grad_norm": 1.3134746551513672, + "learning_rate": 9.400105538180369e-05, + "loss": 0.2973, + "step": 41180 + }, + { + "epoch": 1.5904088960963745, + "grad_norm": 0.08131751418113708, + "learning_rate": 9.39753143622019e-05, + "loss": 0.1627, + "step": 41190 + }, + { + "epoch": 1.590795011390401, + "grad_norm": 0.23568874597549438, + "learning_rate": 9.39495733426001e-05, + "loss": 0.3159, + "step": 41200 + }, + { + "epoch": 1.591181126684428, + "grad_norm": 1.1452207565307617, + "learning_rate": 9.392383232299831e-05, + "loss": 0.1761, + "step": 41210 + }, + { + "epoch": 1.5915672419784548, + "grad_norm": 1.5560107231140137, + "learning_rate": 9.389809130339653e-05, + "loss": 0.2173, + "step": 41220 + }, + { + "epoch": 1.5919533572724816, + "grad_norm": 1.7568162679672241, + "learning_rate": 9.387235028379474e-05, + "loss": 0.1771, + "step": 41230 + }, + { + "epoch": 1.5923394725665083, + "grad_norm": 6.235257148742676, + "learning_rate": 9.384660926419297e-05, + "loss": 0.3207, + "step": 41240 + }, + { + "epoch": 1.592725587860535, + "grad_norm": 1.2336914539337158, + "learning_rate": 9.382086824459118e-05, + "loss": 0.1691, + "step": 41250 + }, + { + "epoch": 1.593111703154562, + "grad_norm": 0.06781507283449173, + "learning_rate": 9.379512722498939e-05, + "loss": 0.1485, + "step": 41260 + }, + { + "epoch": 1.5934978184485886, + "grad_norm": 0.44770199060440063, + "learning_rate": 9.37693862053876e-05, + "loss": 0.133, + "step": 41270 + }, + { + "epoch": 1.5938839337426156, + "grad_norm": 0.3812965750694275, + "learning_rate": 9.37436451857858e-05, + "loss": 0.2305, + "step": 41280 + }, + { + "epoch": 1.5942700490366424, + "grad_norm": 1.7562031745910645, + "learning_rate": 9.371790416618402e-05, 
+ "loss": 0.265, + "step": 41290 + }, + { + "epoch": 1.5946561643306691, + "grad_norm": 0.3089204728603363, + "learning_rate": 9.369216314658225e-05, + "loss": 0.2583, + "step": 41300 + }, + { + "epoch": 1.595042279624696, + "grad_norm": 1.5295588970184326, + "learning_rate": 9.366642212698046e-05, + "loss": 0.1201, + "step": 41310 + }, + { + "epoch": 1.5954283949187227, + "grad_norm": 0.7704429626464844, + "learning_rate": 9.364068110737867e-05, + "loss": 0.1471, + "step": 41320 + }, + { + "epoch": 1.5958145102127497, + "grad_norm": 1.825547218322754, + "learning_rate": 9.361494008777689e-05, + "loss": 0.2964, + "step": 41330 + }, + { + "epoch": 1.5962006255067762, + "grad_norm": 2.4156250953674316, + "learning_rate": 9.35891990681751e-05, + "loss": 0.4332, + "step": 41340 + }, + { + "epoch": 1.5965867408008032, + "grad_norm": 0.8711603879928589, + "learning_rate": 9.35634580485733e-05, + "loss": 0.2037, + "step": 41350 + }, + { + "epoch": 1.5969728560948298, + "grad_norm": 0.5013506412506104, + "learning_rate": 9.353771702897151e-05, + "loss": 0.1852, + "step": 41360 + }, + { + "epoch": 1.5973589713888567, + "grad_norm": 1.9529963731765747, + "learning_rate": 9.351197600936974e-05, + "loss": 0.2809, + "step": 41370 + }, + { + "epoch": 1.5977450866828835, + "grad_norm": 1.7923181056976318, + "learning_rate": 9.348623498976795e-05, + "loss": 0.3918, + "step": 41380 + }, + { + "epoch": 1.5981312019769103, + "grad_norm": 0.45643335580825806, + "learning_rate": 9.346049397016617e-05, + "loss": 0.4161, + "step": 41390 + }, + { + "epoch": 1.5985173172709373, + "grad_norm": 0.4477383494377136, + "learning_rate": 9.343475295056438e-05, + "loss": 0.202, + "step": 41400 + }, + { + "epoch": 1.5989034325649638, + "grad_norm": 1.1428645849227905, + "learning_rate": 9.340901193096259e-05, + "loss": 0.2271, + "step": 41410 + }, + { + "epoch": 1.5992895478589908, + "grad_norm": 1.8324707746505737, + "learning_rate": 9.338327091136079e-05, + "loss": 0.2261, + "step": 41420 + }, + { + "epoch": 1.5996756631530173, + "grad_norm": 1.3735613822937012, + "learning_rate": 9.335752989175902e-05, + "loss": 0.2394, + "step": 41430 + }, + { + "epoch": 1.6000617784470443, + "grad_norm": 4.176051139831543, + "learning_rate": 9.333178887215723e-05, + "loss": 0.3454, + "step": 41440 + }, + { + "epoch": 1.600447893741071, + "grad_norm": 0.1931024044752121, + "learning_rate": 9.330604785255545e-05, + "loss": 0.3191, + "step": 41450 + }, + { + "epoch": 1.6008340090350979, + "grad_norm": 0.867579996585846, + "learning_rate": 9.328030683295366e-05, + "loss": 0.1951, + "step": 41460 + }, + { + "epoch": 1.6012201243291246, + "grad_norm": 0.4601798355579376, + "learning_rate": 9.325456581335187e-05, + "loss": 0.2682, + "step": 41470 + }, + { + "epoch": 1.6016062396231514, + "grad_norm": 0.5605349540710449, + "learning_rate": 9.322882479375009e-05, + "loss": 0.1963, + "step": 41480 + }, + { + "epoch": 1.6019923549171784, + "grad_norm": 0.4736683964729309, + "learning_rate": 9.32030837741483e-05, + "loss": 0.2347, + "step": 41490 + }, + { + "epoch": 1.602378470211205, + "grad_norm": 2.145426034927368, + "learning_rate": 9.317734275454651e-05, + "loss": 0.2782, + "step": 41500 + }, + { + "epoch": 1.602764585505232, + "grad_norm": 0.34660840034484863, + "learning_rate": 9.315160173494473e-05, + "loss": 0.1966, + "step": 41510 + }, + { + "epoch": 1.6031507007992587, + "grad_norm": 1.3674156665802002, + "learning_rate": 9.312586071534294e-05, + "loss": 0.426, + "step": 41520 + }, + { + "epoch": 1.6035368160932855, + "grad_norm": 
1.078046441078186, + "learning_rate": 9.310011969574115e-05, + "loss": 0.2345, + "step": 41530 + }, + { + "epoch": 1.6039229313873122, + "grad_norm": 2.233793258666992, + "learning_rate": 9.307437867613937e-05, + "loss": 0.2605, + "step": 41540 + }, + { + "epoch": 1.604309046681339, + "grad_norm": 0.9344108700752258, + "learning_rate": 9.304863765653758e-05, + "loss": 0.114, + "step": 41550 + }, + { + "epoch": 1.604695161975366, + "grad_norm": 0.29096049070358276, + "learning_rate": 9.302289663693579e-05, + "loss": 0.3319, + "step": 41560 + }, + { + "epoch": 1.6050812772693925, + "grad_norm": 0.41333675384521484, + "learning_rate": 9.2997155617334e-05, + "loss": 0.2392, + "step": 41570 + }, + { + "epoch": 1.6054673925634195, + "grad_norm": 1.3718006610870361, + "learning_rate": 9.297141459773222e-05, + "loss": 0.2304, + "step": 41580 + }, + { + "epoch": 1.605853507857446, + "grad_norm": 1.6650983095169067, + "learning_rate": 9.294567357813043e-05, + "loss": 0.3319, + "step": 41590 + }, + { + "epoch": 1.606239623151473, + "grad_norm": 3.4465317726135254, + "learning_rate": 9.291993255852865e-05, + "loss": 0.6168, + "step": 41600 + }, + { + "epoch": 1.6066257384454998, + "grad_norm": 1.2659804821014404, + "learning_rate": 9.289419153892687e-05, + "loss": 0.2291, + "step": 41610 + }, + { + "epoch": 1.6070118537395266, + "grad_norm": 0.4997158348560333, + "learning_rate": 9.286845051932509e-05, + "loss": 0.1619, + "step": 41620 + }, + { + "epoch": 1.6073979690335534, + "grad_norm": 0.87920743227005, + "learning_rate": 9.284270949972329e-05, + "loss": 0.1894, + "step": 41630 + }, + { + "epoch": 1.6077840843275801, + "grad_norm": 1.124824047088623, + "learning_rate": 9.28169684801215e-05, + "loss": 0.4755, + "step": 41640 + }, + { + "epoch": 1.6081701996216071, + "grad_norm": 0.2584467828273773, + "learning_rate": 9.279122746051971e-05, + "loss": 0.2281, + "step": 41650 + }, + { + "epoch": 1.6085563149156337, + "grad_norm": 0.6686498522758484, + "learning_rate": 9.276548644091793e-05, + "loss": 0.3244, + "step": 41660 + }, + { + "epoch": 1.6089424302096607, + "grad_norm": 1.991140365600586, + "learning_rate": 9.273974542131614e-05, + "loss": 0.2776, + "step": 41670 + }, + { + "epoch": 1.6093285455036874, + "grad_norm": 0.40963074564933777, + "learning_rate": 9.271400440171437e-05, + "loss": 0.1072, + "step": 41680 + }, + { + "epoch": 1.6097146607977142, + "grad_norm": 1.117253065109253, + "learning_rate": 9.268826338211258e-05, + "loss": 0.2386, + "step": 41690 + }, + { + "epoch": 1.610100776091741, + "grad_norm": 1.2031314373016357, + "learning_rate": 9.266252236251078e-05, + "loss": 0.1967, + "step": 41700 + }, + { + "epoch": 1.6104868913857677, + "grad_norm": 1.4460607767105103, + "learning_rate": 9.263678134290899e-05, + "loss": 0.2471, + "step": 41710 + }, + { + "epoch": 1.6108730066797947, + "grad_norm": 1.5548468828201294, + "learning_rate": 9.26110403233072e-05, + "loss": 0.1846, + "step": 41720 + }, + { + "epoch": 1.6112591219738213, + "grad_norm": 2.042555809020996, + "learning_rate": 9.258529930370542e-05, + "loss": 0.2791, + "step": 41730 + }, + { + "epoch": 1.6116452372678483, + "grad_norm": 0.19565679132938385, + "learning_rate": 9.255955828410365e-05, + "loss": 0.2138, + "step": 41740 + }, + { + "epoch": 1.612031352561875, + "grad_norm": 0.13099287450313568, + "learning_rate": 9.253381726450186e-05, + "loss": 0.239, + "step": 41750 + }, + { + "epoch": 1.6124174678559018, + "grad_norm": 1.7031620740890503, + "learning_rate": 9.250807624490007e-05, + "loss": 0.4618, + "step": 41760 
+ }, + { + "epoch": 1.6128035831499286, + "grad_norm": 1.3827983140945435, + "learning_rate": 9.248233522529827e-05, + "loss": 0.3837, + "step": 41770 + }, + { + "epoch": 1.6131896984439553, + "grad_norm": 0.29199469089508057, + "learning_rate": 9.245659420569648e-05, + "loss": 0.2054, + "step": 41780 + }, + { + "epoch": 1.6135758137379823, + "grad_norm": 0.8447692394256592, + "learning_rate": 9.24308531860947e-05, + "loss": 0.2001, + "step": 41790 + }, + { + "epoch": 1.6139619290320089, + "grad_norm": 3.2848644256591797, + "learning_rate": 9.240511216649292e-05, + "loss": 0.2177, + "step": 41800 + }, + { + "epoch": 1.6143480443260358, + "grad_norm": 0.7044230699539185, + "learning_rate": 9.237937114689114e-05, + "loss": 0.2592, + "step": 41810 + }, + { + "epoch": 1.6147341596200624, + "grad_norm": 0.6980583667755127, + "learning_rate": 9.235363012728935e-05, + "loss": 0.2133, + "step": 41820 + }, + { + "epoch": 1.6151202749140894, + "grad_norm": 0.6349130868911743, + "learning_rate": 9.232788910768756e-05, + "loss": 0.1238, + "step": 41830 + }, + { + "epoch": 1.6155063902081161, + "grad_norm": 3.3420283794403076, + "learning_rate": 9.230214808808576e-05, + "loss": 0.4264, + "step": 41840 + }, + { + "epoch": 1.615892505502143, + "grad_norm": 1.1701698303222656, + "learning_rate": 9.227640706848398e-05, + "loss": 0.2886, + "step": 41850 + }, + { + "epoch": 1.6162786207961697, + "grad_norm": 1.7839452028274536, + "learning_rate": 9.22506660488822e-05, + "loss": 0.2431, + "step": 41860 + }, + { + "epoch": 1.6166647360901965, + "grad_norm": 0.5988792181015015, + "learning_rate": 9.222492502928042e-05, + "loss": 0.2162, + "step": 41870 + }, + { + "epoch": 1.6170508513842234, + "grad_norm": 2.511929988861084, + "learning_rate": 9.219918400967863e-05, + "loss": 0.2862, + "step": 41880 + }, + { + "epoch": 1.61743696667825, + "grad_norm": 0.524932861328125, + "learning_rate": 9.217344299007684e-05, + "loss": 0.1984, + "step": 41890 + }, + { + "epoch": 1.617823081972277, + "grad_norm": 0.42112675309181213, + "learning_rate": 9.214770197047506e-05, + "loss": 0.3873, + "step": 41900 + }, + { + "epoch": 1.6182091972663037, + "grad_norm": 2.114396810531616, + "learning_rate": 9.212196095087327e-05, + "loss": 0.209, + "step": 41910 + }, + { + "epoch": 1.6185953125603305, + "grad_norm": 1.023512601852417, + "learning_rate": 9.209621993127147e-05, + "loss": 0.1849, + "step": 41920 + }, + { + "epoch": 1.6189814278543573, + "grad_norm": 0.4894075393676758, + "learning_rate": 9.20704789116697e-05, + "loss": 0.2624, + "step": 41930 + }, + { + "epoch": 1.619367543148384, + "grad_norm": 2.8259129524230957, + "learning_rate": 9.204473789206791e-05, + "loss": 0.1016, + "step": 41940 + }, + { + "epoch": 1.619753658442411, + "grad_norm": 0.30800989270210266, + "learning_rate": 9.201899687246612e-05, + "loss": 0.2056, + "step": 41950 + }, + { + "epoch": 1.6201397737364376, + "grad_norm": 0.9112114906311035, + "learning_rate": 9.199325585286434e-05, + "loss": 0.2935, + "step": 41960 + }, + { + "epoch": 1.6205258890304646, + "grad_norm": 0.9642273783683777, + "learning_rate": 9.196751483326255e-05, + "loss": 0.2782, + "step": 41970 + }, + { + "epoch": 1.6209120043244913, + "grad_norm": 1.4163227081298828, + "learning_rate": 9.194177381366076e-05, + "loss": 0.2259, + "step": 41980 + }, + { + "epoch": 1.621298119618518, + "grad_norm": 1.3390878438949585, + "learning_rate": 9.191603279405898e-05, + "loss": 0.2118, + "step": 41990 + }, + { + "epoch": 1.6216842349125449, + "grad_norm": 0.7193337082862854, + "learning_rate": 
9.189029177445719e-05, + "loss": 0.1972, + "step": 42000 + }, + { + "epoch": 1.6220703502065716, + "grad_norm": 0.9605100154876709, + "learning_rate": 9.18645507548554e-05, + "loss": 0.2109, + "step": 42010 + }, + { + "epoch": 1.6224564655005986, + "grad_norm": 1.1802191734313965, + "learning_rate": 9.183880973525362e-05, + "loss": 0.204, + "step": 42020 + }, + { + "epoch": 1.6228425807946252, + "grad_norm": 0.9422736167907715, + "learning_rate": 9.181306871565183e-05, + "loss": 0.2316, + "step": 42030 + }, + { + "epoch": 1.6232286960886522, + "grad_norm": 2.4047484397888184, + "learning_rate": 9.178732769605004e-05, + "loss": 0.3395, + "step": 42040 + }, + { + "epoch": 1.6236148113826787, + "grad_norm": 0.625637948513031, + "learning_rate": 9.176158667644826e-05, + "loss": 0.2339, + "step": 42050 + }, + { + "epoch": 1.6240009266767057, + "grad_norm": 1.628997564315796, + "learning_rate": 9.173584565684647e-05, + "loss": 0.2034, + "step": 42060 + }, + { + "epoch": 1.6243870419707325, + "grad_norm": 0.46910417079925537, + "learning_rate": 9.171010463724468e-05, + "loss": 0.2874, + "step": 42070 + }, + { + "epoch": 1.6247731572647592, + "grad_norm": 1.7646992206573486, + "learning_rate": 9.16843636176429e-05, + "loss": 0.2998, + "step": 42080 + }, + { + "epoch": 1.625159272558786, + "grad_norm": 1.0245219469070435, + "learning_rate": 9.165862259804111e-05, + "loss": 0.3796, + "step": 42090 + }, + { + "epoch": 1.6255453878528128, + "grad_norm": 1.175984501838684, + "learning_rate": 9.163288157843932e-05, + "loss": 0.2683, + "step": 42100 + }, + { + "epoch": 1.6259315031468398, + "grad_norm": 0.5364359617233276, + "learning_rate": 9.160714055883755e-05, + "loss": 0.185, + "step": 42110 + }, + { + "epoch": 1.6263176184408663, + "grad_norm": 2.991917610168457, + "learning_rate": 9.158139953923575e-05, + "loss": 0.4202, + "step": 42120 + }, + { + "epoch": 1.6267037337348933, + "grad_norm": 0.6997040510177612, + "learning_rate": 9.155565851963396e-05, + "loss": 0.2833, + "step": 42130 + }, + { + "epoch": 1.62708984902892, + "grad_norm": 1.2331346273422241, + "learning_rate": 9.152991750003218e-05, + "loss": 0.2001, + "step": 42140 + }, + { + "epoch": 1.6274759643229468, + "grad_norm": 0.5714563131332397, + "learning_rate": 9.150417648043039e-05, + "loss": 0.1649, + "step": 42150 + }, + { + "epoch": 1.6278620796169736, + "grad_norm": 0.18129512667655945, + "learning_rate": 9.14784354608286e-05, + "loss": 0.2042, + "step": 42160 + }, + { + "epoch": 1.6282481949110004, + "grad_norm": 1.0198371410369873, + "learning_rate": 9.145269444122682e-05, + "loss": 0.089, + "step": 42170 + }, + { + "epoch": 1.6286343102050274, + "grad_norm": 1.7465068101882935, + "learning_rate": 9.142695342162504e-05, + "loss": 0.2767, + "step": 42180 + }, + { + "epoch": 1.629020425499054, + "grad_norm": 1.5238598585128784, + "learning_rate": 9.140121240202324e-05, + "loss": 0.2682, + "step": 42190 + }, + { + "epoch": 1.629406540793081, + "grad_norm": 0.21305501461029053, + "learning_rate": 9.137547138242146e-05, + "loss": 0.1656, + "step": 42200 + }, + { + "epoch": 1.6297926560871077, + "grad_norm": 2.7441041469573975, + "learning_rate": 9.134973036281967e-05, + "loss": 0.1734, + "step": 42210 + }, + { + "epoch": 1.6301787713811344, + "grad_norm": 1.0751773118972778, + "learning_rate": 9.132398934321788e-05, + "loss": 0.3272, + "step": 42220 + }, + { + "epoch": 1.6305648866751612, + "grad_norm": 2.0907068252563477, + "learning_rate": 9.12982483236161e-05, + "loss": 0.2322, + "step": 42230 + }, + { + "epoch": 
1.630951001969188, + "grad_norm": 0.8943297266960144, + "learning_rate": 9.127250730401432e-05, + "loss": 0.1908, + "step": 42240 + }, + { + "epoch": 1.631337117263215, + "grad_norm": 0.30939817428588867, + "learning_rate": 9.124676628441254e-05, + "loss": 0.2398, + "step": 42250 + }, + { + "epoch": 1.6317232325572415, + "grad_norm": 0.07228074967861176, + "learning_rate": 9.122102526481075e-05, + "loss": 0.2146, + "step": 42260 + }, + { + "epoch": 1.6321093478512685, + "grad_norm": 1.6129286289215088, + "learning_rate": 9.119528424520895e-05, + "loss": 0.2989, + "step": 42270 + }, + { + "epoch": 1.632495463145295, + "grad_norm": 3.1282570362091064, + "learning_rate": 9.116954322560716e-05, + "loss": 0.2296, + "step": 42280 + }, + { + "epoch": 1.632881578439322, + "grad_norm": 1.9693125486373901, + "learning_rate": 9.114380220600538e-05, + "loss": 0.3122, + "step": 42290 + }, + { + "epoch": 1.6332676937333488, + "grad_norm": 0.7935388088226318, + "learning_rate": 9.11180611864036e-05, + "loss": 0.2738, + "step": 42300 + }, + { + "epoch": 1.6336538090273756, + "grad_norm": 0.6630697250366211, + "learning_rate": 9.109232016680182e-05, + "loss": 0.1349, + "step": 42310 + }, + { + "epoch": 1.6340399243214023, + "grad_norm": 0.6689760684967041, + "learning_rate": 9.106657914720003e-05, + "loss": 0.2881, + "step": 42320 + }, + { + "epoch": 1.634426039615429, + "grad_norm": 1.3075367212295532, + "learning_rate": 9.104083812759824e-05, + "loss": 0.3599, + "step": 42330 + }, + { + "epoch": 1.634812154909456, + "grad_norm": 0.6957948803901672, + "learning_rate": 9.101509710799644e-05, + "loss": 0.2562, + "step": 42340 + }, + { + "epoch": 1.6351982702034826, + "grad_norm": 0.6405411958694458, + "learning_rate": 9.098935608839466e-05, + "loss": 0.3024, + "step": 42350 + }, + { + "epoch": 1.6355843854975096, + "grad_norm": 1.4570382833480835, + "learning_rate": 9.096361506879288e-05, + "loss": 0.2415, + "step": 42360 + }, + { + "epoch": 1.6359705007915364, + "grad_norm": 0.28187355399131775, + "learning_rate": 9.09378740491911e-05, + "loss": 0.3213, + "step": 42370 + }, + { + "epoch": 1.6363566160855632, + "grad_norm": 0.5807194113731384, + "learning_rate": 9.091213302958931e-05, + "loss": 0.2513, + "step": 42380 + }, + { + "epoch": 1.63674273137959, + "grad_norm": 2.750338554382324, + "learning_rate": 9.088639200998752e-05, + "loss": 0.2667, + "step": 42390 + }, + { + "epoch": 1.6371288466736167, + "grad_norm": 2.9717495441436768, + "learning_rate": 9.086065099038574e-05, + "loss": 0.2719, + "step": 42400 + }, + { + "epoch": 1.6375149619676437, + "grad_norm": 0.5903140306472778, + "learning_rate": 9.083490997078394e-05, + "loss": 0.2861, + "step": 42410 + }, + { + "epoch": 1.6379010772616702, + "grad_norm": 0.5791400671005249, + "learning_rate": 9.080916895118215e-05, + "loss": 0.1999, + "step": 42420 + }, + { + "epoch": 1.6382871925556972, + "grad_norm": 0.5550700426101685, + "learning_rate": 9.078342793158038e-05, + "loss": 0.2856, + "step": 42430 + }, + { + "epoch": 1.638673307849724, + "grad_norm": 0.1384456902742386, + "learning_rate": 9.075768691197859e-05, + "loss": 0.1379, + "step": 42440 + }, + { + "epoch": 1.6390594231437507, + "grad_norm": 0.4201198220252991, + "learning_rate": 9.07319458923768e-05, + "loss": 0.2401, + "step": 42450 + }, + { + "epoch": 1.6394455384377775, + "grad_norm": 0.4227651059627533, + "learning_rate": 9.070620487277502e-05, + "loss": 0.1251, + "step": 42460 + }, + { + "epoch": 1.6398316537318043, + "grad_norm": 0.5025180578231812, + "learning_rate": 
9.068046385317323e-05, + "loss": 0.1911, + "step": 42470 + }, + { + "epoch": 1.6402177690258313, + "grad_norm": 1.5064163208007812, + "learning_rate": 9.065472283357144e-05, + "loss": 0.2291, + "step": 42480 + }, + { + "epoch": 1.6406038843198578, + "grad_norm": 1.0776429176330566, + "learning_rate": 9.062898181396966e-05, + "loss": 0.1429, + "step": 42490 + }, + { + "epoch": 1.6409899996138848, + "grad_norm": 1.409090280532837, + "learning_rate": 9.060324079436787e-05, + "loss": 0.1768, + "step": 42500 + }, + { + "epoch": 1.6413761149079114, + "grad_norm": 1.4606170654296875, + "learning_rate": 9.057749977476608e-05, + "loss": 0.1657, + "step": 42510 + }, + { + "epoch": 1.6417622302019383, + "grad_norm": 0.1410249024629593, + "learning_rate": 9.05517587551643e-05, + "loss": 0.1768, + "step": 42520 + }, + { + "epoch": 1.6421483454959651, + "grad_norm": 1.3056964874267578, + "learning_rate": 9.052601773556251e-05, + "loss": 0.2001, + "step": 42530 + }, + { + "epoch": 1.6425344607899919, + "grad_norm": 0.7286831140518188, + "learning_rate": 9.050027671596072e-05, + "loss": 0.2181, + "step": 42540 + }, + { + "epoch": 1.6429205760840186, + "grad_norm": 0.7450721263885498, + "learning_rate": 9.047453569635894e-05, + "loss": 0.1961, + "step": 42550 + }, + { + "epoch": 1.6433066913780454, + "grad_norm": 0.3484252691268921, + "learning_rate": 9.044879467675715e-05, + "loss": 0.1435, + "step": 42560 + }, + { + "epoch": 1.6436928066720724, + "grad_norm": 1.4743714332580566, + "learning_rate": 9.042305365715536e-05, + "loss": 0.218, + "step": 42570 + }, + { + "epoch": 1.644078921966099, + "grad_norm": 1.9126007556915283, + "learning_rate": 9.039731263755358e-05, + "loss": 0.3114, + "step": 42580 + }, + { + "epoch": 1.644465037260126, + "grad_norm": 0.1249854639172554, + "learning_rate": 9.037157161795179e-05, + "loss": 0.1675, + "step": 42590 + }, + { + "epoch": 1.6448511525541527, + "grad_norm": 0.948403537273407, + "learning_rate": 9.034583059835e-05, + "loss": 0.1956, + "step": 42600 + }, + { + "epoch": 1.6452372678481795, + "grad_norm": 1.6507424116134644, + "learning_rate": 9.032008957874823e-05, + "loss": 0.2754, + "step": 42610 + }, + { + "epoch": 1.6456233831422062, + "grad_norm": 2.7686564922332764, + "learning_rate": 9.029434855914643e-05, + "loss": 0.3409, + "step": 42620 + }, + { + "epoch": 1.646009498436233, + "grad_norm": 2.7112228870391846, + "learning_rate": 9.026860753954464e-05, + "loss": 0.3626, + "step": 42630 + }, + { + "epoch": 1.64639561373026, + "grad_norm": 1.400976538658142, + "learning_rate": 9.024286651994286e-05, + "loss": 0.199, + "step": 42640 + }, + { + "epoch": 1.6467817290242865, + "grad_norm": 1.4591180086135864, + "learning_rate": 9.021712550034107e-05, + "loss": 0.4385, + "step": 42650 + }, + { + "epoch": 1.6471678443183135, + "grad_norm": 1.386496901512146, + "learning_rate": 9.019138448073928e-05, + "loss": 0.1433, + "step": 42660 + }, + { + "epoch": 1.64755395961234, + "grad_norm": 2.054412841796875, + "learning_rate": 9.01656434611375e-05, + "loss": 0.1602, + "step": 42670 + }, + { + "epoch": 1.647940074906367, + "grad_norm": 1.5373504161834717, + "learning_rate": 9.013990244153572e-05, + "loss": 0.1329, + "step": 42680 + }, + { + "epoch": 1.6483261902003938, + "grad_norm": 2.131410598754883, + "learning_rate": 9.011416142193392e-05, + "loss": 0.2858, + "step": 42690 + }, + { + "epoch": 1.6487123054944206, + "grad_norm": 1.5870622396469116, + "learning_rate": 9.008842040233214e-05, + "loss": 0.414, + "step": 42700 + }, + { + "epoch": 1.6490984207884476, + 
"grad_norm": 0.46856650710105896, + "learning_rate": 9.006267938273035e-05, + "loss": 0.1262, + "step": 42710 + }, + { + "epoch": 1.6494845360824741, + "grad_norm": 1.368762731552124, + "learning_rate": 9.003693836312856e-05, + "loss": 0.2341, + "step": 42720 + }, + { + "epoch": 1.6498706513765011, + "grad_norm": 2.3937511444091797, + "learning_rate": 9.001119734352678e-05, + "loss": 0.215, + "step": 42730 + }, + { + "epoch": 1.6502567666705277, + "grad_norm": 0.9359129071235657, + "learning_rate": 8.9985456323925e-05, + "loss": 0.2173, + "step": 42740 + }, + { + "epoch": 1.6506428819645547, + "grad_norm": 1.381408452987671, + "learning_rate": 8.995971530432322e-05, + "loss": 0.1671, + "step": 42750 + }, + { + "epoch": 1.6510289972585814, + "grad_norm": 0.5645018815994263, + "learning_rate": 8.993397428472142e-05, + "loss": 0.1943, + "step": 42760 + }, + { + "epoch": 1.6514151125526082, + "grad_norm": 1.1989009380340576, + "learning_rate": 8.990823326511963e-05, + "loss": 0.1845, + "step": 42770 + }, + { + "epoch": 1.651801227846635, + "grad_norm": 2.2969398498535156, + "learning_rate": 8.988249224551784e-05, + "loss": 0.3469, + "step": 42780 + }, + { + "epoch": 1.6521873431406617, + "grad_norm": 1.931502342224121, + "learning_rate": 8.985675122591606e-05, + "loss": 0.3857, + "step": 42790 + }, + { + "epoch": 1.6525734584346887, + "grad_norm": 1.2291436195373535, + "learning_rate": 8.983101020631428e-05, + "loss": 0.1948, + "step": 42800 + }, + { + "epoch": 1.6529595737287153, + "grad_norm": 0.3344796597957611, + "learning_rate": 8.98052691867125e-05, + "loss": 0.2707, + "step": 42810 + }, + { + "epoch": 1.6533456890227423, + "grad_norm": 1.9174389839172363, + "learning_rate": 8.977952816711071e-05, + "loss": 0.2318, + "step": 42820 + }, + { + "epoch": 1.653731804316769, + "grad_norm": 0.49493780732154846, + "learning_rate": 8.975378714750892e-05, + "loss": 0.2615, + "step": 42830 + }, + { + "epoch": 1.6541179196107958, + "grad_norm": 0.8160524964332581, + "learning_rate": 8.972804612790712e-05, + "loss": 0.1837, + "step": 42840 + }, + { + "epoch": 1.6545040349048226, + "grad_norm": 0.571782112121582, + "learning_rate": 8.970230510830534e-05, + "loss": 0.1451, + "step": 42850 + }, + { + "epoch": 1.6548901501988493, + "grad_norm": 2.636889934539795, + "learning_rate": 8.967656408870356e-05, + "loss": 0.348, + "step": 42860 + }, + { + "epoch": 1.6552762654928763, + "grad_norm": 0.4561298191547394, + "learning_rate": 8.965082306910178e-05, + "loss": 0.3301, + "step": 42870 + }, + { + "epoch": 1.6556623807869029, + "grad_norm": 0.7691421508789062, + "learning_rate": 8.962508204949999e-05, + "loss": 0.231, + "step": 42880 + }, + { + "epoch": 1.6560484960809299, + "grad_norm": 0.2073112428188324, + "learning_rate": 8.95993410298982e-05, + "loss": 0.1358, + "step": 42890 + }, + { + "epoch": 1.6564346113749564, + "grad_norm": 0.35970741510391235, + "learning_rate": 8.957360001029642e-05, + "loss": 0.3412, + "step": 42900 + }, + { + "epoch": 1.6568207266689834, + "grad_norm": 0.6349666118621826, + "learning_rate": 8.954785899069462e-05, + "loss": 0.3279, + "step": 42910 + }, + { + "epoch": 1.6572068419630102, + "grad_norm": 1.6498395204544067, + "learning_rate": 8.952211797109284e-05, + "loss": 0.3195, + "step": 42920 + }, + { + "epoch": 1.657592957257037, + "grad_norm": 1.200462818145752, + "learning_rate": 8.949637695149106e-05, + "loss": 0.1533, + "step": 42930 + }, + { + "epoch": 1.6579790725510637, + "grad_norm": 3.063417673110962, + "learning_rate": 8.947063593188927e-05, + "loss": 0.1553, + 
"step": 42940 + }, + { + "epoch": 1.6583651878450905, + "grad_norm": 2.632843494415283, + "learning_rate": 8.944489491228748e-05, + "loss": 0.1819, + "step": 42950 + }, + { + "epoch": 1.6587513031391175, + "grad_norm": 1.2199878692626953, + "learning_rate": 8.94191538926857e-05, + "loss": 0.3631, + "step": 42960 + }, + { + "epoch": 1.659137418433144, + "grad_norm": 4.311095237731934, + "learning_rate": 8.939341287308391e-05, + "loss": 0.2747, + "step": 42970 + }, + { + "epoch": 1.659523533727171, + "grad_norm": 1.0289263725280762, + "learning_rate": 8.936767185348211e-05, + "loss": 0.225, + "step": 42980 + }, + { + "epoch": 1.6599096490211978, + "grad_norm": 0.17542269825935364, + "learning_rate": 8.934193083388034e-05, + "loss": 0.3562, + "step": 42990 + }, + { + "epoch": 1.6602957643152245, + "grad_norm": 1.6451623439788818, + "learning_rate": 8.931618981427855e-05, + "loss": 0.3318, + "step": 43000 + }, + { + "epoch": 1.6606818796092513, + "grad_norm": 0.6164776682853699, + "learning_rate": 8.929044879467676e-05, + "loss": 0.272, + "step": 43010 + }, + { + "epoch": 1.661067994903278, + "grad_norm": 0.8627731800079346, + "learning_rate": 8.926470777507498e-05, + "loss": 0.2037, + "step": 43020 + }, + { + "epoch": 1.661454110197305, + "grad_norm": 0.9663155674934387, + "learning_rate": 8.923896675547319e-05, + "loss": 0.2291, + "step": 43030 + }, + { + "epoch": 1.6618402254913316, + "grad_norm": 1.909785509109497, + "learning_rate": 8.92132257358714e-05, + "loss": 0.357, + "step": 43040 + }, + { + "epoch": 1.6622263407853586, + "grad_norm": 1.4348317384719849, + "learning_rate": 8.918748471626962e-05, + "loss": 0.2494, + "step": 43050 + }, + { + "epoch": 1.6626124560793853, + "grad_norm": 1.8088570833206177, + "learning_rate": 8.916174369666783e-05, + "loss": 0.1663, + "step": 43060 + }, + { + "epoch": 1.6629985713734121, + "grad_norm": 0.4020337462425232, + "learning_rate": 8.913600267706604e-05, + "loss": 0.251, + "step": 43070 + }, + { + "epoch": 1.6633846866674389, + "grad_norm": 1.4719258546829224, + "learning_rate": 8.911026165746426e-05, + "loss": 0.3267, + "step": 43080 + }, + { + "epoch": 1.6637708019614657, + "grad_norm": 0.2420385479927063, + "learning_rate": 8.908452063786247e-05, + "loss": 0.2289, + "step": 43090 + }, + { + "epoch": 1.6641569172554926, + "grad_norm": 0.4697989225387573, + "learning_rate": 8.905877961826068e-05, + "loss": 0.1597, + "step": 43100 + }, + { + "epoch": 1.6645430325495192, + "grad_norm": 0.4061245918273926, + "learning_rate": 8.90330385986589e-05, + "loss": 0.2299, + "step": 43110 + }, + { + "epoch": 1.6649291478435462, + "grad_norm": 1.4116615056991577, + "learning_rate": 8.900729757905711e-05, + "loss": 0.251, + "step": 43120 + }, + { + "epoch": 1.6653152631375727, + "grad_norm": 0.2808239161968231, + "learning_rate": 8.898155655945532e-05, + "loss": 0.2204, + "step": 43130 + }, + { + "epoch": 1.6657013784315997, + "grad_norm": 0.09532297402620316, + "learning_rate": 8.895581553985354e-05, + "loss": 0.2076, + "step": 43140 + }, + { + "epoch": 1.6660874937256265, + "grad_norm": 1.6641709804534912, + "learning_rate": 8.893007452025175e-05, + "loss": 0.1697, + "step": 43150 + }, + { + "epoch": 1.6664736090196532, + "grad_norm": 0.9248488545417786, + "learning_rate": 8.890433350064996e-05, + "loss": 0.4659, + "step": 43160 + }, + { + "epoch": 1.66685972431368, + "grad_norm": 2.1415629386901855, + "learning_rate": 8.887859248104819e-05, + "loss": 0.3056, + "step": 43170 + }, + { + "epoch": 1.6672458396077068, + "grad_norm": 0.39359986782073975, + 
"learning_rate": 8.885285146144639e-05, + "loss": 0.2799, + "step": 43180 + }, + { + "epoch": 1.6676319549017338, + "grad_norm": 0.09023096412420273, + "learning_rate": 8.88271104418446e-05, + "loss": 0.2777, + "step": 43190 + }, + { + "epoch": 1.6680180701957603, + "grad_norm": 1.8555763959884644, + "learning_rate": 8.880136942224282e-05, + "loss": 0.3152, + "step": 43200 + }, + { + "epoch": 1.6684041854897873, + "grad_norm": 0.18823792040348053, + "learning_rate": 8.877562840264103e-05, + "loss": 0.1625, + "step": 43210 + }, + { + "epoch": 1.668790300783814, + "grad_norm": 1.5633597373962402, + "learning_rate": 8.874988738303924e-05, + "loss": 0.3041, + "step": 43220 + }, + { + "epoch": 1.6691764160778408, + "grad_norm": 1.6646497249603271, + "learning_rate": 8.872414636343746e-05, + "loss": 0.3719, + "step": 43230 + }, + { + "epoch": 1.6695625313718676, + "grad_norm": 1.569008469581604, + "learning_rate": 8.869840534383568e-05, + "loss": 0.1797, + "step": 43240 + }, + { + "epoch": 1.6699486466658944, + "grad_norm": 0.44778671860694885, + "learning_rate": 8.86726643242339e-05, + "loss": 0.1192, + "step": 43250 + }, + { + "epoch": 1.6703347619599214, + "grad_norm": 1.936880350112915, + "learning_rate": 8.86469233046321e-05, + "loss": 0.262, + "step": 43260 + }, + { + "epoch": 1.670720877253948, + "grad_norm": 2.0454766750335693, + "learning_rate": 8.862118228503031e-05, + "loss": 0.125, + "step": 43270 + }, + { + "epoch": 1.671106992547975, + "grad_norm": 2.3878109455108643, + "learning_rate": 8.859544126542852e-05, + "loss": 0.2755, + "step": 43280 + }, + { + "epoch": 1.6714931078420017, + "grad_norm": 2.1281206607818604, + "learning_rate": 8.856970024582674e-05, + "loss": 0.3056, + "step": 43290 + }, + { + "epoch": 1.6718792231360284, + "grad_norm": 1.1602097749710083, + "learning_rate": 8.854395922622496e-05, + "loss": 0.3131, + "step": 43300 + }, + { + "epoch": 1.6722653384300552, + "grad_norm": 1.444730520248413, + "learning_rate": 8.851821820662318e-05, + "loss": 0.2922, + "step": 43310 + }, + { + "epoch": 1.672651453724082, + "grad_norm": 1.0081762075424194, + "learning_rate": 8.849247718702139e-05, + "loss": 0.2689, + "step": 43320 + }, + { + "epoch": 1.673037569018109, + "grad_norm": 2.532080888748169, + "learning_rate": 8.846673616741959e-05, + "loss": 0.4207, + "step": 43330 + }, + { + "epoch": 1.6734236843121355, + "grad_norm": 1.1733953952789307, + "learning_rate": 8.84409951478178e-05, + "loss": 0.3253, + "step": 43340 + }, + { + "epoch": 1.6738097996061625, + "grad_norm": 0.5781744122505188, + "learning_rate": 8.841525412821602e-05, + "loss": 0.165, + "step": 43350 + }, + { + "epoch": 1.674195914900189, + "grad_norm": 2.3320493698120117, + "learning_rate": 8.838951310861424e-05, + "loss": 0.1466, + "step": 43360 + }, + { + "epoch": 1.674582030194216, + "grad_norm": 0.6514772176742554, + "learning_rate": 8.836377208901246e-05, + "loss": 0.2392, + "step": 43370 + }, + { + "epoch": 1.6749681454882428, + "grad_norm": 2.446646213531494, + "learning_rate": 8.833803106941067e-05, + "loss": 0.2237, + "step": 43380 + }, + { + "epoch": 1.6753542607822696, + "grad_norm": 1.0567893981933594, + "learning_rate": 8.831229004980888e-05, + "loss": 0.4682, + "step": 43390 + }, + { + "epoch": 1.6757403760762963, + "grad_norm": 1.559910774230957, + "learning_rate": 8.828654903020708e-05, + "loss": 0.2283, + "step": 43400 + }, + { + "epoch": 1.676126491370323, + "grad_norm": 2.8934245109558105, + "learning_rate": 8.82608080106053e-05, + "loss": 0.255, + "step": 43410 + }, + { + "epoch": 
1.67651260666435, + "grad_norm": 1.592612862586975, + "learning_rate": 8.823506699100352e-05, + "loss": 0.3115, + "step": 43420 + }, + { + "epoch": 1.6768987219583766, + "grad_norm": 0.19438475370407104, + "learning_rate": 8.820932597140174e-05, + "loss": 0.2549, + "step": 43430 + }, + { + "epoch": 1.6772848372524036, + "grad_norm": 1.3219093084335327, + "learning_rate": 8.818358495179995e-05, + "loss": 0.3156, + "step": 43440 + }, + { + "epoch": 1.6776709525464304, + "grad_norm": 0.23484057188034058, + "learning_rate": 8.815784393219816e-05, + "loss": 0.1866, + "step": 43450 + }, + { + "epoch": 1.6780570678404572, + "grad_norm": 1.284556269645691, + "learning_rate": 8.813210291259638e-05, + "loss": 0.2019, + "step": 43460 + }, + { + "epoch": 1.678443183134484, + "grad_norm": 1.4797706604003906, + "learning_rate": 8.810636189299459e-05, + "loss": 0.2423, + "step": 43470 + }, + { + "epoch": 1.6788292984285107, + "grad_norm": 1.6002583503723145, + "learning_rate": 8.808062087339279e-05, + "loss": 0.182, + "step": 43480 + }, + { + "epoch": 1.6792154137225377, + "grad_norm": 1.0503334999084473, + "learning_rate": 8.805487985379102e-05, + "loss": 0.2062, + "step": 43490 + }, + { + "epoch": 1.6796015290165642, + "grad_norm": 1.0708048343658447, + "learning_rate": 8.802913883418923e-05, + "loss": 0.1004, + "step": 43500 + }, + { + "epoch": 1.6799876443105912, + "grad_norm": 1.6162430047988892, + "learning_rate": 8.800339781458744e-05, + "loss": 0.1858, + "step": 43510 + }, + { + "epoch": 1.680373759604618, + "grad_norm": 3.5026352405548096, + "learning_rate": 8.797765679498566e-05, + "loss": 0.3431, + "step": 43520 + }, + { + "epoch": 1.6807598748986448, + "grad_norm": 1.1792393922805786, + "learning_rate": 8.795191577538387e-05, + "loss": 0.3389, + "step": 43530 + }, + { + "epoch": 1.6811459901926715, + "grad_norm": 1.0513951778411865, + "learning_rate": 8.792617475578208e-05, + "loss": 0.4476, + "step": 43540 + }, + { + "epoch": 1.6815321054866983, + "grad_norm": 1.3215738534927368, + "learning_rate": 8.79004337361803e-05, + "loss": 0.1624, + "step": 43550 + }, + { + "epoch": 1.6819182207807253, + "grad_norm": 0.4807497262954712, + "learning_rate": 8.787469271657851e-05, + "loss": 0.2777, + "step": 43560 + }, + { + "epoch": 1.6823043360747518, + "grad_norm": 1.1114505529403687, + "learning_rate": 8.784895169697672e-05, + "loss": 0.1405, + "step": 43570 + }, + { + "epoch": 1.6826904513687788, + "grad_norm": 0.8023913502693176, + "learning_rate": 8.782321067737494e-05, + "loss": 0.1744, + "step": 43580 + }, + { + "epoch": 1.6830765666628054, + "grad_norm": 1.1723856925964355, + "learning_rate": 8.779746965777315e-05, + "loss": 0.214, + "step": 43590 + }, + { + "epoch": 1.6834626819568324, + "grad_norm": 2.1813528537750244, + "learning_rate": 8.777172863817136e-05, + "loss": 0.2897, + "step": 43600 + }, + { + "epoch": 1.6838487972508591, + "grad_norm": 0.2930634319782257, + "learning_rate": 8.774598761856958e-05, + "loss": 0.121, + "step": 43610 + }, + { + "epoch": 1.6842349125448859, + "grad_norm": 0.8081830739974976, + "learning_rate": 8.772024659896779e-05, + "loss": 0.2847, + "step": 43620 + }, + { + "epoch": 1.6846210278389127, + "grad_norm": 0.8421902060508728, + "learning_rate": 8.7694505579366e-05, + "loss": 0.3151, + "step": 43630 + }, + { + "epoch": 1.6850071431329394, + "grad_norm": 1.6624525785446167, + "learning_rate": 8.766876455976421e-05, + "loss": 0.1725, + "step": 43640 + }, + { + "epoch": 1.6853932584269664, + "grad_norm": 0.6831340789794922, + "learning_rate": 
8.764302354016243e-05, + "loss": 0.432, + "step": 43650 + }, + { + "epoch": 1.685779373720993, + "grad_norm": 1.0642820596694946, + "learning_rate": 8.761728252056064e-05, + "loss": 0.2035, + "step": 43660 + }, + { + "epoch": 1.68616548901502, + "grad_norm": 0.6445997357368469, + "learning_rate": 8.759154150095887e-05, + "loss": 0.2407, + "step": 43670 + }, + { + "epoch": 1.6865516043090467, + "grad_norm": 0.1020059660077095, + "learning_rate": 8.756580048135707e-05, + "loss": 0.1744, + "step": 43680 + }, + { + "epoch": 1.6869377196030735, + "grad_norm": 0.3709975481033325, + "learning_rate": 8.754005946175528e-05, + "loss": 0.3713, + "step": 43690 + }, + { + "epoch": 1.6873238348971002, + "grad_norm": 0.5910777449607849, + "learning_rate": 8.75143184421535e-05, + "loss": 0.4291, + "step": 43700 + }, + { + "epoch": 1.687709950191127, + "grad_norm": 2.377362012863159, + "learning_rate": 8.748857742255171e-05, + "loss": 0.2871, + "step": 43710 + }, + { + "epoch": 1.688096065485154, + "grad_norm": 0.46812891960144043, + "learning_rate": 8.746283640294992e-05, + "loss": 0.1998, + "step": 43720 + }, + { + "epoch": 1.6884821807791806, + "grad_norm": 1.391098141670227, + "learning_rate": 8.743709538334813e-05, + "loss": 0.3189, + "step": 43730 + }, + { + "epoch": 1.6888682960732075, + "grad_norm": 0.11116664111614227, + "learning_rate": 8.741135436374636e-05, + "loss": 0.2538, + "step": 43740 + }, + { + "epoch": 1.6892544113672343, + "grad_norm": 0.2116546928882599, + "learning_rate": 8.738561334414456e-05, + "loss": 0.2014, + "step": 43750 + }, + { + "epoch": 1.689640526661261, + "grad_norm": 2.3815419673919678, + "learning_rate": 8.735987232454277e-05, + "loss": 0.2395, + "step": 43760 + }, + { + "epoch": 1.6900266419552878, + "grad_norm": 0.11581381410360336, + "learning_rate": 8.733413130494099e-05, + "loss": 0.2699, + "step": 43770 + }, + { + "epoch": 1.6904127572493146, + "grad_norm": 0.25713813304901123, + "learning_rate": 8.73083902853392e-05, + "loss": 0.1651, + "step": 43780 + }, + { + "epoch": 1.6907988725433416, + "grad_norm": 0.3496115803718567, + "learning_rate": 8.728264926573741e-05, + "loss": 0.3175, + "step": 43790 + }, + { + "epoch": 1.6911849878373681, + "grad_norm": 0.4966486394405365, + "learning_rate": 8.725690824613564e-05, + "loss": 0.258, + "step": 43800 + }, + { + "epoch": 1.6915711031313951, + "grad_norm": 1.2760952711105347, + "learning_rate": 8.723116722653385e-05, + "loss": 0.0882, + "step": 43810 + }, + { + "epoch": 1.6919572184254217, + "grad_norm": 1.0095762014389038, + "learning_rate": 8.720542620693207e-05, + "loss": 0.2171, + "step": 43820 + }, + { + "epoch": 1.6923433337194487, + "grad_norm": 3.596276044845581, + "learning_rate": 8.717968518733027e-05, + "loss": 0.238, + "step": 43830 + }, + { + "epoch": 1.6927294490134754, + "grad_norm": 0.3974495530128479, + "learning_rate": 8.715394416772848e-05, + "loss": 0.1459, + "step": 43840 + }, + { + "epoch": 1.6931155643075022, + "grad_norm": 1.1426684856414795, + "learning_rate": 8.71282031481267e-05, + "loss": 0.297, + "step": 43850 + }, + { + "epoch": 1.693501679601529, + "grad_norm": 0.887981116771698, + "learning_rate": 8.710246212852492e-05, + "loss": 0.1999, + "step": 43860 + }, + { + "epoch": 1.6938877948955557, + "grad_norm": 2.5697977542877197, + "learning_rate": 8.707672110892313e-05, + "loss": 0.3186, + "step": 43870 + }, + { + "epoch": 1.6942739101895827, + "grad_norm": 1.0131279230117798, + "learning_rate": 8.705098008932135e-05, + "loss": 0.182, + "step": 43880 + }, + { + "epoch": 1.6946600254836093, 
+ "grad_norm": 0.8319138288497925, + "learning_rate": 8.702523906971956e-05, + "loss": 0.2148, + "step": 43890 + }, + { + "epoch": 1.6950461407776363, + "grad_norm": 0.7270296216011047, + "learning_rate": 8.699949805011776e-05, + "loss": 0.1471, + "step": 43900 + }, + { + "epoch": 1.695432256071663, + "grad_norm": 1.2435482740402222, + "learning_rate": 8.697375703051597e-05, + "loss": 0.1446, + "step": 43910 + }, + { + "epoch": 1.6958183713656898, + "grad_norm": 0.5655059218406677, + "learning_rate": 8.69480160109142e-05, + "loss": 0.2765, + "step": 43920 + }, + { + "epoch": 1.6962044866597166, + "grad_norm": 1.7254856824874878, + "learning_rate": 8.692227499131241e-05, + "loss": 0.3449, + "step": 43930 + }, + { + "epoch": 1.6965906019537433, + "grad_norm": 0.5344254374504089, + "learning_rate": 8.689653397171063e-05, + "loss": 0.3847, + "step": 43940 + }, + { + "epoch": 1.6969767172477703, + "grad_norm": 0.7520522475242615, + "learning_rate": 8.687079295210884e-05, + "loss": 0.2785, + "step": 43950 + }, + { + "epoch": 1.6973628325417969, + "grad_norm": 2.708897352218628, + "learning_rate": 8.684505193250705e-05, + "loss": 0.3298, + "step": 43960 + }, + { + "epoch": 1.6977489478358239, + "grad_norm": 0.06953504681587219, + "learning_rate": 8.681931091290525e-05, + "loss": 0.094, + "step": 43970 + }, + { + "epoch": 1.6981350631298504, + "grad_norm": 0.586360514163971, + "learning_rate": 8.679356989330347e-05, + "loss": 0.2969, + "step": 43980 + }, + { + "epoch": 1.6985211784238774, + "grad_norm": 0.5543690919876099, + "learning_rate": 8.67678288737017e-05, + "loss": 0.1414, + "step": 43990 + }, + { + "epoch": 1.6989072937179042, + "grad_norm": 1.7767741680145264, + "learning_rate": 8.674208785409991e-05, + "loss": 0.3374, + "step": 44000 + }, + { + "epoch": 1.699293409011931, + "grad_norm": 1.463431477546692, + "learning_rate": 8.671634683449812e-05, + "loss": 0.2547, + "step": 44010 + }, + { + "epoch": 1.699679524305958, + "grad_norm": 0.3490108251571655, + "learning_rate": 8.669060581489633e-05, + "loss": 0.2969, + "step": 44020 + }, + { + "epoch": 1.7000656395999845, + "grad_norm": 0.4525464177131653, + "learning_rate": 8.666486479529455e-05, + "loss": 0.2982, + "step": 44030 + }, + { + "epoch": 1.7004517548940115, + "grad_norm": 0.3723921775817871, + "learning_rate": 8.663912377569276e-05, + "loss": 0.2818, + "step": 44040 + }, + { + "epoch": 1.700837870188038, + "grad_norm": 0.5301186442375183, + "learning_rate": 8.661338275609097e-05, + "loss": 0.1871, + "step": 44050 + }, + { + "epoch": 1.701223985482065, + "grad_norm": 2.890627861022949, + "learning_rate": 8.658764173648919e-05, + "loss": 0.3608, + "step": 44060 + }, + { + "epoch": 1.7016101007760918, + "grad_norm": 1.4061124324798584, + "learning_rate": 8.65619007168874e-05, + "loss": 0.203, + "step": 44070 + }, + { + "epoch": 1.7019962160701185, + "grad_norm": 2.882411241531372, + "learning_rate": 8.653615969728561e-05, + "loss": 0.2203, + "step": 44080 + }, + { + "epoch": 1.7023823313641453, + "grad_norm": 1.3079861402511597, + "learning_rate": 8.651041867768383e-05, + "loss": 0.1199, + "step": 44090 + }, + { + "epoch": 1.702768446658172, + "grad_norm": 4.284554481506348, + "learning_rate": 8.648467765808204e-05, + "loss": 0.2187, + "step": 44100 + }, + { + "epoch": 1.703154561952199, + "grad_norm": 0.14506328105926514, + "learning_rate": 8.645893663848025e-05, + "loss": 0.2822, + "step": 44110 + }, + { + "epoch": 1.7035406772462256, + "grad_norm": 0.48547646403312683, + "learning_rate": 8.643319561887847e-05, + "loss": 0.2239, + 
"step": 44120 + }, + { + "epoch": 1.7039267925402526, + "grad_norm": 1.2987794876098633, + "learning_rate": 8.640745459927668e-05, + "loss": 0.1554, + "step": 44130 + }, + { + "epoch": 1.7043129078342794, + "grad_norm": 0.25391751527786255, + "learning_rate": 8.63817135796749e-05, + "loss": 0.2635, + "step": 44140 + }, + { + "epoch": 1.7046990231283061, + "grad_norm": 1.7552175521850586, + "learning_rate": 8.635597256007311e-05, + "loss": 0.2762, + "step": 44150 + }, + { + "epoch": 1.705085138422333, + "grad_norm": 0.8608360290527344, + "learning_rate": 8.633023154047132e-05, + "loss": 0.2384, + "step": 44160 + }, + { + "epoch": 1.7054712537163597, + "grad_norm": 2.4618284702301025, + "learning_rate": 8.630449052086955e-05, + "loss": 0.3383, + "step": 44170 + }, + { + "epoch": 1.7058573690103866, + "grad_norm": 0.9636523127555847, + "learning_rate": 8.627874950126775e-05, + "loss": 0.2036, + "step": 44180 + }, + { + "epoch": 1.7062434843044132, + "grad_norm": 0.3806803226470947, + "learning_rate": 8.625300848166596e-05, + "loss": 0.1378, + "step": 44190 + }, + { + "epoch": 1.7066295995984402, + "grad_norm": 0.31704220175743103, + "learning_rate": 8.622726746206417e-05, + "loss": 0.2171, + "step": 44200 + }, + { + "epoch": 1.7070157148924667, + "grad_norm": 2.3355181217193604, + "learning_rate": 8.620152644246239e-05, + "loss": 0.2983, + "step": 44210 + }, + { + "epoch": 1.7074018301864937, + "grad_norm": 1.3819530010223389, + "learning_rate": 8.61757854228606e-05, + "loss": 0.3944, + "step": 44220 + }, + { + "epoch": 1.7077879454805205, + "grad_norm": 0.5096393823623657, + "learning_rate": 8.615004440325883e-05, + "loss": 0.3142, + "step": 44230 + }, + { + "epoch": 1.7081740607745473, + "grad_norm": 0.7113396525382996, + "learning_rate": 8.612430338365704e-05, + "loss": 0.1644, + "step": 44240 + }, + { + "epoch": 1.708560176068574, + "grad_norm": 0.3259173333644867, + "learning_rate": 8.609856236405524e-05, + "loss": 0.208, + "step": 44250 + }, + { + "epoch": 1.7089462913626008, + "grad_norm": 0.1341869980096817, + "learning_rate": 8.607282134445345e-05, + "loss": 0.1642, + "step": 44260 + }, + { + "epoch": 1.7093324066566278, + "grad_norm": 1.0947731733322144, + "learning_rate": 8.604708032485167e-05, + "loss": 0.3079, + "step": 44270 + }, + { + "epoch": 1.7097185219506543, + "grad_norm": 1.065469741821289, + "learning_rate": 8.602133930524988e-05, + "loss": 0.2845, + "step": 44280 + }, + { + "epoch": 1.7101046372446813, + "grad_norm": 0.23346304893493652, + "learning_rate": 8.59955982856481e-05, + "loss": 0.1428, + "step": 44290 + }, + { + "epoch": 1.710490752538708, + "grad_norm": 0.9997304677963257, + "learning_rate": 8.596985726604632e-05, + "loss": 0.2313, + "step": 44300 + }, + { + "epoch": 1.7108768678327348, + "grad_norm": 0.2540823519229889, + "learning_rate": 8.594411624644453e-05, + "loss": 0.131, + "step": 44310 + }, + { + "epoch": 1.7112629831267616, + "grad_norm": 0.35752159357070923, + "learning_rate": 8.591837522684273e-05, + "loss": 0.2414, + "step": 44320 + }, + { + "epoch": 1.7116490984207884, + "grad_norm": 0.9997861385345459, + "learning_rate": 8.589263420724095e-05, + "loss": 0.2726, + "step": 44330 + }, + { + "epoch": 1.7120352137148154, + "grad_norm": 0.1516636461019516, + "learning_rate": 8.586689318763916e-05, + "loss": 0.0939, + "step": 44340 + }, + { + "epoch": 1.712421329008842, + "grad_norm": 1.5101124048233032, + "learning_rate": 8.584115216803737e-05, + "loss": 0.1331, + "step": 44350 + }, + { + "epoch": 1.712807444302869, + "grad_norm": 1.8988938331604004, 
+ "learning_rate": 8.58154111484356e-05, + "loss": 0.104, + "step": 44360 + }, + { + "epoch": 1.7131935595968957, + "grad_norm": 0.50355464220047, + "learning_rate": 8.578967012883381e-05, + "loss": 0.2283, + "step": 44370 + }, + { + "epoch": 1.7135796748909224, + "grad_norm": 1.3266645669937134, + "learning_rate": 8.576392910923203e-05, + "loss": 0.2639, + "step": 44380 + }, + { + "epoch": 1.7139657901849492, + "grad_norm": 0.5714776515960693, + "learning_rate": 8.573818808963023e-05, + "loss": 0.3018, + "step": 44390 + }, + { + "epoch": 1.714351905478976, + "grad_norm": 2.2676408290863037, + "learning_rate": 8.571244707002844e-05, + "loss": 0.1851, + "step": 44400 + }, + { + "epoch": 1.714738020773003, + "grad_norm": 1.338866114616394, + "learning_rate": 8.568670605042665e-05, + "loss": 0.198, + "step": 44410 + }, + { + "epoch": 1.7151241360670295, + "grad_norm": 0.7841195464134216, + "learning_rate": 8.566096503082488e-05, + "loss": 0.1689, + "step": 44420 + }, + { + "epoch": 1.7155102513610565, + "grad_norm": 2.0101919174194336, + "learning_rate": 8.56352240112231e-05, + "loss": 0.278, + "step": 44430 + }, + { + "epoch": 1.715896366655083, + "grad_norm": 0.3661075830459595, + "learning_rate": 8.560948299162131e-05, + "loss": 0.19, + "step": 44440 + }, + { + "epoch": 1.71628248194911, + "grad_norm": 1.3221279382705688, + "learning_rate": 8.558374197201952e-05, + "loss": 0.2621, + "step": 44450 + }, + { + "epoch": 1.7166685972431368, + "grad_norm": 1.1006388664245605, + "learning_rate": 8.555800095241773e-05, + "loss": 0.1661, + "step": 44460 + }, + { + "epoch": 1.7170547125371636, + "grad_norm": 0.6990749835968018, + "learning_rate": 8.553225993281593e-05, + "loss": 0.1234, + "step": 44470 + }, + { + "epoch": 1.7174408278311903, + "grad_norm": 1.1236909627914429, + "learning_rate": 8.550651891321416e-05, + "loss": 0.3528, + "step": 44480 + }, + { + "epoch": 1.717826943125217, + "grad_norm": 1.3502060174942017, + "learning_rate": 8.548077789361237e-05, + "loss": 0.1035, + "step": 44490 + }, + { + "epoch": 1.718213058419244, + "grad_norm": 0.6443360447883606, + "learning_rate": 8.545503687401059e-05, + "loss": 0.257, + "step": 44500 + }, + { + "epoch": 1.7185991737132706, + "grad_norm": 1.864953875541687, + "learning_rate": 8.54292958544088e-05, + "loss": 0.1791, + "step": 44510 + }, + { + "epoch": 1.7189852890072976, + "grad_norm": 0.8403190970420837, + "learning_rate": 8.540355483480701e-05, + "loss": 0.1907, + "step": 44520 + }, + { + "epoch": 1.7193714043013244, + "grad_norm": 2.308983325958252, + "learning_rate": 8.537781381520523e-05, + "loss": 0.3784, + "step": 44530 + }, + { + "epoch": 1.7197575195953512, + "grad_norm": 0.3229024112224579, + "learning_rate": 8.535207279560343e-05, + "loss": 0.2405, + "step": 44540 + }, + { + "epoch": 1.720143634889378, + "grad_norm": 0.5669896602630615, + "learning_rate": 8.532633177600165e-05, + "loss": 0.2103, + "step": 44550 + }, + { + "epoch": 1.7205297501834047, + "grad_norm": 0.9105948805809021, + "learning_rate": 8.530059075639987e-05, + "loss": 0.1816, + "step": 44560 + }, + { + "epoch": 1.7209158654774317, + "grad_norm": 0.4213886260986328, + "learning_rate": 8.527484973679808e-05, + "loss": 0.1735, + "step": 44570 + }, + { + "epoch": 1.7213019807714582, + "grad_norm": 2.9891350269317627, + "learning_rate": 8.52491087171963e-05, + "loss": 0.3105, + "step": 44580 + }, + { + "epoch": 1.7216880960654852, + "grad_norm": 2.2383408546447754, + "learning_rate": 8.52233676975945e-05, + "loss": 0.2677, + "step": 44590 + }, + { + "epoch": 
1.722074211359512, + "grad_norm": 0.128885418176651, + "learning_rate": 8.519762667799272e-05, + "loss": 0.2, + "step": 44600 + }, + { + "epoch": 1.7224603266535388, + "grad_norm": 1.021690845489502, + "learning_rate": 8.517188565839093e-05, + "loss": 0.3636, + "step": 44610 + }, + { + "epoch": 1.7228464419475655, + "grad_norm": 3.629401445388794, + "learning_rate": 8.514614463878915e-05, + "loss": 0.3223, + "step": 44620 + }, + { + "epoch": 1.7232325572415923, + "grad_norm": 2.4377548694610596, + "learning_rate": 8.512040361918736e-05, + "loss": 0.2448, + "step": 44630 + }, + { + "epoch": 1.7236186725356193, + "grad_norm": 0.6085236668586731, + "learning_rate": 8.509466259958557e-05, + "loss": 0.2202, + "step": 44640 + }, + { + "epoch": 1.7240047878296458, + "grad_norm": 1.4205585718154907, + "learning_rate": 8.506892157998379e-05, + "loss": 0.1697, + "step": 44650 + }, + { + "epoch": 1.7243909031236728, + "grad_norm": 2.6096320152282715, + "learning_rate": 8.5043180560382e-05, + "loss": 0.1657, + "step": 44660 + }, + { + "epoch": 1.7247770184176994, + "grad_norm": 0.155767560005188, + "learning_rate": 8.501743954078021e-05, + "loss": 0.162, + "step": 44670 + }, + { + "epoch": 1.7251631337117264, + "grad_norm": 0.30843400955200195, + "learning_rate": 8.499169852117843e-05, + "loss": 0.2844, + "step": 44680 + }, + { + "epoch": 1.7255492490057531, + "grad_norm": 0.7207142114639282, + "learning_rate": 8.496595750157664e-05, + "loss": 0.3617, + "step": 44690 + }, + { + "epoch": 1.72593536429978, + "grad_norm": 0.6151508092880249, + "learning_rate": 8.494021648197485e-05, + "loss": 0.1202, + "step": 44700 + }, + { + "epoch": 1.7263214795938067, + "grad_norm": 1.712505578994751, + "learning_rate": 8.491447546237307e-05, + "loss": 0.3624, + "step": 44710 + }, + { + "epoch": 1.7267075948878334, + "grad_norm": 2.3895373344421387, + "learning_rate": 8.488873444277128e-05, + "loss": 0.3347, + "step": 44720 + }, + { + "epoch": 1.7270937101818604, + "grad_norm": 0.3279499411582947, + "learning_rate": 8.48629934231695e-05, + "loss": 0.1821, + "step": 44730 + }, + { + "epoch": 1.727479825475887, + "grad_norm": 0.9812091588973999, + "learning_rate": 8.48372524035677e-05, + "loss": 0.3173, + "step": 44740 + }, + { + "epoch": 1.727865940769914, + "grad_norm": 0.36781829595565796, + "learning_rate": 8.481151138396592e-05, + "loss": 0.2766, + "step": 44750 + }, + { + "epoch": 1.7282520560639407, + "grad_norm": 2.1118052005767822, + "learning_rate": 8.478577036436413e-05, + "loss": 0.4954, + "step": 44760 + }, + { + "epoch": 1.7286381713579675, + "grad_norm": 0.7159673571586609, + "learning_rate": 8.476002934476235e-05, + "loss": 0.3877, + "step": 44770 + }, + { + "epoch": 1.7290242866519943, + "grad_norm": 0.16327527165412903, + "learning_rate": 8.473428832516056e-05, + "loss": 0.2975, + "step": 44780 + }, + { + "epoch": 1.729410401946021, + "grad_norm": 1.2890655994415283, + "learning_rate": 8.470854730555877e-05, + "loss": 0.2056, + "step": 44790 + }, + { + "epoch": 1.729796517240048, + "grad_norm": 0.7911620140075684, + "learning_rate": 8.4682806285957e-05, + "loss": 0.2162, + "step": 44800 + }, + { + "epoch": 1.7301826325340746, + "grad_norm": 0.1271449327468872, + "learning_rate": 8.465706526635521e-05, + "loss": 0.1821, + "step": 44810 + }, + { + "epoch": 1.7305687478281016, + "grad_norm": 1.1765756607055664, + "learning_rate": 8.463132424675341e-05, + "loss": 0.1826, + "step": 44820 + }, + { + "epoch": 1.7309548631221283, + "grad_norm": 1.5291017293930054, + "learning_rate": 8.460558322715163e-05, + 
"loss": 0.2125, + "step": 44830 + }, + { + "epoch": 1.731340978416155, + "grad_norm": 1.1815464496612549, + "learning_rate": 8.457984220754984e-05, + "loss": 0.1488, + "step": 44840 + }, + { + "epoch": 1.7317270937101819, + "grad_norm": 2.127589464187622, + "learning_rate": 8.455410118794805e-05, + "loss": 0.3758, + "step": 44850 + }, + { + "epoch": 1.7321132090042086, + "grad_norm": 0.5369740724563599, + "learning_rate": 8.452836016834628e-05, + "loss": 0.2725, + "step": 44860 + }, + { + "epoch": 1.7324993242982356, + "grad_norm": 1.038955807685852, + "learning_rate": 8.450261914874449e-05, + "loss": 0.233, + "step": 44870 + }, + { + "epoch": 1.7328854395922622, + "grad_norm": 1.5458402633666992, + "learning_rate": 8.44768781291427e-05, + "loss": 0.3846, + "step": 44880 + }, + { + "epoch": 1.7332715548862891, + "grad_norm": 0.8488010168075562, + "learning_rate": 8.44511371095409e-05, + "loss": 0.2022, + "step": 44890 + }, + { + "epoch": 1.7336576701803157, + "grad_norm": 2.4855663776397705, + "learning_rate": 8.442539608993912e-05, + "loss": 0.2071, + "step": 44900 + }, + { + "epoch": 1.7340437854743427, + "grad_norm": 5.918638229370117, + "learning_rate": 8.439965507033733e-05, + "loss": 0.1962, + "step": 44910 + }, + { + "epoch": 1.7344299007683694, + "grad_norm": 1.9365994930267334, + "learning_rate": 8.437391405073556e-05, + "loss": 0.1313, + "step": 44920 + }, + { + "epoch": 1.7348160160623962, + "grad_norm": 1.1711100339889526, + "learning_rate": 8.434817303113377e-05, + "loss": 0.2938, + "step": 44930 + }, + { + "epoch": 1.735202131356423, + "grad_norm": 1.0350059270858765, + "learning_rate": 8.432243201153199e-05, + "loss": 0.1813, + "step": 44940 + }, + { + "epoch": 1.7355882466504498, + "grad_norm": 0.9831900596618652, + "learning_rate": 8.42966909919302e-05, + "loss": 0.1572, + "step": 44950 + }, + { + "epoch": 1.7359743619444767, + "grad_norm": 2.4830219745635986, + "learning_rate": 8.42709499723284e-05, + "loss": 0.2408, + "step": 44960 + }, + { + "epoch": 1.7363604772385033, + "grad_norm": 2.513667345046997, + "learning_rate": 8.424520895272661e-05, + "loss": 0.3392, + "step": 44970 + }, + { + "epoch": 1.7367465925325303, + "grad_norm": 1.1947827339172363, + "learning_rate": 8.421946793312484e-05, + "loss": 0.1314, + "step": 44980 + }, + { + "epoch": 1.737132707826557, + "grad_norm": 0.9299411773681641, + "learning_rate": 8.419372691352305e-05, + "loss": 0.1691, + "step": 44990 + }, + { + "epoch": 1.7375188231205838, + "grad_norm": 2.9388837814331055, + "learning_rate": 8.416798589392127e-05, + "loss": 0.389, + "step": 45000 + }, + { + "epoch": 1.7379049384146106, + "grad_norm": 0.20148181915283203, + "learning_rate": 8.414224487431948e-05, + "loss": 0.2589, + "step": 45010 + }, + { + "epoch": 1.7382910537086373, + "grad_norm": 1.1276886463165283, + "learning_rate": 8.411650385471769e-05, + "loss": 0.2772, + "step": 45020 + }, + { + "epoch": 1.7386771690026643, + "grad_norm": 0.14743736386299133, + "learning_rate": 8.40907628351159e-05, + "loss": 0.2235, + "step": 45030 + }, + { + "epoch": 1.7390632842966909, + "grad_norm": 1.7028512954711914, + "learning_rate": 8.40650218155141e-05, + "loss": 0.262, + "step": 45040 + }, + { + "epoch": 1.7394493995907179, + "grad_norm": 0.27506566047668457, + "learning_rate": 8.403928079591233e-05, + "loss": 0.1546, + "step": 45050 + }, + { + "epoch": 1.7398355148847446, + "grad_norm": 0.11555200815200806, + "learning_rate": 8.401353977631055e-05, + "loss": 0.2591, + "step": 45060 + }, + { + "epoch": 1.7402216301787714, + "grad_norm": 
2.141800880432129, + "learning_rate": 8.398779875670876e-05, + "loss": 0.1974, + "step": 45070 + }, + { + "epoch": 1.7406077454727982, + "grad_norm": 0.8879682421684265, + "learning_rate": 8.396205773710697e-05, + "loss": 0.2192, + "step": 45080 + }, + { + "epoch": 1.740993860766825, + "grad_norm": 0.5697862505912781, + "learning_rate": 8.393631671750519e-05, + "loss": 0.1556, + "step": 45090 + }, + { + "epoch": 1.741379976060852, + "grad_norm": 1.5055205821990967, + "learning_rate": 8.39105756979034e-05, + "loss": 0.2199, + "step": 45100 + }, + { + "epoch": 1.7417660913548785, + "grad_norm": 1.4144301414489746, + "learning_rate": 8.388483467830161e-05, + "loss": 0.1873, + "step": 45110 + }, + { + "epoch": 1.7421522066489055, + "grad_norm": 2.419147253036499, + "learning_rate": 8.385909365869983e-05, + "loss": 0.3444, + "step": 45120 + }, + { + "epoch": 1.742538321942932, + "grad_norm": 1.1189093589782715, + "learning_rate": 8.383335263909804e-05, + "loss": 0.2641, + "step": 45130 + }, + { + "epoch": 1.742924437236959, + "grad_norm": 0.44919779896736145, + "learning_rate": 8.380761161949625e-05, + "loss": 0.1945, + "step": 45140 + }, + { + "epoch": 1.7433105525309858, + "grad_norm": 3.4231624603271484, + "learning_rate": 8.378187059989447e-05, + "loss": 0.372, + "step": 45150 + }, + { + "epoch": 1.7436966678250125, + "grad_norm": 1.382497787475586, + "learning_rate": 8.375612958029268e-05, + "loss": 0.1532, + "step": 45160 + }, + { + "epoch": 1.7440827831190393, + "grad_norm": 1.9219565391540527, + "learning_rate": 8.373038856069089e-05, + "loss": 0.3255, + "step": 45170 + }, + { + "epoch": 1.744468898413066, + "grad_norm": 1.2347924709320068, + "learning_rate": 8.37046475410891e-05, + "loss": 0.2786, + "step": 45180 + }, + { + "epoch": 1.744855013707093, + "grad_norm": 0.1424872726202011, + "learning_rate": 8.367890652148732e-05, + "loss": 0.2561, + "step": 45190 + }, + { + "epoch": 1.7452411290011196, + "grad_norm": 0.1864137351512909, + "learning_rate": 8.365316550188553e-05, + "loss": 0.4099, + "step": 45200 + }, + { + "epoch": 1.7456272442951466, + "grad_norm": 0.13210314512252808, + "learning_rate": 8.362742448228375e-05, + "loss": 0.2684, + "step": 45210 + }, + { + "epoch": 1.7460133595891734, + "grad_norm": 0.14317531883716583, + "learning_rate": 8.360168346268196e-05, + "loss": 0.1777, + "step": 45220 + }, + { + "epoch": 1.7463994748832001, + "grad_norm": 0.9194528460502625, + "learning_rate": 8.357594244308019e-05, + "loss": 0.2047, + "step": 45230 + }, + { + "epoch": 1.746785590177227, + "grad_norm": 0.13146016001701355, + "learning_rate": 8.355020142347839e-05, + "loss": 0.1659, + "step": 45240 + }, + { + "epoch": 1.7471717054712537, + "grad_norm": 0.804329514503479, + "learning_rate": 8.35244604038766e-05, + "loss": 0.1685, + "step": 45250 + }, + { + "epoch": 1.7475578207652807, + "grad_norm": 2.3637967109680176, + "learning_rate": 8.349871938427481e-05, + "loss": 0.234, + "step": 45260 + }, + { + "epoch": 1.7479439360593072, + "grad_norm": 0.550857663154602, + "learning_rate": 8.347297836467303e-05, + "loss": 0.2283, + "step": 45270 + }, + { + "epoch": 1.7483300513533342, + "grad_norm": 0.5919561982154846, + "learning_rate": 8.344723734507124e-05, + "loss": 0.229, + "step": 45280 + }, + { + "epoch": 1.7487161666473607, + "grad_norm": 2.1318750381469727, + "learning_rate": 8.342149632546945e-05, + "loss": 0.2161, + "step": 45290 + }, + { + "epoch": 1.7491022819413877, + "grad_norm": 2.0451972484588623, + "learning_rate": 8.339575530586768e-05, + "loss": 0.1845, + "step": 45300 
+ }, + { + "epoch": 1.7494883972354145, + "grad_norm": 0.1041000485420227, + "learning_rate": 8.337001428626588e-05, + "loss": 0.1473, + "step": 45310 + }, + { + "epoch": 1.7498745125294413, + "grad_norm": 1.8641316890716553, + "learning_rate": 8.334427326666409e-05, + "loss": 0.2287, + "step": 45320 + }, + { + "epoch": 1.7502606278234683, + "grad_norm": 0.5701905488967896, + "learning_rate": 8.33185322470623e-05, + "loss": 0.1356, + "step": 45330 + }, + { + "epoch": 1.7506467431174948, + "grad_norm": 0.78929603099823, + "learning_rate": 8.329279122746052e-05, + "loss": 0.1407, + "step": 45340 + }, + { + "epoch": 1.7510328584115218, + "grad_norm": 1.4745780229568481, + "learning_rate": 8.326705020785873e-05, + "loss": 0.2731, + "step": 45350 + }, + { + "epoch": 1.7514189737055483, + "grad_norm": 0.5546283721923828, + "learning_rate": 8.324130918825696e-05, + "loss": 0.252, + "step": 45360 + }, + { + "epoch": 1.7518050889995753, + "grad_norm": 0.6111850142478943, + "learning_rate": 8.321556816865517e-05, + "loss": 0.3657, + "step": 45370 + }, + { + "epoch": 1.752191204293602, + "grad_norm": 0.9489399790763855, + "learning_rate": 8.318982714905339e-05, + "loss": 0.2214, + "step": 45380 + }, + { + "epoch": 1.7525773195876289, + "grad_norm": 0.2951168119907379, + "learning_rate": 8.316408612945159e-05, + "loss": 0.2425, + "step": 45390 + }, + { + "epoch": 1.7529634348816556, + "grad_norm": 0.13124701380729675, + "learning_rate": 8.31383451098498e-05, + "loss": 0.1943, + "step": 45400 + }, + { + "epoch": 1.7533495501756824, + "grad_norm": 0.787786066532135, + "learning_rate": 8.311260409024801e-05, + "loss": 0.2009, + "step": 45410 + }, + { + "epoch": 1.7537356654697094, + "grad_norm": 0.30316632986068726, + "learning_rate": 8.308686307064624e-05, + "loss": 0.2426, + "step": 45420 + }, + { + "epoch": 1.754121780763736, + "grad_norm": 1.0703738927841187, + "learning_rate": 8.306112205104445e-05, + "loss": 0.2247, + "step": 45430 + }, + { + "epoch": 1.754507896057763, + "grad_norm": 0.2669302821159363, + "learning_rate": 8.303538103144267e-05, + "loss": 0.2076, + "step": 45440 + }, + { + "epoch": 1.7548940113517897, + "grad_norm": 2.0138015747070312, + "learning_rate": 8.300964001184088e-05, + "loss": 0.3064, + "step": 45450 + }, + { + "epoch": 1.7552801266458165, + "grad_norm": 4.026096820831299, + "learning_rate": 8.298389899223908e-05, + "loss": 0.2664, + "step": 45460 + }, + { + "epoch": 1.7556662419398432, + "grad_norm": 0.7756350636482239, + "learning_rate": 8.295815797263729e-05, + "loss": 0.224, + "step": 45470 + }, + { + "epoch": 1.75605235723387, + "grad_norm": 0.8312183618545532, + "learning_rate": 8.293241695303552e-05, + "loss": 0.393, + "step": 45480 + }, + { + "epoch": 1.756438472527897, + "grad_norm": 1.6933070421218872, + "learning_rate": 8.290667593343373e-05, + "loss": 0.2006, + "step": 45490 + }, + { + "epoch": 1.7568245878219235, + "grad_norm": 0.7471343278884888, + "learning_rate": 8.288093491383195e-05, + "loss": 0.1356, + "step": 45500 + }, + { + "epoch": 1.7572107031159505, + "grad_norm": 2.5663211345672607, + "learning_rate": 8.285519389423016e-05, + "loss": 0.2545, + "step": 45510 + }, + { + "epoch": 1.757596818409977, + "grad_norm": 2.9056997299194336, + "learning_rate": 8.282945287462837e-05, + "loss": 0.2321, + "step": 45520 + }, + { + "epoch": 1.757982933704004, + "grad_norm": 0.6775566935539246, + "learning_rate": 8.280371185502657e-05, + "loss": 0.1703, + "step": 45530 + }, + { + "epoch": 1.7583690489980308, + "grad_norm": 1.0130302906036377, + "learning_rate": 
8.27779708354248e-05, + "loss": 0.3204, + "step": 45540 + }, + { + "epoch": 1.7587551642920576, + "grad_norm": 0.8566673398017883, + "learning_rate": 8.275222981582301e-05, + "loss": 0.2069, + "step": 45550 + }, + { + "epoch": 1.7591412795860843, + "grad_norm": 1.0929473638534546, + "learning_rate": 8.272648879622122e-05, + "loss": 0.2631, + "step": 45560 + }, + { + "epoch": 1.7595273948801111, + "grad_norm": 1.7695512771606445, + "learning_rate": 8.270074777661944e-05, + "loss": 0.2028, + "step": 45570 + }, + { + "epoch": 1.759913510174138, + "grad_norm": 1.5217735767364502, + "learning_rate": 8.267500675701765e-05, + "loss": 0.1925, + "step": 45580 + }, + { + "epoch": 1.7602996254681647, + "grad_norm": 0.639868438243866, + "learning_rate": 8.264926573741586e-05, + "loss": 0.3473, + "step": 45590 + }, + { + "epoch": 1.7606857407621916, + "grad_norm": 0.3286214768886566, + "learning_rate": 8.262352471781408e-05, + "loss": 0.1136, + "step": 45600 + }, + { + "epoch": 1.7610718560562184, + "grad_norm": 1.0162252187728882, + "learning_rate": 8.259778369821229e-05, + "loss": 0.1108, + "step": 45610 + }, + { + "epoch": 1.7614579713502452, + "grad_norm": 0.8821909427642822, + "learning_rate": 8.25720426786105e-05, + "loss": 0.2852, + "step": 45620 + }, + { + "epoch": 1.761844086644272, + "grad_norm": 0.7297468781471252, + "learning_rate": 8.254630165900872e-05, + "loss": 0.187, + "step": 45630 + }, + { + "epoch": 1.7622302019382987, + "grad_norm": 2.8545424938201904, + "learning_rate": 8.252056063940693e-05, + "loss": 0.2441, + "step": 45640 + }, + { + "epoch": 1.7626163172323257, + "grad_norm": 0.5221932530403137, + "learning_rate": 8.249481961980514e-05, + "loss": 0.1934, + "step": 45650 + }, + { + "epoch": 1.7630024325263522, + "grad_norm": 1.1896415948867798, + "learning_rate": 8.246907860020336e-05, + "loss": 0.2149, + "step": 45660 + }, + { + "epoch": 1.7633885478203792, + "grad_norm": 0.28229987621307373, + "learning_rate": 8.244333758060157e-05, + "loss": 0.2514, + "step": 45670 + }, + { + "epoch": 1.763774663114406, + "grad_norm": 1.2415190935134888, + "learning_rate": 8.241759656099978e-05, + "loss": 0.4076, + "step": 45680 + }, + { + "epoch": 1.7641607784084328, + "grad_norm": 2.535752296447754, + "learning_rate": 8.2391855541398e-05, + "loss": 0.1628, + "step": 45690 + }, + { + "epoch": 1.7645468937024595, + "grad_norm": 1.8700125217437744, + "learning_rate": 8.236611452179621e-05, + "loss": 0.2549, + "step": 45700 + }, + { + "epoch": 1.7649330089964863, + "grad_norm": 0.5365914702415466, + "learning_rate": 8.234037350219442e-05, + "loss": 0.0801, + "step": 45710 + }, + { + "epoch": 1.7653191242905133, + "grad_norm": 3.5979111194610596, + "learning_rate": 8.231463248259264e-05, + "loss": 0.3129, + "step": 45720 + }, + { + "epoch": 1.7657052395845398, + "grad_norm": 2.5577943325042725, + "learning_rate": 8.228889146299086e-05, + "loss": 0.2171, + "step": 45730 + }, + { + "epoch": 1.7660913548785668, + "grad_norm": 2.128424882888794, + "learning_rate": 8.226315044338906e-05, + "loss": 0.1687, + "step": 45740 + }, + { + "epoch": 1.7664774701725934, + "grad_norm": 0.41651803255081177, + "learning_rate": 8.223740942378728e-05, + "loss": 0.2342, + "step": 45750 + }, + { + "epoch": 1.7668635854666204, + "grad_norm": 0.8309571146965027, + "learning_rate": 8.221166840418549e-05, + "loss": 0.2406, + "step": 45760 + }, + { + "epoch": 1.7672497007606471, + "grad_norm": 1.7219020128250122, + "learning_rate": 8.21859273845837e-05, + "loss": 0.2764, + "step": 45770 + }, + { + "epoch": 
1.767635816054674, + "grad_norm": 1.5574841499328613, + "learning_rate": 8.216018636498192e-05, + "loss": 0.3413, + "step": 45780 + }, + { + "epoch": 1.7680219313487007, + "grad_norm": 0.48963573575019836, + "learning_rate": 8.213444534538014e-05, + "loss": 0.2381, + "step": 45790 + }, + { + "epoch": 1.7684080466427274, + "grad_norm": 2.597996711730957, + "learning_rate": 8.210870432577836e-05, + "loss": 0.2625, + "step": 45800 + }, + { + "epoch": 1.7687941619367544, + "grad_norm": 1.0208812952041626, + "learning_rate": 8.208296330617656e-05, + "loss": 0.3519, + "step": 45810 + }, + { + "epoch": 1.769180277230781, + "grad_norm": 0.6976078748703003, + "learning_rate": 8.205722228657477e-05, + "loss": 0.2075, + "step": 45820 + }, + { + "epoch": 1.769566392524808, + "grad_norm": 1.534698247909546, + "learning_rate": 8.203148126697298e-05, + "loss": 0.1831, + "step": 45830 + }, + { + "epoch": 1.7699525078188347, + "grad_norm": 1.5414319038391113, + "learning_rate": 8.20057402473712e-05, + "loss": 0.2466, + "step": 45840 + }, + { + "epoch": 1.7703386231128615, + "grad_norm": 0.6082043051719666, + "learning_rate": 8.197999922776941e-05, + "loss": 0.2598, + "step": 45850 + }, + { + "epoch": 1.7707247384068883, + "grad_norm": 1.3977060317993164, + "learning_rate": 8.195425820816764e-05, + "loss": 0.2733, + "step": 45860 + }, + { + "epoch": 1.771110853700915, + "grad_norm": 1.2991368770599365, + "learning_rate": 8.192851718856585e-05, + "loss": 0.3078, + "step": 45870 + }, + { + "epoch": 1.771496968994942, + "grad_norm": 0.2851855754852295, + "learning_rate": 8.190277616896405e-05, + "loss": 0.3315, + "step": 45880 + }, + { + "epoch": 1.7718830842889686, + "grad_norm": 0.7140517234802246, + "learning_rate": 8.187703514936226e-05, + "loss": 0.2454, + "step": 45890 + }, + { + "epoch": 1.7722691995829956, + "grad_norm": 2.3793671131134033, + "learning_rate": 8.185129412976048e-05, + "loss": 0.3749, + "step": 45900 + }, + { + "epoch": 1.7726553148770223, + "grad_norm": 0.6969908475875854, + "learning_rate": 8.182555311015869e-05, + "loss": 0.2849, + "step": 45910 + }, + { + "epoch": 1.773041430171049, + "grad_norm": 0.7409896850585938, + "learning_rate": 8.179981209055692e-05, + "loss": 0.3614, + "step": 45920 + }, + { + "epoch": 1.7734275454650759, + "grad_norm": 2.1562981605529785, + "learning_rate": 8.177407107095513e-05, + "loss": 0.3234, + "step": 45930 + }, + { + "epoch": 1.7738136607591026, + "grad_norm": 0.8214607834815979, + "learning_rate": 8.174833005135334e-05, + "loss": 0.3747, + "step": 45940 + }, + { + "epoch": 1.7741997760531296, + "grad_norm": 1.1292701959609985, + "learning_rate": 8.172258903175154e-05, + "loss": 0.2048, + "step": 45950 + }, + { + "epoch": 1.7745858913471562, + "grad_norm": 0.968303918838501, + "learning_rate": 8.169684801214976e-05, + "loss": 0.1863, + "step": 45960 + }, + { + "epoch": 1.7749720066411832, + "grad_norm": 0.1449265480041504, + "learning_rate": 8.167110699254797e-05, + "loss": 0.2583, + "step": 45970 + }, + { + "epoch": 1.7753581219352097, + "grad_norm": 0.9772675633430481, + "learning_rate": 8.16453659729462e-05, + "loss": 0.372, + "step": 45980 + }, + { + "epoch": 1.7757442372292367, + "grad_norm": 1.8147861957550049, + "learning_rate": 8.161962495334441e-05, + "loss": 0.2404, + "step": 45990 + }, + { + "epoch": 1.7761303525232635, + "grad_norm": 1.828444004058838, + "learning_rate": 8.159388393374262e-05, + "loss": 0.4309, + "step": 46000 + }, + { + "epoch": 1.7765164678172902, + "grad_norm": 0.4886840879917145, + "learning_rate": 
8.156814291414084e-05, + "loss": 0.1927, + "step": 46010 + }, + { + "epoch": 1.776902583111317, + "grad_norm": 0.7208076119422913, + "learning_rate": 8.154240189453905e-05, + "loss": 0.2186, + "step": 46020 + }, + { + "epoch": 1.7772886984053438, + "grad_norm": 0.6111328601837158, + "learning_rate": 8.151666087493725e-05, + "loss": 0.2065, + "step": 46030 + }, + { + "epoch": 1.7776748136993707, + "grad_norm": 2.601994514465332, + "learning_rate": 8.149091985533548e-05, + "loss": 0.1138, + "step": 46040 + }, + { + "epoch": 1.7780609289933973, + "grad_norm": 2.0283939838409424, + "learning_rate": 8.146517883573369e-05, + "loss": 0.3461, + "step": 46050 + }, + { + "epoch": 1.7784470442874243, + "grad_norm": 1.329325795173645, + "learning_rate": 8.14394378161319e-05, + "loss": 0.1981, + "step": 46060 + }, + { + "epoch": 1.778833159581451, + "grad_norm": 0.9197964072227478, + "learning_rate": 8.141369679653012e-05, + "loss": 0.1853, + "step": 46070 + }, + { + "epoch": 1.7792192748754778, + "grad_norm": 0.5444622039794922, + "learning_rate": 8.138795577692833e-05, + "loss": 0.194, + "step": 46080 + }, + { + "epoch": 1.7796053901695046, + "grad_norm": 0.655691385269165, + "learning_rate": 8.136221475732654e-05, + "loss": 0.3279, + "step": 46090 + }, + { + "epoch": 1.7799915054635314, + "grad_norm": 1.0497363805770874, + "learning_rate": 8.133647373772474e-05, + "loss": 0.1208, + "step": 46100 + }, + { + "epoch": 1.7803776207575583, + "grad_norm": 1.5492208003997803, + "learning_rate": 8.131073271812297e-05, + "loss": 0.1155, + "step": 46110 + }, + { + "epoch": 1.780763736051585, + "grad_norm": 0.7815436720848083, + "learning_rate": 8.128499169852118e-05, + "loss": 0.3621, + "step": 46120 + }, + { + "epoch": 1.7811498513456119, + "grad_norm": 0.2011369913816452, + "learning_rate": 8.12592506789194e-05, + "loss": 0.1412, + "step": 46130 + }, + { + "epoch": 1.7815359666396386, + "grad_norm": 0.5177563428878784, + "learning_rate": 8.123350965931761e-05, + "loss": 0.2485, + "step": 46140 + }, + { + "epoch": 1.7819220819336654, + "grad_norm": 1.1141633987426758, + "learning_rate": 8.120776863971582e-05, + "loss": 0.2376, + "step": 46150 + }, + { + "epoch": 1.7823081972276922, + "grad_norm": 0.3764217495918274, + "learning_rate": 8.118202762011404e-05, + "loss": 0.2021, + "step": 46160 + }, + { + "epoch": 1.782694312521719, + "grad_norm": 0.7420102953910828, + "learning_rate": 8.115628660051225e-05, + "loss": 0.374, + "step": 46170 + }, + { + "epoch": 1.783080427815746, + "grad_norm": 1.4147247076034546, + "learning_rate": 8.113054558091046e-05, + "loss": 0.1202, + "step": 46180 + }, + { + "epoch": 1.7834665431097725, + "grad_norm": 0.9888404011726379, + "learning_rate": 8.110480456130868e-05, + "loss": 0.2564, + "step": 46190 + }, + { + "epoch": 1.7838526584037995, + "grad_norm": 1.2556307315826416, + "learning_rate": 8.107906354170689e-05, + "loss": 0.2403, + "step": 46200 + }, + { + "epoch": 1.784238773697826, + "grad_norm": 1.5373485088348389, + "learning_rate": 8.10533225221051e-05, + "loss": 0.23, + "step": 46210 + }, + { + "epoch": 1.784624888991853, + "grad_norm": 1.1403309106826782, + "learning_rate": 8.102758150250332e-05, + "loss": 0.2383, + "step": 46220 + }, + { + "epoch": 1.7850110042858798, + "grad_norm": 1.4236074686050415, + "learning_rate": 8.100184048290153e-05, + "loss": 0.2121, + "step": 46230 + }, + { + "epoch": 1.7853971195799065, + "grad_norm": 1.9537183046340942, + "learning_rate": 8.097609946329974e-05, + "loss": 0.2512, + "step": 46240 + }, + { + "epoch": 1.7857832348739333, + 
"grad_norm": 0.10802419483661652, + "learning_rate": 8.095035844369796e-05, + "loss": 0.2871, + "step": 46250 + }, + { + "epoch": 1.78616935016796, + "grad_norm": 0.9327038526535034, + "learning_rate": 8.092461742409617e-05, + "loss": 0.0487, + "step": 46260 + }, + { + "epoch": 1.786555465461987, + "grad_norm": 2.114015817642212, + "learning_rate": 8.089887640449438e-05, + "loss": 0.3128, + "step": 46270 + }, + { + "epoch": 1.7869415807560136, + "grad_norm": 1.2681559324264526, + "learning_rate": 8.08731353848926e-05, + "loss": 0.2883, + "step": 46280 + }, + { + "epoch": 1.7873276960500406, + "grad_norm": 1.0767977237701416, + "learning_rate": 8.084739436529082e-05, + "loss": 0.1632, + "step": 46290 + }, + { + "epoch": 1.7877138113440674, + "grad_norm": 0.9782537221908569, + "learning_rate": 8.082165334568902e-05, + "loss": 0.2489, + "step": 46300 + }, + { + "epoch": 1.7880999266380941, + "grad_norm": 0.3115275204181671, + "learning_rate": 8.079591232608724e-05, + "loss": 0.3691, + "step": 46310 + }, + { + "epoch": 1.788486041932121, + "grad_norm": 1.7753130197525024, + "learning_rate": 8.077017130648545e-05, + "loss": 0.2362, + "step": 46320 + }, + { + "epoch": 1.7888721572261477, + "grad_norm": 1.5351624488830566, + "learning_rate": 8.074443028688366e-05, + "loss": 0.4439, + "step": 46330 + }, + { + "epoch": 1.7892582725201747, + "grad_norm": 0.15024085342884064, + "learning_rate": 8.071868926728188e-05, + "loss": 0.2309, + "step": 46340 + }, + { + "epoch": 1.7896443878142012, + "grad_norm": 0.3030475974082947, + "learning_rate": 8.069294824768009e-05, + "loss": 0.1032, + "step": 46350 + }, + { + "epoch": 1.7900305031082282, + "grad_norm": 2.4676828384399414, + "learning_rate": 8.066720722807832e-05, + "loss": 0.4874, + "step": 46360 + }, + { + "epoch": 1.790416618402255, + "grad_norm": 1.4394946098327637, + "learning_rate": 8.064146620847653e-05, + "loss": 0.1635, + "step": 46370 + }, + { + "epoch": 1.7908027336962817, + "grad_norm": 0.05110672488808632, + "learning_rate": 8.061572518887473e-05, + "loss": 0.1001, + "step": 46380 + }, + { + "epoch": 1.7911888489903085, + "grad_norm": 0.9311584830284119, + "learning_rate": 8.058998416927294e-05, + "loss": 0.288, + "step": 46390 + }, + { + "epoch": 1.7915749642843353, + "grad_norm": 0.869853675365448, + "learning_rate": 8.056424314967116e-05, + "loss": 0.3104, + "step": 46400 + }, + { + "epoch": 1.7919610795783623, + "grad_norm": 1.8579514026641846, + "learning_rate": 8.053850213006937e-05, + "loss": 0.1846, + "step": 46410 + }, + { + "epoch": 1.7923471948723888, + "grad_norm": 1.0974096059799194, + "learning_rate": 8.05127611104676e-05, + "loss": 0.2144, + "step": 46420 + }, + { + "epoch": 1.7927333101664158, + "grad_norm": 1.3006702661514282, + "learning_rate": 8.048702009086581e-05, + "loss": 0.4802, + "step": 46430 + }, + { + "epoch": 1.7931194254604423, + "grad_norm": 0.41811448335647583, + "learning_rate": 8.046127907126402e-05, + "loss": 0.3352, + "step": 46440 + }, + { + "epoch": 1.7935055407544693, + "grad_norm": 1.1778554916381836, + "learning_rate": 8.043553805166222e-05, + "loss": 0.2485, + "step": 46450 + }, + { + "epoch": 1.793891656048496, + "grad_norm": 0.10794985294342041, + "learning_rate": 8.040979703206044e-05, + "loss": 0.3235, + "step": 46460 + }, + { + "epoch": 1.7942777713425229, + "grad_norm": 0.6999881267547607, + "learning_rate": 8.038405601245865e-05, + "loss": 0.2148, + "step": 46470 + }, + { + "epoch": 1.7946638866365496, + "grad_norm": 1.2335025072097778, + "learning_rate": 8.035831499285688e-05, + "loss": 
0.2128, + "step": 46480 + }, + { + "epoch": 1.7950500019305764, + "grad_norm": 0.23150336742401123, + "learning_rate": 8.033257397325509e-05, + "loss": 0.3316, + "step": 46490 + }, + { + "epoch": 1.7954361172246034, + "grad_norm": 2.0523316860198975, + "learning_rate": 8.03068329536533e-05, + "loss": 0.1892, + "step": 46500 + }, + { + "epoch": 1.79582223251863, + "grad_norm": 1.127738356590271, + "learning_rate": 8.028109193405152e-05, + "loss": 0.2521, + "step": 46510 + }, + { + "epoch": 1.796208347812657, + "grad_norm": 1.5948694944381714, + "learning_rate": 8.025535091444972e-05, + "loss": 0.3407, + "step": 46520 + }, + { + "epoch": 1.7965944631066837, + "grad_norm": 0.6383172273635864, + "learning_rate": 8.022960989484793e-05, + "loss": 0.271, + "step": 46530 + }, + { + "epoch": 1.7969805784007105, + "grad_norm": 1.016988754272461, + "learning_rate": 8.020386887524616e-05, + "loss": 0.1699, + "step": 46540 + }, + { + "epoch": 1.7973666936947372, + "grad_norm": 0.7680085897445679, + "learning_rate": 8.017812785564437e-05, + "loss": 0.193, + "step": 46550 + }, + { + "epoch": 1.797752808988764, + "grad_norm": 0.36858290433883667, + "learning_rate": 8.015238683604258e-05, + "loss": 0.2332, + "step": 46560 + }, + { + "epoch": 1.798138924282791, + "grad_norm": 6.89350700378418, + "learning_rate": 8.01266458164408e-05, + "loss": 0.6444, + "step": 46570 + }, + { + "epoch": 1.7985250395768175, + "grad_norm": 0.3459351360797882, + "learning_rate": 8.010090479683901e-05, + "loss": 0.2327, + "step": 46580 + }, + { + "epoch": 1.7989111548708445, + "grad_norm": 1.195894718170166, + "learning_rate": 8.007516377723722e-05, + "loss": 0.2167, + "step": 46590 + }, + { + "epoch": 1.799297270164871, + "grad_norm": 3.738119602203369, + "learning_rate": 8.004942275763542e-05, + "loss": 0.1946, + "step": 46600 + }, + { + "epoch": 1.799683385458898, + "grad_norm": 1.207139015197754, + "learning_rate": 8.002368173803365e-05, + "loss": 0.2888, + "step": 46610 + }, + { + "epoch": 1.8000695007529248, + "grad_norm": 0.2352428138256073, + "learning_rate": 7.999794071843186e-05, + "loss": 0.301, + "step": 46620 + }, + { + "epoch": 1.8004556160469516, + "grad_norm": 0.4833974242210388, + "learning_rate": 7.997219969883008e-05, + "loss": 0.1984, + "step": 46630 + }, + { + "epoch": 1.8008417313409786, + "grad_norm": 0.27819526195526123, + "learning_rate": 7.994645867922829e-05, + "loss": 0.229, + "step": 46640 + }, + { + "epoch": 1.8012278466350051, + "grad_norm": 1.2809014320373535, + "learning_rate": 7.99207176596265e-05, + "loss": 0.328, + "step": 46650 + }, + { + "epoch": 1.8016139619290321, + "grad_norm": 3.3877575397491455, + "learning_rate": 7.989497664002472e-05, + "loss": 0.236, + "step": 46660 + }, + { + "epoch": 1.8020000772230587, + "grad_norm": 0.5104680061340332, + "learning_rate": 7.986923562042293e-05, + "loss": 0.1853, + "step": 46670 + }, + { + "epoch": 1.8023861925170857, + "grad_norm": 0.6869563460350037, + "learning_rate": 7.984349460082114e-05, + "loss": 0.1743, + "step": 46680 + }, + { + "epoch": 1.8027723078111124, + "grad_norm": 1.9329041242599487, + "learning_rate": 7.981775358121936e-05, + "loss": 0.3057, + "step": 46690 + }, + { + "epoch": 1.8031584231051392, + "grad_norm": 0.9997574090957642, + "learning_rate": 7.979201256161757e-05, + "loss": 0.2145, + "step": 46700 + }, + { + "epoch": 1.803544538399166, + "grad_norm": 1.173745036125183, + "learning_rate": 7.976627154201578e-05, + "loss": 0.2315, + "step": 46710 + }, + { + "epoch": 1.8039306536931927, + "grad_norm": 0.9252222180366516, + 
"learning_rate": 7.9740530522414e-05, + "loss": 0.3173, + "step": 46720 + }, + { + "epoch": 1.8043167689872197, + "grad_norm": 1.316101312637329, + "learning_rate": 7.971478950281221e-05, + "loss": 0.1249, + "step": 46730 + }, + { + "epoch": 1.8047028842812463, + "grad_norm": 1.1243810653686523, + "learning_rate": 7.968904848321042e-05, + "loss": 0.3688, + "step": 46740 + }, + { + "epoch": 1.8050889995752732, + "grad_norm": 2.3322315216064453, + "learning_rate": 7.966330746360864e-05, + "loss": 0.2379, + "step": 46750 + }, + { + "epoch": 1.8054751148693, + "grad_norm": 1.534040927886963, + "learning_rate": 7.963756644400685e-05, + "loss": 0.365, + "step": 46760 + }, + { + "epoch": 1.8058612301633268, + "grad_norm": 1.326835036277771, + "learning_rate": 7.961182542440506e-05, + "loss": 0.1909, + "step": 46770 + }, + { + "epoch": 1.8062473454573535, + "grad_norm": 1.8357387781143188, + "learning_rate": 7.958608440480328e-05, + "loss": 0.3187, + "step": 46780 + }, + { + "epoch": 1.8066334607513803, + "grad_norm": 0.9270907044410706, + "learning_rate": 7.95603433852015e-05, + "loss": 0.2022, + "step": 46790 + }, + { + "epoch": 1.8070195760454073, + "grad_norm": 0.5300419330596924, + "learning_rate": 7.95346023655997e-05, + "loss": 0.2015, + "step": 46800 + }, + { + "epoch": 1.8074056913394339, + "grad_norm": 0.35859522223472595, + "learning_rate": 7.950886134599792e-05, + "loss": 0.1764, + "step": 46810 + }, + { + "epoch": 1.8077918066334608, + "grad_norm": 0.5568848252296448, + "learning_rate": 7.948312032639613e-05, + "loss": 0.1862, + "step": 46820 + }, + { + "epoch": 1.8081779219274874, + "grad_norm": 0.22550754249095917, + "learning_rate": 7.945737930679434e-05, + "loss": 0.2237, + "step": 46830 + }, + { + "epoch": 1.8085640372215144, + "grad_norm": 1.2444871664047241, + "learning_rate": 7.943163828719256e-05, + "loss": 0.2077, + "step": 46840 + }, + { + "epoch": 1.8089501525155411, + "grad_norm": 0.9609934091567993, + "learning_rate": 7.940589726759077e-05, + "loss": 0.2717, + "step": 46850 + }, + { + "epoch": 1.809336267809568, + "grad_norm": 0.2863350808620453, + "learning_rate": 7.9380156247989e-05, + "loss": 0.3284, + "step": 46860 + }, + { + "epoch": 1.8097223831035947, + "grad_norm": 1.6350208520889282, + "learning_rate": 7.93544152283872e-05, + "loss": 0.0919, + "step": 46870 + }, + { + "epoch": 1.8101084983976214, + "grad_norm": 0.5808874368667603, + "learning_rate": 7.932867420878541e-05, + "loss": 0.165, + "step": 46880 + }, + { + "epoch": 1.8104946136916484, + "grad_norm": 0.5033291578292847, + "learning_rate": 7.930293318918362e-05, + "loss": 0.192, + "step": 46890 + }, + { + "epoch": 1.810880728985675, + "grad_norm": 3.8204874992370605, + "learning_rate": 7.927719216958184e-05, + "loss": 0.3885, + "step": 46900 + }, + { + "epoch": 1.811266844279702, + "grad_norm": 0.9689306020736694, + "learning_rate": 7.925145114998005e-05, + "loss": 0.2609, + "step": 46910 + }, + { + "epoch": 1.8116529595737287, + "grad_norm": 2.7360000610351562, + "learning_rate": 7.922571013037828e-05, + "loss": 0.1955, + "step": 46920 + }, + { + "epoch": 1.8120390748677555, + "grad_norm": 1.2181187868118286, + "learning_rate": 7.919996911077649e-05, + "loss": 0.251, + "step": 46930 + }, + { + "epoch": 1.8124251901617823, + "grad_norm": 0.2421361356973648, + "learning_rate": 7.91742280911747e-05, + "loss": 0.1401, + "step": 46940 + }, + { + "epoch": 1.812811305455809, + "grad_norm": 1.1567500829696655, + "learning_rate": 7.91484870715729e-05, + "loss": 0.2303, + "step": 46950 + }, + { + "epoch": 
1.813197420749836, + "grad_norm": 2.173985481262207, + "learning_rate": 7.912274605197112e-05, + "loss": 0.3141, + "step": 46960 + }, + { + "epoch": 1.8135835360438626, + "grad_norm": 0.21923333406448364, + "learning_rate": 7.909700503236933e-05, + "loss": 0.3109, + "step": 46970 + }, + { + "epoch": 1.8139696513378896, + "grad_norm": 2.7012853622436523, + "learning_rate": 7.907126401276756e-05, + "loss": 0.2912, + "step": 46980 + }, + { + "epoch": 1.8143557666319163, + "grad_norm": 1.6185250282287598, + "learning_rate": 7.904552299316577e-05, + "loss": 0.1989, + "step": 46990 + }, + { + "epoch": 1.814741881925943, + "grad_norm": 1.2078602313995361, + "learning_rate": 7.901978197356398e-05, + "loss": 0.2906, + "step": 47000 + }, + { + "epoch": 1.8151279972199699, + "grad_norm": 0.8695535063743591, + "learning_rate": 7.89940409539622e-05, + "loss": 0.2486, + "step": 47010 + }, + { + "epoch": 1.8155141125139966, + "grad_norm": 1.7880644798278809, + "learning_rate": 7.89682999343604e-05, + "loss": 0.2963, + "step": 47020 + }, + { + "epoch": 1.8159002278080236, + "grad_norm": 1.9220221042633057, + "learning_rate": 7.894255891475861e-05, + "loss": 0.2822, + "step": 47030 + }, + { + "epoch": 1.8162863431020502, + "grad_norm": 0.2507815361022949, + "learning_rate": 7.891681789515684e-05, + "loss": 0.165, + "step": 47040 + }, + { + "epoch": 1.8166724583960772, + "grad_norm": 0.8847838640213013, + "learning_rate": 7.889107687555505e-05, + "loss": 0.1536, + "step": 47050 + }, + { + "epoch": 1.8170585736901037, + "grad_norm": 0.7972745895385742, + "learning_rate": 7.886533585595326e-05, + "loss": 0.4152, + "step": 47060 + }, + { + "epoch": 1.8174446889841307, + "grad_norm": 3.005274534225464, + "learning_rate": 7.883959483635148e-05, + "loss": 0.2491, + "step": 47070 + }, + { + "epoch": 1.8178308042781575, + "grad_norm": 0.19801591336727142, + "learning_rate": 7.881385381674969e-05, + "loss": 0.2671, + "step": 47080 + }, + { + "epoch": 1.8182169195721842, + "grad_norm": 0.43803074955940247, + "learning_rate": 7.878811279714789e-05, + "loss": 0.1101, + "step": 47090 + }, + { + "epoch": 1.818603034866211, + "grad_norm": 2.517371416091919, + "learning_rate": 7.876237177754612e-05, + "loss": 0.2759, + "step": 47100 + }, + { + "epoch": 1.8189891501602378, + "grad_norm": 1.0714353322982788, + "learning_rate": 7.873663075794433e-05, + "loss": 0.2247, + "step": 47110 + }, + { + "epoch": 1.8193752654542648, + "grad_norm": 1.3440790176391602, + "learning_rate": 7.871088973834254e-05, + "loss": 0.1309, + "step": 47120 + }, + { + "epoch": 1.8197613807482913, + "grad_norm": 0.10665372759103775, + "learning_rate": 7.868514871874076e-05, + "loss": 0.3176, + "step": 47130 + }, + { + "epoch": 1.8201474960423183, + "grad_norm": 0.6812877655029297, + "learning_rate": 7.865940769913897e-05, + "loss": 0.1709, + "step": 47140 + }, + { + "epoch": 1.820533611336345, + "grad_norm": 0.3776918649673462, + "learning_rate": 7.863366667953718e-05, + "loss": 0.1949, + "step": 47150 + }, + { + "epoch": 1.8209197266303718, + "grad_norm": 0.7111637592315674, + "learning_rate": 7.860792565993538e-05, + "loss": 0.2463, + "step": 47160 + }, + { + "epoch": 1.8213058419243986, + "grad_norm": 0.5845387578010559, + "learning_rate": 7.858218464033361e-05, + "loss": 0.285, + "step": 47170 + }, + { + "epoch": 1.8216919572184254, + "grad_norm": 0.394326776266098, + "learning_rate": 7.855644362073182e-05, + "loss": 0.1547, + "step": 47180 + }, + { + "epoch": 1.8220780725124524, + "grad_norm": 1.6686782836914062, + "learning_rate": 
7.853070260113004e-05, + "loss": 0.2764, + "step": 47190 + }, + { + "epoch": 1.822464187806479, + "grad_norm": 0.4293366074562073, + "learning_rate": 7.850496158152825e-05, + "loss": 0.2277, + "step": 47200 + }, + { + "epoch": 1.8228503031005059, + "grad_norm": 0.5316643118858337, + "learning_rate": 7.847922056192646e-05, + "loss": 0.2015, + "step": 47210 + }, + { + "epoch": 1.8232364183945327, + "grad_norm": 1.1477640867233276, + "learning_rate": 7.845347954232468e-05, + "loss": 0.4081, + "step": 47220 + }, + { + "epoch": 1.8236225336885594, + "grad_norm": 1.0033808946609497, + "learning_rate": 7.842773852272289e-05, + "loss": 0.1356, + "step": 47230 + }, + { + "epoch": 1.8240086489825862, + "grad_norm": 1.0021567344665527, + "learning_rate": 7.84019975031211e-05, + "loss": 0.161, + "step": 47240 + }, + { + "epoch": 1.824394764276613, + "grad_norm": 0.733974277973175, + "learning_rate": 7.837625648351932e-05, + "loss": 0.1246, + "step": 47250 + }, + { + "epoch": 1.82478087957064, + "grad_norm": 1.1681737899780273, + "learning_rate": 7.835051546391753e-05, + "loss": 0.3287, + "step": 47260 + }, + { + "epoch": 1.8251669948646665, + "grad_norm": 2.490657329559326, + "learning_rate": 7.832477444431574e-05, + "loss": 0.2203, + "step": 47270 + }, + { + "epoch": 1.8255531101586935, + "grad_norm": 1.1667157411575317, + "learning_rate": 7.829903342471396e-05, + "loss": 0.2827, + "step": 47280 + }, + { + "epoch": 1.82593922545272, + "grad_norm": 0.31772786378860474, + "learning_rate": 7.827329240511217e-05, + "loss": 0.1664, + "step": 47290 + }, + { + "epoch": 1.826325340746747, + "grad_norm": 1.1539255380630493, + "learning_rate": 7.824755138551038e-05, + "loss": 0.207, + "step": 47300 + }, + { + "epoch": 1.8267114560407738, + "grad_norm": 0.9592777490615845, + "learning_rate": 7.82218103659086e-05, + "loss": 0.193, + "step": 47310 + }, + { + "epoch": 1.8270975713348006, + "grad_norm": 0.6902135014533997, + "learning_rate": 7.819606934630681e-05, + "loss": 0.2119, + "step": 47320 + }, + { + "epoch": 1.8274836866288273, + "grad_norm": 1.4007855653762817, + "learning_rate": 7.817032832670502e-05, + "loss": 0.2046, + "step": 47330 + }, + { + "epoch": 1.827869801922854, + "grad_norm": 1.520080804824829, + "learning_rate": 7.814458730710324e-05, + "loss": 0.2861, + "step": 47340 + }, + { + "epoch": 1.828255917216881, + "grad_norm": 2.576406478881836, + "learning_rate": 7.811884628750146e-05, + "loss": 0.2615, + "step": 47350 + }, + { + "epoch": 1.8286420325109076, + "grad_norm": 1.499143362045288, + "learning_rate": 7.809310526789968e-05, + "loss": 0.4094, + "step": 47360 + }, + { + "epoch": 1.8290281478049346, + "grad_norm": 0.9561934471130371, + "learning_rate": 7.806736424829787e-05, + "loss": 0.2121, + "step": 47370 + }, + { + "epoch": 1.8294142630989614, + "grad_norm": 1.838971734046936, + "learning_rate": 7.804162322869609e-05, + "loss": 0.325, + "step": 47380 + }, + { + "epoch": 1.8298003783929881, + "grad_norm": 1.1759018898010254, + "learning_rate": 7.80158822090943e-05, + "loss": 0.1095, + "step": 47390 + }, + { + "epoch": 1.830186493687015, + "grad_norm": 0.4624423086643219, + "learning_rate": 7.799014118949251e-05, + "loss": 0.264, + "step": 47400 + }, + { + "epoch": 1.8305726089810417, + "grad_norm": 2.0572640895843506, + "learning_rate": 7.796440016989073e-05, + "loss": 0.3171, + "step": 47410 + }, + { + "epoch": 1.8309587242750687, + "grad_norm": 0.959831714630127, + "learning_rate": 7.793865915028896e-05, + "loss": 0.1588, + "step": 47420 + }, + { + "epoch": 1.8313448395690952, + 
"grad_norm": 0.7953433394432068, + "learning_rate": 7.791291813068717e-05, + "loss": 0.3992, + "step": 47430 + }, + { + "epoch": 1.8317309548631222, + "grad_norm": 0.8194203972816467, + "learning_rate": 7.788717711108537e-05, + "loss": 0.2718, + "step": 47440 + }, + { + "epoch": 1.832117070157149, + "grad_norm": 1.7283341884613037, + "learning_rate": 7.786143609148358e-05, + "loss": 0.3977, + "step": 47450 + }, + { + "epoch": 1.8325031854511757, + "grad_norm": 0.15646076202392578, + "learning_rate": 7.78356950718818e-05, + "loss": 0.2449, + "step": 47460 + }, + { + "epoch": 1.8328893007452025, + "grad_norm": 1.0211836099624634, + "learning_rate": 7.780995405228001e-05, + "loss": 0.2022, + "step": 47470 + }, + { + "epoch": 1.8332754160392293, + "grad_norm": 0.8658961653709412, + "learning_rate": 7.778421303267823e-05, + "loss": 0.2429, + "step": 47480 + }, + { + "epoch": 1.8336615313332563, + "grad_norm": 1.2359483242034912, + "learning_rate": 7.775847201307645e-05, + "loss": 0.3284, + "step": 47490 + }, + { + "epoch": 1.8340476466272828, + "grad_norm": 1.2980804443359375, + "learning_rate": 7.773273099347466e-05, + "loss": 0.1205, + "step": 47500 + }, + { + "epoch": 1.8344337619213098, + "grad_norm": 0.06653542071580887, + "learning_rate": 7.770698997387286e-05, + "loss": 0.1376, + "step": 47510 + }, + { + "epoch": 1.8348198772153363, + "grad_norm": 1.0570144653320312, + "learning_rate": 7.768124895427107e-05, + "loss": 0.1288, + "step": 47520 + }, + { + "epoch": 1.8352059925093633, + "grad_norm": 1.0040183067321777, + "learning_rate": 7.765550793466929e-05, + "loss": 0.1825, + "step": 47530 + }, + { + "epoch": 1.83559210780339, + "grad_norm": 2.5596699714660645, + "learning_rate": 7.762976691506751e-05, + "loss": 0.1764, + "step": 47540 + }, + { + "epoch": 1.8359782230974169, + "grad_norm": 4.138411521911621, + "learning_rate": 7.760402589546573e-05, + "loss": 0.2783, + "step": 47550 + }, + { + "epoch": 1.8363643383914436, + "grad_norm": 0.8152881860733032, + "learning_rate": 7.757828487586394e-05, + "loss": 0.1424, + "step": 47560 + }, + { + "epoch": 1.8367504536854704, + "grad_norm": 2.608886480331421, + "learning_rate": 7.755254385626215e-05, + "loss": 0.161, + "step": 47570 + }, + { + "epoch": 1.8371365689794974, + "grad_norm": 2.870406150817871, + "learning_rate": 7.752680283666037e-05, + "loss": 0.6419, + "step": 47580 + }, + { + "epoch": 1.837522684273524, + "grad_norm": 1.2127447128295898, + "learning_rate": 7.750106181705857e-05, + "loss": 0.1987, + "step": 47590 + }, + { + "epoch": 1.837908799567551, + "grad_norm": 0.44798439741134644, + "learning_rate": 7.74753207974568e-05, + "loss": 0.2313, + "step": 47600 + }, + { + "epoch": 1.8382949148615777, + "grad_norm": 0.434055894613266, + "learning_rate": 7.744957977785501e-05, + "loss": 0.1227, + "step": 47610 + }, + { + "epoch": 1.8386810301556045, + "grad_norm": 1.0160471200942993, + "learning_rate": 7.742383875825322e-05, + "loss": 0.2699, + "step": 47620 + }, + { + "epoch": 1.8390671454496312, + "grad_norm": 2.0095553398132324, + "learning_rate": 7.739809773865143e-05, + "loss": 0.2666, + "step": 47630 + }, + { + "epoch": 1.839453260743658, + "grad_norm": 2.849498748779297, + "learning_rate": 7.737235671904965e-05, + "loss": 0.2798, + "step": 47640 + }, + { + "epoch": 1.839839376037685, + "grad_norm": 2.336435556411743, + "learning_rate": 7.734661569944786e-05, + "loss": 0.3048, + "step": 47650 + }, + { + "epoch": 1.8402254913317115, + "grad_norm": 0.8438564538955688, + "learning_rate": 7.732087467984606e-05, + "loss": 0.2876, + 
"step": 47660 + }, + { + "epoch": 1.8406116066257385, + "grad_norm": 0.514867901802063, + "learning_rate": 7.729513366024429e-05, + "loss": 0.2249, + "step": 47670 + }, + { + "epoch": 1.8409977219197653, + "grad_norm": 4.391468048095703, + "learning_rate": 7.72693926406425e-05, + "loss": 0.3918, + "step": 47680 + }, + { + "epoch": 1.841383837213792, + "grad_norm": 2.4497931003570557, + "learning_rate": 7.724365162104071e-05, + "loss": 0.3368, + "step": 47690 + }, + { + "epoch": 1.8417699525078188, + "grad_norm": 2.884559392929077, + "learning_rate": 7.721791060143893e-05, + "loss": 0.2406, + "step": 47700 + }, + { + "epoch": 1.8421560678018456, + "grad_norm": 1.099225640296936, + "learning_rate": 7.719216958183714e-05, + "loss": 0.1759, + "step": 47710 + }, + { + "epoch": 1.8425421830958726, + "grad_norm": 0.881747305393219, + "learning_rate": 7.716642856223535e-05, + "loss": 0.2377, + "step": 47720 + }, + { + "epoch": 1.8429282983898991, + "grad_norm": 1.7341536283493042, + "learning_rate": 7.714068754263357e-05, + "loss": 0.1593, + "step": 47730 + }, + { + "epoch": 1.8433144136839261, + "grad_norm": 0.8997848629951477, + "learning_rate": 7.711494652303178e-05, + "loss": 0.2198, + "step": 47740 + }, + { + "epoch": 1.8437005289779527, + "grad_norm": 0.38423866033554077, + "learning_rate": 7.708920550343e-05, + "loss": 0.2539, + "step": 47750 + }, + { + "epoch": 1.8440866442719797, + "grad_norm": 1.0857203006744385, + "learning_rate": 7.706346448382821e-05, + "loss": 0.1595, + "step": 47760 + }, + { + "epoch": 1.8444727595660064, + "grad_norm": 1.5009055137634277, + "learning_rate": 7.703772346422642e-05, + "loss": 0.2478, + "step": 47770 + }, + { + "epoch": 1.8448588748600332, + "grad_norm": 0.13288016617298126, + "learning_rate": 7.701198244462463e-05, + "loss": 0.2184, + "step": 47780 + }, + { + "epoch": 1.84524499015406, + "grad_norm": 1.167777180671692, + "learning_rate": 7.698624142502285e-05, + "loss": 0.3157, + "step": 47790 + }, + { + "epoch": 1.8456311054480867, + "grad_norm": 0.5168697834014893, + "learning_rate": 7.696050040542106e-05, + "loss": 0.1543, + "step": 47800 + }, + { + "epoch": 1.8460172207421137, + "grad_norm": 0.7516908645629883, + "learning_rate": 7.693475938581927e-05, + "loss": 0.1869, + "step": 47810 + }, + { + "epoch": 1.8464033360361403, + "grad_norm": 3.0962419509887695, + "learning_rate": 7.690901836621749e-05, + "loss": 0.3891, + "step": 47820 + }, + { + "epoch": 1.8467894513301673, + "grad_norm": 0.05181325227022171, + "learning_rate": 7.68832773466157e-05, + "loss": 0.3427, + "step": 47830 + }, + { + "epoch": 1.847175566624194, + "grad_norm": 2.856113910675049, + "learning_rate": 7.685753632701391e-05, + "loss": 0.2148, + "step": 47840 + }, + { + "epoch": 1.8475616819182208, + "grad_norm": 0.4950585663318634, + "learning_rate": 7.683179530741214e-05, + "loss": 0.2689, + "step": 47850 + }, + { + "epoch": 1.8479477972122476, + "grad_norm": 2.390775680541992, + "learning_rate": 7.680605428781034e-05, + "loss": 0.3389, + "step": 47860 + }, + { + "epoch": 1.8483339125062743, + "grad_norm": 0.1536688208580017, + "learning_rate": 7.678031326820855e-05, + "loss": 0.1811, + "step": 47870 + }, + { + "epoch": 1.8487200278003013, + "grad_norm": 0.26756611466407776, + "learning_rate": 7.675457224860677e-05, + "loss": 0.1735, + "step": 47880 + }, + { + "epoch": 1.8491061430943279, + "grad_norm": 1.3034265041351318, + "learning_rate": 7.672883122900498e-05, + "loss": 0.1554, + "step": 47890 + }, + { + "epoch": 1.8494922583883548, + "grad_norm": 1.1781542301177979, + 
"learning_rate": 7.67030902094032e-05, + "loss": 0.4027, + "step": 47900 + }, + { + "epoch": 1.8498783736823814, + "grad_norm": 1.2090016603469849, + "learning_rate": 7.667734918980141e-05, + "loss": 0.1873, + "step": 47910 + }, + { + "epoch": 1.8502644889764084, + "grad_norm": 1.5354760885238647, + "learning_rate": 7.665160817019963e-05, + "loss": 0.2218, + "step": 47920 + }, + { + "epoch": 1.8506506042704352, + "grad_norm": 2.093831777572632, + "learning_rate": 7.662586715059785e-05, + "loss": 0.2591, + "step": 47930 + }, + { + "epoch": 1.851036719564462, + "grad_norm": 0.9484484195709229, + "learning_rate": 7.660012613099605e-05, + "loss": 0.252, + "step": 47940 + }, + { + "epoch": 1.851422834858489, + "grad_norm": 0.15661562979221344, + "learning_rate": 7.657438511139426e-05, + "loss": 0.2673, + "step": 47950 + }, + { + "epoch": 1.8518089501525155, + "grad_norm": 0.5501863360404968, + "learning_rate": 7.654864409179247e-05, + "loss": 0.2636, + "step": 47960 + }, + { + "epoch": 1.8521950654465424, + "grad_norm": 1.455328345298767, + "learning_rate": 7.652290307219069e-05, + "loss": 0.2085, + "step": 47970 + }, + { + "epoch": 1.852581180740569, + "grad_norm": 0.310106098651886, + "learning_rate": 7.649716205258891e-05, + "loss": 0.1839, + "step": 47980 + }, + { + "epoch": 1.852967296034596, + "grad_norm": 0.444260835647583, + "learning_rate": 7.647142103298713e-05, + "loss": 0.2434, + "step": 47990 + }, + { + "epoch": 1.8533534113286227, + "grad_norm": 0.5278909206390381, + "learning_rate": 7.644568001338534e-05, + "loss": 0.2472, + "step": 48000 + }, + { + "epoch": 1.8537395266226495, + "grad_norm": 0.4239410161972046, + "learning_rate": 7.641993899378354e-05, + "loss": 0.2854, + "step": 48010 + }, + { + "epoch": 1.8541256419166763, + "grad_norm": 0.9763671159744263, + "learning_rate": 7.639419797418175e-05, + "loss": 0.2737, + "step": 48020 + }, + { + "epoch": 1.854511757210703, + "grad_norm": 2.139054775238037, + "learning_rate": 7.636845695457997e-05, + "loss": 0.2553, + "step": 48030 + }, + { + "epoch": 1.85489787250473, + "grad_norm": 1.8417706489562988, + "learning_rate": 7.63427159349782e-05, + "loss": 0.3787, + "step": 48040 + }, + { + "epoch": 1.8552839877987566, + "grad_norm": 1.2950854301452637, + "learning_rate": 7.631697491537641e-05, + "loss": 0.1859, + "step": 48050 + }, + { + "epoch": 1.8556701030927836, + "grad_norm": 0.9440277218818665, + "learning_rate": 7.629123389577462e-05, + "loss": 0.2362, + "step": 48060 + }, + { + "epoch": 1.8560562183868103, + "grad_norm": 2.829890012741089, + "learning_rate": 7.626549287617283e-05, + "loss": 0.264, + "step": 48070 + }, + { + "epoch": 1.856442333680837, + "grad_norm": 1.5721958875656128, + "learning_rate": 7.623975185657103e-05, + "loss": 0.2834, + "step": 48080 + }, + { + "epoch": 1.8568284489748639, + "grad_norm": 0.7574679851531982, + "learning_rate": 7.621401083696925e-05, + "loss": 0.1546, + "step": 48090 + }, + { + "epoch": 1.8572145642688906, + "grad_norm": 0.4562332332134247, + "learning_rate": 7.618826981736747e-05, + "loss": 0.2419, + "step": 48100 + }, + { + "epoch": 1.8576006795629176, + "grad_norm": 1.8063342571258545, + "learning_rate": 7.616252879776569e-05, + "loss": 0.5576, + "step": 48110 + }, + { + "epoch": 1.8579867948569442, + "grad_norm": 1.7112247943878174, + "learning_rate": 7.61367877781639e-05, + "loss": 0.2379, + "step": 48120 + }, + { + "epoch": 1.8583729101509712, + "grad_norm": 0.696594774723053, + "learning_rate": 7.611104675856211e-05, + "loss": 0.2704, + "step": 48130 + }, + { + "epoch": 
1.8587590254449977, + "grad_norm": 1.1555263996124268, + "learning_rate": 7.608530573896033e-05, + "loss": 0.2835, + "step": 48140 + }, + { + "epoch": 1.8591451407390247, + "grad_norm": 0.8256335854530334, + "learning_rate": 7.605956471935854e-05, + "loss": 0.3059, + "step": 48150 + }, + { + "epoch": 1.8595312560330515, + "grad_norm": 1.5511350631713867, + "learning_rate": 7.603382369975674e-05, + "loss": 0.2882, + "step": 48160 + }, + { + "epoch": 1.8599173713270782, + "grad_norm": 0.5888099074363708, + "learning_rate": 7.600808268015497e-05, + "loss": 0.2939, + "step": 48170 + }, + { + "epoch": 1.860303486621105, + "grad_norm": 1.3985711336135864, + "learning_rate": 7.598234166055318e-05, + "loss": 0.203, + "step": 48180 + }, + { + "epoch": 1.8606896019151318, + "grad_norm": 2.2253661155700684, + "learning_rate": 7.59566006409514e-05, + "loss": 0.2865, + "step": 48190 + }, + { + "epoch": 1.8610757172091588, + "grad_norm": 1.062969446182251, + "learning_rate": 7.593085962134961e-05, + "loss": 0.3292, + "step": 48200 + }, + { + "epoch": 1.8614618325031853, + "grad_norm": 1.0917813777923584, + "learning_rate": 7.590511860174782e-05, + "loss": 0.2274, + "step": 48210 + }, + { + "epoch": 1.8618479477972123, + "grad_norm": 1.3360031843185425, + "learning_rate": 7.587937758214603e-05, + "loss": 0.2381, + "step": 48220 + }, + { + "epoch": 1.862234063091239, + "grad_norm": 0.12084411829710007, + "learning_rate": 7.585363656254425e-05, + "loss": 0.1836, + "step": 48230 + }, + { + "epoch": 1.8626201783852658, + "grad_norm": 3.0013840198516846, + "learning_rate": 7.582789554294246e-05, + "loss": 0.2514, + "step": 48240 + }, + { + "epoch": 1.8630062936792926, + "grad_norm": 0.08427372574806213, + "learning_rate": 7.580215452334067e-05, + "loss": 0.4512, + "step": 48250 + }, + { + "epoch": 1.8633924089733194, + "grad_norm": 2.2700986862182617, + "learning_rate": 7.577641350373889e-05, + "loss": 0.4929, + "step": 48260 + }, + { + "epoch": 1.8637785242673464, + "grad_norm": 1.512097716331482, + "learning_rate": 7.57506724841371e-05, + "loss": 0.2595, + "step": 48270 + }, + { + "epoch": 1.864164639561373, + "grad_norm": 0.03086630441248417, + "learning_rate": 7.572493146453531e-05, + "loss": 0.2801, + "step": 48280 + }, + { + "epoch": 1.8645507548554, + "grad_norm": 1.8089312314987183, + "learning_rate": 7.569919044493353e-05, + "loss": 0.3094, + "step": 48290 + }, + { + "epoch": 1.8649368701494267, + "grad_norm": 1.2290606498718262, + "learning_rate": 7.567344942533174e-05, + "loss": 0.2952, + "step": 48300 + }, + { + "epoch": 1.8653229854434534, + "grad_norm": 2.2351982593536377, + "learning_rate": 7.564770840572995e-05, + "loss": 0.1904, + "step": 48310 + }, + { + "epoch": 1.8657091007374802, + "grad_norm": 1.007934808731079, + "learning_rate": 7.562196738612817e-05, + "loss": 0.174, + "step": 48320 + }, + { + "epoch": 1.866095216031507, + "grad_norm": 1.8931010961532593, + "learning_rate": 7.559622636652638e-05, + "loss": 0.2265, + "step": 48330 + }, + { + "epoch": 1.866481331325534, + "grad_norm": 7.616462230682373, + "learning_rate": 7.55704853469246e-05, + "loss": 0.194, + "step": 48340 + }, + { + "epoch": 1.8668674466195605, + "grad_norm": 0.8300217390060425, + "learning_rate": 7.554474432732282e-05, + "loss": 0.172, + "step": 48350 + }, + { + "epoch": 1.8672535619135875, + "grad_norm": 0.9842997789382935, + "learning_rate": 7.551900330772102e-05, + "loss": 0.2279, + "step": 48360 + }, + { + "epoch": 1.867639677207614, + "grad_norm": 1.950230360031128, + "learning_rate": 7.549326228811923e-05, + 
"loss": 0.2703, + "step": 48370 + }, + { + "epoch": 1.868025792501641, + "grad_norm": 1.9885706901550293, + "learning_rate": 7.546752126851745e-05, + "loss": 0.2584, + "step": 48380 + }, + { + "epoch": 1.8684119077956678, + "grad_norm": 0.6000315546989441, + "learning_rate": 7.544178024891566e-05, + "loss": 0.2729, + "step": 48390 + }, + { + "epoch": 1.8687980230896946, + "grad_norm": 0.922893226146698, + "learning_rate": 7.541603922931387e-05, + "loss": 0.1564, + "step": 48400 + }, + { + "epoch": 1.8691841383837213, + "grad_norm": 0.4918765723705292, + "learning_rate": 7.53902982097121e-05, + "loss": 0.1723, + "step": 48410 + }, + { + "epoch": 1.869570253677748, + "grad_norm": 0.9271582365036011, + "learning_rate": 7.536455719011031e-05, + "loss": 0.2593, + "step": 48420 + }, + { + "epoch": 1.869956368971775, + "grad_norm": 0.5880617499351501, + "learning_rate": 7.533881617050851e-05, + "loss": 0.1738, + "step": 48430 + }, + { + "epoch": 1.8703424842658016, + "grad_norm": 2.229809522628784, + "learning_rate": 7.531307515090673e-05, + "loss": 0.2722, + "step": 48440 + }, + { + "epoch": 1.8707285995598286, + "grad_norm": 0.47200268507003784, + "learning_rate": 7.528733413130494e-05, + "loss": 0.1359, + "step": 48450 + }, + { + "epoch": 1.8711147148538554, + "grad_norm": 1.7799588441848755, + "learning_rate": 7.526159311170315e-05, + "loss": 0.1806, + "step": 48460 + }, + { + "epoch": 1.8715008301478822, + "grad_norm": 0.7878087162971497, + "learning_rate": 7.523585209210137e-05, + "loss": 0.128, + "step": 48470 + }, + { + "epoch": 1.871886945441909, + "grad_norm": 0.666887104511261, + "learning_rate": 7.521011107249959e-05, + "loss": 0.3447, + "step": 48480 + }, + { + "epoch": 1.8722730607359357, + "grad_norm": 0.13966748118400574, + "learning_rate": 7.51843700528978e-05, + "loss": 0.2694, + "step": 48490 + }, + { + "epoch": 1.8726591760299627, + "grad_norm": 0.6305252909660339, + "learning_rate": 7.5158629033296e-05, + "loss": 0.1235, + "step": 48500 + }, + { + "epoch": 1.8730452913239892, + "grad_norm": 1.4558709859848022, + "learning_rate": 7.513288801369422e-05, + "loss": 0.1251, + "step": 48510 + }, + { + "epoch": 1.8734314066180162, + "grad_norm": 1.4596049785614014, + "learning_rate": 7.510714699409243e-05, + "loss": 0.3316, + "step": 48520 + }, + { + "epoch": 1.873817521912043, + "grad_norm": 0.6161240935325623, + "learning_rate": 7.508140597449065e-05, + "loss": 0.1035, + "step": 48530 + }, + { + "epoch": 1.8742036372060698, + "grad_norm": 2.402022123336792, + "learning_rate": 7.505566495488887e-05, + "loss": 0.2569, + "step": 48540 + }, + { + "epoch": 1.8745897525000965, + "grad_norm": 1.2332879304885864, + "learning_rate": 7.502992393528709e-05, + "loss": 0.0864, + "step": 48550 + }, + { + "epoch": 1.8749758677941233, + "grad_norm": 1.636063575744629, + "learning_rate": 7.50041829156853e-05, + "loss": 0.2673, + "step": 48560 + }, + { + "epoch": 1.8753619830881503, + "grad_norm": 1.0160930156707764, + "learning_rate": 7.497844189608351e-05, + "loss": 0.2759, + "step": 48570 + }, + { + "epoch": 1.8757480983821768, + "grad_norm": 2.007415771484375, + "learning_rate": 7.495270087648171e-05, + "loss": 0.328, + "step": 48580 + }, + { + "epoch": 1.8761342136762038, + "grad_norm": 2.852415084838867, + "learning_rate": 7.492695985687993e-05, + "loss": 0.2762, + "step": 48590 + }, + { + "epoch": 1.8765203289702304, + "grad_norm": 1.5660792589187622, + "learning_rate": 7.490121883727815e-05, + "loss": 0.3858, + "step": 48600 + }, + { + "epoch": 1.8769064442642573, + "grad_norm": 
1.022005319595337, + "learning_rate": 7.487547781767637e-05, + "loss": 0.2508, + "step": 48610 + }, + { + "epoch": 1.8772925595582841, + "grad_norm": 0.9244334101676941, + "learning_rate": 7.484973679807458e-05, + "loss": 0.3577, + "step": 48620 + }, + { + "epoch": 1.8776786748523109, + "grad_norm": 0.6603676080703735, + "learning_rate": 7.482399577847279e-05, + "loss": 0.1577, + "step": 48630 + }, + { + "epoch": 1.8780647901463376, + "grad_norm": 1.8789231777191162, + "learning_rate": 7.4798254758871e-05, + "loss": 0.1705, + "step": 48640 + }, + { + "epoch": 1.8784509054403644, + "grad_norm": 2.0536692142486572, + "learning_rate": 7.47725137392692e-05, + "loss": 0.3087, + "step": 48650 + }, + { + "epoch": 1.8788370207343914, + "grad_norm": 1.1918115615844727, + "learning_rate": 7.474677271966743e-05, + "loss": 0.2027, + "step": 48660 + }, + { + "epoch": 1.879223136028418, + "grad_norm": 0.374523788690567, + "learning_rate": 7.472103170006565e-05, + "loss": 0.193, + "step": 48670 + }, + { + "epoch": 1.879609251322445, + "grad_norm": 1.481998324394226, + "learning_rate": 7.469529068046386e-05, + "loss": 0.246, + "step": 48680 + }, + { + "epoch": 1.8799953666164717, + "grad_norm": 0.9611921310424805, + "learning_rate": 7.466954966086207e-05, + "loss": 0.3557, + "step": 48690 + }, + { + "epoch": 1.8803814819104985, + "grad_norm": 2.2604222297668457, + "learning_rate": 7.464380864126029e-05, + "loss": 0.168, + "step": 48700 + }, + { + "epoch": 1.8807675972045252, + "grad_norm": 0.11883547157049179, + "learning_rate": 7.46180676216585e-05, + "loss": 0.1078, + "step": 48710 + }, + { + "epoch": 1.881153712498552, + "grad_norm": 1.0063214302062988, + "learning_rate": 7.45923266020567e-05, + "loss": 0.2871, + "step": 48720 + }, + { + "epoch": 1.881539827792579, + "grad_norm": 0.32539430260658264, + "learning_rate": 7.456658558245493e-05, + "loss": 0.2641, + "step": 48730 + }, + { + "epoch": 1.8819259430866055, + "grad_norm": 0.05262208729982376, + "learning_rate": 7.454084456285314e-05, + "loss": 0.2313, + "step": 48740 + }, + { + "epoch": 1.8823120583806325, + "grad_norm": 1.5337389707565308, + "learning_rate": 7.451510354325135e-05, + "loss": 0.2202, + "step": 48750 + }, + { + "epoch": 1.8826981736746593, + "grad_norm": 2.8400349617004395, + "learning_rate": 7.448936252364957e-05, + "loss": 0.2719, + "step": 48760 + }, + { + "epoch": 1.883084288968686, + "grad_norm": 1.0065114498138428, + "learning_rate": 7.446362150404778e-05, + "loss": 0.1904, + "step": 48770 + }, + { + "epoch": 1.8834704042627128, + "grad_norm": 3.514146089553833, + "learning_rate": 7.443788048444599e-05, + "loss": 0.4121, + "step": 48780 + }, + { + "epoch": 1.8838565195567396, + "grad_norm": 1.5249392986297607, + "learning_rate": 7.44121394648442e-05, + "loss": 0.3192, + "step": 48790 + }, + { + "epoch": 1.8842426348507666, + "grad_norm": 2.1075022220611572, + "learning_rate": 7.438639844524242e-05, + "loss": 0.257, + "step": 48800 + }, + { + "epoch": 1.8846287501447931, + "grad_norm": 1.368531584739685, + "learning_rate": 7.436065742564063e-05, + "loss": 0.246, + "step": 48810 + }, + { + "epoch": 1.8850148654388201, + "grad_norm": 0.7900007367134094, + "learning_rate": 7.433491640603885e-05, + "loss": 0.2635, + "step": 48820 + }, + { + "epoch": 1.8854009807328467, + "grad_norm": 0.8597519397735596, + "learning_rate": 7.430917538643706e-05, + "loss": 0.3996, + "step": 48830 + }, + { + "epoch": 1.8857870960268737, + "grad_norm": 1.4207600355148315, + "learning_rate": 7.428343436683527e-05, + "loss": 0.0985, + "step": 48840 + 
}, + { + "epoch": 1.8861732113209004, + "grad_norm": 0.43486616015434265, + "learning_rate": 7.425769334723349e-05, + "loss": 0.0675, + "step": 48850 + }, + { + "epoch": 1.8865593266149272, + "grad_norm": 0.5899690389633179, + "learning_rate": 7.42319523276317e-05, + "loss": 0.3184, + "step": 48860 + }, + { + "epoch": 1.886945441908954, + "grad_norm": 0.3843490481376648, + "learning_rate": 7.420621130802991e-05, + "loss": 0.3173, + "step": 48870 + }, + { + "epoch": 1.8873315572029807, + "grad_norm": 1.5994783639907837, + "learning_rate": 7.418047028842813e-05, + "loss": 0.1545, + "step": 48880 + }, + { + "epoch": 1.8877176724970077, + "grad_norm": 0.4738117456436157, + "learning_rate": 7.415472926882634e-05, + "loss": 0.2209, + "step": 48890 + }, + { + "epoch": 1.8881037877910343, + "grad_norm": 0.8965383768081665, + "learning_rate": 7.412898824922455e-05, + "loss": 0.0784, + "step": 48900 + }, + { + "epoch": 1.8884899030850613, + "grad_norm": 0.5122581124305725, + "learning_rate": 7.410324722962278e-05, + "loss": 0.2346, + "step": 48910 + }, + { + "epoch": 1.888876018379088, + "grad_norm": 0.8720236420631409, + "learning_rate": 7.407750621002099e-05, + "loss": 0.1616, + "step": 48920 + }, + { + "epoch": 1.8892621336731148, + "grad_norm": 0.8678966760635376, + "learning_rate": 7.405176519041919e-05, + "loss": 0.2346, + "step": 48930 + }, + { + "epoch": 1.8896482489671416, + "grad_norm": 1.5904022455215454, + "learning_rate": 7.40260241708174e-05, + "loss": 0.2383, + "step": 48940 + }, + { + "epoch": 1.8900343642611683, + "grad_norm": 0.8917766213417053, + "learning_rate": 7.400028315121562e-05, + "loss": 0.0761, + "step": 48950 + }, + { + "epoch": 1.8904204795551953, + "grad_norm": 2.0660765171051025, + "learning_rate": 7.397454213161383e-05, + "loss": 0.1417, + "step": 48960 + }, + { + "epoch": 1.8908065948492219, + "grad_norm": 1.085541009902954, + "learning_rate": 7.394880111201205e-05, + "loss": 0.116, + "step": 48970 + }, + { + "epoch": 1.8911927101432489, + "grad_norm": 0.5287320613861084, + "learning_rate": 7.392306009241027e-05, + "loss": 0.2035, + "step": 48980 + }, + { + "epoch": 1.8915788254372756, + "grad_norm": 0.12603731453418732, + "learning_rate": 7.389731907280849e-05, + "loss": 0.2288, + "step": 48990 + }, + { + "epoch": 1.8919649407313024, + "grad_norm": 1.6294454336166382, + "learning_rate": 7.387157805320669e-05, + "loss": 0.3106, + "step": 49000 + }, + { + "epoch": 1.8923510560253292, + "grad_norm": 0.12466654926538467, + "learning_rate": 7.38458370336049e-05, + "loss": 0.2728, + "step": 49010 + }, + { + "epoch": 1.892737171319356, + "grad_norm": 0.44524601101875305, + "learning_rate": 7.382009601400311e-05, + "loss": 0.1293, + "step": 49020 + }, + { + "epoch": 1.893123286613383, + "grad_norm": 0.7164571285247803, + "learning_rate": 7.379435499440133e-05, + "loss": 0.2331, + "step": 49030 + }, + { + "epoch": 1.8935094019074095, + "grad_norm": 0.6479294300079346, + "learning_rate": 7.376861397479955e-05, + "loss": 0.2716, + "step": 49040 + }, + { + "epoch": 1.8938955172014365, + "grad_norm": 0.5055733323097229, + "learning_rate": 7.374287295519777e-05, + "loss": 0.253, + "step": 49050 + }, + { + "epoch": 1.894281632495463, + "grad_norm": 0.14443042874336243, + "learning_rate": 7.371713193559598e-05, + "loss": 0.211, + "step": 49060 + }, + { + "epoch": 1.89466774778949, + "grad_norm": 0.21934077143669128, + "learning_rate": 7.369139091599418e-05, + "loss": 0.4524, + "step": 49070 + }, + { + "epoch": 1.8950538630835168, + "grad_norm": 0.7723036408424377, + 
"learning_rate": 7.366564989639239e-05, + "loss": 0.3333, + "step": 49080 + }, + { + "epoch": 1.8954399783775435, + "grad_norm": 0.561475932598114, + "learning_rate": 7.36399088767906e-05, + "loss": 0.2446, + "step": 49090 + }, + { + "epoch": 1.8958260936715703, + "grad_norm": 0.36831262707710266, + "learning_rate": 7.361416785718883e-05, + "loss": 0.3203, + "step": 49100 + }, + { + "epoch": 1.896212208965597, + "grad_norm": 1.3542941808700562, + "learning_rate": 7.358842683758705e-05, + "loss": 0.1927, + "step": 49110 + }, + { + "epoch": 1.896598324259624, + "grad_norm": 0.778232991695404, + "learning_rate": 7.356268581798526e-05, + "loss": 0.1714, + "step": 49120 + }, + { + "epoch": 1.8969844395536506, + "grad_norm": 0.1421511471271515, + "learning_rate": 7.353694479838347e-05, + "loss": 0.2391, + "step": 49130 + }, + { + "epoch": 1.8973705548476776, + "grad_norm": 0.5509871244430542, + "learning_rate": 7.351120377878169e-05, + "loss": 0.251, + "step": 49140 + }, + { + "epoch": 1.8977566701417043, + "grad_norm": 1.068138599395752, + "learning_rate": 7.348546275917989e-05, + "loss": 0.3913, + "step": 49150 + }, + { + "epoch": 1.8981427854357311, + "grad_norm": 0.8679132461547852, + "learning_rate": 7.345972173957811e-05, + "loss": 0.2243, + "step": 49160 + }, + { + "epoch": 1.8985289007297579, + "grad_norm": 1.1966150999069214, + "learning_rate": 7.343398071997633e-05, + "loss": 0.1426, + "step": 49170 + }, + { + "epoch": 1.8989150160237847, + "grad_norm": 2.415524482727051, + "learning_rate": 7.340823970037454e-05, + "loss": 0.2193, + "step": 49180 + }, + { + "epoch": 1.8993011313178116, + "grad_norm": 0.3287011384963989, + "learning_rate": 7.338249868077275e-05, + "loss": 0.1409, + "step": 49190 + }, + { + "epoch": 1.8996872466118382, + "grad_norm": 0.47298726439476013, + "learning_rate": 7.335675766117097e-05, + "loss": 0.3534, + "step": 49200 + }, + { + "epoch": 1.9000733619058652, + "grad_norm": 0.25453588366508484, + "learning_rate": 7.333101664156918e-05, + "loss": 0.2457, + "step": 49210 + }, + { + "epoch": 1.9004594771998917, + "grad_norm": 1.78682279586792, + "learning_rate": 7.330527562196738e-05, + "loss": 0.4379, + "step": 49220 + }, + { + "epoch": 1.9008455924939187, + "grad_norm": 0.9073999524116516, + "learning_rate": 7.32795346023656e-05, + "loss": 0.27, + "step": 49230 + }, + { + "epoch": 1.9012317077879455, + "grad_norm": 0.7788071036338806, + "learning_rate": 7.325379358276382e-05, + "loss": 0.1992, + "step": 49240 + }, + { + "epoch": 1.9016178230819722, + "grad_norm": 0.151946023106575, + "learning_rate": 7.322805256316203e-05, + "loss": 0.1526, + "step": 49250 + }, + { + "epoch": 1.9020039383759992, + "grad_norm": 1.655206561088562, + "learning_rate": 7.320231154356025e-05, + "loss": 0.2899, + "step": 49260 + }, + { + "epoch": 1.9023900536700258, + "grad_norm": 0.6136038303375244, + "learning_rate": 7.317657052395846e-05, + "loss": 0.2118, + "step": 49270 + }, + { + "epoch": 1.9027761689640528, + "grad_norm": 2.720750093460083, + "learning_rate": 7.315082950435667e-05, + "loss": 0.316, + "step": 49280 + }, + { + "epoch": 1.9031622842580793, + "grad_norm": 0.7502691149711609, + "learning_rate": 7.312508848475488e-05, + "loss": 0.2368, + "step": 49290 + }, + { + "epoch": 1.9035483995521063, + "grad_norm": 1.0178804397583008, + "learning_rate": 7.30993474651531e-05, + "loss": 0.1193, + "step": 49300 + }, + { + "epoch": 1.903934514846133, + "grad_norm": 1.0318552255630493, + "learning_rate": 7.307360644555131e-05, + "loss": 0.1236, + "step": 49310 + }, + { + "epoch": 
1.9043206301401598, + "grad_norm": 0.4833224415779114, + "learning_rate": 7.304786542594952e-05, + "loss": 0.1505, + "step": 49320 + }, + { + "epoch": 1.9047067454341866, + "grad_norm": 0.8481758832931519, + "learning_rate": 7.302212440634774e-05, + "loss": 0.318, + "step": 49330 + }, + { + "epoch": 1.9050928607282134, + "grad_norm": 0.8141576051712036, + "learning_rate": 7.299638338674595e-05, + "loss": 0.2936, + "step": 49340 + }, + { + "epoch": 1.9054789760222404, + "grad_norm": 0.28531432151794434, + "learning_rate": 7.297064236714416e-05, + "loss": 0.0794, + "step": 49350 + }, + { + "epoch": 1.905865091316267, + "grad_norm": 1.2908906936645508, + "learning_rate": 7.294490134754238e-05, + "loss": 0.2753, + "step": 49360 + }, + { + "epoch": 1.906251206610294, + "grad_norm": 0.27395737171173096, + "learning_rate": 7.291916032794059e-05, + "loss": 0.0869, + "step": 49370 + }, + { + "epoch": 1.9066373219043207, + "grad_norm": 0.5168110728263855, + "learning_rate": 7.28934193083388e-05, + "loss": 0.2033, + "step": 49380 + }, + { + "epoch": 1.9070234371983474, + "grad_norm": 0.384120911359787, + "learning_rate": 7.286767828873702e-05, + "loss": 0.2012, + "step": 49390 + }, + { + "epoch": 1.9074095524923742, + "grad_norm": 0.86110919713974, + "learning_rate": 7.284193726913523e-05, + "loss": 0.254, + "step": 49400 + }, + { + "epoch": 1.907795667786401, + "grad_norm": 0.04631857946515083, + "learning_rate": 7.281619624953346e-05, + "loss": 0.1196, + "step": 49410 + }, + { + "epoch": 1.908181783080428, + "grad_norm": 0.1448020040988922, + "learning_rate": 7.279045522993166e-05, + "loss": 0.2028, + "step": 49420 + }, + { + "epoch": 1.9085678983744545, + "grad_norm": 0.11122003197669983, + "learning_rate": 7.276471421032987e-05, + "loss": 0.2106, + "step": 49430 + }, + { + "epoch": 1.9089540136684815, + "grad_norm": 1.0059682130813599, + "learning_rate": 7.273897319072808e-05, + "loss": 0.2273, + "step": 49440 + }, + { + "epoch": 1.909340128962508, + "grad_norm": 0.5203434824943542, + "learning_rate": 7.27132321711263e-05, + "loss": 0.1357, + "step": 49450 + }, + { + "epoch": 1.909726244256535, + "grad_norm": 0.5997903347015381, + "learning_rate": 7.268749115152451e-05, + "loss": 0.204, + "step": 49460 + }, + { + "epoch": 1.9101123595505618, + "grad_norm": 0.1968044638633728, + "learning_rate": 7.266175013192272e-05, + "loss": 0.0851, + "step": 49470 + }, + { + "epoch": 1.9104984748445886, + "grad_norm": 2.3479251861572266, + "learning_rate": 7.263600911232095e-05, + "loss": 0.2232, + "step": 49480 + }, + { + "epoch": 1.9108845901386153, + "grad_norm": 1.1804332733154297, + "learning_rate": 7.261026809271916e-05, + "loss": 0.1622, + "step": 49490 + }, + { + "epoch": 1.911270705432642, + "grad_norm": 1.4964795112609863, + "learning_rate": 7.258452707311736e-05, + "loss": 0.2701, + "step": 49500 + }, + { + "epoch": 1.911656820726669, + "grad_norm": 2.1407668590545654, + "learning_rate": 7.255878605351558e-05, + "loss": 0.2944, + "step": 49510 + }, + { + "epoch": 1.9120429360206956, + "grad_norm": 0.5795183181762695, + "learning_rate": 7.253304503391379e-05, + "loss": 0.2761, + "step": 49520 + }, + { + "epoch": 1.9124290513147226, + "grad_norm": 0.8205333352088928, + "learning_rate": 7.2507304014312e-05, + "loss": 0.3897, + "step": 49530 + }, + { + "epoch": 1.9128151666087494, + "grad_norm": 4.536723613739014, + "learning_rate": 7.248156299471023e-05, + "loss": 0.3429, + "step": 49540 + }, + { + "epoch": 1.9132012819027762, + "grad_norm": 0.7611442804336548, + "learning_rate": 7.245582197510844e-05, 
+ "loss": 0.3098, + "step": 49550 + }, + { + "epoch": 1.913587397196803, + "grad_norm": 0.7911695241928101, + "learning_rate": 7.243008095550666e-05, + "loss": 0.28, + "step": 49560 + }, + { + "epoch": 1.9139735124908297, + "grad_norm": 0.11662279069423676, + "learning_rate": 7.240433993590486e-05, + "loss": 0.1577, + "step": 49570 + }, + { + "epoch": 1.9143596277848567, + "grad_norm": 2.140101194381714, + "learning_rate": 7.237859891630307e-05, + "loss": 0.3705, + "step": 49580 + }, + { + "epoch": 1.9147457430788832, + "grad_norm": 1.8803783655166626, + "learning_rate": 7.235285789670128e-05, + "loss": 0.3168, + "step": 49590 + }, + { + "epoch": 1.9151318583729102, + "grad_norm": 0.25436753034591675, + "learning_rate": 7.232711687709951e-05, + "loss": 0.3037, + "step": 49600 + }, + { + "epoch": 1.915517973666937, + "grad_norm": 1.1993011236190796, + "learning_rate": 7.230137585749772e-05, + "loss": 0.2218, + "step": 49610 + }, + { + "epoch": 1.9159040889609638, + "grad_norm": 0.4995238780975342, + "learning_rate": 7.227563483789594e-05, + "loss": 0.1821, + "step": 49620 + }, + { + "epoch": 1.9162902042549905, + "grad_norm": 1.8584142923355103, + "learning_rate": 7.224989381829415e-05, + "loss": 0.2694, + "step": 49630 + }, + { + "epoch": 1.9166763195490173, + "grad_norm": 0.5736681818962097, + "learning_rate": 7.222415279869235e-05, + "loss": 0.32, + "step": 49640 + }, + { + "epoch": 1.9170624348430443, + "grad_norm": 0.2963573634624481, + "learning_rate": 7.219841177909056e-05, + "loss": 0.1986, + "step": 49650 + }, + { + "epoch": 1.9174485501370708, + "grad_norm": 0.6248067617416382, + "learning_rate": 7.217267075948879e-05, + "loss": 0.2461, + "step": 49660 + }, + { + "epoch": 1.9178346654310978, + "grad_norm": 1.037135362625122, + "learning_rate": 7.2146929739887e-05, + "loss": 0.176, + "step": 49670 + }, + { + "epoch": 1.9182207807251244, + "grad_norm": 7.093240261077881, + "learning_rate": 7.212118872028522e-05, + "loss": 0.263, + "step": 49680 + }, + { + "epoch": 1.9186068960191514, + "grad_norm": 0.9378503561019897, + "learning_rate": 7.209544770068343e-05, + "loss": 0.4105, + "step": 49690 + }, + { + "epoch": 1.9189930113131781, + "grad_norm": 2.801452875137329, + "learning_rate": 7.206970668108164e-05, + "loss": 0.1693, + "step": 49700 + }, + { + "epoch": 1.9193791266072049, + "grad_norm": 0.6257158517837524, + "learning_rate": 7.204396566147984e-05, + "loss": 0.2095, + "step": 49710 + }, + { + "epoch": 1.9197652419012317, + "grad_norm": 1.0623574256896973, + "learning_rate": 7.201822464187807e-05, + "loss": 0.2335, + "step": 49720 + }, + { + "epoch": 1.9201513571952584, + "grad_norm": 1.958388328552246, + "learning_rate": 7.199248362227628e-05, + "loss": 0.2545, + "step": 49730 + }, + { + "epoch": 1.9205374724892854, + "grad_norm": 1.790643334388733, + "learning_rate": 7.19667426026745e-05, + "loss": 0.2342, + "step": 49740 + }, + { + "epoch": 1.920923587783312, + "grad_norm": 0.19096235930919647, + "learning_rate": 7.194100158307271e-05, + "loss": 0.1812, + "step": 49750 + }, + { + "epoch": 1.921309703077339, + "grad_norm": 1.8985120058059692, + "learning_rate": 7.191526056347092e-05, + "loss": 0.294, + "step": 49760 + }, + { + "epoch": 1.9216958183713657, + "grad_norm": 2.9629859924316406, + "learning_rate": 7.188951954386914e-05, + "loss": 0.3423, + "step": 49770 + }, + { + "epoch": 1.9220819336653925, + "grad_norm": 1.475262999534607, + "learning_rate": 7.186377852426735e-05, + "loss": 0.1478, + "step": 49780 + }, + { + "epoch": 1.9224680489594193, + "grad_norm": 
1.5917531251907349, + "learning_rate": 7.183803750466556e-05, + "loss": 0.1964, + "step": 49790 + }, + { + "epoch": 1.922854164253446, + "grad_norm": 0.20593854784965515, + "learning_rate": 7.181229648506378e-05, + "loss": 0.1775, + "step": 49800 + }, + { + "epoch": 1.923240279547473, + "grad_norm": 0.5509443879127502, + "learning_rate": 7.178655546546199e-05, + "loss": 0.2411, + "step": 49810 + }, + { + "epoch": 1.9236263948414996, + "grad_norm": 0.8016191720962524, + "learning_rate": 7.17608144458602e-05, + "loss": 0.0837, + "step": 49820 + }, + { + "epoch": 1.9240125101355265, + "grad_norm": 0.8763396143913269, + "learning_rate": 7.173507342625842e-05, + "loss": 0.1932, + "step": 49830 + }, + { + "epoch": 1.9243986254295533, + "grad_norm": 0.25457191467285156, + "learning_rate": 7.170933240665663e-05, + "loss": 0.0869, + "step": 49840 + }, + { + "epoch": 1.92478474072358, + "grad_norm": 2.61993145942688, + "learning_rate": 7.168359138705484e-05, + "loss": 0.1839, + "step": 49850 + }, + { + "epoch": 1.9251708560176068, + "grad_norm": 2.2821877002716064, + "learning_rate": 7.165785036745306e-05, + "loss": 0.3863, + "step": 49860 + }, + { + "epoch": 1.9255569713116336, + "grad_norm": 1.4072798490524292, + "learning_rate": 7.163210934785127e-05, + "loss": 0.2477, + "step": 49870 + }, + { + "epoch": 1.9259430866056606, + "grad_norm": 0.8962070941925049, + "learning_rate": 7.160636832824948e-05, + "loss": 0.2936, + "step": 49880 + }, + { + "epoch": 1.9263292018996871, + "grad_norm": 3.0801923274993896, + "learning_rate": 7.15806273086477e-05, + "loss": 0.2368, + "step": 49890 + }, + { + "epoch": 1.9267153171937141, + "grad_norm": 0.5756659507751465, + "learning_rate": 7.155488628904591e-05, + "loss": 0.2774, + "step": 49900 + }, + { + "epoch": 1.9271014324877407, + "grad_norm": 2.669837236404419, + "learning_rate": 7.152914526944414e-05, + "loss": 0.2742, + "step": 49910 + }, + { + "epoch": 1.9274875477817677, + "grad_norm": 1.3064197301864624, + "learning_rate": 7.150340424984234e-05, + "loss": 0.3908, + "step": 49920 + }, + { + "epoch": 1.9278736630757944, + "grad_norm": 1.54086434841156, + "learning_rate": 7.147766323024055e-05, + "loss": 0.2302, + "step": 49930 + }, + { + "epoch": 1.9282597783698212, + "grad_norm": 2.6577224731445312, + "learning_rate": 7.145192221063876e-05, + "loss": 0.2667, + "step": 49940 + }, + { + "epoch": 1.928645893663848, + "grad_norm": 1.0387258529663086, + "learning_rate": 7.142618119103698e-05, + "loss": 0.2418, + "step": 49950 + }, + { + "epoch": 1.9290320089578747, + "grad_norm": 0.592282772064209, + "learning_rate": 7.140044017143519e-05, + "loss": 0.2499, + "step": 49960 + }, + { + "epoch": 1.9294181242519017, + "grad_norm": 0.364241361618042, + "learning_rate": 7.137469915183342e-05, + "loss": 0.2056, + "step": 49970 + }, + { + "epoch": 1.9298042395459283, + "grad_norm": 2.9593188762664795, + "learning_rate": 7.134895813223163e-05, + "loss": 0.2514, + "step": 49980 + }, + { + "epoch": 1.9301903548399553, + "grad_norm": 1.9135371446609497, + "learning_rate": 7.132321711262983e-05, + "loss": 0.124, + "step": 49990 + }, + { + "epoch": 1.930576470133982, + "grad_norm": 0.5927162170410156, + "learning_rate": 7.129747609302804e-05, + "loss": 0.2257, + "step": 50000 + }, + { + "epoch": 1.9309625854280088, + "grad_norm": 1.7300679683685303, + "learning_rate": 7.127173507342626e-05, + "loss": 0.2302, + "step": 50010 + }, + { + "epoch": 1.9313487007220356, + "grad_norm": 1.48344087600708, + "learning_rate": 7.124599405382447e-05, + "loss": 0.3121, + "step": 50020 + 
}, + { + "epoch": 1.9317348160160623, + "grad_norm": 1.275780439376831, + "learning_rate": 7.122025303422268e-05, + "loss": 0.2202, + "step": 50030 + }, + { + "epoch": 1.9321209313100893, + "grad_norm": 1.2625102996826172, + "learning_rate": 7.119451201462091e-05, + "loss": 0.1819, + "step": 50040 + }, + { + "epoch": 1.9325070466041159, + "grad_norm": 2.6306488513946533, + "learning_rate": 7.116877099501912e-05, + "loss": 0.3846, + "step": 50050 + }, + { + "epoch": 1.9328931618981429, + "grad_norm": 2.507249355316162, + "learning_rate": 7.114302997541732e-05, + "loss": 0.3167, + "step": 50060 + }, + { + "epoch": 1.9332792771921696, + "grad_norm": 0.6122744679450989, + "learning_rate": 7.111728895581554e-05, + "loss": 0.2486, + "step": 50070 + }, + { + "epoch": 1.9336653924861964, + "grad_norm": 0.9299182295799255, + "learning_rate": 7.109154793621375e-05, + "loss": 0.2156, + "step": 50080 + }, + { + "epoch": 1.9340515077802232, + "grad_norm": 2.0560238361358643, + "learning_rate": 7.106580691661196e-05, + "loss": 0.1319, + "step": 50090 + }, + { + "epoch": 1.93443762307425, + "grad_norm": 0.9602612257003784, + "learning_rate": 7.104006589701019e-05, + "loss": 0.1433, + "step": 50100 + }, + { + "epoch": 1.934823738368277, + "grad_norm": 1.2760334014892578, + "learning_rate": 7.10143248774084e-05, + "loss": 0.2019, + "step": 50110 + }, + { + "epoch": 1.9352098536623035, + "grad_norm": 0.27404239773750305, + "learning_rate": 7.098858385780662e-05, + "loss": 0.5303, + "step": 50120 + }, + { + "epoch": 1.9355959689563305, + "grad_norm": 0.6597281694412231, + "learning_rate": 7.096284283820483e-05, + "loss": 0.2328, + "step": 50130 + }, + { + "epoch": 1.935982084250357, + "grad_norm": 3.6417131423950195, + "learning_rate": 7.093710181860303e-05, + "loss": 0.343, + "step": 50140 + }, + { + "epoch": 1.936368199544384, + "grad_norm": 0.9950355887413025, + "learning_rate": 7.091136079900124e-05, + "loss": 0.1877, + "step": 50150 + }, + { + "epoch": 1.9367543148384108, + "grad_norm": 0.3848172426223755, + "learning_rate": 7.088561977939947e-05, + "loss": 0.1541, + "step": 50160 + }, + { + "epoch": 1.9371404301324375, + "grad_norm": 0.6884573698043823, + "learning_rate": 7.085987875979768e-05, + "loss": 0.1744, + "step": 50170 + }, + { + "epoch": 1.9375265454264643, + "grad_norm": 1.5233834981918335, + "learning_rate": 7.08341377401959e-05, + "loss": 0.3595, + "step": 50180 + }, + { + "epoch": 1.937912660720491, + "grad_norm": 0.11241710186004639, + "learning_rate": 7.080839672059411e-05, + "loss": 0.1313, + "step": 50190 + }, + { + "epoch": 1.938298776014518, + "grad_norm": 1.3029096126556396, + "learning_rate": 7.078265570099232e-05, + "loss": 0.3459, + "step": 50200 + }, + { + "epoch": 1.9386848913085446, + "grad_norm": 1.1150782108306885, + "learning_rate": 7.075691468139052e-05, + "loss": 0.2662, + "step": 50210 + }, + { + "epoch": 1.9390710066025716, + "grad_norm": 0.09661692380905151, + "learning_rate": 7.073117366178875e-05, + "loss": 0.2377, + "step": 50220 + }, + { + "epoch": 1.9394571218965984, + "grad_norm": 2.488790512084961, + "learning_rate": 7.070543264218696e-05, + "loss": 0.2677, + "step": 50230 + }, + { + "epoch": 1.9398432371906251, + "grad_norm": 0.407704621553421, + "learning_rate": 7.067969162258518e-05, + "loss": 0.1208, + "step": 50240 + }, + { + "epoch": 1.940229352484652, + "grad_norm": 0.4769364297389984, + "learning_rate": 7.065395060298339e-05, + "loss": 0.1748, + "step": 50250 + }, + { + "epoch": 1.9406154677786787, + "grad_norm": 2.900118112564087, + "learning_rate": 
7.06282095833816e-05, + "loss": 0.1948, + "step": 50260 + }, + { + "epoch": 1.9410015830727056, + "grad_norm": 1.1749001741409302, + "learning_rate": 7.060246856377982e-05, + "loss": 0.1688, + "step": 50270 + }, + { + "epoch": 1.9413876983667322, + "grad_norm": 0.5052315592765808, + "learning_rate": 7.057672754417802e-05, + "loss": 0.4284, + "step": 50280 + }, + { + "epoch": 1.9417738136607592, + "grad_norm": 0.614936113357544, + "learning_rate": 7.055098652457624e-05, + "loss": 0.2109, + "step": 50290 + }, + { + "epoch": 1.942159928954786, + "grad_norm": 1.9683163166046143, + "learning_rate": 7.052524550497446e-05, + "loss": 0.1518, + "step": 50300 + }, + { + "epoch": 1.9425460442488127, + "grad_norm": 0.8502413630485535, + "learning_rate": 7.049950448537267e-05, + "loss": 0.281, + "step": 50310 + }, + { + "epoch": 1.9429321595428395, + "grad_norm": 2.8081016540527344, + "learning_rate": 7.047376346577088e-05, + "loss": 0.1802, + "step": 50320 + }, + { + "epoch": 1.9433182748368663, + "grad_norm": 1.9322141408920288, + "learning_rate": 7.04480224461691e-05, + "loss": 0.2218, + "step": 50330 + }, + { + "epoch": 1.9437043901308932, + "grad_norm": 0.8338032960891724, + "learning_rate": 7.042228142656731e-05, + "loss": 0.1836, + "step": 50340 + }, + { + "epoch": 1.9440905054249198, + "grad_norm": 1.232925295829773, + "learning_rate": 7.039654040696552e-05, + "loss": 0.2025, + "step": 50350 + }, + { + "epoch": 1.9444766207189468, + "grad_norm": 0.5655641555786133, + "learning_rate": 7.037079938736374e-05, + "loss": 0.2061, + "step": 50360 + }, + { + "epoch": 1.9448627360129733, + "grad_norm": 1.398917317390442, + "learning_rate": 7.034505836776195e-05, + "loss": 0.3471, + "step": 50370 + }, + { + "epoch": 1.9452488513070003, + "grad_norm": 0.8988509178161621, + "learning_rate": 7.031931734816016e-05, + "loss": 0.3504, + "step": 50380 + }, + { + "epoch": 1.945634966601027, + "grad_norm": 0.10333681106567383, + "learning_rate": 7.029357632855838e-05, + "loss": 0.352, + "step": 50390 + }, + { + "epoch": 1.9460210818950539, + "grad_norm": 1.3678967952728271, + "learning_rate": 7.026783530895659e-05, + "loss": 0.1763, + "step": 50400 + }, + { + "epoch": 1.9464071971890806, + "grad_norm": 3.1605618000030518, + "learning_rate": 7.02420942893548e-05, + "loss": 0.4636, + "step": 50410 + }, + { + "epoch": 1.9467933124831074, + "grad_norm": 0.02575235441327095, + "learning_rate": 7.021635326975302e-05, + "loss": 0.2352, + "step": 50420 + }, + { + "epoch": 1.9471794277771344, + "grad_norm": 1.789573311805725, + "learning_rate": 7.019061225015123e-05, + "loss": 0.3358, + "step": 50430 + }, + { + "epoch": 1.947565543071161, + "grad_norm": 1.0206273794174194, + "learning_rate": 7.016487123054944e-05, + "loss": 0.1509, + "step": 50440 + }, + { + "epoch": 1.947951658365188, + "grad_norm": 3.0096218585968018, + "learning_rate": 7.013913021094766e-05, + "loss": 0.3393, + "step": 50450 + }, + { + "epoch": 1.9483377736592147, + "grad_norm": 0.7118330001831055, + "learning_rate": 7.011338919134587e-05, + "loss": 0.2159, + "step": 50460 + }, + { + "epoch": 1.9487238889532414, + "grad_norm": 0.2753995954990387, + "learning_rate": 7.00876481717441e-05, + "loss": 0.2659, + "step": 50470 + }, + { + "epoch": 1.9491100042472682, + "grad_norm": 2.5077409744262695, + "learning_rate": 7.006190715214231e-05, + "loss": 0.3032, + "step": 50480 + }, + { + "epoch": 1.949496119541295, + "grad_norm": 0.3444388806819916, + "learning_rate": 7.003616613254051e-05, + "loss": 0.2625, + "step": 50490 + }, + { + "epoch": 
1.949882234835322, + "grad_norm": 1.1488401889801025, + "learning_rate": 7.001042511293872e-05, + "loss": 0.2443, + "step": 50500 + }, + { + "epoch": 1.9502683501293485, + "grad_norm": 0.6464126706123352, + "learning_rate": 6.998468409333694e-05, + "loss": 0.1268, + "step": 50510 + }, + { + "epoch": 1.9506544654233755, + "grad_norm": 0.6716893911361694, + "learning_rate": 6.995894307373515e-05, + "loss": 0.1346, + "step": 50520 + }, + { + "epoch": 1.951040580717402, + "grad_norm": 1.7599986791610718, + "learning_rate": 6.993320205413336e-05, + "loss": 0.2365, + "step": 50530 + }, + { + "epoch": 1.951426696011429, + "grad_norm": 0.7483705282211304, + "learning_rate": 6.990746103453159e-05, + "loss": 0.326, + "step": 50540 + }, + { + "epoch": 1.9518128113054558, + "grad_norm": 1.9978541135787964, + "learning_rate": 6.98817200149298e-05, + "loss": 0.2166, + "step": 50550 + }, + { + "epoch": 1.9521989265994826, + "grad_norm": 0.50310218334198, + "learning_rate": 6.9855978995328e-05, + "loss": 0.0868, + "step": 50560 + }, + { + "epoch": 1.9525850418935096, + "grad_norm": 0.4358873963356018, + "learning_rate": 6.983023797572622e-05, + "loss": 0.2483, + "step": 50570 + }, + { + "epoch": 1.952971157187536, + "grad_norm": 1.1612942218780518, + "learning_rate": 6.980449695612443e-05, + "loss": 0.1993, + "step": 50580 + }, + { + "epoch": 1.953357272481563, + "grad_norm": 2.2015364170074463, + "learning_rate": 6.977875593652264e-05, + "loss": 0.3102, + "step": 50590 + }, + { + "epoch": 1.9537433877755896, + "grad_norm": 0.2711980938911438, + "learning_rate": 6.975301491692087e-05, + "loss": 0.1243, + "step": 50600 + }, + { + "epoch": 1.9541295030696166, + "grad_norm": 1.2215690612792969, + "learning_rate": 6.972727389731908e-05, + "loss": 0.1812, + "step": 50610 + }, + { + "epoch": 1.9545156183636434, + "grad_norm": 1.5580382347106934, + "learning_rate": 6.97015328777173e-05, + "loss": 0.2582, + "step": 50620 + }, + { + "epoch": 1.9549017336576702, + "grad_norm": 4.351020336151123, + "learning_rate": 6.96757918581155e-05, + "loss": 0.2451, + "step": 50630 + }, + { + "epoch": 1.955287848951697, + "grad_norm": 0.8245100975036621, + "learning_rate": 6.965005083851371e-05, + "loss": 0.0861, + "step": 50640 + }, + { + "epoch": 1.9556739642457237, + "grad_norm": 3.2745001316070557, + "learning_rate": 6.962430981891192e-05, + "loss": 0.3108, + "step": 50650 + }, + { + "epoch": 1.9560600795397507, + "grad_norm": 1.2234485149383545, + "learning_rate": 6.959856879931015e-05, + "loss": 0.2457, + "step": 50660 + }, + { + "epoch": 1.9564461948337772, + "grad_norm": 0.8801009058952332, + "learning_rate": 6.957282777970836e-05, + "loss": 0.1758, + "step": 50670 + }, + { + "epoch": 1.9568323101278042, + "grad_norm": 1.0892245769500732, + "learning_rate": 6.954708676010658e-05, + "loss": 0.217, + "step": 50680 + }, + { + "epoch": 1.957218425421831, + "grad_norm": 0.47810041904449463, + "learning_rate": 6.952134574050479e-05, + "loss": 0.1797, + "step": 50690 + }, + { + "epoch": 1.9576045407158578, + "grad_norm": 2.988180160522461, + "learning_rate": 6.9495604720903e-05, + "loss": 0.2899, + "step": 50700 + }, + { + "epoch": 1.9579906560098845, + "grad_norm": 1.7291783094406128, + "learning_rate": 6.94698637013012e-05, + "loss": 0.214, + "step": 50710 + }, + { + "epoch": 1.9583767713039113, + "grad_norm": 1.1876074075698853, + "learning_rate": 6.944412268169943e-05, + "loss": 0.1801, + "step": 50720 + }, + { + "epoch": 1.9587628865979383, + "grad_norm": 1.5710748434066772, + "learning_rate": 6.941838166209764e-05, + 
"loss": 0.393, + "step": 50730 + }, + { + "epoch": 1.9591490018919648, + "grad_norm": 1.422935128211975, + "learning_rate": 6.939264064249586e-05, + "loss": 0.2637, + "step": 50740 + }, + { + "epoch": 1.9595351171859918, + "grad_norm": 0.26971349120140076, + "learning_rate": 6.936689962289407e-05, + "loss": 0.2616, + "step": 50750 + }, + { + "epoch": 1.9599212324800184, + "grad_norm": 0.02176385000348091, + "learning_rate": 6.934115860329228e-05, + "loss": 0.2458, + "step": 50760 + }, + { + "epoch": 1.9603073477740454, + "grad_norm": 1.587498664855957, + "learning_rate": 6.93154175836905e-05, + "loss": 0.2505, + "step": 50770 + }, + { + "epoch": 1.9606934630680721, + "grad_norm": 0.7178042531013489, + "learning_rate": 6.92896765640887e-05, + "loss": 0.1028, + "step": 50780 + }, + { + "epoch": 1.961079578362099, + "grad_norm": 0.4361552894115448, + "learning_rate": 6.926393554448692e-05, + "loss": 0.1327, + "step": 50790 + }, + { + "epoch": 1.9614656936561257, + "grad_norm": 0.1252552568912506, + "learning_rate": 6.923819452488514e-05, + "loss": 0.2598, + "step": 50800 + }, + { + "epoch": 1.9618518089501524, + "grad_norm": 1.0288604497909546, + "learning_rate": 6.921245350528335e-05, + "loss": 0.1415, + "step": 50810 + }, + { + "epoch": 1.9622379242441794, + "grad_norm": 1.633277416229248, + "learning_rate": 6.918671248568156e-05, + "loss": 0.3346, + "step": 50820 + }, + { + "epoch": 1.962624039538206, + "grad_norm": 1.034558653831482, + "learning_rate": 6.916097146607978e-05, + "loss": 0.3273, + "step": 50830 + }, + { + "epoch": 1.963010154832233, + "grad_norm": 0.4945419132709503, + "learning_rate": 6.913523044647799e-05, + "loss": 0.2027, + "step": 50840 + }, + { + "epoch": 1.9633962701262597, + "grad_norm": 0.49989691376686096, + "learning_rate": 6.91094894268762e-05, + "loss": 0.3082, + "step": 50850 + }, + { + "epoch": 1.9637823854202865, + "grad_norm": 1.8456840515136719, + "learning_rate": 6.908374840727442e-05, + "loss": 0.102, + "step": 50860 + }, + { + "epoch": 1.9641685007143133, + "grad_norm": 1.256460428237915, + "learning_rate": 6.905800738767263e-05, + "loss": 0.1375, + "step": 50870 + }, + { + "epoch": 1.96455461600834, + "grad_norm": 2.0318634510040283, + "learning_rate": 6.903226636807084e-05, + "loss": 0.2662, + "step": 50880 + }, + { + "epoch": 1.964940731302367, + "grad_norm": 0.6381733417510986, + "learning_rate": 6.900652534846906e-05, + "loss": 0.176, + "step": 50890 + }, + { + "epoch": 1.9653268465963936, + "grad_norm": 0.2860821485519409, + "learning_rate": 6.898078432886727e-05, + "loss": 0.1307, + "step": 50900 + }, + { + "epoch": 1.9657129618904206, + "grad_norm": 1.2890506982803345, + "learning_rate": 6.895504330926548e-05, + "loss": 0.1305, + "step": 50910 + }, + { + "epoch": 1.9660990771844473, + "grad_norm": 0.13067105412483215, + "learning_rate": 6.89293022896637e-05, + "loss": 0.1777, + "step": 50920 + }, + { + "epoch": 1.966485192478474, + "grad_norm": 0.7632800340652466, + "learning_rate": 6.890356127006191e-05, + "loss": 0.1248, + "step": 50930 + }, + { + "epoch": 1.9668713077725009, + "grad_norm": 0.10640933364629745, + "learning_rate": 6.887782025046012e-05, + "loss": 0.2368, + "step": 50940 + }, + { + "epoch": 1.9672574230665276, + "grad_norm": 0.8060460686683655, + "learning_rate": 6.885207923085834e-05, + "loss": 0.1205, + "step": 50950 + }, + { + "epoch": 1.9676435383605546, + "grad_norm": 0.028001902624964714, + "learning_rate": 6.882633821125655e-05, + "loss": 0.1134, + "step": 50960 + }, + { + "epoch": 1.9680296536545812, + "grad_norm": 
1.236852765083313, + "learning_rate": 6.880059719165478e-05, + "loss": 0.3189, + "step": 50970 + }, + { + "epoch": 1.9684157689486081, + "grad_norm": 2.090635299682617, + "learning_rate": 6.877485617205298e-05, + "loss": 0.265, + "step": 50980 + }, + { + "epoch": 1.9688018842426347, + "grad_norm": 1.046667218208313, + "learning_rate": 6.874911515245119e-05, + "loss": 0.2501, + "step": 50990 + }, + { + "epoch": 1.9691879995366617, + "grad_norm": 2.0770325660705566, + "learning_rate": 6.87233741328494e-05, + "loss": 0.2136, + "step": 51000 + }, + { + "epoch": 1.9695741148306884, + "grad_norm": 0.7211881279945374, + "learning_rate": 6.869763311324762e-05, + "loss": 0.2434, + "step": 51010 + }, + { + "epoch": 1.9699602301247152, + "grad_norm": 0.8447550535202026, + "learning_rate": 6.867189209364583e-05, + "loss": 0.3904, + "step": 51020 + }, + { + "epoch": 1.970346345418742, + "grad_norm": 0.2205502986907959, + "learning_rate": 6.864615107404406e-05, + "loss": 0.1804, + "step": 51030 + }, + { + "epoch": 1.9707324607127688, + "grad_norm": 0.5322203636169434, + "learning_rate": 6.862041005444227e-05, + "loss": 0.2106, + "step": 51040 + }, + { + "epoch": 1.9711185760067957, + "grad_norm": 3.1091675758361816, + "learning_rate": 6.859466903484048e-05, + "loss": 0.3203, + "step": 51050 + }, + { + "epoch": 1.9715046913008223, + "grad_norm": 2.670405864715576, + "learning_rate": 6.856892801523868e-05, + "loss": 0.2011, + "step": 51060 + }, + { + "epoch": 1.9718908065948493, + "grad_norm": 1.0524908304214478, + "learning_rate": 6.85431869956369e-05, + "loss": 0.1144, + "step": 51070 + }, + { + "epoch": 1.972276921888876, + "grad_norm": 3.9005608558654785, + "learning_rate": 6.851744597603511e-05, + "loss": 0.3813, + "step": 51080 + }, + { + "epoch": 1.9726630371829028, + "grad_norm": 1.1112456321716309, + "learning_rate": 6.849170495643332e-05, + "loss": 0.1755, + "step": 51090 + }, + { + "epoch": 1.9730491524769296, + "grad_norm": 0.5004397630691528, + "learning_rate": 6.846596393683155e-05, + "loss": 0.17, + "step": 51100 + }, + { + "epoch": 1.9734352677709563, + "grad_norm": 0.7849172353744507, + "learning_rate": 6.844022291722976e-05, + "loss": 0.1669, + "step": 51110 + }, + { + "epoch": 1.9738213830649833, + "grad_norm": 0.7871361970901489, + "learning_rate": 6.841448189762798e-05, + "loss": 0.1466, + "step": 51120 + }, + { + "epoch": 1.9742074983590099, + "grad_norm": 1.1090983152389526, + "learning_rate": 6.838874087802617e-05, + "loss": 0.117, + "step": 51130 + }, + { + "epoch": 1.9745936136530369, + "grad_norm": 0.8283473253250122, + "learning_rate": 6.836299985842439e-05, + "loss": 0.1792, + "step": 51140 + }, + { + "epoch": 1.9749797289470636, + "grad_norm": 1.0861999988555908, + "learning_rate": 6.83372588388226e-05, + "loss": 0.1357, + "step": 51150 + }, + { + "epoch": 1.9753658442410904, + "grad_norm": 1.0975921154022217, + "learning_rate": 6.831151781922083e-05, + "loss": 0.2618, + "step": 51160 + }, + { + "epoch": 1.9757519595351172, + "grad_norm": 1.579583764076233, + "learning_rate": 6.828577679961904e-05, + "loss": 0.3458, + "step": 51170 + }, + { + "epoch": 1.976138074829144, + "grad_norm": 0.48704493045806885, + "learning_rate": 6.826003578001726e-05, + "loss": 0.1892, + "step": 51180 + }, + { + "epoch": 1.976524190123171, + "grad_norm": 0.4196261465549469, + "learning_rate": 6.823429476041547e-05, + "loss": 0.2072, + "step": 51190 + }, + { + "epoch": 1.9769103054171975, + "grad_norm": 0.5368069410324097, + "learning_rate": 6.820855374081367e-05, + "loss": 0.1758, + "step": 51200 
+ }, + { + "epoch": 1.9772964207112245, + "grad_norm": 3.1401877403259277, + "learning_rate": 6.818281272121188e-05, + "loss": 0.2463, + "step": 51210 + }, + { + "epoch": 1.977682536005251, + "grad_norm": 2.333087682723999, + "learning_rate": 6.815707170161011e-05, + "loss": 0.1826, + "step": 51220 + }, + { + "epoch": 1.978068651299278, + "grad_norm": 0.4700605869293213, + "learning_rate": 6.813133068200832e-05, + "loss": 0.1761, + "step": 51230 + }, + { + "epoch": 1.9784547665933048, + "grad_norm": 0.23108141124248505, + "learning_rate": 6.810558966240653e-05, + "loss": 0.3299, + "step": 51240 + }, + { + "epoch": 1.9788408818873315, + "grad_norm": 1.1515973806381226, + "learning_rate": 6.807984864280475e-05, + "loss": 0.2352, + "step": 51250 + }, + { + "epoch": 1.9792269971813583, + "grad_norm": 3.1671624183654785, + "learning_rate": 6.805410762320296e-05, + "loss": 0.18, + "step": 51260 + }, + { + "epoch": 1.979613112475385, + "grad_norm": 1.9045623540878296, + "learning_rate": 6.802836660360116e-05, + "loss": 0.1498, + "step": 51270 + }, + { + "epoch": 1.979999227769412, + "grad_norm": 3.6761367321014404, + "learning_rate": 6.800262558399939e-05, + "loss": 0.3242, + "step": 51280 + }, + { + "epoch": 1.9803853430634386, + "grad_norm": 0.8505986332893372, + "learning_rate": 6.79768845643976e-05, + "loss": 0.1563, + "step": 51290 + }, + { + "epoch": 1.9807714583574656, + "grad_norm": 0.5191363096237183, + "learning_rate": 6.795114354479581e-05, + "loss": 0.1699, + "step": 51300 + }, + { + "epoch": 1.9811575736514924, + "grad_norm": 1.0205559730529785, + "learning_rate": 6.792540252519403e-05, + "loss": 0.1785, + "step": 51310 + }, + { + "epoch": 1.9815436889455191, + "grad_norm": 0.929551899433136, + "learning_rate": 6.789966150559224e-05, + "loss": 0.2618, + "step": 51320 + }, + { + "epoch": 1.981929804239546, + "grad_norm": 0.3799718916416168, + "learning_rate": 6.787392048599045e-05, + "loss": 0.2574, + "step": 51330 + }, + { + "epoch": 1.9823159195335727, + "grad_norm": 0.8543326258659363, + "learning_rate": 6.784817946638867e-05, + "loss": 0.2277, + "step": 51340 + }, + { + "epoch": 1.9827020348275997, + "grad_norm": 1.2967779636383057, + "learning_rate": 6.782243844678688e-05, + "loss": 0.1693, + "step": 51350 + }, + { + "epoch": 1.9830881501216262, + "grad_norm": 0.9887800216674805, + "learning_rate": 6.77966974271851e-05, + "loss": 0.151, + "step": 51360 + }, + { + "epoch": 1.9834742654156532, + "grad_norm": 1.197924256324768, + "learning_rate": 6.777095640758331e-05, + "loss": 0.2184, + "step": 51370 + }, + { + "epoch": 1.98386038070968, + "grad_norm": 1.3980039358139038, + "learning_rate": 6.774521538798152e-05, + "loss": 0.3999, + "step": 51380 + }, + { + "epoch": 1.9842464960037067, + "grad_norm": 2.041222095489502, + "learning_rate": 6.771947436837973e-05, + "loss": 0.203, + "step": 51390 + }, + { + "epoch": 1.9846326112977335, + "grad_norm": 1.3461644649505615, + "learning_rate": 6.769373334877795e-05, + "loss": 0.2735, + "step": 51400 + }, + { + "epoch": 1.9850187265917603, + "grad_norm": 1.046863079071045, + "learning_rate": 6.766799232917616e-05, + "loss": 0.3281, + "step": 51410 + }, + { + "epoch": 1.9854048418857873, + "grad_norm": 1.3338502645492554, + "learning_rate": 6.764225130957437e-05, + "loss": 0.2538, + "step": 51420 + }, + { + "epoch": 1.9857909571798138, + "grad_norm": 1.3392490148544312, + "learning_rate": 6.761651028997259e-05, + "loss": 0.2737, + "step": 51430 + }, + { + "epoch": 1.9861770724738408, + "grad_norm": 1.5787503719329834, + "learning_rate": 
6.75907692703708e-05, + "loss": 0.185, + "step": 51440 + }, + { + "epoch": 1.9865631877678673, + "grad_norm": 0.5758817791938782, + "learning_rate": 6.756502825076901e-05, + "loss": 0.1879, + "step": 51450 + }, + { + "epoch": 1.9869493030618943, + "grad_norm": 1.6144100427627563, + "learning_rate": 6.753928723116723e-05, + "loss": 0.2082, + "step": 51460 + }, + { + "epoch": 1.987335418355921, + "grad_norm": 0.7562010884284973, + "learning_rate": 6.751354621156545e-05, + "loss": 0.1859, + "step": 51470 + }, + { + "epoch": 1.9877215336499479, + "grad_norm": 1.3656364679336548, + "learning_rate": 6.748780519196365e-05, + "loss": 0.1591, + "step": 51480 + }, + { + "epoch": 1.9881076489439746, + "grad_norm": 2.7086987495422363, + "learning_rate": 6.746206417236187e-05, + "loss": 0.2364, + "step": 51490 + }, + { + "epoch": 1.9884937642380014, + "grad_norm": 2.2941534519195557, + "learning_rate": 6.743632315276008e-05, + "loss": 0.2207, + "step": 51500 + }, + { + "epoch": 1.9888798795320284, + "grad_norm": 1.2723060846328735, + "learning_rate": 6.74105821331583e-05, + "loss": 0.2617, + "step": 51510 + }, + { + "epoch": 1.989265994826055, + "grad_norm": 2.308548927307129, + "learning_rate": 6.738484111355651e-05, + "loss": 0.1709, + "step": 51520 + }, + { + "epoch": 1.989652110120082, + "grad_norm": 0.7583028078079224, + "learning_rate": 6.735910009395473e-05, + "loss": 0.2998, + "step": 51530 + }, + { + "epoch": 1.9900382254141087, + "grad_norm": 2.3491339683532715, + "learning_rate": 6.733335907435295e-05, + "loss": 0.209, + "step": 51540 + }, + { + "epoch": 1.9904243407081355, + "grad_norm": 3.154418468475342, + "learning_rate": 6.730761805475115e-05, + "loss": 0.3855, + "step": 51550 + }, + { + "epoch": 1.9908104560021622, + "grad_norm": 0.6829432845115662, + "learning_rate": 6.728187703514936e-05, + "loss": 0.1427, + "step": 51560 + }, + { + "epoch": 1.991196571296189, + "grad_norm": 1.331217646598816, + "learning_rate": 6.725613601554757e-05, + "loss": 0.1843, + "step": 51570 + }, + { + "epoch": 1.991582686590216, + "grad_norm": 2.35197114944458, + "learning_rate": 6.723039499594579e-05, + "loss": 0.2557, + "step": 51580 + }, + { + "epoch": 1.9919688018842425, + "grad_norm": 0.5971415638923645, + "learning_rate": 6.7204653976344e-05, + "loss": 0.1782, + "step": 51590 + }, + { + "epoch": 1.9923549171782695, + "grad_norm": 1.0941762924194336, + "learning_rate": 6.717891295674223e-05, + "loss": 0.1377, + "step": 51600 + }, + { + "epoch": 1.9927410324722963, + "grad_norm": 0.43831324577331543, + "learning_rate": 6.715317193714044e-05, + "loss": 0.181, + "step": 51610 + }, + { + "epoch": 1.993127147766323, + "grad_norm": 1.4920772314071655, + "learning_rate": 6.712743091753864e-05, + "loss": 0.2724, + "step": 51620 + }, + { + "epoch": 1.9935132630603498, + "grad_norm": 0.6723024845123291, + "learning_rate": 6.710168989793685e-05, + "loss": 0.1732, + "step": 51630 + }, + { + "epoch": 1.9938993783543766, + "grad_norm": 0.925322413444519, + "learning_rate": 6.707594887833507e-05, + "loss": 0.2794, + "step": 51640 + }, + { + "epoch": 1.9942854936484036, + "grad_norm": 0.27710771560668945, + "learning_rate": 6.705020785873328e-05, + "loss": 0.2379, + "step": 51650 + }, + { + "epoch": 1.9946716089424301, + "grad_norm": 0.24498054385185242, + "learning_rate": 6.702446683913151e-05, + "loss": 0.2685, + "step": 51660 + }, + { + "epoch": 1.995057724236457, + "grad_norm": 0.43449532985687256, + "learning_rate": 6.699872581952972e-05, + "loss": 0.2799, + "step": 51670 + }, + { + "epoch": 1.9954438395304837, 
+ "grad_norm": 1.3820387125015259, + "learning_rate": 6.697298479992793e-05, + "loss": 0.3061, + "step": 51680 + }, + { + "epoch": 1.9958299548245106, + "grad_norm": 1.3658883571624756, + "learning_rate": 6.694724378032615e-05, + "loss": 0.2033, + "step": 51690 + }, + { + "epoch": 1.9962160701185374, + "grad_norm": 0.905115008354187, + "learning_rate": 6.692150276072435e-05, + "loss": 0.1817, + "step": 51700 + }, + { + "epoch": 1.9966021854125642, + "grad_norm": 0.6158314347267151, + "learning_rate": 6.689576174112256e-05, + "loss": 0.1853, + "step": 51710 + }, + { + "epoch": 1.996988300706591, + "grad_norm": 0.145170658826828, + "learning_rate": 6.687002072152079e-05, + "loss": 0.1351, + "step": 51720 + }, + { + "epoch": 1.9973744160006177, + "grad_norm": 0.6830449104309082, + "learning_rate": 6.6844279701919e-05, + "loss": 0.1196, + "step": 51730 + }, + { + "epoch": 1.9977605312946447, + "grad_norm": 0.5634799599647522, + "learning_rate": 6.681853868231721e-05, + "loss": 0.1919, + "step": 51740 + }, + { + "epoch": 1.9981466465886712, + "grad_norm": 1.7590057849884033, + "learning_rate": 6.679279766271543e-05, + "loss": 0.5667, + "step": 51750 + }, + { + "epoch": 1.9985327618826982, + "grad_norm": 0.638635516166687, + "learning_rate": 6.676705664311364e-05, + "loss": 0.2653, + "step": 51760 + }, + { + "epoch": 1.998918877176725, + "grad_norm": 2.008990526199341, + "learning_rate": 6.674131562351184e-05, + "loss": 0.3168, + "step": 51770 + }, + { + "epoch": 1.9993049924707518, + "grad_norm": 0.6151747107505798, + "learning_rate": 6.671557460391007e-05, + "loss": 0.2078, + "step": 51780 + }, + { + "epoch": 1.9996911077647785, + "grad_norm": 1.702333688735962, + "learning_rate": 6.668983358430828e-05, + "loss": 0.3154, + "step": 51790 + }, + { + "epoch": 2.0000772230588053, + "grad_norm": 1.1011065244674683, + "learning_rate": 6.66640925647065e-05, + "loss": 0.2318, + "step": 51800 + }, + { + "epoch": 2.0004633383528323, + "grad_norm": 0.7088577747344971, + "learning_rate": 6.663835154510471e-05, + "loss": 0.2106, + "step": 51810 + }, + { + "epoch": 2.000849453646859, + "grad_norm": 0.9032210111618042, + "learning_rate": 6.661261052550292e-05, + "loss": 0.2235, + "step": 51820 + }, + { + "epoch": 2.001235568940886, + "grad_norm": 1.8217551708221436, + "learning_rate": 6.658686950590113e-05, + "loss": 0.2385, + "step": 51830 + }, + { + "epoch": 2.0016216842349124, + "grad_norm": 0.3937009871006012, + "learning_rate": 6.656112848629933e-05, + "loss": 0.0739, + "step": 51840 + }, + { + "epoch": 2.0020077995289394, + "grad_norm": 0.1690339297056198, + "learning_rate": 6.653538746669756e-05, + "loss": 0.2361, + "step": 51850 + }, + { + "epoch": 2.0023939148229664, + "grad_norm": 0.4467180073261261, + "learning_rate": 6.650964644709577e-05, + "loss": 0.1675, + "step": 51860 + }, + { + "epoch": 2.002780030116993, + "grad_norm": 0.6363991498947144, + "learning_rate": 6.648390542749399e-05, + "loss": 0.1565, + "step": 51870 + }, + { + "epoch": 2.00316614541102, + "grad_norm": 0.6243847012519836, + "learning_rate": 6.64581644078922e-05, + "loss": 0.3038, + "step": 51880 + }, + { + "epoch": 2.0035522607050464, + "grad_norm": 1.705432415008545, + "learning_rate": 6.643242338829041e-05, + "loss": 0.1347, + "step": 51890 + }, + { + "epoch": 2.0039383759990734, + "grad_norm": 0.8589022159576416, + "learning_rate": 6.640668236868863e-05, + "loss": 0.1867, + "step": 51900 + }, + { + "epoch": 2.0043244912931, + "grad_norm": 1.468563437461853, + "learning_rate": 6.638094134908684e-05, + "loss": 0.3603, + 
"step": 51910 + }, + { + "epoch": 2.004710606587127, + "grad_norm": 2.0505447387695312, + "learning_rate": 6.635520032948505e-05, + "loss": 0.3336, + "step": 51920 + }, + { + "epoch": 2.0050967218811535, + "grad_norm": 0.06167216598987579, + "learning_rate": 6.632945930988327e-05, + "loss": 0.0434, + "step": 51930 + }, + { + "epoch": 2.0054828371751805, + "grad_norm": 0.43672385811805725, + "learning_rate": 6.630371829028148e-05, + "loss": 0.2008, + "step": 51940 + }, + { + "epoch": 2.0058689524692075, + "grad_norm": 0.20339979231357574, + "learning_rate": 6.62779772706797e-05, + "loss": 0.1828, + "step": 51950 + }, + { + "epoch": 2.006255067763234, + "grad_norm": 0.9884247779846191, + "learning_rate": 6.625223625107791e-05, + "loss": 0.1913, + "step": 51960 + }, + { + "epoch": 2.006641183057261, + "grad_norm": 1.9355684518814087, + "learning_rate": 6.622649523147612e-05, + "loss": 0.2164, + "step": 51970 + }, + { + "epoch": 2.0070272983512876, + "grad_norm": 0.6647536754608154, + "learning_rate": 6.620075421187433e-05, + "loss": 0.1993, + "step": 51980 + }, + { + "epoch": 2.0074134136453146, + "grad_norm": 0.5795693397521973, + "learning_rate": 6.617501319227255e-05, + "loss": 0.1887, + "step": 51990 + }, + { + "epoch": 2.007799528939341, + "grad_norm": 1.605940341949463, + "learning_rate": 6.614927217267076e-05, + "loss": 0.247, + "step": 52000 + }, + { + "epoch": 2.008185644233368, + "grad_norm": 1.3359249830245972, + "learning_rate": 6.612353115306897e-05, + "loss": 0.2031, + "step": 52010 + }, + { + "epoch": 2.008571759527395, + "grad_norm": 3.889131784439087, + "learning_rate": 6.609779013346719e-05, + "loss": 0.1584, + "step": 52020 + }, + { + "epoch": 2.0089578748214216, + "grad_norm": 1.9775649309158325, + "learning_rate": 6.607204911386541e-05, + "loss": 0.1041, + "step": 52030 + }, + { + "epoch": 2.0093439901154486, + "grad_norm": 0.13655538856983185, + "learning_rate": 6.604630809426363e-05, + "loss": 0.1621, + "step": 52040 + }, + { + "epoch": 2.009730105409475, + "grad_norm": 0.659899115562439, + "learning_rate": 6.602056707466183e-05, + "loss": 0.1603, + "step": 52050 + }, + { + "epoch": 2.010116220703502, + "grad_norm": 0.5897572040557861, + "learning_rate": 6.599482605506004e-05, + "loss": 0.1807, + "step": 52060 + }, + { + "epoch": 2.0105023359975287, + "grad_norm": 1.9831010103225708, + "learning_rate": 6.596908503545825e-05, + "loss": 0.1517, + "step": 52070 + }, + { + "epoch": 2.0108884512915557, + "grad_norm": 0.16144464910030365, + "learning_rate": 6.594334401585647e-05, + "loss": 0.0999, + "step": 52080 + }, + { + "epoch": 2.0112745665855822, + "grad_norm": 1.6258141994476318, + "learning_rate": 6.591760299625468e-05, + "loss": 0.2152, + "step": 52090 + }, + { + "epoch": 2.0116606818796092, + "grad_norm": 1.0857526063919067, + "learning_rate": 6.589186197665291e-05, + "loss": 0.3286, + "step": 52100 + }, + { + "epoch": 2.012046797173636, + "grad_norm": 0.2918669581413269, + "learning_rate": 6.586612095705112e-05, + "loss": 0.0839, + "step": 52110 + }, + { + "epoch": 2.0124329124676628, + "grad_norm": 0.7994667887687683, + "learning_rate": 6.584037993744932e-05, + "loss": 0.0723, + "step": 52120 + }, + { + "epoch": 2.0128190277616897, + "grad_norm": 1.1056885719299316, + "learning_rate": 6.581463891784753e-05, + "loss": 0.2936, + "step": 52130 + }, + { + "epoch": 2.0132051430557163, + "grad_norm": 1.6628743410110474, + "learning_rate": 6.578889789824575e-05, + "loss": 0.1095, + "step": 52140 + }, + { + "epoch": 2.0135912583497433, + "grad_norm": 0.7514179348945618, + 
"learning_rate": 6.576315687864396e-05, + "loss": 0.2834, + "step": 52150 + }, + { + "epoch": 2.01397737364377, + "grad_norm": 0.17141447961330414, + "learning_rate": 6.573741585904219e-05, + "loss": 0.0834, + "step": 52160 + }, + { + "epoch": 2.014363488937797, + "grad_norm": 1.6377034187316895, + "learning_rate": 6.57116748394404e-05, + "loss": 0.1515, + "step": 52170 + }, + { + "epoch": 2.014749604231824, + "grad_norm": 2.604389190673828, + "learning_rate": 6.568593381983861e-05, + "loss": 0.0964, + "step": 52180 + }, + { + "epoch": 2.0151357195258504, + "grad_norm": 0.737349808216095, + "learning_rate": 6.566019280023681e-05, + "loss": 0.1861, + "step": 52190 + }, + { + "epoch": 2.0155218348198773, + "grad_norm": 0.499905526638031, + "learning_rate": 6.563445178063503e-05, + "loss": 0.2146, + "step": 52200 + }, + { + "epoch": 2.015907950113904, + "grad_norm": 1.0351229906082153, + "learning_rate": 6.560871076103324e-05, + "loss": 0.2461, + "step": 52210 + }, + { + "epoch": 2.016294065407931, + "grad_norm": 1.6009700298309326, + "learning_rate": 6.558296974143147e-05, + "loss": 0.2325, + "step": 52220 + }, + { + "epoch": 2.0166801807019574, + "grad_norm": 1.5281599760055542, + "learning_rate": 6.555722872182968e-05, + "loss": 0.2172, + "step": 52230 + }, + { + "epoch": 2.0170662959959844, + "grad_norm": 0.7039555907249451, + "learning_rate": 6.553148770222789e-05, + "loss": 0.3143, + "step": 52240 + }, + { + "epoch": 2.0174524112900114, + "grad_norm": 1.310943365097046, + "learning_rate": 6.55057466826261e-05, + "loss": 0.1785, + "step": 52250 + }, + { + "epoch": 2.017838526584038, + "grad_norm": 0.6066591143608093, + "learning_rate": 6.548000566302432e-05, + "loss": 0.1295, + "step": 52260 + }, + { + "epoch": 2.018224641878065, + "grad_norm": 2.3664653301239014, + "learning_rate": 6.545426464342252e-05, + "loss": 0.229, + "step": 52270 + }, + { + "epoch": 2.0186107571720915, + "grad_norm": 1.0997484922409058, + "learning_rate": 6.542852362382075e-05, + "loss": 0.1494, + "step": 52280 + }, + { + "epoch": 2.0189968724661185, + "grad_norm": 3.237204074859619, + "learning_rate": 6.540278260421896e-05, + "loss": 0.1952, + "step": 52290 + }, + { + "epoch": 2.019382987760145, + "grad_norm": 1.9585202932357788, + "learning_rate": 6.537704158461717e-05, + "loss": 0.1986, + "step": 52300 + }, + { + "epoch": 2.019769103054172, + "grad_norm": 0.18385589122772217, + "learning_rate": 6.535130056501539e-05, + "loss": 0.1373, + "step": 52310 + }, + { + "epoch": 2.0201552183481986, + "grad_norm": 1.4465802907943726, + "learning_rate": 6.53255595454136e-05, + "loss": 0.2492, + "step": 52320 + }, + { + "epoch": 2.0205413336422255, + "grad_norm": 0.3525356352329254, + "learning_rate": 6.529981852581181e-05, + "loss": 0.1046, + "step": 52330 + }, + { + "epoch": 2.0209274489362525, + "grad_norm": 0.10468830168247223, + "learning_rate": 6.527407750621003e-05, + "loss": 0.0702, + "step": 52340 + }, + { + "epoch": 2.021313564230279, + "grad_norm": 0.2023550570011139, + "learning_rate": 6.524833648660824e-05, + "loss": 0.2134, + "step": 52350 + }, + { + "epoch": 2.021699679524306, + "grad_norm": 1.8294217586517334, + "learning_rate": 6.522259546700645e-05, + "loss": 0.2443, + "step": 52360 + }, + { + "epoch": 2.0220857948183326, + "grad_norm": 0.3425254225730896, + "learning_rate": 6.519685444740467e-05, + "loss": 0.151, + "step": 52370 + }, + { + "epoch": 2.0224719101123596, + "grad_norm": 0.3099939227104187, + "learning_rate": 6.517111342780288e-05, + "loss": 0.1133, + "step": 52380 + }, + { + "epoch": 
2.022858025406386, + "grad_norm": 2.0373382568359375, + "learning_rate": 6.514537240820109e-05, + "loss": 0.1773, + "step": 52390 + }, + { + "epoch": 2.023244140700413, + "grad_norm": 0.5817141532897949, + "learning_rate": 6.51196313885993e-05, + "loss": 0.1276, + "step": 52400 + }, + { + "epoch": 2.02363025599444, + "grad_norm": 2.0622966289520264, + "learning_rate": 6.509389036899752e-05, + "loss": 0.1889, + "step": 52410 + }, + { + "epoch": 2.0240163712884667, + "grad_norm": 1.1906920671463013, + "learning_rate": 6.506814934939573e-05, + "loss": 0.096, + "step": 52420 + }, + { + "epoch": 2.0244024865824937, + "grad_norm": 0.1317962408065796, + "learning_rate": 6.504240832979395e-05, + "loss": 0.2232, + "step": 52430 + }, + { + "epoch": 2.02478860187652, + "grad_norm": 0.13029718399047852, + "learning_rate": 6.501666731019216e-05, + "loss": 0.1511, + "step": 52440 + }, + { + "epoch": 2.025174717170547, + "grad_norm": 0.793836772441864, + "learning_rate": 6.499092629059037e-05, + "loss": 0.1654, + "step": 52450 + }, + { + "epoch": 2.0255608324645737, + "grad_norm": 0.5743208527565002, + "learning_rate": 6.496518527098859e-05, + "loss": 0.2694, + "step": 52460 + }, + { + "epoch": 2.0259469477586007, + "grad_norm": 0.9897276759147644, + "learning_rate": 6.49394442513868e-05, + "loss": 0.2149, + "step": 52470 + }, + { + "epoch": 2.0263330630526277, + "grad_norm": 2.601984977722168, + "learning_rate": 6.491370323178501e-05, + "loss": 0.1285, + "step": 52480 + }, + { + "epoch": 2.0267191783466543, + "grad_norm": 1.3703612089157104, + "learning_rate": 6.488796221218323e-05, + "loss": 0.1226, + "step": 52490 + }, + { + "epoch": 2.0271052936406813, + "grad_norm": 0.8976957201957703, + "learning_rate": 6.486222119258144e-05, + "loss": 0.3069, + "step": 52500 + }, + { + "epoch": 2.027491408934708, + "grad_norm": 1.4867346286773682, + "learning_rate": 6.483648017297965e-05, + "loss": 0.1804, + "step": 52510 + }, + { + "epoch": 2.027877524228735, + "grad_norm": 0.9173004031181335, + "learning_rate": 6.481073915337787e-05, + "loss": 0.193, + "step": 52520 + }, + { + "epoch": 2.0282636395227613, + "grad_norm": 1.4100719690322876, + "learning_rate": 6.478499813377609e-05, + "loss": 0.2443, + "step": 52530 + }, + { + "epoch": 2.0286497548167883, + "grad_norm": 0.08620387315750122, + "learning_rate": 6.475925711417429e-05, + "loss": 0.2858, + "step": 52540 + }, + { + "epoch": 2.029035870110815, + "grad_norm": 0.6817231178283691, + "learning_rate": 6.47335160945725e-05, + "loss": 0.1421, + "step": 52550 + }, + { + "epoch": 2.029421985404842, + "grad_norm": 0.7937541604042053, + "learning_rate": 6.470777507497072e-05, + "loss": 0.1856, + "step": 52560 + }, + { + "epoch": 2.029808100698869, + "grad_norm": 0.3391193151473999, + "learning_rate": 6.468203405536893e-05, + "loss": 0.2714, + "step": 52570 + }, + { + "epoch": 2.0301942159928954, + "grad_norm": 0.45617592334747314, + "learning_rate": 6.465629303576715e-05, + "loss": 0.1528, + "step": 52580 + }, + { + "epoch": 2.0305803312869224, + "grad_norm": 0.6412602663040161, + "learning_rate": 6.463055201616537e-05, + "loss": 0.0908, + "step": 52590 + }, + { + "epoch": 2.030966446580949, + "grad_norm": 0.5588594079017639, + "learning_rate": 6.460481099656359e-05, + "loss": 0.1448, + "step": 52600 + }, + { + "epoch": 2.031352561874976, + "grad_norm": 0.5726466774940491, + "learning_rate": 6.457906997696179e-05, + "loss": 0.2771, + "step": 52610 + }, + { + "epoch": 2.0317386771690025, + "grad_norm": 1.625110387802124, + "learning_rate": 6.455332895736e-05, + 
"loss": 0.1147, + "step": 52620 + }, + { + "epoch": 2.0321247924630295, + "grad_norm": 0.9171527624130249, + "learning_rate": 6.452758793775821e-05, + "loss": 0.0847, + "step": 52630 + }, + { + "epoch": 2.0325109077570565, + "grad_norm": 0.1905253678560257, + "learning_rate": 6.450184691815643e-05, + "loss": 0.2086, + "step": 52640 + }, + { + "epoch": 2.032897023051083, + "grad_norm": 0.2381320744752884, + "learning_rate": 6.447610589855464e-05, + "loss": 0.1767, + "step": 52650 + }, + { + "epoch": 2.03328313834511, + "grad_norm": 2.2482104301452637, + "learning_rate": 6.445036487895287e-05, + "loss": 0.2626, + "step": 52660 + }, + { + "epoch": 2.0336692536391365, + "grad_norm": 2.5901641845703125, + "learning_rate": 6.442462385935108e-05, + "loss": 0.21, + "step": 52670 + }, + { + "epoch": 2.0340553689331635, + "grad_norm": 3.242913007736206, + "learning_rate": 6.439888283974929e-05, + "loss": 0.3508, + "step": 52680 + }, + { + "epoch": 2.03444148422719, + "grad_norm": 0.12099716812372208, + "learning_rate": 6.437314182014749e-05, + "loss": 0.1109, + "step": 52690 + }, + { + "epoch": 2.034827599521217, + "grad_norm": 1.384753704071045, + "learning_rate": 6.43474008005457e-05, + "loss": 0.2023, + "step": 52700 + }, + { + "epoch": 2.035213714815244, + "grad_norm": 1.1507978439331055, + "learning_rate": 6.432165978094392e-05, + "loss": 0.2656, + "step": 52710 + }, + { + "epoch": 2.0355998301092706, + "grad_norm": 0.43186309933662415, + "learning_rate": 6.429591876134215e-05, + "loss": 0.2265, + "step": 52720 + }, + { + "epoch": 2.0359859454032976, + "grad_norm": 1.2791942358016968, + "learning_rate": 6.427017774174036e-05, + "loss": 0.189, + "step": 52730 + }, + { + "epoch": 2.036372060697324, + "grad_norm": 2.613863229751587, + "learning_rate": 6.424443672213857e-05, + "loss": 0.1636, + "step": 52740 + }, + { + "epoch": 2.036758175991351, + "grad_norm": 0.9245595932006836, + "learning_rate": 6.421869570253679e-05, + "loss": 0.2844, + "step": 52750 + }, + { + "epoch": 2.0371442912853777, + "grad_norm": 0.5449417233467102, + "learning_rate": 6.419295468293499e-05, + "loss": 0.24, + "step": 52760 + }, + { + "epoch": 2.0375304065794047, + "grad_norm": 0.24012671411037445, + "learning_rate": 6.41672136633332e-05, + "loss": 0.226, + "step": 52770 + }, + { + "epoch": 2.037916521873431, + "grad_norm": 0.07777285575866699, + "learning_rate": 6.414147264373143e-05, + "loss": 0.1351, + "step": 52780 + }, + { + "epoch": 2.038302637167458, + "grad_norm": 1.4423730373382568, + "learning_rate": 6.411573162412964e-05, + "loss": 0.1358, + "step": 52790 + }, + { + "epoch": 2.038688752461485, + "grad_norm": 0.9842507243156433, + "learning_rate": 6.408999060452785e-05, + "loss": 0.1947, + "step": 52800 + }, + { + "epoch": 2.0390748677555117, + "grad_norm": 0.23728783428668976, + "learning_rate": 6.406424958492607e-05, + "loss": 0.1655, + "step": 52810 + }, + { + "epoch": 2.0394609830495387, + "grad_norm": 1.0350521802902222, + "learning_rate": 6.403850856532428e-05, + "loss": 0.2269, + "step": 52820 + }, + { + "epoch": 2.0398470983435653, + "grad_norm": 0.7008020877838135, + "learning_rate": 6.401276754572248e-05, + "loss": 0.0694, + "step": 52830 + }, + { + "epoch": 2.0402332136375922, + "grad_norm": 0.2844768464565277, + "learning_rate": 6.39870265261207e-05, + "loss": 0.2871, + "step": 52840 + }, + { + "epoch": 2.040619328931619, + "grad_norm": 0.10209562629461288, + "learning_rate": 6.396128550651892e-05, + "loss": 0.1556, + "step": 52850 + }, + { + "epoch": 2.041005444225646, + "grad_norm": 
0.3833054006099701, + "learning_rate": 6.393554448691713e-05, + "loss": 0.2414, + "step": 52860 + }, + { + "epoch": 2.0413915595196728, + "grad_norm": 1.023119330406189, + "learning_rate": 6.390980346731535e-05, + "loss": 0.3514, + "step": 52870 + }, + { + "epoch": 2.0417776748136993, + "grad_norm": 0.2640077769756317, + "learning_rate": 6.388406244771356e-05, + "loss": 0.1651, + "step": 52880 + }, + { + "epoch": 2.0421637901077263, + "grad_norm": 0.5331079363822937, + "learning_rate": 6.385832142811177e-05, + "loss": 0.225, + "step": 52890 + }, + { + "epoch": 2.042549905401753, + "grad_norm": 1.8062942028045654, + "learning_rate": 6.383258040850999e-05, + "loss": 0.145, + "step": 52900 + }, + { + "epoch": 2.04293602069578, + "grad_norm": 0.5096505284309387, + "learning_rate": 6.38068393889082e-05, + "loss": 0.1353, + "step": 52910 + }, + { + "epoch": 2.0433221359898064, + "grad_norm": 0.6749324798583984, + "learning_rate": 6.378109836930641e-05, + "loss": 0.2563, + "step": 52920 + }, + { + "epoch": 2.0437082512838334, + "grad_norm": 2.487567186355591, + "learning_rate": 6.375535734970463e-05, + "loss": 0.1616, + "step": 52930 + }, + { + "epoch": 2.0440943665778604, + "grad_norm": 0.7904801964759827, + "learning_rate": 6.372961633010284e-05, + "loss": 0.1579, + "step": 52940 + }, + { + "epoch": 2.044480481871887, + "grad_norm": 2.078003406524658, + "learning_rate": 6.370387531050105e-05, + "loss": 0.2368, + "step": 52950 + }, + { + "epoch": 2.044866597165914, + "grad_norm": 0.778519332408905, + "learning_rate": 6.367813429089927e-05, + "loss": 0.1509, + "step": 52960 + }, + { + "epoch": 2.0452527124599404, + "grad_norm": 0.919970691204071, + "learning_rate": 6.365239327129748e-05, + "loss": 0.1493, + "step": 52970 + }, + { + "epoch": 2.0456388277539674, + "grad_norm": 0.06400478631258011, + "learning_rate": 6.362665225169569e-05, + "loss": 0.2599, + "step": 52980 + }, + { + "epoch": 2.046024943047994, + "grad_norm": 3.479253053665161, + "learning_rate": 6.36009112320939e-05, + "loss": 0.2582, + "step": 52990 + }, + { + "epoch": 2.046411058342021, + "grad_norm": 0.5447397232055664, + "learning_rate": 6.357517021249212e-05, + "loss": 0.0906, + "step": 53000 + }, + { + "epoch": 2.0467971736360475, + "grad_norm": 1.1518877744674683, + "learning_rate": 6.354942919289033e-05, + "loss": 0.1255, + "step": 53010 + }, + { + "epoch": 2.0471832889300745, + "grad_norm": 1.782228708267212, + "learning_rate": 6.352368817328855e-05, + "loss": 0.4931, + "step": 53020 + }, + { + "epoch": 2.0475694042241015, + "grad_norm": 1.1501574516296387, + "learning_rate": 6.349794715368677e-05, + "loss": 0.1242, + "step": 53030 + }, + { + "epoch": 2.047955519518128, + "grad_norm": 2.69543719291687, + "learning_rate": 6.347220613408497e-05, + "loss": 0.2079, + "step": 53040 + }, + { + "epoch": 2.048341634812155, + "grad_norm": 0.6990146040916443, + "learning_rate": 6.344646511448318e-05, + "loss": 0.1033, + "step": 53050 + }, + { + "epoch": 2.0487277501061816, + "grad_norm": 2.3171396255493164, + "learning_rate": 6.34207240948814e-05, + "loss": 0.2182, + "step": 53060 + }, + { + "epoch": 2.0491138654002086, + "grad_norm": 0.11202628165483475, + "learning_rate": 6.339498307527961e-05, + "loss": 0.1862, + "step": 53070 + }, + { + "epoch": 2.049499980694235, + "grad_norm": 0.0545661523938179, + "learning_rate": 6.336924205567782e-05, + "loss": 0.1641, + "step": 53080 + }, + { + "epoch": 2.049886095988262, + "grad_norm": 0.9861188530921936, + "learning_rate": 6.334350103607605e-05, + "loss": 0.1862, + "step": 53090 + }, + { 
+ "epoch": 2.050272211282289, + "grad_norm": 0.6137722730636597, + "learning_rate": 6.331776001647427e-05, + "loss": 0.1213, + "step": 53100 + }, + { + "epoch": 2.0506583265763156, + "grad_norm": 0.3138205409049988, + "learning_rate": 6.329201899687246e-05, + "loss": 0.2589, + "step": 53110 + }, + { + "epoch": 2.0510444418703426, + "grad_norm": 1.4361293315887451, + "learning_rate": 6.326627797727068e-05, + "loss": 0.0854, + "step": 53120 + }, + { + "epoch": 2.051430557164369, + "grad_norm": 0.18099132180213928, + "learning_rate": 6.324053695766889e-05, + "loss": 0.173, + "step": 53130 + }, + { + "epoch": 2.051816672458396, + "grad_norm": 1.630255103111267, + "learning_rate": 6.32147959380671e-05, + "loss": 0.2573, + "step": 53140 + }, + { + "epoch": 2.0522027877524227, + "grad_norm": 1.0745834112167358, + "learning_rate": 6.318905491846532e-05, + "loss": 0.1249, + "step": 53150 + }, + { + "epoch": 2.0525889030464497, + "grad_norm": 0.15289072692394257, + "learning_rate": 6.316331389886354e-05, + "loss": 0.204, + "step": 53160 + }, + { + "epoch": 2.0529750183404767, + "grad_norm": 0.7459616661071777, + "learning_rate": 6.313757287926176e-05, + "loss": 0.1632, + "step": 53170 + }, + { + "epoch": 2.0533611336345032, + "grad_norm": 2.7093374729156494, + "learning_rate": 6.311183185965996e-05, + "loss": 0.1477, + "step": 53180 + }, + { + "epoch": 2.0537472489285302, + "grad_norm": 0.1735665500164032, + "learning_rate": 6.308609084005817e-05, + "loss": 0.1736, + "step": 53190 + }, + { + "epoch": 2.0541333642225568, + "grad_norm": 0.3297293186187744, + "learning_rate": 6.306034982045638e-05, + "loss": 0.0974, + "step": 53200 + }, + { + "epoch": 2.0545194795165838, + "grad_norm": 0.023612016811966896, + "learning_rate": 6.30346088008546e-05, + "loss": 0.1451, + "step": 53210 + }, + { + "epoch": 2.0549055948106103, + "grad_norm": 1.3713831901550293, + "learning_rate": 6.300886778125282e-05, + "loss": 0.1103, + "step": 53220 + }, + { + "epoch": 2.0552917101046373, + "grad_norm": 0.5888402462005615, + "learning_rate": 6.298312676165104e-05, + "loss": 0.1708, + "step": 53230 + }, + { + "epoch": 2.055677825398664, + "grad_norm": 1.8231903314590454, + "learning_rate": 6.295738574204925e-05, + "loss": 0.2337, + "step": 53240 + }, + { + "epoch": 2.056063940692691, + "grad_norm": 0.5738593935966492, + "learning_rate": 6.293164472244746e-05, + "loss": 0.1951, + "step": 53250 + }, + { + "epoch": 2.056450055986718, + "grad_norm": 0.13595734536647797, + "learning_rate": 6.290590370284566e-05, + "loss": 0.1381, + "step": 53260 + }, + { + "epoch": 2.0568361712807444, + "grad_norm": 0.9126518368721008, + "learning_rate": 6.288016268324388e-05, + "loss": 0.1247, + "step": 53270 + }, + { + "epoch": 2.0572222865747714, + "grad_norm": 0.5245015621185303, + "learning_rate": 6.28544216636421e-05, + "loss": 0.2851, + "step": 53280 + }, + { + "epoch": 2.057608401868798, + "grad_norm": 0.08704059571027756, + "learning_rate": 6.282868064404032e-05, + "loss": 0.1425, + "step": 53290 + }, + { + "epoch": 2.057994517162825, + "grad_norm": 0.3915903866291046, + "learning_rate": 6.280293962443853e-05, + "loss": 0.1683, + "step": 53300 + }, + { + "epoch": 2.0583806324568514, + "grad_norm": 1.5422857999801636, + "learning_rate": 6.277719860483674e-05, + "loss": 0.178, + "step": 53310 + }, + { + "epoch": 2.0587667477508784, + "grad_norm": 0.6210331320762634, + "learning_rate": 6.275145758523496e-05, + "loss": 0.1279, + "step": 53320 + }, + { + "epoch": 2.0591528630449054, + "grad_norm": 1.8133548498153687, + "learning_rate": 
6.272571656563316e-05, + "loss": 0.1276, + "step": 53330 + }, + { + "epoch": 2.059538978338932, + "grad_norm": 1.8391001224517822, + "learning_rate": 6.269997554603138e-05, + "loss": 0.1533, + "step": 53340 + }, + { + "epoch": 2.059925093632959, + "grad_norm": 0.48838570713996887, + "learning_rate": 6.26742345264296e-05, + "loss": 0.1334, + "step": 53350 + }, + { + "epoch": 2.0603112089269855, + "grad_norm": 1.6755399703979492, + "learning_rate": 6.264849350682781e-05, + "loss": 0.1874, + "step": 53360 + }, + { + "epoch": 2.0606973242210125, + "grad_norm": 0.6416677236557007, + "learning_rate": 6.262275248722602e-05, + "loss": 0.0775, + "step": 53370 + }, + { + "epoch": 2.061083439515039, + "grad_norm": 1.258190393447876, + "learning_rate": 6.259701146762424e-05, + "loss": 0.2183, + "step": 53380 + }, + { + "epoch": 2.061469554809066, + "grad_norm": 1.543530821800232, + "learning_rate": 6.257127044802245e-05, + "loss": 0.2124, + "step": 53390 + }, + { + "epoch": 2.0618556701030926, + "grad_norm": 1.7885076999664307, + "learning_rate": 6.254552942842065e-05, + "loss": 0.1746, + "step": 53400 + }, + { + "epoch": 2.0622417853971196, + "grad_norm": 0.7699170708656311, + "learning_rate": 6.251978840881888e-05, + "loss": 0.0489, + "step": 53410 + }, + { + "epoch": 2.0626279006911465, + "grad_norm": 0.6853735446929932, + "learning_rate": 6.249404738921709e-05, + "loss": 0.0828, + "step": 53420 + }, + { + "epoch": 2.063014015985173, + "grad_norm": 1.250551462173462, + "learning_rate": 6.24683063696153e-05, + "loss": 0.1361, + "step": 53430 + }, + { + "epoch": 2.0634001312792, + "grad_norm": 0.20269331336021423, + "learning_rate": 6.244256535001352e-05, + "loss": 0.1916, + "step": 53440 + }, + { + "epoch": 2.0637862465732266, + "grad_norm": 0.868029773235321, + "learning_rate": 6.241682433041173e-05, + "loss": 0.1665, + "step": 53450 + }, + { + "epoch": 2.0641723618672536, + "grad_norm": 0.5697484016418457, + "learning_rate": 6.239108331080994e-05, + "loss": 0.1873, + "step": 53460 + }, + { + "epoch": 2.06455847716128, + "grad_norm": 2.3145039081573486, + "learning_rate": 6.236534229120816e-05, + "loss": 0.3618, + "step": 53470 + }, + { + "epoch": 2.064944592455307, + "grad_norm": 0.3517427444458008, + "learning_rate": 6.233960127160637e-05, + "loss": 0.1844, + "step": 53480 + }, + { + "epoch": 2.065330707749334, + "grad_norm": 0.6968696117401123, + "learning_rate": 6.231386025200458e-05, + "loss": 0.1229, + "step": 53490 + }, + { + "epoch": 2.0657168230433607, + "grad_norm": 1.4328505992889404, + "learning_rate": 6.22881192324028e-05, + "loss": 0.1921, + "step": 53500 + }, + { + "epoch": 2.0661029383373877, + "grad_norm": 0.0647716224193573, + "learning_rate": 6.226237821280101e-05, + "loss": 0.1157, + "step": 53510 + }, + { + "epoch": 2.066489053631414, + "grad_norm": 0.1405775398015976, + "learning_rate": 6.223663719319922e-05, + "loss": 0.1593, + "step": 53520 + }, + { + "epoch": 2.066875168925441, + "grad_norm": 0.8191878795623779, + "learning_rate": 6.221089617359744e-05, + "loss": 0.3237, + "step": 53530 + }, + { + "epoch": 2.0672612842194678, + "grad_norm": 0.16709165275096893, + "learning_rate": 6.218515515399565e-05, + "loss": 0.1098, + "step": 53540 + }, + { + "epoch": 2.0676473995134947, + "grad_norm": 1.2044661045074463, + "learning_rate": 6.215941413439386e-05, + "loss": 0.2014, + "step": 53550 + }, + { + "epoch": 2.0680335148075217, + "grad_norm": 0.7808303236961365, + "learning_rate": 6.213367311479208e-05, + "loss": 0.1122, + "step": 53560 + }, + { + "epoch": 2.0684196301015483, + 
"grad_norm": 0.8508942723274231, + "learning_rate": 6.210793209519029e-05, + "loss": 0.2162, + "step": 53570 + }, + { + "epoch": 2.0688057453955753, + "grad_norm": 0.1512947827577591, + "learning_rate": 6.20821910755885e-05, + "loss": 0.1382, + "step": 53580 + }, + { + "epoch": 2.069191860689602, + "grad_norm": 0.10546062141656876, + "learning_rate": 6.205645005598673e-05, + "loss": 0.0962, + "step": 53590 + }, + { + "epoch": 2.069577975983629, + "grad_norm": 0.7787978053092957, + "learning_rate": 6.203070903638494e-05, + "loss": 0.2142, + "step": 53600 + }, + { + "epoch": 2.0699640912776553, + "grad_norm": 0.1247776597738266, + "learning_rate": 6.200496801678314e-05, + "loss": 0.1788, + "step": 53610 + }, + { + "epoch": 2.0703502065716823, + "grad_norm": 1.5636411905288696, + "learning_rate": 6.197922699718136e-05, + "loss": 0.2244, + "step": 53620 + }, + { + "epoch": 2.0707363218657093, + "grad_norm": 1.5239825248718262, + "learning_rate": 6.195348597757957e-05, + "loss": 0.152, + "step": 53630 + }, + { + "epoch": 2.071122437159736, + "grad_norm": 0.6676948070526123, + "learning_rate": 6.192774495797778e-05, + "loss": 0.1007, + "step": 53640 + }, + { + "epoch": 2.071508552453763, + "grad_norm": 2.0146231651306152, + "learning_rate": 6.190200393837601e-05, + "loss": 0.1881, + "step": 53650 + }, + { + "epoch": 2.0718946677477894, + "grad_norm": 1.3102779388427734, + "learning_rate": 6.187626291877422e-05, + "loss": 0.2036, + "step": 53660 + }, + { + "epoch": 2.0722807830418164, + "grad_norm": 1.230490803718567, + "learning_rate": 6.185052189917244e-05, + "loss": 0.235, + "step": 53670 + }, + { + "epoch": 2.072666898335843, + "grad_norm": 1.2379542589187622, + "learning_rate": 6.182478087957064e-05, + "loss": 0.2005, + "step": 53680 + }, + { + "epoch": 2.07305301362987, + "grad_norm": 0.9768775701522827, + "learning_rate": 6.179903985996885e-05, + "loss": 0.1885, + "step": 53690 + }, + { + "epoch": 2.0734391289238965, + "grad_norm": 1.2973617315292358, + "learning_rate": 6.177329884036706e-05, + "loss": 0.1218, + "step": 53700 + }, + { + "epoch": 2.0738252442179235, + "grad_norm": 0.6314378380775452, + "learning_rate": 6.174755782076528e-05, + "loss": 0.2194, + "step": 53710 + }, + { + "epoch": 2.0742113595119505, + "grad_norm": 1.7972043752670288, + "learning_rate": 6.17218168011635e-05, + "loss": 0.2668, + "step": 53720 + }, + { + "epoch": 2.074597474805977, + "grad_norm": 1.1307156085968018, + "learning_rate": 6.169607578156172e-05, + "loss": 0.2094, + "step": 53730 + }, + { + "epoch": 2.074983590100004, + "grad_norm": 0.07750687003135681, + "learning_rate": 6.167033476195993e-05, + "loss": 0.1556, + "step": 53740 + }, + { + "epoch": 2.0753697053940305, + "grad_norm": 0.33502569794654846, + "learning_rate": 6.164459374235813e-05, + "loss": 0.1849, + "step": 53750 + }, + { + "epoch": 2.0757558206880575, + "grad_norm": 0.462332546710968, + "learning_rate": 6.161885272275634e-05, + "loss": 0.1704, + "step": 53760 + }, + { + "epoch": 2.076141935982084, + "grad_norm": 0.7019705772399902, + "learning_rate": 6.159311170315456e-05, + "loss": 0.155, + "step": 53770 + }, + { + "epoch": 2.076528051276111, + "grad_norm": 1.2807412147521973, + "learning_rate": 6.156737068355278e-05, + "loss": 0.2956, + "step": 53780 + }, + { + "epoch": 2.076914166570138, + "grad_norm": 0.9197677373886108, + "learning_rate": 6.1541629663951e-05, + "loss": 0.0723, + "step": 53790 + }, + { + "epoch": 2.0773002818641646, + "grad_norm": 0.15677478909492493, + "learning_rate": 6.151588864434921e-05, + "loss": 0.2835, + 
"step": 53800 + }, + { + "epoch": 2.0776863971581916, + "grad_norm": 1.7673814296722412, + "learning_rate": 6.149014762474742e-05, + "loss": 0.1127, + "step": 53810 + }, + { + "epoch": 2.078072512452218, + "grad_norm": 1.9662418365478516, + "learning_rate": 6.146440660514562e-05, + "loss": 0.1314, + "step": 53820 + }, + { + "epoch": 2.078458627746245, + "grad_norm": 0.11253755539655685, + "learning_rate": 6.143866558554384e-05, + "loss": 0.2855, + "step": 53830 + }, + { + "epoch": 2.0788447430402717, + "grad_norm": 2.4275155067443848, + "learning_rate": 6.141292456594206e-05, + "loss": 0.2794, + "step": 53840 + }, + { + "epoch": 2.0792308583342987, + "grad_norm": 0.4845966100692749, + "learning_rate": 6.138718354634028e-05, + "loss": 0.1129, + "step": 53850 + }, + { + "epoch": 2.079616973628325, + "grad_norm": 0.2127775400876999, + "learning_rate": 6.136144252673849e-05, + "loss": 0.1089, + "step": 53860 + }, + { + "epoch": 2.080003088922352, + "grad_norm": 1.554506778717041, + "learning_rate": 6.13357015071367e-05, + "loss": 0.1196, + "step": 53870 + }, + { + "epoch": 2.080389204216379, + "grad_norm": 0.08301983028650284, + "learning_rate": 6.130996048753492e-05, + "loss": 0.3858, + "step": 53880 + }, + { + "epoch": 2.0807753195104057, + "grad_norm": 1.0547988414764404, + "learning_rate": 6.128421946793313e-05, + "loss": 0.1348, + "step": 53890 + }, + { + "epoch": 2.0811614348044327, + "grad_norm": 0.3954383432865143, + "learning_rate": 6.125847844833134e-05, + "loss": 0.1347, + "step": 53900 + }, + { + "epoch": 2.0815475500984593, + "grad_norm": 0.25809749960899353, + "learning_rate": 6.123273742872956e-05, + "loss": 0.4701, + "step": 53910 + }, + { + "epoch": 2.0819336653924863, + "grad_norm": 0.9337195754051208, + "learning_rate": 6.120699640912777e-05, + "loss": 0.1335, + "step": 53920 + }, + { + "epoch": 2.082319780686513, + "grad_norm": 0.9139271378517151, + "learning_rate": 6.118125538952598e-05, + "loss": 0.1095, + "step": 53930 + }, + { + "epoch": 2.08270589598054, + "grad_norm": 0.31789037585258484, + "learning_rate": 6.11555143699242e-05, + "loss": 0.1116, + "step": 53940 + }, + { + "epoch": 2.083092011274567, + "grad_norm": 1.4585286378860474, + "learning_rate": 6.112977335032241e-05, + "loss": 0.2283, + "step": 53950 + }, + { + "epoch": 2.0834781265685933, + "grad_norm": 1.2569290399551392, + "learning_rate": 6.110403233072062e-05, + "loss": 0.238, + "step": 53960 + }, + { + "epoch": 2.0838642418626203, + "grad_norm": 0.08168485015630722, + "learning_rate": 6.107829131111884e-05, + "loss": 0.0965, + "step": 53970 + }, + { + "epoch": 2.084250357156647, + "grad_norm": 0.42119330167770386, + "learning_rate": 6.105255029151705e-05, + "loss": 0.288, + "step": 53980 + }, + { + "epoch": 2.084636472450674, + "grad_norm": 0.04520781710743904, + "learning_rate": 6.102680927191526e-05, + "loss": 0.1616, + "step": 53990 + }, + { + "epoch": 2.0850225877447004, + "grad_norm": 0.9019898176193237, + "learning_rate": 6.100106825231348e-05, + "loss": 0.1313, + "step": 54000 + }, + { + "epoch": 2.0854087030387274, + "grad_norm": 2.3572826385498047, + "learning_rate": 6.097532723271169e-05, + "loss": 0.219, + "step": 54010 + }, + { + "epoch": 2.0857948183327544, + "grad_norm": 0.6147291660308838, + "learning_rate": 6.0949586213109896e-05, + "loss": 0.1005, + "step": 54020 + }, + { + "epoch": 2.086180933626781, + "grad_norm": 0.3989221751689911, + "learning_rate": 6.092384519350812e-05, + "loss": 0.1923, + "step": 54030 + }, + { + "epoch": 2.086567048920808, + "grad_norm": 0.04854296147823334, + 
"learning_rate": 6.089810417390634e-05, + "loss": 0.3081, + "step": 54040 + }, + { + "epoch": 2.0869531642148345, + "grad_norm": 1.1020113229751587, + "learning_rate": 6.087236315430454e-05, + "loss": 0.1325, + "step": 54050 + }, + { + "epoch": 2.0873392795088614, + "grad_norm": 1.2404685020446777, + "learning_rate": 6.0846622134702756e-05, + "loss": 0.2657, + "step": 54060 + }, + { + "epoch": 2.087725394802888, + "grad_norm": 0.5539906620979309, + "learning_rate": 6.082088111510097e-05, + "loss": 0.1379, + "step": 54070 + }, + { + "epoch": 2.088111510096915, + "grad_norm": 0.550548791885376, + "learning_rate": 6.079514009549918e-05, + "loss": 0.2004, + "step": 54080 + }, + { + "epoch": 2.0884976253909415, + "grad_norm": 0.5012397766113281, + "learning_rate": 6.07693990758974e-05, + "loss": 0.1582, + "step": 54090 + }, + { + "epoch": 2.0888837406849685, + "grad_norm": 0.7319992780685425, + "learning_rate": 6.0743658056295617e-05, + "loss": 0.1923, + "step": 54100 + }, + { + "epoch": 2.0892698559789955, + "grad_norm": 0.05006573721766472, + "learning_rate": 6.071791703669383e-05, + "loss": 0.2457, + "step": 54110 + }, + { + "epoch": 2.089655971273022, + "grad_norm": 0.06044507771730423, + "learning_rate": 6.0692176017092036e-05, + "loss": 0.1328, + "step": 54120 + }, + { + "epoch": 2.090042086567049, + "grad_norm": 1.4530203342437744, + "learning_rate": 6.066643499749025e-05, + "loss": 0.1344, + "step": 54130 + }, + { + "epoch": 2.0904282018610756, + "grad_norm": 1.0805295705795288, + "learning_rate": 6.064069397788846e-05, + "loss": 0.137, + "step": 54140 + }, + { + "epoch": 2.0908143171551026, + "grad_norm": 0.7313231825828552, + "learning_rate": 6.061495295828668e-05, + "loss": 0.1859, + "step": 54150 + }, + { + "epoch": 2.091200432449129, + "grad_norm": 1.4634814262390137, + "learning_rate": 6.0589211938684896e-05, + "loss": 0.1396, + "step": 54160 + }, + { + "epoch": 2.091586547743156, + "grad_norm": 1.1281847953796387, + "learning_rate": 6.056347091908311e-05, + "loss": 0.1935, + "step": 54170 + }, + { + "epoch": 2.091972663037183, + "grad_norm": 0.3002813756465912, + "learning_rate": 6.053772989948132e-05, + "loss": 0.2053, + "step": 54180 + }, + { + "epoch": 2.0923587783312096, + "grad_norm": 1.521639347076416, + "learning_rate": 6.0511988879879536e-05, + "loss": 0.2228, + "step": 54190 + }, + { + "epoch": 2.0927448936252366, + "grad_norm": 1.6338810920715332, + "learning_rate": 6.048624786027774e-05, + "loss": 0.0525, + "step": 54200 + }, + { + "epoch": 2.093131008919263, + "grad_norm": 1.9877723455429077, + "learning_rate": 6.0460506840675956e-05, + "loss": 0.3764, + "step": 54210 + }, + { + "epoch": 2.09351712421329, + "grad_norm": 1.1026666164398193, + "learning_rate": 6.0434765821074176e-05, + "loss": 0.0845, + "step": 54220 + }, + { + "epoch": 2.0939032395073167, + "grad_norm": 0.17072628438472748, + "learning_rate": 6.040902480147239e-05, + "loss": 0.2619, + "step": 54230 + }, + { + "epoch": 2.0942893548013437, + "grad_norm": 1.0765973329544067, + "learning_rate": 6.03832837818706e-05, + "loss": 0.2227, + "step": 54240 + }, + { + "epoch": 2.0946754700953703, + "grad_norm": 0.10422563552856445, + "learning_rate": 6.0357542762268816e-05, + "loss": 0.1371, + "step": 54250 + }, + { + "epoch": 2.0950615853893972, + "grad_norm": 0.7437000870704651, + "learning_rate": 6.033180174266703e-05, + "loss": 0.0981, + "step": 54260 + }, + { + "epoch": 2.0954477006834242, + "grad_norm": 0.12045181542634964, + "learning_rate": 6.0306060723065236e-05, + "loss": 0.1583, + "step": 54270 + }, + { 
+ "epoch": 2.0958338159774508, + "grad_norm": 0.5264570713043213, + "learning_rate": 6.028031970346346e-05, + "loss": 0.2561, + "step": 54280 + }, + { + "epoch": 2.0962199312714778, + "grad_norm": 0.8104095458984375, + "learning_rate": 6.0254578683861676e-05, + "loss": 0.1338, + "step": 54290 + }, + { + "epoch": 2.0966060465655043, + "grad_norm": 1.8734989166259766, + "learning_rate": 6.022883766425988e-05, + "loss": 0.1911, + "step": 54300 + }, + { + "epoch": 2.0969921618595313, + "grad_norm": 0.09730927646160126, + "learning_rate": 6.0203096644658096e-05, + "loss": 0.2272, + "step": 54310 + }, + { + "epoch": 2.097378277153558, + "grad_norm": 2.5745980739593506, + "learning_rate": 6.017735562505631e-05, + "loss": 0.2252, + "step": 54320 + }, + { + "epoch": 2.097764392447585, + "grad_norm": 0.4371737539768219, + "learning_rate": 6.015161460545452e-05, + "loss": 0.1397, + "step": 54330 + }, + { + "epoch": 2.098150507741612, + "grad_norm": 0.267517626285553, + "learning_rate": 6.012587358585274e-05, + "loss": 0.0745, + "step": 54340 + }, + { + "epoch": 2.0985366230356384, + "grad_norm": 0.8187986016273499, + "learning_rate": 6.0100132566250956e-05, + "loss": 0.2474, + "step": 54350 + }, + { + "epoch": 2.0989227383296654, + "grad_norm": 1.1416966915130615, + "learning_rate": 6.007439154664917e-05, + "loss": 0.1445, + "step": 54360 + }, + { + "epoch": 2.099308853623692, + "grad_norm": 0.4858175814151764, + "learning_rate": 6.0048650527047376e-05, + "loss": 0.1115, + "step": 54370 + }, + { + "epoch": 2.099694968917719, + "grad_norm": 0.17463591694831848, + "learning_rate": 6.002290950744559e-05, + "loss": 0.128, + "step": 54380 + }, + { + "epoch": 2.1000810842117454, + "grad_norm": 1.2394402027130127, + "learning_rate": 5.99971684878438e-05, + "loss": 0.2724, + "step": 54390 + }, + { + "epoch": 2.1004671995057724, + "grad_norm": 0.2638779282569885, + "learning_rate": 5.997142746824202e-05, + "loss": 0.0761, + "step": 54400 + }, + { + "epoch": 2.1008533147997994, + "grad_norm": 0.34836921095848083, + "learning_rate": 5.9945686448640236e-05, + "loss": 0.1303, + "step": 54410 + }, + { + "epoch": 2.101239430093826, + "grad_norm": 1.0886906385421753, + "learning_rate": 5.991994542903845e-05, + "loss": 0.1448, + "step": 54420 + }, + { + "epoch": 2.101625545387853, + "grad_norm": 1.4876662492752075, + "learning_rate": 5.989420440943666e-05, + "loss": 0.1355, + "step": 54430 + }, + { + "epoch": 2.1020116606818795, + "grad_norm": 0.5606863498687744, + "learning_rate": 5.986846338983487e-05, + "loss": 0.2719, + "step": 54440 + }, + { + "epoch": 2.1023977759759065, + "grad_norm": 2.005791664123535, + "learning_rate": 5.984272237023308e-05, + "loss": 0.2387, + "step": 54450 + }, + { + "epoch": 2.102783891269933, + "grad_norm": 1.1652408838272095, + "learning_rate": 5.9816981350631296e-05, + "loss": 0.2491, + "step": 54460 + }, + { + "epoch": 2.10317000656396, + "grad_norm": 1.9928478002548218, + "learning_rate": 5.9791240331029516e-05, + "loss": 0.2429, + "step": 54470 + }, + { + "epoch": 2.103556121857987, + "grad_norm": 1.4620413780212402, + "learning_rate": 5.976549931142773e-05, + "loss": 0.2706, + "step": 54480 + }, + { + "epoch": 2.1039422371520136, + "grad_norm": 0.40234237909317017, + "learning_rate": 5.973975829182594e-05, + "loss": 0.1181, + "step": 54490 + }, + { + "epoch": 2.1043283524460406, + "grad_norm": 0.2635735273361206, + "learning_rate": 5.9714017272224156e-05, + "loss": 0.1217, + "step": 54500 + }, + { + "epoch": 2.104714467740067, + "grad_norm": 0.5525489449501038, + "learning_rate": 
5.968827625262237e-05, + "loss": 0.2949, + "step": 54510 + }, + { + "epoch": 2.105100583034094, + "grad_norm": 1.1089653968811035, + "learning_rate": 5.9662535233020576e-05, + "loss": 0.1418, + "step": 54520 + }, + { + "epoch": 2.1054866983281206, + "grad_norm": 0.21478118002414703, + "learning_rate": 5.96367942134188e-05, + "loss": 0.0913, + "step": 54530 + }, + { + "epoch": 2.1058728136221476, + "grad_norm": 1.0484806299209595, + "learning_rate": 5.9611053193817016e-05, + "loss": 0.1215, + "step": 54540 + }, + { + "epoch": 2.106258928916174, + "grad_norm": 2.487285852432251, + "learning_rate": 5.958531217421522e-05, + "loss": 0.2268, + "step": 54550 + }, + { + "epoch": 2.106645044210201, + "grad_norm": 5.35322904586792, + "learning_rate": 5.9559571154613436e-05, + "loss": 0.0894, + "step": 54560 + }, + { + "epoch": 2.107031159504228, + "grad_norm": 3.2365424633026123, + "learning_rate": 5.953383013501165e-05, + "loss": 0.3387, + "step": 54570 + }, + { + "epoch": 2.1074172747982547, + "grad_norm": 0.9013198614120483, + "learning_rate": 5.950808911540986e-05, + "loss": 0.103, + "step": 54580 + }, + { + "epoch": 2.1078033900922817, + "grad_norm": 0.7987234592437744, + "learning_rate": 5.948234809580808e-05, + "loss": 0.1964, + "step": 54590 + }, + { + "epoch": 2.1081895053863082, + "grad_norm": 0.6315350532531738, + "learning_rate": 5.9456607076206296e-05, + "loss": 0.1523, + "step": 54600 + }, + { + "epoch": 2.108575620680335, + "grad_norm": 2.4431264400482178, + "learning_rate": 5.943086605660451e-05, + "loss": 0.255, + "step": 54610 + }, + { + "epoch": 2.1089617359743618, + "grad_norm": 1.1002070903778076, + "learning_rate": 5.9405125037002715e-05, + "loss": 0.3092, + "step": 54620 + }, + { + "epoch": 2.1093478512683888, + "grad_norm": 1.1607320308685303, + "learning_rate": 5.937938401740093e-05, + "loss": 0.1338, + "step": 54630 + }, + { + "epoch": 2.1097339665624157, + "grad_norm": 1.9413435459136963, + "learning_rate": 5.935364299779914e-05, + "loss": 0.2128, + "step": 54640 + }, + { + "epoch": 2.1101200818564423, + "grad_norm": 1.6216448545455933, + "learning_rate": 5.932790197819736e-05, + "loss": 0.2688, + "step": 54650 + }, + { + "epoch": 2.1105061971504693, + "grad_norm": 0.9825085997581482, + "learning_rate": 5.9302160958595576e-05, + "loss": 0.146, + "step": 54660 + }, + { + "epoch": 2.110892312444496, + "grad_norm": 2.0620877742767334, + "learning_rate": 5.927641993899379e-05, + "loss": 0.1987, + "step": 54670 + }, + { + "epoch": 2.111278427738523, + "grad_norm": 0.6155973076820374, + "learning_rate": 5.9250678919392e-05, + "loss": 0.0886, + "step": 54680 + }, + { + "epoch": 2.1116645430325494, + "grad_norm": 0.08519631624221802, + "learning_rate": 5.922493789979021e-05, + "loss": 0.1087, + "step": 54690 + }, + { + "epoch": 2.1120506583265763, + "grad_norm": 1.9819930791854858, + "learning_rate": 5.919919688018842e-05, + "loss": 0.2588, + "step": 54700 + }, + { + "epoch": 2.112436773620603, + "grad_norm": 0.32515060901641846, + "learning_rate": 5.9173455860586635e-05, + "loss": 0.2297, + "step": 54710 + }, + { + "epoch": 2.11282288891463, + "grad_norm": 2.5351369380950928, + "learning_rate": 5.9147714840984855e-05, + "loss": 0.1391, + "step": 54720 + }, + { + "epoch": 2.113209004208657, + "grad_norm": 0.3489625155925751, + "learning_rate": 5.912197382138307e-05, + "loss": 0.1012, + "step": 54730 + }, + { + "epoch": 2.1135951195026834, + "grad_norm": 0.4030207693576813, + "learning_rate": 5.909623280178128e-05, + "loss": 0.2788, + "step": 54740 + }, + { + "epoch": 
2.1139812347967104, + "grad_norm": 1.3358521461486816, + "learning_rate": 5.9070491782179495e-05, + "loss": 0.4126, + "step": 54750 + }, + { + "epoch": 2.114367350090737, + "grad_norm": 1.6924939155578613, + "learning_rate": 5.90447507625777e-05, + "loss": 0.1319, + "step": 54760 + }, + { + "epoch": 2.114753465384764, + "grad_norm": 0.08663685619831085, + "learning_rate": 5.9019009742975915e-05, + "loss": 0.3066, + "step": 54770 + }, + { + "epoch": 2.1151395806787905, + "grad_norm": 0.2018699198961258, + "learning_rate": 5.899326872337414e-05, + "loss": 0.1316, + "step": 54780 + }, + { + "epoch": 2.1155256959728175, + "grad_norm": 1.8767002820968628, + "learning_rate": 5.896752770377235e-05, + "loss": 0.2638, + "step": 54790 + }, + { + "epoch": 2.1159118112668445, + "grad_norm": 2.719196081161499, + "learning_rate": 5.894178668417056e-05, + "loss": 0.166, + "step": 54800 + }, + { + "epoch": 2.116297926560871, + "grad_norm": 1.5541603565216064, + "learning_rate": 5.8916045664568775e-05, + "loss": 0.1502, + "step": 54810 + }, + { + "epoch": 2.116684041854898, + "grad_norm": 2.0174572467803955, + "learning_rate": 5.889030464496699e-05, + "loss": 0.2778, + "step": 54820 + }, + { + "epoch": 2.1170701571489245, + "grad_norm": 1.4575814008712769, + "learning_rate": 5.88645636253652e-05, + "loss": 0.2026, + "step": 54830 + }, + { + "epoch": 2.1174562724429515, + "grad_norm": 2.583587646484375, + "learning_rate": 5.883882260576342e-05, + "loss": 0.1695, + "step": 54840 + }, + { + "epoch": 2.117842387736978, + "grad_norm": 1.3352335691452026, + "learning_rate": 5.8813081586161635e-05, + "loss": 0.2557, + "step": 54850 + }, + { + "epoch": 2.118228503031005, + "grad_norm": 1.0539675951004028, + "learning_rate": 5.878734056655985e-05, + "loss": 0.2358, + "step": 54860 + }, + { + "epoch": 2.118614618325032, + "grad_norm": 1.6472233533859253, + "learning_rate": 5.8761599546958055e-05, + "loss": 0.1166, + "step": 54870 + }, + { + "epoch": 2.1190007336190586, + "grad_norm": 1.6128703355789185, + "learning_rate": 5.873585852735627e-05, + "loss": 0.1584, + "step": 54880 + }, + { + "epoch": 2.1193868489130856, + "grad_norm": 2.4982826709747314, + "learning_rate": 5.871011750775448e-05, + "loss": 0.2192, + "step": 54890 + }, + { + "epoch": 2.119772964207112, + "grad_norm": 1.7372159957885742, + "learning_rate": 5.86843764881527e-05, + "loss": 0.1824, + "step": 54900 + }, + { + "epoch": 2.120159079501139, + "grad_norm": 1.9102532863616943, + "learning_rate": 5.8658635468550915e-05, + "loss": 0.1234, + "step": 54910 + }, + { + "epoch": 2.1205451947951657, + "grad_norm": 0.9978908896446228, + "learning_rate": 5.863289444894913e-05, + "loss": 0.2699, + "step": 54920 + }, + { + "epoch": 2.1209313100891927, + "grad_norm": 0.8557146787643433, + "learning_rate": 5.860715342934734e-05, + "loss": 0.0984, + "step": 54930 + }, + { + "epoch": 2.1213174253832197, + "grad_norm": 2.5358450412750244, + "learning_rate": 5.858141240974555e-05, + "loss": 0.2159, + "step": 54940 + }, + { + "epoch": 2.121703540677246, + "grad_norm": 2.588324785232544, + "learning_rate": 5.855567139014376e-05, + "loss": 0.1311, + "step": 54950 + }, + { + "epoch": 2.122089655971273, + "grad_norm": 3.9097461700439453, + "learning_rate": 5.852993037054199e-05, + "loss": 0.2061, + "step": 54960 + }, + { + "epoch": 2.1224757712652997, + "grad_norm": 0.992247998714447, + "learning_rate": 5.8504189350940195e-05, + "loss": 0.0774, + "step": 54970 + }, + { + "epoch": 2.1228618865593267, + "grad_norm": 0.9253148436546326, + "learning_rate": 
5.847844833133841e-05, + "loss": 0.1274, + "step": 54980 + }, + { + "epoch": 2.1232480018533533, + "grad_norm": 0.8236201405525208, + "learning_rate": 5.845270731173662e-05, + "loss": 0.0951, + "step": 54990 + }, + { + "epoch": 2.1236341171473803, + "grad_norm": 0.9370753169059753, + "learning_rate": 5.8426966292134835e-05, + "loss": 0.1832, + "step": 55000 + }, + { + "epoch": 2.124020232441407, + "grad_norm": 2.7415149211883545, + "learning_rate": 5.840122527253304e-05, + "loss": 0.1798, + "step": 55010 + }, + { + "epoch": 2.124406347735434, + "grad_norm": 1.4576952457427979, + "learning_rate": 5.8375484252931255e-05, + "loss": 0.2384, + "step": 55020 + }, + { + "epoch": 2.124792463029461, + "grad_norm": 2.303542137145996, + "learning_rate": 5.834974323332948e-05, + "loss": 0.2166, + "step": 55030 + }, + { + "epoch": 2.1251785783234873, + "grad_norm": 1.1065007448196411, + "learning_rate": 5.832400221372769e-05, + "loss": 0.1723, + "step": 55040 + }, + { + "epoch": 2.1255646936175143, + "grad_norm": 4.247042655944824, + "learning_rate": 5.82982611941259e-05, + "loss": 0.3005, + "step": 55050 + }, + { + "epoch": 2.125950808911541, + "grad_norm": 1.3860160112380981, + "learning_rate": 5.8272520174524115e-05, + "loss": 0.2504, + "step": 55060 + }, + { + "epoch": 2.126336924205568, + "grad_norm": 0.2923658788204193, + "learning_rate": 5.824677915492233e-05, + "loss": 0.1933, + "step": 55070 + }, + { + "epoch": 2.1267230394995944, + "grad_norm": 2.0818326473236084, + "learning_rate": 5.822103813532054e-05, + "loss": 0.2383, + "step": 55080 + }, + { + "epoch": 2.1271091547936214, + "grad_norm": 2.5381951332092285, + "learning_rate": 5.819529711571876e-05, + "loss": 0.1791, + "step": 55090 + }, + { + "epoch": 2.127495270087648, + "grad_norm": 0.7398497462272644, + "learning_rate": 5.8169556096116975e-05, + "loss": 0.1255, + "step": 55100 + }, + { + "epoch": 2.127881385381675, + "grad_norm": 1.7903372049331665, + "learning_rate": 5.814381507651518e-05, + "loss": 0.2066, + "step": 55110 + }, + { + "epoch": 2.128267500675702, + "grad_norm": 0.8950181007385254, + "learning_rate": 5.8118074056913395e-05, + "loss": 0.2158, + "step": 55120 + }, + { + "epoch": 2.1286536159697285, + "grad_norm": 0.39259612560272217, + "learning_rate": 5.809233303731161e-05, + "loss": 0.0752, + "step": 55130 + }, + { + "epoch": 2.1290397312637555, + "grad_norm": 1.3042824268341064, + "learning_rate": 5.806659201770982e-05, + "loss": 0.2185, + "step": 55140 + }, + { + "epoch": 2.129425846557782, + "grad_norm": 2.0668983459472656, + "learning_rate": 5.804085099810804e-05, + "loss": 0.1751, + "step": 55150 + }, + { + "epoch": 2.129811961851809, + "grad_norm": 0.634894609451294, + "learning_rate": 5.8015109978506255e-05, + "loss": 0.1275, + "step": 55160 + }, + { + "epoch": 2.1301980771458355, + "grad_norm": 0.9348855018615723, + "learning_rate": 5.798936895890447e-05, + "loss": 0.2011, + "step": 55170 + }, + { + "epoch": 2.1305841924398625, + "grad_norm": 0.686715841293335, + "learning_rate": 5.796362793930268e-05, + "loss": 0.1728, + "step": 55180 + }, + { + "epoch": 2.1309703077338895, + "grad_norm": 2.238306999206543, + "learning_rate": 5.793788691970089e-05, + "loss": 0.1944, + "step": 55190 + }, + { + "epoch": 2.131356423027916, + "grad_norm": 0.7057651281356812, + "learning_rate": 5.79121459000991e-05, + "loss": 0.1892, + "step": 55200 + }, + { + "epoch": 2.131742538321943, + "grad_norm": 2.050670862197876, + "learning_rate": 5.788640488049733e-05, + "loss": 0.1539, + "step": 55210 + }, + { + "epoch": 
2.1321286536159696, + "grad_norm": 1.924126386642456, + "learning_rate": 5.7860663860895534e-05, + "loss": 0.3623, + "step": 55220 + }, + { + "epoch": 2.1325147689099966, + "grad_norm": 0.32087692618370056, + "learning_rate": 5.783492284129375e-05, + "loss": 0.1179, + "step": 55230 + }, + { + "epoch": 2.132900884204023, + "grad_norm": 2.266429901123047, + "learning_rate": 5.780918182169196e-05, + "loss": 0.1164, + "step": 55240 + }, + { + "epoch": 2.13328699949805, + "grad_norm": 0.5455263257026672, + "learning_rate": 5.7783440802090174e-05, + "loss": 0.1361, + "step": 55250 + }, + { + "epoch": 2.133673114792077, + "grad_norm": 0.6196660399436951, + "learning_rate": 5.775769978248838e-05, + "loss": 0.1986, + "step": 55260 + }, + { + "epoch": 2.1340592300861037, + "grad_norm": 0.4529377222061157, + "learning_rate": 5.7731958762886594e-05, + "loss": 0.3009, + "step": 55270 + }, + { + "epoch": 2.1344453453801306, + "grad_norm": 0.44304555654525757, + "learning_rate": 5.770621774328482e-05, + "loss": 0.2522, + "step": 55280 + }, + { + "epoch": 2.134831460674157, + "grad_norm": 2.3878743648529053, + "learning_rate": 5.768047672368303e-05, + "loss": 0.1762, + "step": 55290 + }, + { + "epoch": 2.135217575968184, + "grad_norm": 1.3650730848312378, + "learning_rate": 5.765473570408124e-05, + "loss": 0.1596, + "step": 55300 + }, + { + "epoch": 2.1356036912622107, + "grad_norm": 1.0818227529525757, + "learning_rate": 5.7628994684479454e-05, + "loss": 0.0982, + "step": 55310 + }, + { + "epoch": 2.1359898065562377, + "grad_norm": 0.639480710029602, + "learning_rate": 5.760325366487767e-05, + "loss": 0.1165, + "step": 55320 + }, + { + "epoch": 2.1363759218502647, + "grad_norm": 0.9253720641136169, + "learning_rate": 5.7577512645275874e-05, + "loss": 0.3411, + "step": 55330 + }, + { + "epoch": 2.1367620371442912, + "grad_norm": 1.2035890817642212, + "learning_rate": 5.75517716256741e-05, + "loss": 0.2002, + "step": 55340 + }, + { + "epoch": 2.1371481524383182, + "grad_norm": 1.8806023597717285, + "learning_rate": 5.7526030606072314e-05, + "loss": 0.1834, + "step": 55350 + }, + { + "epoch": 2.137534267732345, + "grad_norm": 1.9944851398468018, + "learning_rate": 5.750028958647052e-05, + "loss": 0.2019, + "step": 55360 + }, + { + "epoch": 2.1379203830263718, + "grad_norm": 0.5148534774780273, + "learning_rate": 5.7474548566868734e-05, + "loss": 0.263, + "step": 55370 + }, + { + "epoch": 2.1383064983203983, + "grad_norm": 1.1325627565383911, + "learning_rate": 5.744880754726695e-05, + "loss": 0.1479, + "step": 55380 + }, + { + "epoch": 2.1386926136144253, + "grad_norm": 0.9628505706787109, + "learning_rate": 5.742306652766516e-05, + "loss": 0.1216, + "step": 55390 + }, + { + "epoch": 2.1390787289084523, + "grad_norm": 0.1893000602722168, + "learning_rate": 5.739732550806338e-05, + "loss": 0.161, + "step": 55400 + }, + { + "epoch": 2.139464844202479, + "grad_norm": 0.9227676391601562, + "learning_rate": 5.7371584488461594e-05, + "loss": 0.1336, + "step": 55410 + }, + { + "epoch": 2.139850959496506, + "grad_norm": 0.3534090220928192, + "learning_rate": 5.734584346885981e-05, + "loss": 0.1184, + "step": 55420 + }, + { + "epoch": 2.1402370747905324, + "grad_norm": 0.9937344193458557, + "learning_rate": 5.7320102449258014e-05, + "loss": 0.1053, + "step": 55430 + }, + { + "epoch": 2.1406231900845594, + "grad_norm": 1.7365370988845825, + "learning_rate": 5.729436142965623e-05, + "loss": 0.1863, + "step": 55440 + }, + { + "epoch": 2.141009305378586, + "grad_norm": 1.2345154285430908, + "learning_rate": 
5.726862041005444e-05, + "loss": 0.3243, + "step": 55450 + }, + { + "epoch": 2.141395420672613, + "grad_norm": 0.36491262912750244, + "learning_rate": 5.724287939045266e-05, + "loss": 0.3046, + "step": 55460 + }, + { + "epoch": 2.1417815359666394, + "grad_norm": 0.6416808366775513, + "learning_rate": 5.7217138370850874e-05, + "loss": 0.1608, + "step": 55470 + }, + { + "epoch": 2.1421676512606664, + "grad_norm": 1.3553400039672852, + "learning_rate": 5.719139735124909e-05, + "loss": 0.2223, + "step": 55480 + }, + { + "epoch": 2.1425537665546934, + "grad_norm": 1.049273133277893, + "learning_rate": 5.71656563316473e-05, + "loss": 0.1196, + "step": 55490 + }, + { + "epoch": 2.14293988184872, + "grad_norm": 0.8493034243583679, + "learning_rate": 5.7139915312045514e-05, + "loss": 0.2165, + "step": 55500 + }, + { + "epoch": 2.143325997142747, + "grad_norm": 0.6411147117614746, + "learning_rate": 5.711417429244372e-05, + "loss": 0.1442, + "step": 55510 + }, + { + "epoch": 2.1437121124367735, + "grad_norm": 0.7366828322410583, + "learning_rate": 5.7088433272841934e-05, + "loss": 0.3625, + "step": 55520 + }, + { + "epoch": 2.1440982277308005, + "grad_norm": 0.7757991552352905, + "learning_rate": 5.706269225324016e-05, + "loss": 0.1393, + "step": 55530 + }, + { + "epoch": 2.144484343024827, + "grad_norm": 1.496213436126709, + "learning_rate": 5.703695123363837e-05, + "loss": 0.2351, + "step": 55540 + }, + { + "epoch": 2.144870458318854, + "grad_norm": 1.1395788192749023, + "learning_rate": 5.701121021403658e-05, + "loss": 0.1933, + "step": 55550 + }, + { + "epoch": 2.1452565736128806, + "grad_norm": 3.914621353149414, + "learning_rate": 5.6985469194434794e-05, + "loss": 0.2532, + "step": 55560 + }, + { + "epoch": 2.1456426889069076, + "grad_norm": 2.1900668144226074, + "learning_rate": 5.695972817483301e-05, + "loss": 0.2192, + "step": 55570 + }, + { + "epoch": 2.1460288042009346, + "grad_norm": 2.3883585929870605, + "learning_rate": 5.6933987155231214e-05, + "loss": 0.2819, + "step": 55580 + }, + { + "epoch": 2.146414919494961, + "grad_norm": 0.28431379795074463, + "learning_rate": 5.690824613562944e-05, + "loss": 0.2381, + "step": 55590 + }, + { + "epoch": 2.146801034788988, + "grad_norm": 0.39490944147109985, + "learning_rate": 5.6882505116027654e-05, + "loss": 0.1224, + "step": 55600 + }, + { + "epoch": 2.1471871500830146, + "grad_norm": 0.5814546346664429, + "learning_rate": 5.685676409642586e-05, + "loss": 0.0878, + "step": 55610 + }, + { + "epoch": 2.1475732653770416, + "grad_norm": 3.863250255584717, + "learning_rate": 5.6831023076824074e-05, + "loss": 0.2972, + "step": 55620 + }, + { + "epoch": 2.147959380671068, + "grad_norm": 1.5579304695129395, + "learning_rate": 5.680528205722229e-05, + "loss": 0.2035, + "step": 55630 + }, + { + "epoch": 2.148345495965095, + "grad_norm": 1.3953123092651367, + "learning_rate": 5.67795410376205e-05, + "loss": 0.2039, + "step": 55640 + }, + { + "epoch": 2.148731611259122, + "grad_norm": 0.30549386143684387, + "learning_rate": 5.675380001801872e-05, + "loss": 0.1311, + "step": 55650 + }, + { + "epoch": 2.1491177265531487, + "grad_norm": 1.704403281211853, + "learning_rate": 5.6728058998416934e-05, + "loss": 0.1683, + "step": 55660 + }, + { + "epoch": 2.1495038418471757, + "grad_norm": 0.47322070598602295, + "learning_rate": 5.670231797881515e-05, + "loss": 0.0995, + "step": 55670 + }, + { + "epoch": 2.1498899571412022, + "grad_norm": 0.8710082769393921, + "learning_rate": 5.6676576959213354e-05, + "loss": 0.1737, + "step": 55680 + }, + { + "epoch": 
2.1502760724352292, + "grad_norm": 3.141096353530884, + "learning_rate": 5.665083593961157e-05, + "loss": 0.1693, + "step": 55690 + }, + { + "epoch": 2.1506621877292558, + "grad_norm": 1.8428922891616821, + "learning_rate": 5.662509492000978e-05, + "loss": 0.0868, + "step": 55700 + }, + { + "epoch": 2.1510483030232828, + "grad_norm": 0.3056959807872772, + "learning_rate": 5.6599353900408e-05, + "loss": 0.2256, + "step": 55710 + }, + { + "epoch": 2.1514344183173097, + "grad_norm": 0.14365683495998383, + "learning_rate": 5.6573612880806214e-05, + "loss": 0.2421, + "step": 55720 + }, + { + "epoch": 2.1518205336113363, + "grad_norm": 0.34138041734695435, + "learning_rate": 5.654787186120443e-05, + "loss": 0.2089, + "step": 55730 + }, + { + "epoch": 2.1522066489053633, + "grad_norm": 1.2336843013763428, + "learning_rate": 5.652213084160264e-05, + "loss": 0.2282, + "step": 55740 + }, + { + "epoch": 2.15259276419939, + "grad_norm": 0.121715247631073, + "learning_rate": 5.6496389822000854e-05, + "loss": 0.341, + "step": 55750 + }, + { + "epoch": 2.152978879493417, + "grad_norm": 1.271396279335022, + "learning_rate": 5.647064880239906e-05, + "loss": 0.346, + "step": 55760 + }, + { + "epoch": 2.1533649947874434, + "grad_norm": 0.39105209708213806, + "learning_rate": 5.6444907782797273e-05, + "loss": 0.2369, + "step": 55770 + }, + { + "epoch": 2.1537511100814704, + "grad_norm": 2.396703004837036, + "learning_rate": 5.6419166763195493e-05, + "loss": 0.1595, + "step": 55780 + }, + { + "epoch": 2.1541372253754973, + "grad_norm": 1.4576066732406616, + "learning_rate": 5.639342574359371e-05, + "loss": 0.1757, + "step": 55790 + }, + { + "epoch": 2.154523340669524, + "grad_norm": 0.5846558213233948, + "learning_rate": 5.636768472399192e-05, + "loss": 0.0961, + "step": 55800 + }, + { + "epoch": 2.154909455963551, + "grad_norm": 0.2891974449157715, + "learning_rate": 5.6341943704390133e-05, + "loss": 0.1381, + "step": 55810 + }, + { + "epoch": 2.1552955712575774, + "grad_norm": 1.4091805219650269, + "learning_rate": 5.631620268478835e-05, + "loss": 0.1934, + "step": 55820 + }, + { + "epoch": 2.1556816865516044, + "grad_norm": 0.5410944223403931, + "learning_rate": 5.629046166518655e-05, + "loss": 0.2414, + "step": 55830 + }, + { + "epoch": 2.156067801845631, + "grad_norm": 0.7418326735496521, + "learning_rate": 5.626472064558478e-05, + "loss": 0.0405, + "step": 55840 + }, + { + "epoch": 2.156453917139658, + "grad_norm": 0.03413806110620499, + "learning_rate": 5.6238979625982993e-05, + "loss": 0.1131, + "step": 55850 + }, + { + "epoch": 2.1568400324336845, + "grad_norm": 0.11657452583312988, + "learning_rate": 5.62132386063812e-05, + "loss": 0.2259, + "step": 55860 + }, + { + "epoch": 2.1572261477277115, + "grad_norm": 0.9248818755149841, + "learning_rate": 5.618749758677941e-05, + "loss": 0.1091, + "step": 55870 + }, + { + "epoch": 2.1576122630217385, + "grad_norm": 0.19540861248970032, + "learning_rate": 5.6161756567177627e-05, + "loss": 0.1831, + "step": 55880 + }, + { + "epoch": 2.157998378315765, + "grad_norm": 1.0880403518676758, + "learning_rate": 5.613601554757584e-05, + "loss": 0.2051, + "step": 55890 + }, + { + "epoch": 2.158384493609792, + "grad_norm": 0.766243577003479, + "learning_rate": 5.611027452797406e-05, + "loss": 0.123, + "step": 55900 + }, + { + "epoch": 2.1587706089038186, + "grad_norm": 0.7406583428382874, + "learning_rate": 5.608453350837227e-05, + "loss": 0.1137, + "step": 55910 + }, + { + "epoch": 2.1591567241978455, + "grad_norm": 0.5550261735916138, + "learning_rate": 
5.605879248877049e-05, + "loss": 0.1978, + "step": 55920 + }, + { + "epoch": 2.159542839491872, + "grad_norm": 1.202231526374817, + "learning_rate": 5.603305146916869e-05, + "loss": 0.2157, + "step": 55930 + }, + { + "epoch": 2.159928954785899, + "grad_norm": 0.7214229702949524, + "learning_rate": 5.6007310449566906e-05, + "loss": 0.3131, + "step": 55940 + }, + { + "epoch": 2.160315070079926, + "grad_norm": 0.6656380295753479, + "learning_rate": 5.598156942996512e-05, + "loss": 0.1293, + "step": 55950 + }, + { + "epoch": 2.1607011853739526, + "grad_norm": 0.15865078568458557, + "learning_rate": 5.595582841036334e-05, + "loss": 0.045, + "step": 55960 + }, + { + "epoch": 2.1610873006679796, + "grad_norm": 0.6749983429908752, + "learning_rate": 5.593008739076155e-05, + "loss": 0.1891, + "step": 55970 + }, + { + "epoch": 2.161473415962006, + "grad_norm": 1.1434985399246216, + "learning_rate": 5.5904346371159766e-05, + "loss": 0.0775, + "step": 55980 + }, + { + "epoch": 2.161859531256033, + "grad_norm": 1.1395485401153564, + "learning_rate": 5.587860535155798e-05, + "loss": 0.1444, + "step": 55990 + }, + { + "epoch": 2.1622456465500597, + "grad_norm": 0.44319289922714233, + "learning_rate": 5.5852864331956186e-05, + "loss": 0.2506, + "step": 56000 + }, + { + "epoch": 2.1626317618440867, + "grad_norm": 0.9017069935798645, + "learning_rate": 5.58271233123544e-05, + "loss": 0.2104, + "step": 56010 + }, + { + "epoch": 2.163017877138113, + "grad_norm": 2.420107126235962, + "learning_rate": 5.580138229275261e-05, + "loss": 0.2541, + "step": 56020 + }, + { + "epoch": 2.16340399243214, + "grad_norm": 0.5543047785758972, + "learning_rate": 5.577564127315083e-05, + "loss": 0.1408, + "step": 56030 + }, + { + "epoch": 2.163790107726167, + "grad_norm": 0.8099603652954102, + "learning_rate": 5.5749900253549046e-05, + "loss": 0.0781, + "step": 56040 + }, + { + "epoch": 2.1641762230201937, + "grad_norm": 0.972820520401001, + "learning_rate": 5.572415923394726e-05, + "loss": 0.0957, + "step": 56050 + }, + { + "epoch": 2.1645623383142207, + "grad_norm": 0.5426781177520752, + "learning_rate": 5.569841821434547e-05, + "loss": 0.1632, + "step": 56060 + }, + { + "epoch": 2.1649484536082473, + "grad_norm": 2.088747501373291, + "learning_rate": 5.5672677194743686e-05, + "loss": 0.143, + "step": 56070 + }, + { + "epoch": 2.1653345689022743, + "grad_norm": 0.3575989007949829, + "learning_rate": 5.564693617514189e-05, + "loss": 0.2365, + "step": 56080 + }, + { + "epoch": 2.165720684196301, + "grad_norm": 1.2159044742584229, + "learning_rate": 5.562119515554012e-05, + "loss": 0.2034, + "step": 56090 + }, + { + "epoch": 2.166106799490328, + "grad_norm": 1.2994232177734375, + "learning_rate": 5.559545413593833e-05, + "loss": 0.074, + "step": 56100 + }, + { + "epoch": 2.166492914784355, + "grad_norm": 0.2585364878177643, + "learning_rate": 5.556971311633654e-05, + "loss": 0.1147, + "step": 56110 + }, + { + "epoch": 2.1668790300783813, + "grad_norm": 0.18736127018928528, + "learning_rate": 5.554397209673475e-05, + "loss": 0.0895, + "step": 56120 + }, + { + "epoch": 2.1672651453724083, + "grad_norm": 0.43447959423065186, + "learning_rate": 5.5518231077132966e-05, + "loss": 0.1981, + "step": 56130 + }, + { + "epoch": 2.167651260666435, + "grad_norm": 1.2077672481536865, + "learning_rate": 5.549249005753118e-05, + "loss": 0.1432, + "step": 56140 + }, + { + "epoch": 2.168037375960462, + "grad_norm": 1.672919511795044, + "learning_rate": 5.54667490379294e-05, + "loss": 0.1381, + "step": 56150 + }, + { + "epoch": 
2.1684234912544884, + "grad_norm": 0.6553566455841064, + "learning_rate": 5.544100801832761e-05, + "loss": 0.1089, + "step": 56160 + }, + { + "epoch": 2.1688096065485154, + "grad_norm": 0.4206780791282654, + "learning_rate": 5.5415266998725826e-05, + "loss": 0.1392, + "step": 56170 + }, + { + "epoch": 2.1691957218425424, + "grad_norm": 1.482874870300293, + "learning_rate": 5.538952597912403e-05, + "loss": 0.2027, + "step": 56180 + }, + { + "epoch": 2.169581837136569, + "grad_norm": 1.802695631980896, + "learning_rate": 5.5363784959522246e-05, + "loss": 0.2954, + "step": 56190 + }, + { + "epoch": 2.169967952430596, + "grad_norm": 0.7268577218055725, + "learning_rate": 5.533804393992046e-05, + "loss": 0.182, + "step": 56200 + }, + { + "epoch": 2.1703540677246225, + "grad_norm": 0.604767918586731, + "learning_rate": 5.531230292031868e-05, + "loss": 0.1247, + "step": 56210 + }, + { + "epoch": 2.1707401830186495, + "grad_norm": 2.211203098297119, + "learning_rate": 5.528656190071689e-05, + "loss": 0.2143, + "step": 56220 + }, + { + "epoch": 2.171126298312676, + "grad_norm": 0.03701888397336006, + "learning_rate": 5.5260820881115106e-05, + "loss": 0.2246, + "step": 56230 + }, + { + "epoch": 2.171512413606703, + "grad_norm": 3.4111924171447754, + "learning_rate": 5.523507986151332e-05, + "loss": 0.3147, + "step": 56240 + }, + { + "epoch": 2.17189852890073, + "grad_norm": 0.509873628616333, + "learning_rate": 5.5209338841911526e-05, + "loss": 0.1482, + "step": 56250 + }, + { + "epoch": 2.1722846441947565, + "grad_norm": 1.0144810676574707, + "learning_rate": 5.518359782230974e-05, + "loss": 0.161, + "step": 56260 + }, + { + "epoch": 2.1726707594887835, + "grad_norm": 1.7236958742141724, + "learning_rate": 5.5157856802707966e-05, + "loss": 0.2863, + "step": 56270 + }, + { + "epoch": 2.17305687478281, + "grad_norm": 2.028493881225586, + "learning_rate": 5.513211578310617e-05, + "loss": 0.1263, + "step": 56280 + }, + { + "epoch": 2.173442990076837, + "grad_norm": 0.18114915490150452, + "learning_rate": 5.5106374763504386e-05, + "loss": 0.1941, + "step": 56290 + }, + { + "epoch": 2.1738291053708636, + "grad_norm": 2.091604471206665, + "learning_rate": 5.50806337439026e-05, + "loss": 0.1418, + "step": 56300 + }, + { + "epoch": 2.1742152206648906, + "grad_norm": 3.5671277046203613, + "learning_rate": 5.505489272430081e-05, + "loss": 0.1645, + "step": 56310 + }, + { + "epoch": 2.174601335958917, + "grad_norm": 2.093780040740967, + "learning_rate": 5.502915170469902e-05, + "loss": 0.2723, + "step": 56320 + }, + { + "epoch": 2.174987451252944, + "grad_norm": 1.0060350894927979, + "learning_rate": 5.500341068509723e-05, + "loss": 0.1432, + "step": 56330 + }, + { + "epoch": 2.175373566546971, + "grad_norm": 1.280118465423584, + "learning_rate": 5.497766966549546e-05, + "loss": 0.2152, + "step": 56340 + }, + { + "epoch": 2.1757596818409977, + "grad_norm": 0.14088940620422363, + "learning_rate": 5.4951928645893666e-05, + "loss": 0.2147, + "step": 56350 + }, + { + "epoch": 2.1761457971350247, + "grad_norm": 1.0671783685684204, + "learning_rate": 5.492618762629188e-05, + "loss": 0.1269, + "step": 56360 + }, + { + "epoch": 2.176531912429051, + "grad_norm": 0.20585323870182037, + "learning_rate": 5.490044660669009e-05, + "loss": 0.1455, + "step": 56370 + }, + { + "epoch": 2.176918027723078, + "grad_norm": 1.8759623765945435, + "learning_rate": 5.4874705587088306e-05, + "loss": 0.3711, + "step": 56380 + }, + { + "epoch": 2.1773041430171047, + "grad_norm": 1.1874949932098389, + "learning_rate": 
5.484896456748652e-05, + "loss": 0.1062, + "step": 56390 + }, + { + "epoch": 2.1776902583111317, + "grad_norm": 1.0083370208740234, + "learning_rate": 5.482322354788474e-05, + "loss": 0.1121, + "step": 56400 + }, + { + "epoch": 2.1780763736051583, + "grad_norm": 0.7510607838630676, + "learning_rate": 5.479748252828295e-05, + "loss": 0.2032, + "step": 56410 + }, + { + "epoch": 2.1784624888991853, + "grad_norm": 0.307444304227829, + "learning_rate": 5.4771741508681166e-05, + "loss": 0.1786, + "step": 56420 + }, + { + "epoch": 2.1788486041932122, + "grad_norm": 0.1072758212685585, + "learning_rate": 5.474600048907937e-05, + "loss": 0.3205, + "step": 56430 + }, + { + "epoch": 2.179234719487239, + "grad_norm": 0.78147292137146, + "learning_rate": 5.4720259469477586e-05, + "loss": 0.1552, + "step": 56440 + }, + { + "epoch": 2.179620834781266, + "grad_norm": 0.4287649393081665, + "learning_rate": 5.46945184498758e-05, + "loss": 0.0457, + "step": 56450 + }, + { + "epoch": 2.1800069500752923, + "grad_norm": 2.8946595191955566, + "learning_rate": 5.466877743027402e-05, + "loss": 0.1814, + "step": 56460 + }, + { + "epoch": 2.1803930653693193, + "grad_norm": 0.446044385433197, + "learning_rate": 5.464303641067223e-05, + "loss": 0.1898, + "step": 56470 + }, + { + "epoch": 2.180779180663346, + "grad_norm": 2.351010799407959, + "learning_rate": 5.4617295391070446e-05, + "loss": 0.1929, + "step": 56480 + }, + { + "epoch": 2.181165295957373, + "grad_norm": 1.1475882530212402, + "learning_rate": 5.459155437146866e-05, + "loss": 0.0972, + "step": 56490 + }, + { + "epoch": 2.1815514112514, + "grad_norm": 1.1613543033599854, + "learning_rate": 5.4565813351866865e-05, + "loss": 0.1397, + "step": 56500 + }, + { + "epoch": 2.1819375265454264, + "grad_norm": 1.2021968364715576, + "learning_rate": 5.454007233226508e-05, + "loss": 0.2538, + "step": 56510 + }, + { + "epoch": 2.1823236418394534, + "grad_norm": 1.1156634092330933, + "learning_rate": 5.4514331312663306e-05, + "loss": 0.1325, + "step": 56520 + }, + { + "epoch": 2.18270975713348, + "grad_norm": 3.0149824619293213, + "learning_rate": 5.448859029306151e-05, + "loss": 0.1596, + "step": 56530 + }, + { + "epoch": 2.183095872427507, + "grad_norm": 2.628236770629883, + "learning_rate": 5.4462849273459725e-05, + "loss": 0.3042, + "step": 56540 + }, + { + "epoch": 2.1834819877215335, + "grad_norm": 3.887352705001831, + "learning_rate": 5.443710825385794e-05, + "loss": 0.2112, + "step": 56550 + }, + { + "epoch": 2.1838681030155604, + "grad_norm": 1.9219342470169067, + "learning_rate": 5.441136723425615e-05, + "loss": 0.1709, + "step": 56560 + }, + { + "epoch": 2.1842542183095874, + "grad_norm": 1.5730615854263306, + "learning_rate": 5.438562621465436e-05, + "loss": 0.184, + "step": 56570 + }, + { + "epoch": 2.184640333603614, + "grad_norm": 1.306178331375122, + "learning_rate": 5.435988519505257e-05, + "loss": 0.2147, + "step": 56580 + }, + { + "epoch": 2.185026448897641, + "grad_norm": 0.3093883693218231, + "learning_rate": 5.43341441754508e-05, + "loss": 0.189, + "step": 56590 + }, + { + "epoch": 2.1854125641916675, + "grad_norm": 1.672884225845337, + "learning_rate": 5.4308403155849005e-05, + "loss": 0.1502, + "step": 56600 + }, + { + "epoch": 2.1857986794856945, + "grad_norm": 0.44754695892333984, + "learning_rate": 5.428266213624722e-05, + "loss": 0.1941, + "step": 56610 + }, + { + "epoch": 2.186184794779721, + "grad_norm": 0.3943333625793457, + "learning_rate": 5.425692111664543e-05, + "loss": 0.1494, + "step": 56620 + }, + { + "epoch": 2.186570910073748, + 
"grad_norm": 1.8399711847305298, + "learning_rate": 5.4231180097043645e-05, + "loss": 0.2462, + "step": 56630 + }, + { + "epoch": 2.186957025367775, + "grad_norm": 0.7934846878051758, + "learning_rate": 5.420543907744186e-05, + "loss": 0.1305, + "step": 56640 + }, + { + "epoch": 2.1873431406618016, + "grad_norm": 3.170630693435669, + "learning_rate": 5.417969805784008e-05, + "loss": 0.1825, + "step": 56650 + }, + { + "epoch": 2.1877292559558286, + "grad_norm": 1.4730361700057983, + "learning_rate": 5.415395703823829e-05, + "loss": 0.1929, + "step": 56660 + }, + { + "epoch": 2.188115371249855, + "grad_norm": 0.38193902373313904, + "learning_rate": 5.41282160186365e-05, + "loss": 0.2001, + "step": 56670 + }, + { + "epoch": 2.188501486543882, + "grad_norm": 1.1314163208007812, + "learning_rate": 5.410247499903471e-05, + "loss": 0.1913, + "step": 56680 + }, + { + "epoch": 2.1888876018379086, + "grad_norm": 0.40177929401397705, + "learning_rate": 5.4076733979432925e-05, + "loss": 0.1646, + "step": 56690 + }, + { + "epoch": 2.1892737171319356, + "grad_norm": 4.792402744293213, + "learning_rate": 5.405099295983114e-05, + "loss": 0.2481, + "step": 56700 + }, + { + "epoch": 2.1896598324259626, + "grad_norm": 2.82281756401062, + "learning_rate": 5.402525194022936e-05, + "loss": 0.142, + "step": 56710 + }, + { + "epoch": 2.190045947719989, + "grad_norm": 2.4064247608184814, + "learning_rate": 5.399951092062757e-05, + "loss": 0.2333, + "step": 56720 + }, + { + "epoch": 2.190432063014016, + "grad_norm": 1.3720029592514038, + "learning_rate": 5.3973769901025785e-05, + "loss": 0.1812, + "step": 56730 + }, + { + "epoch": 2.1908181783080427, + "grad_norm": 0.5120772123336792, + "learning_rate": 5.3948028881424e-05, + "loss": 0.2093, + "step": 56740 + }, + { + "epoch": 2.1912042936020697, + "grad_norm": 1.755660057067871, + "learning_rate": 5.3922287861822205e-05, + "loss": 0.1866, + "step": 56750 + }, + { + "epoch": 2.1915904088960962, + "grad_norm": 0.6418548226356506, + "learning_rate": 5.389654684222042e-05, + "loss": 0.1122, + "step": 56760 + }, + { + "epoch": 2.1919765241901232, + "grad_norm": 0.29100701212882996, + "learning_rate": 5.3870805822618645e-05, + "loss": 0.1508, + "step": 56770 + }, + { + "epoch": 2.1923626394841498, + "grad_norm": 1.2336047887802124, + "learning_rate": 5.384506480301685e-05, + "loss": 0.2027, + "step": 56780 + }, + { + "epoch": 2.1927487547781768, + "grad_norm": 1.7961387634277344, + "learning_rate": 5.3819323783415065e-05, + "loss": 0.0698, + "step": 56790 + }, + { + "epoch": 2.1931348700722038, + "grad_norm": 0.45203906297683716, + "learning_rate": 5.379358276381328e-05, + "loss": 0.1201, + "step": 56800 + }, + { + "epoch": 2.1935209853662303, + "grad_norm": 2.4944546222686768, + "learning_rate": 5.376784174421149e-05, + "loss": 0.252, + "step": 56810 + }, + { + "epoch": 2.1939071006602573, + "grad_norm": 0.6468565464019775, + "learning_rate": 5.37421007246097e-05, + "loss": 0.3302, + "step": 56820 + }, + { + "epoch": 2.194293215954284, + "grad_norm": 0.6524060368537903, + "learning_rate": 5.371635970500791e-05, + "loss": 0.1588, + "step": 56830 + }, + { + "epoch": 2.194679331248311, + "grad_norm": 1.2810111045837402, + "learning_rate": 5.369061868540614e-05, + "loss": 0.2929, + "step": 56840 + }, + { + "epoch": 2.1950654465423374, + "grad_norm": 1.5758986473083496, + "learning_rate": 5.3664877665804345e-05, + "loss": 0.2013, + "step": 56850 + }, + { + "epoch": 2.1954515618363644, + "grad_norm": 0.8895549774169922, + "learning_rate": 5.363913664620256e-05, + "loss": 
0.1539, + "step": 56860 + }, + { + "epoch": 2.195837677130391, + "grad_norm": 0.0427737757563591, + "learning_rate": 5.361339562660077e-05, + "loss": 0.0949, + "step": 56870 + }, + { + "epoch": 2.196223792424418, + "grad_norm": 0.9843714237213135, + "learning_rate": 5.3587654606998985e-05, + "loss": 0.1784, + "step": 56880 + }, + { + "epoch": 2.196609907718445, + "grad_norm": 0.9936504364013672, + "learning_rate": 5.356191358739719e-05, + "loss": 0.1075, + "step": 56890 + }, + { + "epoch": 2.1969960230124714, + "grad_norm": 0.3362007737159729, + "learning_rate": 5.353617256779542e-05, + "loss": 0.1496, + "step": 56900 + }, + { + "epoch": 2.1973821383064984, + "grad_norm": 0.34201017022132874, + "learning_rate": 5.351043154819363e-05, + "loss": 0.1036, + "step": 56910 + }, + { + "epoch": 2.197768253600525, + "grad_norm": 1.8447175025939941, + "learning_rate": 5.348469052859184e-05, + "loss": 0.1161, + "step": 56920 + }, + { + "epoch": 2.198154368894552, + "grad_norm": 1.2634321451187134, + "learning_rate": 5.345894950899005e-05, + "loss": 0.1302, + "step": 56930 + }, + { + "epoch": 2.1985404841885785, + "grad_norm": 1.3948713541030884, + "learning_rate": 5.3433208489388265e-05, + "loss": 0.2252, + "step": 56940 + }, + { + "epoch": 2.1989265994826055, + "grad_norm": 0.7020501494407654, + "learning_rate": 5.340746746978648e-05, + "loss": 0.1938, + "step": 56950 + }, + { + "epoch": 2.1993127147766325, + "grad_norm": 4.047187805175781, + "learning_rate": 5.33817264501847e-05, + "loss": 0.2477, + "step": 56960 + }, + { + "epoch": 2.199698830070659, + "grad_norm": 0.5025122761726379, + "learning_rate": 5.335598543058291e-05, + "loss": 0.1044, + "step": 56970 + }, + { + "epoch": 2.200084945364686, + "grad_norm": 1.8918673992156982, + "learning_rate": 5.3330244410981125e-05, + "loss": 0.3172, + "step": 56980 + }, + { + "epoch": 2.2004710606587126, + "grad_norm": 3.515730857849121, + "learning_rate": 5.330450339137933e-05, + "loss": 0.0818, + "step": 56990 + }, + { + "epoch": 2.2008571759527396, + "grad_norm": 2.531858444213867, + "learning_rate": 5.3278762371777545e-05, + "loss": 0.2849, + "step": 57000 + }, + { + "epoch": 2.201243291246766, + "grad_norm": 3.145490884780884, + "learning_rate": 5.325302135217576e-05, + "loss": 0.2319, + "step": 57010 + }, + { + "epoch": 2.201629406540793, + "grad_norm": 1.8957561254501343, + "learning_rate": 5.322728033257398e-05, + "loss": 0.2513, + "step": 57020 + }, + { + "epoch": 2.20201552183482, + "grad_norm": 1.326156497001648, + "learning_rate": 5.320153931297219e-05, + "loss": 0.295, + "step": 57030 + }, + { + "epoch": 2.2024016371288466, + "grad_norm": 0.8725142478942871, + "learning_rate": 5.3175798293370405e-05, + "loss": 0.1235, + "step": 57040 + }, + { + "epoch": 2.2027877524228736, + "grad_norm": 0.8360647559165955, + "learning_rate": 5.315005727376862e-05, + "loss": 0.224, + "step": 57050 + }, + { + "epoch": 2.2031738677169, + "grad_norm": 1.381373643875122, + "learning_rate": 5.312431625416683e-05, + "loss": 0.1003, + "step": 57060 + }, + { + "epoch": 2.203559983010927, + "grad_norm": 2.6999964714050293, + "learning_rate": 5.309857523456504e-05, + "loss": 0.2118, + "step": 57070 + }, + { + "epoch": 2.2039460983049537, + "grad_norm": 2.1584982872009277, + "learning_rate": 5.307283421496325e-05, + "loss": 0.2965, + "step": 57080 + }, + { + "epoch": 2.2043322135989807, + "grad_norm": 1.3775367736816406, + "learning_rate": 5.304709319536148e-05, + "loss": 0.2494, + "step": 57090 + }, + { + "epoch": 2.2047183288930077, + "grad_norm": 2.1069607734680176, 
+ "learning_rate": 5.3021352175759684e-05, + "loss": 0.3412, + "step": 57100 + }, + { + "epoch": 2.205104444187034, + "grad_norm": 1.3556911945343018, + "learning_rate": 5.29956111561579e-05, + "loss": 0.2076, + "step": 57110 + }, + { + "epoch": 2.205490559481061, + "grad_norm": 0.4328407049179077, + "learning_rate": 5.296987013655611e-05, + "loss": 0.0691, + "step": 57120 + }, + { + "epoch": 2.2058766747750878, + "grad_norm": 0.24479885399341583, + "learning_rate": 5.2944129116954324e-05, + "loss": 0.3407, + "step": 57130 + }, + { + "epoch": 2.2062627900691147, + "grad_norm": 0.4531087279319763, + "learning_rate": 5.291838809735253e-05, + "loss": 0.1471, + "step": 57140 + }, + { + "epoch": 2.2066489053631413, + "grad_norm": 1.258487582206726, + "learning_rate": 5.289264707775076e-05, + "loss": 0.2213, + "step": 57150 + }, + { + "epoch": 2.2070350206571683, + "grad_norm": 1.8605122566223145, + "learning_rate": 5.286690605814897e-05, + "loss": 0.1839, + "step": 57160 + }, + { + "epoch": 2.207421135951195, + "grad_norm": 0.20423386991024017, + "learning_rate": 5.284116503854718e-05, + "loss": 0.1898, + "step": 57170 + }, + { + "epoch": 2.207807251245222, + "grad_norm": 1.366576910018921, + "learning_rate": 5.281542401894539e-05, + "loss": 0.1533, + "step": 57180 + }, + { + "epoch": 2.208193366539249, + "grad_norm": 2.0091841220855713, + "learning_rate": 5.2789682999343604e-05, + "loss": 0.2834, + "step": 57190 + }, + { + "epoch": 2.2085794818332753, + "grad_norm": 1.271532654762268, + "learning_rate": 5.276394197974182e-05, + "loss": 0.2215, + "step": 57200 + }, + { + "epoch": 2.2089655971273023, + "grad_norm": 1.3751137256622314, + "learning_rate": 5.273820096014004e-05, + "loss": 0.1753, + "step": 57210 + }, + { + "epoch": 2.209351712421329, + "grad_norm": 1.6233354806900024, + "learning_rate": 5.271245994053825e-05, + "loss": 0.425, + "step": 57220 + }, + { + "epoch": 2.209737827715356, + "grad_norm": 0.05391040816903114, + "learning_rate": 5.2686718920936464e-05, + "loss": 0.1759, + "step": 57230 + }, + { + "epoch": 2.2101239430093824, + "grad_norm": 0.0809585228562355, + "learning_rate": 5.266097790133467e-05, + "loss": 0.1902, + "step": 57240 + }, + { + "epoch": 2.2105100583034094, + "grad_norm": 0.05576219782233238, + "learning_rate": 5.2635236881732884e-05, + "loss": 0.1104, + "step": 57250 + }, + { + "epoch": 2.2108961735974364, + "grad_norm": 0.2295994609594345, + "learning_rate": 5.26094958621311e-05, + "loss": 0.1119, + "step": 57260 + }, + { + "epoch": 2.211282288891463, + "grad_norm": 2.042689085006714, + "learning_rate": 5.258375484252932e-05, + "loss": 0.1703, + "step": 57270 + }, + { + "epoch": 2.21166840418549, + "grad_norm": 0.13086611032485962, + "learning_rate": 5.255801382292753e-05, + "loss": 0.2181, + "step": 57280 + }, + { + "epoch": 2.2120545194795165, + "grad_norm": 0.7772855758666992, + "learning_rate": 5.2532272803325744e-05, + "loss": 0.2975, + "step": 57290 + }, + { + "epoch": 2.2124406347735435, + "grad_norm": 1.100147008895874, + "learning_rate": 5.250653178372396e-05, + "loss": 0.1446, + "step": 57300 + }, + { + "epoch": 2.21282675006757, + "grad_norm": 2.2804689407348633, + "learning_rate": 5.248079076412217e-05, + "loss": 0.1779, + "step": 57310 + }, + { + "epoch": 2.213212865361597, + "grad_norm": 0.909376323223114, + "learning_rate": 5.245504974452038e-05, + "loss": 0.1557, + "step": 57320 + }, + { + "epoch": 2.2135989806556235, + "grad_norm": 0.808570921421051, + "learning_rate": 5.242930872491859e-05, + "loss": 0.2107, + "step": 57330 + }, + { + 
"epoch": 2.2139850959496505, + "grad_norm": 1.6458532810211182, + "learning_rate": 5.240356770531681e-05, + "loss": 0.1729, + "step": 57340 + }, + { + "epoch": 2.2143712112436775, + "grad_norm": 1.928828477859497, + "learning_rate": 5.2377826685715024e-05, + "loss": 0.1234, + "step": 57350 + }, + { + "epoch": 2.214757326537704, + "grad_norm": 1.076491117477417, + "learning_rate": 5.235208566611324e-05, + "loss": 0.2457, + "step": 57360 + }, + { + "epoch": 2.215143441831731, + "grad_norm": 3.043621778488159, + "learning_rate": 5.232634464651145e-05, + "loss": 0.2592, + "step": 57370 + }, + { + "epoch": 2.2155295571257576, + "grad_norm": 0.1770264357328415, + "learning_rate": 5.2300603626909664e-05, + "loss": 0.1214, + "step": 57380 + }, + { + "epoch": 2.2159156724197846, + "grad_norm": 0.12781330943107605, + "learning_rate": 5.227486260730787e-05, + "loss": 0.2213, + "step": 57390 + }, + { + "epoch": 2.216301787713811, + "grad_norm": 1.3419160842895508, + "learning_rate": 5.22491215877061e-05, + "loss": 0.1806, + "step": 57400 + }, + { + "epoch": 2.216687903007838, + "grad_norm": 0.9998745322227478, + "learning_rate": 5.222338056810431e-05, + "loss": 0.1452, + "step": 57410 + }, + { + "epoch": 2.217074018301865, + "grad_norm": 0.5411838889122009, + "learning_rate": 5.219763954850252e-05, + "loss": 0.0978, + "step": 57420 + }, + { + "epoch": 2.2174601335958917, + "grad_norm": 0.505660891532898, + "learning_rate": 5.217189852890073e-05, + "loss": 0.0553, + "step": 57430 + }, + { + "epoch": 2.2178462488899187, + "grad_norm": 2.207895278930664, + "learning_rate": 5.2146157509298944e-05, + "loss": 0.2327, + "step": 57440 + }, + { + "epoch": 2.218232364183945, + "grad_norm": 2.272740364074707, + "learning_rate": 5.212041648969716e-05, + "loss": 0.1773, + "step": 57450 + }, + { + "epoch": 2.218618479477972, + "grad_norm": 1.0580307245254517, + "learning_rate": 5.209467547009538e-05, + "loss": 0.1024, + "step": 57460 + }, + { + "epoch": 2.2190045947719987, + "grad_norm": 0.3714104890823364, + "learning_rate": 5.206893445049359e-05, + "loss": 0.1082, + "step": 57470 + }, + { + "epoch": 2.2193907100660257, + "grad_norm": 0.35416433215141296, + "learning_rate": 5.2043193430891804e-05, + "loss": 0.1082, + "step": 57480 + }, + { + "epoch": 2.2197768253600527, + "grad_norm": 1.1985892057418823, + "learning_rate": 5.201745241129001e-05, + "loss": 0.1227, + "step": 57490 + }, + { + "epoch": 2.2201629406540793, + "grad_norm": 0.7527439594268799, + "learning_rate": 5.1991711391688224e-05, + "loss": 0.2785, + "step": 57500 + }, + { + "epoch": 2.2205490559481063, + "grad_norm": 2.3120546340942383, + "learning_rate": 5.196597037208644e-05, + "loss": 0.1503, + "step": 57510 + }, + { + "epoch": 2.220935171242133, + "grad_norm": 1.3414112329483032, + "learning_rate": 5.194022935248466e-05, + "loss": 0.1436, + "step": 57520 + }, + { + "epoch": 2.22132128653616, + "grad_norm": 0.9937017560005188, + "learning_rate": 5.191448833288287e-05, + "loss": 0.1424, + "step": 57530 + }, + { + "epoch": 2.2217074018301863, + "grad_norm": 0.952620267868042, + "learning_rate": 5.1888747313281084e-05, + "loss": 0.1701, + "step": 57540 + }, + { + "epoch": 2.2220935171242133, + "grad_norm": 0.421085000038147, + "learning_rate": 5.18630062936793e-05, + "loss": 0.1187, + "step": 57550 + }, + { + "epoch": 2.2224796324182403, + "grad_norm": 1.5724862813949585, + "learning_rate": 5.1837265274077504e-05, + "loss": 0.1509, + "step": 57560 + }, + { + "epoch": 2.222865747712267, + "grad_norm": 1.705536127090454, + "learning_rate": 
5.181152425447572e-05, + "loss": 0.1546, + "step": 57570 + }, + { + "epoch": 2.223251863006294, + "grad_norm": 0.6752486824989319, + "learning_rate": 5.1785783234873944e-05, + "loss": 0.1785, + "step": 57580 + }, + { + "epoch": 2.2236379783003204, + "grad_norm": 0.1227736845612526, + "learning_rate": 5.176004221527215e-05, + "loss": 0.1198, + "step": 57590 + }, + { + "epoch": 2.2240240935943474, + "grad_norm": 1.0653119087219238, + "learning_rate": 5.1734301195670364e-05, + "loss": 0.2688, + "step": 57600 + }, + { + "epoch": 2.224410208888374, + "grad_norm": 2.1538949012756348, + "learning_rate": 5.170856017606858e-05, + "loss": 0.1402, + "step": 57610 + }, + { + "epoch": 2.224796324182401, + "grad_norm": 2.1059763431549072, + "learning_rate": 5.168281915646679e-05, + "loss": 0.1319, + "step": 57620 + }, + { + "epoch": 2.2251824394764275, + "grad_norm": 1.8453232049942017, + "learning_rate": 5.1657078136865004e-05, + "loss": 0.154, + "step": 57630 + }, + { + "epoch": 2.2255685547704545, + "grad_norm": 1.7324992418289185, + "learning_rate": 5.163133711726321e-05, + "loss": 0.2807, + "step": 57640 + }, + { + "epoch": 2.2259546700644814, + "grad_norm": 0.4680674374103546, + "learning_rate": 5.160559609766144e-05, + "loss": 0.1648, + "step": 57650 + }, + { + "epoch": 2.226340785358508, + "grad_norm": 0.2356865406036377, + "learning_rate": 5.1579855078059643e-05, + "loss": 0.2068, + "step": 57660 + }, + { + "epoch": 2.226726900652535, + "grad_norm": 1.2691845893859863, + "learning_rate": 5.155411405845786e-05, + "loss": 0.425, + "step": 57670 + }, + { + "epoch": 2.2271130159465615, + "grad_norm": 0.09415816515684128, + "learning_rate": 5.152837303885607e-05, + "loss": 0.1015, + "step": 57680 + }, + { + "epoch": 2.2274991312405885, + "grad_norm": 1.1072195768356323, + "learning_rate": 5.1502632019254283e-05, + "loss": 0.2324, + "step": 57690 + }, + { + "epoch": 2.227885246534615, + "grad_norm": 1.774086594581604, + "learning_rate": 5.14768909996525e-05, + "loss": 0.1382, + "step": 57700 + }, + { + "epoch": 2.228271361828642, + "grad_norm": 1.3065643310546875, + "learning_rate": 5.145114998005072e-05, + "loss": 0.1843, + "step": 57710 + }, + { + "epoch": 2.2286574771226686, + "grad_norm": 1.4786202907562256, + "learning_rate": 5.142540896044893e-05, + "loss": 0.2328, + "step": 57720 + }, + { + "epoch": 2.2290435924166956, + "grad_norm": 1.2334314584732056, + "learning_rate": 5.1399667940847143e-05, + "loss": 0.2064, + "step": 57730 + }, + { + "epoch": 2.2294297077107226, + "grad_norm": 0.6082472801208496, + "learning_rate": 5.137392692124535e-05, + "loss": 0.1278, + "step": 57740 + }, + { + "epoch": 2.229815823004749, + "grad_norm": 1.2659168243408203, + "learning_rate": 5.134818590164356e-05, + "loss": 0.1877, + "step": 57750 + }, + { + "epoch": 2.230201938298776, + "grad_norm": 1.652754783630371, + "learning_rate": 5.1322444882041777e-05, + "loss": 0.2277, + "step": 57760 + }, + { + "epoch": 2.2305880535928027, + "grad_norm": 1.6349531412124634, + "learning_rate": 5.129670386244e-05, + "loss": 0.2374, + "step": 57770 + }, + { + "epoch": 2.2309741688868296, + "grad_norm": 1.2513495683670044, + "learning_rate": 5.127096284283821e-05, + "loss": 0.2055, + "step": 57780 + }, + { + "epoch": 2.231360284180856, + "grad_norm": 2.0995755195617676, + "learning_rate": 5.124522182323642e-05, + "loss": 0.1968, + "step": 57790 + }, + { + "epoch": 2.231746399474883, + "grad_norm": 2.2957067489624023, + "learning_rate": 5.1219480803634637e-05, + "loss": 0.233, + "step": 57800 + }, + { + "epoch": 
2.23213251476891, + "grad_norm": 2.303072452545166, + "learning_rate": 5.119373978403284e-05, + "loss": 0.0924, + "step": 57810 + }, + { + "epoch": 2.2325186300629367, + "grad_norm": 0.6360287070274353, + "learning_rate": 5.1167998764431056e-05, + "loss": 0.1557, + "step": 57820 + }, + { + "epoch": 2.2329047453569637, + "grad_norm": 0.720551073551178, + "learning_rate": 5.114225774482928e-05, + "loss": 0.1619, + "step": 57830 + }, + { + "epoch": 2.2332908606509903, + "grad_norm": 0.332627534866333, + "learning_rate": 5.111651672522749e-05, + "loss": 0.1529, + "step": 57840 + }, + { + "epoch": 2.2336769759450172, + "grad_norm": 2.1180593967437744, + "learning_rate": 5.10907757056257e-05, + "loss": 0.2647, + "step": 57850 + }, + { + "epoch": 2.234063091239044, + "grad_norm": 0.023406701162457466, + "learning_rate": 5.1065034686023916e-05, + "loss": 0.2461, + "step": 57860 + }, + { + "epoch": 2.2344492065330708, + "grad_norm": 0.887008011341095, + "learning_rate": 5.103929366642213e-05, + "loss": 0.191, + "step": 57870 + }, + { + "epoch": 2.2348353218270978, + "grad_norm": 0.9116653203964233, + "learning_rate": 5.1013552646820336e-05, + "loss": 0.2199, + "step": 57880 + }, + { + "epoch": 2.2352214371211243, + "grad_norm": 2.72094464302063, + "learning_rate": 5.098781162721855e-05, + "loss": 0.1219, + "step": 57890 + }, + { + "epoch": 2.2356075524151513, + "grad_norm": 1.7832390069961548, + "learning_rate": 5.0962070607616776e-05, + "loss": 0.1516, + "step": 57900 + }, + { + "epoch": 2.235993667709178, + "grad_norm": 1.892029881477356, + "learning_rate": 5.093632958801498e-05, + "loss": 0.1943, + "step": 57910 + }, + { + "epoch": 2.236379783003205, + "grad_norm": 1.284244418144226, + "learning_rate": 5.0910588568413196e-05, + "loss": 0.0762, + "step": 57920 + }, + { + "epoch": 2.2367658982972314, + "grad_norm": 2.9343578815460205, + "learning_rate": 5.088484754881141e-05, + "loss": 0.1623, + "step": 57930 + }, + { + "epoch": 2.2371520135912584, + "grad_norm": 2.6697938442230225, + "learning_rate": 5.085910652920962e-05, + "loss": 0.1781, + "step": 57940 + }, + { + "epoch": 2.2375381288852854, + "grad_norm": 0.14455921947956085, + "learning_rate": 5.0833365509607836e-05, + "loss": 0.3026, + "step": 57950 + }, + { + "epoch": 2.237924244179312, + "grad_norm": 0.3427145183086395, + "learning_rate": 5.0807624490006056e-05, + "loss": 0.047, + "step": 57960 + }, + { + "epoch": 2.238310359473339, + "grad_norm": 0.4725586473941803, + "learning_rate": 5.078188347040427e-05, + "loss": 0.1794, + "step": 57970 + }, + { + "epoch": 2.2386964747673654, + "grad_norm": 2.0606446266174316, + "learning_rate": 5.075614245080248e-05, + "loss": 0.0948, + "step": 57980 + }, + { + "epoch": 2.2390825900613924, + "grad_norm": 0.08911284059286118, + "learning_rate": 5.073040143120069e-05, + "loss": 0.1171, + "step": 57990 + }, + { + "epoch": 2.239468705355419, + "grad_norm": 0.05132399871945381, + "learning_rate": 5.07046604115989e-05, + "loss": 0.1201, + "step": 58000 + }, + { + "epoch": 2.239854820649446, + "grad_norm": 0.5799759030342102, + "learning_rate": 5.0678919391997116e-05, + "loss": 0.1702, + "step": 58010 + }, + { + "epoch": 2.240240935943473, + "grad_norm": 1.5331569910049438, + "learning_rate": 5.0653178372395336e-05, + "loss": 0.2916, + "step": 58020 + }, + { + "epoch": 2.2406270512374995, + "grad_norm": 0.31285667419433594, + "learning_rate": 5.062743735279355e-05, + "loss": 0.1659, + "step": 58030 + }, + { + "epoch": 2.2410131665315265, + "grad_norm": 1.9137883186340332, + "learning_rate": 
5.060169633319176e-05, + "loss": 0.0994, + "step": 58040 + }, + { + "epoch": 2.241399281825553, + "grad_norm": 0.0040522972121834755, + "learning_rate": 5.0575955313589976e-05, + "loss": 0.1047, + "step": 58050 + }, + { + "epoch": 2.24178539711958, + "grad_norm": 1.4532781839370728, + "learning_rate": 5.055021429398818e-05, + "loss": 0.1351, + "step": 58060 + }, + { + "epoch": 2.2421715124136066, + "grad_norm": 1.1458393335342407, + "learning_rate": 5.0524473274386396e-05, + "loss": 0.0966, + "step": 58070 + }, + { + "epoch": 2.2425576277076336, + "grad_norm": 0.4871302545070648, + "learning_rate": 5.049873225478462e-05, + "loss": 0.2297, + "step": 58080 + }, + { + "epoch": 2.24294374300166, + "grad_norm": 0.8895847201347351, + "learning_rate": 5.047299123518283e-05, + "loss": 0.1101, + "step": 58090 + }, + { + "epoch": 2.243329858295687, + "grad_norm": 1.5819259881973267, + "learning_rate": 5.044725021558104e-05, + "loss": 0.165, + "step": 58100 + }, + { + "epoch": 2.243715973589714, + "grad_norm": 0.4520101249217987, + "learning_rate": 5.0421509195979256e-05, + "loss": 0.2857, + "step": 58110 + }, + { + "epoch": 2.2441020888837406, + "grad_norm": 0.6979352235794067, + "learning_rate": 5.039576817637747e-05, + "loss": 0.1135, + "step": 58120 + }, + { + "epoch": 2.2444882041777676, + "grad_norm": 0.10534228384494781, + "learning_rate": 5.0370027156775676e-05, + "loss": 0.1788, + "step": 58130 + }, + { + "epoch": 2.244874319471794, + "grad_norm": 1.593078851699829, + "learning_rate": 5.034428613717389e-05, + "loss": 0.0948, + "step": 58140 + }, + { + "epoch": 2.245260434765821, + "grad_norm": 0.7897083163261414, + "learning_rate": 5.0318545117572116e-05, + "loss": 0.1155, + "step": 58150 + }, + { + "epoch": 2.2456465500598477, + "grad_norm": 0.17938394844532013, + "learning_rate": 5.029280409797032e-05, + "loss": 0.246, + "step": 58160 + }, + { + "epoch": 2.2460326653538747, + "grad_norm": 0.9242120385169983, + "learning_rate": 5.0267063078368536e-05, + "loss": 0.2686, + "step": 58170 + }, + { + "epoch": 2.2464187806479012, + "grad_norm": 0.46744218468666077, + "learning_rate": 5.024132205876675e-05, + "loss": 0.105, + "step": 58180 + }, + { + "epoch": 2.2468048959419282, + "grad_norm": 1.0429635047912598, + "learning_rate": 5.021558103916496e-05, + "loss": 0.0922, + "step": 58190 + }, + { + "epoch": 2.247191011235955, + "grad_norm": 2.889759063720703, + "learning_rate": 5.018984001956317e-05, + "loss": 0.2586, + "step": 58200 + }, + { + "epoch": 2.2475771265299818, + "grad_norm": 1.0298150777816772, + "learning_rate": 5.0164098999961396e-05, + "loss": 0.1373, + "step": 58210 + }, + { + "epoch": 2.2479632418240088, + "grad_norm": 0.14992554485797882, + "learning_rate": 5.013835798035961e-05, + "loss": 0.2644, + "step": 58220 + }, + { + "epoch": 2.2483493571180353, + "grad_norm": 0.8929703831672668, + "learning_rate": 5.0112616960757816e-05, + "loss": 0.2109, + "step": 58230 + }, + { + "epoch": 2.2487354724120623, + "grad_norm": 0.8829396367073059, + "learning_rate": 5.008687594115603e-05, + "loss": 0.2353, + "step": 58240 + }, + { + "epoch": 2.249121587706089, + "grad_norm": 0.3709293305873871, + "learning_rate": 5.006113492155424e-05, + "loss": 0.099, + "step": 58250 + }, + { + "epoch": 2.249507703000116, + "grad_norm": 0.17572759091854095, + "learning_rate": 5.0035393901952456e-05, + "loss": 0.1161, + "step": 58260 + }, + { + "epoch": 2.249893818294143, + "grad_norm": 0.3241714537143707, + "learning_rate": 5.0009652882350676e-05, + "loss": 0.2081, + "step": 58270 + }, + { + "epoch": 
2.2502799335881694, + "grad_norm": 0.5595920085906982, + "learning_rate": 4.998391186274888e-05, + "loss": 0.2376, + "step": 58280 + }, + { + "epoch": 2.2506660488821963, + "grad_norm": 0.8801298141479492, + "learning_rate": 4.99581708431471e-05, + "loss": 0.1423, + "step": 58290 + }, + { + "epoch": 2.251052164176223, + "grad_norm": 1.4857895374298096, + "learning_rate": 4.9932429823545316e-05, + "loss": 0.169, + "step": 58300 + }, + { + "epoch": 2.25143827947025, + "grad_norm": 1.0327515602111816, + "learning_rate": 4.990668880394352e-05, + "loss": 0.1127, + "step": 58310 + }, + { + "epoch": 2.2518243947642764, + "grad_norm": 0.19778093695640564, + "learning_rate": 4.988094778434174e-05, + "loss": 0.2655, + "step": 58320 + }, + { + "epoch": 2.2522105100583034, + "grad_norm": 1.3672188520431519, + "learning_rate": 4.9855206764739956e-05, + "loss": 0.1252, + "step": 58330 + }, + { + "epoch": 2.2525966253523304, + "grad_norm": 3.6712214946746826, + "learning_rate": 4.982946574513816e-05, + "loss": 0.2721, + "step": 58340 + }, + { + "epoch": 2.252982740646357, + "grad_norm": 0.19810612499713898, + "learning_rate": 4.980372472553638e-05, + "loss": 0.1008, + "step": 58350 + }, + { + "epoch": 2.253368855940384, + "grad_norm": 0.5414086580276489, + "learning_rate": 4.9777983705934596e-05, + "loss": 0.1898, + "step": 58360 + }, + { + "epoch": 2.2537549712344105, + "grad_norm": 1.883710503578186, + "learning_rate": 4.975224268633281e-05, + "loss": 0.1841, + "step": 58370 + }, + { + "epoch": 2.2541410865284375, + "grad_norm": 0.3979630172252655, + "learning_rate": 4.972650166673102e-05, + "loss": 0.1712, + "step": 58380 + }, + { + "epoch": 2.254527201822464, + "grad_norm": 1.2606881856918335, + "learning_rate": 4.9700760647129236e-05, + "loss": 0.1772, + "step": 58390 + }, + { + "epoch": 2.254913317116491, + "grad_norm": 0.6021280288696289, + "learning_rate": 4.967501962752745e-05, + "loss": 0.1662, + "step": 58400 + }, + { + "epoch": 2.255299432410518, + "grad_norm": 0.4324108362197876, + "learning_rate": 4.964927860792566e-05, + "loss": 0.138, + "step": 58410 + }, + { + "epoch": 2.2556855477045445, + "grad_norm": 1.147596001625061, + "learning_rate": 4.9623537588323875e-05, + "loss": 0.1956, + "step": 58420 + }, + { + "epoch": 2.2560716629985715, + "grad_norm": 2.516636371612549, + "learning_rate": 4.959779656872209e-05, + "loss": 0.2031, + "step": 58430 + }, + { + "epoch": 2.256457778292598, + "grad_norm": 1.1109521389007568, + "learning_rate": 4.95720555491203e-05, + "loss": 0.2845, + "step": 58440 + }, + { + "epoch": 2.256843893586625, + "grad_norm": 0.3227555453777313, + "learning_rate": 4.9546314529518515e-05, + "loss": 0.0596, + "step": 58450 + }, + { + "epoch": 2.2572300088806516, + "grad_norm": 2.5064280033111572, + "learning_rate": 4.952057350991673e-05, + "loss": 0.289, + "step": 58460 + }, + { + "epoch": 2.2576161241746786, + "grad_norm": 1.0245225429534912, + "learning_rate": 4.949483249031495e-05, + "loss": 0.1458, + "step": 58470 + }, + { + "epoch": 2.2580022394687056, + "grad_norm": 0.058567408472299576, + "learning_rate": 4.9469091470713155e-05, + "loss": 0.2719, + "step": 58480 + }, + { + "epoch": 2.258388354762732, + "grad_norm": 2.1362061500549316, + "learning_rate": 4.944335045111137e-05, + "loss": 0.3814, + "step": 58490 + }, + { + "epoch": 2.258774470056759, + "grad_norm": 0.363843709230423, + "learning_rate": 4.941760943150959e-05, + "loss": 0.0669, + "step": 58500 + }, + { + "epoch": 2.2591605853507857, + "grad_norm": 0.1753295511007309, + "learning_rate": 
4.93918684119078e-05, + "loss": 0.1246, + "step": 58510 + }, + { + "epoch": 2.2595467006448127, + "grad_norm": 1.6673377752304077, + "learning_rate": 4.936612739230601e-05, + "loss": 0.2781, + "step": 58520 + }, + { + "epoch": 2.259932815938839, + "grad_norm": 3.1135804653167725, + "learning_rate": 4.934038637270422e-05, + "loss": 0.1951, + "step": 58530 + }, + { + "epoch": 2.260318931232866, + "grad_norm": 0.8234933614730835, + "learning_rate": 4.931464535310244e-05, + "loss": 0.11, + "step": 58540 + }, + { + "epoch": 2.2607050465268927, + "grad_norm": 1.0099560022354126, + "learning_rate": 4.928890433350065e-05, + "loss": 0.2741, + "step": 58550 + }, + { + "epoch": 2.2610911618209197, + "grad_norm": 2.2589969635009766, + "learning_rate": 4.926316331389886e-05, + "loss": 0.1243, + "step": 58560 + }, + { + "epoch": 2.2614772771149463, + "grad_norm": 0.5491199493408203, + "learning_rate": 4.923742229429708e-05, + "loss": 0.1302, + "step": 58570 + }, + { + "epoch": 2.2618633924089733, + "grad_norm": 0.9286119937896729, + "learning_rate": 4.9211681274695295e-05, + "loss": 0.1047, + "step": 58580 + }, + { + "epoch": 2.2622495077030003, + "grad_norm": 0.6178199052810669, + "learning_rate": 4.91859402550935e-05, + "loss": 0.1143, + "step": 58590 + }, + { + "epoch": 2.262635622997027, + "grad_norm": 1.6250818967819214, + "learning_rate": 4.916019923549172e-05, + "loss": 0.2461, + "step": 58600 + }, + { + "epoch": 2.263021738291054, + "grad_norm": 1.1366840600967407, + "learning_rate": 4.9134458215889935e-05, + "loss": 0.2128, + "step": 58610 + }, + { + "epoch": 2.2634078535850803, + "grad_norm": 0.38859716057777405, + "learning_rate": 4.910871719628815e-05, + "loss": 0.1476, + "step": 58620 + }, + { + "epoch": 2.2637939688791073, + "grad_norm": 0.02930479310452938, + "learning_rate": 4.908297617668636e-05, + "loss": 0.1901, + "step": 58630 + }, + { + "epoch": 2.264180084173134, + "grad_norm": 1.4426459074020386, + "learning_rate": 4.9057235157084575e-05, + "loss": 0.1736, + "step": 58640 + }, + { + "epoch": 2.264566199467161, + "grad_norm": 1.103959321975708, + "learning_rate": 4.903149413748279e-05, + "loss": 0.2207, + "step": 58650 + }, + { + "epoch": 2.264952314761188, + "grad_norm": 3.1351921558380127, + "learning_rate": 4.9005753117881e-05, + "loss": 0.394, + "step": 58660 + }, + { + "epoch": 2.2653384300552144, + "grad_norm": 0.33870574831962585, + "learning_rate": 4.8980012098279215e-05, + "loss": 0.1958, + "step": 58670 + }, + { + "epoch": 2.2657245453492414, + "grad_norm": 0.08599444478750229, + "learning_rate": 4.895427107867743e-05, + "loss": 0.0842, + "step": 58680 + }, + { + "epoch": 2.266110660643268, + "grad_norm": 0.7150046229362488, + "learning_rate": 4.892853005907564e-05, + "loss": 0.1917, + "step": 58690 + }, + { + "epoch": 2.266496775937295, + "grad_norm": 1.561062216758728, + "learning_rate": 4.8902789039473855e-05, + "loss": 0.2327, + "step": 58700 + }, + { + "epoch": 2.2668828912313215, + "grad_norm": 1.3899431228637695, + "learning_rate": 4.887704801987207e-05, + "loss": 0.1456, + "step": 58710 + }, + { + "epoch": 2.2672690065253485, + "grad_norm": 0.5647567510604858, + "learning_rate": 4.885130700027029e-05, + "loss": 0.2036, + "step": 58720 + }, + { + "epoch": 2.2676551218193755, + "grad_norm": 0.2155967652797699, + "learning_rate": 4.8825565980668495e-05, + "loss": 0.1159, + "step": 58730 + }, + { + "epoch": 2.268041237113402, + "grad_norm": 1.0128939151763916, + "learning_rate": 4.879982496106671e-05, + "loss": 0.1342, + "step": 58740 + }, + { + "epoch": 
2.268427352407429, + "grad_norm": 1.079142689704895, + "learning_rate": 4.877408394146493e-05, + "loss": 0.1689, + "step": 58750 + }, + { + "epoch": 2.2688134677014555, + "grad_norm": 1.2162476778030396, + "learning_rate": 4.874834292186314e-05, + "loss": 0.2256, + "step": 58760 + }, + { + "epoch": 2.2691995829954825, + "grad_norm": 1.6972836256027222, + "learning_rate": 4.872260190226135e-05, + "loss": 0.1517, + "step": 58770 + }, + { + "epoch": 2.269585698289509, + "grad_norm": 1.4847822189331055, + "learning_rate": 4.869686088265956e-05, + "loss": 0.2296, + "step": 58780 + }, + { + "epoch": 2.269971813583536, + "grad_norm": 1.7321871519088745, + "learning_rate": 4.867111986305778e-05, + "loss": 0.2396, + "step": 58790 + }, + { + "epoch": 2.270357928877563, + "grad_norm": 1.468248724937439, + "learning_rate": 4.864537884345599e-05, + "loss": 0.1501, + "step": 58800 + }, + { + "epoch": 2.2707440441715896, + "grad_norm": 1.125684380531311, + "learning_rate": 4.86196378238542e-05, + "loss": 0.2444, + "step": 58810 + }, + { + "epoch": 2.2711301594656166, + "grad_norm": 2.3958170413970947, + "learning_rate": 4.859389680425242e-05, + "loss": 0.3088, + "step": 58820 + }, + { + "epoch": 2.271516274759643, + "grad_norm": 0.8419416546821594, + "learning_rate": 4.8568155784650635e-05, + "loss": 0.1215, + "step": 58830 + }, + { + "epoch": 2.27190239005367, + "grad_norm": 0.3124147057533264, + "learning_rate": 4.854241476504884e-05, + "loss": 0.2069, + "step": 58840 + }, + { + "epoch": 2.2722885053476967, + "grad_norm": 0.6150888204574585, + "learning_rate": 4.851667374544706e-05, + "loss": 0.0483, + "step": 58850 + }, + { + "epoch": 2.2726746206417237, + "grad_norm": 0.7708920836448669, + "learning_rate": 4.8490932725845275e-05, + "loss": 0.2787, + "step": 58860 + }, + { + "epoch": 2.2730607359357506, + "grad_norm": 1.123910665512085, + "learning_rate": 4.846519170624348e-05, + "loss": 0.1875, + "step": 58870 + }, + { + "epoch": 2.273446851229777, + "grad_norm": 1.7842246294021606, + "learning_rate": 4.84394506866417e-05, + "loss": 0.2003, + "step": 58880 + }, + { + "epoch": 2.273832966523804, + "grad_norm": 0.09984418004751205, + "learning_rate": 4.8413709667039915e-05, + "loss": 0.0445, + "step": 58890 + }, + { + "epoch": 2.2742190818178307, + "grad_norm": 0.9539859890937805, + "learning_rate": 4.838796864743813e-05, + "loss": 0.1308, + "step": 58900 + }, + { + "epoch": 2.2746051971118577, + "grad_norm": 2.2655584812164307, + "learning_rate": 4.836222762783634e-05, + "loss": 0.1728, + "step": 58910 + }, + { + "epoch": 2.2749913124058843, + "grad_norm": 0.8873695731163025, + "learning_rate": 4.8336486608234555e-05, + "loss": 0.2559, + "step": 58920 + }, + { + "epoch": 2.2753774276999112, + "grad_norm": 0.6130178570747375, + "learning_rate": 4.831074558863277e-05, + "loss": 0.0859, + "step": 58930 + }, + { + "epoch": 2.2757635429939382, + "grad_norm": 2.61225962638855, + "learning_rate": 4.828500456903098e-05, + "loss": 0.1612, + "step": 58940 + }, + { + "epoch": 2.276149658287965, + "grad_norm": 1.187378168106079, + "learning_rate": 4.8259263549429195e-05, + "loss": 0.1222, + "step": 58950 + }, + { + "epoch": 2.2765357735819918, + "grad_norm": 0.3115352392196655, + "learning_rate": 4.823352252982741e-05, + "loss": 0.2081, + "step": 58960 + }, + { + "epoch": 2.2769218888760183, + "grad_norm": 1.1406041383743286, + "learning_rate": 4.820778151022563e-05, + "loss": 0.1137, + "step": 58970 + }, + { + "epoch": 2.2773080041700453, + "grad_norm": 2.415290355682373, + "learning_rate": 
4.8182040490623834e-05, + "loss": 0.2498, + "step": 58980 + }, + { + "epoch": 2.277694119464072, + "grad_norm": 0.7312545776367188, + "learning_rate": 4.815629947102205e-05, + "loss": 0.2019, + "step": 58990 + }, + { + "epoch": 2.278080234758099, + "grad_norm": 0.3095935583114624, + "learning_rate": 4.813055845142027e-05, + "loss": 0.0833, + "step": 59000 + }, + { + "epoch": 2.2784663500521254, + "grad_norm": 2.551358938217163, + "learning_rate": 4.8104817431818474e-05, + "loss": 0.1599, + "step": 59010 + }, + { + "epoch": 2.2788524653461524, + "grad_norm": 0.8552582859992981, + "learning_rate": 4.807907641221669e-05, + "loss": 0.1445, + "step": 59020 + }, + { + "epoch": 2.279238580640179, + "grad_norm": 0.5667589902877808, + "learning_rate": 4.80533353926149e-05, + "loss": 0.1746, + "step": 59030 + }, + { + "epoch": 2.279624695934206, + "grad_norm": 1.4083415269851685, + "learning_rate": 4.802759437301312e-05, + "loss": 0.1645, + "step": 59040 + }, + { + "epoch": 2.280010811228233, + "grad_norm": 0.04097180813550949, + "learning_rate": 4.800185335341133e-05, + "loss": 0.1898, + "step": 59050 + }, + { + "epoch": 2.2803969265222594, + "grad_norm": 0.019375400617718697, + "learning_rate": 4.797611233380954e-05, + "loss": 0.1723, + "step": 59060 + }, + { + "epoch": 2.2807830418162864, + "grad_norm": 1.4372104406356812, + "learning_rate": 4.795037131420776e-05, + "loss": 0.1243, + "step": 59070 + }, + { + "epoch": 2.281169157110313, + "grad_norm": 0.9807009696960449, + "learning_rate": 4.7924630294605974e-05, + "loss": 0.2997, + "step": 59080 + }, + { + "epoch": 2.28155527240434, + "grad_norm": 0.48975950479507446, + "learning_rate": 4.789888927500418e-05, + "loss": 0.1613, + "step": 59090 + }, + { + "epoch": 2.2819413876983665, + "grad_norm": 2.779517412185669, + "learning_rate": 4.78731482554024e-05, + "loss": 0.2774, + "step": 59100 + }, + { + "epoch": 2.2823275029923935, + "grad_norm": 1.5717260837554932, + "learning_rate": 4.7847407235800614e-05, + "loss": 0.1473, + "step": 59110 + }, + { + "epoch": 2.2827136182864205, + "grad_norm": 0.19590778648853302, + "learning_rate": 4.782166621619882e-05, + "loss": 0.2223, + "step": 59120 + }, + { + "epoch": 2.283099733580447, + "grad_norm": 1.9272565841674805, + "learning_rate": 4.779592519659704e-05, + "loss": 0.1101, + "step": 59130 + }, + { + "epoch": 2.283485848874474, + "grad_norm": 1.9476497173309326, + "learning_rate": 4.7770184176995254e-05, + "loss": 0.1627, + "step": 59140 + }, + { + "epoch": 2.2838719641685006, + "grad_norm": 4.072344779968262, + "learning_rate": 4.774444315739347e-05, + "loss": 0.2169, + "step": 59150 + }, + { + "epoch": 2.2842580794625276, + "grad_norm": 2.180009603500366, + "learning_rate": 4.771870213779168e-05, + "loss": 0.1348, + "step": 59160 + }, + { + "epoch": 2.284644194756554, + "grad_norm": 1.6288330554962158, + "learning_rate": 4.7692961118189894e-05, + "loss": 0.157, + "step": 59170 + }, + { + "epoch": 2.285030310050581, + "grad_norm": 0.8936790227890015, + "learning_rate": 4.766722009858811e-05, + "loss": 0.2966, + "step": 59180 + }, + { + "epoch": 2.285416425344608, + "grad_norm": 0.34459662437438965, + "learning_rate": 4.764147907898632e-05, + "loss": 0.2497, + "step": 59190 + }, + { + "epoch": 2.2858025406386346, + "grad_norm": 1.2364985942840576, + "learning_rate": 4.7615738059384534e-05, + "loss": 0.1951, + "step": 59200 + }, + { + "epoch": 2.2861886559326616, + "grad_norm": 0.43685224652290344, + "learning_rate": 4.758999703978275e-05, + "loss": 0.1707, + "step": 59210 + }, + { + "epoch": 
2.286574771226688, + "grad_norm": 1.5797430276870728, + "learning_rate": 4.756425602018096e-05, + "loss": 0.1328, + "step": 59220 + }, + { + "epoch": 2.286960886520715, + "grad_norm": 1.0080262422561646, + "learning_rate": 4.7538515000579174e-05, + "loss": 0.2743, + "step": 59230 + }, + { + "epoch": 2.2873470018147417, + "grad_norm": 2.561823844909668, + "learning_rate": 4.751277398097739e-05, + "loss": 0.3414, + "step": 59240 + }, + { + "epoch": 2.2877331171087687, + "grad_norm": 1.4492895603179932, + "learning_rate": 4.748703296137561e-05, + "loss": 0.1942, + "step": 59250 + }, + { + "epoch": 2.2881192324027957, + "grad_norm": 2.6348021030426025, + "learning_rate": 4.7461291941773814e-05, + "loss": 0.2293, + "step": 59260 + }, + { + "epoch": 2.2885053476968222, + "grad_norm": 0.5275348424911499, + "learning_rate": 4.743555092217203e-05, + "loss": 0.1436, + "step": 59270 + }, + { + "epoch": 2.2888914629908492, + "grad_norm": 0.3320735991001129, + "learning_rate": 4.740980990257025e-05, + "loss": 0.3213, + "step": 59280 + }, + { + "epoch": 2.2892775782848758, + "grad_norm": 0.33580052852630615, + "learning_rate": 4.738406888296846e-05, + "loss": 0.0988, + "step": 59290 + }, + { + "epoch": 2.2896636935789028, + "grad_norm": 0.6209644079208374, + "learning_rate": 4.735832786336667e-05, + "loss": 0.0969, + "step": 59300 + }, + { + "epoch": 2.2900498088729293, + "grad_norm": 0.8504312038421631, + "learning_rate": 4.733258684376488e-05, + "loss": 0.1328, + "step": 59310 + }, + { + "epoch": 2.2904359241669563, + "grad_norm": 1.8116223812103271, + "learning_rate": 4.73068458241631e-05, + "loss": 0.2032, + "step": 59320 + }, + { + "epoch": 2.2908220394609833, + "grad_norm": 3.810727596282959, + "learning_rate": 4.728110480456131e-05, + "loss": 0.2162, + "step": 59330 + }, + { + "epoch": 2.29120815475501, + "grad_norm": 0.39549925923347473, + "learning_rate": 4.725536378495952e-05, + "loss": 0.1421, + "step": 59340 + }, + { + "epoch": 2.291594270049037, + "grad_norm": 0.6922689080238342, + "learning_rate": 4.722962276535774e-05, + "loss": 0.23, + "step": 59350 + }, + { + "epoch": 2.2919803853430634, + "grad_norm": 2.8557536602020264, + "learning_rate": 4.7203881745755954e-05, + "loss": 0.2824, + "step": 59360 + }, + { + "epoch": 2.2923665006370904, + "grad_norm": 1.2936065196990967, + "learning_rate": 4.717814072615416e-05, + "loss": 0.2515, + "step": 59370 + }, + { + "epoch": 2.292752615931117, + "grad_norm": 0.17199493944644928, + "learning_rate": 4.715239970655238e-05, + "loss": 0.1029, + "step": 59380 + }, + { + "epoch": 2.293138731225144, + "grad_norm": 0.06957222521305084, + "learning_rate": 4.7126658686950594e-05, + "loss": 0.1535, + "step": 59390 + }, + { + "epoch": 2.2935248465191704, + "grad_norm": 0.24227716028690338, + "learning_rate": 4.710091766734881e-05, + "loss": 0.2497, + "step": 59400 + }, + { + "epoch": 2.2939109618131974, + "grad_norm": 0.49250227212905884, + "learning_rate": 4.707517664774702e-05, + "loss": 0.1416, + "step": 59410 + }, + { + "epoch": 2.294297077107224, + "grad_norm": 0.2832399010658264, + "learning_rate": 4.7049435628145234e-05, + "loss": 0.1156, + "step": 59420 + }, + { + "epoch": 2.294683192401251, + "grad_norm": 0.6916882395744324, + "learning_rate": 4.702369460854345e-05, + "loss": 0.315, + "step": 59430 + }, + { + "epoch": 2.295069307695278, + "grad_norm": 2.099567174911499, + "learning_rate": 4.699795358894166e-05, + "loss": 0.2208, + "step": 59440 + }, + { + "epoch": 2.2954554229893045, + "grad_norm": 0.9960312843322754, + "learning_rate": 
4.6972212569339874e-05, + "loss": 0.2152, + "step": 59450 + }, + { + "epoch": 2.2958415382833315, + "grad_norm": 0.8338409662246704, + "learning_rate": 4.694647154973809e-05, + "loss": 0.1238, + "step": 59460 + }, + { + "epoch": 2.296227653577358, + "grad_norm": 1.1573169231414795, + "learning_rate": 4.69207305301363e-05, + "loss": 0.1141, + "step": 59470 + }, + { + "epoch": 2.296613768871385, + "grad_norm": 0.639020562171936, + "learning_rate": 4.6894989510534514e-05, + "loss": 0.1143, + "step": 59480 + }, + { + "epoch": 2.2969998841654116, + "grad_norm": 1.2339776754379272, + "learning_rate": 4.686924849093273e-05, + "loss": 0.0832, + "step": 59490 + }, + { + "epoch": 2.2973859994594386, + "grad_norm": 0.5958113074302673, + "learning_rate": 4.684350747133095e-05, + "loss": 0.1435, + "step": 59500 + }, + { + "epoch": 2.2977721147534655, + "grad_norm": 0.36563804745674133, + "learning_rate": 4.6817766451729154e-05, + "loss": 0.21, + "step": 59510 + }, + { + "epoch": 2.298158230047492, + "grad_norm": 0.8008506894111633, + "learning_rate": 4.679202543212737e-05, + "loss": 0.2525, + "step": 59520 + }, + { + "epoch": 2.298544345341519, + "grad_norm": 1.644982099533081, + "learning_rate": 4.676628441252559e-05, + "loss": 0.1489, + "step": 59530 + }, + { + "epoch": 2.2989304606355456, + "grad_norm": 0.8418545126914978, + "learning_rate": 4.6740543392923793e-05, + "loss": 0.1275, + "step": 59540 + }, + { + "epoch": 2.2993165759295726, + "grad_norm": 1.7769790887832642, + "learning_rate": 4.671480237332201e-05, + "loss": 0.2149, + "step": 59550 + }, + { + "epoch": 2.299702691223599, + "grad_norm": 1.3088769912719727, + "learning_rate": 4.668906135372022e-05, + "loss": 0.3798, + "step": 59560 + }, + { + "epoch": 2.300088806517626, + "grad_norm": 0.0827949270606041, + "learning_rate": 4.666332033411844e-05, + "loss": 0.2128, + "step": 59570 + }, + { + "epoch": 2.300474921811653, + "grad_norm": 0.7772637605667114, + "learning_rate": 4.663757931451665e-05, + "loss": 0.1731, + "step": 59580 + }, + { + "epoch": 2.3008610371056797, + "grad_norm": 3.8065452575683594, + "learning_rate": 4.661183829491486e-05, + "loss": 0.3464, + "step": 59590 + }, + { + "epoch": 2.3012471523997067, + "grad_norm": 0.10512294620275497, + "learning_rate": 4.658609727531308e-05, + "loss": 0.1584, + "step": 59600 + }, + { + "epoch": 2.301633267693733, + "grad_norm": 1.227293848991394, + "learning_rate": 4.6560356255711293e-05, + "loss": 0.2381, + "step": 59610 + }, + { + "epoch": 2.30201938298776, + "grad_norm": 0.2790459394454956, + "learning_rate": 4.65346152361095e-05, + "loss": 0.162, + "step": 59620 + }, + { + "epoch": 2.3024054982817868, + "grad_norm": 0.571444034576416, + "learning_rate": 4.650887421650772e-05, + "loss": 0.1354, + "step": 59630 + }, + { + "epoch": 2.3027916135758137, + "grad_norm": 0.6388635635375977, + "learning_rate": 4.648313319690593e-05, + "loss": 0.0969, + "step": 59640 + }, + { + "epoch": 2.3031777288698407, + "grad_norm": 3.020249843597412, + "learning_rate": 4.645739217730414e-05, + "loss": 0.15, + "step": 59650 + }, + { + "epoch": 2.3035638441638673, + "grad_norm": 1.6712716817855835, + "learning_rate": 4.643165115770236e-05, + "loss": 0.1445, + "step": 59660 + }, + { + "epoch": 2.3039499594578943, + "grad_norm": 0.4937114119529724, + "learning_rate": 4.640591013810057e-05, + "loss": 0.0823, + "step": 59670 + }, + { + "epoch": 2.304336074751921, + "grad_norm": 1.7938934564590454, + "learning_rate": 4.6380169118498787e-05, + "loss": 0.164, + "step": 59680 + }, + { + "epoch": 2.304722190045948, + 
"grad_norm": 1.9819937944412231, + "learning_rate": 4.6354428098897e-05, + "loss": 0.1753, + "step": 59690 + }, + { + "epoch": 2.3051083053399744, + "grad_norm": 1.2775839567184448, + "learning_rate": 4.632868707929521e-05, + "loss": 0.1875, + "step": 59700 + }, + { + "epoch": 2.3054944206340013, + "grad_norm": 0.775601863861084, + "learning_rate": 4.6302946059693427e-05, + "loss": 0.0737, + "step": 59710 + }, + { + "epoch": 2.3058805359280283, + "grad_norm": 0.9071961045265198, + "learning_rate": 4.627720504009164e-05, + "loss": 0.1632, + "step": 59720 + }, + { + "epoch": 2.306266651222055, + "grad_norm": 1.4232882261276245, + "learning_rate": 4.625146402048985e-05, + "loss": 0.2169, + "step": 59730 + }, + { + "epoch": 2.306652766516082, + "grad_norm": 0.5946634411811829, + "learning_rate": 4.6225723000888066e-05, + "loss": 0.1919, + "step": 59740 + }, + { + "epoch": 2.3070388818101084, + "grad_norm": 0.14276131987571716, + "learning_rate": 4.6199981981286287e-05, + "loss": 0.1415, + "step": 59750 + }, + { + "epoch": 2.3074249971041354, + "grad_norm": 1.5616459846496582, + "learning_rate": 4.617424096168449e-05, + "loss": 0.1668, + "step": 59760 + }, + { + "epoch": 2.307811112398162, + "grad_norm": 0.34254691004753113, + "learning_rate": 4.6148499942082706e-05, + "loss": 0.2122, + "step": 59770 + }, + { + "epoch": 2.308197227692189, + "grad_norm": 0.34764161705970764, + "learning_rate": 4.6122758922480926e-05, + "loss": 0.1604, + "step": 59780 + }, + { + "epoch": 2.308583342986216, + "grad_norm": 3.1887755393981934, + "learning_rate": 4.609701790287913e-05, + "loss": 0.124, + "step": 59790 + }, + { + "epoch": 2.3089694582802425, + "grad_norm": 0.8155665397644043, + "learning_rate": 4.6071276883277346e-05, + "loss": 0.1095, + "step": 59800 + }, + { + "epoch": 2.3093555735742695, + "grad_norm": 0.2734861373901367, + "learning_rate": 4.6045535863675566e-05, + "loss": 0.2389, + "step": 59810 + }, + { + "epoch": 2.309741688868296, + "grad_norm": 0.5696996450424194, + "learning_rate": 4.601979484407378e-05, + "loss": 0.0787, + "step": 59820 + }, + { + "epoch": 2.310127804162323, + "grad_norm": 0.49777019023895264, + "learning_rate": 4.5994053824471986e-05, + "loss": 0.1815, + "step": 59830 + }, + { + "epoch": 2.3105139194563495, + "grad_norm": 0.001843929523602128, + "learning_rate": 4.59683128048702e-05, + "loss": 0.0834, + "step": 59840 + }, + { + "epoch": 2.3109000347503765, + "grad_norm": 0.1088326945900917, + "learning_rate": 4.594257178526842e-05, + "loss": 0.1821, + "step": 59850 + }, + { + "epoch": 2.311286150044403, + "grad_norm": 0.4867718815803528, + "learning_rate": 4.591683076566663e-05, + "loss": 0.2175, + "step": 59860 + }, + { + "epoch": 2.31167226533843, + "grad_norm": 0.759501576423645, + "learning_rate": 4.589108974606484e-05, + "loss": 0.1041, + "step": 59870 + }, + { + "epoch": 2.3120583806324566, + "grad_norm": 1.3260136842727661, + "learning_rate": 4.586534872646306e-05, + "loss": 0.2017, + "step": 59880 + }, + { + "epoch": 2.3124444959264836, + "grad_norm": 3.664853572845459, + "learning_rate": 4.583960770686127e-05, + "loss": 0.2322, + "step": 59890 + }, + { + "epoch": 2.3128306112205106, + "grad_norm": 0.9447748064994812, + "learning_rate": 4.581386668725948e-05, + "loss": 0.1124, + "step": 59900 + }, + { + "epoch": 2.313216726514537, + "grad_norm": 2.8684070110321045, + "learning_rate": 4.57881256676577e-05, + "loss": 0.1271, + "step": 59910 + }, + { + "epoch": 2.313602841808564, + "grad_norm": 2.4595460891723633, + "learning_rate": 4.576238464805591e-05, + "loss": 
0.1695, + "step": 59920 + }, + { + "epoch": 2.3139889571025907, + "grad_norm": 0.8317721486091614, + "learning_rate": 4.5736643628454126e-05, + "loss": 0.1341, + "step": 59930 + }, + { + "epoch": 2.3143750723966177, + "grad_norm": 1.8924741744995117, + "learning_rate": 4.571090260885234e-05, + "loss": 0.3389, + "step": 59940 + }, + { + "epoch": 2.314761187690644, + "grad_norm": 0.22777517139911652, + "learning_rate": 4.568516158925055e-05, + "loss": 0.1406, + "step": 59950 + }, + { + "epoch": 2.315147302984671, + "grad_norm": 1.9150850772857666, + "learning_rate": 4.5659420569648766e-05, + "loss": 0.1615, + "step": 59960 + }, + { + "epoch": 2.315533418278698, + "grad_norm": 1.361603856086731, + "learning_rate": 4.563367955004698e-05, + "loss": 0.1616, + "step": 59970 + }, + { + "epoch": 2.3159195335727247, + "grad_norm": 1.1094087362289429, + "learning_rate": 4.560793853044519e-05, + "loss": 0.1923, + "step": 59980 + }, + { + "epoch": 2.3163056488667517, + "grad_norm": 0.5429170727729797, + "learning_rate": 4.5582197510843406e-05, + "loss": 0.2126, + "step": 59990 + }, + { + "epoch": 2.3166917641607783, + "grad_norm": 0.8391672968864441, + "learning_rate": 4.555645649124162e-05, + "loss": 0.246, + "step": 60000 + }, + { + "epoch": 2.3170778794548053, + "grad_norm": 0.6325327157974243, + "learning_rate": 4.553071547163983e-05, + "loss": 0.1385, + "step": 60010 + }, + { + "epoch": 2.317463994748832, + "grad_norm": 0.4999966025352478, + "learning_rate": 4.5504974452038046e-05, + "loss": 0.2083, + "step": 60020 + }, + { + "epoch": 2.317850110042859, + "grad_norm": 2.6003923416137695, + "learning_rate": 4.5479233432436266e-05, + "loss": 0.1553, + "step": 60030 + }, + { + "epoch": 2.318236225336886, + "grad_norm": 2.107546091079712, + "learning_rate": 4.545349241283447e-05, + "loss": 0.2252, + "step": 60040 + }, + { + "epoch": 2.3186223406309123, + "grad_norm": 0.4469972550868988, + "learning_rate": 4.5427751393232686e-05, + "loss": 0.1913, + "step": 60050 + }, + { + "epoch": 2.3190084559249393, + "grad_norm": 1.0861988067626953, + "learning_rate": 4.5402010373630906e-05, + "loss": 0.16, + "step": 60060 + }, + { + "epoch": 2.319394571218966, + "grad_norm": 1.6397405862808228, + "learning_rate": 4.537626935402912e-05, + "loss": 0.1527, + "step": 60070 + }, + { + "epoch": 2.319780686512993, + "grad_norm": 2.6963932514190674, + "learning_rate": 4.5350528334427326e-05, + "loss": 0.2445, + "step": 60080 + }, + { + "epoch": 2.3201668018070194, + "grad_norm": 1.8489919900894165, + "learning_rate": 4.532478731482554e-05, + "loss": 0.1972, + "step": 60090 + }, + { + "epoch": 2.3205529171010464, + "grad_norm": 1.108892560005188, + "learning_rate": 4.529904629522376e-05, + "loss": 0.1301, + "step": 60100 + }, + { + "epoch": 2.3209390323950734, + "grad_norm": 1.8046364784240723, + "learning_rate": 4.5273305275621966e-05, + "loss": 0.184, + "step": 60110 + }, + { + "epoch": 2.3213251476891, + "grad_norm": 0.346176415681839, + "learning_rate": 4.524756425602018e-05, + "loss": 0.0723, + "step": 60120 + }, + { + "epoch": 2.321711262983127, + "grad_norm": 0.8505159020423889, + "learning_rate": 4.52218232364184e-05, + "loss": 0.1994, + "step": 60130 + }, + { + "epoch": 2.3220973782771535, + "grad_norm": 1.0866034030914307, + "learning_rate": 4.519608221681661e-05, + "loss": 0.097, + "step": 60140 + }, + { + "epoch": 2.3224834935711804, + "grad_norm": 2.6288974285125732, + "learning_rate": 4.517034119721482e-05, + "loss": 0.1419, + "step": 60150 + }, + { + "epoch": 2.322869608865207, + "grad_norm": 
0.4779375195503235, + "learning_rate": 4.514460017761304e-05, + "loss": 0.0999, + "step": 60160 + }, + { + "epoch": 2.323255724159234, + "grad_norm": 1.1789641380310059, + "learning_rate": 4.511885915801125e-05, + "loss": 0.1184, + "step": 60170 + }, + { + "epoch": 2.323641839453261, + "grad_norm": 0.409001886844635, + "learning_rate": 4.5093118138409466e-05, + "loss": 0.1913, + "step": 60180 + }, + { + "epoch": 2.3240279547472875, + "grad_norm": 1.5853062868118286, + "learning_rate": 4.506737711880768e-05, + "loss": 0.1941, + "step": 60190 + }, + { + "epoch": 2.3244140700413145, + "grad_norm": 3.547631025314331, + "learning_rate": 4.504163609920589e-05, + "loss": 0.2272, + "step": 60200 + }, + { + "epoch": 2.324800185335341, + "grad_norm": 0.3084076941013336, + "learning_rate": 4.5015895079604106e-05, + "loss": 0.1543, + "step": 60210 + }, + { + "epoch": 2.325186300629368, + "grad_norm": 0.055422622710466385, + "learning_rate": 4.499015406000232e-05, + "loss": 0.1878, + "step": 60220 + }, + { + "epoch": 2.3255724159233946, + "grad_norm": 0.7743698954582214, + "learning_rate": 4.496441304040053e-05, + "loss": 0.2342, + "step": 60230 + }, + { + "epoch": 2.3259585312174216, + "grad_norm": 0.940558671951294, + "learning_rate": 4.4938672020798746e-05, + "loss": 0.3158, + "step": 60240 + }, + { + "epoch": 2.3263446465114486, + "grad_norm": 0.35941219329833984, + "learning_rate": 4.491293100119696e-05, + "loss": 0.15, + "step": 60250 + }, + { + "epoch": 2.326730761805475, + "grad_norm": 0.5877255201339722, + "learning_rate": 4.488718998159517e-05, + "loss": 0.2713, + "step": 60260 + }, + { + "epoch": 2.327116877099502, + "grad_norm": 1.0649851560592651, + "learning_rate": 4.4861448961993386e-05, + "loss": 0.1789, + "step": 60270 + }, + { + "epoch": 2.3275029923935286, + "grad_norm": 0.9025186896324158, + "learning_rate": 4.4835707942391606e-05, + "loss": 0.0883, + "step": 60280 + }, + { + "epoch": 2.3278891076875556, + "grad_norm": 0.08007670938968658, + "learning_rate": 4.480996692278981e-05, + "loss": 0.2019, + "step": 60290 + }, + { + "epoch": 2.328275222981582, + "grad_norm": 0.2300902009010315, + "learning_rate": 4.4784225903188025e-05, + "loss": 0.1448, + "step": 60300 + }, + { + "epoch": 2.328661338275609, + "grad_norm": 0.3912908732891083, + "learning_rate": 4.4758484883586246e-05, + "loss": 0.1869, + "step": 60310 + }, + { + "epoch": 2.3290474535696357, + "grad_norm": 0.5869823098182678, + "learning_rate": 4.473274386398445e-05, + "loss": 0.0829, + "step": 60320 + }, + { + "epoch": 2.3294335688636627, + "grad_norm": 0.5597032308578491, + "learning_rate": 4.4707002844382665e-05, + "loss": 0.1639, + "step": 60330 + }, + { + "epoch": 2.3298196841576893, + "grad_norm": 0.3138620853424072, + "learning_rate": 4.468126182478088e-05, + "loss": 0.1028, + "step": 60340 + }, + { + "epoch": 2.3302057994517162, + "grad_norm": 0.10838694870471954, + "learning_rate": 4.46555208051791e-05, + "loss": 0.1406, + "step": 60350 + }, + { + "epoch": 2.3305919147457432, + "grad_norm": 1.1807544231414795, + "learning_rate": 4.4629779785577305e-05, + "loss": 0.1183, + "step": 60360 + }, + { + "epoch": 2.3309780300397698, + "grad_norm": 0.2713087499141693, + "learning_rate": 4.460403876597552e-05, + "loss": 0.108, + "step": 60370 + }, + { + "epoch": 2.3313641453337968, + "grad_norm": 0.8787125945091248, + "learning_rate": 4.457829774637374e-05, + "loss": 0.2446, + "step": 60380 + }, + { + "epoch": 2.3317502606278233, + "grad_norm": 1.3905388116836548, + "learning_rate": 4.455255672677195e-05, + "loss": 0.1485, + 
"step": 60390 + }, + { + "epoch": 2.3321363759218503, + "grad_norm": 1.5408064126968384, + "learning_rate": 4.452681570717016e-05, + "loss": 0.2076, + "step": 60400 + }, + { + "epoch": 2.332522491215877, + "grad_norm": 0.034131214022636414, + "learning_rate": 4.450107468756838e-05, + "loss": 0.0905, + "step": 60410 + }, + { + "epoch": 2.332908606509904, + "grad_norm": 1.067578673362732, + "learning_rate": 4.447533366796659e-05, + "loss": 0.1221, + "step": 60420 + }, + { + "epoch": 2.333294721803931, + "grad_norm": 1.5126134157180786, + "learning_rate": 4.44495926483648e-05, + "loss": 0.5139, + "step": 60430 + }, + { + "epoch": 2.3336808370979574, + "grad_norm": 3.148021697998047, + "learning_rate": 4.442385162876302e-05, + "loss": 0.2504, + "step": 60440 + }, + { + "epoch": 2.3340669523919844, + "grad_norm": 0.7882575392723083, + "learning_rate": 4.439811060916123e-05, + "loss": 0.2593, + "step": 60450 + }, + { + "epoch": 2.334453067686011, + "grad_norm": 1.124514102935791, + "learning_rate": 4.4372369589559445e-05, + "loss": 0.303, + "step": 60460 + }, + { + "epoch": 2.334839182980038, + "grad_norm": 0.064745232462883, + "learning_rate": 4.434662856995766e-05, + "loss": 0.1734, + "step": 60470 + }, + { + "epoch": 2.3352252982740644, + "grad_norm": 0.08392655104398727, + "learning_rate": 4.432088755035587e-05, + "loss": 0.1492, + "step": 60480 + }, + { + "epoch": 2.3356114135680914, + "grad_norm": 0.9123765230178833, + "learning_rate": 4.4295146530754085e-05, + "loss": 0.1689, + "step": 60490 + }, + { + "epoch": 2.3359975288621184, + "grad_norm": 1.0740617513656616, + "learning_rate": 4.42694055111523e-05, + "loss": 0.1954, + "step": 60500 + }, + { + "epoch": 2.336383644156145, + "grad_norm": 0.409637987613678, + "learning_rate": 4.424366449155051e-05, + "loss": 0.2164, + "step": 60510 + }, + { + "epoch": 2.336769759450172, + "grad_norm": 2.4724161624908447, + "learning_rate": 4.4217923471948725e-05, + "loss": 0.2736, + "step": 60520 + }, + { + "epoch": 2.3371558747441985, + "grad_norm": 1.966937780380249, + "learning_rate": 4.4192182452346945e-05, + "loss": 0.2463, + "step": 60530 + }, + { + "epoch": 2.3375419900382255, + "grad_norm": 1.0093505382537842, + "learning_rate": 4.416644143274515e-05, + "loss": 0.1524, + "step": 60540 + }, + { + "epoch": 2.337928105332252, + "grad_norm": 0.9283536076545715, + "learning_rate": 4.4140700413143365e-05, + "loss": 0.1174, + "step": 60550 + }, + { + "epoch": 2.338314220626279, + "grad_norm": 1.4851809740066528, + "learning_rate": 4.4114959393541585e-05, + "loss": 0.1088, + "step": 60560 + }, + { + "epoch": 2.338700335920306, + "grad_norm": 2.666454315185547, + "learning_rate": 4.408921837393979e-05, + "loss": 0.186, + "step": 60570 + }, + { + "epoch": 2.3390864512143326, + "grad_norm": 0.34516963362693787, + "learning_rate": 4.4063477354338005e-05, + "loss": 0.1364, + "step": 60580 + }, + { + "epoch": 2.3394725665083596, + "grad_norm": 1.697920560836792, + "learning_rate": 4.4037736334736225e-05, + "loss": 0.1708, + "step": 60590 + }, + { + "epoch": 2.339858681802386, + "grad_norm": 1.542663812637329, + "learning_rate": 4.401199531513444e-05, + "loss": 0.2926, + "step": 60600 + }, + { + "epoch": 2.340244797096413, + "grad_norm": 1.080894112586975, + "learning_rate": 4.3986254295532645e-05, + "loss": 0.1114, + "step": 60610 + }, + { + "epoch": 2.3406309123904396, + "grad_norm": 0.7464519739151001, + "learning_rate": 4.396051327593086e-05, + "loss": 0.2846, + "step": 60620 + }, + { + "epoch": 2.3410170276844666, + "grad_norm": 1.4161779880523682, + 
"learning_rate": 4.393477225632908e-05, + "loss": 0.1181, + "step": 60630 + }, + { + "epoch": 2.3414031429784936, + "grad_norm": 0.5969855785369873, + "learning_rate": 4.390903123672729e-05, + "loss": 0.2574, + "step": 60640 + }, + { + "epoch": 2.34178925827252, + "grad_norm": 2.8204824924468994, + "learning_rate": 4.38832902171255e-05, + "loss": 0.1496, + "step": 60650 + }, + { + "epoch": 2.342175373566547, + "grad_norm": 0.5998751521110535, + "learning_rate": 4.385754919752372e-05, + "loss": 0.1849, + "step": 60660 + }, + { + "epoch": 2.3425614888605737, + "grad_norm": 0.5183271169662476, + "learning_rate": 4.383180817792193e-05, + "loss": 0.1577, + "step": 60670 + }, + { + "epoch": 2.3429476041546007, + "grad_norm": 3.008211374282837, + "learning_rate": 4.380606715832014e-05, + "loss": 0.2106, + "step": 60680 + }, + { + "epoch": 2.3433337194486272, + "grad_norm": 0.026534082368016243, + "learning_rate": 4.378032613871836e-05, + "loss": 0.0859, + "step": 60690 + }, + { + "epoch": 2.343719834742654, + "grad_norm": 0.7964476943016052, + "learning_rate": 4.375458511911657e-05, + "loss": 0.2276, + "step": 60700 + }, + { + "epoch": 2.3441059500366808, + "grad_norm": 0.9255203008651733, + "learning_rate": 4.3728844099514785e-05, + "loss": 0.1278, + "step": 60710 + }, + { + "epoch": 2.3444920653307078, + "grad_norm": 0.06721694767475128, + "learning_rate": 4.3703103079913e-05, + "loss": 0.188, + "step": 60720 + }, + { + "epoch": 2.3448781806247343, + "grad_norm": 1.5369101762771606, + "learning_rate": 4.367736206031121e-05, + "loss": 0.2286, + "step": 60730 + }, + { + "epoch": 2.3452642959187613, + "grad_norm": 2.3550243377685547, + "learning_rate": 4.3651621040709425e-05, + "loss": 0.1924, + "step": 60740 + }, + { + "epoch": 2.3456504112127883, + "grad_norm": 1.436240553855896, + "learning_rate": 4.362588002110764e-05, + "loss": 0.179, + "step": 60750 + }, + { + "epoch": 2.346036526506815, + "grad_norm": 4.520357131958008, + "learning_rate": 4.360013900150585e-05, + "loss": 0.3386, + "step": 60760 + }, + { + "epoch": 2.346422641800842, + "grad_norm": 2.983982563018799, + "learning_rate": 4.3574397981904065e-05, + "loss": 0.178, + "step": 60770 + }, + { + "epoch": 2.3468087570948684, + "grad_norm": 0.429884672164917, + "learning_rate": 4.354865696230228e-05, + "loss": 0.0962, + "step": 60780 + }, + { + "epoch": 2.3471948723888953, + "grad_norm": 0.5854440927505493, + "learning_rate": 4.352291594270049e-05, + "loss": 0.1934, + "step": 60790 + }, + { + "epoch": 2.347580987682922, + "grad_norm": 1.087031602859497, + "learning_rate": 4.3497174923098705e-05, + "loss": 0.1191, + "step": 60800 + }, + { + "epoch": 2.347967102976949, + "grad_norm": 1.5751805305480957, + "learning_rate": 4.3471433903496925e-05, + "loss": 0.147, + "step": 60810 + }, + { + "epoch": 2.348353218270976, + "grad_norm": 0.03129373490810394, + "learning_rate": 4.344569288389513e-05, + "loss": 0.1388, + "step": 60820 + }, + { + "epoch": 2.3487393335650024, + "grad_norm": 1.380611777305603, + "learning_rate": 4.3419951864293344e-05, + "loss": 0.2517, + "step": 60830 + }, + { + "epoch": 2.3491254488590294, + "grad_norm": 0.5196431279182434, + "learning_rate": 4.3394210844691565e-05, + "loss": 0.1825, + "step": 60840 + }, + { + "epoch": 2.349511564153056, + "grad_norm": 0.729242205619812, + "learning_rate": 4.336846982508978e-05, + "loss": 0.1716, + "step": 60850 + }, + { + "epoch": 2.349897679447083, + "grad_norm": 0.4489123225212097, + "learning_rate": 4.3342728805487984e-05, + "loss": 0.1318, + "step": 60860 + }, + { + "epoch": 
2.3502837947411095, + "grad_norm": 0.41065114736557007, + "learning_rate": 4.33169877858862e-05, + "loss": 0.1579, + "step": 60870 + }, + { + "epoch": 2.3506699100351365, + "grad_norm": 1.2845816612243652, + "learning_rate": 4.329124676628442e-05, + "loss": 0.1613, + "step": 60880 + }, + { + "epoch": 2.3510560253291635, + "grad_norm": 1.17366623878479, + "learning_rate": 4.3265505746682624e-05, + "loss": 0.147, + "step": 60890 + }, + { + "epoch": 2.35144214062319, + "grad_norm": 1.3020472526550293, + "learning_rate": 4.323976472708084e-05, + "loss": 0.284, + "step": 60900 + }, + { + "epoch": 2.351828255917217, + "grad_norm": 0.9030712842941284, + "learning_rate": 4.321402370747906e-05, + "loss": 0.2745, + "step": 60910 + }, + { + "epoch": 2.3522143712112435, + "grad_norm": 0.5040395259857178, + "learning_rate": 4.318828268787727e-05, + "loss": 0.1001, + "step": 60920 + }, + { + "epoch": 2.3526004865052705, + "grad_norm": 0.2728300094604492, + "learning_rate": 4.316254166827548e-05, + "loss": 0.1126, + "step": 60930 + }, + { + "epoch": 2.352986601799297, + "grad_norm": 0.5255390405654907, + "learning_rate": 4.31368006486737e-05, + "loss": 0.1743, + "step": 60940 + }, + { + "epoch": 2.353372717093324, + "grad_norm": 0.05678205192089081, + "learning_rate": 4.311105962907191e-05, + "loss": 0.2214, + "step": 60950 + }, + { + "epoch": 2.353758832387351, + "grad_norm": 0.9271873235702515, + "learning_rate": 4.3085318609470124e-05, + "loss": 0.2263, + "step": 60960 + }, + { + "epoch": 2.3541449476813776, + "grad_norm": 1.0438083410263062, + "learning_rate": 4.305957758986834e-05, + "loss": 0.3506, + "step": 60970 + }, + { + "epoch": 2.3545310629754046, + "grad_norm": 1.231704831123352, + "learning_rate": 4.303383657026655e-05, + "loss": 0.2058, + "step": 60980 + }, + { + "epoch": 2.354917178269431, + "grad_norm": 1.4421464204788208, + "learning_rate": 4.3008095550664764e-05, + "loss": 0.182, + "step": 60990 + }, + { + "epoch": 2.355303293563458, + "grad_norm": 0.883007287979126, + "learning_rate": 4.298235453106298e-05, + "loss": 0.1714, + "step": 61000 + }, + { + "epoch": 2.3556894088574847, + "grad_norm": 0.49014022946357727, + "learning_rate": 4.295661351146119e-05, + "loss": 0.0846, + "step": 61010 + }, + { + "epoch": 2.3560755241515117, + "grad_norm": 1.3218421936035156, + "learning_rate": 4.2930872491859404e-05, + "loss": 0.0946, + "step": 61020 + }, + { + "epoch": 2.3564616394455387, + "grad_norm": 0.08953634649515152, + "learning_rate": 4.290513147225762e-05, + "loss": 0.2867, + "step": 61030 + }, + { + "epoch": 2.356847754739565, + "grad_norm": 1.4117354154586792, + "learning_rate": 4.287939045265583e-05, + "loss": 0.2493, + "step": 61040 + }, + { + "epoch": 2.357233870033592, + "grad_norm": 0.7844822406768799, + "learning_rate": 4.2853649433054044e-05, + "loss": 0.2385, + "step": 61050 + }, + { + "epoch": 2.3576199853276187, + "grad_norm": 0.0865604355931282, + "learning_rate": 4.2827908413452264e-05, + "loss": 0.0795, + "step": 61060 + }, + { + "epoch": 2.3580061006216457, + "grad_norm": 0.49625343084335327, + "learning_rate": 4.280216739385047e-05, + "loss": 0.0894, + "step": 61070 + }, + { + "epoch": 2.3583922159156723, + "grad_norm": 0.039102040231227875, + "learning_rate": 4.2776426374248684e-05, + "loss": 0.1134, + "step": 61080 + }, + { + "epoch": 2.3587783312096993, + "grad_norm": 0.6378281116485596, + "learning_rate": 4.2750685354646904e-05, + "loss": 0.2716, + "step": 61090 + }, + { + "epoch": 2.3591644465037263, + "grad_norm": 1.9550119638442993, + "learning_rate": 
4.272494433504511e-05, + "loss": 0.2008, + "step": 61100 + }, + { + "epoch": 2.359550561797753, + "grad_norm": 0.18372145295143127, + "learning_rate": 4.2699203315443324e-05, + "loss": 0.1762, + "step": 61110 + }, + { + "epoch": 2.35993667709178, + "grad_norm": 0.24232423305511475, + "learning_rate": 4.2673462295841544e-05, + "loss": 0.3316, + "step": 61120 + }, + { + "epoch": 2.3603227923858063, + "grad_norm": 1.474071741104126, + "learning_rate": 4.264772127623976e-05, + "loss": 0.1938, + "step": 61130 + }, + { + "epoch": 2.3607089076798333, + "grad_norm": 0.43742164969444275, + "learning_rate": 4.2621980256637964e-05, + "loss": 0.1667, + "step": 61140 + }, + { + "epoch": 2.36109502297386, + "grad_norm": 0.505805253982544, + "learning_rate": 4.259623923703618e-05, + "loss": 0.2649, + "step": 61150 + }, + { + "epoch": 2.361481138267887, + "grad_norm": 0.8793296813964844, + "learning_rate": 4.25704982174344e-05, + "loss": 0.1231, + "step": 61160 + }, + { + "epoch": 2.3618672535619134, + "grad_norm": 0.19883646070957184, + "learning_rate": 4.254475719783261e-05, + "loss": 0.163, + "step": 61170 + }, + { + "epoch": 2.3622533688559404, + "grad_norm": 0.4133305847644806, + "learning_rate": 4.251901617823082e-05, + "loss": 0.1632, + "step": 61180 + }, + { + "epoch": 2.362639484149967, + "grad_norm": 0.8530174493789673, + "learning_rate": 4.249327515862904e-05, + "loss": 0.0851, + "step": 61190 + }, + { + "epoch": 2.363025599443994, + "grad_norm": 1.6462198495864868, + "learning_rate": 4.246753413902725e-05, + "loss": 0.2051, + "step": 61200 + }, + { + "epoch": 2.363411714738021, + "grad_norm": 1.284153699874878, + "learning_rate": 4.244179311942546e-05, + "loss": 0.227, + "step": 61210 + }, + { + "epoch": 2.3637978300320475, + "grad_norm": 0.5583304166793823, + "learning_rate": 4.241605209982368e-05, + "loss": 0.0665, + "step": 61220 + }, + { + "epoch": 2.3641839453260745, + "grad_norm": 1.0726197957992554, + "learning_rate": 4.239031108022189e-05, + "loss": 0.1465, + "step": 61230 + }, + { + "epoch": 2.364570060620101, + "grad_norm": 0.15589381754398346, + "learning_rate": 4.2364570060620104e-05, + "loss": 0.1599, + "step": 61240 + }, + { + "epoch": 2.364956175914128, + "grad_norm": 1.9759862422943115, + "learning_rate": 4.233882904101832e-05, + "loss": 0.2904, + "step": 61250 + }, + { + "epoch": 2.3653422912081545, + "grad_norm": 0.20566493272781372, + "learning_rate": 4.231308802141653e-05, + "loss": 0.2447, + "step": 61260 + }, + { + "epoch": 2.3657284065021815, + "grad_norm": 0.33343741297721863, + "learning_rate": 4.2287347001814744e-05, + "loss": 0.2157, + "step": 61270 + }, + { + "epoch": 2.3661145217962085, + "grad_norm": 0.6890573501586914, + "learning_rate": 4.226160598221296e-05, + "loss": 0.1866, + "step": 61280 + }, + { + "epoch": 2.366500637090235, + "grad_norm": 0.1372109055519104, + "learning_rate": 4.223586496261117e-05, + "loss": 0.1851, + "step": 61290 + }, + { + "epoch": 2.366886752384262, + "grad_norm": 0.8812543749809265, + "learning_rate": 4.2210123943009384e-05, + "loss": 0.1186, + "step": 61300 + }, + { + "epoch": 2.3672728676782886, + "grad_norm": 0.7651077508926392, + "learning_rate": 4.2184382923407604e-05, + "loss": 0.1168, + "step": 61310 + }, + { + "epoch": 2.3676589829723156, + "grad_norm": 0.886715292930603, + "learning_rate": 4.215864190380581e-05, + "loss": 0.1403, + "step": 61320 + }, + { + "epoch": 2.368045098266342, + "grad_norm": 1.4525467157363892, + "learning_rate": 4.2132900884204024e-05, + "loss": 0.0951, + "step": 61330 + }, + { + "epoch": 
2.368431213560369, + "grad_norm": 1.490551233291626, + "learning_rate": 4.2107159864602244e-05, + "loss": 0.1127, + "step": 61340 + }, + { + "epoch": 2.368817328854396, + "grad_norm": 1.7452077865600586, + "learning_rate": 4.208141884500045e-05, + "loss": 0.0958, + "step": 61350 + }, + { + "epoch": 2.3692034441484227, + "grad_norm": 1.6857271194458008, + "learning_rate": 4.2055677825398664e-05, + "loss": 0.1731, + "step": 61360 + }, + { + "epoch": 2.3695895594424496, + "grad_norm": 0.5354145765304565, + "learning_rate": 4.2029936805796884e-05, + "loss": 0.1051, + "step": 61370 + }, + { + "epoch": 2.369975674736476, + "grad_norm": 0.18171580135822296, + "learning_rate": 4.20041957861951e-05, + "loss": 0.1761, + "step": 61380 + }, + { + "epoch": 2.370361790030503, + "grad_norm": 1.021549940109253, + "learning_rate": 4.1978454766593303e-05, + "loss": 0.1949, + "step": 61390 + }, + { + "epoch": 2.3707479053245297, + "grad_norm": 1.4387668371200562, + "learning_rate": 4.195271374699152e-05, + "loss": 0.1864, + "step": 61400 + }, + { + "epoch": 2.3711340206185567, + "grad_norm": 0.24176666140556335, + "learning_rate": 4.192697272738974e-05, + "loss": 0.2683, + "step": 61410 + }, + { + "epoch": 2.3715201359125837, + "grad_norm": 1.2240315675735474, + "learning_rate": 4.190123170778795e-05, + "loss": 0.195, + "step": 61420 + }, + { + "epoch": 2.3719062512066102, + "grad_norm": 2.242389440536499, + "learning_rate": 4.187549068818616e-05, + "loss": 0.1074, + "step": 61430 + }, + { + "epoch": 2.3722923665006372, + "grad_norm": 0.7379412055015564, + "learning_rate": 4.184974966858438e-05, + "loss": 0.1394, + "step": 61440 + }, + { + "epoch": 2.372678481794664, + "grad_norm": 1.3384835720062256, + "learning_rate": 4.182400864898259e-05, + "loss": 0.248, + "step": 61450 + }, + { + "epoch": 2.3730645970886908, + "grad_norm": 0.23063971102237701, + "learning_rate": 4.17982676293808e-05, + "loss": 0.1458, + "step": 61460 + }, + { + "epoch": 2.3734507123827173, + "grad_norm": 0.6873703598976135, + "learning_rate": 4.177252660977902e-05, + "loss": 0.1315, + "step": 61470 + }, + { + "epoch": 2.3738368276767443, + "grad_norm": 1.462497591972351, + "learning_rate": 4.174678559017723e-05, + "loss": 0.1031, + "step": 61480 + }, + { + "epoch": 2.3742229429707713, + "grad_norm": 1.403594732284546, + "learning_rate": 4.172104457057544e-05, + "loss": 0.1962, + "step": 61490 + }, + { + "epoch": 2.374609058264798, + "grad_norm": 3.3132827281951904, + "learning_rate": 4.169530355097366e-05, + "loss": 0.243, + "step": 61500 + }, + { + "epoch": 2.374995173558825, + "grad_norm": 0.5474012494087219, + "learning_rate": 4.166956253137187e-05, + "loss": 0.1087, + "step": 61510 + }, + { + "epoch": 2.3753812888528514, + "grad_norm": 1.2518501281738281, + "learning_rate": 4.164382151177008e-05, + "loss": 0.1119, + "step": 61520 + }, + { + "epoch": 2.3757674041468784, + "grad_norm": 0.10591934621334076, + "learning_rate": 4.16180804921683e-05, + "loss": 0.238, + "step": 61530 + }, + { + "epoch": 2.376153519440905, + "grad_norm": 0.7095358967781067, + "learning_rate": 4.159233947256651e-05, + "loss": 0.1082, + "step": 61540 + }, + { + "epoch": 2.376539634734932, + "grad_norm": 0.09203200787305832, + "learning_rate": 4.156659845296472e-05, + "loss": 0.1178, + "step": 61550 + }, + { + "epoch": 2.376925750028959, + "grad_norm": 1.4663885831832886, + "learning_rate": 4.1540857433362937e-05, + "loss": 0.2273, + "step": 61560 + }, + { + "epoch": 2.3773118653229854, + "grad_norm": 0.9895615577697754, + "learning_rate": 
4.151511641376115e-05, + "loss": 0.1222, + "step": 61570 + }, + { + "epoch": 2.3776979806170124, + "grad_norm": 0.7987017631530762, + "learning_rate": 4.148937539415936e-05, + "loss": 0.2083, + "step": 61580 + }, + { + "epoch": 2.378084095911039, + "grad_norm": 0.662470817565918, + "learning_rate": 4.146363437455758e-05, + "loss": 0.1489, + "step": 61590 + }, + { + "epoch": 2.378470211205066, + "grad_norm": 0.703076183795929, + "learning_rate": 4.143789335495579e-05, + "loss": 0.2176, + "step": 61600 + }, + { + "epoch": 2.3788563264990925, + "grad_norm": 0.6900975704193115, + "learning_rate": 4.1412152335354e-05, + "loss": 0.0967, + "step": 61610 + }, + { + "epoch": 2.3792424417931195, + "grad_norm": 0.4006218910217285, + "learning_rate": 4.138641131575222e-05, + "loss": 0.1943, + "step": 61620 + }, + { + "epoch": 2.379628557087146, + "grad_norm": 0.8131549954414368, + "learning_rate": 4.1360670296150437e-05, + "loss": 0.1458, + "step": 61630 + }, + { + "epoch": 2.380014672381173, + "grad_norm": 2.572120189666748, + "learning_rate": 4.133492927654864e-05, + "loss": 0.1892, + "step": 61640 + }, + { + "epoch": 2.3804007876751996, + "grad_norm": 1.40338134765625, + "learning_rate": 4.1309188256946856e-05, + "loss": 0.1081, + "step": 61650 + }, + { + "epoch": 2.3807869029692266, + "grad_norm": 2.1713101863861084, + "learning_rate": 4.1283447237345076e-05, + "loss": 0.1839, + "step": 61660 + }, + { + "epoch": 2.3811730182632536, + "grad_norm": 1.1773313283920288, + "learning_rate": 4.125770621774328e-05, + "loss": 0.1065, + "step": 61670 + }, + { + "epoch": 2.38155913355728, + "grad_norm": 2.314040184020996, + "learning_rate": 4.1231965198141496e-05, + "loss": 0.2298, + "step": 61680 + }, + { + "epoch": 2.381945248851307, + "grad_norm": 1.7708461284637451, + "learning_rate": 4.1206224178539716e-05, + "loss": 0.1068, + "step": 61690 + }, + { + "epoch": 2.3823313641453336, + "grad_norm": 0.168818861246109, + "learning_rate": 4.118048315893793e-05, + "loss": 0.0979, + "step": 61700 + }, + { + "epoch": 2.3827174794393606, + "grad_norm": 2.80302357673645, + "learning_rate": 4.1154742139336136e-05, + "loss": 0.3275, + "step": 61710 + }, + { + "epoch": 2.383103594733387, + "grad_norm": 0.5709852576255798, + "learning_rate": 4.1129001119734356e-05, + "loss": 0.1523, + "step": 61720 + }, + { + "epoch": 2.383489710027414, + "grad_norm": 0.4733193516731262, + "learning_rate": 4.110326010013257e-05, + "loss": 0.0958, + "step": 61730 + }, + { + "epoch": 2.383875825321441, + "grad_norm": 0.3468289375305176, + "learning_rate": 4.107751908053078e-05, + "loss": 0.1437, + "step": 61740 + }, + { + "epoch": 2.3842619406154677, + "grad_norm": 0.7683085203170776, + "learning_rate": 4.1051778060928996e-05, + "loss": 0.1915, + "step": 61750 + }, + { + "epoch": 2.3846480559094947, + "grad_norm": 0.9539376497268677, + "learning_rate": 4.102603704132721e-05, + "loss": 0.13, + "step": 61760 + }, + { + "epoch": 2.3850341712035212, + "grad_norm": 1.247579574584961, + "learning_rate": 4.100029602172542e-05, + "loss": 0.1212, + "step": 61770 + }, + { + "epoch": 2.3854202864975482, + "grad_norm": 0.5379541516304016, + "learning_rate": 4.0974555002123636e-05, + "loss": 0.4552, + "step": 61780 + }, + { + "epoch": 2.3858064017915748, + "grad_norm": 1.4555822610855103, + "learning_rate": 4.094881398252185e-05, + "loss": 0.1677, + "step": 61790 + }, + { + "epoch": 2.3861925170856018, + "grad_norm": 2.210245132446289, + "learning_rate": 4.092307296292006e-05, + "loss": 0.1099, + "step": 61800 + }, + { + "epoch": 2.3865786323796288, 
+ "grad_norm": 0.2830033004283905, + "learning_rate": 4.0897331943318276e-05, + "loss": 0.0844, + "step": 61810 + }, + { + "epoch": 2.3869647476736553, + "grad_norm": 0.5923789739608765, + "learning_rate": 4.087159092371649e-05, + "loss": 0.0668, + "step": 61820 + }, + { + "epoch": 2.3873508629676823, + "grad_norm": 2.1576321125030518, + "learning_rate": 4.08458499041147e-05, + "loss": 0.2333, + "step": 61830 + }, + { + "epoch": 2.387736978261709, + "grad_norm": 0.09188230335712433, + "learning_rate": 4.082010888451292e-05, + "loss": 0.1849, + "step": 61840 + }, + { + "epoch": 2.388123093555736, + "grad_norm": 0.5188024640083313, + "learning_rate": 4.079436786491113e-05, + "loss": 0.0781, + "step": 61850 + }, + { + "epoch": 2.3885092088497624, + "grad_norm": 0.33355507254600525, + "learning_rate": 4.076862684530934e-05, + "loss": 0.1886, + "step": 61860 + }, + { + "epoch": 2.3888953241437894, + "grad_norm": 0.25753054022789, + "learning_rate": 4.074288582570756e-05, + "loss": 0.1492, + "step": 61870 + }, + { + "epoch": 2.3892814394378163, + "grad_norm": 0.47389110922813416, + "learning_rate": 4.071714480610577e-05, + "loss": 0.1576, + "step": 61880 + }, + { + "epoch": 2.389667554731843, + "grad_norm": 0.6938667297363281, + "learning_rate": 4.069140378650398e-05, + "loss": 0.1554, + "step": 61890 + }, + { + "epoch": 2.39005367002587, + "grad_norm": 0.5270907282829285, + "learning_rate": 4.06656627669022e-05, + "loss": 0.0905, + "step": 61900 + }, + { + "epoch": 2.3904397853198964, + "grad_norm": 0.972940981388092, + "learning_rate": 4.0639921747300416e-05, + "loss": 0.1008, + "step": 61910 + }, + { + "epoch": 2.3908259006139234, + "grad_norm": 0.3339834213256836, + "learning_rate": 4.061418072769862e-05, + "loss": 0.124, + "step": 61920 + }, + { + "epoch": 2.39121201590795, + "grad_norm": 0.40384066104888916, + "learning_rate": 4.0588439708096836e-05, + "loss": 0.1318, + "step": 61930 + }, + { + "epoch": 2.391598131201977, + "grad_norm": 1.3399138450622559, + "learning_rate": 4.0562698688495056e-05, + "loss": 0.2285, + "step": 61940 + }, + { + "epoch": 2.391984246496004, + "grad_norm": 1.2441486120224, + "learning_rate": 4.053695766889327e-05, + "loss": 0.1842, + "step": 61950 + }, + { + "epoch": 2.3923703617900305, + "grad_norm": 0.18500332534313202, + "learning_rate": 4.0511216649291476e-05, + "loss": 0.124, + "step": 61960 + }, + { + "epoch": 2.3927564770840575, + "grad_norm": 4.335320949554443, + "learning_rate": 4.0485475629689696e-05, + "loss": 0.2101, + "step": 61970 + }, + { + "epoch": 2.393142592378084, + "grad_norm": 1.7215917110443115, + "learning_rate": 4.045973461008791e-05, + "loss": 0.1828, + "step": 61980 + }, + { + "epoch": 2.393528707672111, + "grad_norm": 1.3829667568206787, + "learning_rate": 4.0433993590486116e-05, + "loss": 0.1872, + "step": 61990 + }, + { + "epoch": 2.3939148229661376, + "grad_norm": 0.8047557473182678, + "learning_rate": 4.0408252570884336e-05, + "loss": 0.195, + "step": 62000 + }, + { + "epoch": 2.3943009382601645, + "grad_norm": 0.04885184019804001, + "learning_rate": 4.038251155128255e-05, + "loss": 0.1502, + "step": 62010 + }, + { + "epoch": 2.394687053554191, + "grad_norm": 2.9263839721679688, + "learning_rate": 4.035677053168076e-05, + "loss": 0.2479, + "step": 62020 + }, + { + "epoch": 2.395073168848218, + "grad_norm": 1.2394524812698364, + "learning_rate": 4.0331029512078976e-05, + "loss": 0.1221, + "step": 62030 + }, + { + "epoch": 2.3954592841422446, + "grad_norm": 1.1224110126495361, + "learning_rate": 4.030528849247719e-05, + "loss": 
0.1172, + "step": 62040 + }, + { + "epoch": 2.3958453994362716, + "grad_norm": 1.0132677555084229, + "learning_rate": 4.02795474728754e-05, + "loss": 0.2585, + "step": 62050 + }, + { + "epoch": 2.3962315147302986, + "grad_norm": 1.7612736225128174, + "learning_rate": 4.0253806453273616e-05, + "loss": 0.1449, + "step": 62060 + }, + { + "epoch": 2.396617630024325, + "grad_norm": 2.687474012374878, + "learning_rate": 4.022806543367183e-05, + "loss": 0.2116, + "step": 62070 + }, + { + "epoch": 2.397003745318352, + "grad_norm": 0.9632325768470764, + "learning_rate": 4.020232441407004e-05, + "loss": 0.233, + "step": 62080 + }, + { + "epoch": 2.3973898606123787, + "grad_norm": 0.385966956615448, + "learning_rate": 4.017658339446826e-05, + "loss": 0.2557, + "step": 62090 + }, + { + "epoch": 2.3977759759064057, + "grad_norm": 0.18261398375034332, + "learning_rate": 4.015084237486647e-05, + "loss": 0.0792, + "step": 62100 + }, + { + "epoch": 2.3981620912004322, + "grad_norm": 0.07081570476293564, + "learning_rate": 4.012510135526468e-05, + "loss": 0.121, + "step": 62110 + }, + { + "epoch": 2.398548206494459, + "grad_norm": 1.5726689100265503, + "learning_rate": 4.00993603356629e-05, + "loss": 0.235, + "step": 62120 + }, + { + "epoch": 2.398934321788486, + "grad_norm": 1.1276930570602417, + "learning_rate": 4.007361931606111e-05, + "loss": 0.2191, + "step": 62130 + }, + { + "epoch": 2.3993204370825127, + "grad_norm": 1.8213441371917725, + "learning_rate": 4.004787829645932e-05, + "loss": 0.2588, + "step": 62140 + }, + { + "epoch": 2.3997065523765397, + "grad_norm": 1.2340245246887207, + "learning_rate": 4.002213727685754e-05, + "loss": 0.2816, + "step": 62150 + }, + { + "epoch": 2.4000926676705663, + "grad_norm": 1.6360499858856201, + "learning_rate": 3.9996396257255756e-05, + "loss": 0.1142, + "step": 62160 + }, + { + "epoch": 2.4004787829645933, + "grad_norm": 0.48215198516845703, + "learning_rate": 3.997065523765396e-05, + "loss": 0.1013, + "step": 62170 + }, + { + "epoch": 2.40086489825862, + "grad_norm": 0.04493289813399315, + "learning_rate": 3.9944914218052175e-05, + "loss": 0.2127, + "step": 62180 + }, + { + "epoch": 2.401251013552647, + "grad_norm": 0.3863857686519623, + "learning_rate": 3.9919173198450396e-05, + "loss": 0.1712, + "step": 62190 + }, + { + "epoch": 2.401637128846674, + "grad_norm": 2.209010362625122, + "learning_rate": 3.98934321788486e-05, + "loss": 0.1541, + "step": 62200 + }, + { + "epoch": 2.4020232441407003, + "grad_norm": 0.5304957032203674, + "learning_rate": 3.9867691159246815e-05, + "loss": 0.2147, + "step": 62210 + }, + { + "epoch": 2.4024093594347273, + "grad_norm": 0.660261332988739, + "learning_rate": 3.9841950139645035e-05, + "loss": 0.054, + "step": 62220 + }, + { + "epoch": 2.402795474728754, + "grad_norm": 0.3104497194290161, + "learning_rate": 3.981620912004325e-05, + "loss": 0.1164, + "step": 62230 + }, + { + "epoch": 2.403181590022781, + "grad_norm": 0.6475027799606323, + "learning_rate": 3.9790468100441455e-05, + "loss": 0.1258, + "step": 62240 + }, + { + "epoch": 2.4035677053168074, + "grad_norm": 0.16110478341579437, + "learning_rate": 3.9764727080839675e-05, + "loss": 0.1574, + "step": 62250 + }, + { + "epoch": 2.4039538206108344, + "grad_norm": 2.295118808746338, + "learning_rate": 3.973898606123789e-05, + "loss": 0.1732, + "step": 62260 + }, + { + "epoch": 2.4043399359048614, + "grad_norm": 1.4980134963989258, + "learning_rate": 3.97132450416361e-05, + "loss": 0.2441, + "step": 62270 + }, + { + "epoch": 2.404726051198888, + "grad_norm": 
1.1637049913406372, + "learning_rate": 3.9687504022034315e-05, + "loss": 0.2177, + "step": 62280 + }, + { + "epoch": 2.405112166492915, + "grad_norm": 0.2586102783679962, + "learning_rate": 3.966176300243253e-05, + "loss": 0.1486, + "step": 62290 + }, + { + "epoch": 2.4054982817869415, + "grad_norm": 1.9430426359176636, + "learning_rate": 3.963602198283074e-05, + "loss": 0.1609, + "step": 62300 + }, + { + "epoch": 2.4058843970809685, + "grad_norm": 1.1216020584106445, + "learning_rate": 3.9610280963228955e-05, + "loss": 0.1192, + "step": 62310 + }, + { + "epoch": 2.406270512374995, + "grad_norm": 0.34984323382377625, + "learning_rate": 3.958453994362717e-05, + "loss": 0.104, + "step": 62320 + }, + { + "epoch": 2.406656627669022, + "grad_norm": 3.057056427001953, + "learning_rate": 3.955879892402538e-05, + "loss": 0.259, + "step": 62330 + }, + { + "epoch": 2.407042742963049, + "grad_norm": 1.8370370864868164, + "learning_rate": 3.9533057904423595e-05, + "loss": 0.2345, + "step": 62340 + }, + { + "epoch": 2.4074288582570755, + "grad_norm": 0.6045883297920227, + "learning_rate": 3.950731688482181e-05, + "loss": 0.188, + "step": 62350 + }, + { + "epoch": 2.4078149735511025, + "grad_norm": 0.20036596059799194, + "learning_rate": 3.948157586522002e-05, + "loss": 0.1039, + "step": 62360 + }, + { + "epoch": 2.408201088845129, + "grad_norm": 0.1816219538450241, + "learning_rate": 3.945583484561824e-05, + "loss": 0.1453, + "step": 62370 + }, + { + "epoch": 2.408587204139156, + "grad_norm": 2.150385856628418, + "learning_rate": 3.943009382601645e-05, + "loss": 0.1803, + "step": 62380 + }, + { + "epoch": 2.4089733194331826, + "grad_norm": 1.5039875507354736, + "learning_rate": 3.940435280641466e-05, + "loss": 0.1951, + "step": 62390 + }, + { + "epoch": 2.4093594347272096, + "grad_norm": 1.9124608039855957, + "learning_rate": 3.937861178681288e-05, + "loss": 0.1488, + "step": 62400 + }, + { + "epoch": 2.4097455500212366, + "grad_norm": 0.5086666345596313, + "learning_rate": 3.9352870767211095e-05, + "loss": 0.2198, + "step": 62410 + }, + { + "epoch": 2.410131665315263, + "grad_norm": 0.7198240160942078, + "learning_rate": 3.93271297476093e-05, + "loss": 0.104, + "step": 62420 + }, + { + "epoch": 2.41051778060929, + "grad_norm": 0.22373056411743164, + "learning_rate": 3.930138872800752e-05, + "loss": 0.1572, + "step": 62430 + }, + { + "epoch": 2.4109038959033167, + "grad_norm": 0.58324134349823, + "learning_rate": 3.9275647708405735e-05, + "loss": 0.1558, + "step": 62440 + }, + { + "epoch": 2.4112900111973437, + "grad_norm": 0.5554331541061401, + "learning_rate": 3.924990668880394e-05, + "loss": 0.1776, + "step": 62450 + }, + { + "epoch": 2.41167612649137, + "grad_norm": 0.4414098262786865, + "learning_rate": 3.9224165669202155e-05, + "loss": 0.1029, + "step": 62460 + }, + { + "epoch": 2.412062241785397, + "grad_norm": 1.496374487876892, + "learning_rate": 3.9198424649600375e-05, + "loss": 0.1948, + "step": 62470 + }, + { + "epoch": 2.4124483570794237, + "grad_norm": 1.6268385648727417, + "learning_rate": 3.917268362999859e-05, + "loss": 0.1853, + "step": 62480 + }, + { + "epoch": 2.4128344723734507, + "grad_norm": 2.2693099975585938, + "learning_rate": 3.9146942610396795e-05, + "loss": 0.2076, + "step": 62490 + }, + { + "epoch": 2.4132205876674773, + "grad_norm": 1.1219795942306519, + "learning_rate": 3.9121201590795015e-05, + "loss": 0.1186, + "step": 62500 + }, + { + "epoch": 2.4136067029615043, + "grad_norm": 0.7887373566627502, + "learning_rate": 3.909546057119323e-05, + "loss": 0.1996, + "step": 
62510 + }, + { + "epoch": 2.4139928182555312, + "grad_norm": 0.256099671125412, + "learning_rate": 3.906971955159144e-05, + "loss": 0.0926, + "step": 62520 + }, + { + "epoch": 2.414378933549558, + "grad_norm": 0.6383737921714783, + "learning_rate": 3.9043978531989655e-05, + "loss": 0.1383, + "step": 62530 + }, + { + "epoch": 2.414765048843585, + "grad_norm": 1.5678856372833252, + "learning_rate": 3.901823751238787e-05, + "loss": 0.1486, + "step": 62540 + }, + { + "epoch": 2.4151511641376113, + "grad_norm": 0.11567826569080353, + "learning_rate": 3.899249649278608e-05, + "loss": 0.1031, + "step": 62550 + }, + { + "epoch": 2.4155372794316383, + "grad_norm": 1.3567986488342285, + "learning_rate": 3.8966755473184295e-05, + "loss": 0.1964, + "step": 62560 + }, + { + "epoch": 2.415923394725665, + "grad_norm": 0.1885988563299179, + "learning_rate": 3.894101445358251e-05, + "loss": 0.0998, + "step": 62570 + }, + { + "epoch": 2.416309510019692, + "grad_norm": 0.7068611979484558, + "learning_rate": 3.891527343398072e-05, + "loss": 0.161, + "step": 62580 + }, + { + "epoch": 2.416695625313719, + "grad_norm": 0.34557539224624634, + "learning_rate": 3.8889532414378935e-05, + "loss": 0.143, + "step": 62590 + }, + { + "epoch": 2.4170817406077454, + "grad_norm": 1.6846903562545776, + "learning_rate": 3.886379139477715e-05, + "loss": 0.189, + "step": 62600 + }, + { + "epoch": 2.4174678559017724, + "grad_norm": 0.9236536026000977, + "learning_rate": 3.883805037517536e-05, + "loss": 0.2385, + "step": 62610 + }, + { + "epoch": 2.417853971195799, + "grad_norm": 2.0287728309631348, + "learning_rate": 3.881230935557358e-05, + "loss": 0.3649, + "step": 62620 + }, + { + "epoch": 2.418240086489826, + "grad_norm": 1.3650734424591064, + "learning_rate": 3.878656833597179e-05, + "loss": 0.1766, + "step": 62630 + }, + { + "epoch": 2.4186262017838525, + "grad_norm": 0.44368478655815125, + "learning_rate": 3.876082731637e-05, + "loss": 0.1456, + "step": 62640 + }, + { + "epoch": 2.4190123170778794, + "grad_norm": 1.93278169631958, + "learning_rate": 3.873508629676822e-05, + "loss": 0.1568, + "step": 62650 + }, + { + "epoch": 2.4193984323719064, + "grad_norm": 0.22828684747219086, + "learning_rate": 3.870934527716643e-05, + "loss": 0.1003, + "step": 62660 + }, + { + "epoch": 2.419784547665933, + "grad_norm": 0.796909749507904, + "learning_rate": 3.868360425756464e-05, + "loss": 0.0777, + "step": 62670 + }, + { + "epoch": 2.42017066295996, + "grad_norm": 0.8624141812324524, + "learning_rate": 3.865786323796286e-05, + "loss": 0.2598, + "step": 62680 + }, + { + "epoch": 2.4205567782539865, + "grad_norm": 1.9208048582077026, + "learning_rate": 3.8632122218361075e-05, + "loss": 0.3543, + "step": 62690 + }, + { + "epoch": 2.4209428935480135, + "grad_norm": 0.8410032987594604, + "learning_rate": 3.860638119875928e-05, + "loss": 0.2511, + "step": 62700 + }, + { + "epoch": 2.42132900884204, + "grad_norm": 0.2925935387611389, + "learning_rate": 3.8580640179157494e-05, + "loss": 0.1085, + "step": 62710 + }, + { + "epoch": 2.421715124136067, + "grad_norm": 2.3135852813720703, + "learning_rate": 3.8554899159555715e-05, + "loss": 0.235, + "step": 62720 + }, + { + "epoch": 2.422101239430094, + "grad_norm": 1.9191985130310059, + "learning_rate": 3.852915813995393e-05, + "loss": 0.1893, + "step": 62730 + }, + { + "epoch": 2.4224873547241206, + "grad_norm": 1.2339379787445068, + "learning_rate": 3.8503417120352134e-05, + "loss": 0.1778, + "step": 62740 + }, + { + "epoch": 2.4228734700181476, + "grad_norm": 0.09992465376853943, + 
"learning_rate": 3.8477676100750355e-05, + "loss": 0.1301, + "step": 62750 + }, + { + "epoch": 2.423259585312174, + "grad_norm": 0.13811521232128143, + "learning_rate": 3.845193508114857e-05, + "loss": 0.1062, + "step": 62760 + }, + { + "epoch": 2.423645700606201, + "grad_norm": 2.9451167583465576, + "learning_rate": 3.8426194061546774e-05, + "loss": 0.086, + "step": 62770 + }, + { + "epoch": 2.4240318159002276, + "grad_norm": 0.23093783855438232, + "learning_rate": 3.8400453041944994e-05, + "loss": 0.0962, + "step": 62780 + }, + { + "epoch": 2.4244179311942546, + "grad_norm": 0.10238637775182724, + "learning_rate": 3.837471202234321e-05, + "loss": 0.1338, + "step": 62790 + }, + { + "epoch": 2.4248040464882816, + "grad_norm": 0.4013136029243469, + "learning_rate": 3.834897100274142e-05, + "loss": 0.3002, + "step": 62800 + }, + { + "epoch": 2.425190161782308, + "grad_norm": 0.3969825804233551, + "learning_rate": 3.8323229983139634e-05, + "loss": 0.2349, + "step": 62810 + }, + { + "epoch": 2.425576277076335, + "grad_norm": 0.9879517555236816, + "learning_rate": 3.829748896353785e-05, + "loss": 0.2564, + "step": 62820 + }, + { + "epoch": 2.4259623923703617, + "grad_norm": 1.5865511894226074, + "learning_rate": 3.827174794393606e-05, + "loss": 0.19, + "step": 62830 + }, + { + "epoch": 2.4263485076643887, + "grad_norm": 2.8750438690185547, + "learning_rate": 3.8246006924334274e-05, + "loss": 0.1515, + "step": 62840 + }, + { + "epoch": 2.4267346229584152, + "grad_norm": 0.9555363059043884, + "learning_rate": 3.822026590473249e-05, + "loss": 0.137, + "step": 62850 + }, + { + "epoch": 2.4271207382524422, + "grad_norm": 0.10716754198074341, + "learning_rate": 3.81945248851307e-05, + "loss": 0.2019, + "step": 62860 + }, + { + "epoch": 2.4275068535464692, + "grad_norm": 2.1091630458831787, + "learning_rate": 3.8168783865528914e-05, + "loss": 0.2126, + "step": 62870 + }, + { + "epoch": 2.4278929688404958, + "grad_norm": 2.2296948432922363, + "learning_rate": 3.814304284592713e-05, + "loss": 0.2098, + "step": 62880 + }, + { + "epoch": 2.4282790841345228, + "grad_norm": 1.1145920753479004, + "learning_rate": 3.811730182632534e-05, + "loss": 0.0998, + "step": 62890 + }, + { + "epoch": 2.4286651994285493, + "grad_norm": 0.6394558548927307, + "learning_rate": 3.809156080672356e-05, + "loss": 0.1766, + "step": 62900 + }, + { + "epoch": 2.4290513147225763, + "grad_norm": 0.16177436709403992, + "learning_rate": 3.806581978712177e-05, + "loss": 0.1776, + "step": 62910 + }, + { + "epoch": 2.429437430016603, + "grad_norm": 1.643153190612793, + "learning_rate": 3.804007876751998e-05, + "loss": 0.1908, + "step": 62920 + }, + { + "epoch": 2.42982354531063, + "grad_norm": 3.011587619781494, + "learning_rate": 3.80143377479182e-05, + "loss": 0.1595, + "step": 62930 + }, + { + "epoch": 2.4302096606046564, + "grad_norm": 1.1857898235321045, + "learning_rate": 3.7988596728316414e-05, + "loss": 0.1117, + "step": 62940 + }, + { + "epoch": 2.4305957758986834, + "grad_norm": 0.984836995601654, + "learning_rate": 3.796285570871462e-05, + "loss": 0.0888, + "step": 62950 + }, + { + "epoch": 2.43098189119271, + "grad_norm": 0.2273918092250824, + "learning_rate": 3.7937114689112834e-05, + "loss": 0.0998, + "step": 62960 + }, + { + "epoch": 2.431368006486737, + "grad_norm": 0.6913338303565979, + "learning_rate": 3.7911373669511054e-05, + "loss": 0.1549, + "step": 62970 + }, + { + "epoch": 2.431754121780764, + "grad_norm": 0.2670879364013672, + "learning_rate": 3.788563264990926e-05, + "loss": 0.1931, + "step": 62980 + }, + { + 
"epoch": 2.4321402370747904, + "grad_norm": 0.3435567021369934, + "learning_rate": 3.7859891630307474e-05, + "loss": 0.1872, + "step": 62990 + }, + { + "epoch": 2.4325263523688174, + "grad_norm": 2.289534091949463, + "learning_rate": 3.7834150610705694e-05, + "loss": 0.1371, + "step": 63000 + }, + { + "epoch": 2.432912467662844, + "grad_norm": 0.5831142067909241, + "learning_rate": 3.780840959110391e-05, + "loss": 0.1779, + "step": 63010 + }, + { + "epoch": 2.433298582956871, + "grad_norm": 0.4865301847457886, + "learning_rate": 3.7782668571502114e-05, + "loss": 0.2069, + "step": 63020 + }, + { + "epoch": 2.4336846982508975, + "grad_norm": 0.9294113516807556, + "learning_rate": 3.7756927551900334e-05, + "loss": 0.2059, + "step": 63030 + }, + { + "epoch": 2.4340708135449245, + "grad_norm": 1.044704794883728, + "learning_rate": 3.773118653229855e-05, + "loss": 0.2075, + "step": 63040 + }, + { + "epoch": 2.4344569288389515, + "grad_norm": 0.07156316190958023, + "learning_rate": 3.770544551269676e-05, + "loss": 0.0827, + "step": 63050 + }, + { + "epoch": 2.434843044132978, + "grad_norm": 1.0131940841674805, + "learning_rate": 3.7679704493094974e-05, + "loss": 0.0786, + "step": 63060 + }, + { + "epoch": 2.435229159427005, + "grad_norm": 0.8649851679801941, + "learning_rate": 3.765396347349319e-05, + "loss": 0.1476, + "step": 63070 + }, + { + "epoch": 2.4356152747210316, + "grad_norm": 4.939096927642822, + "learning_rate": 3.76282224538914e-05, + "loss": 0.2188, + "step": 63080 + }, + { + "epoch": 2.4360013900150586, + "grad_norm": 0.4139706790447235, + "learning_rate": 3.7602481434289614e-05, + "loss": 0.281, + "step": 63090 + }, + { + "epoch": 2.436387505309085, + "grad_norm": 0.0399312861263752, + "learning_rate": 3.757674041468783e-05, + "loss": 0.3068, + "step": 63100 + }, + { + "epoch": 2.436773620603112, + "grad_norm": 0.7237934470176697, + "learning_rate": 3.755099939508604e-05, + "loss": 0.2222, + "step": 63110 + }, + { + "epoch": 2.437159735897139, + "grad_norm": 0.16770142316818237, + "learning_rate": 3.7525258375484254e-05, + "loss": 0.1302, + "step": 63120 + }, + { + "epoch": 2.4375458511911656, + "grad_norm": 1.9887669086456299, + "learning_rate": 3.749951735588247e-05, + "loss": 0.0794, + "step": 63130 + }, + { + "epoch": 2.4379319664851926, + "grad_norm": 2.856318712234497, + "learning_rate": 3.747377633628068e-05, + "loss": 0.0996, + "step": 63140 + }, + { + "epoch": 2.438318081779219, + "grad_norm": 0.9471076130867004, + "learning_rate": 3.74480353166789e-05, + "loss": 0.1915, + "step": 63150 + }, + { + "epoch": 2.438704197073246, + "grad_norm": 1.4671097993850708, + "learning_rate": 3.742229429707711e-05, + "loss": 0.1851, + "step": 63160 + }, + { + "epoch": 2.4390903123672727, + "grad_norm": 0.3654942810535431, + "learning_rate": 3.739655327747532e-05, + "loss": 0.1091, + "step": 63170 + }, + { + "epoch": 2.4394764276612997, + "grad_norm": 0.8947110176086426, + "learning_rate": 3.737081225787354e-05, + "loss": 0.1741, + "step": 63180 + }, + { + "epoch": 2.4398625429553267, + "grad_norm": 1.3367135524749756, + "learning_rate": 3.7345071238271754e-05, + "loss": 0.2007, + "step": 63190 + }, + { + "epoch": 2.440248658249353, + "grad_norm": 2.484145164489746, + "learning_rate": 3.731933021866996e-05, + "loss": 0.1794, + "step": 63200 + }, + { + "epoch": 2.44063477354338, + "grad_norm": 1.0260628461837769, + "learning_rate": 3.729358919906818e-05, + "loss": 0.3053, + "step": 63210 + }, + { + "epoch": 2.4410208888374068, + "grad_norm": 1.0322519540786743, + "learning_rate": 
3.7267848179466394e-05, + "loss": 0.1826, + "step": 63220 + }, + { + "epoch": 2.4414070041314337, + "grad_norm": 0.6321549415588379, + "learning_rate": 3.72421071598646e-05, + "loss": 0.1561, + "step": 63230 + }, + { + "epoch": 2.4417931194254603, + "grad_norm": 1.2646952867507935, + "learning_rate": 3.7216366140262814e-05, + "loss": 0.0833, + "step": 63240 + }, + { + "epoch": 2.4421792347194873, + "grad_norm": 1.1415789127349854, + "learning_rate": 3.7190625120661034e-05, + "loss": 0.3039, + "step": 63250 + }, + { + "epoch": 2.4425653500135143, + "grad_norm": 1.143133521080017, + "learning_rate": 3.716488410105925e-05, + "loss": 0.1585, + "step": 63260 + }, + { + "epoch": 2.442951465307541, + "grad_norm": 0.32217004895210266, + "learning_rate": 3.7139143081457453e-05, + "loss": 0.1776, + "step": 63270 + }, + { + "epoch": 2.443337580601568, + "grad_norm": 0.04204453527927399, + "learning_rate": 3.7113402061855674e-05, + "loss": 0.1966, + "step": 63280 + }, + { + "epoch": 2.4437236958955943, + "grad_norm": 3.0139830112457275, + "learning_rate": 3.708766104225389e-05, + "loss": 0.2323, + "step": 63290 + }, + { + "epoch": 2.4441098111896213, + "grad_norm": 0.6170455813407898, + "learning_rate": 3.70619200226521e-05, + "loss": 0.18, + "step": 63300 + }, + { + "epoch": 2.444495926483648, + "grad_norm": 0.08699564635753632, + "learning_rate": 3.7036179003050313e-05, + "loss": 0.2548, + "step": 63310 + }, + { + "epoch": 2.444882041777675, + "grad_norm": 0.18672427535057068, + "learning_rate": 3.701043798344853e-05, + "loss": 0.2081, + "step": 63320 + }, + { + "epoch": 2.4452681570717014, + "grad_norm": 0.32661938667297363, + "learning_rate": 3.698469696384674e-05, + "loss": 0.1391, + "step": 63330 + }, + { + "epoch": 2.4456542723657284, + "grad_norm": 0.521452009677887, + "learning_rate": 3.6958955944244953e-05, + "loss": 0.1688, + "step": 63340 + }, + { + "epoch": 2.446040387659755, + "grad_norm": 1.4832172393798828, + "learning_rate": 3.693321492464317e-05, + "loss": 0.1051, + "step": 63350 + }, + { + "epoch": 2.446426502953782, + "grad_norm": 0.4200538694858551, + "learning_rate": 3.690747390504138e-05, + "loss": 0.1822, + "step": 63360 + }, + { + "epoch": 2.446812618247809, + "grad_norm": 0.48908549547195435, + "learning_rate": 3.688173288543959e-05, + "loss": 0.0922, + "step": 63370 + }, + { + "epoch": 2.4471987335418355, + "grad_norm": 1.988203525543213, + "learning_rate": 3.685599186583781e-05, + "loss": 0.1412, + "step": 63380 + }, + { + "epoch": 2.4475848488358625, + "grad_norm": 0.8950991630554199, + "learning_rate": 3.683025084623602e-05, + "loss": 0.2411, + "step": 63390 + }, + { + "epoch": 2.447970964129889, + "grad_norm": 1.0535110235214233, + "learning_rate": 3.680450982663424e-05, + "loss": 0.1539, + "step": 63400 + }, + { + "epoch": 2.448357079423916, + "grad_norm": 1.7036796808242798, + "learning_rate": 3.6778768807032447e-05, + "loss": 0.2954, + "step": 63410 + }, + { + "epoch": 2.4487431947179426, + "grad_norm": 0.32762983441352844, + "learning_rate": 3.675302778743066e-05, + "loss": 0.1721, + "step": 63420 + }, + { + "epoch": 2.4491293100119695, + "grad_norm": 3.3198564052581787, + "learning_rate": 3.672728676782888e-05, + "loss": 0.3087, + "step": 63430 + }, + { + "epoch": 2.4495154253059965, + "grad_norm": 2.290881872177124, + "learning_rate": 3.6701545748227087e-05, + "loss": 0.0957, + "step": 63440 + }, + { + "epoch": 2.449901540600023, + "grad_norm": 1.985274076461792, + "learning_rate": 3.66758047286253e-05, + "loss": 0.146, + "step": 63450 + }, + { + "epoch": 
2.45028765589405, + "grad_norm": 1.5870091915130615, + "learning_rate": 3.665006370902352e-05, + "loss": 0.179, + "step": 63460 + }, + { + "epoch": 2.4506737711880766, + "grad_norm": 0.29243603348731995, + "learning_rate": 3.662432268942173e-05, + "loss": 0.2421, + "step": 63470 + }, + { + "epoch": 2.4510598864821036, + "grad_norm": 1.050746202468872, + "learning_rate": 3.659858166981994e-05, + "loss": 0.1841, + "step": 63480 + }, + { + "epoch": 2.45144600177613, + "grad_norm": 0.9252954721450806, + "learning_rate": 3.657284065021815e-05, + "loss": 0.294, + "step": 63490 + }, + { + "epoch": 2.451832117070157, + "grad_norm": 1.6930452585220337, + "learning_rate": 3.654709963061637e-05, + "loss": 0.083, + "step": 63500 + }, + { + "epoch": 2.452218232364184, + "grad_norm": 1.2885856628417969, + "learning_rate": 3.6521358611014586e-05, + "loss": 0.1526, + "step": 63510 + }, + { + "epoch": 2.4526043476582107, + "grad_norm": 0.19372563064098358, + "learning_rate": 3.649561759141279e-05, + "loss": 0.1417, + "step": 63520 + }, + { + "epoch": 2.4529904629522377, + "grad_norm": 0.9957149624824524, + "learning_rate": 3.646987657181101e-05, + "loss": 0.2321, + "step": 63530 + }, + { + "epoch": 2.453376578246264, + "grad_norm": 0.8055297136306763, + "learning_rate": 3.6444135552209226e-05, + "loss": 0.178, + "step": 63540 + }, + { + "epoch": 2.453762693540291, + "grad_norm": 0.37042102217674255, + "learning_rate": 3.641839453260743e-05, + "loss": 0.1533, + "step": 63550 + }, + { + "epoch": 2.4541488088343177, + "grad_norm": 0.09681963920593262, + "learning_rate": 3.639265351300565e-05, + "loss": 0.0807, + "step": 63560 + }, + { + "epoch": 2.4545349241283447, + "grad_norm": 0.17449086904525757, + "learning_rate": 3.6366912493403866e-05, + "loss": 0.0819, + "step": 63570 + }, + { + "epoch": 2.4549210394223717, + "grad_norm": 0.9616051316261292, + "learning_rate": 3.634117147380208e-05, + "loss": 0.0952, + "step": 63580 + }, + { + "epoch": 2.4553071547163983, + "grad_norm": 3.450044870376587, + "learning_rate": 3.631543045420029e-05, + "loss": 0.247, + "step": 63590 + }, + { + "epoch": 2.4556932700104253, + "grad_norm": 0.5883270502090454, + "learning_rate": 3.6289689434598506e-05, + "loss": 0.105, + "step": 63600 + }, + { + "epoch": 2.456079385304452, + "grad_norm": 1.2351908683776855, + "learning_rate": 3.626394841499672e-05, + "loss": 0.1978, + "step": 63610 + }, + { + "epoch": 2.456465500598479, + "grad_norm": 0.08615940809249878, + "learning_rate": 3.623820739539493e-05, + "loss": 0.0366, + "step": 63620 + }, + { + "epoch": 2.4568516158925053, + "grad_norm": 0.758092999458313, + "learning_rate": 3.6212466375793146e-05, + "loss": 0.0538, + "step": 63630 + }, + { + "epoch": 2.4572377311865323, + "grad_norm": 0.2542964220046997, + "learning_rate": 3.618672535619136e-05, + "loss": 0.217, + "step": 63640 + }, + { + "epoch": 2.4576238464805593, + "grad_norm": 1.8404854536056519, + "learning_rate": 3.616098433658957e-05, + "loss": 0.1246, + "step": 63650 + }, + { + "epoch": 2.458009961774586, + "grad_norm": 0.06438548862934113, + "learning_rate": 3.6135243316987786e-05, + "loss": 0.1816, + "step": 63660 + }, + { + "epoch": 2.458396077068613, + "grad_norm": 1.4888163805007935, + "learning_rate": 3.6109502297386e-05, + "loss": 0.1496, + "step": 63670 + }, + { + "epoch": 2.4587821923626394, + "grad_norm": 0.06935808062553406, + "learning_rate": 3.608376127778422e-05, + "loss": 0.1095, + "step": 63680 + }, + { + "epoch": 2.4591683076566664, + "grad_norm": 0.9536172151565552, + "learning_rate": 
3.6058020258182426e-05, + "loss": 0.1572, + "step": 63690 + }, + { + "epoch": 2.459554422950693, + "grad_norm": 3.4437315464019775, + "learning_rate": 3.603227923858064e-05, + "loss": 0.2611, + "step": 63700 + }, + { + "epoch": 2.45994053824472, + "grad_norm": 2.3220934867858887, + "learning_rate": 3.600653821897886e-05, + "loss": 0.2999, + "step": 63710 + }, + { + "epoch": 2.460326653538747, + "grad_norm": 1.1044209003448486, + "learning_rate": 3.598079719937707e-05, + "loss": 0.0936, + "step": 63720 + }, + { + "epoch": 2.4607127688327735, + "grad_norm": 2.439162015914917, + "learning_rate": 3.595505617977528e-05, + "loss": 0.2898, + "step": 63730 + }, + { + "epoch": 2.4610988841268004, + "grad_norm": 0.9153732061386108, + "learning_rate": 3.59293151601735e-05, + "loss": 0.0976, + "step": 63740 + }, + { + "epoch": 2.461484999420827, + "grad_norm": 0.019965412095189095, + "learning_rate": 3.590357414057171e-05, + "loss": 0.1132, + "step": 63750 + }, + { + "epoch": 2.461871114714854, + "grad_norm": 1.2829655408859253, + "learning_rate": 3.587783312096992e-05, + "loss": 0.109, + "step": 63760 + }, + { + "epoch": 2.4622572300088805, + "grad_norm": 1.2905359268188477, + "learning_rate": 3.585209210136813e-05, + "loss": 0.288, + "step": 63770 + }, + { + "epoch": 2.4626433453029075, + "grad_norm": 0.6175247430801392, + "learning_rate": 3.582635108176635e-05, + "loss": 0.1877, + "step": 63780 + }, + { + "epoch": 2.463029460596934, + "grad_norm": 0.15092957019805908, + "learning_rate": 3.5800610062164566e-05, + "loss": 0.1871, + "step": 63790 + }, + { + "epoch": 2.463415575890961, + "grad_norm": 0.4543861746788025, + "learning_rate": 3.577486904256277e-05, + "loss": 0.1566, + "step": 63800 + }, + { + "epoch": 2.4638016911849876, + "grad_norm": 2.4032862186431885, + "learning_rate": 3.574912802296099e-05, + "loss": 0.338, + "step": 63810 + }, + { + "epoch": 2.4641878064790146, + "grad_norm": 0.4644568860530853, + "learning_rate": 3.5723387003359206e-05, + "loss": 0.4347, + "step": 63820 + }, + { + "epoch": 2.4645739217730416, + "grad_norm": 2.190516233444214, + "learning_rate": 3.569764598375742e-05, + "loss": 0.1055, + "step": 63830 + }, + { + "epoch": 2.464960037067068, + "grad_norm": 0.18661876022815704, + "learning_rate": 3.567190496415563e-05, + "loss": 0.4476, + "step": 63840 + }, + { + "epoch": 2.465346152361095, + "grad_norm": 0.7265095114707947, + "learning_rate": 3.5646163944553846e-05, + "loss": 0.1121, + "step": 63850 + }, + { + "epoch": 2.4657322676551217, + "grad_norm": 0.2888505160808563, + "learning_rate": 3.562042292495206e-05, + "loss": 0.1208, + "step": 63860 + }, + { + "epoch": 2.4661183829491486, + "grad_norm": 0.9269798398017883, + "learning_rate": 3.559468190535027e-05, + "loss": 0.1376, + "step": 63870 + }, + { + "epoch": 2.466504498243175, + "grad_norm": 0.048346057534217834, + "learning_rate": 3.5568940885748486e-05, + "loss": 0.3057, + "step": 63880 + }, + { + "epoch": 2.466890613537202, + "grad_norm": 1.5524492263793945, + "learning_rate": 3.55431998661467e-05, + "loss": 0.077, + "step": 63890 + }, + { + "epoch": 2.467276728831229, + "grad_norm": 0.7234669327735901, + "learning_rate": 3.551745884654491e-05, + "loss": 0.1701, + "step": 63900 + }, + { + "epoch": 2.4676628441252557, + "grad_norm": 0.9384757280349731, + "learning_rate": 3.5491717826943126e-05, + "loss": 0.1085, + "step": 63910 + }, + { + "epoch": 2.4680489594192827, + "grad_norm": 1.6186715364456177, + "learning_rate": 3.546597680734134e-05, + "loss": 0.1556, + "step": 63920 + }, + { + "epoch": 
2.4684350747133093, + "grad_norm": 1.2746638059616089, + "learning_rate": 3.544023578773956e-05, + "loss": 0.2306, + "step": 63930 + }, + { + "epoch": 2.4688211900073362, + "grad_norm": 2.7122621536254883, + "learning_rate": 3.5414494768137766e-05, + "loss": 0.1482, + "step": 63940 + }, + { + "epoch": 2.469207305301363, + "grad_norm": 1.9252265691757202, + "learning_rate": 3.538875374853598e-05, + "loss": 0.2278, + "step": 63950 + }, + { + "epoch": 2.4695934205953898, + "grad_norm": 1.1436023712158203, + "learning_rate": 3.53630127289342e-05, + "loss": 0.1518, + "step": 63960 + }, + { + "epoch": 2.4699795358894168, + "grad_norm": 0.40255314111709595, + "learning_rate": 3.533727170933241e-05, + "loss": 0.067, + "step": 63970 + }, + { + "epoch": 2.4703656511834433, + "grad_norm": 0.5762872695922852, + "learning_rate": 3.531153068973062e-05, + "loss": 0.2584, + "step": 63980 + }, + { + "epoch": 2.4707517664774703, + "grad_norm": 0.977530837059021, + "learning_rate": 3.528578967012884e-05, + "loss": 0.113, + "step": 63990 + }, + { + "epoch": 2.471137881771497, + "grad_norm": 0.5977995991706848, + "learning_rate": 3.526004865052705e-05, + "loss": 0.2179, + "step": 64000 + }, + { + "epoch": 2.471523997065524, + "grad_norm": 0.33924487233161926, + "learning_rate": 3.523430763092526e-05, + "loss": 0.1102, + "step": 64010 + }, + { + "epoch": 2.4719101123595504, + "grad_norm": 1.6377263069152832, + "learning_rate": 3.520856661132347e-05, + "loss": 0.1848, + "step": 64020 + }, + { + "epoch": 2.4722962276535774, + "grad_norm": 0.3962012827396393, + "learning_rate": 3.518282559172169e-05, + "loss": 0.2144, + "step": 64030 + }, + { + "epoch": 2.4726823429476044, + "grad_norm": 2.4445207118988037, + "learning_rate": 3.5157084572119906e-05, + "loss": 0.0798, + "step": 64040 + }, + { + "epoch": 2.473068458241631, + "grad_norm": 1.1374174356460571, + "learning_rate": 3.513134355251811e-05, + "loss": 0.0999, + "step": 64050 + }, + { + "epoch": 2.473454573535658, + "grad_norm": 1.128503680229187, + "learning_rate": 3.510560253291633e-05, + "loss": 0.0811, + "step": 64060 + }, + { + "epoch": 2.4738406888296844, + "grad_norm": 0.7874391078948975, + "learning_rate": 3.5079861513314545e-05, + "loss": 0.1042, + "step": 64070 + }, + { + "epoch": 2.4742268041237114, + "grad_norm": 0.36891883611679077, + "learning_rate": 3.505412049371276e-05, + "loss": 0.2693, + "step": 64080 + }, + { + "epoch": 2.474612919417738, + "grad_norm": 1.4663618803024292, + "learning_rate": 3.502837947411097e-05, + "loss": 0.1469, + "step": 64090 + }, + { + "epoch": 2.474999034711765, + "grad_norm": 1.6910697221755981, + "learning_rate": 3.5002638454509185e-05, + "loss": 0.1506, + "step": 64100 + }, + { + "epoch": 2.475385150005792, + "grad_norm": 2.6914491653442383, + "learning_rate": 3.49768974349074e-05, + "loss": 0.1182, + "step": 64110 + }, + { + "epoch": 2.4757712652998185, + "grad_norm": 0.7562038898468018, + "learning_rate": 3.495115641530561e-05, + "loss": 0.1935, + "step": 64120 + }, + { + "epoch": 2.4761573805938455, + "grad_norm": 0.9019849300384521, + "learning_rate": 3.4925415395703825e-05, + "loss": 0.1456, + "step": 64130 + }, + { + "epoch": 2.476543495887872, + "grad_norm": 1.4131752252578735, + "learning_rate": 3.489967437610204e-05, + "loss": 0.1502, + "step": 64140 + }, + { + "epoch": 2.476929611181899, + "grad_norm": 1.1619501113891602, + "learning_rate": 3.487393335650025e-05, + "loss": 0.1559, + "step": 64150 + }, + { + "epoch": 2.4773157264759256, + "grad_norm": 0.38123640418052673, + "learning_rate": 
3.4848192336898465e-05, + "loss": 0.1273, + "step": 64160 + }, + { + "epoch": 2.4777018417699526, + "grad_norm": 2.8837268352508545, + "learning_rate": 3.482245131729668e-05, + "loss": 0.1264, + "step": 64170 + }, + { + "epoch": 2.4780879570639796, + "grad_norm": 4.011549949645996, + "learning_rate": 3.47967102976949e-05, + "loss": 0.3243, + "step": 64180 + }, + { + "epoch": 2.478474072358006, + "grad_norm": 1.187883973121643, + "learning_rate": 3.4770969278093105e-05, + "loss": 0.1948, + "step": 64190 + }, + { + "epoch": 2.478860187652033, + "grad_norm": 1.1129032373428345, + "learning_rate": 3.474522825849132e-05, + "loss": 0.1226, + "step": 64200 + }, + { + "epoch": 2.4792463029460596, + "grad_norm": 0.3412770926952362, + "learning_rate": 3.471948723888954e-05, + "loss": 0.1282, + "step": 64210 + }, + { + "epoch": 2.4796324182400866, + "grad_norm": 1.1567891836166382, + "learning_rate": 3.4693746219287745e-05, + "loss": 0.2034, + "step": 64220 + }, + { + "epoch": 2.480018533534113, + "grad_norm": 0.05797566846013069, + "learning_rate": 3.466800519968596e-05, + "loss": 0.1438, + "step": 64230 + }, + { + "epoch": 2.48040464882814, + "grad_norm": 0.8097573518753052, + "learning_rate": 3.464226418008418e-05, + "loss": 0.2128, + "step": 64240 + }, + { + "epoch": 2.4807907641221667, + "grad_norm": 1.383527398109436, + "learning_rate": 3.461652316048239e-05, + "loss": 0.2379, + "step": 64250 + }, + { + "epoch": 2.4811768794161937, + "grad_norm": 1.1194959878921509, + "learning_rate": 3.45907821408806e-05, + "loss": 0.3002, + "step": 64260 + }, + { + "epoch": 2.4815629947102202, + "grad_norm": 2.7457661628723145, + "learning_rate": 3.456504112127881e-05, + "loss": 0.2035, + "step": 64270 + }, + { + "epoch": 2.4819491100042472, + "grad_norm": 1.852704405784607, + "learning_rate": 3.453930010167703e-05, + "loss": 0.1338, + "step": 64280 + }, + { + "epoch": 2.482335225298274, + "grad_norm": 1.2378255128860474, + "learning_rate": 3.4513559082075245e-05, + "loss": 0.1083, + "step": 64290 + }, + { + "epoch": 2.4827213405923008, + "grad_norm": 1.2217592000961304, + "learning_rate": 3.448781806247345e-05, + "loss": 0.2626, + "step": 64300 + }, + { + "epoch": 2.4831074558863278, + "grad_norm": 0.8216773271560669, + "learning_rate": 3.446207704287167e-05, + "loss": 0.1455, + "step": 64310 + }, + { + "epoch": 2.4834935711803543, + "grad_norm": 1.122636318206787, + "learning_rate": 3.4436336023269885e-05, + "loss": 0.2029, + "step": 64320 + }, + { + "epoch": 2.4838796864743813, + "grad_norm": 0.4117072820663452, + "learning_rate": 3.441059500366809e-05, + "loss": 0.2123, + "step": 64330 + }, + { + "epoch": 2.484265801768408, + "grad_norm": 0.04222499951720238, + "learning_rate": 3.438485398406631e-05, + "loss": 0.2791, + "step": 64340 + }, + { + "epoch": 2.484651917062435, + "grad_norm": 1.2753795385360718, + "learning_rate": 3.4359112964464525e-05, + "loss": 0.213, + "step": 64350 + }, + { + "epoch": 2.485038032356462, + "grad_norm": 0.8051297068595886, + "learning_rate": 3.433337194486274e-05, + "loss": 0.2523, + "step": 64360 + }, + { + "epoch": 2.4854241476504884, + "grad_norm": 2.218684196472168, + "learning_rate": 3.430763092526095e-05, + "loss": 0.2185, + "step": 64370 + }, + { + "epoch": 2.4858102629445153, + "grad_norm": 2.2917842864990234, + "learning_rate": 3.4281889905659165e-05, + "loss": 0.1521, + "step": 64380 + }, + { + "epoch": 2.486196378238542, + "grad_norm": 1.0776972770690918, + "learning_rate": 3.425614888605738e-05, + "loss": 0.2217, + "step": 64390 + }, + { + "epoch": 
2.486582493532569, + "grad_norm": 1.0344847440719604, + "learning_rate": 3.423040786645559e-05, + "loss": 0.1871, + "step": 64400 + }, + { + "epoch": 2.4869686088265954, + "grad_norm": 0.6094161868095398, + "learning_rate": 3.4204666846853805e-05, + "loss": 0.1073, + "step": 64410 + }, + { + "epoch": 2.4873547241206224, + "grad_norm": 0.8258315920829773, + "learning_rate": 3.417892582725202e-05, + "loss": 0.1897, + "step": 64420 + }, + { + "epoch": 2.4877408394146494, + "grad_norm": 0.6779630184173584, + "learning_rate": 3.415318480765023e-05, + "loss": 0.0847, + "step": 64430 + }, + { + "epoch": 2.488126954708676, + "grad_norm": 1.7514374256134033, + "learning_rate": 3.4127443788048445e-05, + "loss": 0.2243, + "step": 64440 + }, + { + "epoch": 2.488513070002703, + "grad_norm": 0.5367060899734497, + "learning_rate": 3.410170276844666e-05, + "loss": 0.3064, + "step": 64450 + }, + { + "epoch": 2.4888991852967295, + "grad_norm": 1.0501765012741089, + "learning_rate": 3.407596174884488e-05, + "loss": 0.1908, + "step": 64460 + }, + { + "epoch": 2.4892853005907565, + "grad_norm": 0.8501892685890198, + "learning_rate": 3.4050220729243085e-05, + "loss": 0.1238, + "step": 64470 + }, + { + "epoch": 2.489671415884783, + "grad_norm": 0.2177915871143341, + "learning_rate": 3.40244797096413e-05, + "loss": 0.114, + "step": 64480 + }, + { + "epoch": 2.49005753117881, + "grad_norm": 0.04332759603857994, + "learning_rate": 3.399873869003952e-05, + "loss": 0.2304, + "step": 64490 + }, + { + "epoch": 2.490443646472837, + "grad_norm": 0.6862795352935791, + "learning_rate": 3.397299767043773e-05, + "loss": 0.0624, + "step": 64500 + }, + { + "epoch": 2.4908297617668635, + "grad_norm": 0.7046298980712891, + "learning_rate": 3.394725665083594e-05, + "loss": 0.1957, + "step": 64510 + }, + { + "epoch": 2.4912158770608905, + "grad_norm": 0.5339323878288269, + "learning_rate": 3.392151563123416e-05, + "loss": 0.2889, + "step": 64520 + }, + { + "epoch": 2.491601992354917, + "grad_norm": 0.28813856840133667, + "learning_rate": 3.389577461163237e-05, + "loss": 0.0679, + "step": 64530 + }, + { + "epoch": 2.491988107648944, + "grad_norm": 0.6499680876731873, + "learning_rate": 3.387003359203058e-05, + "loss": 0.1949, + "step": 64540 + }, + { + "epoch": 2.4923742229429706, + "grad_norm": 0.26736152172088623, + "learning_rate": 3.384429257242879e-05, + "loss": 0.1647, + "step": 64550 + }, + { + "epoch": 2.4927603382369976, + "grad_norm": 2.9154460430145264, + "learning_rate": 3.381855155282701e-05, + "loss": 0.2578, + "step": 64560 + }, + { + "epoch": 2.4931464535310246, + "grad_norm": 0.23644079267978668, + "learning_rate": 3.3792810533225225e-05, + "loss": 0.1555, + "step": 64570 + }, + { + "epoch": 2.493532568825051, + "grad_norm": 1.459173560142517, + "learning_rate": 3.376706951362343e-05, + "loss": 0.1637, + "step": 64580 + }, + { + "epoch": 2.493918684119078, + "grad_norm": 0.5812413692474365, + "learning_rate": 3.374132849402165e-05, + "loss": 0.1067, + "step": 64590 + }, + { + "epoch": 2.4943047994131047, + "grad_norm": 1.358742594718933, + "learning_rate": 3.3715587474419865e-05, + "loss": 0.2211, + "step": 64600 + }, + { + "epoch": 2.4946909147071317, + "grad_norm": 1.94925856590271, + "learning_rate": 3.368984645481808e-05, + "loss": 0.4372, + "step": 64610 + }, + { + "epoch": 2.495077030001158, + "grad_norm": 0.4507911801338196, + "learning_rate": 3.366410543521629e-05, + "loss": 0.1736, + "step": 64620 + }, + { + "epoch": 2.495463145295185, + "grad_norm": 0.7500709891319275, + "learning_rate": 
3.3638364415614504e-05, + "loss": 0.2025, + "step": 64630 + }, + { + "epoch": 2.4958492605892117, + "grad_norm": 2.6973438262939453, + "learning_rate": 3.361262339601272e-05, + "loss": 0.1173, + "step": 64640 + }, + { + "epoch": 2.4962353758832387, + "grad_norm": 2.0454306602478027, + "learning_rate": 3.358688237641093e-05, + "loss": 0.1304, + "step": 64650 + }, + { + "epoch": 2.4966214911772653, + "grad_norm": 2.469876527786255, + "learning_rate": 3.3561141356809144e-05, + "loss": 0.1758, + "step": 64660 + }, + { + "epoch": 2.4970076064712923, + "grad_norm": 0.8067110180854797, + "learning_rate": 3.353540033720736e-05, + "loss": 0.0747, + "step": 64670 + }, + { + "epoch": 2.4973937217653193, + "grad_norm": 1.2773382663726807, + "learning_rate": 3.350965931760557e-05, + "loss": 0.1151, + "step": 64680 + }, + { + "epoch": 2.497779837059346, + "grad_norm": 0.1975592076778412, + "learning_rate": 3.3483918298003784e-05, + "loss": 0.2104, + "step": 64690 + }, + { + "epoch": 2.498165952353373, + "grad_norm": 0.6423165202140808, + "learning_rate": 3.3458177278402e-05, + "loss": 0.1885, + "step": 64700 + }, + { + "epoch": 2.4985520676473993, + "grad_norm": 1.818458914756775, + "learning_rate": 3.343243625880022e-05, + "loss": 0.1506, + "step": 64710 + }, + { + "epoch": 2.4989381829414263, + "grad_norm": 0.30381113290786743, + "learning_rate": 3.3406695239198424e-05, + "loss": 0.1791, + "step": 64720 + }, + { + "epoch": 2.499324298235453, + "grad_norm": 2.867393732070923, + "learning_rate": 3.338095421959664e-05, + "loss": 0.1854, + "step": 64730 + }, + { + "epoch": 2.49971041352948, + "grad_norm": 0.22897863388061523, + "learning_rate": 3.335521319999486e-05, + "loss": 0.1313, + "step": 64740 + }, + { + "epoch": 2.500096528823507, + "grad_norm": 2.5336554050445557, + "learning_rate": 3.332947218039307e-05, + "loss": 0.1814, + "step": 64750 + }, + { + "epoch": 2.5004826441175334, + "grad_norm": 0.10779833793640137, + "learning_rate": 3.330373116079128e-05, + "loss": 0.1563, + "step": 64760 + }, + { + "epoch": 2.5008687594115604, + "grad_norm": 1.1376827955245972, + "learning_rate": 3.32779901411895e-05, + "loss": 0.1495, + "step": 64770 + }, + { + "epoch": 2.501254874705587, + "grad_norm": 0.7156823873519897, + "learning_rate": 3.325224912158771e-05, + "loss": 0.2531, + "step": 64780 + }, + { + "epoch": 2.501640989999614, + "grad_norm": 0.7690051198005676, + "learning_rate": 3.322650810198592e-05, + "loss": 0.1204, + "step": 64790 + }, + { + "epoch": 2.5020271052936405, + "grad_norm": 0.0718945860862732, + "learning_rate": 3.320076708238413e-05, + "loss": 0.1044, + "step": 64800 + }, + { + "epoch": 2.5024132205876675, + "grad_norm": 0.12632794678211212, + "learning_rate": 3.317502606278235e-05, + "loss": 0.1007, + "step": 64810 + }, + { + "epoch": 2.5027993358816945, + "grad_norm": 0.9840032458305359, + "learning_rate": 3.3149285043180564e-05, + "loss": 0.2399, + "step": 64820 + }, + { + "epoch": 2.503185451175721, + "grad_norm": 0.3271815776824951, + "learning_rate": 3.312354402357877e-05, + "loss": 0.117, + "step": 64830 + }, + { + "epoch": 2.503571566469748, + "grad_norm": 2.1266753673553467, + "learning_rate": 3.309780300397699e-05, + "loss": 0.1819, + "step": 64840 + }, + { + "epoch": 2.5039576817637745, + "grad_norm": 0.5041390061378479, + "learning_rate": 3.3072061984375204e-05, + "loss": 0.0579, + "step": 64850 + }, + { + "epoch": 2.5043437970578015, + "grad_norm": 0.2855200469493866, + "learning_rate": 3.304632096477341e-05, + "loss": 0.1814, + "step": 64860 + }, + { + "epoch": 
2.504729912351828, + "grad_norm": 1.9890060424804688, + "learning_rate": 3.302057994517163e-05, + "loss": 0.3038, + "step": 64870 + }, + { + "epoch": 2.505116027645855, + "grad_norm": 0.5688171982765198, + "learning_rate": 3.2994838925569844e-05, + "loss": 0.0947, + "step": 64880 + }, + { + "epoch": 2.505502142939882, + "grad_norm": 3.265097141265869, + "learning_rate": 3.296909790596806e-05, + "loss": 0.1722, + "step": 64890 + }, + { + "epoch": 2.5058882582339086, + "grad_norm": 1.0011316537857056, + "learning_rate": 3.294335688636627e-05, + "loss": 0.217, + "step": 64900 + }, + { + "epoch": 2.5062743735279356, + "grad_norm": 2.054866075515747, + "learning_rate": 3.2917615866764484e-05, + "loss": 0.1862, + "step": 64910 + }, + { + "epoch": 2.506660488821962, + "grad_norm": 1.2730998992919922, + "learning_rate": 3.28918748471627e-05, + "loss": 0.1023, + "step": 64920 + }, + { + "epoch": 2.507046604115989, + "grad_norm": 1.5231776237487793, + "learning_rate": 3.286613382756091e-05, + "loss": 0.1922, + "step": 64930 + }, + { + "epoch": 2.5074327194100157, + "grad_norm": 0.7604933977127075, + "learning_rate": 3.2840392807959124e-05, + "loss": 0.1329, + "step": 64940 + }, + { + "epoch": 2.5078188347040427, + "grad_norm": 1.243506669998169, + "learning_rate": 3.281465178835734e-05, + "loss": 0.2142, + "step": 64950 + }, + { + "epoch": 2.5082049499980696, + "grad_norm": 3.0195231437683105, + "learning_rate": 3.278891076875556e-05, + "loss": 0.3474, + "step": 64960 + }, + { + "epoch": 2.508591065292096, + "grad_norm": 2.2186105251312256, + "learning_rate": 3.2763169749153764e-05, + "loss": 0.2125, + "step": 64970 + }, + { + "epoch": 2.508977180586123, + "grad_norm": 1.1483558416366577, + "learning_rate": 3.273742872955198e-05, + "loss": 0.1427, + "step": 64980 + }, + { + "epoch": 2.5093632958801497, + "grad_norm": 1.3046457767486572, + "learning_rate": 3.27116877099502e-05, + "loss": 0.0859, + "step": 64990 + }, + { + "epoch": 2.5097494111741767, + "grad_norm": 0.04392600804567337, + "learning_rate": 3.2685946690348404e-05, + "loss": 0.146, + "step": 65000 + }, + { + "epoch": 2.5101355264682033, + "grad_norm": 0.8138188123703003, + "learning_rate": 3.266020567074662e-05, + "loss": 0.1809, + "step": 65010 + }, + { + "epoch": 2.5105216417622302, + "grad_norm": 2.0875182151794434, + "learning_rate": 3.263446465114484e-05, + "loss": 0.1925, + "step": 65020 + }, + { + "epoch": 2.5109077570562572, + "grad_norm": 0.4890693426132202, + "learning_rate": 3.260872363154305e-05, + "loss": 0.1189, + "step": 65030 + }, + { + "epoch": 2.511293872350284, + "grad_norm": 0.21475251019001007, + "learning_rate": 3.258298261194126e-05, + "loss": 0.2583, + "step": 65040 + }, + { + "epoch": 2.5116799876443103, + "grad_norm": 1.272985577583313, + "learning_rate": 3.255724159233947e-05, + "loss": 0.3891, + "step": 65050 + }, + { + "epoch": 2.5120661029383373, + "grad_norm": 0.8311867713928223, + "learning_rate": 3.253150057273769e-05, + "loss": 0.1536, + "step": 65060 + }, + { + "epoch": 2.5124522182323643, + "grad_norm": 2.999817371368408, + "learning_rate": 3.2505759553135904e-05, + "loss": 0.2331, + "step": 65070 + }, + { + "epoch": 2.512838333526391, + "grad_norm": 0.4586697518825531, + "learning_rate": 3.248001853353411e-05, + "loss": 0.1496, + "step": 65080 + }, + { + "epoch": 2.513224448820418, + "grad_norm": 0.38037627935409546, + "learning_rate": 3.245427751393233e-05, + "loss": 0.1769, + "step": 65090 + }, + { + "epoch": 2.513610564114445, + "grad_norm": 0.9884114265441895, + "learning_rate": 
3.2428536494330544e-05, + "loss": 0.157, + "step": 65100 + }, + { + "epoch": 2.5139966794084714, + "grad_norm": 2.8576581478118896, + "learning_rate": 3.240279547472875e-05, + "loss": 0.1484, + "step": 65110 + }, + { + "epoch": 2.514382794702498, + "grad_norm": 1.7361352443695068, + "learning_rate": 3.237705445512697e-05, + "loss": 0.1315, + "step": 65120 + }, + { + "epoch": 2.514768909996525, + "grad_norm": 1.4289588928222656, + "learning_rate": 3.2351313435525184e-05, + "loss": 0.1235, + "step": 65130 + }, + { + "epoch": 2.515155025290552, + "grad_norm": 2.256578207015991, + "learning_rate": 3.23255724159234e-05, + "loss": 0.1026, + "step": 65140 + }, + { + "epoch": 2.5155411405845785, + "grad_norm": 2.3927180767059326, + "learning_rate": 3.229983139632161e-05, + "loss": 0.1264, + "step": 65150 + }, + { + "epoch": 2.5159272558786054, + "grad_norm": 0.7600728869438171, + "learning_rate": 3.2274090376719824e-05, + "loss": 0.1091, + "step": 65160 + }, + { + "epoch": 2.516313371172632, + "grad_norm": 1.196343183517456, + "learning_rate": 3.224834935711804e-05, + "loss": 0.1419, + "step": 65170 + }, + { + "epoch": 2.516699486466659, + "grad_norm": 1.8273401260375977, + "learning_rate": 3.222260833751625e-05, + "loss": 0.1519, + "step": 65180 + }, + { + "epoch": 2.5170856017606855, + "grad_norm": 0.586053192615509, + "learning_rate": 3.2196867317914463e-05, + "loss": 0.1348, + "step": 65190 + }, + { + "epoch": 2.5174717170547125, + "grad_norm": 0.34410399198532104, + "learning_rate": 3.217112629831268e-05, + "loss": 0.2406, + "step": 65200 + }, + { + "epoch": 2.5178578323487395, + "grad_norm": 1.9117828607559204, + "learning_rate": 3.214538527871089e-05, + "loss": 0.1669, + "step": 65210 + }, + { + "epoch": 2.518243947642766, + "grad_norm": 0.3416088819503784, + "learning_rate": 3.2119644259109103e-05, + "loss": 0.0738, + "step": 65220 + }, + { + "epoch": 2.518630062936793, + "grad_norm": 2.7562408447265625, + "learning_rate": 3.209390323950732e-05, + "loss": 0.2793, + "step": 65230 + }, + { + "epoch": 2.5190161782308196, + "grad_norm": 0.18978220224380493, + "learning_rate": 3.206816221990554e-05, + "loss": 0.2145, + "step": 65240 + }, + { + "epoch": 2.5194022935248466, + "grad_norm": 1.9624252319335938, + "learning_rate": 3.204242120030374e-05, + "loss": 0.2203, + "step": 65250 + }, + { + "epoch": 2.519788408818873, + "grad_norm": 1.9377204179763794, + "learning_rate": 3.201668018070196e-05, + "loss": 0.1587, + "step": 65260 + }, + { + "epoch": 2.5201745241129, + "grad_norm": 0.5279117226600647, + "learning_rate": 3.199093916110018e-05, + "loss": 0.1874, + "step": 65270 + }, + { + "epoch": 2.520560639406927, + "grad_norm": 1.3398821353912354, + "learning_rate": 3.196519814149839e-05, + "loss": 0.1258, + "step": 65280 + }, + { + "epoch": 2.5209467547009536, + "grad_norm": 3.5301899909973145, + "learning_rate": 3.1939457121896597e-05, + "loss": 0.2119, + "step": 65290 + }, + { + "epoch": 2.5213328699949806, + "grad_norm": 1.9934186935424805, + "learning_rate": 3.191371610229482e-05, + "loss": 0.1813, + "step": 65300 + }, + { + "epoch": 2.521718985289007, + "grad_norm": 0.049756068736314774, + "learning_rate": 3.188797508269303e-05, + "loss": 0.1224, + "step": 65310 + }, + { + "epoch": 2.522105100583034, + "grad_norm": 0.14521420001983643, + "learning_rate": 3.1862234063091237e-05, + "loss": 0.2149, + "step": 65320 + }, + { + "epoch": 2.5224912158770607, + "grad_norm": 1.602318525314331, + "learning_rate": 3.183649304348945e-05, + "loss": 0.2355, + "step": 65330 + }, + { + "epoch": 
2.5228773311710877, + "grad_norm": 0.8942916989326477, + "learning_rate": 3.181075202388767e-05, + "loss": 0.0873, + "step": 65340 + }, + { + "epoch": 2.5232634464651147, + "grad_norm": 1.184981107711792, + "learning_rate": 3.178501100428588e-05, + "loss": 0.2573, + "step": 65350 + }, + { + "epoch": 2.5236495617591412, + "grad_norm": 1.4945077896118164, + "learning_rate": 3.175926998468409e-05, + "loss": 0.131, + "step": 65360 + }, + { + "epoch": 2.5240356770531682, + "grad_norm": 0.2049487978219986, + "learning_rate": 3.173352896508231e-05, + "loss": 0.0353, + "step": 65370 + }, + { + "epoch": 2.5244217923471948, + "grad_norm": 0.9006851315498352, + "learning_rate": 3.170778794548052e-05, + "loss": 0.1341, + "step": 65380 + }, + { + "epoch": 2.5248079076412218, + "grad_norm": 0.08327820897102356, + "learning_rate": 3.1682046925878736e-05, + "loss": 0.1907, + "step": 65390 + }, + { + "epoch": 2.5251940229352483, + "grad_norm": 0.19701172411441803, + "learning_rate": 3.165630590627695e-05, + "loss": 0.131, + "step": 65400 + }, + { + "epoch": 2.5255801382292753, + "grad_norm": 2.7013745307922363, + "learning_rate": 3.163056488667516e-05, + "loss": 0.3517, + "step": 65410 + }, + { + "epoch": 2.5259662535233023, + "grad_norm": 0.20807845890522003, + "learning_rate": 3.1604823867073376e-05, + "loss": 0.0998, + "step": 65420 + }, + { + "epoch": 2.526352368817329, + "grad_norm": 0.6886903643608093, + "learning_rate": 3.157908284747159e-05, + "loss": 0.1197, + "step": 65430 + }, + { + "epoch": 2.5267384841113554, + "grad_norm": 0.42514756321907043, + "learning_rate": 3.15533418278698e-05, + "loss": 0.1692, + "step": 65440 + }, + { + "epoch": 2.5271245994053824, + "grad_norm": 0.26355358958244324, + "learning_rate": 3.1527600808268016e-05, + "loss": 0.121, + "step": 65450 + }, + { + "epoch": 2.5275107146994094, + "grad_norm": 0.14379052817821503, + "learning_rate": 3.150185978866623e-05, + "loss": 0.2221, + "step": 65460 + }, + { + "epoch": 2.527896829993436, + "grad_norm": 0.0977015420794487, + "learning_rate": 3.147611876906444e-05, + "loss": 0.158, + "step": 65470 + }, + { + "epoch": 2.528282945287463, + "grad_norm": 0.6228841543197632, + "learning_rate": 3.1450377749462656e-05, + "loss": 0.2043, + "step": 65480 + }, + { + "epoch": 2.52866906058149, + "grad_norm": 0.10137589275836945, + "learning_rate": 3.1424636729860876e-05, + "loss": 0.2369, + "step": 65490 + }, + { + "epoch": 2.5290551758755164, + "grad_norm": 0.20083734393119812, + "learning_rate": 3.139889571025908e-05, + "loss": 0.1989, + "step": 65500 + }, + { + "epoch": 2.529441291169543, + "grad_norm": 0.35362812876701355, + "learning_rate": 3.1373154690657296e-05, + "loss": 0.2544, + "step": 65510 + }, + { + "epoch": 2.52982740646357, + "grad_norm": 0.8700111508369446, + "learning_rate": 3.1347413671055516e-05, + "loss": 0.112, + "step": 65520 + }, + { + "epoch": 2.530213521757597, + "grad_norm": 0.0802641287446022, + "learning_rate": 3.132167265145372e-05, + "loss": 0.0596, + "step": 65530 + }, + { + "epoch": 2.5305996370516235, + "grad_norm": 1.380710244178772, + "learning_rate": 3.1295931631851936e-05, + "loss": 0.1295, + "step": 65540 + }, + { + "epoch": 2.5309857523456505, + "grad_norm": 0.9784635901451111, + "learning_rate": 3.1270190612250156e-05, + "loss": 0.2425, + "step": 65550 + }, + { + "epoch": 2.5313718676396775, + "grad_norm": 0.09004099667072296, + "learning_rate": 3.124444959264837e-05, + "loss": 0.0659, + "step": 65560 + }, + { + "epoch": 2.531757982933704, + "grad_norm": 0.33668985962867737, + "learning_rate": 
3.1218708573046576e-05, + "loss": 0.1979, + "step": 65570 + }, + { + "epoch": 2.5321440982277306, + "grad_norm": 0.39336487650871277, + "learning_rate": 3.119296755344479e-05, + "loss": 0.17, + "step": 65580 + }, + { + "epoch": 2.5325302135217576, + "grad_norm": 0.6237707734107971, + "learning_rate": 3.116722653384301e-05, + "loss": 0.0943, + "step": 65590 + }, + { + "epoch": 2.5329163288157845, + "grad_norm": 0.1977011263370514, + "learning_rate": 3.114148551424122e-05, + "loss": 0.1265, + "step": 65600 + }, + { + "epoch": 2.533302444109811, + "grad_norm": 0.7314585447311401, + "learning_rate": 3.111574449463943e-05, + "loss": 0.1159, + "step": 65610 + }, + { + "epoch": 2.533688559403838, + "grad_norm": 1.6389861106872559, + "learning_rate": 3.109000347503765e-05, + "loss": 0.1515, + "step": 65620 + }, + { + "epoch": 2.5340746746978646, + "grad_norm": 0.25401124358177185, + "learning_rate": 3.106426245543586e-05, + "loss": 0.2054, + "step": 65630 + }, + { + "epoch": 2.5344607899918916, + "grad_norm": 0.7747787833213806, + "learning_rate": 3.103852143583407e-05, + "loss": 0.0944, + "step": 65640 + }, + { + "epoch": 2.534846905285918, + "grad_norm": 2.0066640377044678, + "learning_rate": 3.101278041623229e-05, + "loss": 0.1223, + "step": 65650 + }, + { + "epoch": 2.535233020579945, + "grad_norm": 0.3246127665042877, + "learning_rate": 3.09870393966305e-05, + "loss": 0.2998, + "step": 65660 + }, + { + "epoch": 2.535619135873972, + "grad_norm": 0.6192534565925598, + "learning_rate": 3.0961298377028716e-05, + "loss": 0.0864, + "step": 65670 + }, + { + "epoch": 2.5360052511679987, + "grad_norm": 1.500116229057312, + "learning_rate": 3.093555735742693e-05, + "loss": 0.1384, + "step": 65680 + }, + { + "epoch": 2.5363913664620257, + "grad_norm": 1.661163091659546, + "learning_rate": 3.090981633782514e-05, + "loss": 0.1907, + "step": 65690 + }, + { + "epoch": 2.536777481756052, + "grad_norm": 0.46657463908195496, + "learning_rate": 3.0884075318223356e-05, + "loss": 0.2575, + "step": 65700 + }, + { + "epoch": 2.537163597050079, + "grad_norm": 1.0207927227020264, + "learning_rate": 3.085833429862157e-05, + "loss": 0.1799, + "step": 65710 + }, + { + "epoch": 2.5375497123441058, + "grad_norm": 0.5044147372245789, + "learning_rate": 3.083259327901978e-05, + "loss": 0.2646, + "step": 65720 + }, + { + "epoch": 2.5379358276381327, + "grad_norm": 0.6330855488777161, + "learning_rate": 3.0806852259417996e-05, + "loss": 0.2243, + "step": 65730 + }, + { + "epoch": 2.5383219429321597, + "grad_norm": 0.12088367342948914, + "learning_rate": 3.0781111239816216e-05, + "loss": 0.1685, + "step": 65740 + }, + { + "epoch": 2.5387080582261863, + "grad_norm": 1.5174821615219116, + "learning_rate": 3.075537022021442e-05, + "loss": 0.081, + "step": 65750 + }, + { + "epoch": 2.5390941735202133, + "grad_norm": 0.5644610524177551, + "learning_rate": 3.0729629200612636e-05, + "loss": 0.1046, + "step": 65760 + }, + { + "epoch": 2.53948028881424, + "grad_norm": 1.9365503787994385, + "learning_rate": 3.0703888181010856e-05, + "loss": 0.1162, + "step": 65770 + }, + { + "epoch": 2.539866404108267, + "grad_norm": 0.46223533153533936, + "learning_rate": 3.067814716140906e-05, + "loss": 0.1218, + "step": 65780 + }, + { + "epoch": 2.5402525194022934, + "grad_norm": 2.0017831325531006, + "learning_rate": 3.0652406141807276e-05, + "loss": 0.2885, + "step": 65790 + }, + { + "epoch": 2.5406386346963203, + "grad_norm": 1.7874171733856201, + "learning_rate": 3.0626665122205496e-05, + "loss": 0.1561, + "step": 65800 + }, + { + "epoch": 
2.5410247499903473, + "grad_norm": 0.6481704115867615, + "learning_rate": 3.060092410260371e-05, + "loss": 0.188, + "step": 65810 + }, + { + "epoch": 2.541410865284374, + "grad_norm": 1.0805027484893799, + "learning_rate": 3.0575183083001916e-05, + "loss": 0.1335, + "step": 65820 + }, + { + "epoch": 2.541796980578401, + "grad_norm": 2.5876686573028564, + "learning_rate": 3.0549442063400136e-05, + "loss": 0.0836, + "step": 65830 + }, + { + "epoch": 2.5421830958724274, + "grad_norm": 0.9550105929374695, + "learning_rate": 3.052370104379835e-05, + "loss": 0.2, + "step": 65840 + }, + { + "epoch": 2.5425692111664544, + "grad_norm": 0.48512884974479675, + "learning_rate": 3.049796002419656e-05, + "loss": 0.089, + "step": 65850 + }, + { + "epoch": 2.542955326460481, + "grad_norm": 0.8065679669380188, + "learning_rate": 3.0472219004594772e-05, + "loss": 0.1408, + "step": 65860 + }, + { + "epoch": 2.543341441754508, + "grad_norm": 0.32501792907714844, + "learning_rate": 3.044647798499299e-05, + "loss": 0.3102, + "step": 65870 + }, + { + "epoch": 2.543727557048535, + "grad_norm": 0.521720826625824, + "learning_rate": 3.04207369653912e-05, + "loss": 0.1617, + "step": 65880 + }, + { + "epoch": 2.5441136723425615, + "grad_norm": 0.4469434320926666, + "learning_rate": 3.0394995945789412e-05, + "loss": 0.0619, + "step": 65890 + }, + { + "epoch": 2.544499787636588, + "grad_norm": 1.0342704057693481, + "learning_rate": 3.036925492618763e-05, + "loss": 0.1, + "step": 65900 + }, + { + "epoch": 2.544885902930615, + "grad_norm": 0.5880559086799622, + "learning_rate": 3.0343513906585842e-05, + "loss": 0.1031, + "step": 65910 + }, + { + "epoch": 2.545272018224642, + "grad_norm": 0.065493643283844, + "learning_rate": 3.0317772886984052e-05, + "loss": 0.1029, + "step": 65920 + }, + { + "epoch": 2.5456581335186685, + "grad_norm": 0.6159729361534119, + "learning_rate": 3.029203186738227e-05, + "loss": 0.0852, + "step": 65930 + }, + { + "epoch": 2.5460442488126955, + "grad_norm": 1.618788242340088, + "learning_rate": 3.0266290847780482e-05, + "loss": 0.2189, + "step": 65940 + }, + { + "epoch": 2.5464303641067225, + "grad_norm": 4.030269145965576, + "learning_rate": 3.0240549828178692e-05, + "loss": 0.3216, + "step": 65950 + }, + { + "epoch": 2.546816479400749, + "grad_norm": 1.813103437423706, + "learning_rate": 3.0214808808576912e-05, + "loss": 0.2134, + "step": 65960 + }, + { + "epoch": 2.5472025946947756, + "grad_norm": 0.5134888887405396, + "learning_rate": 3.0189067788975122e-05, + "loss": 0.1133, + "step": 65970 + }, + { + "epoch": 2.5475887099888026, + "grad_norm": 0.027485152706503868, + "learning_rate": 3.0163326769373335e-05, + "loss": 0.0954, + "step": 65980 + }, + { + "epoch": 2.5479748252828296, + "grad_norm": 1.0274910926818848, + "learning_rate": 3.0137585749771552e-05, + "loss": 0.3188, + "step": 65990 + }, + { + "epoch": 2.548360940576856, + "grad_norm": 0.556398332118988, + "learning_rate": 3.0111844730169762e-05, + "loss": 0.0638, + "step": 66000 + }, + { + "epoch": 2.548747055870883, + "grad_norm": 0.16730284690856934, + "learning_rate": 3.0086103710567975e-05, + "loss": 0.1002, + "step": 66010 + }, + { + "epoch": 2.54913317116491, + "grad_norm": 0.401558518409729, + "learning_rate": 3.0060362690966192e-05, + "loss": 0.1537, + "step": 66020 + }, + { + "epoch": 2.5495192864589367, + "grad_norm": 0.8240194916725159, + "learning_rate": 3.0034621671364405e-05, + "loss": 0.2183, + "step": 66030 + }, + { + "epoch": 2.549905401752963, + "grad_norm": 0.34985408186912537, + "learning_rate": 
3.0008880651762615e-05, + "loss": 0.2507, + "step": 66040 + }, + { + "epoch": 2.55029151704699, + "grad_norm": 1.4406944513320923, + "learning_rate": 2.9983139632160835e-05, + "loss": 0.2086, + "step": 66050 + }, + { + "epoch": 2.550677632341017, + "grad_norm": 1.0392922163009644, + "learning_rate": 2.9957398612559045e-05, + "loss": 0.2622, + "step": 66060 + }, + { + "epoch": 2.5510637476350437, + "grad_norm": 0.07720334827899933, + "learning_rate": 2.993165759295726e-05, + "loss": 0.2646, + "step": 66070 + }, + { + "epoch": 2.5514498629290707, + "grad_norm": 1.9083623886108398, + "learning_rate": 2.9905916573355475e-05, + "loss": 0.1832, + "step": 66080 + }, + { + "epoch": 2.5518359782230973, + "grad_norm": 0.7062809467315674, + "learning_rate": 2.9880175553753685e-05, + "loss": 0.2515, + "step": 66090 + }, + { + "epoch": 2.5522220935171243, + "grad_norm": 2.6205830574035645, + "learning_rate": 2.98544345341519e-05, + "loss": 0.1872, + "step": 66100 + }, + { + "epoch": 2.552608208811151, + "grad_norm": 0.6591269373893738, + "learning_rate": 2.982869351455011e-05, + "loss": 0.2101, + "step": 66110 + }, + { + "epoch": 2.552994324105178, + "grad_norm": 0.11720894277095795, + "learning_rate": 2.980295249494833e-05, + "loss": 0.0773, + "step": 66120 + }, + { + "epoch": 2.553380439399205, + "grad_norm": 0.4956444799900055, + "learning_rate": 2.977721147534654e-05, + "loss": 0.0854, + "step": 66130 + }, + { + "epoch": 2.5537665546932313, + "grad_norm": 0.11073987185955048, + "learning_rate": 2.9751470455744752e-05, + "loss": 0.0968, + "step": 66140 + }, + { + "epoch": 2.5541526699872583, + "grad_norm": 0.1464674174785614, + "learning_rate": 2.972572943614297e-05, + "loss": 0.1609, + "step": 66150 + }, + { + "epoch": 2.554538785281285, + "grad_norm": 1.9735543727874756, + "learning_rate": 2.9699988416541182e-05, + "loss": 0.2884, + "step": 66160 + }, + { + "epoch": 2.554924900575312, + "grad_norm": 0.5370622873306274, + "learning_rate": 2.9674247396939392e-05, + "loss": 0.1345, + "step": 66170 + }, + { + "epoch": 2.5553110158693384, + "grad_norm": 1.845221996307373, + "learning_rate": 2.964850637733761e-05, + "loss": 0.1761, + "step": 66180 + }, + { + "epoch": 2.5556971311633654, + "grad_norm": 0.051780425012111664, + "learning_rate": 2.9622765357735822e-05, + "loss": 0.1886, + "step": 66190 + }, + { + "epoch": 2.5560832464573924, + "grad_norm": 2.163306713104248, + "learning_rate": 2.959702433813403e-05, + "loss": 0.2559, + "step": 66200 + }, + { + "epoch": 2.556469361751419, + "grad_norm": 1.9034422636032104, + "learning_rate": 2.9571283318532252e-05, + "loss": 0.1655, + "step": 66210 + }, + { + "epoch": 2.556855477045446, + "grad_norm": 0.1645793616771698, + "learning_rate": 2.954554229893046e-05, + "loss": 0.0944, + "step": 66220 + }, + { + "epoch": 2.5572415923394725, + "grad_norm": 0.0783633440732956, + "learning_rate": 2.9519801279328675e-05, + "loss": 0.1487, + "step": 66230 + }, + { + "epoch": 2.5576277076334994, + "grad_norm": 1.8718386888504028, + "learning_rate": 2.949406025972689e-05, + "loss": 0.1607, + "step": 66240 + }, + { + "epoch": 2.558013822927526, + "grad_norm": 0.23518329858779907, + "learning_rate": 2.94683192401251e-05, + "loss": 0.0665, + "step": 66250 + }, + { + "epoch": 2.558399938221553, + "grad_norm": 2.0301125049591064, + "learning_rate": 2.9442578220523315e-05, + "loss": 0.2067, + "step": 66260 + }, + { + "epoch": 2.55878605351558, + "grad_norm": 0.8637131452560425, + "learning_rate": 2.941683720092153e-05, + "loss": 0.1247, + "step": 66270 + }, + { + "epoch": 
2.5591721688096065, + "grad_norm": 3.191856861114502, + "learning_rate": 2.9391096181319745e-05, + "loss": 0.1793, + "step": 66280 + }, + { + "epoch": 2.5595582841036335, + "grad_norm": 2.5240116119384766, + "learning_rate": 2.9365355161717955e-05, + "loss": 0.2405, + "step": 66290 + }, + { + "epoch": 2.55994439939766, + "grad_norm": 0.651969313621521, + "learning_rate": 2.933961414211617e-05, + "loss": 0.236, + "step": 66300 + }, + { + "epoch": 2.560330514691687, + "grad_norm": 0.6139543056488037, + "learning_rate": 2.9313873122514385e-05, + "loss": 0.0929, + "step": 66310 + }, + { + "epoch": 2.5607166299857136, + "grad_norm": 2.219248056411743, + "learning_rate": 2.9288132102912598e-05, + "loss": 0.1579, + "step": 66320 + }, + { + "epoch": 2.5611027452797406, + "grad_norm": 1.0484322309494019, + "learning_rate": 2.9262391083310815e-05, + "loss": 0.0948, + "step": 66330 + }, + { + "epoch": 2.5614888605737676, + "grad_norm": 4.444892406463623, + "learning_rate": 2.9236650063709025e-05, + "loss": 0.2459, + "step": 66340 + }, + { + "epoch": 2.561874975867794, + "grad_norm": 0.5715140700340271, + "learning_rate": 2.9210909044107238e-05, + "loss": 0.1527, + "step": 66350 + }, + { + "epoch": 2.5622610911618207, + "grad_norm": 1.5631526708602905, + "learning_rate": 2.9185168024505448e-05, + "loss": 0.2262, + "step": 66360 + }, + { + "epoch": 2.5626472064558476, + "grad_norm": 0.5048274397850037, + "learning_rate": 2.9159427004903668e-05, + "loss": 0.1175, + "step": 66370 + }, + { + "epoch": 2.5630333217498746, + "grad_norm": 1.8589451313018799, + "learning_rate": 2.9133685985301878e-05, + "loss": 0.1788, + "step": 66380 + }, + { + "epoch": 2.563419437043901, + "grad_norm": 0.7230979800224304, + "learning_rate": 2.910794496570009e-05, + "loss": 0.1978, + "step": 66390 + }, + { + "epoch": 2.563805552337928, + "grad_norm": 0.34469518065452576, + "learning_rate": 2.9082203946098308e-05, + "loss": 0.2338, + "step": 66400 + }, + { + "epoch": 2.564191667631955, + "grad_norm": 0.396876722574234, + "learning_rate": 2.9056462926496518e-05, + "loss": 0.1059, + "step": 66410 + }, + { + "epoch": 2.5645777829259817, + "grad_norm": 0.10169263184070587, + "learning_rate": 2.903072190689473e-05, + "loss": 0.1637, + "step": 66420 + }, + { + "epoch": 2.5649638982200083, + "grad_norm": 0.9067368507385254, + "learning_rate": 2.9004980887292948e-05, + "loss": 0.1774, + "step": 66430 + }, + { + "epoch": 2.5653500135140352, + "grad_norm": 0.03508066385984421, + "learning_rate": 2.897923986769116e-05, + "loss": 0.1696, + "step": 66440 + }, + { + "epoch": 2.5657361288080622, + "grad_norm": 0.8411409258842468, + "learning_rate": 2.895349884808937e-05, + "loss": 0.1355, + "step": 66450 + }, + { + "epoch": 2.566122244102089, + "grad_norm": 9.54201602935791, + "learning_rate": 2.8927757828487588e-05, + "loss": 0.2578, + "step": 66460 + }, + { + "epoch": 2.5665083593961158, + "grad_norm": 0.5490165948867798, + "learning_rate": 2.89020168088858e-05, + "loss": 0.2095, + "step": 66470 + }, + { + "epoch": 2.5668944746901423, + "grad_norm": 0.1423688530921936, + "learning_rate": 2.8876275789284015e-05, + "loss": 0.1465, + "step": 66480 + }, + { + "epoch": 2.5672805899841693, + "grad_norm": 1.456730604171753, + "learning_rate": 2.885053476968223e-05, + "loss": 0.1963, + "step": 66490 + }, + { + "epoch": 2.567666705278196, + "grad_norm": 1.5556591749191284, + "learning_rate": 2.882479375008044e-05, + "loss": 0.2851, + "step": 66500 + }, + { + "epoch": 2.568052820572223, + "grad_norm": 1.825986385345459, + "learning_rate": 
2.8799052730478654e-05, + "loss": 0.2956, + "step": 66510 + }, + { + "epoch": 2.56843893586625, + "grad_norm": 1.5602234601974487, + "learning_rate": 2.877331171087687e-05, + "loss": 0.1656, + "step": 66520 + }, + { + "epoch": 2.5688250511602764, + "grad_norm": 0.17269374430179596, + "learning_rate": 2.8747570691275084e-05, + "loss": 0.2931, + "step": 66530 + }, + { + "epoch": 2.5692111664543034, + "grad_norm": 2.0602357387542725, + "learning_rate": 2.8721829671673294e-05, + "loss": 0.1741, + "step": 66540 + }, + { + "epoch": 2.56959728174833, + "grad_norm": 0.8411749005317688, + "learning_rate": 2.869608865207151e-05, + "loss": 0.2062, + "step": 66550 + }, + { + "epoch": 2.569983397042357, + "grad_norm": 1.9933018684387207, + "learning_rate": 2.8670347632469724e-05, + "loss": 0.0564, + "step": 66560 + }, + { + "epoch": 2.5703695123363834, + "grad_norm": 0.6383324861526489, + "learning_rate": 2.8644606612867934e-05, + "loss": 0.1954, + "step": 66570 + }, + { + "epoch": 2.5707556276304104, + "grad_norm": 0.6832618713378906, + "learning_rate": 2.8618865593266154e-05, + "loss": 0.1364, + "step": 66580 + }, + { + "epoch": 2.5711417429244374, + "grad_norm": 0.13984490931034088, + "learning_rate": 2.8593124573664364e-05, + "loss": 0.2051, + "step": 66590 + }, + { + "epoch": 2.571527858218464, + "grad_norm": 0.9289367198944092, + "learning_rate": 2.8567383554062578e-05, + "loss": 0.167, + "step": 66600 + }, + { + "epoch": 2.571913973512491, + "grad_norm": 2.27067232131958, + "learning_rate": 2.8541642534460794e-05, + "loss": 0.2125, + "step": 66610 + }, + { + "epoch": 2.5723000888065175, + "grad_norm": 2.3731513023376465, + "learning_rate": 2.8515901514859004e-05, + "loss": 0.1417, + "step": 66620 + }, + { + "epoch": 2.5726862041005445, + "grad_norm": 2.030726194381714, + "learning_rate": 2.8490160495257218e-05, + "loss": 0.1292, + "step": 66630 + }, + { + "epoch": 2.573072319394571, + "grad_norm": 2.3475704193115234, + "learning_rate": 2.846441947565543e-05, + "loss": 0.2046, + "step": 66640 + }, + { + "epoch": 2.573458434688598, + "grad_norm": 1.8660598993301392, + "learning_rate": 2.8438678456053648e-05, + "loss": 0.1539, + "step": 66650 + }, + { + "epoch": 2.573844549982625, + "grad_norm": 2.3513095378875732, + "learning_rate": 2.8412937436451858e-05, + "loss": 0.3371, + "step": 66660 + }, + { + "epoch": 2.5742306652766516, + "grad_norm": 0.40551525354385376, + "learning_rate": 2.838719641685007e-05, + "loss": 0.1595, + "step": 66670 + }, + { + "epoch": 2.5746167805706786, + "grad_norm": 0.5435059070587158, + "learning_rate": 2.8361455397248288e-05, + "loss": 0.1139, + "step": 66680 + }, + { + "epoch": 2.575002895864705, + "grad_norm": 0.13456226885318756, + "learning_rate": 2.83357143776465e-05, + "loss": 0.1576, + "step": 66690 + }, + { + "epoch": 2.575389011158732, + "grad_norm": 0.8292468786239624, + "learning_rate": 2.830997335804471e-05, + "loss": 0.3232, + "step": 66700 + }, + { + "epoch": 2.5757751264527586, + "grad_norm": 2.613285541534424, + "learning_rate": 2.8284232338442927e-05, + "loss": 0.3047, + "step": 66710 + }, + { + "epoch": 2.5761612417467856, + "grad_norm": 0.11251250654459, + "learning_rate": 2.825849131884114e-05, + "loss": 0.2049, + "step": 66720 + }, + { + "epoch": 2.5765473570408126, + "grad_norm": 0.4605161249637604, + "learning_rate": 2.823275029923935e-05, + "loss": 0.0768, + "step": 66730 + }, + { + "epoch": 2.576933472334839, + "grad_norm": 1.2005031108856201, + "learning_rate": 2.820700927963757e-05, + "loss": 0.2257, + "step": 66740 + }, + { + "epoch": 
2.5773195876288657, + "grad_norm": 1.1498386859893799, + "learning_rate": 2.818126826003578e-05, + "loss": 0.0817, + "step": 66750 + }, + { + "epoch": 2.5777057029228927, + "grad_norm": 0.5442838072776794, + "learning_rate": 2.8155527240433994e-05, + "loss": 0.1027, + "step": 66760 + }, + { + "epoch": 2.5780918182169197, + "grad_norm": 0.026386337354779243, + "learning_rate": 2.812978622083221e-05, + "loss": 0.2398, + "step": 66770 + }, + { + "epoch": 2.5784779335109462, + "grad_norm": 1.8325613737106323, + "learning_rate": 2.810404520123042e-05, + "loss": 0.1616, + "step": 66780 + }, + { + "epoch": 2.578864048804973, + "grad_norm": 4.571000576019287, + "learning_rate": 2.8078304181628634e-05, + "loss": 0.327, + "step": 66790 + }, + { + "epoch": 2.579250164099, + "grad_norm": 0.88929682970047, + "learning_rate": 2.805256316202685e-05, + "loss": 0.128, + "step": 66800 + }, + { + "epoch": 2.5796362793930268, + "grad_norm": 0.5320045351982117, + "learning_rate": 2.8026822142425064e-05, + "loss": 0.043, + "step": 66810 + }, + { + "epoch": 2.5800223946870533, + "grad_norm": 2.667931079864502, + "learning_rate": 2.8001081122823274e-05, + "loss": 0.1023, + "step": 66820 + }, + { + "epoch": 2.5804085099810803, + "grad_norm": 0.5052358508110046, + "learning_rate": 2.7975340103221494e-05, + "loss": 0.1913, + "step": 66830 + }, + { + "epoch": 2.5807946252751073, + "grad_norm": 0.03477906435728073, + "learning_rate": 2.7949599083619704e-05, + "loss": 0.0771, + "step": 66840 + }, + { + "epoch": 2.581180740569134, + "grad_norm": 1.756790280342102, + "learning_rate": 2.7923858064017917e-05, + "loss": 0.1997, + "step": 66850 + }, + { + "epoch": 2.581566855863161, + "grad_norm": 0.2687755227088928, + "learning_rate": 2.7898117044416134e-05, + "loss": 0.1979, + "step": 66860 + }, + { + "epoch": 2.581952971157188, + "grad_norm": 0.3334960341453552, + "learning_rate": 2.7872376024814344e-05, + "loss": 0.1232, + "step": 66870 + }, + { + "epoch": 2.5823390864512143, + "grad_norm": 0.06918884068727493, + "learning_rate": 2.7846635005212557e-05, + "loss": 0.1476, + "step": 66880 + }, + { + "epoch": 2.582725201745241, + "grad_norm": 2.113374710083008, + "learning_rate": 2.7820893985610767e-05, + "loss": 0.2019, + "step": 66890 + }, + { + "epoch": 2.583111317039268, + "grad_norm": 0.7421366572380066, + "learning_rate": 2.7795152966008987e-05, + "loss": 0.1316, + "step": 66900 + }, + { + "epoch": 2.583497432333295, + "grad_norm": 0.5632963180541992, + "learning_rate": 2.7769411946407197e-05, + "loss": 0.2162, + "step": 66910 + }, + { + "epoch": 2.5838835476273214, + "grad_norm": 1.951395869255066, + "learning_rate": 2.774367092680541e-05, + "loss": 0.2977, + "step": 66920 + }, + { + "epoch": 2.5842696629213484, + "grad_norm": 1.139452338218689, + "learning_rate": 2.7717929907203627e-05, + "loss": 0.1505, + "step": 66930 + }, + { + "epoch": 2.584655778215375, + "grad_norm": 1.4778863191604614, + "learning_rate": 2.769218888760184e-05, + "loss": 0.167, + "step": 66940 + }, + { + "epoch": 2.585041893509402, + "grad_norm": 0.3687165379524231, + "learning_rate": 2.766644786800005e-05, + "loss": 0.0876, + "step": 66950 + }, + { + "epoch": 2.5854280088034285, + "grad_norm": 0.3774222433567047, + "learning_rate": 2.7640706848398267e-05, + "loss": 0.0574, + "step": 66960 + }, + { + "epoch": 2.5858141240974555, + "grad_norm": 1.060309886932373, + "learning_rate": 2.761496582879648e-05, + "loss": 0.1978, + "step": 66970 + }, + { + "epoch": 2.5862002393914825, + "grad_norm": 0.041595011949539185, + "learning_rate": 
2.758922480919469e-05, + "loss": 0.1617, + "step": 66980 + }, + { + "epoch": 2.586586354685509, + "grad_norm": 1.0350271463394165, + "learning_rate": 2.756348378959291e-05, + "loss": 0.1443, + "step": 66990 + }, + { + "epoch": 2.586972469979536, + "grad_norm": 3.072813034057617, + "learning_rate": 2.753774276999112e-05, + "loss": 0.1084, + "step": 67000 + }, + { + "epoch": 2.5873585852735626, + "grad_norm": 1.4295400381088257, + "learning_rate": 2.7512001750389334e-05, + "loss": 0.2752, + "step": 67010 + }, + { + "epoch": 2.5877447005675895, + "grad_norm": 0.904931366443634, + "learning_rate": 2.748626073078755e-05, + "loss": 0.1908, + "step": 67020 + }, + { + "epoch": 2.588130815861616, + "grad_norm": 0.12692487239837646, + "learning_rate": 2.746051971118576e-05, + "loss": 0.1956, + "step": 67030 + }, + { + "epoch": 2.588516931155643, + "grad_norm": 3.306731700897217, + "learning_rate": 2.7434778691583974e-05, + "loss": 0.1552, + "step": 67040 + }, + { + "epoch": 2.58890304644967, + "grad_norm": 0.17633609473705292, + "learning_rate": 2.740903767198219e-05, + "loss": 0.2643, + "step": 67050 + }, + { + "epoch": 2.5892891617436966, + "grad_norm": 1.683794617652893, + "learning_rate": 2.7383296652380404e-05, + "loss": 0.1609, + "step": 67060 + }, + { + "epoch": 2.5896752770377236, + "grad_norm": 0.799902617931366, + "learning_rate": 2.7357555632778613e-05, + "loss": 0.1787, + "step": 67070 + }, + { + "epoch": 2.59006139233175, + "grad_norm": 0.41263818740844727, + "learning_rate": 2.733181461317683e-05, + "loss": 0.144, + "step": 67080 + }, + { + "epoch": 2.590447507625777, + "grad_norm": 0.018304159864783287, + "learning_rate": 2.7306073593575043e-05, + "loss": 0.3317, + "step": 67090 + }, + { + "epoch": 2.5908336229198037, + "grad_norm": 0.5893455743789673, + "learning_rate": 2.7280332573973257e-05, + "loss": 0.1116, + "step": 67100 + }, + { + "epoch": 2.5912197382138307, + "grad_norm": 0.04571494832634926, + "learning_rate": 2.7254591554371473e-05, + "loss": 0.0919, + "step": 67110 + }, + { + "epoch": 2.5916058535078577, + "grad_norm": 1.2573976516723633, + "learning_rate": 2.7228850534769683e-05, + "loss": 0.0927, + "step": 67120 + }, + { + "epoch": 2.591991968801884, + "grad_norm": 2.4016544818878174, + "learning_rate": 2.7203109515167897e-05, + "loss": 0.2308, + "step": 67130 + }, + { + "epoch": 2.592378084095911, + "grad_norm": 0.6153950691223145, + "learning_rate": 2.7177368495566113e-05, + "loss": 0.3398, + "step": 67140 + }, + { + "epoch": 2.5927641993899377, + "grad_norm": 0.638940155506134, + "learning_rate": 2.7151627475964327e-05, + "loss": 0.0721, + "step": 67150 + }, + { + "epoch": 2.5931503146839647, + "grad_norm": 2.8470890522003174, + "learning_rate": 2.7125886456362537e-05, + "loss": 0.3437, + "step": 67160 + }, + { + "epoch": 2.5935364299779913, + "grad_norm": 0.21816271543502808, + "learning_rate": 2.710014543676075e-05, + "loss": 0.2749, + "step": 67170 + }, + { + "epoch": 2.5939225452720183, + "grad_norm": 1.3057670593261719, + "learning_rate": 2.7074404417158967e-05, + "loss": 0.2564, + "step": 67180 + }, + { + "epoch": 2.5943086605660453, + "grad_norm": 3.9401612281799316, + "learning_rate": 2.7048663397557177e-05, + "loss": 0.1827, + "step": 67190 + }, + { + "epoch": 2.594694775860072, + "grad_norm": 0.09398512542247772, + "learning_rate": 2.702292237795539e-05, + "loss": 0.2714, + "step": 67200 + }, + { + "epoch": 2.5950808911540983, + "grad_norm": 1.3454128503799438, + "learning_rate": 2.6997181358353607e-05, + "loss": 0.1409, + "step": 67210 + }, + { + "epoch": 
2.5954670064481253, + "grad_norm": 3.312215566635132, + "learning_rate": 2.697144033875182e-05, + "loss": 0.1655, + "step": 67220 + }, + { + "epoch": 2.5958531217421523, + "grad_norm": 0.40052536129951477, + "learning_rate": 2.694569931915003e-05, + "loss": 0.095, + "step": 67230 + }, + { + "epoch": 2.596239237036179, + "grad_norm": 2.1148762702941895, + "learning_rate": 2.6919958299548247e-05, + "loss": 0.1167, + "step": 67240 + }, + { + "epoch": 2.596625352330206, + "grad_norm": 0.539953887462616, + "learning_rate": 2.689421727994646e-05, + "loss": 0.2198, + "step": 67250 + }, + { + "epoch": 2.597011467624233, + "grad_norm": 0.6172623634338379, + "learning_rate": 2.6868476260344673e-05, + "loss": 0.0879, + "step": 67260 + }, + { + "epoch": 2.5973975829182594, + "grad_norm": 0.9759122729301453, + "learning_rate": 2.684273524074289e-05, + "loss": 0.1162, + "step": 67270 + }, + { + "epoch": 2.597783698212286, + "grad_norm": 1.2510347366333008, + "learning_rate": 2.68169942211411e-05, + "loss": 0.1106, + "step": 67280 + }, + { + "epoch": 2.598169813506313, + "grad_norm": 1.3545809984207153, + "learning_rate": 2.6791253201539313e-05, + "loss": 0.1538, + "step": 67290 + }, + { + "epoch": 2.59855592880034, + "grad_norm": 0.4334702789783478, + "learning_rate": 2.676551218193753e-05, + "loss": 0.1178, + "step": 67300 + }, + { + "epoch": 2.5989420440943665, + "grad_norm": 0.7618227601051331, + "learning_rate": 2.6739771162335743e-05, + "loss": 0.158, + "step": 67310 + }, + { + "epoch": 2.5993281593883935, + "grad_norm": 2.0803728103637695, + "learning_rate": 2.6714030142733953e-05, + "loss": 0.2248, + "step": 67320 + }, + { + "epoch": 2.5997142746824204, + "grad_norm": 0.7592846751213074, + "learning_rate": 2.668828912313217e-05, + "loss": 0.139, + "step": 67330 + }, + { + "epoch": 2.600100389976447, + "grad_norm": 2.20271372795105, + "learning_rate": 2.6662548103530383e-05, + "loss": 0.2735, + "step": 67340 + }, + { + "epoch": 2.6004865052704735, + "grad_norm": 2.25789213180542, + "learning_rate": 2.6636807083928593e-05, + "loss": 0.1816, + "step": 67350 + }, + { + "epoch": 2.6008726205645005, + "grad_norm": 1.8669871091842651, + "learning_rate": 2.6611066064326813e-05, + "loss": 0.2003, + "step": 67360 + }, + { + "epoch": 2.6012587358585275, + "grad_norm": 2.2639665603637695, + "learning_rate": 2.6585325044725023e-05, + "loss": 0.2869, + "step": 67370 + }, + { + "epoch": 2.601644851152554, + "grad_norm": 0.5188022255897522, + "learning_rate": 2.6559584025123236e-05, + "loss": 0.1267, + "step": 67380 + }, + { + "epoch": 2.602030966446581, + "grad_norm": 0.7478063106536865, + "learning_rate": 2.6533843005521453e-05, + "loss": 0.1506, + "step": 67390 + }, + { + "epoch": 2.6024170817406076, + "grad_norm": 0.05027804523706436, + "learning_rate": 2.6508101985919663e-05, + "loss": 0.1469, + "step": 67400 + }, + { + "epoch": 2.6028031970346346, + "grad_norm": 1.4990183115005493, + "learning_rate": 2.6482360966317876e-05, + "loss": 0.0823, + "step": 67410 + }, + { + "epoch": 2.603189312328661, + "grad_norm": 0.23909913003444672, + "learning_rate": 2.645661994671609e-05, + "loss": 0.1971, + "step": 67420 + }, + { + "epoch": 2.603575427622688, + "grad_norm": 0.03295808658003807, + "learning_rate": 2.6430878927114306e-05, + "loss": 0.0741, + "step": 67430 + }, + { + "epoch": 2.603961542916715, + "grad_norm": 1.407315731048584, + "learning_rate": 2.6405137907512516e-05, + "loss": 0.115, + "step": 67440 + }, + { + "epoch": 2.6043476582107417, + "grad_norm": 2.2319045066833496, + "learning_rate": 
2.637939688791073e-05, + "loss": 0.2297, + "step": 67450 + }, + { + "epoch": 2.6047337735047686, + "grad_norm": 0.40752896666526794, + "learning_rate": 2.6353655868308946e-05, + "loss": 0.132, + "step": 67460 + }, + { + "epoch": 2.605119888798795, + "grad_norm": 1.6817177534103394, + "learning_rate": 2.632791484870716e-05, + "loss": 0.1508, + "step": 67470 + }, + { + "epoch": 2.605506004092822, + "grad_norm": 1.18791663646698, + "learning_rate": 2.630217382910537e-05, + "loss": 0.1663, + "step": 67480 + }, + { + "epoch": 2.6058921193868487, + "grad_norm": 2.44256329536438, + "learning_rate": 2.6276432809503586e-05, + "loss": 0.1656, + "step": 67490 + }, + { + "epoch": 2.6062782346808757, + "grad_norm": 0.49040651321411133, + "learning_rate": 2.62506917899018e-05, + "loss": 0.227, + "step": 67500 + }, + { + "epoch": 2.6066643499749027, + "grad_norm": 0.6817883849143982, + "learning_rate": 2.622495077030001e-05, + "loss": 0.0833, + "step": 67510 + }, + { + "epoch": 2.6070504652689293, + "grad_norm": 1.4250826835632324, + "learning_rate": 2.619920975069823e-05, + "loss": 0.0818, + "step": 67520 + }, + { + "epoch": 2.6074365805629562, + "grad_norm": 5.752524375915527, + "learning_rate": 2.617346873109644e-05, + "loss": 0.1824, + "step": 67530 + }, + { + "epoch": 2.607822695856983, + "grad_norm": 1.8009400367736816, + "learning_rate": 2.6147727711494653e-05, + "loss": 0.1534, + "step": 67540 + }, + { + "epoch": 2.6082088111510098, + "grad_norm": 0.7888918519020081, + "learning_rate": 2.612198669189287e-05, + "loss": 0.2632, + "step": 67550 + }, + { + "epoch": 2.6085949264450363, + "grad_norm": 1.1485899686813354, + "learning_rate": 2.609624567229108e-05, + "loss": 0.2014, + "step": 67560 + }, + { + "epoch": 2.6089810417390633, + "grad_norm": 0.017272522673010826, + "learning_rate": 2.6070504652689293e-05, + "loss": 0.1698, + "step": 67570 + }, + { + "epoch": 2.6093671570330903, + "grad_norm": 0.23312939703464508, + "learning_rate": 2.604476363308751e-05, + "loss": 0.11, + "step": 67580 + }, + { + "epoch": 2.609753272327117, + "grad_norm": 1.5409551858901978, + "learning_rate": 2.6019022613485723e-05, + "loss": 0.2031, + "step": 67590 + }, + { + "epoch": 2.610139387621144, + "grad_norm": 2.757416248321533, + "learning_rate": 2.5993281593883933e-05, + "loss": 0.2326, + "step": 67600 + }, + { + "epoch": 2.6105255029151704, + "grad_norm": 0.03714454174041748, + "learning_rate": 2.5967540574282153e-05, + "loss": 0.305, + "step": 67610 + }, + { + "epoch": 2.6109116182091974, + "grad_norm": 1.5993083715438843, + "learning_rate": 2.5941799554680363e-05, + "loss": 0.0492, + "step": 67620 + }, + { + "epoch": 2.611297733503224, + "grad_norm": 3.3023600578308105, + "learning_rate": 2.5916058535078576e-05, + "loss": 0.1256, + "step": 67630 + }, + { + "epoch": 2.611683848797251, + "grad_norm": 2.0448055267333984, + "learning_rate": 2.5890317515476793e-05, + "loss": 0.1496, + "step": 67640 + }, + { + "epoch": 2.612069964091278, + "grad_norm": 2.056248188018799, + "learning_rate": 2.5864576495875002e-05, + "loss": 0.1396, + "step": 67650 + }, + { + "epoch": 2.6124560793853044, + "grad_norm": 0.7189445495605469, + "learning_rate": 2.5838835476273216e-05, + "loss": 0.1375, + "step": 67660 + }, + { + "epoch": 2.612842194679331, + "grad_norm": 0.3849039077758789, + "learning_rate": 2.5813094456671426e-05, + "loss": 0.0882, + "step": 67670 + }, + { + "epoch": 2.613228309973358, + "grad_norm": 1.2116458415985107, + "learning_rate": 2.5787353437069646e-05, + "loss": 0.0892, + "step": 67680 + }, + { + "epoch": 
2.613614425267385, + "grad_norm": 0.5601721405982971, + "learning_rate": 2.5761612417467856e-05, + "loss": 0.148, + "step": 67690 + }, + { + "epoch": 2.6140005405614115, + "grad_norm": 1.1883691549301147, + "learning_rate": 2.573587139786607e-05, + "loss": 0.1673, + "step": 67700 + }, + { + "epoch": 2.6143866558554385, + "grad_norm": 0.8299083113670349, + "learning_rate": 2.5710130378264286e-05, + "loss": 0.1871, + "step": 67710 + }, + { + "epoch": 2.6147727711494655, + "grad_norm": 0.6316946744918823, + "learning_rate": 2.5684389358662496e-05, + "loss": 0.0707, + "step": 67720 + }, + { + "epoch": 2.615158886443492, + "grad_norm": 0.5716143846511841, + "learning_rate": 2.565864833906071e-05, + "loss": 0.0993, + "step": 67730 + }, + { + "epoch": 2.6155450017375186, + "grad_norm": 0.6665957570075989, + "learning_rate": 2.5632907319458926e-05, + "loss": 0.1402, + "step": 67740 + }, + { + "epoch": 2.6159311170315456, + "grad_norm": 1.1837033033370972, + "learning_rate": 2.560716629985714e-05, + "loss": 0.0963, + "step": 67750 + }, + { + "epoch": 2.6163172323255726, + "grad_norm": 0.7204211354255676, + "learning_rate": 2.558142528025535e-05, + "loss": 0.1109, + "step": 67760 + }, + { + "epoch": 2.616703347619599, + "grad_norm": 0.04177774861454964, + "learning_rate": 2.555568426065357e-05, + "loss": 0.206, + "step": 67770 + }, + { + "epoch": 2.617089462913626, + "grad_norm": 0.8528016805648804, + "learning_rate": 2.552994324105178e-05, + "loss": 0.1477, + "step": 67780 + }, + { + "epoch": 2.6174755782076526, + "grad_norm": 1.8284223079681396, + "learning_rate": 2.5504202221449992e-05, + "loss": 0.2925, + "step": 67790 + }, + { + "epoch": 2.6178616935016796, + "grad_norm": 2.03399658203125, + "learning_rate": 2.547846120184821e-05, + "loss": 0.1716, + "step": 67800 + }, + { + "epoch": 2.618247808795706, + "grad_norm": 1.1871380805969238, + "learning_rate": 2.545272018224642e-05, + "loss": 0.1387, + "step": 67810 + }, + { + "epoch": 2.618633924089733, + "grad_norm": 0.3045734465122223, + "learning_rate": 2.5426979162644632e-05, + "loss": 0.1624, + "step": 67820 + }, + { + "epoch": 2.61902003938376, + "grad_norm": 1.4469716548919678, + "learning_rate": 2.540123814304285e-05, + "loss": 0.1534, + "step": 67830 + }, + { + "epoch": 2.6194061546777867, + "grad_norm": 0.7630550861358643, + "learning_rate": 2.5375497123441062e-05, + "loss": 0.1266, + "step": 67840 + }, + { + "epoch": 2.6197922699718137, + "grad_norm": 1.3296400308609009, + "learning_rate": 2.5349756103839272e-05, + "loss": 0.3268, + "step": 67850 + }, + { + "epoch": 2.6201783852658402, + "grad_norm": 0.7620146870613098, + "learning_rate": 2.532401508423749e-05, + "loss": 0.1747, + "step": 67860 + }, + { + "epoch": 2.6205645005598672, + "grad_norm": 1.4850629568099976, + "learning_rate": 2.5298274064635702e-05, + "loss": 0.1165, + "step": 67870 + }, + { + "epoch": 2.6209506158538938, + "grad_norm": 0.30754703283309937, + "learning_rate": 2.5272533045033915e-05, + "loss": 0.1603, + "step": 67880 + }, + { + "epoch": 2.6213367311479208, + "grad_norm": 0.47044405341148376, + "learning_rate": 2.5246792025432132e-05, + "loss": 0.1471, + "step": 67890 + }, + { + "epoch": 2.6217228464419478, + "grad_norm": 1.428301453590393, + "learning_rate": 2.5221051005830342e-05, + "loss": 0.1448, + "step": 67900 + }, + { + "epoch": 2.6221089617359743, + "grad_norm": 0.3132546544075012, + "learning_rate": 2.5195309986228555e-05, + "loss": 0.1899, + "step": 67910 + }, + { + "epoch": 2.6224950770300013, + "grad_norm": 0.36817577481269836, + "learning_rate": 
2.5169568966626772e-05, + "loss": 0.2864, + "step": 67920 + }, + { + "epoch": 2.622881192324028, + "grad_norm": 0.8652348518371582, + "learning_rate": 2.5143827947024985e-05, + "loss": 0.0987, + "step": 67930 + }, + { + "epoch": 2.623267307618055, + "grad_norm": 0.5235974788665771, + "learning_rate": 2.5118086927423195e-05, + "loss": 0.3079, + "step": 67940 + }, + { + "epoch": 2.6236534229120814, + "grad_norm": 0.2852037847042084, + "learning_rate": 2.509234590782141e-05, + "loss": 0.1217, + "step": 67950 + }, + { + "epoch": 2.6240395382061084, + "grad_norm": 0.6256119608879089, + "learning_rate": 2.5066604888219625e-05, + "loss": 0.1645, + "step": 67960 + }, + { + "epoch": 2.6244256535001353, + "grad_norm": 1.0917539596557617, + "learning_rate": 2.5040863868617835e-05, + "loss": 0.1589, + "step": 67970 + }, + { + "epoch": 2.624811768794162, + "grad_norm": 1.1311125755310059, + "learning_rate": 2.501512284901605e-05, + "loss": 0.2388, + "step": 67980 + }, + { + "epoch": 2.625197884088189, + "grad_norm": 0.806238055229187, + "learning_rate": 2.4989381829414265e-05, + "loss": 0.2127, + "step": 67990 + }, + { + "epoch": 2.6255839993822154, + "grad_norm": 1.4400973320007324, + "learning_rate": 2.496364080981248e-05, + "loss": 0.0687, + "step": 68000 + }, + { + "epoch": 2.6259701146762424, + "grad_norm": 0.050153911113739014, + "learning_rate": 2.4937899790210692e-05, + "loss": 0.0564, + "step": 68010 + }, + { + "epoch": 2.626356229970269, + "grad_norm": 1.139260172843933, + "learning_rate": 2.4912158770608905e-05, + "loss": 0.0868, + "step": 68020 + }, + { + "epoch": 2.626742345264296, + "grad_norm": 1.2839637994766235, + "learning_rate": 2.488641775100712e-05, + "loss": 0.1589, + "step": 68030 + }, + { + "epoch": 2.627128460558323, + "grad_norm": 0.19808660447597504, + "learning_rate": 2.4860676731405332e-05, + "loss": 0.2015, + "step": 68040 + }, + { + "epoch": 2.6275145758523495, + "grad_norm": 0.38413748145103455, + "learning_rate": 2.4834935711803545e-05, + "loss": 0.1181, + "step": 68050 + }, + { + "epoch": 2.627900691146376, + "grad_norm": 0.5252083539962769, + "learning_rate": 2.480919469220176e-05, + "loss": 0.2141, + "step": 68060 + }, + { + "epoch": 2.628286806440403, + "grad_norm": 0.3906213641166687, + "learning_rate": 2.4783453672599975e-05, + "loss": 0.3083, + "step": 68070 + }, + { + "epoch": 2.62867292173443, + "grad_norm": 2.1290669441223145, + "learning_rate": 2.4757712652998185e-05, + "loss": 0.2155, + "step": 68080 + }, + { + "epoch": 2.6290590370284566, + "grad_norm": 1.0176451206207275, + "learning_rate": 2.4731971633396402e-05, + "loss": 0.1852, + "step": 68090 + }, + { + "epoch": 2.6294451523224835, + "grad_norm": 2.1066977977752686, + "learning_rate": 2.4706230613794615e-05, + "loss": 0.2177, + "step": 68100 + }, + { + "epoch": 2.6298312676165105, + "grad_norm": 2.267906427383423, + "learning_rate": 2.4680489594192825e-05, + "loss": 0.1359, + "step": 68110 + }, + { + "epoch": 2.630217382910537, + "grad_norm": 1.4150601625442505, + "learning_rate": 2.465474857459104e-05, + "loss": 0.1805, + "step": 68120 + }, + { + "epoch": 2.6306034982045636, + "grad_norm": 0.41347402334213257, + "learning_rate": 2.462900755498925e-05, + "loss": 0.095, + "step": 68130 + }, + { + "epoch": 2.6309896134985906, + "grad_norm": 1.3140255212783813, + "learning_rate": 2.4603266535387468e-05, + "loss": 0.0693, + "step": 68140 + }, + { + "epoch": 2.6313757287926176, + "grad_norm": 1.2731821537017822, + "learning_rate": 2.457752551578568e-05, + "loss": 0.2528, + "step": 68150 + }, + { + "epoch": 
2.631761844086644, + "grad_norm": 2.4566001892089844, + "learning_rate": 2.4551784496183895e-05, + "loss": 0.1169, + "step": 68160 + }, + { + "epoch": 2.632147959380671, + "grad_norm": 2.265305280685425, + "learning_rate": 2.4526043476582108e-05, + "loss": 0.154, + "step": 68170 + }, + { + "epoch": 2.632534074674698, + "grad_norm": 0.5196200609207153, + "learning_rate": 2.450030245698032e-05, + "loss": 0.1029, + "step": 68180 + }, + { + "epoch": 2.6329201899687247, + "grad_norm": 2.5574257373809814, + "learning_rate": 2.4474561437378535e-05, + "loss": 0.2491, + "step": 68190 + }, + { + "epoch": 2.6333063052627512, + "grad_norm": 0.40821412205696106, + "learning_rate": 2.4448820417776748e-05, + "loss": 0.1842, + "step": 68200 + }, + { + "epoch": 2.633692420556778, + "grad_norm": 0.9594093561172485, + "learning_rate": 2.4423079398174965e-05, + "loss": 0.2135, + "step": 68210 + }, + { + "epoch": 2.634078535850805, + "grad_norm": 0.3707121014595032, + "learning_rate": 2.4397338378573175e-05, + "loss": 0.1715, + "step": 68220 + }, + { + "epoch": 2.6344646511448317, + "grad_norm": 0.9233579039573669, + "learning_rate": 2.437159735897139e-05, + "loss": 0.3378, + "step": 68230 + }, + { + "epoch": 2.6348507664388587, + "grad_norm": 1.8800396919250488, + "learning_rate": 2.4345856339369605e-05, + "loss": 0.0902, + "step": 68240 + }, + { + "epoch": 2.6352368817328853, + "grad_norm": 1.0025197267532349, + "learning_rate": 2.4320115319767818e-05, + "loss": 0.2189, + "step": 68250 + }, + { + "epoch": 2.6356229970269123, + "grad_norm": 0.03650035336613655, + "learning_rate": 2.429437430016603e-05, + "loss": 0.2025, + "step": 68260 + }, + { + "epoch": 2.636009112320939, + "grad_norm": 0.20000745356082916, + "learning_rate": 2.4268633280564245e-05, + "loss": 0.1395, + "step": 68270 + }, + { + "epoch": 2.636395227614966, + "grad_norm": 0.7981158494949341, + "learning_rate": 2.4242892260962458e-05, + "loss": 0.0875, + "step": 68280 + }, + { + "epoch": 2.636781342908993, + "grad_norm": 1.6767163276672363, + "learning_rate": 2.421715124136067e-05, + "loss": 0.2558, + "step": 68290 + }, + { + "epoch": 2.6371674582030193, + "grad_norm": 2.023684024810791, + "learning_rate": 2.4191410221758885e-05, + "loss": 0.2657, + "step": 68300 + }, + { + "epoch": 2.6375535734970463, + "grad_norm": 1.0396549701690674, + "learning_rate": 2.4165669202157098e-05, + "loss": 0.1199, + "step": 68310 + }, + { + "epoch": 2.637939688791073, + "grad_norm": 0.8373544216156006, + "learning_rate": 2.4139928182555315e-05, + "loss": 0.1273, + "step": 68320 + }, + { + "epoch": 2.6383258040851, + "grad_norm": 1.0113970041275024, + "learning_rate": 2.4114187162953525e-05, + "loss": 0.1111, + "step": 68330 + }, + { + "epoch": 2.6387119193791264, + "grad_norm": 0.08151128888130188, + "learning_rate": 2.4088446143351738e-05, + "loss": 0.1949, + "step": 68340 + }, + { + "epoch": 2.6390980346731534, + "grad_norm": 1.629394292831421, + "learning_rate": 2.4062705123749955e-05, + "loss": 0.1135, + "step": 68350 + }, + { + "epoch": 2.6394841499671804, + "grad_norm": 0.11504169553518295, + "learning_rate": 2.4036964104148165e-05, + "loss": 0.1591, + "step": 68360 + }, + { + "epoch": 2.639870265261207, + "grad_norm": 1.0481438636779785, + "learning_rate": 2.401122308454638e-05, + "loss": 0.1151, + "step": 68370 + }, + { + "epoch": 2.640256380555234, + "grad_norm": 1.7637771368026733, + "learning_rate": 2.3985482064944595e-05, + "loss": 0.2499, + "step": 68380 + }, + { + "epoch": 2.6406424958492605, + "grad_norm": 0.43667173385620117, + "learning_rate": 
2.3959741045342808e-05, + "loss": 0.0512, + "step": 68390 + }, + { + "epoch": 2.6410286111432875, + "grad_norm": 0.9862222075462341, + "learning_rate": 2.393400002574102e-05, + "loss": 0.1, + "step": 68400 + }, + { + "epoch": 2.641414726437314, + "grad_norm": 0.996530294418335, + "learning_rate": 2.3908259006139234e-05, + "loss": 0.1679, + "step": 68410 + }, + { + "epoch": 2.641800841731341, + "grad_norm": 1.451374888420105, + "learning_rate": 2.3882517986537448e-05, + "loss": 0.2103, + "step": 68420 + }, + { + "epoch": 2.642186957025368, + "grad_norm": 0.2730307877063751, + "learning_rate": 2.385677696693566e-05, + "loss": 0.1522, + "step": 68430 + }, + { + "epoch": 2.6425730723193945, + "grad_norm": 0.06040269508957863, + "learning_rate": 2.3831035947333874e-05, + "loss": 0.1278, + "step": 68440 + }, + { + "epoch": 2.6429591876134215, + "grad_norm": 1.716790795326233, + "learning_rate": 2.3805294927732088e-05, + "loss": 0.2005, + "step": 68450 + }, + { + "epoch": 2.643345302907448, + "grad_norm": 1.4266630411148071, + "learning_rate": 2.3779553908130304e-05, + "loss": 0.084, + "step": 68460 + }, + { + "epoch": 2.643731418201475, + "grad_norm": 0.24637824296951294, + "learning_rate": 2.3753812888528514e-05, + "loss": 0.1616, + "step": 68470 + }, + { + "epoch": 2.6441175334955016, + "grad_norm": 0.1911696344614029, + "learning_rate": 2.372807186892673e-05, + "loss": 0.2609, + "step": 68480 + }, + { + "epoch": 2.6445036487895286, + "grad_norm": 0.5890191793441772, + "learning_rate": 2.3702330849324944e-05, + "loss": 0.0889, + "step": 68490 + }, + { + "epoch": 2.6448897640835556, + "grad_norm": 1.8803762197494507, + "learning_rate": 2.3676589829723154e-05, + "loss": 0.1945, + "step": 68500 + }, + { + "epoch": 2.645275879377582, + "grad_norm": 0.13470906019210815, + "learning_rate": 2.365084881012137e-05, + "loss": 0.2507, + "step": 68510 + }, + { + "epoch": 2.6456619946716087, + "grad_norm": 0.5220600962638855, + "learning_rate": 2.362510779051958e-05, + "loss": 0.1619, + "step": 68520 + }, + { + "epoch": 2.6460481099656357, + "grad_norm": 0.04346944019198418, + "learning_rate": 2.3599366770917798e-05, + "loss": 0.162, + "step": 68530 + }, + { + "epoch": 2.6464342252596627, + "grad_norm": 1.4460773468017578, + "learning_rate": 2.357362575131601e-05, + "loss": 0.2857, + "step": 68540 + }, + { + "epoch": 2.646820340553689, + "grad_norm": 1.9554592370986938, + "learning_rate": 2.3547884731714224e-05, + "loss": 0.1108, + "step": 68550 + }, + { + "epoch": 2.647206455847716, + "grad_norm": 0.596594512462616, + "learning_rate": 2.3522143712112438e-05, + "loss": 0.1234, + "step": 68560 + }, + { + "epoch": 2.647592571141743, + "grad_norm": 0.4433450996875763, + "learning_rate": 2.349640269251065e-05, + "loss": 0.2316, + "step": 68570 + }, + { + "epoch": 2.6479786864357697, + "grad_norm": 0.5461844801902771, + "learning_rate": 2.3470661672908864e-05, + "loss": 0.3235, + "step": 68580 + }, + { + "epoch": 2.6483648017297963, + "grad_norm": 0.3693888187408447, + "learning_rate": 2.3444920653307077e-05, + "loss": 0.2776, + "step": 68590 + }, + { + "epoch": 2.6487509170238233, + "grad_norm": 0.19041050970554352, + "learning_rate": 2.3419179633705294e-05, + "loss": 0.14, + "step": 68600 + }, + { + "epoch": 2.6491370323178502, + "grad_norm": 0.6536568999290466, + "learning_rate": 2.3393438614103504e-05, + "loss": 0.1418, + "step": 68610 + }, + { + "epoch": 2.649523147611877, + "grad_norm": 0.32842710614204407, + "learning_rate": 2.336769759450172e-05, + "loss": 0.1917, + "step": 68620 + }, + { + "epoch": 
2.649909262905904, + "grad_norm": 1.8785744905471802, + "learning_rate": 2.3341956574899934e-05, + "loss": 0.0986, + "step": 68630 + }, + { + "epoch": 2.6502953781999308, + "grad_norm": 1.0904650688171387, + "learning_rate": 2.3316215555298147e-05, + "loss": 0.1419, + "step": 68640 + }, + { + "epoch": 2.6506814934939573, + "grad_norm": 0.49260103702545166, + "learning_rate": 2.329047453569636e-05, + "loss": 0.1906, + "step": 68650 + }, + { + "epoch": 2.651067608787984, + "grad_norm": 0.6473127007484436, + "learning_rate": 2.326473351609457e-05, + "loss": 0.1585, + "step": 68660 + }, + { + "epoch": 2.651453724082011, + "grad_norm": 0.6146073937416077, + "learning_rate": 2.3238992496492787e-05, + "loss": 0.1425, + "step": 68670 + }, + { + "epoch": 2.651839839376038, + "grad_norm": 1.9327075481414795, + "learning_rate": 2.3213251476891e-05, + "loss": 0.1849, + "step": 68680 + }, + { + "epoch": 2.6522259546700644, + "grad_norm": 2.2953224182128906, + "learning_rate": 2.3187510457289214e-05, + "loss": 0.2707, + "step": 68690 + }, + { + "epoch": 2.6526120699640914, + "grad_norm": 0.1470266729593277, + "learning_rate": 2.3161769437687427e-05, + "loss": 0.1072, + "step": 68700 + }, + { + "epoch": 2.652998185258118, + "grad_norm": 0.8499718308448792, + "learning_rate": 2.3136028418085644e-05, + "loss": 0.1817, + "step": 68710 + }, + { + "epoch": 2.653384300552145, + "grad_norm": 0.09872210770845413, + "learning_rate": 2.3110287398483854e-05, + "loss": 0.0622, + "step": 68720 + }, + { + "epoch": 2.6537704158461715, + "grad_norm": 0.6662464737892151, + "learning_rate": 2.3084546378882067e-05, + "loss": 0.1477, + "step": 68730 + }, + { + "epoch": 2.6541565311401984, + "grad_norm": 0.008908030577003956, + "learning_rate": 2.3058805359280284e-05, + "loss": 0.1977, + "step": 68740 + }, + { + "epoch": 2.6545426464342254, + "grad_norm": 0.6658633947372437, + "learning_rate": 2.3033064339678494e-05, + "loss": 0.1416, + "step": 68750 + }, + { + "epoch": 2.654928761728252, + "grad_norm": 2.4253549575805664, + "learning_rate": 2.300732332007671e-05, + "loss": 0.223, + "step": 68760 + }, + { + "epoch": 2.655314877022279, + "grad_norm": 0.08092183619737625, + "learning_rate": 2.2981582300474924e-05, + "loss": 0.1287, + "step": 68770 + }, + { + "epoch": 2.6557009923163055, + "grad_norm": 1.8364213705062866, + "learning_rate": 2.2955841280873137e-05, + "loss": 0.1968, + "step": 68780 + }, + { + "epoch": 2.6560871076103325, + "grad_norm": 0.2436826527118683, + "learning_rate": 2.293010026127135e-05, + "loss": 0.1102, + "step": 68790 + }, + { + "epoch": 2.656473222904359, + "grad_norm": 1.2653074264526367, + "learning_rate": 2.2904359241669564e-05, + "loss": 0.2286, + "step": 68800 + }, + { + "epoch": 2.656859338198386, + "grad_norm": 0.14631232619285583, + "learning_rate": 2.2878618222067777e-05, + "loss": 0.1099, + "step": 68810 + }, + { + "epoch": 2.657245453492413, + "grad_norm": 0.02367425337433815, + "learning_rate": 2.285287720246599e-05, + "loss": 0.1272, + "step": 68820 + }, + { + "epoch": 2.6576315687864396, + "grad_norm": 0.3960202932357788, + "learning_rate": 2.2827136182864204e-05, + "loss": 0.1486, + "step": 68830 + }, + { + "epoch": 2.6580176840804666, + "grad_norm": 0.7019187808036804, + "learning_rate": 2.2801395163262417e-05, + "loss": 0.137, + "step": 68840 + }, + { + "epoch": 2.658403799374493, + "grad_norm": 1.1136587858200073, + "learning_rate": 2.2775654143660634e-05, + "loss": 0.1244, + "step": 68850 + }, + { + "epoch": 2.65878991466852, + "grad_norm": 0.8158296942710876, + "learning_rate": 
2.2749913124058844e-05, + "loss": 0.0528, + "step": 68860 + }, + { + "epoch": 2.6591760299625467, + "grad_norm": 2.001655101776123, + "learning_rate": 2.272417210445706e-05, + "loss": 0.0705, + "step": 68870 + }, + { + "epoch": 2.6595621452565736, + "grad_norm": 0.2512793242931366, + "learning_rate": 2.2698431084855274e-05, + "loss": 0.0891, + "step": 68880 + }, + { + "epoch": 2.6599482605506006, + "grad_norm": 2.8973586559295654, + "learning_rate": 2.2672690065253484e-05, + "loss": 0.0921, + "step": 68890 + }, + { + "epoch": 2.660334375844627, + "grad_norm": 2.393480062484741, + "learning_rate": 2.26469490456517e-05, + "loss": 0.2726, + "step": 68900 + }, + { + "epoch": 2.660720491138654, + "grad_norm": 3.534479856491089, + "learning_rate": 2.2621208026049914e-05, + "loss": 0.2297, + "step": 68910 + }, + { + "epoch": 2.6611066064326807, + "grad_norm": 1.4901084899902344, + "learning_rate": 2.2595467006448127e-05, + "loss": 0.1104, + "step": 68920 + }, + { + "epoch": 2.6614927217267077, + "grad_norm": 1.3615870475769043, + "learning_rate": 2.256972598684634e-05, + "loss": 0.2267, + "step": 68930 + }, + { + "epoch": 2.6618788370207342, + "grad_norm": 0.26768797636032104, + "learning_rate": 2.2543984967244554e-05, + "loss": 0.0842, + "step": 68940 + }, + { + "epoch": 2.6622649523147612, + "grad_norm": 0.5720809102058411, + "learning_rate": 2.2518243947642767e-05, + "loss": 0.0955, + "step": 68950 + }, + { + "epoch": 2.6626510676087882, + "grad_norm": 0.8448322415351868, + "learning_rate": 2.249250292804098e-05, + "loss": 0.1251, + "step": 68960 + }, + { + "epoch": 2.6630371829028148, + "grad_norm": 2.9490509033203125, + "learning_rate": 2.2466761908439193e-05, + "loss": 0.1593, + "step": 68970 + }, + { + "epoch": 2.6634232981968413, + "grad_norm": 1.1557024717330933, + "learning_rate": 2.2441020888837407e-05, + "loss": 0.1336, + "step": 68980 + }, + { + "epoch": 2.6638094134908683, + "grad_norm": 2.981727361679077, + "learning_rate": 2.2415279869235623e-05, + "loss": 0.1487, + "step": 68990 + }, + { + "epoch": 2.6641955287848953, + "grad_norm": 0.5381894707679749, + "learning_rate": 2.2389538849633833e-05, + "loss": 0.1814, + "step": 69000 + }, + { + "epoch": 2.664581644078922, + "grad_norm": 0.833191990852356, + "learning_rate": 2.236379783003205e-05, + "loss": 0.1515, + "step": 69010 + }, + { + "epoch": 2.664967759372949, + "grad_norm": 0.1587102711200714, + "learning_rate": 2.2338056810430263e-05, + "loss": 0.0568, + "step": 69020 + }, + { + "epoch": 2.665353874666976, + "grad_norm": 1.5014970302581787, + "learning_rate": 2.2312315790828477e-05, + "loss": 0.1992, + "step": 69030 + }, + { + "epoch": 2.6657399899610024, + "grad_norm": 0.060449715703725815, + "learning_rate": 2.228657477122669e-05, + "loss": 0.1523, + "step": 69040 + }, + { + "epoch": 2.666126105255029, + "grad_norm": 1.7199037075042725, + "learning_rate": 2.22608337516249e-05, + "loss": 0.1056, + "step": 69050 + }, + { + "epoch": 2.666512220549056, + "grad_norm": 0.430899441242218, + "learning_rate": 2.2235092732023117e-05, + "loss": 0.1376, + "step": 69060 + }, + { + "epoch": 2.666898335843083, + "grad_norm": 0.16108714044094086, + "learning_rate": 2.220935171242133e-05, + "loss": 0.1109, + "step": 69070 + }, + { + "epoch": 2.6672844511371094, + "grad_norm": 3.1773228645324707, + "learning_rate": 2.2183610692819543e-05, + "loss": 0.3158, + "step": 69080 + }, + { + "epoch": 2.6676705664311364, + "grad_norm": 1.568304419517517, + "learning_rate": 2.2157869673217757e-05, + "loss": 0.2408, + "step": 69090 + }, + { + "epoch": 
2.668056681725163, + "grad_norm": 1.8924100399017334, + "learning_rate": 2.2132128653615973e-05, + "loss": 0.2268, + "step": 69100 + }, + { + "epoch": 2.66844279701919, + "grad_norm": 2.1422247886657715, + "learning_rate": 2.2106387634014183e-05, + "loss": 0.1685, + "step": 69110 + }, + { + "epoch": 2.6688289123132165, + "grad_norm": 0.727570652961731, + "learning_rate": 2.2080646614412396e-05, + "loss": 0.3019, + "step": 69120 + }, + { + "epoch": 2.6692150276072435, + "grad_norm": 1.1987897157669067, + "learning_rate": 2.2054905594810613e-05, + "loss": 0.1511, + "step": 69130 + }, + { + "epoch": 2.6696011429012705, + "grad_norm": 2.25412654876709, + "learning_rate": 2.2029164575208823e-05, + "loss": 0.1538, + "step": 69140 + }, + { + "epoch": 2.669987258195297, + "grad_norm": 0.4829877018928528, + "learning_rate": 2.200342355560704e-05, + "loss": 0.2204, + "step": 69150 + }, + { + "epoch": 2.670373373489324, + "grad_norm": 0.8249949812889099, + "learning_rate": 2.1977682536005253e-05, + "loss": 0.1574, + "step": 69160 + }, + { + "epoch": 2.6707594887833506, + "grad_norm": 0.47408896684646606, + "learning_rate": 2.1951941516403466e-05, + "loss": 0.131, + "step": 69170 + }, + { + "epoch": 2.6711456040773776, + "grad_norm": 3.192263126373291, + "learning_rate": 2.192620049680168e-05, + "loss": 0.1284, + "step": 69180 + }, + { + "epoch": 2.671531719371404, + "grad_norm": 1.6318609714508057, + "learning_rate": 2.1900459477199893e-05, + "loss": 0.182, + "step": 69190 + }, + { + "epoch": 2.671917834665431, + "grad_norm": 2.031730890274048, + "learning_rate": 2.1874718457598106e-05, + "loss": 0.1774, + "step": 69200 + }, + { + "epoch": 2.672303949959458, + "grad_norm": 0.053225722163915634, + "learning_rate": 2.184897743799632e-05, + "loss": 0.2002, + "step": 69210 + }, + { + "epoch": 2.6726900652534846, + "grad_norm": 1.1087912321090698, + "learning_rate": 2.1823236418394533e-05, + "loss": 0.3255, + "step": 69220 + }, + { + "epoch": 2.6730761805475116, + "grad_norm": 1.7376277446746826, + "learning_rate": 2.1797495398792746e-05, + "loss": 0.1705, + "step": 69230 + }, + { + "epoch": 2.673462295841538, + "grad_norm": 0.7733955383300781, + "learning_rate": 2.1771754379190963e-05, + "loss": 0.2284, + "step": 69240 + }, + { + "epoch": 2.673848411135565, + "grad_norm": 2.198826313018799, + "learning_rate": 2.1746013359589173e-05, + "loss": 0.2463, + "step": 69250 + }, + { + "epoch": 2.6742345264295917, + "grad_norm": 0.9791239500045776, + "learning_rate": 2.172027233998739e-05, + "loss": 0.365, + "step": 69260 + }, + { + "epoch": 2.6746206417236187, + "grad_norm": 1.0145782232284546, + "learning_rate": 2.1694531320385603e-05, + "loss": 0.1168, + "step": 69270 + }, + { + "epoch": 2.6750067570176457, + "grad_norm": 1.506508708000183, + "learning_rate": 2.1668790300783813e-05, + "loss": 0.1254, + "step": 69280 + }, + { + "epoch": 2.675392872311672, + "grad_norm": 0.9859924912452698, + "learning_rate": 2.164304928118203e-05, + "loss": 0.2124, + "step": 69290 + }, + { + "epoch": 2.675778987605699, + "grad_norm": 1.4127247333526611, + "learning_rate": 2.1617308261580243e-05, + "loss": 0.1644, + "step": 69300 + }, + { + "epoch": 2.6761651028997258, + "grad_norm": 0.8753447532653809, + "learning_rate": 2.1591567241978456e-05, + "loss": 0.1948, + "step": 69310 + }, + { + "epoch": 2.6765512181937527, + "grad_norm": 0.18299230933189392, + "learning_rate": 2.156582622237667e-05, + "loss": 0.1042, + "step": 69320 + }, + { + "epoch": 2.6769373334877793, + "grad_norm": 0.9009674191474915, + "learning_rate": 
2.1540085202774883e-05, + "loss": 0.0899, + "step": 69330 + }, + { + "epoch": 2.6773234487818063, + "grad_norm": 0.8263937830924988, + "learning_rate": 2.1514344183173096e-05, + "loss": 0.0843, + "step": 69340 + }, + { + "epoch": 2.6777095640758333, + "grad_norm": 0.40176376700401306, + "learning_rate": 2.148860316357131e-05, + "loss": 0.1645, + "step": 69350 + }, + { + "epoch": 2.67809567936986, + "grad_norm": 1.2371177673339844, + "learning_rate": 2.1462862143969523e-05, + "loss": 0.1491, + "step": 69360 + }, + { + "epoch": 2.6784817946638864, + "grad_norm": 0.6874446272850037, + "learning_rate": 2.1437121124367736e-05, + "loss": 0.1887, + "step": 69370 + }, + { + "epoch": 2.6788679099579134, + "grad_norm": 0.08807168155908585, + "learning_rate": 2.1411380104765953e-05, + "loss": 0.1049, + "step": 69380 + }, + { + "epoch": 2.6792540252519403, + "grad_norm": 2.0971579551696777, + "learning_rate": 2.1385639085164163e-05, + "loss": 0.135, + "step": 69390 + }, + { + "epoch": 2.679640140545967, + "grad_norm": 0.9297891855239868, + "learning_rate": 2.135989806556238e-05, + "loss": 0.2356, + "step": 69400 + }, + { + "epoch": 2.680026255839994, + "grad_norm": 2.100465774536133, + "learning_rate": 2.1334157045960593e-05, + "loss": 0.225, + "step": 69410 + }, + { + "epoch": 2.680412371134021, + "grad_norm": 0.14785470068454742, + "learning_rate": 2.1308416026358806e-05, + "loss": 0.1806, + "step": 69420 + }, + { + "epoch": 2.6807984864280474, + "grad_norm": 0.03883717209100723, + "learning_rate": 2.128267500675702e-05, + "loss": 0.0575, + "step": 69430 + }, + { + "epoch": 2.681184601722074, + "grad_norm": 0.515643835067749, + "learning_rate": 2.125693398715523e-05, + "loss": 0.1095, + "step": 69440 + }, + { + "epoch": 2.681570717016101, + "grad_norm": 0.21258410811424255, + "learning_rate": 2.1231192967553446e-05, + "loss": 0.1496, + "step": 69450 + }, + { + "epoch": 2.681956832310128, + "grad_norm": 1.14195716381073, + "learning_rate": 2.120545194795166e-05, + "loss": 0.182, + "step": 69460 + }, + { + "epoch": 2.6823429476041545, + "grad_norm": 0.43386051058769226, + "learning_rate": 2.1179710928349873e-05, + "loss": 0.2241, + "step": 69470 + }, + { + "epoch": 2.6827290628981815, + "grad_norm": 0.4654422700405121, + "learning_rate": 2.1153969908748086e-05, + "loss": 0.2264, + "step": 69480 + }, + { + "epoch": 2.6831151781922085, + "grad_norm": 0.8086020350456238, + "learning_rate": 2.1128228889146303e-05, + "loss": 0.1634, + "step": 69490 + }, + { + "epoch": 2.683501293486235, + "grad_norm": 0.03701169416308403, + "learning_rate": 2.1102487869544512e-05, + "loss": 0.1117, + "step": 69500 + }, + { + "epoch": 2.6838874087802616, + "grad_norm": 0.9567661285400391, + "learning_rate": 2.1076746849942726e-05, + "loss": 0.113, + "step": 69510 + }, + { + "epoch": 2.6842735240742885, + "grad_norm": 1.7322033643722534, + "learning_rate": 2.1051005830340943e-05, + "loss": 0.1443, + "step": 69520 + }, + { + "epoch": 2.6846596393683155, + "grad_norm": 1.8574343919754028, + "learning_rate": 2.1025264810739152e-05, + "loss": 0.0919, + "step": 69530 + }, + { + "epoch": 2.685045754662342, + "grad_norm": 0.0813397541642189, + "learning_rate": 2.099952379113737e-05, + "loss": 0.061, + "step": 69540 + }, + { + "epoch": 2.685431869956369, + "grad_norm": 0.09124821424484253, + "learning_rate": 2.0973782771535582e-05, + "loss": 0.0703, + "step": 69550 + }, + { + "epoch": 2.6858179852503956, + "grad_norm": 1.2731401920318604, + "learning_rate": 2.0948041751933796e-05, + "loss": 0.1788, + "step": 69560 + }, + { + 
"epoch": 2.6862041005444226, + "grad_norm": 0.38222697377204895, + "learning_rate": 2.092230073233201e-05, + "loss": 0.22, + "step": 69570 + }, + { + "epoch": 2.686590215838449, + "grad_norm": 0.7840344905853271, + "learning_rate": 2.0896559712730222e-05, + "loss": 0.0511, + "step": 69580 + }, + { + "epoch": 2.686976331132476, + "grad_norm": 0.5814514756202698, + "learning_rate": 2.0870818693128436e-05, + "loss": 0.1282, + "step": 69590 + }, + { + "epoch": 2.687362446426503, + "grad_norm": 2.049823045730591, + "learning_rate": 2.084507767352665e-05, + "loss": 0.1928, + "step": 69600 + }, + { + "epoch": 2.6877485617205297, + "grad_norm": 1.726441502571106, + "learning_rate": 2.0819336653924862e-05, + "loss": 0.2271, + "step": 69610 + }, + { + "epoch": 2.6881346770145567, + "grad_norm": 0.6086135506629944, + "learning_rate": 2.0793595634323076e-05, + "loss": 0.0826, + "step": 69620 + }, + { + "epoch": 2.688520792308583, + "grad_norm": 0.025057394057512283, + "learning_rate": 2.0767854614721292e-05, + "loss": 0.0945, + "step": 69630 + }, + { + "epoch": 2.68890690760261, + "grad_norm": 0.5811958312988281, + "learning_rate": 2.0742113595119502e-05, + "loss": 0.115, + "step": 69640 + }, + { + "epoch": 2.6892930228966367, + "grad_norm": 1.4226329326629639, + "learning_rate": 2.071637257551772e-05, + "loss": 0.1394, + "step": 69650 + }, + { + "epoch": 2.6896791381906637, + "grad_norm": 0.06111827492713928, + "learning_rate": 2.0690631555915932e-05, + "loss": 0.0785, + "step": 69660 + }, + { + "epoch": 2.6900652534846907, + "grad_norm": 1.7739264965057373, + "learning_rate": 2.0664890536314142e-05, + "loss": 0.0966, + "step": 69670 + }, + { + "epoch": 2.6904513687787173, + "grad_norm": 0.12530933320522308, + "learning_rate": 2.063914951671236e-05, + "loss": 0.1142, + "step": 69680 + }, + { + "epoch": 2.6908374840727443, + "grad_norm": 0.44947731494903564, + "learning_rate": 2.0613408497110572e-05, + "loss": 0.1069, + "step": 69690 + }, + { + "epoch": 2.691223599366771, + "grad_norm": 1.1874277591705322, + "learning_rate": 2.0587667477508786e-05, + "loss": 0.2498, + "step": 69700 + }, + { + "epoch": 2.691609714660798, + "grad_norm": 0.2770039141178131, + "learning_rate": 2.0561926457907e-05, + "loss": 0.0918, + "step": 69710 + }, + { + "epoch": 2.6919958299548243, + "grad_norm": 0.6367407441139221, + "learning_rate": 2.0536185438305212e-05, + "loss": 0.1444, + "step": 69720 + }, + { + "epoch": 2.6923819452488513, + "grad_norm": 4.309720993041992, + "learning_rate": 2.0510444418703425e-05, + "loss": 0.1751, + "step": 69730 + }, + { + "epoch": 2.6927680605428783, + "grad_norm": 0.19197171926498413, + "learning_rate": 2.048470339910164e-05, + "loss": 0.3638, + "step": 69740 + }, + { + "epoch": 2.693154175836905, + "grad_norm": 1.1299902200698853, + "learning_rate": 2.0458962379499852e-05, + "loss": 0.2047, + "step": 69750 + }, + { + "epoch": 2.693540291130932, + "grad_norm": 2.2639973163604736, + "learning_rate": 2.0433221359898065e-05, + "loss": 0.1683, + "step": 69760 + }, + { + "epoch": 2.6939264064249584, + "grad_norm": 0.7595259547233582, + "learning_rate": 2.0407480340296282e-05, + "loss": 0.1393, + "step": 69770 + }, + { + "epoch": 2.6943125217189854, + "grad_norm": 0.1113772988319397, + "learning_rate": 2.0381739320694492e-05, + "loss": 0.1775, + "step": 69780 + }, + { + "epoch": 2.694698637013012, + "grad_norm": 0.8597696423530579, + "learning_rate": 2.035599830109271e-05, + "loss": 0.092, + "step": 69790 + }, + { + "epoch": 2.695084752307039, + "grad_norm": 0.9622846841812134, + 
"learning_rate": 2.0330257281490922e-05, + "loss": 0.1407, + "step": 69800 + }, + { + "epoch": 2.695470867601066, + "grad_norm": 1.840627908706665, + "learning_rate": 2.0304516261889135e-05, + "loss": 0.1769, + "step": 69810 + }, + { + "epoch": 2.6958569828950925, + "grad_norm": 0.8552238941192627, + "learning_rate": 2.027877524228735e-05, + "loss": 0.263, + "step": 69820 + }, + { + "epoch": 2.696243098189119, + "grad_norm": 0.16129668056964874, + "learning_rate": 2.025303422268556e-05, + "loss": 0.188, + "step": 69830 + }, + { + "epoch": 2.696629213483146, + "grad_norm": 0.46955424547195435, + "learning_rate": 2.0227293203083775e-05, + "loss": 0.0623, + "step": 69840 + }, + { + "epoch": 2.697015328777173, + "grad_norm": 1.0460457801818848, + "learning_rate": 2.020155218348199e-05, + "loss": 0.1932, + "step": 69850 + }, + { + "epoch": 2.6974014440711995, + "grad_norm": 4.531238555908203, + "learning_rate": 2.0175811163880202e-05, + "loss": 0.323, + "step": 69860 + }, + { + "epoch": 2.6977875593652265, + "grad_norm": 1.188908338546753, + "learning_rate": 2.0150070144278415e-05, + "loss": 0.1389, + "step": 69870 + }, + { + "epoch": 2.6981736746592535, + "grad_norm": 0.3120017349720001, + "learning_rate": 2.0124329124676632e-05, + "loss": 0.1732, + "step": 69880 + }, + { + "epoch": 2.69855978995328, + "grad_norm": 1.2681177854537964, + "learning_rate": 2.0098588105074842e-05, + "loss": 0.1452, + "step": 69890 + }, + { + "epoch": 2.6989459052473066, + "grad_norm": 0.07245191186666489, + "learning_rate": 2.0072847085473055e-05, + "loss": 0.0982, + "step": 69900 + }, + { + "epoch": 2.6993320205413336, + "grad_norm": 1.7727508544921875, + "learning_rate": 2.0047106065871272e-05, + "loss": 0.0806, + "step": 69910 + }, + { + "epoch": 2.6997181358353606, + "grad_norm": 0.6012092232704163, + "learning_rate": 2.0021365046269482e-05, + "loss": 0.1377, + "step": 69920 + }, + { + "epoch": 2.700104251129387, + "grad_norm": 0.6156259775161743, + "learning_rate": 1.99956240266677e-05, + "loss": 0.1788, + "step": 69930 + }, + { + "epoch": 2.700490366423414, + "grad_norm": 1.6917505264282227, + "learning_rate": 1.9969883007065912e-05, + "loss": 0.1588, + "step": 69940 + }, + { + "epoch": 2.700876481717441, + "grad_norm": 2.0406925678253174, + "learning_rate": 1.9944141987464125e-05, + "loss": 0.1541, + "step": 69950 + }, + { + "epoch": 2.7012625970114676, + "grad_norm": 3.067919969558716, + "learning_rate": 1.991840096786234e-05, + "loss": 0.1052, + "step": 69960 + }, + { + "epoch": 2.701648712305494, + "grad_norm": 0.7679221034049988, + "learning_rate": 1.989265994826055e-05, + "loss": 0.2131, + "step": 69970 + }, + { + "epoch": 2.702034827599521, + "grad_norm": 0.9475175738334656, + "learning_rate": 1.9866918928658765e-05, + "loss": 0.1016, + "step": 69980 + }, + { + "epoch": 2.702420942893548, + "grad_norm": 1.2485641241073608, + "learning_rate": 1.984117790905698e-05, + "loss": 0.0922, + "step": 69990 + }, + { + "epoch": 2.7028070581875747, + "grad_norm": 0.9329742789268494, + "learning_rate": 1.981543688945519e-05, + "loss": 0.3189, + "step": 70000 + }, + { + "epoch": 2.7031931734816017, + "grad_norm": 0.6140137314796448, + "learning_rate": 1.9789695869853405e-05, + "loss": 0.1054, + "step": 70010 + }, + { + "epoch": 2.7035792887756283, + "grad_norm": 0.938637375831604, + "learning_rate": 1.976395485025162e-05, + "loss": 0.1276, + "step": 70020 + }, + { + "epoch": 2.7039654040696552, + "grad_norm": 1.2453144788742065, + "learning_rate": 1.973821383064983e-05, + "loss": 0.2376, + "step": 70030 + }, + { + 
"epoch": 2.704351519363682, + "grad_norm": 0.7409077286720276, + "learning_rate": 1.9712472811048048e-05, + "loss": 0.1269, + "step": 70040 + }, + { + "epoch": 2.704737634657709, + "grad_norm": 1.8768031597137451, + "learning_rate": 1.968673179144626e-05, + "loss": 0.235, + "step": 70050 + }, + { + "epoch": 2.7051237499517358, + "grad_norm": 0.23621875047683716, + "learning_rate": 1.966099077184447e-05, + "loss": 0.1042, + "step": 70060 + }, + { + "epoch": 2.7055098652457623, + "grad_norm": 0.2492808699607849, + "learning_rate": 1.9635249752242688e-05, + "loss": 0.2042, + "step": 70070 + }, + { + "epoch": 2.7058959805397893, + "grad_norm": 1.5175273418426514, + "learning_rate": 1.96095087326409e-05, + "loss": 0.2068, + "step": 70080 + }, + { + "epoch": 2.706282095833816, + "grad_norm": 0.9314035773277283, + "learning_rate": 1.9583767713039115e-05, + "loss": 0.1587, + "step": 70090 + }, + { + "epoch": 2.706668211127843, + "grad_norm": 1.7147942781448364, + "learning_rate": 1.9558026693437328e-05, + "loss": 0.1391, + "step": 70100 + }, + { + "epoch": 2.7070543264218694, + "grad_norm": 0.36276572942733765, + "learning_rate": 1.953228567383554e-05, + "loss": 0.1275, + "step": 70110 + }, + { + "epoch": 2.7074404417158964, + "grad_norm": 0.8741244077682495, + "learning_rate": 1.9506544654233755e-05, + "loss": 0.3746, + "step": 70120 + }, + { + "epoch": 2.7078265570099234, + "grad_norm": 1.3060029745101929, + "learning_rate": 1.9480803634631968e-05, + "loss": 0.1076, + "step": 70130 + }, + { + "epoch": 2.70821267230395, + "grad_norm": 0.10746710747480392, + "learning_rate": 1.945506261503018e-05, + "loss": 0.1494, + "step": 70140 + }, + { + "epoch": 2.708598787597977, + "grad_norm": 1.486136555671692, + "learning_rate": 1.9429321595428395e-05, + "loss": 0.1223, + "step": 70150 + }, + { + "epoch": 2.7089849028920034, + "grad_norm": 1.184791088104248, + "learning_rate": 1.940358057582661e-05, + "loss": 0.1055, + "step": 70160 + }, + { + "epoch": 2.7093710181860304, + "grad_norm": 0.10033337771892548, + "learning_rate": 1.937783955622482e-05, + "loss": 0.083, + "step": 70170 + }, + { + "epoch": 2.709757133480057, + "grad_norm": 1.3200103044509888, + "learning_rate": 1.9352098536623038e-05, + "loss": 0.2181, + "step": 70180 + }, + { + "epoch": 2.710143248774084, + "grad_norm": 0.013393727131187916, + "learning_rate": 1.932635751702125e-05, + "loss": 0.2415, + "step": 70190 + }, + { + "epoch": 2.710529364068111, + "grad_norm": 0.5986078381538391, + "learning_rate": 1.9300616497419465e-05, + "loss": 0.3203, + "step": 70200 + }, + { + "epoch": 2.7109154793621375, + "grad_norm": 0.18333138525485992, + "learning_rate": 1.9274875477817678e-05, + "loss": 0.1043, + "step": 70210 + }, + { + "epoch": 2.7113015946561645, + "grad_norm": 1.0568320751190186, + "learning_rate": 1.924913445821589e-05, + "loss": 0.1491, + "step": 70220 + }, + { + "epoch": 2.711687709950191, + "grad_norm": 4.519015312194824, + "learning_rate": 1.9223393438614105e-05, + "loss": 0.2732, + "step": 70230 + }, + { + "epoch": 2.712073825244218, + "grad_norm": 0.753288209438324, + "learning_rate": 1.9197652419012318e-05, + "loss": 0.1793, + "step": 70240 + }, + { + "epoch": 2.7124599405382446, + "grad_norm": 0.8923632502555847, + "learning_rate": 1.917191139941053e-05, + "loss": 0.3293, + "step": 70250 + }, + { + "epoch": 2.7128460558322716, + "grad_norm": 1.5100219249725342, + "learning_rate": 1.9146170379808744e-05, + "loss": 0.2252, + "step": 70260 + }, + { + "epoch": 2.7132321711262986, + "grad_norm": 3.496548891067505, + 
"learning_rate": 1.912042936020696e-05, + "loss": 0.3877, + "step": 70270 + }, + { + "epoch": 2.713618286420325, + "grad_norm": 1.0033410787582397, + "learning_rate": 1.909468834060517e-05, + "loss": 0.1854, + "step": 70280 + }, + { + "epoch": 2.7140044017143516, + "grad_norm": 2.1113343238830566, + "learning_rate": 1.9068947321003384e-05, + "loss": 0.2561, + "step": 70290 + }, + { + "epoch": 2.7143905170083786, + "grad_norm": 3.4603283405303955, + "learning_rate": 1.90432063014016e-05, + "loss": 0.2673, + "step": 70300 + }, + { + "epoch": 2.7147766323024056, + "grad_norm": 0.4921800196170807, + "learning_rate": 1.901746528179981e-05, + "loss": 0.1819, + "step": 70310 + }, + { + "epoch": 2.715162747596432, + "grad_norm": 0.12639844417572021, + "learning_rate": 1.8991724262198028e-05, + "loss": 0.1302, + "step": 70320 + }, + { + "epoch": 2.715548862890459, + "grad_norm": 0.988345205783844, + "learning_rate": 1.896598324259624e-05, + "loss": 0.2827, + "step": 70330 + }, + { + "epoch": 2.715934978184486, + "grad_norm": 1.432824730873108, + "learning_rate": 1.8940242222994454e-05, + "loss": 0.1046, + "step": 70340 + }, + { + "epoch": 2.7163210934785127, + "grad_norm": 0.4591884911060333, + "learning_rate": 1.8914501203392668e-05, + "loss": 0.2663, + "step": 70350 + }, + { + "epoch": 2.7167072087725392, + "grad_norm": 0.23119209706783295, + "learning_rate": 1.888876018379088e-05, + "loss": 0.3555, + "step": 70360 + }, + { + "epoch": 2.7170933240665662, + "grad_norm": 1.9221980571746826, + "learning_rate": 1.8863019164189094e-05, + "loss": 0.1279, + "step": 70370 + }, + { + "epoch": 2.717479439360593, + "grad_norm": 0.09880539029836655, + "learning_rate": 1.8837278144587308e-05, + "loss": 0.0905, + "step": 70380 + }, + { + "epoch": 2.7178655546546198, + "grad_norm": 0.16270965337753296, + "learning_rate": 1.881153712498552e-05, + "loss": 0.1619, + "step": 70390 + }, + { + "epoch": 2.7182516699486468, + "grad_norm": 2.006127119064331, + "learning_rate": 1.8785796105383734e-05, + "loss": 0.107, + "step": 70400 + }, + { + "epoch": 2.7186377852426733, + "grad_norm": 3.8377106189727783, + "learning_rate": 1.876005508578195e-05, + "loss": 0.1109, + "step": 70410 + }, + { + "epoch": 2.7190239005367003, + "grad_norm": 0.5417147278785706, + "learning_rate": 1.873431406618016e-05, + "loss": 0.0836, + "step": 70420 + }, + { + "epoch": 2.719410015830727, + "grad_norm": 0.2515392601490021, + "learning_rate": 1.8708573046578378e-05, + "loss": 0.1107, + "step": 70430 + }, + { + "epoch": 2.719796131124754, + "grad_norm": 1.813265323638916, + "learning_rate": 1.868283202697659e-05, + "loss": 0.125, + "step": 70440 + }, + { + "epoch": 2.720182246418781, + "grad_norm": 1.873964548110962, + "learning_rate": 1.86570910073748e-05, + "loss": 0.1649, + "step": 70450 + }, + { + "epoch": 2.7205683617128074, + "grad_norm": 0.23251821100711823, + "learning_rate": 1.8631349987773017e-05, + "loss": 0.1394, + "step": 70460 + }, + { + "epoch": 2.7209544770068343, + "grad_norm": 3.282196521759033, + "learning_rate": 1.860560896817123e-05, + "loss": 0.3037, + "step": 70470 + }, + { + "epoch": 2.721340592300861, + "grad_norm": 1.481994867324829, + "learning_rate": 1.8579867948569444e-05, + "loss": 0.194, + "step": 70480 + }, + { + "epoch": 2.721726707594888, + "grad_norm": 2.3261420726776123, + "learning_rate": 1.8554126928967657e-05, + "loss": 0.2485, + "step": 70490 + }, + { + "epoch": 2.7221128228889144, + "grad_norm": 0.7159029841423035, + "learning_rate": 1.852838590936587e-05, + "loss": 0.0882, + "step": 70500 + }, + { + 
"epoch": 2.7224989381829414, + "grad_norm": 1.13942289352417, + "learning_rate": 1.8502644889764084e-05, + "loss": 0.2973, + "step": 70510 + }, + { + "epoch": 2.7228850534769684, + "grad_norm": 0.5620355606079102, + "learning_rate": 1.8476903870162297e-05, + "loss": 0.1388, + "step": 70520 + }, + { + "epoch": 2.723271168770995, + "grad_norm": 0.3864080607891083, + "learning_rate": 1.845116285056051e-05, + "loss": 0.3104, + "step": 70530 + }, + { + "epoch": 2.723657284065022, + "grad_norm": 0.19849848747253418, + "learning_rate": 1.8425421830958724e-05, + "loss": 0.1077, + "step": 70540 + }, + { + "epoch": 2.7240433993590485, + "grad_norm": 0.5163066983222961, + "learning_rate": 1.839968081135694e-05, + "loss": 0.1412, + "step": 70550 + }, + { + "epoch": 2.7244295146530755, + "grad_norm": 2.6426632404327393, + "learning_rate": 1.837393979175515e-05, + "loss": 0.1635, + "step": 70560 + }, + { + "epoch": 2.724815629947102, + "grad_norm": 0.09032654017210007, + "learning_rate": 1.8348198772153367e-05, + "loss": 0.1136, + "step": 70570 + }, + { + "epoch": 2.725201745241129, + "grad_norm": 0.20428933203220367, + "learning_rate": 1.832245775255158e-05, + "loss": 0.0685, + "step": 70580 + }, + { + "epoch": 2.725587860535156, + "grad_norm": 0.16030457615852356, + "learning_rate": 1.8296716732949794e-05, + "loss": 0.236, + "step": 70590 + }, + { + "epoch": 2.7259739758291825, + "grad_norm": 0.4269642233848572, + "learning_rate": 1.8270975713348007e-05, + "loss": 0.1099, + "step": 70600 + }, + { + "epoch": 2.7263600911232095, + "grad_norm": 0.821434736251831, + "learning_rate": 1.824523469374622e-05, + "loss": 0.0975, + "step": 70610 + }, + { + "epoch": 2.726746206417236, + "grad_norm": 0.6720656156539917, + "learning_rate": 1.8219493674144434e-05, + "loss": 0.286, + "step": 70620 + }, + { + "epoch": 2.727132321711263, + "grad_norm": 1.6225451231002808, + "learning_rate": 1.8193752654542647e-05, + "loss": 0.1615, + "step": 70630 + }, + { + "epoch": 2.7275184370052896, + "grad_norm": 2.2176315784454346, + "learning_rate": 1.816801163494086e-05, + "loss": 0.2519, + "step": 70640 + }, + { + "epoch": 2.7279045522993166, + "grad_norm": 0.7203749418258667, + "learning_rate": 1.8142270615339074e-05, + "loss": 0.0635, + "step": 70650 + }, + { + "epoch": 2.7282906675933436, + "grad_norm": 1.0711476802825928, + "learning_rate": 1.8116529595737287e-05, + "loss": 0.1388, + "step": 70660 + }, + { + "epoch": 2.72867678288737, + "grad_norm": 0.3595193922519684, + "learning_rate": 1.80907885761355e-05, + "loss": 0.3019, + "step": 70670 + }, + { + "epoch": 2.7290628981813967, + "grad_norm": 2.369717597961426, + "learning_rate": 1.8065047556533714e-05, + "loss": 0.202, + "step": 70680 + }, + { + "epoch": 2.7294490134754237, + "grad_norm": 0.9249016046524048, + "learning_rate": 1.803930653693193e-05, + "loss": 0.1571, + "step": 70690 + }, + { + "epoch": 2.7298351287694507, + "grad_norm": 1.1907705068588257, + "learning_rate": 1.801356551733014e-05, + "loss": 0.1, + "step": 70700 + }, + { + "epoch": 2.730221244063477, + "grad_norm": 1.934824824333191, + "learning_rate": 1.7987824497728357e-05, + "loss": 0.121, + "step": 70710 + }, + { + "epoch": 2.730607359357504, + "grad_norm": 0.8659215569496155, + "learning_rate": 1.796208347812657e-05, + "loss": 0.1184, + "step": 70720 + }, + { + "epoch": 2.730993474651531, + "grad_norm": 4.406744956970215, + "learning_rate": 1.7936342458524784e-05, + "loss": 0.1405, + "step": 70730 + }, + { + "epoch": 2.7313795899455577, + "grad_norm": 1.3139662742614746, + "learning_rate": 
1.7910601438922997e-05, + "loss": 0.1341, + "step": 70740 + }, + { + "epoch": 2.7317657052395843, + "grad_norm": 1.3049808740615845, + "learning_rate": 1.788486041932121e-05, + "loss": 0.096, + "step": 70750 + }, + { + "epoch": 2.7321518205336113, + "grad_norm": 0.05369478836655617, + "learning_rate": 1.7859119399719424e-05, + "loss": 0.1148, + "step": 70760 + }, + { + "epoch": 2.7325379358276383, + "grad_norm": 0.028254307806491852, + "learning_rate": 1.7833378380117637e-05, + "loss": 0.1491, + "step": 70770 + }, + { + "epoch": 2.732924051121665, + "grad_norm": 0.44366562366485596, + "learning_rate": 1.780763736051585e-05, + "loss": 0.1294, + "step": 70780 + }, + { + "epoch": 2.733310166415692, + "grad_norm": 1.80902099609375, + "learning_rate": 1.7781896340914064e-05, + "loss": 0.4409, + "step": 70790 + }, + { + "epoch": 2.733696281709719, + "grad_norm": 0.48230284452438354, + "learning_rate": 1.775615532131228e-05, + "loss": 0.1516, + "step": 70800 + }, + { + "epoch": 2.7340823970037453, + "grad_norm": 1.295810341835022, + "learning_rate": 1.773041430171049e-05, + "loss": 0.1375, + "step": 70810 + }, + { + "epoch": 2.734468512297772, + "grad_norm": 0.05213301628828049, + "learning_rate": 1.7704673282108707e-05, + "loss": 0.1678, + "step": 70820 + }, + { + "epoch": 2.734854627591799, + "grad_norm": 1.5852004289627075, + "learning_rate": 1.767893226250692e-05, + "loss": 0.2333, + "step": 70830 + }, + { + "epoch": 2.735240742885826, + "grad_norm": 0.05190286785364151, + "learning_rate": 1.765319124290513e-05, + "loss": 0.1131, + "step": 70840 + }, + { + "epoch": 2.7356268581798524, + "grad_norm": 0.9715459942817688, + "learning_rate": 1.7627450223303347e-05, + "loss": 0.2, + "step": 70850 + }, + { + "epoch": 2.7360129734738794, + "grad_norm": 1.0015023946762085, + "learning_rate": 1.760170920370156e-05, + "loss": 0.1492, + "step": 70860 + }, + { + "epoch": 2.736399088767906, + "grad_norm": 0.4785858392715454, + "learning_rate": 1.7575968184099773e-05, + "loss": 0.1157, + "step": 70870 + }, + { + "epoch": 2.736785204061933, + "grad_norm": 1.2634512186050415, + "learning_rate": 1.7550227164497987e-05, + "loss": 0.196, + "step": 70880 + }, + { + "epoch": 2.7371713193559595, + "grad_norm": 0.08982031047344208, + "learning_rate": 1.75244861448962e-05, + "loss": 0.1772, + "step": 70890 + }, + { + "epoch": 2.7375574346499865, + "grad_norm": 0.2539536952972412, + "learning_rate": 1.7498745125294413e-05, + "loss": 0.2373, + "step": 70900 + }, + { + "epoch": 2.7379435499440135, + "grad_norm": 1.94331955909729, + "learning_rate": 1.7473004105692627e-05, + "loss": 0.1113, + "step": 70910 + }, + { + "epoch": 2.73832966523804, + "grad_norm": 1.7793807983398438, + "learning_rate": 1.744726308609084e-05, + "loss": 0.1222, + "step": 70920 + }, + { + "epoch": 2.738715780532067, + "grad_norm": 0.9183433055877686, + "learning_rate": 1.7421522066489053e-05, + "loss": 0.0982, + "step": 70930 + }, + { + "epoch": 2.7391018958260935, + "grad_norm": 0.7785767316818237, + "learning_rate": 1.739578104688727e-05, + "loss": 0.2077, + "step": 70940 + }, + { + "epoch": 2.7394880111201205, + "grad_norm": 0.363359272480011, + "learning_rate": 1.737004002728548e-05, + "loss": 0.2365, + "step": 70950 + }, + { + "epoch": 2.739874126414147, + "grad_norm": 0.026698095723986626, + "learning_rate": 1.7344299007683697e-05, + "loss": 0.185, + "step": 70960 + }, + { + "epoch": 2.740260241708174, + "grad_norm": 0.3695981502532959, + "learning_rate": 1.731855798808191e-05, + "loss": 0.1889, + "step": 70970 + }, + { + "epoch": 
2.740646357002201, + "grad_norm": 0.43547266721725464, + "learning_rate": 1.7292816968480123e-05, + "loss": 0.1945, + "step": 70980 + }, + { + "epoch": 2.7410324722962276, + "grad_norm": 0.815490186214447, + "learning_rate": 1.7267075948878337e-05, + "loss": 0.1461, + "step": 70990 + }, + { + "epoch": 2.7414185875902546, + "grad_norm": 0.9073535799980164, + "learning_rate": 1.724133492927655e-05, + "loss": 0.3003, + "step": 71000 + }, + { + "epoch": 2.741804702884281, + "grad_norm": 1.8418182134628296, + "learning_rate": 1.7215593909674763e-05, + "loss": 0.1591, + "step": 71010 + }, + { + "epoch": 2.742190818178308, + "grad_norm": 0.6584638953208923, + "learning_rate": 1.7189852890072976e-05, + "loss": 0.0845, + "step": 71020 + }, + { + "epoch": 2.7425769334723347, + "grad_norm": 0.15774297714233398, + "learning_rate": 1.716411187047119e-05, + "loss": 0.185, + "step": 71030 + }, + { + "epoch": 2.7429630487663617, + "grad_norm": 1.1900436878204346, + "learning_rate": 1.7138370850869403e-05, + "loss": 0.2387, + "step": 71040 + }, + { + "epoch": 2.7433491640603886, + "grad_norm": 1.486275553703308, + "learning_rate": 1.7112629831267616e-05, + "loss": 0.1802, + "step": 71050 + }, + { + "epoch": 2.743735279354415, + "grad_norm": 1.8878792524337769, + "learning_rate": 1.708688881166583e-05, + "loss": 0.1771, + "step": 71060 + }, + { + "epoch": 2.744121394648442, + "grad_norm": 0.4045495390892029, + "learning_rate": 1.7061147792064043e-05, + "loss": 0.0617, + "step": 71070 + }, + { + "epoch": 2.7445075099424687, + "grad_norm": 1.6910227537155151, + "learning_rate": 1.703540677246226e-05, + "loss": 0.2295, + "step": 71080 + }, + { + "epoch": 2.7448936252364957, + "grad_norm": 1.3064563274383545, + "learning_rate": 1.700966575286047e-05, + "loss": 0.189, + "step": 71090 + }, + { + "epoch": 2.7452797405305223, + "grad_norm": 0.26879480481147766, + "learning_rate": 1.6983924733258686e-05, + "loss": 0.2041, + "step": 71100 + }, + { + "epoch": 2.7456658558245493, + "grad_norm": 0.09380711615085602, + "learning_rate": 1.69581837136569e-05, + "loss": 0.2032, + "step": 71110 + }, + { + "epoch": 2.7460519711185762, + "grad_norm": 0.4047906696796417, + "learning_rate": 1.6932442694055113e-05, + "loss": 0.1808, + "step": 71120 + }, + { + "epoch": 2.746438086412603, + "grad_norm": 0.14023207128047943, + "learning_rate": 1.6906701674453326e-05, + "loss": 0.0477, + "step": 71130 + }, + { + "epoch": 2.7468242017066293, + "grad_norm": 0.09656676650047302, + "learning_rate": 1.688096065485154e-05, + "loss": 0.1482, + "step": 71140 + }, + { + "epoch": 2.7472103170006563, + "grad_norm": 0.62394118309021, + "learning_rate": 1.6855219635249753e-05, + "loss": 0.0637, + "step": 71150 + }, + { + "epoch": 2.7475964322946833, + "grad_norm": 0.028015749529004097, + "learning_rate": 1.6829478615647966e-05, + "loss": 0.1396, + "step": 71160 + }, + { + "epoch": 2.74798254758871, + "grad_norm": 0.6252595782279968, + "learning_rate": 1.680373759604618e-05, + "loss": 0.1989, + "step": 71170 + }, + { + "epoch": 2.748368662882737, + "grad_norm": 1.6278966665267944, + "learning_rate": 1.6777996576444393e-05, + "loss": 0.2828, + "step": 71180 + }, + { + "epoch": 2.748754778176764, + "grad_norm": 0.7790352702140808, + "learning_rate": 1.675225555684261e-05, + "loss": 0.1542, + "step": 71190 + }, + { + "epoch": 2.7491408934707904, + "grad_norm": 0.41583356261253357, + "learning_rate": 1.672651453724082e-05, + "loss": 0.1853, + "step": 71200 + }, + { + "epoch": 2.749527008764817, + "grad_norm": 0.22601169347763062, + "learning_rate": 
1.6700773517639036e-05, + "loss": 0.1429, + "step": 71210 + }, + { + "epoch": 2.749913124058844, + "grad_norm": 0.7268449068069458, + "learning_rate": 1.667503249803725e-05, + "loss": 0.156, + "step": 71220 + }, + { + "epoch": 2.750299239352871, + "grad_norm": 0.8059683442115784, + "learning_rate": 1.664929147843546e-05, + "loss": 0.1616, + "step": 71230 + }, + { + "epoch": 2.7506853546468975, + "grad_norm": 1.2625138759613037, + "learning_rate": 1.6623550458833676e-05, + "loss": 0.115, + "step": 71240 + }, + { + "epoch": 2.7510714699409244, + "grad_norm": 0.4710187315940857, + "learning_rate": 1.659780943923189e-05, + "loss": 0.1266, + "step": 71250 + }, + { + "epoch": 2.7514575852349514, + "grad_norm": 1.3373891115188599, + "learning_rate": 1.6572068419630103e-05, + "loss": 0.1423, + "step": 71260 + }, + { + "epoch": 2.751843700528978, + "grad_norm": 0.5606533288955688, + "learning_rate": 1.6546327400028316e-05, + "loss": 0.0912, + "step": 71270 + }, + { + "epoch": 2.7522298158230045, + "grad_norm": 1.2201918363571167, + "learning_rate": 1.652058638042653e-05, + "loss": 0.2536, + "step": 71280 + }, + { + "epoch": 2.7526159311170315, + "grad_norm": 1.017829418182373, + "learning_rate": 1.6494845360824743e-05, + "loss": 0.0917, + "step": 71290 + }, + { + "epoch": 2.7530020464110585, + "grad_norm": 0.6897231340408325, + "learning_rate": 1.6469104341222956e-05, + "loss": 0.1222, + "step": 71300 + }, + { + "epoch": 2.753388161705085, + "grad_norm": 5.349620342254639, + "learning_rate": 1.644336332162117e-05, + "loss": 0.1121, + "step": 71310 + }, + { + "epoch": 2.753774276999112, + "grad_norm": 1.6144895553588867, + "learning_rate": 1.6417622302019383e-05, + "loss": 0.146, + "step": 71320 + }, + { + "epoch": 2.7541603922931386, + "grad_norm": 0.5989459753036499, + "learning_rate": 1.63918812824176e-05, + "loss": 0.0598, + "step": 71330 + }, + { + "epoch": 2.7545465075871656, + "grad_norm": 0.7278910279273987, + "learning_rate": 1.636614026281581e-05, + "loss": 0.1559, + "step": 71340 + }, + { + "epoch": 2.754932622881192, + "grad_norm": 1.0088047981262207, + "learning_rate": 1.6340399243214026e-05, + "loss": 0.0795, + "step": 71350 + }, + { + "epoch": 2.755318738175219, + "grad_norm": 1.3781206607818604, + "learning_rate": 1.631465822361224e-05, + "loss": 0.0628, + "step": 71360 + }, + { + "epoch": 2.755704853469246, + "grad_norm": 1.498246431350708, + "learning_rate": 1.6288917204010453e-05, + "loss": 0.1908, + "step": 71370 + }, + { + "epoch": 2.7560909687632726, + "grad_norm": 0.2840694785118103, + "learning_rate": 1.6263176184408666e-05, + "loss": 0.0357, + "step": 71380 + }, + { + "epoch": 2.7564770840572996, + "grad_norm": 0.6458057761192322, + "learning_rate": 1.623743516480688e-05, + "loss": 0.215, + "step": 71390 + }, + { + "epoch": 2.756863199351326, + "grad_norm": 0.42117947340011597, + "learning_rate": 1.6211694145205092e-05, + "loss": 0.1362, + "step": 71400 + }, + { + "epoch": 2.757249314645353, + "grad_norm": 3.0162267684936523, + "learning_rate": 1.6185953125603306e-05, + "loss": 0.1965, + "step": 71410 + }, + { + "epoch": 2.7576354299393797, + "grad_norm": 0.930474579334259, + "learning_rate": 1.616021210600152e-05, + "loss": 0.2395, + "step": 71420 + }, + { + "epoch": 2.7580215452334067, + "grad_norm": 0.7894459366798401, + "learning_rate": 1.6134471086399732e-05, + "loss": 0.2285, + "step": 71430 + }, + { + "epoch": 2.7584076605274337, + "grad_norm": 3.2045016288757324, + "learning_rate": 1.6108730066797946e-05, + "loss": 0.1819, + "step": 71440 + }, + { + "epoch": 
2.7587937758214602, + "grad_norm": 0.7269306778907776, + "learning_rate": 1.608298904719616e-05, + "loss": 0.1594, + "step": 71450 + }, + { + "epoch": 2.7591798911154872, + "grad_norm": 1.333078145980835, + "learning_rate": 1.6057248027594372e-05, + "loss": 0.2131, + "step": 71460 + }, + { + "epoch": 2.7595660064095138, + "grad_norm": 0.6923009753227234, + "learning_rate": 1.603150700799259e-05, + "loss": 0.1174, + "step": 71470 + }, + { + "epoch": 2.7599521217035408, + "grad_norm": 3.510756254196167, + "learning_rate": 1.60057659883908e-05, + "loss": 0.1368, + "step": 71480 + }, + { + "epoch": 2.7603382369975673, + "grad_norm": 0.035045500844717026, + "learning_rate": 1.5980024968789016e-05, + "loss": 0.2163, + "step": 71490 + }, + { + "epoch": 2.7607243522915943, + "grad_norm": 0.9699954390525818, + "learning_rate": 1.595428394918723e-05, + "loss": 0.1279, + "step": 71500 + }, + { + "epoch": 2.7611104675856213, + "grad_norm": 1.3624379634857178, + "learning_rate": 1.5928542929585442e-05, + "loss": 0.1458, + "step": 71510 + }, + { + "epoch": 2.761496582879648, + "grad_norm": 2.585167646408081, + "learning_rate": 1.5902801909983656e-05, + "loss": 0.2321, + "step": 71520 + }, + { + "epoch": 2.761882698173675, + "grad_norm": 2.018916130065918, + "learning_rate": 1.587706089038187e-05, + "loss": 0.1301, + "step": 71530 + }, + { + "epoch": 2.7622688134677014, + "grad_norm": 0.1349097490310669, + "learning_rate": 1.5851319870780082e-05, + "loss": 0.1449, + "step": 71540 + }, + { + "epoch": 2.7626549287617284, + "grad_norm": 0.651360273361206, + "learning_rate": 1.5825578851178296e-05, + "loss": 0.0588, + "step": 71550 + }, + { + "epoch": 2.763041044055755, + "grad_norm": 1.081132411956787, + "learning_rate": 1.579983783157651e-05, + "loss": 0.2273, + "step": 71560 + }, + { + "epoch": 2.763427159349782, + "grad_norm": 0.9000619649887085, + "learning_rate": 1.5774096811974722e-05, + "loss": 0.126, + "step": 71570 + }, + { + "epoch": 2.763813274643809, + "grad_norm": 1.7903470993041992, + "learning_rate": 1.574835579237294e-05, + "loss": 0.1583, + "step": 71580 + }, + { + "epoch": 2.7641993899378354, + "grad_norm": 0.5552549362182617, + "learning_rate": 1.572261477277115e-05, + "loss": 0.1613, + "step": 71590 + }, + { + "epoch": 2.764585505231862, + "grad_norm": 0.24077007174491882, + "learning_rate": 1.5696873753169365e-05, + "loss": 0.1225, + "step": 71600 + }, + { + "epoch": 2.764971620525889, + "grad_norm": 2.2357699871063232, + "learning_rate": 1.567113273356758e-05, + "loss": 0.1241, + "step": 71610 + }, + { + "epoch": 2.765357735819916, + "grad_norm": 0.49114760756492615, + "learning_rate": 1.564539171396579e-05, + "loss": 0.3685, + "step": 71620 + }, + { + "epoch": 2.7657438511139425, + "grad_norm": 0.5270382761955261, + "learning_rate": 1.5619650694364005e-05, + "loss": 0.231, + "step": 71630 + }, + { + "epoch": 2.7661299664079695, + "grad_norm": 1.494850993156433, + "learning_rate": 1.559390967476222e-05, + "loss": 0.1456, + "step": 71640 + }, + { + "epoch": 2.7665160817019965, + "grad_norm": 0.32450973987579346, + "learning_rate": 1.5568168655160432e-05, + "loss": 0.2541, + "step": 71650 + }, + { + "epoch": 2.766902196996023, + "grad_norm": 0.1868717074394226, + "learning_rate": 1.5542427635558645e-05, + "loss": 0.2315, + "step": 71660 + }, + { + "epoch": 2.7672883122900496, + "grad_norm": 1.8473451137542725, + "learning_rate": 1.551668661595686e-05, + "loss": 0.2918, + "step": 71670 + }, + { + "epoch": 2.7676744275840766, + "grad_norm": 1.959158182144165, + "learning_rate": 
1.5490945596355072e-05, + "loss": 0.1368, + "step": 71680 + }, + { + "epoch": 2.7680605428781035, + "grad_norm": 1.8263370990753174, + "learning_rate": 1.5465204576753285e-05, + "loss": 0.2409, + "step": 71690 + }, + { + "epoch": 2.76844665817213, + "grad_norm": 0.5502326488494873, + "learning_rate": 1.54394635571515e-05, + "loss": 0.1233, + "step": 71700 + }, + { + "epoch": 2.768832773466157, + "grad_norm": 1.971820592880249, + "learning_rate": 1.5413722537549712e-05, + "loss": 0.2207, + "step": 71710 + }, + { + "epoch": 2.7692188887601836, + "grad_norm": 1.1263493299484253, + "learning_rate": 1.538798151794793e-05, + "loss": 0.1296, + "step": 71720 + }, + { + "epoch": 2.7696050040542106, + "grad_norm": 0.28595641255378723, + "learning_rate": 1.536224049834614e-05, + "loss": 0.1539, + "step": 71730 + }, + { + "epoch": 2.769991119348237, + "grad_norm": 1.1686561107635498, + "learning_rate": 1.5336499478744355e-05, + "loss": 0.2165, + "step": 71740 + }, + { + "epoch": 2.770377234642264, + "grad_norm": 0.6559491157531738, + "learning_rate": 1.531075845914257e-05, + "loss": 0.1464, + "step": 71750 + }, + { + "epoch": 2.770763349936291, + "grad_norm": 0.5865970253944397, + "learning_rate": 1.5285017439540782e-05, + "loss": 0.1076, + "step": 71760 + }, + { + "epoch": 2.7711494652303177, + "grad_norm": 1.1443376541137695, + "learning_rate": 1.5259276419938995e-05, + "loss": 0.1497, + "step": 71770 + }, + { + "epoch": 2.7715355805243447, + "grad_norm": 1.307176947593689, + "learning_rate": 1.523353540033721e-05, + "loss": 0.1435, + "step": 71780 + }, + { + "epoch": 2.7719216958183712, + "grad_norm": 0.014405300840735435, + "learning_rate": 1.5207794380735422e-05, + "loss": 0.1249, + "step": 71790 + }, + { + "epoch": 2.772307811112398, + "grad_norm": 1.4182642698287964, + "learning_rate": 1.5182053361133635e-05, + "loss": 0.1405, + "step": 71800 + }, + { + "epoch": 2.7726939264064248, + "grad_norm": 0.27843913435935974, + "learning_rate": 1.5156312341531847e-05, + "loss": 0.2303, + "step": 71810 + }, + { + "epoch": 2.7730800417004517, + "grad_norm": 2.4468517303466797, + "learning_rate": 1.5130571321930062e-05, + "loss": 0.1322, + "step": 71820 + }, + { + "epoch": 2.7734661569944787, + "grad_norm": 1.2800359725952148, + "learning_rate": 1.5104830302328277e-05, + "loss": 0.1375, + "step": 71830 + }, + { + "epoch": 2.7738522722885053, + "grad_norm": 0.3998767137527466, + "learning_rate": 1.5079089282726488e-05, + "loss": 0.1066, + "step": 71840 + }, + { + "epoch": 2.7742383875825323, + "grad_norm": 0.5334371328353882, + "learning_rate": 1.5053348263124703e-05, + "loss": 0.1777, + "step": 71850 + }, + { + "epoch": 2.774624502876559, + "grad_norm": 0.4894556403160095, + "learning_rate": 1.5027607243522918e-05, + "loss": 0.1134, + "step": 71860 + }, + { + "epoch": 2.775010618170586, + "grad_norm": 0.6681411266326904, + "learning_rate": 1.500186622392113e-05, + "loss": 0.1274, + "step": 71870 + }, + { + "epoch": 2.7753967334646124, + "grad_norm": 0.8148763179779053, + "learning_rate": 1.4976125204319343e-05, + "loss": 0.184, + "step": 71880 + }, + { + "epoch": 2.7757828487586393, + "grad_norm": 2.98481822013855, + "learning_rate": 1.4950384184717558e-05, + "loss": 0.1414, + "step": 71890 + }, + { + "epoch": 2.7761689640526663, + "grad_norm": 0.24513117969036102, + "learning_rate": 1.492464316511577e-05, + "loss": 0.1361, + "step": 71900 + }, + { + "epoch": 2.776555079346693, + "grad_norm": 1.320607304573059, + "learning_rate": 1.4898902145513985e-05, + "loss": 0.159, + "step": 71910 + }, + { + "epoch": 
2.77694119464072, + "grad_norm": 0.09256679564714432, + "learning_rate": 1.48731611259122e-05, + "loss": 0.1017, + "step": 71920 + }, + { + "epoch": 2.7773273099347464, + "grad_norm": 0.888762891292572, + "learning_rate": 1.4847420106310412e-05, + "loss": 0.1709, + "step": 71930 + }, + { + "epoch": 2.7777134252287734, + "grad_norm": 1.0178054571151733, + "learning_rate": 1.4821679086708627e-05, + "loss": 0.1705, + "step": 71940 + }, + { + "epoch": 2.7780995405228, + "grad_norm": 1.133257508277893, + "learning_rate": 1.4795938067106838e-05, + "loss": 0.1384, + "step": 71950 + }, + { + "epoch": 2.778485655816827, + "grad_norm": 0.41183799505233765, + "learning_rate": 1.4770197047505051e-05, + "loss": 0.1516, + "step": 71960 + }, + { + "epoch": 2.778871771110854, + "grad_norm": 2.172168731689453, + "learning_rate": 1.4744456027903266e-05, + "loss": 0.1806, + "step": 71970 + }, + { + "epoch": 2.7792578864048805, + "grad_norm": 0.4734342098236084, + "learning_rate": 1.4718715008301478e-05, + "loss": 0.093, + "step": 71980 + }, + { + "epoch": 2.779644001698907, + "grad_norm": 0.38913142681121826, + "learning_rate": 1.4692973988699693e-05, + "loss": 0.1117, + "step": 71990 + }, + { + "epoch": 2.780030116992934, + "grad_norm": 1.2493480443954468, + "learning_rate": 1.4667232969097908e-05, + "loss": 0.2148, + "step": 72000 + }, + { + "epoch": 2.780416232286961, + "grad_norm": 0.6025747060775757, + "learning_rate": 1.464149194949612e-05, + "loss": 0.1705, + "step": 72010 + }, + { + "epoch": 2.7808023475809875, + "grad_norm": 1.4697037935256958, + "learning_rate": 1.4615750929894335e-05, + "loss": 0.1062, + "step": 72020 + }, + { + "epoch": 2.7811884628750145, + "grad_norm": 0.40200480818748474, + "learning_rate": 1.4590009910292548e-05, + "loss": 0.1152, + "step": 72030 + }, + { + "epoch": 2.7815745781690415, + "grad_norm": 0.5441505908966064, + "learning_rate": 1.456426889069076e-05, + "loss": 0.1488, + "step": 72040 + }, + { + "epoch": 2.781960693463068, + "grad_norm": 0.45176851749420166, + "learning_rate": 1.4538527871088975e-05, + "loss": 0.0965, + "step": 72050 + }, + { + "epoch": 2.7823468087570946, + "grad_norm": 0.7421501278877258, + "learning_rate": 1.4512786851487186e-05, + "loss": 0.1045, + "step": 72060 + }, + { + "epoch": 2.7827329240511216, + "grad_norm": 0.9444339871406555, + "learning_rate": 1.4487045831885401e-05, + "loss": 0.164, + "step": 72070 + }, + { + "epoch": 2.7831190393451486, + "grad_norm": 1.1744027137756348, + "learning_rate": 1.4461304812283616e-05, + "loss": 0.1624, + "step": 72080 + }, + { + "epoch": 2.783505154639175, + "grad_norm": 1.2394273281097412, + "learning_rate": 1.4435563792681828e-05, + "loss": 0.2018, + "step": 72090 + }, + { + "epoch": 2.783891269933202, + "grad_norm": 1.3852754831314087, + "learning_rate": 1.4409822773080043e-05, + "loss": 0.3339, + "step": 72100 + }, + { + "epoch": 2.784277385227229, + "grad_norm": 0.030897267162799835, + "learning_rate": 1.4384081753478256e-05, + "loss": 0.1089, + "step": 72110 + }, + { + "epoch": 2.7846635005212557, + "grad_norm": 0.41380831599235535, + "learning_rate": 1.4358340733876468e-05, + "loss": 0.0955, + "step": 72120 + }, + { + "epoch": 2.785049615815282, + "grad_norm": 0.40943947434425354, + "learning_rate": 1.4332599714274683e-05, + "loss": 0.1309, + "step": 72130 + }, + { + "epoch": 2.785435731109309, + "grad_norm": 5.614469528198242, + "learning_rate": 1.4306858694672898e-05, + "loss": 0.2242, + "step": 72140 + }, + { + "epoch": 2.785821846403336, + "grad_norm": 1.1512348651885986, + "learning_rate": 
1.428111767507111e-05, + "loss": 0.1386, + "step": 72150 + }, + { + "epoch": 2.7862079616973627, + "grad_norm": 1.467037558555603, + "learning_rate": 1.4255376655469324e-05, + "loss": 0.1531, + "step": 72160 + }, + { + "epoch": 2.7865940769913897, + "grad_norm": 0.7877001762390137, + "learning_rate": 1.422963563586754e-05, + "loss": 0.1488, + "step": 72170 + }, + { + "epoch": 2.7869801922854163, + "grad_norm": 0.7077778577804565, + "learning_rate": 1.4203894616265751e-05, + "loss": 0.1213, + "step": 72180 + }, + { + "epoch": 2.7873663075794433, + "grad_norm": 0.6909589171409607, + "learning_rate": 1.4178153596663964e-05, + "loss": 0.2211, + "step": 72190 + }, + { + "epoch": 2.78775242287347, + "grad_norm": 0.23613958060741425, + "learning_rate": 1.4152412577062176e-05, + "loss": 0.1896, + "step": 72200 + }, + { + "epoch": 2.788138538167497, + "grad_norm": 1.218856930732727, + "learning_rate": 1.4126671557460391e-05, + "loss": 0.0989, + "step": 72210 + }, + { + "epoch": 2.788524653461524, + "grad_norm": 1.9225094318389893, + "learning_rate": 1.4100930537858606e-05, + "loss": 0.1355, + "step": 72220 + }, + { + "epoch": 2.7889107687555503, + "grad_norm": 1.3572889566421509, + "learning_rate": 1.4075189518256818e-05, + "loss": 0.2708, + "step": 72230 + }, + { + "epoch": 2.7892968840495773, + "grad_norm": 1.4339534044265747, + "learning_rate": 1.4049448498655033e-05, + "loss": 0.173, + "step": 72240 + }, + { + "epoch": 2.789682999343604, + "grad_norm": 1.6303379535675049, + "learning_rate": 1.4023707479053248e-05, + "loss": 0.1791, + "step": 72250 + }, + { + "epoch": 2.790069114637631, + "grad_norm": 0.25583216547966003, + "learning_rate": 1.399796645945146e-05, + "loss": 0.0694, + "step": 72260 + }, + { + "epoch": 2.7904552299316574, + "grad_norm": 0.29900923371315, + "learning_rate": 1.3972225439849673e-05, + "loss": 0.0942, + "step": 72270 + }, + { + "epoch": 2.7908413452256844, + "grad_norm": 0.4300585687160492, + "learning_rate": 1.3946484420247888e-05, + "loss": 0.1378, + "step": 72280 + }, + { + "epoch": 2.7912274605197114, + "grad_norm": 2.40873384475708, + "learning_rate": 1.39207434006461e-05, + "loss": 0.2227, + "step": 72290 + }, + { + "epoch": 2.791613575813738, + "grad_norm": 1.8558450937271118, + "learning_rate": 1.3895002381044314e-05, + "loss": 0.1489, + "step": 72300 + }, + { + "epoch": 2.791999691107765, + "grad_norm": 0.7937175631523132, + "learning_rate": 1.386926136144253e-05, + "loss": 0.2045, + "step": 72310 + }, + { + "epoch": 2.7923858064017915, + "grad_norm": 1.4006129503250122, + "learning_rate": 1.3843520341840741e-05, + "loss": 0.205, + "step": 72320 + }, + { + "epoch": 2.7927719216958184, + "grad_norm": 0.765356183052063, + "learning_rate": 1.3817779322238956e-05, + "loss": 0.0647, + "step": 72330 + }, + { + "epoch": 2.793158036989845, + "grad_norm": 0.9293017983436584, + "learning_rate": 1.3792038302637167e-05, + "loss": 0.149, + "step": 72340 + }, + { + "epoch": 2.793544152283872, + "grad_norm": 0.1647326946258545, + "learning_rate": 1.376629728303538e-05, + "loss": 0.0957, + "step": 72350 + }, + { + "epoch": 2.793930267577899, + "grad_norm": 1.7377187013626099, + "learning_rate": 1.3740556263433596e-05, + "loss": 0.1945, + "step": 72360 + }, + { + "epoch": 2.7943163828719255, + "grad_norm": 1.4326138496398926, + "learning_rate": 1.3714815243831807e-05, + "loss": 0.1362, + "step": 72370 + }, + { + "epoch": 2.7947024981659525, + "grad_norm": 0.07872216403484344, + "learning_rate": 1.3689074224230022e-05, + "loss": 0.1186, + "step": 72380 + }, + { + "epoch": 
2.795088613459979, + "grad_norm": 2.5933990478515625, + "learning_rate": 1.3663333204628237e-05, + "loss": 0.3499, + "step": 72390 + }, + { + "epoch": 2.795474728754006, + "grad_norm": 0.8128255009651184, + "learning_rate": 1.3637592185026449e-05, + "loss": 0.1798, + "step": 72400 + }, + { + "epoch": 2.7958608440480326, + "grad_norm": 1.3631271123886108, + "learning_rate": 1.3611851165424664e-05, + "loss": 0.1602, + "step": 72410 + }, + { + "epoch": 2.7962469593420596, + "grad_norm": 0.2131202220916748, + "learning_rate": 1.3586110145822877e-05, + "loss": 0.2031, + "step": 72420 + }, + { + "epoch": 2.7966330746360866, + "grad_norm": 2.3542559146881104, + "learning_rate": 1.3560369126221089e-05, + "loss": 0.2783, + "step": 72430 + }, + { + "epoch": 2.797019189930113, + "grad_norm": 1.075270175933838, + "learning_rate": 1.3534628106619304e-05, + "loss": 0.2014, + "step": 72440 + }, + { + "epoch": 2.7974053052241397, + "grad_norm": 1.6369863748550415, + "learning_rate": 1.3508887087017516e-05, + "loss": 0.1512, + "step": 72450 + }, + { + "epoch": 2.7977914205181666, + "grad_norm": 2.450411081314087, + "learning_rate": 1.348314606741573e-05, + "loss": 0.1899, + "step": 72460 + }, + { + "epoch": 2.7981775358121936, + "grad_norm": 0.5244776010513306, + "learning_rate": 1.3457405047813946e-05, + "loss": 0.1652, + "step": 72470 + }, + { + "epoch": 2.79856365110622, + "grad_norm": 1.206304907798767, + "learning_rate": 1.3431664028212157e-05, + "loss": 0.1695, + "step": 72480 + }, + { + "epoch": 2.798949766400247, + "grad_norm": 1.7752883434295654, + "learning_rate": 1.3405923008610372e-05, + "loss": 0.2092, + "step": 72490 + }, + { + "epoch": 2.799335881694274, + "grad_norm": 2.458888530731201, + "learning_rate": 1.3380181989008586e-05, + "loss": 0.1963, + "step": 72500 + }, + { + "epoch": 2.7997219969883007, + "grad_norm": 0.24970018863677979, + "learning_rate": 1.3354440969406797e-05, + "loss": 0.1438, + "step": 72510 + }, + { + "epoch": 2.8001081122823273, + "grad_norm": 1.6026058197021484, + "learning_rate": 1.3328699949805012e-05, + "loss": 0.1702, + "step": 72520 + }, + { + "epoch": 2.8004942275763542, + "grad_norm": 0.6461538076400757, + "learning_rate": 1.3302958930203227e-05, + "loss": 0.1725, + "step": 72530 + }, + { + "epoch": 2.8008803428703812, + "grad_norm": 0.30602967739105225, + "learning_rate": 1.3277217910601439e-05, + "loss": 0.1018, + "step": 72540 + }, + { + "epoch": 2.801266458164408, + "grad_norm": 0.5502751469612122, + "learning_rate": 1.3251476890999654e-05, + "loss": 0.1793, + "step": 72550 + }, + { + "epoch": 2.8016525734584348, + "grad_norm": 0.2644082307815552, + "learning_rate": 1.3225735871397869e-05, + "loss": 0.0423, + "step": 72560 + }, + { + "epoch": 2.8020386887524618, + "grad_norm": 2.0505154132843018, + "learning_rate": 1.319999485179608e-05, + "loss": 0.2418, + "step": 72570 + }, + { + "epoch": 2.8024248040464883, + "grad_norm": 3.0608208179473877, + "learning_rate": 1.3174253832194294e-05, + "loss": 0.1613, + "step": 72580 + }, + { + "epoch": 2.802810919340515, + "grad_norm": 0.05718081071972847, + "learning_rate": 1.3148512812592505e-05, + "loss": 0.132, + "step": 72590 + }, + { + "epoch": 2.803197034634542, + "grad_norm": 2.007830858230591, + "learning_rate": 1.312277179299072e-05, + "loss": 0.138, + "step": 72600 + }, + { + "epoch": 2.803583149928569, + "grad_norm": 0.4181762933731079, + "learning_rate": 1.3097030773388935e-05, + "loss": 0.0742, + "step": 72610 + }, + { + "epoch": 2.8039692652225954, + "grad_norm": 1.4194786548614502, + "learning_rate": 
1.3071289753787147e-05, + "loss": 0.1517, + "step": 72620 + }, + { + "epoch": 2.8043553805166224, + "grad_norm": 0.03250798210501671, + "learning_rate": 1.3045548734185362e-05, + "loss": 0.1, + "step": 72630 + }, + { + "epoch": 2.804741495810649, + "grad_norm": 0.983828067779541, + "learning_rate": 1.3019807714583577e-05, + "loss": 0.1765, + "step": 72640 + }, + { + "epoch": 2.805127611104676, + "grad_norm": 0.4670206904411316, + "learning_rate": 1.2994066694981789e-05, + "loss": 0.0954, + "step": 72650 + }, + { + "epoch": 2.8055137263987024, + "grad_norm": 0.09260097146034241, + "learning_rate": 1.2968325675380002e-05, + "loss": 0.119, + "step": 72660 + }, + { + "epoch": 2.8058998416927294, + "grad_norm": 2.12126088142395, + "learning_rate": 1.2942584655778217e-05, + "loss": 0.1673, + "step": 72670 + }, + { + "epoch": 2.8062859569867564, + "grad_norm": 0.11968256533145905, + "learning_rate": 1.2916843636176429e-05, + "loss": 0.0941, + "step": 72680 + }, + { + "epoch": 2.806672072280783, + "grad_norm": 1.229608416557312, + "learning_rate": 1.2891102616574644e-05, + "loss": 0.2319, + "step": 72690 + }, + { + "epoch": 2.80705818757481, + "grad_norm": 0.7404507994651794, + "learning_rate": 1.2865361596972859e-05, + "loss": 0.1878, + "step": 72700 + }, + { + "epoch": 2.8074443028688365, + "grad_norm": 0.15742874145507812, + "learning_rate": 1.283962057737107e-05, + "loss": 0.2041, + "step": 72710 + }, + { + "epoch": 2.8078304181628635, + "grad_norm": 1.860520601272583, + "learning_rate": 1.2813879557769285e-05, + "loss": 0.1575, + "step": 72720 + }, + { + "epoch": 2.80821653345689, + "grad_norm": 2.309978723526001, + "learning_rate": 1.2788138538167497e-05, + "loss": 0.104, + "step": 72730 + }, + { + "epoch": 2.808602648750917, + "grad_norm": 0.4069162905216217, + "learning_rate": 1.276239751856571e-05, + "loss": 0.1167, + "step": 72740 + }, + { + "epoch": 2.808988764044944, + "grad_norm": 0.9853174090385437, + "learning_rate": 1.2736656498963925e-05, + "loss": 0.1826, + "step": 72750 + }, + { + "epoch": 2.8093748793389706, + "grad_norm": 0.29024359583854675, + "learning_rate": 1.2710915479362137e-05, + "loss": 0.1662, + "step": 72760 + }, + { + "epoch": 2.8097609946329976, + "grad_norm": 1.698085069656372, + "learning_rate": 1.2685174459760352e-05, + "loss": 0.2987, + "step": 72770 + }, + { + "epoch": 2.810147109927024, + "grad_norm": 0.6112163066864014, + "learning_rate": 1.2659433440158567e-05, + "loss": 0.1074, + "step": 72780 + }, + { + "epoch": 2.810533225221051, + "grad_norm": 0.6659224033355713, + "learning_rate": 1.2633692420556778e-05, + "loss": 0.1514, + "step": 72790 + }, + { + "epoch": 2.8109193405150776, + "grad_norm": 0.40598243474960327, + "learning_rate": 1.2607951400954993e-05, + "loss": 0.1022, + "step": 72800 + }, + { + "epoch": 2.8113054558091046, + "grad_norm": 0.3016485571861267, + "learning_rate": 1.2582210381353207e-05, + "loss": 0.1226, + "step": 72810 + }, + { + "epoch": 2.8116915711031316, + "grad_norm": 0.8959118127822876, + "learning_rate": 1.2556469361751418e-05, + "loss": 0.1283, + "step": 72820 + }, + { + "epoch": 2.812077686397158, + "grad_norm": 0.1264275461435318, + "learning_rate": 1.2530728342149633e-05, + "loss": 0.1191, + "step": 72830 + }, + { + "epoch": 2.812463801691185, + "grad_norm": 0.06748595088720322, + "learning_rate": 1.2504987322547848e-05, + "loss": 0.1728, + "step": 72840 + }, + { + "epoch": 2.8128499169852117, + "grad_norm": 1.8376634120941162, + "learning_rate": 1.247924630294606e-05, + "loss": 0.1773, + "step": 72850 + }, + { + "epoch": 
2.8132360322792387, + "grad_norm": 1.0151314735412598, + "learning_rate": 1.2453505283344275e-05, + "loss": 0.0979, + "step": 72860 + }, + { + "epoch": 2.8136221475732652, + "grad_norm": 0.274681955575943, + "learning_rate": 1.2427764263742488e-05, + "loss": 0.1806, + "step": 72870 + }, + { + "epoch": 2.814008262867292, + "grad_norm": 3.2869608402252197, + "learning_rate": 1.2402023244140702e-05, + "loss": 0.1469, + "step": 72880 + }, + { + "epoch": 2.814394378161319, + "grad_norm": 0.7580883502960205, + "learning_rate": 1.2376282224538915e-05, + "loss": 0.175, + "step": 72890 + }, + { + "epoch": 2.8147804934553458, + "grad_norm": 0.26281633973121643, + "learning_rate": 1.2350541204937128e-05, + "loss": 0.1801, + "step": 72900 + }, + { + "epoch": 2.8151666087493723, + "grad_norm": 0.5014416575431824, + "learning_rate": 1.2324800185335341e-05, + "loss": 0.1159, + "step": 72910 + }, + { + "epoch": 2.8155527240433993, + "grad_norm": 0.5112303495407104, + "learning_rate": 1.2299059165733555e-05, + "loss": 0.0988, + "step": 72920 + }, + { + "epoch": 2.8159388393374263, + "grad_norm": 1.928898811340332, + "learning_rate": 1.227331814613177e-05, + "loss": 0.1261, + "step": 72930 + }, + { + "epoch": 2.816324954631453, + "grad_norm": 2.8064088821411133, + "learning_rate": 1.2247577126529983e-05, + "loss": 0.1811, + "step": 72940 + }, + { + "epoch": 2.81671106992548, + "grad_norm": 1.5859709978103638, + "learning_rate": 1.2221836106928196e-05, + "loss": 0.2235, + "step": 72950 + }, + { + "epoch": 2.817097185219507, + "grad_norm": 0.6035090684890747, + "learning_rate": 1.219609508732641e-05, + "loss": 0.0931, + "step": 72960 + }, + { + "epoch": 2.8174833005135334, + "grad_norm": 0.19035843014717102, + "learning_rate": 1.2170354067724623e-05, + "loss": 0.182, + "step": 72970 + }, + { + "epoch": 2.81786941580756, + "grad_norm": 0.2380947321653366, + "learning_rate": 1.2144613048122836e-05, + "loss": 0.0744, + "step": 72980 + }, + { + "epoch": 2.818255531101587, + "grad_norm": 0.5143213868141174, + "learning_rate": 1.211887202852105e-05, + "loss": 0.1252, + "step": 72990 + }, + { + "epoch": 2.818641646395614, + "grad_norm": 1.6957656145095825, + "learning_rate": 1.2093131008919265e-05, + "loss": 0.1701, + "step": 73000 + }, + { + "epoch": 2.8190277616896404, + "grad_norm": 0.31234198808670044, + "learning_rate": 1.2067389989317478e-05, + "loss": 0.0638, + "step": 73010 + }, + { + "epoch": 2.8194138769836674, + "grad_norm": 1.2418551445007324, + "learning_rate": 1.2041648969715691e-05, + "loss": 0.1972, + "step": 73020 + }, + { + "epoch": 2.819799992277694, + "grad_norm": 0.08601387590169907, + "learning_rate": 1.2015907950113905e-05, + "loss": 0.1808, + "step": 73030 + }, + { + "epoch": 2.820186107571721, + "grad_norm": 0.7416751384735107, + "learning_rate": 1.1990166930512118e-05, + "loss": 0.0543, + "step": 73040 + }, + { + "epoch": 2.8205722228657475, + "grad_norm": 0.957653284072876, + "learning_rate": 1.1964425910910331e-05, + "loss": 0.125, + "step": 73050 + }, + { + "epoch": 2.8209583381597745, + "grad_norm": 0.34786149859428406, + "learning_rate": 1.1938684891308545e-05, + "loss": 0.2388, + "step": 73060 + }, + { + "epoch": 2.8213444534538015, + "grad_norm": 1.0569926500320435, + "learning_rate": 1.1912943871706758e-05, + "loss": 0.1199, + "step": 73070 + }, + { + "epoch": 2.821730568747828, + "grad_norm": 1.948347568511963, + "learning_rate": 1.1887202852104973e-05, + "loss": 0.195, + "step": 73080 + }, + { + "epoch": 2.822116684041855, + "grad_norm": 0.18845289945602417, + "learning_rate": 
1.1861461832503186e-05, + "loss": 0.295, + "step": 73090 + }, + { + "epoch": 2.8225027993358816, + "grad_norm": 1.7377262115478516, + "learning_rate": 1.18357208129014e-05, + "loss": 0.3067, + "step": 73100 + }, + { + "epoch": 2.8228889146299085, + "grad_norm": 1.232576847076416, + "learning_rate": 1.1809979793299614e-05, + "loss": 0.1849, + "step": 73110 + }, + { + "epoch": 2.823275029923935, + "grad_norm": 1.6318284273147583, + "learning_rate": 1.1784238773697826e-05, + "loss": 0.0626, + "step": 73120 + }, + { + "epoch": 2.823661145217962, + "grad_norm": 0.5187623500823975, + "learning_rate": 1.175849775409604e-05, + "loss": 0.1255, + "step": 73130 + }, + { + "epoch": 2.824047260511989, + "grad_norm": 2.4732282161712646, + "learning_rate": 1.1732756734494253e-05, + "loss": 0.1433, + "step": 73140 + }, + { + "epoch": 2.8244333758060156, + "grad_norm": 2.080509901046753, + "learning_rate": 1.1707015714892468e-05, + "loss": 0.1533, + "step": 73150 + }, + { + "epoch": 2.8248194911000426, + "grad_norm": 0.61173415184021, + "learning_rate": 1.1681274695290681e-05, + "loss": 0.0807, + "step": 73160 + }, + { + "epoch": 2.825205606394069, + "grad_norm": 0.1210317462682724, + "learning_rate": 1.1655533675688894e-05, + "loss": 0.1433, + "step": 73170 + }, + { + "epoch": 2.825591721688096, + "grad_norm": 0.699754536151886, + "learning_rate": 1.162979265608711e-05, + "loss": 0.091, + "step": 73180 + }, + { + "epoch": 2.8259778369821227, + "grad_norm": 1.1935967206954956, + "learning_rate": 1.1604051636485323e-05, + "loss": 0.1567, + "step": 73190 + }, + { + "epoch": 2.8263639522761497, + "grad_norm": 0.14032036066055298, + "learning_rate": 1.1578310616883534e-05, + "loss": 0.1283, + "step": 73200 + }, + { + "epoch": 2.8267500675701767, + "grad_norm": 0.1003938615322113, + "learning_rate": 1.1552569597281748e-05, + "loss": 0.241, + "step": 73210 + }, + { + "epoch": 2.827136182864203, + "grad_norm": 0.08814160525798798, + "learning_rate": 1.1526828577679963e-05, + "loss": 0.1191, + "step": 73220 + }, + { + "epoch": 2.82752229815823, + "grad_norm": 2.224803924560547, + "learning_rate": 1.1501087558078176e-05, + "loss": 0.1395, + "step": 73230 + }, + { + "epoch": 2.8279084134522567, + "grad_norm": 0.7424294948577881, + "learning_rate": 1.147534653847639e-05, + "loss": 0.2086, + "step": 73240 + }, + { + "epoch": 2.8282945287462837, + "grad_norm": 2.4858903884887695, + "learning_rate": 1.1449605518874604e-05, + "loss": 0.3264, + "step": 73250 + }, + { + "epoch": 2.8286806440403103, + "grad_norm": 0.09597936272621155, + "learning_rate": 1.1423864499272818e-05, + "loss": 0.1849, + "step": 73260 + }, + { + "epoch": 2.8290667593343373, + "grad_norm": 0.14344246685504913, + "learning_rate": 1.1398123479671031e-05, + "loss": 0.0898, + "step": 73270 + }, + { + "epoch": 2.8294528746283643, + "grad_norm": 1.6673258543014526, + "learning_rate": 1.1372382460069242e-05, + "loss": 0.1218, + "step": 73280 + }, + { + "epoch": 2.829838989922391, + "grad_norm": 0.7718226313591003, + "learning_rate": 1.1346641440467457e-05, + "loss": 0.246, + "step": 73290 + }, + { + "epoch": 2.8302251052164173, + "grad_norm": 1.073569416999817, + "learning_rate": 1.132090042086567e-05, + "loss": 0.1469, + "step": 73300 + }, + { + "epoch": 2.8306112205104443, + "grad_norm": 0.6750389933586121, + "learning_rate": 1.1295159401263884e-05, + "loss": 0.0572, + "step": 73310 + }, + { + "epoch": 2.8309973358044713, + "grad_norm": 0.3106151819229126, + "learning_rate": 1.1269418381662099e-05, + "loss": 0.1877, + "step": 73320 + }, + { + "epoch": 
2.831383451098498, + "grad_norm": 2.519272804260254, + "learning_rate": 1.1243677362060312e-05, + "loss": 0.3173, + "step": 73330 + }, + { + "epoch": 2.831769566392525, + "grad_norm": 0.1696769744157791, + "learning_rate": 1.1217936342458526e-05, + "loss": 0.0998, + "step": 73340 + }, + { + "epoch": 2.832155681686552, + "grad_norm": 1.1009591817855835, + "learning_rate": 1.1192195322856739e-05, + "loss": 0.1534, + "step": 73350 + }, + { + "epoch": 2.8325417969805784, + "grad_norm": 0.5018852949142456, + "learning_rate": 1.1166454303254952e-05, + "loss": 0.1132, + "step": 73360 + }, + { + "epoch": 2.832927912274605, + "grad_norm": 0.75879967212677, + "learning_rate": 1.1140713283653166e-05, + "loss": 0.2494, + "step": 73370 + }, + { + "epoch": 2.833314027568632, + "grad_norm": 0.6770151853561401, + "learning_rate": 1.1114972264051379e-05, + "loss": 0.127, + "step": 73380 + }, + { + "epoch": 2.833700142862659, + "grad_norm": 0.7802016139030457, + "learning_rate": 1.1089231244449594e-05, + "loss": 0.0463, + "step": 73390 + }, + { + "epoch": 2.8340862581566855, + "grad_norm": 0.6405506134033203, + "learning_rate": 1.1063490224847807e-05, + "loss": 0.1039, + "step": 73400 + }, + { + "epoch": 2.8344723734507125, + "grad_norm": 1.5618057250976562, + "learning_rate": 1.103774920524602e-05, + "loss": 0.2303, + "step": 73410 + }, + { + "epoch": 2.8348584887447394, + "grad_norm": 0.07669465243816376, + "learning_rate": 1.1012008185644234e-05, + "loss": 0.0619, + "step": 73420 + }, + { + "epoch": 2.835244604038766, + "grad_norm": 1.495162010192871, + "learning_rate": 1.0986267166042447e-05, + "loss": 0.3976, + "step": 73430 + }, + { + "epoch": 2.8356307193327925, + "grad_norm": 1.0150856971740723, + "learning_rate": 1.096052614644066e-05, + "loss": 0.1209, + "step": 73440 + }, + { + "epoch": 2.8360168346268195, + "grad_norm": 0.0810672789812088, + "learning_rate": 1.0934785126838874e-05, + "loss": 0.1443, + "step": 73450 + }, + { + "epoch": 2.8364029499208465, + "grad_norm": 1.9854507446289062, + "learning_rate": 1.0909044107237089e-05, + "loss": 0.0724, + "step": 73460 + }, + { + "epoch": 2.836789065214873, + "grad_norm": 1.1847221851348877, + "learning_rate": 1.0883303087635302e-05, + "loss": 0.1215, + "step": 73470 + }, + { + "epoch": 2.8371751805089, + "grad_norm": 0.1890803575515747, + "learning_rate": 1.0857562068033515e-05, + "loss": 0.0693, + "step": 73480 + }, + { + "epoch": 2.8375612958029266, + "grad_norm": 1.4246773719787598, + "learning_rate": 1.0831821048431729e-05, + "loss": 0.1458, + "step": 73490 + }, + { + "epoch": 2.8379474110969536, + "grad_norm": 0.9759969115257263, + "learning_rate": 1.0806080028829944e-05, + "loss": 0.0894, + "step": 73500 + }, + { + "epoch": 2.83833352639098, + "grad_norm": 0.06872682273387909, + "learning_rate": 1.0780339009228155e-05, + "loss": 0.0911, + "step": 73510 + }, + { + "epoch": 2.838719641685007, + "grad_norm": 0.9503278732299805, + "learning_rate": 1.0754597989626369e-05, + "loss": 0.0367, + "step": 73520 + }, + { + "epoch": 2.839105756979034, + "grad_norm": 0.08986163884401321, + "learning_rate": 1.0728856970024582e-05, + "loss": 0.113, + "step": 73530 + }, + { + "epoch": 2.8394918722730607, + "grad_norm": 0.4085181951522827, + "learning_rate": 1.0703115950422797e-05, + "loss": 0.0858, + "step": 73540 + }, + { + "epoch": 2.8398779875670876, + "grad_norm": 2.045522689819336, + "learning_rate": 1.067737493082101e-05, + "loss": 0.0585, + "step": 73550 + }, + { + "epoch": 2.840264102861114, + "grad_norm": 1.701407551765442, + "learning_rate": 
1.0651633911219224e-05, + "loss": 0.2093, + "step": 73560 + }, + { + "epoch": 2.840650218155141, + "grad_norm": 0.5299584865570068, + "learning_rate": 1.0625892891617439e-05, + "loss": 0.1673, + "step": 73570 + }, + { + "epoch": 2.8410363334491677, + "grad_norm": 1.289556622505188, + "learning_rate": 1.0600151872015652e-05, + "loss": 0.0876, + "step": 73580 + }, + { + "epoch": 2.8414224487431947, + "grad_norm": 1.0992724895477295, + "learning_rate": 1.0574410852413864e-05, + "loss": 0.1526, + "step": 73590 + }, + { + "epoch": 2.8418085640372217, + "grad_norm": 1.4800548553466797, + "learning_rate": 1.0548669832812077e-05, + "loss": 0.2792, + "step": 73600 + }, + { + "epoch": 2.8421946793312483, + "grad_norm": 0.7440020442008972, + "learning_rate": 1.0522928813210292e-05, + "loss": 0.0838, + "step": 73610 + }, + { + "epoch": 2.8425807946252752, + "grad_norm": 0.5010614395141602, + "learning_rate": 1.0497187793608505e-05, + "loss": 0.2615, + "step": 73620 + }, + { + "epoch": 2.842966909919302, + "grad_norm": 0.6573401093482971, + "learning_rate": 1.0471446774006719e-05, + "loss": 0.1924, + "step": 73630 + }, + { + "epoch": 2.8433530252133288, + "grad_norm": 1.2293144464492798, + "learning_rate": 1.0445705754404934e-05, + "loss": 0.1723, + "step": 73640 + }, + { + "epoch": 2.8437391405073553, + "grad_norm": 1.2916191816329956, + "learning_rate": 1.0419964734803147e-05, + "loss": 0.2163, + "step": 73650 + }, + { + "epoch": 2.8441252558013823, + "grad_norm": 0.48834675550460815, + "learning_rate": 1.039422371520136e-05, + "loss": 0.3001, + "step": 73660 + }, + { + "epoch": 2.8445113710954093, + "grad_norm": 0.4397851228713989, + "learning_rate": 1.0368482695599572e-05, + "loss": 0.1354, + "step": 73670 + }, + { + "epoch": 2.844897486389436, + "grad_norm": 1.6258771419525146, + "learning_rate": 1.0342741675997787e-05, + "loss": 0.1677, + "step": 73680 + }, + { + "epoch": 2.845283601683463, + "grad_norm": 0.5988297462463379, + "learning_rate": 1.0317000656396e-05, + "loss": 0.0911, + "step": 73690 + }, + { + "epoch": 2.8456697169774894, + "grad_norm": 2.7825276851654053, + "learning_rate": 1.0291259636794213e-05, + "loss": 0.2635, + "step": 73700 + }, + { + "epoch": 2.8460558322715164, + "grad_norm": 0.0774473026394844, + "learning_rate": 1.0265518617192428e-05, + "loss": 0.1239, + "step": 73710 + }, + { + "epoch": 2.846441947565543, + "grad_norm": 0.22317875921726227, + "learning_rate": 1.0239777597590642e-05, + "loss": 0.2164, + "step": 73720 + }, + { + "epoch": 2.84682806285957, + "grad_norm": 0.15649321675300598, + "learning_rate": 1.0214036577988855e-05, + "loss": 0.0442, + "step": 73730 + }, + { + "epoch": 2.847214178153597, + "grad_norm": 1.7008354663848877, + "learning_rate": 1.0188295558387068e-05, + "loss": 0.0829, + "step": 73740 + }, + { + "epoch": 2.8476002934476234, + "grad_norm": 0.9527981281280518, + "learning_rate": 1.0162554538785282e-05, + "loss": 0.1655, + "step": 73750 + }, + { + "epoch": 2.84798640874165, + "grad_norm": 0.27073028683662415, + "learning_rate": 1.0136813519183495e-05, + "loss": 0.0909, + "step": 73760 + }, + { + "epoch": 2.848372524035677, + "grad_norm": 0.6584774851799011, + "learning_rate": 1.0111072499581708e-05, + "loss": 0.1365, + "step": 73770 + }, + { + "epoch": 2.848758639329704, + "grad_norm": 0.06433244794607162, + "learning_rate": 1.0085331479979923e-05, + "loss": 0.0936, + "step": 73780 + }, + { + "epoch": 2.8491447546237305, + "grad_norm": 0.31640946865081787, + "learning_rate": 1.0059590460378137e-05, + "loss": 0.053, + "step": 73790 + }, + { + 
"epoch": 2.8495308699177575, + "grad_norm": 1.378275752067566, + "learning_rate": 1.003384944077635e-05, + "loss": 0.2072, + "step": 73800 + }, + { + "epoch": 2.8499169852117845, + "grad_norm": 0.40181395411491394, + "learning_rate": 1.0008108421174563e-05, + "loss": 0.0458, + "step": 73810 + }, + { + "epoch": 2.850303100505811, + "grad_norm": 0.6035460233688354, + "learning_rate": 9.982367401572777e-06, + "loss": 0.2126, + "step": 73820 + }, + { + "epoch": 2.8506892157998376, + "grad_norm": 1.581748366355896, + "learning_rate": 9.95662638197099e-06, + "loss": 0.2245, + "step": 73830 + }, + { + "epoch": 2.8510753310938646, + "grad_norm": 0.15285348892211914, + "learning_rate": 9.930885362369203e-06, + "loss": 0.2124, + "step": 73840 + }, + { + "epoch": 2.8514614463878916, + "grad_norm": 0.5655555725097656, + "learning_rate": 9.905144342767418e-06, + "loss": 0.215, + "step": 73850 + }, + { + "epoch": 2.851847561681918, + "grad_norm": 0.91652512550354, + "learning_rate": 9.879403323165631e-06, + "loss": 0.0512, + "step": 73860 + }, + { + "epoch": 2.852233676975945, + "grad_norm": 0.4889742136001587, + "learning_rate": 9.853662303563845e-06, + "loss": 0.0565, + "step": 73870 + }, + { + "epoch": 2.852619792269972, + "grad_norm": 0.5656816959381104, + "learning_rate": 9.827921283962058e-06, + "loss": 0.1392, + "step": 73880 + }, + { + "epoch": 2.8530059075639986, + "grad_norm": 0.5749841332435608, + "learning_rate": 9.802180264360273e-06, + "loss": 0.1548, + "step": 73890 + }, + { + "epoch": 2.853392022858025, + "grad_norm": 0.40446341037750244, + "learning_rate": 9.776439244758485e-06, + "loss": 0.1078, + "step": 73900 + }, + { + "epoch": 2.853778138152052, + "grad_norm": 0.7668073773384094, + "learning_rate": 9.750698225156698e-06, + "loss": 0.1282, + "step": 73910 + }, + { + "epoch": 2.854164253446079, + "grad_norm": 1.973522424697876, + "learning_rate": 9.724957205554913e-06, + "loss": 0.1002, + "step": 73920 + }, + { + "epoch": 2.8545503687401057, + "grad_norm": 3.882335662841797, + "learning_rate": 9.699216185953126e-06, + "loss": 0.3315, + "step": 73930 + }, + { + "epoch": 2.8549364840341327, + "grad_norm": 3.6236727237701416, + "learning_rate": 9.67347516635134e-06, + "loss": 0.0749, + "step": 73940 + }, + { + "epoch": 2.8553225993281592, + "grad_norm": 3.9697139263153076, + "learning_rate": 9.647734146749553e-06, + "loss": 0.165, + "step": 73950 + }, + { + "epoch": 2.8557087146221862, + "grad_norm": 0.2589983344078064, + "learning_rate": 9.621993127147768e-06, + "loss": 0.0949, + "step": 73960 + }, + { + "epoch": 2.8560948299162128, + "grad_norm": 1.9424326419830322, + "learning_rate": 9.596252107545981e-06, + "loss": 0.194, + "step": 73970 + }, + { + "epoch": 2.8564809452102398, + "grad_norm": 1.8792887926101685, + "learning_rate": 9.570511087944193e-06, + "loss": 0.1259, + "step": 73980 + }, + { + "epoch": 2.8568670605042668, + "grad_norm": 0.29986900091171265, + "learning_rate": 9.544770068342406e-06, + "loss": 0.0672, + "step": 73990 + }, + { + "epoch": 2.8572531757982933, + "grad_norm": 0.5949634909629822, + "learning_rate": 9.519029048740621e-06, + "loss": 0.1991, + "step": 74000 + }, + { + "epoch": 2.8576392910923203, + "grad_norm": 0.7105257511138916, + "learning_rate": 9.493288029138835e-06, + "loss": 0.1142, + "step": 74010 + }, + { + "epoch": 2.858025406386347, + "grad_norm": 0.1612206995487213, + "learning_rate": 9.467547009537048e-06, + "loss": 0.0743, + "step": 74020 + }, + { + "epoch": 2.858411521680374, + "grad_norm": 0.96357262134552, + "learning_rate": 
9.441805989935263e-06, + "loss": 0.1514, + "step": 74030 + }, + { + "epoch": 2.8587976369744004, + "grad_norm": 0.10844029486179352, + "learning_rate": 9.416064970333476e-06, + "loss": 0.0921, + "step": 74040 + }, + { + "epoch": 2.8591837522684274, + "grad_norm": 1.9250043630599976, + "learning_rate": 9.39032395073169e-06, + "loss": 0.2404, + "step": 74050 + }, + { + "epoch": 2.8595698675624543, + "grad_norm": 0.9182831048965454, + "learning_rate": 9.364582931129901e-06, + "loss": 0.1312, + "step": 74060 + }, + { + "epoch": 2.859955982856481, + "grad_norm": 0.45169830322265625, + "learning_rate": 9.338841911528116e-06, + "loss": 0.1484, + "step": 74070 + }, + { + "epoch": 2.860342098150508, + "grad_norm": 1.3599480390548706, + "learning_rate": 9.31310089192633e-06, + "loss": 0.1648, + "step": 74080 + }, + { + "epoch": 2.8607282134445344, + "grad_norm": 0.7859013676643372, + "learning_rate": 9.287359872324543e-06, + "loss": 0.1339, + "step": 74090 + }, + { + "epoch": 2.8611143287385614, + "grad_norm": 0.2625623047351837, + "learning_rate": 9.261618852722758e-06, + "loss": 0.1635, + "step": 74100 + }, + { + "epoch": 2.861500444032588, + "grad_norm": 3.6348588466644287, + "learning_rate": 9.235877833120971e-06, + "loss": 0.1958, + "step": 74110 + }, + { + "epoch": 2.861886559326615, + "grad_norm": 0.2913823127746582, + "learning_rate": 9.210136813519184e-06, + "loss": 0.1228, + "step": 74120 + }, + { + "epoch": 2.862272674620642, + "grad_norm": 0.8785780072212219, + "learning_rate": 9.184395793917398e-06, + "loss": 0.1284, + "step": 74130 + }, + { + "epoch": 2.8626587899146685, + "grad_norm": 0.20822127163410187, + "learning_rate": 9.158654774315611e-06, + "loss": 0.2853, + "step": 74140 + }, + { + "epoch": 2.8630449052086955, + "grad_norm": 0.12501594424247742, + "learning_rate": 9.132913754713824e-06, + "loss": 0.2773, + "step": 74150 + }, + { + "epoch": 2.863431020502722, + "grad_norm": 0.7225301861763, + "learning_rate": 9.107172735112038e-06, + "loss": 0.158, + "step": 74160 + }, + { + "epoch": 2.863817135796749, + "grad_norm": 0.15984690189361572, + "learning_rate": 9.081431715510253e-06, + "loss": 0.2105, + "step": 74170 + }, + { + "epoch": 2.8642032510907756, + "grad_norm": 0.9793509840965271, + "learning_rate": 9.055690695908466e-06, + "loss": 0.231, + "step": 74180 + }, + { + "epoch": 2.8645893663848025, + "grad_norm": 1.3881072998046875, + "learning_rate": 9.02994967630668e-06, + "loss": 0.1013, + "step": 74190 + }, + { + "epoch": 2.8649754816788295, + "grad_norm": 2.9956090450286865, + "learning_rate": 9.004208656704893e-06, + "loss": 0.1328, + "step": 74200 + }, + { + "epoch": 2.865361596972856, + "grad_norm": 1.1672799587249756, + "learning_rate": 8.978467637103106e-06, + "loss": 0.1454, + "step": 74210 + }, + { + "epoch": 2.8657477122668826, + "grad_norm": 0.5341854095458984, + "learning_rate": 8.952726617501319e-06, + "loss": 0.0753, + "step": 74220 + }, + { + "epoch": 2.8661338275609096, + "grad_norm": 0.19304673373699188, + "learning_rate": 8.926985597899532e-06, + "loss": 0.1962, + "step": 74230 + }, + { + "epoch": 2.8665199428549366, + "grad_norm": 0.33846932649612427, + "learning_rate": 8.901244578297747e-06, + "loss": 0.1089, + "step": 74240 + }, + { + "epoch": 2.866906058148963, + "grad_norm": 1.007706642150879, + "learning_rate": 8.87550355869596e-06, + "loss": 0.186, + "step": 74250 + }, + { + "epoch": 2.86729217344299, + "grad_norm": 1.7284183502197266, + "learning_rate": 8.849762539094174e-06, + "loss": 0.2445, + "step": 74260 + }, + { + "epoch": 2.867678288737017, 
+ "grad_norm": 3.7386040687561035, + "learning_rate": 8.824021519492387e-06, + "loss": 0.1044, + "step": 74270 + }, + { + "epoch": 2.8680644040310437, + "grad_norm": 0.5097699761390686, + "learning_rate": 8.7982804998906e-06, + "loss": 0.1357, + "step": 74280 + }, + { + "epoch": 2.8684505193250702, + "grad_norm": 1.620544195175171, + "learning_rate": 8.772539480288814e-06, + "loss": 0.1567, + "step": 74290 + }, + { + "epoch": 2.868836634619097, + "grad_norm": 0.6678496599197388, + "learning_rate": 8.746798460687027e-06, + "loss": 0.1495, + "step": 74300 + }, + { + "epoch": 2.869222749913124, + "grad_norm": 0.5393665432929993, + "learning_rate": 8.721057441085242e-06, + "loss": 0.1611, + "step": 74310 + }, + { + "epoch": 2.8696088652071507, + "grad_norm": 2.98551344871521, + "learning_rate": 8.695316421483456e-06, + "loss": 0.2125, + "step": 74320 + }, + { + "epoch": 2.8699949805011777, + "grad_norm": 0.36715471744537354, + "learning_rate": 8.669575401881669e-06, + "loss": 0.0974, + "step": 74330 + }, + { + "epoch": 2.8703810957952043, + "grad_norm": 2.0747640132904053, + "learning_rate": 8.643834382279882e-06, + "loss": 0.1571, + "step": 74340 + }, + { + "epoch": 2.8707672110892313, + "grad_norm": 3.940426826477051, + "learning_rate": 8.618093362678097e-06, + "loss": 0.1706, + "step": 74350 + }, + { + "epoch": 2.871153326383258, + "grad_norm": 0.07064225524663925, + "learning_rate": 8.59235234307631e-06, + "loss": 0.1104, + "step": 74360 + }, + { + "epoch": 2.871539441677285, + "grad_norm": 2.7614259719848633, + "learning_rate": 8.566611323474522e-06, + "loss": 0.2461, + "step": 74370 + }, + { + "epoch": 2.871925556971312, + "grad_norm": 0.1670389324426651, + "learning_rate": 8.540870303872736e-06, + "loss": 0.0632, + "step": 74380 + }, + { + "epoch": 2.8723116722653383, + "grad_norm": 0.16744600236415863, + "learning_rate": 8.51512928427095e-06, + "loss": 0.0861, + "step": 74390 + }, + { + "epoch": 2.8726977875593653, + "grad_norm": 1.4196829795837402, + "learning_rate": 8.489388264669164e-06, + "loss": 0.1127, + "step": 74400 + }, + { + "epoch": 2.873083902853392, + "grad_norm": 0.9786797165870667, + "learning_rate": 8.463647245067377e-06, + "loss": 0.1925, + "step": 74410 + }, + { + "epoch": 2.873470018147419, + "grad_norm": 1.3412476778030396, + "learning_rate": 8.437906225465592e-06, + "loss": 0.2199, + "step": 74420 + }, + { + "epoch": 2.8738561334414454, + "grad_norm": 0.7068845629692078, + "learning_rate": 8.412165205863805e-06, + "loss": 0.1286, + "step": 74430 + }, + { + "epoch": 2.8742422487354724, + "grad_norm": 0.9745468497276306, + "learning_rate": 8.386424186262019e-06, + "loss": 0.2113, + "step": 74440 + }, + { + "epoch": 2.8746283640294994, + "grad_norm": 1.4095795154571533, + "learning_rate": 8.36068316666023e-06, + "loss": 0.1474, + "step": 74450 + }, + { + "epoch": 2.875014479323526, + "grad_norm": 0.5530760884284973, + "learning_rate": 8.334942147058445e-06, + "loss": 0.1351, + "step": 74460 + }, + { + "epoch": 2.875400594617553, + "grad_norm": 1.8106993436813354, + "learning_rate": 8.309201127456659e-06, + "loss": 0.1643, + "step": 74470 + }, + { + "epoch": 2.8757867099115795, + "grad_norm": 0.3154134154319763, + "learning_rate": 8.283460107854872e-06, + "loss": 0.052, + "step": 74480 + }, + { + "epoch": 2.8761728252056065, + "grad_norm": 0.2410293072462082, + "learning_rate": 8.257719088253087e-06, + "loss": 0.0817, + "step": 74490 + }, + { + "epoch": 2.876558940499633, + "grad_norm": 0.7377256155014038, + "learning_rate": 8.2319780686513e-06, + "loss": 0.1561, + 
"step": 74500 + }, + { + "epoch": 2.87694505579366, + "grad_norm": 3.0138823986053467, + "learning_rate": 8.206237049049514e-06, + "loss": 0.1963, + "step": 74510 + }, + { + "epoch": 2.877331171087687, + "grad_norm": 2.0430660247802734, + "learning_rate": 8.180496029447727e-06, + "loss": 0.0937, + "step": 74520 + }, + { + "epoch": 2.8777172863817135, + "grad_norm": 0.4840744435787201, + "learning_rate": 8.15475500984594e-06, + "loss": 0.1026, + "step": 74530 + }, + { + "epoch": 2.8781034016757405, + "grad_norm": 1.995553731918335, + "learning_rate": 8.129013990244154e-06, + "loss": 0.1112, + "step": 74540 + }, + { + "epoch": 2.878489516969767, + "grad_norm": 2.139451742172241, + "learning_rate": 8.103272970642367e-06, + "loss": 0.2066, + "step": 74550 + }, + { + "epoch": 2.878875632263794, + "grad_norm": 1.6176917552947998, + "learning_rate": 8.077531951040582e-06, + "loss": 0.1588, + "step": 74560 + }, + { + "epoch": 2.8792617475578206, + "grad_norm": 0.965374231338501, + "learning_rate": 8.051790931438795e-06, + "loss": 0.1274, + "step": 74570 + }, + { + "epoch": 2.8796478628518476, + "grad_norm": 2.842817544937134, + "learning_rate": 8.026049911837009e-06, + "loss": 0.1503, + "step": 74580 + }, + { + "epoch": 2.8800339781458746, + "grad_norm": 0.3894018232822418, + "learning_rate": 8.000308892235222e-06, + "loss": 0.1052, + "step": 74590 + }, + { + "epoch": 2.880420093439901, + "grad_norm": 1.3701835870742798, + "learning_rate": 7.974567872633435e-06, + "loss": 0.2016, + "step": 74600 + }, + { + "epoch": 2.8808062087339277, + "grad_norm": 1.6000498533248901, + "learning_rate": 7.948826853031648e-06, + "loss": 0.1766, + "step": 74610 + }, + { + "epoch": 2.8811923240279547, + "grad_norm": 0.3492911159992218, + "learning_rate": 7.923085833429862e-06, + "loss": 0.2132, + "step": 74620 + }, + { + "epoch": 2.8815784393219817, + "grad_norm": 2.516202926635742, + "learning_rate": 7.897344813828077e-06, + "loss": 0.1888, + "step": 74630 + }, + { + "epoch": 2.881964554616008, + "grad_norm": 1.8804141283035278, + "learning_rate": 7.87160379422629e-06, + "loss": 0.2125, + "step": 74640 + }, + { + "epoch": 2.882350669910035, + "grad_norm": 1.4559141397476196, + "learning_rate": 7.845862774624503e-06, + "loss": 0.2118, + "step": 74650 + }, + { + "epoch": 2.882736785204062, + "grad_norm": 1.0181102752685547, + "learning_rate": 7.820121755022717e-06, + "loss": 0.2107, + "step": 74660 + }, + { + "epoch": 2.8831229004980887, + "grad_norm": 0.8501084446907043, + "learning_rate": 7.79438073542093e-06, + "loss": 0.1405, + "step": 74670 + }, + { + "epoch": 2.8835090157921153, + "grad_norm": 0.6537768244743347, + "learning_rate": 7.768639715819143e-06, + "loss": 0.0951, + "step": 74680 + }, + { + "epoch": 2.8838951310861423, + "grad_norm": 1.6745322942733765, + "learning_rate": 7.742898696217357e-06, + "loss": 0.1451, + "step": 74690 + }, + { + "epoch": 2.8842812463801693, + "grad_norm": 0.046329010277986526, + "learning_rate": 7.717157676615572e-06, + "loss": 0.0637, + "step": 74700 + }, + { + "epoch": 2.884667361674196, + "grad_norm": 1.1959903240203857, + "learning_rate": 7.691416657013785e-06, + "loss": 0.1841, + "step": 74710 + }, + { + "epoch": 2.885053476968223, + "grad_norm": 2.5898563861846924, + "learning_rate": 7.665675637411998e-06, + "loss": 0.1309, + "step": 74720 + }, + { + "epoch": 2.8854395922622498, + "grad_norm": 0.05841336399316788, + "learning_rate": 7.639934617810212e-06, + "loss": 0.0825, + "step": 74730 + }, + { + "epoch": 2.8858257075562763, + "grad_norm": 1.5657339096069336, + 
"learning_rate": 7.614193598208426e-06, + "loss": 0.0816, + "step": 74740 + }, + { + "epoch": 2.886211822850303, + "grad_norm": 1.2975622415542603, + "learning_rate": 7.588452578606639e-06, + "loss": 0.2084, + "step": 74750 + }, + { + "epoch": 2.88659793814433, + "grad_norm": 0.8901941180229187, + "learning_rate": 7.562711559004852e-06, + "loss": 0.0946, + "step": 74760 + }, + { + "epoch": 2.886984053438357, + "grad_norm": 0.13277596235275269, + "learning_rate": 7.5369705394030665e-06, + "loss": 0.1313, + "step": 74770 + }, + { + "epoch": 2.8873701687323834, + "grad_norm": 0.6199597120285034, + "learning_rate": 7.51122951980128e-06, + "loss": 0.1139, + "step": 74780 + }, + { + "epoch": 2.8877562840264104, + "grad_norm": 0.041490960866212845, + "learning_rate": 7.485488500199493e-06, + "loss": 0.0945, + "step": 74790 + }, + { + "epoch": 2.888142399320437, + "grad_norm": 0.06974820047616959, + "learning_rate": 7.4597474805977065e-06, + "loss": 0.1619, + "step": 74800 + }, + { + "epoch": 2.888528514614464, + "grad_norm": 0.5965213775634766, + "learning_rate": 7.434006460995921e-06, + "loss": 0.1294, + "step": 74810 + }, + { + "epoch": 2.8889146299084905, + "grad_norm": 1.50202214717865, + "learning_rate": 7.408265441394134e-06, + "loss": 0.2038, + "step": 74820 + }, + { + "epoch": 2.8893007452025175, + "grad_norm": 0.2680859863758087, + "learning_rate": 7.382524421792347e-06, + "loss": 0.1566, + "step": 74830 + }, + { + "epoch": 2.8896868604965444, + "grad_norm": 0.8714569211006165, + "learning_rate": 7.3567834021905606e-06, + "loss": 0.1103, + "step": 74840 + }, + { + "epoch": 2.890072975790571, + "grad_norm": 1.0597981214523315, + "learning_rate": 7.331042382588775e-06, + "loss": 0.1141, + "step": 74850 + }, + { + "epoch": 2.890459091084598, + "grad_norm": 1.1880320310592651, + "learning_rate": 7.305301362986988e-06, + "loss": 0.1203, + "step": 74860 + }, + { + "epoch": 2.8908452063786245, + "grad_norm": 0.8900028467178345, + "learning_rate": 7.279560343385201e-06, + "loss": 0.2258, + "step": 74870 + }, + { + "epoch": 2.8912313216726515, + "grad_norm": 0.6879392266273499, + "learning_rate": 7.253819323783416e-06, + "loss": 0.1561, + "step": 74880 + }, + { + "epoch": 2.891617436966678, + "grad_norm": 0.3370697498321533, + "learning_rate": 7.228078304181629e-06, + "loss": 0.2218, + "step": 74890 + }, + { + "epoch": 2.892003552260705, + "grad_norm": 2.0778462886810303, + "learning_rate": 7.202337284579842e-06, + "loss": 0.2195, + "step": 74900 + }, + { + "epoch": 2.892389667554732, + "grad_norm": 0.09895554929971695, + "learning_rate": 7.1765962649780554e-06, + "loss": 0.148, + "step": 74910 + }, + { + "epoch": 2.8927757828487586, + "grad_norm": 3.897048234939575, + "learning_rate": 7.1508552453762704e-06, + "loss": 0.1535, + "step": 74920 + }, + { + "epoch": 2.8931618981427856, + "grad_norm": 0.4604206383228302, + "learning_rate": 7.125114225774483e-06, + "loss": 0.1037, + "step": 74930 + }, + { + "epoch": 2.893548013436812, + "grad_norm": 1.1230722665786743, + "learning_rate": 7.099373206172696e-06, + "loss": 0.2625, + "step": 74940 + }, + { + "epoch": 2.893934128730839, + "grad_norm": 0.0566195473074913, + "learning_rate": 7.073632186570911e-06, + "loss": 0.1807, + "step": 74950 + }, + { + "epoch": 2.8943202440248657, + "grad_norm": 0.08504597842693329, + "learning_rate": 7.0478911669691245e-06, + "loss": 0.1639, + "step": 74960 + }, + { + "epoch": 2.8947063593188926, + "grad_norm": 1.4992380142211914, + "learning_rate": 7.022150147367337e-06, + "loss": 0.219, + "step": 74970 + }, + { + 
"epoch": 2.8950924746129196, + "grad_norm": 1.0938425064086914, + "learning_rate": 6.99640912776555e-06, + "loss": 0.1467, + "step": 74980 + }, + { + "epoch": 2.895478589906946, + "grad_norm": 2.4818408489227295, + "learning_rate": 6.970668108163765e-06, + "loss": 0.107, + "step": 74990 + }, + { + "epoch": 2.895864705200973, + "grad_norm": 0.18132860958576202, + "learning_rate": 6.944927088561979e-06, + "loss": 0.0966, + "step": 75000 + }, + { + "epoch": 2.8962508204949997, + "grad_norm": 1.1011055707931519, + "learning_rate": 6.919186068960191e-06, + "loss": 0.177, + "step": 75010 + }, + { + "epoch": 2.8966369357890267, + "grad_norm": 0.2061525285243988, + "learning_rate": 6.893445049358406e-06, + "loss": 0.1528, + "step": 75020 + }, + { + "epoch": 2.8970230510830532, + "grad_norm": 0.651801347732544, + "learning_rate": 6.867704029756619e-06, + "loss": 0.2042, + "step": 75030 + }, + { + "epoch": 2.8974091663770802, + "grad_norm": 0.5397346019744873, + "learning_rate": 6.841963010154833e-06, + "loss": 0.2099, + "step": 75040 + }, + { + "epoch": 2.8977952816711072, + "grad_norm": 2.301893949508667, + "learning_rate": 6.816221990553045e-06, + "loss": 0.107, + "step": 75050 + }, + { + "epoch": 2.8981813969651338, + "grad_norm": 0.7261497378349304, + "learning_rate": 6.79048097095126e-06, + "loss": 0.222, + "step": 75060 + }, + { + "epoch": 2.8985675122591603, + "grad_norm": 2.5633339881896973, + "learning_rate": 6.7647399513494735e-06, + "loss": 0.2573, + "step": 75070 + }, + { + "epoch": 2.8989536275531873, + "grad_norm": 1.5711263418197632, + "learning_rate": 6.738998931747687e-06, + "loss": 0.22, + "step": 75080 + }, + { + "epoch": 2.8993397428472143, + "grad_norm": 0.3720754086971283, + "learning_rate": 6.713257912145901e-06, + "loss": 0.2248, + "step": 75090 + }, + { + "epoch": 2.899725858141241, + "grad_norm": 6.319779396057129, + "learning_rate": 6.687516892544114e-06, + "loss": 0.1005, + "step": 75100 + }, + { + "epoch": 2.900111973435268, + "grad_norm": 0.8868811130523682, + "learning_rate": 6.661775872942328e-06, + "loss": 0.2097, + "step": 75110 + }, + { + "epoch": 2.900498088729295, + "grad_norm": 0.3579706847667694, + "learning_rate": 6.636034853340541e-06, + "loss": 0.1209, + "step": 75120 + }, + { + "epoch": 2.9008842040233214, + "grad_norm": 2.9907472133636475, + "learning_rate": 6.610293833738755e-06, + "loss": 0.1789, + "step": 75130 + }, + { + "epoch": 2.901270319317348, + "grad_norm": 0.06285133957862854, + "learning_rate": 6.584552814136968e-06, + "loss": 0.1437, + "step": 75140 + }, + { + "epoch": 2.901656434611375, + "grad_norm": 1.304377555847168, + "learning_rate": 6.558811794535182e-06, + "loss": 0.1315, + "step": 75150 + }, + { + "epoch": 2.902042549905402, + "grad_norm": 0.7678247690200806, + "learning_rate": 6.533070774933396e-06, + "loss": 0.1163, + "step": 75160 + }, + { + "epoch": 2.9024286651994284, + "grad_norm": 0.30619505047798157, + "learning_rate": 6.507329755331609e-06, + "loss": 0.0777, + "step": 75170 + }, + { + "epoch": 2.9028147804934554, + "grad_norm": 2.499606132507324, + "learning_rate": 6.4815887357298225e-06, + "loss": 0.2395, + "step": 75180 + }, + { + "epoch": 2.9032008957874824, + "grad_norm": 0.9318193793296814, + "learning_rate": 6.455847716128036e-06, + "loss": 0.152, + "step": 75190 + }, + { + "epoch": 2.903587011081509, + "grad_norm": 1.8113256692886353, + "learning_rate": 6.43010669652625e-06, + "loss": 0.1505, + "step": 75200 + }, + { + "epoch": 2.9039731263755355, + "grad_norm": 1.9092323780059814, + "learning_rate": 
6.404365676924463e-06, + "loss": 0.2304, + "step": 75210 + }, + { + "epoch": 2.9043592416695625, + "grad_norm": 0.5695223212242126, + "learning_rate": 6.3786246573226765e-06, + "loss": 0.1125, + "step": 75220 + }, + { + "epoch": 2.9047453569635895, + "grad_norm": 2.3485186100006104, + "learning_rate": 6.35288363772089e-06, + "loss": 0.2701, + "step": 75230 + }, + { + "epoch": 2.905131472257616, + "grad_norm": 1.5487502813339233, + "learning_rate": 6.327142618119104e-06, + "loss": 0.1471, + "step": 75240 + }, + { + "epoch": 2.905517587551643, + "grad_norm": 2.2842421531677246, + "learning_rate": 6.301401598517317e-06, + "loss": 0.1211, + "step": 75250 + }, + { + "epoch": 2.9059037028456696, + "grad_norm": 0.5499130487442017, + "learning_rate": 6.275660578915531e-06, + "loss": 0.1515, + "step": 75260 + }, + { + "epoch": 2.9062898181396966, + "grad_norm": 0.7656468749046326, + "learning_rate": 6.249919559313745e-06, + "loss": 0.0743, + "step": 75270 + }, + { + "epoch": 2.906675933433723, + "grad_norm": 1.4212474822998047, + "learning_rate": 6.224178539711958e-06, + "loss": 0.1706, + "step": 75280 + }, + { + "epoch": 2.90706204872775, + "grad_norm": 0.5614858269691467, + "learning_rate": 6.1984375201101714e-06, + "loss": 0.1551, + "step": 75290 + }, + { + "epoch": 2.907448164021777, + "grad_norm": 0.2481578141450882, + "learning_rate": 6.172696500508386e-06, + "loss": 0.1594, + "step": 75300 + }, + { + "epoch": 2.9078342793158036, + "grad_norm": 0.774730920791626, + "learning_rate": 6.1469554809066e-06, + "loss": 0.2387, + "step": 75310 + }, + { + "epoch": 2.9082203946098306, + "grad_norm": 0.12540464103221893, + "learning_rate": 6.121214461304812e-06, + "loss": 0.1436, + "step": 75320 + }, + { + "epoch": 2.908606509903857, + "grad_norm": 0.36908024549484253, + "learning_rate": 6.095473441703026e-06, + "loss": 0.1046, + "step": 75330 + }, + { + "epoch": 2.908992625197884, + "grad_norm": 0.5687906742095947, + "learning_rate": 6.06973242210124e-06, + "loss": 0.1454, + "step": 75340 + }, + { + "epoch": 2.9093787404919107, + "grad_norm": 1.373570442199707, + "learning_rate": 6.043991402499454e-06, + "loss": 0.203, + "step": 75350 + }, + { + "epoch": 2.9097648557859377, + "grad_norm": 0.40615278482437134, + "learning_rate": 6.018250382897666e-06, + "loss": 0.0836, + "step": 75360 + }, + { + "epoch": 2.9101509710799647, + "grad_norm": 1.5747426748275757, + "learning_rate": 5.9925093632958805e-06, + "loss": 0.1206, + "step": 75370 + }, + { + "epoch": 2.9105370863739912, + "grad_norm": 0.9298601746559143, + "learning_rate": 5.966768343694094e-06, + "loss": 0.1219, + "step": 75380 + }, + { + "epoch": 2.910923201668018, + "grad_norm": 0.7346408367156982, + "learning_rate": 5.941027324092308e-06, + "loss": 0.1776, + "step": 75390 + }, + { + "epoch": 2.9113093169620448, + "grad_norm": 0.6600155234336853, + "learning_rate": 5.915286304490521e-06, + "loss": 0.1511, + "step": 75400 + }, + { + "epoch": 2.9116954322560717, + "grad_norm": 0.3379638195037842, + "learning_rate": 5.8895452848887345e-06, + "loss": 0.1602, + "step": 75410 + }, + { + "epoch": 2.9120815475500983, + "grad_norm": 0.7085258960723877, + "learning_rate": 5.863804265286949e-06, + "loss": 0.0688, + "step": 75420 + }, + { + "epoch": 2.9124676628441253, + "grad_norm": 0.379912793636322, + "learning_rate": 5.838063245685162e-06, + "loss": 0.2218, + "step": 75430 + }, + { + "epoch": 2.9128537781381523, + "grad_norm": 1.7112829685211182, + "learning_rate": 5.812322226083375e-06, + "loss": 0.0864, + "step": 75440 + }, + { + "epoch": 
2.913239893432179, + "grad_norm": 0.9781870245933533, + "learning_rate": 5.786581206481589e-06, + "loss": 0.1837, + "step": 75450 + }, + { + "epoch": 2.913626008726206, + "grad_norm": 0.6391132473945618, + "learning_rate": 5.760840186879803e-06, + "loss": 0.0902, + "step": 75460 + }, + { + "epoch": 2.9140121240202324, + "grad_norm": 1.400060772895813, + "learning_rate": 5.735099167278016e-06, + "loss": 0.1623, + "step": 75470 + }, + { + "epoch": 2.9143982393142593, + "grad_norm": 1.2771530151367188, + "learning_rate": 5.709358147676229e-06, + "loss": 0.1589, + "step": 75480 + }, + { + "epoch": 2.914784354608286, + "grad_norm": 0.37462353706359863, + "learning_rate": 5.683617128074444e-06, + "loss": 0.1241, + "step": 75490 + }, + { + "epoch": 2.915170469902313, + "grad_norm": 1.5162663459777832, + "learning_rate": 5.657876108472657e-06, + "loss": 0.0891, + "step": 75500 + }, + { + "epoch": 2.91555658519634, + "grad_norm": 2.389887809753418, + "learning_rate": 5.63213508887087e-06, + "loss": 0.1397, + "step": 75510 + }, + { + "epoch": 2.9159427004903664, + "grad_norm": 0.4746516942977905, + "learning_rate": 5.6063940692690835e-06, + "loss": 0.1362, + "step": 75520 + }, + { + "epoch": 2.916328815784393, + "grad_norm": 1.1355524063110352, + "learning_rate": 5.580653049667298e-06, + "loss": 0.1772, + "step": 75530 + }, + { + "epoch": 2.91671493107842, + "grad_norm": 0.18736037611961365, + "learning_rate": 5.554912030065512e-06, + "loss": 0.1449, + "step": 75540 + }, + { + "epoch": 2.917101046372447, + "grad_norm": 2.575685501098633, + "learning_rate": 5.529171010463724e-06, + "loss": 0.1871, + "step": 75550 + }, + { + "epoch": 2.9174871616664735, + "grad_norm": 1.01112699508667, + "learning_rate": 5.5034299908619385e-06, + "loss": 0.1588, + "step": 75560 + }, + { + "epoch": 2.9178732769605005, + "grad_norm": 1.8929286003112793, + "learning_rate": 5.477688971260152e-06, + "loss": 0.1564, + "step": 75570 + }, + { + "epoch": 2.9182593922545275, + "grad_norm": 0.21724069118499756, + "learning_rate": 5.451947951658366e-06, + "loss": 0.2152, + "step": 75580 + }, + { + "epoch": 2.918645507548554, + "grad_norm": 1.2024441957473755, + "learning_rate": 5.426206932056578e-06, + "loss": 0.3032, + "step": 75590 + }, + { + "epoch": 2.9190316228425806, + "grad_norm": 0.14324747025966644, + "learning_rate": 5.4004659124547925e-06, + "loss": 0.0674, + "step": 75600 + }, + { + "epoch": 2.9194177381366075, + "grad_norm": 1.427650809288025, + "learning_rate": 5.374724892853006e-06, + "loss": 0.1086, + "step": 75610 + }, + { + "epoch": 2.9198038534306345, + "grad_norm": 1.2401851415634155, + "learning_rate": 5.34898387325122e-06, + "loss": 0.0944, + "step": 75620 + }, + { + "epoch": 2.920189968724661, + "grad_norm": 1.4218640327453613, + "learning_rate": 5.323242853649433e-06, + "loss": 0.1386, + "step": 75630 + }, + { + "epoch": 2.920576084018688, + "grad_norm": 1.0168864727020264, + "learning_rate": 5.297501834047647e-06, + "loss": 0.1291, + "step": 75640 + }, + { + "epoch": 2.9209621993127146, + "grad_norm": 0.2599659860134125, + "learning_rate": 5.271760814445861e-06, + "loss": 0.1055, + "step": 75650 + }, + { + "epoch": 2.9213483146067416, + "grad_norm": 2.1232173442840576, + "learning_rate": 5.246019794844074e-06, + "loss": 0.1521, + "step": 75660 + }, + { + "epoch": 2.921734429900768, + "grad_norm": 0.6988056302070618, + "learning_rate": 5.220278775242287e-06, + "loss": 0.1584, + "step": 75670 + }, + { + "epoch": 2.922120545194795, + "grad_norm": 1.766686201095581, + "learning_rate": 5.194537755640501e-06, + 
"loss": 0.1888, + "step": 75680 + }, + { + "epoch": 2.922506660488822, + "grad_norm": 1.17173433303833, + "learning_rate": 5.168796736038715e-06, + "loss": 0.1362, + "step": 75690 + }, + { + "epoch": 2.9228927757828487, + "grad_norm": 0.10190659016370773, + "learning_rate": 5.143055716436929e-06, + "loss": 0.1048, + "step": 75700 + }, + { + "epoch": 2.9232788910768757, + "grad_norm": 0.24241623282432556, + "learning_rate": 5.1173146968351415e-06, + "loss": 0.0925, + "step": 75710 + }, + { + "epoch": 2.923665006370902, + "grad_norm": 0.027136487886309624, + "learning_rate": 5.091573677233356e-06, + "loss": 0.1881, + "step": 75720 + }, + { + "epoch": 2.924051121664929, + "grad_norm": 0.7799992561340332, + "learning_rate": 5.065832657631569e-06, + "loss": 0.1733, + "step": 75730 + }, + { + "epoch": 2.9244372369589557, + "grad_norm": 0.11681391298770905, + "learning_rate": 5.040091638029783e-06, + "loss": 0.0768, + "step": 75740 + }, + { + "epoch": 2.9248233522529827, + "grad_norm": 3.007784366607666, + "learning_rate": 5.014350618427996e-06, + "loss": 0.2014, + "step": 75750 + }, + { + "epoch": 2.9252094675470097, + "grad_norm": 0.8907320499420166, + "learning_rate": 4.98860959882621e-06, + "loss": 0.3129, + "step": 75760 + }, + { + "epoch": 2.9255955828410363, + "grad_norm": 0.960918128490448, + "learning_rate": 4.962868579224423e-06, + "loss": 0.0904, + "step": 75770 + }, + { + "epoch": 2.9259816981350633, + "grad_norm": 1.1451547145843506, + "learning_rate": 4.937127559622637e-06, + "loss": 0.1966, + "step": 75780 + }, + { + "epoch": 2.92636781342909, + "grad_norm": 0.0598277747631073, + "learning_rate": 4.9113865400208505e-06, + "loss": 0.178, + "step": 75790 + }, + { + "epoch": 2.926753928723117, + "grad_norm": 2.012179374694824, + "learning_rate": 4.885645520419064e-06, + "loss": 0.1208, + "step": 75800 + }, + { + "epoch": 2.9271400440171433, + "grad_norm": 0.29047757387161255, + "learning_rate": 4.859904500817278e-06, + "loss": 0.1803, + "step": 75810 + }, + { + "epoch": 2.9275261593111703, + "grad_norm": 0.23592634499073029, + "learning_rate": 4.834163481215491e-06, + "loss": 0.1307, + "step": 75820 + }, + { + "epoch": 2.9279122746051973, + "grad_norm": 0.6524437069892883, + "learning_rate": 4.808422461613705e-06, + "loss": 0.1102, + "step": 75830 + }, + { + "epoch": 2.928298389899224, + "grad_norm": 4.513589382171631, + "learning_rate": 4.782681442011918e-06, + "loss": 0.3142, + "step": 75840 + }, + { + "epoch": 2.928684505193251, + "grad_norm": 0.2516826093196869, + "learning_rate": 4.756940422410132e-06, + "loss": 0.1691, + "step": 75850 + }, + { + "epoch": 2.9290706204872774, + "grad_norm": 0.05350786820054054, + "learning_rate": 4.731199402808345e-06, + "loss": 0.1919, + "step": 75860 + }, + { + "epoch": 2.9294567357813044, + "grad_norm": 0.23492521047592163, + "learning_rate": 4.705458383206559e-06, + "loss": 0.1481, + "step": 75870 + }, + { + "epoch": 2.929842851075331, + "grad_norm": 1.1959890127182007, + "learning_rate": 4.679717363604773e-06, + "loss": 0.1008, + "step": 75880 + }, + { + "epoch": 2.930228966369358, + "grad_norm": 1.4260644912719727, + "learning_rate": 4.653976344002986e-06, + "loss": 0.0918, + "step": 75890 + }, + { + "epoch": 2.930615081663385, + "grad_norm": 1.236479640007019, + "learning_rate": 4.6282353244011995e-06, + "loss": 0.0863, + "step": 75900 + }, + { + "epoch": 2.9310011969574115, + "grad_norm": 1.4055296182632446, + "learning_rate": 4.602494304799413e-06, + "loss": 0.176, + "step": 75910 + }, + { + "epoch": 2.931387312251438, + "grad_norm": 
1.5062698125839233, + "learning_rate": 4.576753285197627e-06, + "loss": 0.1485, + "step": 75920 + }, + { + "epoch": 2.931773427545465, + "grad_norm": 0.586919367313385, + "learning_rate": 4.551012265595841e-06, + "loss": 0.1995, + "step": 75930 + }, + { + "epoch": 2.932159542839492, + "grad_norm": 0.755504310131073, + "learning_rate": 4.525271245994054e-06, + "loss": 0.1654, + "step": 75940 + }, + { + "epoch": 2.9325456581335185, + "grad_norm": 0.12576620280742645, + "learning_rate": 4.499530226392268e-06, + "loss": 0.0948, + "step": 75950 + }, + { + "epoch": 2.9329317734275455, + "grad_norm": 0.9442972540855408, + "learning_rate": 4.473789206790481e-06, + "loss": 0.2244, + "step": 75960 + }, + { + "epoch": 2.9333178887215725, + "grad_norm": 0.026888804510235786, + "learning_rate": 4.448048187188695e-06, + "loss": 0.0444, + "step": 75970 + }, + { + "epoch": 2.933704004015599, + "grad_norm": 0.789533257484436, + "learning_rate": 4.422307167586908e-06, + "loss": 0.1891, + "step": 75980 + }, + { + "epoch": 2.9340901193096256, + "grad_norm": 1.3214176893234253, + "learning_rate": 4.396566147985122e-06, + "loss": 0.1072, + "step": 75990 + }, + { + "epoch": 2.9344762346036526, + "grad_norm": 1.0256754159927368, + "learning_rate": 4.370825128383335e-06, + "loss": 0.198, + "step": 76000 + }, + { + "epoch": 2.9348623498976796, + "grad_norm": 0.25208428502082825, + "learning_rate": 4.345084108781549e-06, + "loss": 0.1881, + "step": 76010 + }, + { + "epoch": 2.935248465191706, + "grad_norm": 1.5642906427383423, + "learning_rate": 4.319343089179763e-06, + "loss": 0.1642, + "step": 76020 + }, + { + "epoch": 2.935634580485733, + "grad_norm": 0.7479145526885986, + "learning_rate": 4.293602069577976e-06, + "loss": 0.1752, + "step": 76030 + }, + { + "epoch": 2.93602069577976, + "grad_norm": 0.16477616131305695, + "learning_rate": 4.26786104997619e-06, + "loss": 0.1692, + "step": 76040 + }, + { + "epoch": 2.9364068110737866, + "grad_norm": 1.2980380058288574, + "learning_rate": 4.242120030374403e-06, + "loss": 0.0983, + "step": 76050 + }, + { + "epoch": 2.936792926367813, + "grad_norm": 1.4535443782806396, + "learning_rate": 4.216379010772617e-06, + "loss": 0.1804, + "step": 76060 + }, + { + "epoch": 2.93717904166184, + "grad_norm": 0.6782435774803162, + "learning_rate": 4.19063799117083e-06, + "loss": 0.0837, + "step": 76070 + }, + { + "epoch": 2.937565156955867, + "grad_norm": 0.9832270741462708, + "learning_rate": 4.164896971569044e-06, + "loss": 0.2864, + "step": 76080 + }, + { + "epoch": 2.9379512722498937, + "grad_norm": 1.398342251777649, + "learning_rate": 4.1391559519672575e-06, + "loss": 0.1506, + "step": 76090 + }, + { + "epoch": 2.9383373875439207, + "grad_norm": 1.2533401250839233, + "learning_rate": 4.113414932365471e-06, + "loss": 0.1198, + "step": 76100 + }, + { + "epoch": 2.9387235028379473, + "grad_norm": 3.0472609996795654, + "learning_rate": 4.087673912763685e-06, + "loss": 0.1057, + "step": 76110 + }, + { + "epoch": 2.9391096181319742, + "grad_norm": 0.6309196352958679, + "learning_rate": 4.061932893161898e-06, + "loss": 0.043, + "step": 76120 + }, + { + "epoch": 2.939495733426001, + "grad_norm": 0.6146018505096436, + "learning_rate": 4.036191873560112e-06, + "loss": 0.2575, + "step": 76130 + }, + { + "epoch": 2.939881848720028, + "grad_norm": 0.9969425797462463, + "learning_rate": 4.010450853958325e-06, + "loss": 0.0714, + "step": 76140 + }, + { + "epoch": 2.9402679640140548, + "grad_norm": 1.1078910827636719, + "learning_rate": 3.984709834356539e-06, + "loss": 0.058, + "step": 76150 + 
}, + { + "epoch": 2.9406540793080813, + "grad_norm": 0.45345064997673035, + "learning_rate": 3.958968814754753e-06, + "loss": 0.2731, + "step": 76160 + }, + { + "epoch": 2.9410401946021083, + "grad_norm": 0.6446991562843323, + "learning_rate": 3.9332277951529665e-06, + "loss": 0.0851, + "step": 76170 + }, + { + "epoch": 2.941426309896135, + "grad_norm": 0.5553757548332214, + "learning_rate": 3.90748677555118e-06, + "loss": 0.1686, + "step": 76180 + }, + { + "epoch": 2.941812425190162, + "grad_norm": 0.46511319279670715, + "learning_rate": 3.881745755949393e-06, + "loss": 0.3051, + "step": 76190 + }, + { + "epoch": 2.9421985404841884, + "grad_norm": 0.7858741283416748, + "learning_rate": 3.856004736347607e-06, + "loss": 0.0879, + "step": 76200 + }, + { + "epoch": 2.9425846557782154, + "grad_norm": 2.4360992908477783, + "learning_rate": 3.830263716745821e-06, + "loss": 0.351, + "step": 76210 + }, + { + "epoch": 2.9429707710722424, + "grad_norm": 0.8845987915992737, + "learning_rate": 3.804522697144034e-06, + "loss": 0.1261, + "step": 76220 + }, + { + "epoch": 2.943356886366269, + "grad_norm": 0.07230502367019653, + "learning_rate": 3.7787816775422473e-06, + "loss": 0.1706, + "step": 76230 + }, + { + "epoch": 2.943743001660296, + "grad_norm": 1.0335034132003784, + "learning_rate": 3.753040657940461e-06, + "loss": 0.1003, + "step": 76240 + }, + { + "epoch": 2.9441291169543224, + "grad_norm": 0.17918971180915833, + "learning_rate": 3.727299638338675e-06, + "loss": 0.1177, + "step": 76250 + }, + { + "epoch": 2.9445152322483494, + "grad_norm": 0.28648892045021057, + "learning_rate": 3.701558618736888e-06, + "loss": 0.1744, + "step": 76260 + }, + { + "epoch": 2.944901347542376, + "grad_norm": 2.5599138736724854, + "learning_rate": 3.675817599135102e-06, + "loss": 0.15, + "step": 76270 + }, + { + "epoch": 2.945287462836403, + "grad_norm": 0.5117394924163818, + "learning_rate": 3.650076579533315e-06, + "loss": 0.1638, + "step": 76280 + }, + { + "epoch": 2.94567357813043, + "grad_norm": 1.0782241821289062, + "learning_rate": 3.6243355599315292e-06, + "loss": 0.326, + "step": 76290 + }, + { + "epoch": 2.9460596934244565, + "grad_norm": 0.049037184566259384, + "learning_rate": 3.598594540329742e-06, + "loss": 0.098, + "step": 76300 + }, + { + "epoch": 2.9464458087184835, + "grad_norm": 0.6411057710647583, + "learning_rate": 3.5728535207279563e-06, + "loss": 0.1082, + "step": 76310 + }, + { + "epoch": 2.94683192401251, + "grad_norm": 0.4438591003417969, + "learning_rate": 3.54711250112617e-06, + "loss": 0.1103, + "step": 76320 + }, + { + "epoch": 2.947218039306537, + "grad_norm": 0.2660674452781677, + "learning_rate": 3.5213714815243833e-06, + "loss": 0.1961, + "step": 76330 + }, + { + "epoch": 2.9476041546005636, + "grad_norm": 2.3970565795898438, + "learning_rate": 3.495630461922597e-06, + "loss": 0.2066, + "step": 76340 + }, + { + "epoch": 2.9479902698945906, + "grad_norm": 0.5123302936553955, + "learning_rate": 3.4698894423208104e-06, + "loss": 0.2231, + "step": 76350 + }, + { + "epoch": 2.9483763851886176, + "grad_norm": 0.026459665969014168, + "learning_rate": 3.444148422719024e-06, + "loss": 0.1865, + "step": 76360 + }, + { + "epoch": 2.948762500482644, + "grad_norm": 1.8527942895889282, + "learning_rate": 3.4184074031172374e-06, + "loss": 0.3425, + "step": 76370 + }, + { + "epoch": 2.9491486157766706, + "grad_norm": 0.3126128315925598, + "learning_rate": 3.392666383515451e-06, + "loss": 0.1047, + "step": 76380 + }, + { + "epoch": 2.9495347310706976, + "grad_norm": 0.14081552624702454, + 
"learning_rate": 3.3669253639136653e-06, + "loss": 0.1987, + "step": 76390 + }, + { + "epoch": 2.9499208463647246, + "grad_norm": 0.7128289341926575, + "learning_rate": 3.3411843443118782e-06, + "loss": 0.1398, + "step": 76400 + }, + { + "epoch": 2.950306961658751, + "grad_norm": 1.8297266960144043, + "learning_rate": 3.3154433247100924e-06, + "loss": 0.1874, + "step": 76410 + }, + { + "epoch": 2.950693076952778, + "grad_norm": 2.2352466583251953, + "learning_rate": 3.2897023051083053e-06, + "loss": 0.1561, + "step": 76420 + }, + { + "epoch": 2.951079192246805, + "grad_norm": 0.9707785844802856, + "learning_rate": 3.2639612855065194e-06, + "loss": 0.2287, + "step": 76430 + }, + { + "epoch": 2.9514653075408317, + "grad_norm": 1.408307433128357, + "learning_rate": 3.2382202659047323e-06, + "loss": 0.2413, + "step": 76440 + }, + { + "epoch": 2.9518514228348582, + "grad_norm": 1.706040382385254, + "learning_rate": 3.2124792463029465e-06, + "loss": 0.1153, + "step": 76450 + }, + { + "epoch": 2.9522375381288852, + "grad_norm": 1.6742459535598755, + "learning_rate": 3.1867382267011594e-06, + "loss": 0.1259, + "step": 76460 + }, + { + "epoch": 2.952623653422912, + "grad_norm": 2.4881958961486816, + "learning_rate": 3.1609972070993735e-06, + "loss": 0.2058, + "step": 76470 + }, + { + "epoch": 2.9530097687169388, + "grad_norm": 0.09501784294843674, + "learning_rate": 3.1352561874975872e-06, + "loss": 0.1849, + "step": 76480 + }, + { + "epoch": 2.9533958840109658, + "grad_norm": 0.6468241214752197, + "learning_rate": 3.1095151678958006e-06, + "loss": 0.3005, + "step": 76490 + }, + { + "epoch": 2.9537819993049927, + "grad_norm": 0.3400740921497345, + "learning_rate": 3.083774148294014e-06, + "loss": 0.1335, + "step": 76500 + } + ], + "logging_steps": 10, + "max_steps": 77697, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 4.658527862784e+16, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/training_args.bin b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..51ec8c074aec62c737bdc86f3c7f1d33bd5739dc Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-76500/training_args.bin differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/added_tokens.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/added_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..ea36ca9a30d42cfe00f964ed2b450595386671dc --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/added_tokens.json @@ -0,0 +1,98 @@ +{ + "": 36095, + "": 36085, + "": 36084, + "": 36083, + "": 36082, + "": 36081, + "": 36080, + "": 36079, + "": 36078, + "": 36077, + "": 36076, + "": 36094, + "": 36075, + "": 36074, + "": 36073, + "": 36072, + "": 36071, + "": 36070, + "": 36069, + "": 36068, + "": 36067, + "": 36066, + "": 36093, + "": 36065, + "": 36064, + "": 36063, + "": 36062, + "": 36061, + "": 36060, + "": 36059, + "": 36058, + "": 36057, + "": 36056, + "": 36092, + "": 36055, + "": 36054, + "": 36053, + "": 36052, + "": 36051, + "": 36050, + "": 36049, + "": 36048, + "": 36047, + "": 36046, + "": 36091, + "": 36045, + "": 36044, + "": 36043, + "": 36042, + 
"": 36041, + "": 36040, + "": 36039, + "": 36038, + "": 36037, + "": 36036, + "": 36090, + "": 36035, + "": 36034, + "": 36033, + "": 36032, + "": 36031, + "": 36030, + "": 36029, + "": 36028, + "": 36027, + "": 36026, + "": 36089, + "": 36025, + "": 36024, + "": 36023, + "": 36022, + "": 36021, + "": 36020, + "": 36019, + "": 36018, + "": 36017, + "": 36016, + "": 36088, + "": 36015, + "": 36014, + "": 36013, + "": 36012, + "": 36011, + "": 36010, + "": 36009, + "": 36008, + "": 36007, + "": 36006, + "": 36087, + "": 36005, + "": 36004, + "": 36003, + "": 36002, + "": 36001, + "": 36000, + "": 36086 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/config.json new file mode 100644 index 0000000000000000000000000000000000000000..402574816dbd44d7c00b58882614b0cba909b7da --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/config.json @@ -0,0 +1,31 @@ +{ + "architectures": [ + "T5ForConditionalGeneration" + ], + "classifier_dropout": 0.0, + "d_ff": 3072, + "d_kv": 64, + "d_model": 768, + "decoder_start_token_id": 0, + "dense_act_fn": "relu", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "relu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": false, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "n_positions": 512, + "num_decoder_layers": 12, + "num_heads": 12, + "num_layers": 12, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "torch_dtype": "float32", + "transformers_version": "4.53.2", + "use_cache": true, + "vocab_size": 36096 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/generation_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..88a411aa38dffcace543088cff3153003de6e01a --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/generation_config.json @@ -0,0 +1,7 @@ +{ + "_from_model_config": true, + "decoder_start_token_id": 0, + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.53.2" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/model.safetensors b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3628ebede3ebbdde09019268c6ae3be64d1c0423 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:219c5c8a8b738b7867019d0731620d062e6653b9f195e3c0bbf8cd953420186b +size 903834408 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/optimizer.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..6ba075bebd6aa49e639680b341346c43b97b3a86 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b63fe109e964d853932ef3b1c95e8d53009066b97d3a6734027a79f8ec83849 +size 1807824651 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/rng_state.pth b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..66b70950f0405802f8957abe17346c5b6b12ece6 Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/rng_state.pth differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/scheduler.pt 
b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..779f09d299ea3bb4a35430b3a2ade565a7d6182b Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/scheduler.pt differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/special_tokens_map.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..843b3344f47b1783c48b5ac91bb6015ae9d3c4be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/special_tokens_map.json @@ -0,0 +1,121 @@ +{ + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/spiece.model b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/spiece.model new file mode 100644 index 0000000000000000000000000000000000000000..f8bddaf892bdf23d2148f3a3b358f16c5c45c7be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/spiece.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59986b62f9f0b90edafb9b073ea7b93d21114a5841219a1ea2399ade73f729c6 +size 820370 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/tokenizer_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7da9aea82b39809d9fbe6214e6f4fa2340b695f8 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/tokenizer_config.json @@ -0,0 +1,905 @@ +{ + "add_prefix_space": true, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "36000": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36001": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36002": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36003": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36004": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + 
"single_word": false, + "special": true + }, + "36005": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36006": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36007": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36008": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36009": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36010": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36011": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36012": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36013": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36014": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36015": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36016": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36017": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36018": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36019": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36020": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36021": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36022": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36023": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36024": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36025": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36026": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36027": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36028": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36029": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36030": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": 
false, + "special": true + }, + "36031": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36032": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36033": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36034": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36035": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36036": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36037": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36038": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36039": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36040": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36041": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36042": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36043": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36044": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36045": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36046": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36047": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36048": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36049": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36050": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36051": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36052": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36053": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36054": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36055": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36056": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + 
"special": true + }, + "36057": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36058": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36059": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36060": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36061": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36062": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36063": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36064": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36065": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36066": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36067": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36068": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36069": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36070": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36071": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36072": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36073": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36074": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36075": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36076": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36077": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36078": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36079": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36080": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36081": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36082": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + 
}, + "36083": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36084": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36085": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36086": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36087": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36088": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36089": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36090": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36091": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36092": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36093": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36094": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36095": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_ids": 96, + "extra_special_tokens": {}, + "legacy": true, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "tokenizer_class": "T5Tokenizer", + "unk_token": "" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/trainer_state.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..601e373d1ff4d845a2af07acb38ea503fe8eef09 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/trainer_state.json @@ -0,0 +1,53934 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.9730877640063325, + "eval_steps": 500, + "global_step": 77000, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0003861152940267964, + "grad_norm": 8.003422737121582, + "learning_rate": 0.0001999768330823584, + "loss": 5.0107, + "step": 10 + }, + { + "epoch": 0.0007722305880535929, + "grad_norm": 11.965606689453125, + "learning_rate": 
0.0001999510920627566, + "loss": 0.9013, + "step": 20 + }, + { + "epoch": 0.0011583458820803893, + "grad_norm": 9.783374786376953, + "learning_rate": 0.00019992535104315483, + "loss": 0.5506, + "step": 30 + }, + { + "epoch": 0.0015444611761071857, + "grad_norm": 5.720436096191406, + "learning_rate": 0.00019989961002355304, + "loss": 0.501, + "step": 40 + }, + { + "epoch": 0.001930576470133982, + "grad_norm": 11.18126392364502, + "learning_rate": 0.00019987386900395125, + "loss": 0.8176, + "step": 50 + }, + { + "epoch": 0.0023166917641607786, + "grad_norm": 8.88875675201416, + "learning_rate": 0.00019984812798434947, + "loss": 0.5404, + "step": 60 + }, + { + "epoch": 0.0027028070581875748, + "grad_norm": 2.9886066913604736, + "learning_rate": 0.00019982238696474768, + "loss": 0.5295, + "step": 70 + }, + { + "epoch": 0.0030889223522143714, + "grad_norm": 8.936307907104492, + "learning_rate": 0.0001997966459451459, + "loss": 0.7398, + "step": 80 + }, + { + "epoch": 0.0034750376462411676, + "grad_norm": 11.393534660339355, + "learning_rate": 0.0001997709049255441, + "loss": 0.6333, + "step": 90 + }, + { + "epoch": 0.003861152940267964, + "grad_norm": 8.725994110107422, + "learning_rate": 0.00019974516390594235, + "loss": 0.4689, + "step": 100 + }, + { + "epoch": 0.00424726823429476, + "grad_norm": 20.316652297973633, + "learning_rate": 0.00019971942288634053, + "loss": 0.8522, + "step": 110 + }, + { + "epoch": 0.004633383528321557, + "grad_norm": 4.232663631439209, + "learning_rate": 0.00019969368186673875, + "loss": 0.5041, + "step": 120 + }, + { + "epoch": 0.005019498822348353, + "grad_norm": 2.609255313873291, + "learning_rate": 0.00019966794084713696, + "loss": 0.7439, + "step": 130 + }, + { + "epoch": 0.0054056141163751495, + "grad_norm": 10.063919067382812, + "learning_rate": 0.00019964219982753517, + "loss": 0.6702, + "step": 140 + }, + { + "epoch": 0.005791729410401946, + "grad_norm": 5.799802303314209, + "learning_rate": 0.00019961645880793339, + "loss": 0.7356, + "step": 150 + }, + { + "epoch": 0.006177844704428743, + "grad_norm": 12.664258003234863, + "learning_rate": 0.0001995907177883316, + "loss": 0.4067, + "step": 160 + }, + { + "epoch": 0.006563959998455539, + "grad_norm": 9.42366886138916, + "learning_rate": 0.00019956497676872984, + "loss": 0.4767, + "step": 170 + }, + { + "epoch": 0.006950075292482335, + "grad_norm": 5.382272243499756, + "learning_rate": 0.00019953923574912803, + "loss": 0.536, + "step": 180 + }, + { + "epoch": 0.0073361905865091314, + "grad_norm": 9.797371864318848, + "learning_rate": 0.00019951349472952624, + "loss": 0.4735, + "step": 190 + }, + { + "epoch": 0.007722305880535928, + "grad_norm": 7.965329647064209, + "learning_rate": 0.00019948775370992445, + "loss": 0.3881, + "step": 200 + }, + { + "epoch": 0.008108421174562725, + "grad_norm": 4.075791835784912, + "learning_rate": 0.00019946201269032267, + "loss": 0.5564, + "step": 210 + }, + { + "epoch": 0.00849453646858952, + "grad_norm": 24.367305755615234, + "learning_rate": 0.0001994362716707209, + "loss": 0.9795, + "step": 220 + }, + { + "epoch": 0.008880651762616317, + "grad_norm": 9.627866744995117, + "learning_rate": 0.0001994105306511191, + "loss": 0.4528, + "step": 230 + }, + { + "epoch": 0.009266767056643114, + "grad_norm": 7.469555854797363, + "learning_rate": 0.00019938478963151733, + "loss": 0.447, + "step": 240 + }, + { + "epoch": 0.00965288235066991, + "grad_norm": 7.426730155944824, + "learning_rate": 0.00019935904861191552, + "loss": 0.6026, + "step": 250 + }, + { + "epoch": 
0.010038997644696707, + "grad_norm": 6.999317169189453, + "learning_rate": 0.00019933330759231373, + "loss": 0.4962, + "step": 260 + }, + { + "epoch": 0.010425112938723502, + "grad_norm": 10.492286682128906, + "learning_rate": 0.00019930756657271194, + "loss": 0.7987, + "step": 270 + }, + { + "epoch": 0.010811228232750299, + "grad_norm": 7.079407215118408, + "learning_rate": 0.00019928182555311016, + "loss": 0.4395, + "step": 280 + }, + { + "epoch": 0.011197343526777096, + "grad_norm": 9.610014915466309, + "learning_rate": 0.0001992560845335084, + "loss": 0.748, + "step": 290 + }, + { + "epoch": 0.011583458820803891, + "grad_norm": 5.993048667907715, + "learning_rate": 0.00019923034351390658, + "loss": 0.4328, + "step": 300 + }, + { + "epoch": 0.011969574114830689, + "grad_norm": 7.336791515350342, + "learning_rate": 0.00019920460249430483, + "loss": 0.4104, + "step": 310 + }, + { + "epoch": 0.012355689408857486, + "grad_norm": 7.967221736907959, + "learning_rate": 0.000199178861474703, + "loss": 0.4662, + "step": 320 + }, + { + "epoch": 0.012741804702884281, + "grad_norm": 4.464987754821777, + "learning_rate": 0.00019915312045510125, + "loss": 0.725, + "step": 330 + }, + { + "epoch": 0.013127919996911078, + "grad_norm": 8.669449806213379, + "learning_rate": 0.00019912737943549944, + "loss": 0.4256, + "step": 340 + }, + { + "epoch": 0.013514035290937873, + "grad_norm": 4.114014148712158, + "learning_rate": 0.00019910163841589765, + "loss": 0.4477, + "step": 350 + }, + { + "epoch": 0.01390015058496467, + "grad_norm": 9.254106521606445, + "learning_rate": 0.0001990758973962959, + "loss": 0.514, + "step": 360 + }, + { + "epoch": 0.014286265878991468, + "grad_norm": 0.8039970993995667, + "learning_rate": 0.00019905015637669408, + "loss": 0.5802, + "step": 370 + }, + { + "epoch": 0.014672381173018263, + "grad_norm": 3.9931838512420654, + "learning_rate": 0.00019902441535709232, + "loss": 0.8973, + "step": 380 + }, + { + "epoch": 0.01505849646704506, + "grad_norm": 1.7645355463027954, + "learning_rate": 0.0001989986743374905, + "loss": 0.7108, + "step": 390 + }, + { + "epoch": 0.015444611761071855, + "grad_norm": 6.8542866706848145, + "learning_rate": 0.00019897293331788875, + "loss": 0.5796, + "step": 400 + }, + { + "epoch": 0.015830727055098654, + "grad_norm": 5.278103828430176, + "learning_rate": 0.00019894719229828696, + "loss": 0.3841, + "step": 410 + }, + { + "epoch": 0.01621684234912545, + "grad_norm": 9.00206184387207, + "learning_rate": 0.00019892145127868514, + "loss": 0.5891, + "step": 420 + }, + { + "epoch": 0.016602957643152245, + "grad_norm": 7.684702396392822, + "learning_rate": 0.00019889571025908339, + "loss": 0.4868, + "step": 430 + }, + { + "epoch": 0.01698907293717904, + "grad_norm": 4.198502540588379, + "learning_rate": 0.00019886996923948157, + "loss": 0.571, + "step": 440 + }, + { + "epoch": 0.01737518823120584, + "grad_norm": 7.454501628875732, + "learning_rate": 0.0001988442282198798, + "loss": 0.5133, + "step": 450 + }, + { + "epoch": 0.017761303525232634, + "grad_norm": 13.236722946166992, + "learning_rate": 0.000198818487200278, + "loss": 0.4139, + "step": 460 + }, + { + "epoch": 0.01814741881925943, + "grad_norm": 6.4592390060424805, + "learning_rate": 0.00019879274618067624, + "loss": 0.6078, + "step": 470 + }, + { + "epoch": 0.01853353411328623, + "grad_norm": 11.73417854309082, + "learning_rate": 0.00019876700516107445, + "loss": 0.5472, + "step": 480 + }, + { + "epoch": 0.018919649407313024, + "grad_norm": 2.5162808895111084, + "learning_rate": 
0.00019874126414147264, + "loss": 0.6611, + "step": 490 + }, + { + "epoch": 0.01930576470133982, + "grad_norm": 4.9637837409973145, + "learning_rate": 0.00019871552312187088, + "loss": 0.6472, + "step": 500 + }, + { + "epoch": 0.019691879995366618, + "grad_norm": 11.545489311218262, + "learning_rate": 0.00019868978210226906, + "loss": 0.5304, + "step": 510 + }, + { + "epoch": 0.020077995289393413, + "grad_norm": 5.197858810424805, + "learning_rate": 0.0001986640410826673, + "loss": 0.605, + "step": 520 + }, + { + "epoch": 0.02046411058342021, + "grad_norm": 4.935055255889893, + "learning_rate": 0.0001986383000630655, + "loss": 0.6524, + "step": 530 + }, + { + "epoch": 0.020850225877447004, + "grad_norm": 5.838052749633789, + "learning_rate": 0.00019861255904346373, + "loss": 0.4957, + "step": 540 + }, + { + "epoch": 0.021236341171473803, + "grad_norm": 4.682408809661865, + "learning_rate": 0.00019858681802386194, + "loss": 0.8523, + "step": 550 + }, + { + "epoch": 0.021622456465500598, + "grad_norm": 10.720857620239258, + "learning_rate": 0.00019856107700426013, + "loss": 0.516, + "step": 560 + }, + { + "epoch": 0.022008571759527393, + "grad_norm": 6.515562534332275, + "learning_rate": 0.00019853533598465837, + "loss": 0.6095, + "step": 570 + }, + { + "epoch": 0.022394687053554192, + "grad_norm": 3.204960584640503, + "learning_rate": 0.00019850959496505656, + "loss": 0.6624, + "step": 580 + }, + { + "epoch": 0.022780802347580988, + "grad_norm": 2.305497884750366, + "learning_rate": 0.0001984838539454548, + "loss": 0.5986, + "step": 590 + }, + { + "epoch": 0.023166917641607783, + "grad_norm": 13.07105541229248, + "learning_rate": 0.000198458112925853, + "loss": 0.337, + "step": 600 + }, + { + "epoch": 0.023553032935634582, + "grad_norm": 3.1491329669952393, + "learning_rate": 0.00019843237190625122, + "loss": 0.5466, + "step": 610 + }, + { + "epoch": 0.023939148229661377, + "grad_norm": 1.92014479637146, + "learning_rate": 0.00019840663088664944, + "loss": 0.5525, + "step": 620 + }, + { + "epoch": 0.024325263523688172, + "grad_norm": 2.206550121307373, + "learning_rate": 0.00019838088986704762, + "loss": 0.5069, + "step": 630 + }, + { + "epoch": 0.02471137881771497, + "grad_norm": 2.386288642883301, + "learning_rate": 0.00019835514884744586, + "loss": 0.3749, + "step": 640 + }, + { + "epoch": 0.025097494111741767, + "grad_norm": 7.393959045410156, + "learning_rate": 0.00019832940782784405, + "loss": 0.7144, + "step": 650 + }, + { + "epoch": 0.025483609405768562, + "grad_norm": 5.7293171882629395, + "learning_rate": 0.0001983036668082423, + "loss": 0.5052, + "step": 660 + }, + { + "epoch": 0.025869724699795357, + "grad_norm": 6.440220832824707, + "learning_rate": 0.0001982779257886405, + "loss": 0.4343, + "step": 670 + }, + { + "epoch": 0.026255839993822156, + "grad_norm": 0.8553487658500671, + "learning_rate": 0.00019825218476903872, + "loss": 0.7562, + "step": 680 + }, + { + "epoch": 0.02664195528784895, + "grad_norm": 3.762784719467163, + "learning_rate": 0.00019822644374943693, + "loss": 0.5593, + "step": 690 + }, + { + "epoch": 0.027028070581875747, + "grad_norm": 11.674392700195312, + "learning_rate": 0.00019820070272983512, + "loss": 0.6069, + "step": 700 + }, + { + "epoch": 0.027414185875902546, + "grad_norm": 8.631232261657715, + "learning_rate": 0.00019817496171023336, + "loss": 0.3584, + "step": 710 + }, + { + "epoch": 0.02780030116992934, + "grad_norm": 5.7163920402526855, + "learning_rate": 0.00019814922069063157, + "loss": 0.5563, + "step": 720 + }, + { + "epoch": 
0.028186416463956136, + "grad_norm": 8.186172485351562, + "learning_rate": 0.00019812347967102978, + "loss": 0.5289, + "step": 730 + }, + { + "epoch": 0.028572531757982935, + "grad_norm": 7.287814140319824, + "learning_rate": 0.000198097738651428, + "loss": 0.4543, + "step": 740 + }, + { + "epoch": 0.02895864705200973, + "grad_norm": 6.621245384216309, + "learning_rate": 0.0001980719976318262, + "loss": 0.3244, + "step": 750 + }, + { + "epoch": 0.029344762346036526, + "grad_norm": 3.5209403038024902, + "learning_rate": 0.00019804625661222442, + "loss": 0.5385, + "step": 760 + }, + { + "epoch": 0.029730877640063325, + "grad_norm": 2.562343120574951, + "learning_rate": 0.00019802051559262264, + "loss": 0.4868, + "step": 770 + }, + { + "epoch": 0.03011699293409012, + "grad_norm": 7.782780647277832, + "learning_rate": 0.00019799477457302085, + "loss": 0.5682, + "step": 780 + }, + { + "epoch": 0.030503108228116915, + "grad_norm": 8.173531532287598, + "learning_rate": 0.00019796903355341906, + "loss": 0.3557, + "step": 790 + }, + { + "epoch": 0.03088922352214371, + "grad_norm": 4.502675533294678, + "learning_rate": 0.00019794329253381728, + "loss": 0.517, + "step": 800 + }, + { + "epoch": 0.031275338816170506, + "grad_norm": 6.314894199371338, + "learning_rate": 0.0001979175515142155, + "loss": 0.6392, + "step": 810 + }, + { + "epoch": 0.03166145411019731, + "grad_norm": 7.054763317108154, + "learning_rate": 0.0001978918104946137, + "loss": 0.5823, + "step": 820 + }, + { + "epoch": 0.032047569404224104, + "grad_norm": 1.7847551107406616, + "learning_rate": 0.00019786606947501192, + "loss": 0.4495, + "step": 830 + }, + { + "epoch": 0.0324336846982509, + "grad_norm": 5.268612861633301, + "learning_rate": 0.00019784032845541013, + "loss": 0.4379, + "step": 840 + }, + { + "epoch": 0.032819799992277694, + "grad_norm": 4.834717273712158, + "learning_rate": 0.00019781458743580834, + "loss": 0.5379, + "step": 850 + }, + { + "epoch": 0.03320591528630449, + "grad_norm": 6.077203273773193, + "learning_rate": 0.00019778884641620656, + "loss": 0.5666, + "step": 860 + }, + { + "epoch": 0.033592030580331285, + "grad_norm": 0.9583851099014282, + "learning_rate": 0.00019776310539660477, + "loss": 0.8146, + "step": 870 + }, + { + "epoch": 0.03397814587435808, + "grad_norm": 0.8457469940185547, + "learning_rate": 0.00019773736437700298, + "loss": 0.3497, + "step": 880 + }, + { + "epoch": 0.03436426116838488, + "grad_norm": 5.251153945922852, + "learning_rate": 0.0001977116233574012, + "loss": 0.6299, + "step": 890 + }, + { + "epoch": 0.03475037646241168, + "grad_norm": 4.057605266571045, + "learning_rate": 0.0001976858823377994, + "loss": 0.5829, + "step": 900 + }, + { + "epoch": 0.03513649175643847, + "grad_norm": 7.625199794769287, + "learning_rate": 0.00019766014131819762, + "loss": 0.452, + "step": 910 + }, + { + "epoch": 0.03552260705046527, + "grad_norm": 4.618866443634033, + "learning_rate": 0.00019763440029859584, + "loss": 0.5693, + "step": 920 + }, + { + "epoch": 0.035908722344492064, + "grad_norm": 8.480955123901367, + "learning_rate": 0.00019760865927899405, + "loss": 0.4894, + "step": 930 + }, + { + "epoch": 0.03629483763851886, + "grad_norm": 6.537581920623779, + "learning_rate": 0.00019758291825939226, + "loss": 0.7396, + "step": 940 + }, + { + "epoch": 0.03668095293254566, + "grad_norm": 8.093205451965332, + "learning_rate": 0.00019755717723979048, + "loss": 0.3702, + "step": 950 + }, + { + "epoch": 0.03706706822657246, + "grad_norm": 2.523141860961914, + "learning_rate": 0.0001975314362201887, + 
"loss": 0.3746, + "step": 960 + }, + { + "epoch": 0.03745318352059925, + "grad_norm": 1.2707194089889526, + "learning_rate": 0.0001975056952005869, + "loss": 0.3211, + "step": 970 + }, + { + "epoch": 0.03783929881462605, + "grad_norm": 2.818798780441284, + "learning_rate": 0.00019747995418098512, + "loss": 0.6594, + "step": 980 + }, + { + "epoch": 0.03822541410865284, + "grad_norm": 7.373154640197754, + "learning_rate": 0.00019745421316138333, + "loss": 0.7825, + "step": 990 + }, + { + "epoch": 0.03861152940267964, + "grad_norm": 2.8925669193267822, + "learning_rate": 0.00019742847214178154, + "loss": 0.4847, + "step": 1000 + }, + { + "epoch": 0.038997644696706434, + "grad_norm": 10.87833023071289, + "learning_rate": 0.00019740273112217976, + "loss": 0.5098, + "step": 1010 + }, + { + "epoch": 0.039383759990733236, + "grad_norm": 3.8262317180633545, + "learning_rate": 0.00019737699010257797, + "loss": 0.5168, + "step": 1020 + }, + { + "epoch": 0.03976987528476003, + "grad_norm": 2.5567004680633545, + "learning_rate": 0.00019735124908297618, + "loss": 0.5597, + "step": 1030 + }, + { + "epoch": 0.04015599057878683, + "grad_norm": 4.385695457458496, + "learning_rate": 0.0001973255080633744, + "loss": 0.3187, + "step": 1040 + }, + { + "epoch": 0.04054210587281362, + "grad_norm": 1.8186907768249512, + "learning_rate": 0.0001972997670437726, + "loss": 0.6274, + "step": 1050 + }, + { + "epoch": 0.04092822116684042, + "grad_norm": 7.446481704711914, + "learning_rate": 0.00019727402602417082, + "loss": 0.4365, + "step": 1060 + }, + { + "epoch": 0.04131433646086721, + "grad_norm": 8.973576545715332, + "learning_rate": 0.00019724828500456904, + "loss": 0.6518, + "step": 1070 + }, + { + "epoch": 0.04170045175489401, + "grad_norm": 4.984101295471191, + "learning_rate": 0.00019722254398496725, + "loss": 0.4694, + "step": 1080 + }, + { + "epoch": 0.04208656704892081, + "grad_norm": 8.2625093460083, + "learning_rate": 0.00019719680296536546, + "loss": 0.5532, + "step": 1090 + }, + { + "epoch": 0.042472682342947606, + "grad_norm": 2.2425265312194824, + "learning_rate": 0.0001971710619457637, + "loss": 0.4011, + "step": 1100 + }, + { + "epoch": 0.0428587976369744, + "grad_norm": 1.4552969932556152, + "learning_rate": 0.0001971453209261619, + "loss": 0.396, + "step": 1110 + }, + { + "epoch": 0.043244912931001196, + "grad_norm": 2.0675857067108154, + "learning_rate": 0.0001971195799065601, + "loss": 0.3727, + "step": 1120 + }, + { + "epoch": 0.04363102822502799, + "grad_norm": 5.24460506439209, + "learning_rate": 0.00019709383888695832, + "loss": 0.5016, + "step": 1130 + }, + { + "epoch": 0.04401714351905479, + "grad_norm": 5.524955749511719, + "learning_rate": 0.00019706809786735653, + "loss": 0.5866, + "step": 1140 + }, + { + "epoch": 0.04440325881308159, + "grad_norm": 4.2749199867248535, + "learning_rate": 0.00019704235684775474, + "loss": 0.4591, + "step": 1150 + }, + { + "epoch": 0.044789374107108385, + "grad_norm": 3.460395336151123, + "learning_rate": 0.00019701661582815296, + "loss": 0.5275, + "step": 1160 + }, + { + "epoch": 0.04517548940113518, + "grad_norm": 1.3979772329330444, + "learning_rate": 0.0001969908748085512, + "loss": 0.3375, + "step": 1170 + }, + { + "epoch": 0.045561604695161975, + "grad_norm": 2.29923939704895, + "learning_rate": 0.00019696513378894938, + "loss": 0.5683, + "step": 1180 + }, + { + "epoch": 0.04594771998918877, + "grad_norm": 3.211496353149414, + "learning_rate": 0.0001969393927693476, + "loss": 0.7122, + "step": 1190 + }, + { + "epoch": 0.046333835283215566, + 
"grad_norm": 4.18447208404541, + "learning_rate": 0.0001969136517497458, + "loss": 0.5149, + "step": 1200 + }, + { + "epoch": 0.04671995057724236, + "grad_norm": 14.650918960571289, + "learning_rate": 0.00019688791073014402, + "loss": 0.6384, + "step": 1210 + }, + { + "epoch": 0.047106065871269164, + "grad_norm": 4.956239700317383, + "learning_rate": 0.00019686216971054226, + "loss": 0.3602, + "step": 1220 + }, + { + "epoch": 0.04749218116529596, + "grad_norm": 4.0486860275268555, + "learning_rate": 0.00019683642869094045, + "loss": 0.7719, + "step": 1230 + }, + { + "epoch": 0.047878296459322754, + "grad_norm": 4.587133407592773, + "learning_rate": 0.0001968106876713387, + "loss": 0.4079, + "step": 1240 + }, + { + "epoch": 0.04826441175334955, + "grad_norm": 0.7830008268356323, + "learning_rate": 0.00019678494665173688, + "loss": 0.5841, + "step": 1250 + }, + { + "epoch": 0.048650527047376345, + "grad_norm": 5.378068447113037, + "learning_rate": 0.0001967592056321351, + "loss": 0.3226, + "step": 1260 + }, + { + "epoch": 0.04903664234140314, + "grad_norm": 4.002605438232422, + "learning_rate": 0.0001967334646125333, + "loss": 0.4411, + "step": 1270 + }, + { + "epoch": 0.04942275763542994, + "grad_norm": 4.695134162902832, + "learning_rate": 0.00019670772359293152, + "loss": 0.415, + "step": 1280 + }, + { + "epoch": 0.04980887292945674, + "grad_norm": 6.046143054962158, + "learning_rate": 0.00019668198257332976, + "loss": 0.463, + "step": 1290 + }, + { + "epoch": 0.05019498822348353, + "grad_norm": 1.8300361633300781, + "learning_rate": 0.00019665624155372794, + "loss": 0.408, + "step": 1300 + }, + { + "epoch": 0.05058110351751033, + "grad_norm": 5.80141544342041, + "learning_rate": 0.00019663050053412618, + "loss": 0.4481, + "step": 1310 + }, + { + "epoch": 0.050967218811537124, + "grad_norm": 4.103593349456787, + "learning_rate": 0.00019660475951452437, + "loss": 0.3054, + "step": 1320 + }, + { + "epoch": 0.05135333410556392, + "grad_norm": 9.129929542541504, + "learning_rate": 0.00019657901849492258, + "loss": 0.5554, + "step": 1330 + }, + { + "epoch": 0.051739449399590715, + "grad_norm": 6.979663372039795, + "learning_rate": 0.0001965532774753208, + "loss": 0.388, + "step": 1340 + }, + { + "epoch": 0.05212556469361752, + "grad_norm": 6.329915523529053, + "learning_rate": 0.000196527536455719, + "loss": 0.8894, + "step": 1350 + }, + { + "epoch": 0.05251167998764431, + "grad_norm": 0.2686227262020111, + "learning_rate": 0.00019650179543611725, + "loss": 0.5032, + "step": 1360 + }, + { + "epoch": 0.05289779528167111, + "grad_norm": 4.818896770477295, + "learning_rate": 0.00019647605441651544, + "loss": 0.3417, + "step": 1370 + }, + { + "epoch": 0.0532839105756979, + "grad_norm": 6.252008438110352, + "learning_rate": 0.00019645031339691368, + "loss": 0.398, + "step": 1380 + }, + { + "epoch": 0.0536700258697247, + "grad_norm": 1.2734620571136475, + "learning_rate": 0.00019642457237731186, + "loss": 0.5369, + "step": 1390 + }, + { + "epoch": 0.054056141163751494, + "grad_norm": 6.511690616607666, + "learning_rate": 0.00019639883135771008, + "loss": 0.4646, + "step": 1400 + }, + { + "epoch": 0.054442256457778296, + "grad_norm": 3.2352371215820312, + "learning_rate": 0.00019637309033810832, + "loss": 0.3805, + "step": 1410 + }, + { + "epoch": 0.05482837175180509, + "grad_norm": 1.0574132204055786, + "learning_rate": 0.0001963473493185065, + "loss": 0.4572, + "step": 1420 + }, + { + "epoch": 0.055214487045831886, + "grad_norm": 2.8525452613830566, + "learning_rate": 0.00019632160829890474, + 
"loss": 0.4417, + "step": 1430 + }, + { + "epoch": 0.05560060233985868, + "grad_norm": 3.588179111480713, + "learning_rate": 0.00019629586727930293, + "loss": 0.6214, + "step": 1440 + }, + { + "epoch": 0.05598671763388548, + "grad_norm": 3.969320058822632, + "learning_rate": 0.00019627012625970117, + "loss": 0.6114, + "step": 1450 + }, + { + "epoch": 0.05637283292791227, + "grad_norm": 3.465053081512451, + "learning_rate": 0.00019624438524009936, + "loss": 0.6066, + "step": 1460 + }, + { + "epoch": 0.05675894822193907, + "grad_norm": 3.5419201850891113, + "learning_rate": 0.00019621864422049757, + "loss": 0.3906, + "step": 1470 + }, + { + "epoch": 0.05714506351596587, + "grad_norm": 3.4580233097076416, + "learning_rate": 0.0001961929032008958, + "loss": 0.9283, + "step": 1480 + }, + { + "epoch": 0.057531178809992665, + "grad_norm": 4.222144603729248, + "learning_rate": 0.000196167162181294, + "loss": 0.4225, + "step": 1490 + }, + { + "epoch": 0.05791729410401946, + "grad_norm": 0.8072681427001953, + "learning_rate": 0.00019614142116169224, + "loss": 0.5012, + "step": 1500 + }, + { + "epoch": 0.058303409398046256, + "grad_norm": 2.827258348464966, + "learning_rate": 0.00019611568014209042, + "loss": 0.4333, + "step": 1510 + }, + { + "epoch": 0.05868952469207305, + "grad_norm": 1.3494776487350464, + "learning_rate": 0.00019608993912248866, + "loss": 0.3019, + "step": 1520 + }, + { + "epoch": 0.05907563998609985, + "grad_norm": 4.3279900550842285, + "learning_rate": 0.00019606419810288688, + "loss": 0.4807, + "step": 1530 + }, + { + "epoch": 0.05946175528012665, + "grad_norm": 3.8996474742889404, + "learning_rate": 0.0001960384570832851, + "loss": 0.4876, + "step": 1540 + }, + { + "epoch": 0.059847870574153444, + "grad_norm": 5.255978584289551, + "learning_rate": 0.0001960127160636833, + "loss": 0.4661, + "step": 1550 + }, + { + "epoch": 0.06023398586818024, + "grad_norm": 5.172120094299316, + "learning_rate": 0.0001959869750440815, + "loss": 0.4885, + "step": 1560 + }, + { + "epoch": 0.060620101162207035, + "grad_norm": 5.385959625244141, + "learning_rate": 0.00019596123402447973, + "loss": 0.2995, + "step": 1570 + }, + { + "epoch": 0.06100621645623383, + "grad_norm": 3.9922871589660645, + "learning_rate": 0.00019593549300487792, + "loss": 0.4568, + "step": 1580 + }, + { + "epoch": 0.061392331750260626, + "grad_norm": 6.048642158508301, + "learning_rate": 0.00019590975198527616, + "loss": 0.4649, + "step": 1590 + }, + { + "epoch": 0.06177844704428742, + "grad_norm": 1.0315563678741455, + "learning_rate": 0.00019588401096567437, + "loss": 0.5175, + "step": 1600 + }, + { + "epoch": 0.06216456233831422, + "grad_norm": 12.403678894042969, + "learning_rate": 0.00019585826994607258, + "loss": 0.523, + "step": 1610 + }, + { + "epoch": 0.06255067763234101, + "grad_norm": 6.127188205718994, + "learning_rate": 0.0001958325289264708, + "loss": 0.5861, + "step": 1620 + }, + { + "epoch": 0.06293679292636781, + "grad_norm": 6.398592948913574, + "learning_rate": 0.00019580678790686898, + "loss": 0.7471, + "step": 1630 + }, + { + "epoch": 0.06332290822039462, + "grad_norm": 4.127200603485107, + "learning_rate": 0.00019578104688726722, + "loss": 0.4921, + "step": 1640 + }, + { + "epoch": 0.06370902351442141, + "grad_norm": 4.601541042327881, + "learning_rate": 0.0001957553058676654, + "loss": 0.3682, + "step": 1650 + }, + { + "epoch": 0.06409513880844821, + "grad_norm": 6.32781457901001, + "learning_rate": 0.00019572956484806365, + "loss": 0.3748, + "step": 1660 + }, + { + "epoch": 0.064481254102475, + 
"grad_norm": 3.7280173301696777, + "learning_rate": 0.00019570382382846186, + "loss": 0.5912, + "step": 1670 + }, + { + "epoch": 0.0648673693965018, + "grad_norm": 6.7821946144104, + "learning_rate": 0.00019567808280886008, + "loss": 0.4073, + "step": 1680 + }, + { + "epoch": 0.0652534846905286, + "grad_norm": 1.4645791053771973, + "learning_rate": 0.0001956523417892583, + "loss": 0.7164, + "step": 1690 + }, + { + "epoch": 0.06563959998455539, + "grad_norm": 2.367361545562744, + "learning_rate": 0.00019562660076965648, + "loss": 0.3859, + "step": 1700 + }, + { + "epoch": 0.06602571527858218, + "grad_norm": 2.198493480682373, + "learning_rate": 0.00019560085975005472, + "loss": 0.4928, + "step": 1710 + }, + { + "epoch": 0.06641183057260898, + "grad_norm": 1.882567048072815, + "learning_rate": 0.00019557511873045293, + "loss": 0.5861, + "step": 1720 + }, + { + "epoch": 0.06679794586663577, + "grad_norm": 6.324089527130127, + "learning_rate": 0.00019554937771085114, + "loss": 0.6249, + "step": 1730 + }, + { + "epoch": 0.06718406116066257, + "grad_norm": 4.283392906188965, + "learning_rate": 0.00019552363669124936, + "loss": 0.5403, + "step": 1740 + }, + { + "epoch": 0.06757017645468937, + "grad_norm": 4.464428424835205, + "learning_rate": 0.00019549789567164757, + "loss": 0.5815, + "step": 1750 + }, + { + "epoch": 0.06795629174871616, + "grad_norm": 0.32923218607902527, + "learning_rate": 0.00019547215465204578, + "loss": 0.3791, + "step": 1760 + }, + { + "epoch": 0.06834240704274297, + "grad_norm": 5.255763053894043, + "learning_rate": 0.00019544641363244397, + "loss": 0.4252, + "step": 1770 + }, + { + "epoch": 0.06872852233676977, + "grad_norm": 2.1615116596221924, + "learning_rate": 0.0001954206726128422, + "loss": 0.4457, + "step": 1780 + }, + { + "epoch": 0.06911463763079656, + "grad_norm": 2.0209217071533203, + "learning_rate": 0.00019539493159324042, + "loss": 0.4377, + "step": 1790 + }, + { + "epoch": 0.06950075292482336, + "grad_norm": 8.12317943572998, + "learning_rate": 0.00019536919057363864, + "loss": 0.4025, + "step": 1800 + }, + { + "epoch": 0.06988686821885015, + "grad_norm": 1.7034660577774048, + "learning_rate": 0.00019534344955403685, + "loss": 0.2915, + "step": 1810 + }, + { + "epoch": 0.07027298351287695, + "grad_norm": 4.640942096710205, + "learning_rate": 0.00019531770853443506, + "loss": 0.6588, + "step": 1820 + }, + { + "epoch": 0.07065909880690374, + "grad_norm": 4.202883243560791, + "learning_rate": 0.00019529196751483328, + "loss": 0.4442, + "step": 1830 + }, + { + "epoch": 0.07104521410093054, + "grad_norm": 3.26898193359375, + "learning_rate": 0.00019526622649523146, + "loss": 0.5065, + "step": 1840 + }, + { + "epoch": 0.07143132939495733, + "grad_norm": 8.189995765686035, + "learning_rate": 0.0001952404854756297, + "loss": 0.5258, + "step": 1850 + }, + { + "epoch": 0.07181744468898413, + "grad_norm": 3.2618284225463867, + "learning_rate": 0.00019521474445602792, + "loss": 0.5037, + "step": 1860 + }, + { + "epoch": 0.07220355998301092, + "grad_norm": 2.168548583984375, + "learning_rate": 0.00019518900343642613, + "loss": 0.4887, + "step": 1870 + }, + { + "epoch": 0.07258967527703772, + "grad_norm": 2.2029404640197754, + "learning_rate": 0.00019516326241682434, + "loss": 0.4646, + "step": 1880 + }, + { + "epoch": 0.07297579057106451, + "grad_norm": 1.561713695526123, + "learning_rate": 0.00019513752139722256, + "loss": 0.432, + "step": 1890 + }, + { + "epoch": 0.07336190586509132, + "grad_norm": 3.428372621536255, + "learning_rate": 0.00019511178037762077, + 
"loss": 0.4124, + "step": 1900 + }, + { + "epoch": 0.07374802115911812, + "grad_norm": 5.6706671714782715, + "learning_rate": 0.00019508603935801898, + "loss": 0.4431, + "step": 1910 + }, + { + "epoch": 0.07413413645314491, + "grad_norm": 8.872734069824219, + "learning_rate": 0.0001950602983384172, + "loss": 0.7001, + "step": 1920 + }, + { + "epoch": 0.07452025174717171, + "grad_norm": 1.6821974515914917, + "learning_rate": 0.0001950345573188154, + "loss": 0.4204, + "step": 1930 + }, + { + "epoch": 0.0749063670411985, + "grad_norm": 3.117480993270874, + "learning_rate": 0.00019500881629921362, + "loss": 0.3748, + "step": 1940 + }, + { + "epoch": 0.0752924823352253, + "grad_norm": 0.8384984731674194, + "learning_rate": 0.00019498307527961184, + "loss": 0.2636, + "step": 1950 + }, + { + "epoch": 0.0756785976292521, + "grad_norm": 2.8956708908081055, + "learning_rate": 0.00019495733426001005, + "loss": 0.4514, + "step": 1960 + }, + { + "epoch": 0.07606471292327889, + "grad_norm": 11.233087539672852, + "learning_rate": 0.00019493159324040826, + "loss": 0.4002, + "step": 1970 + }, + { + "epoch": 0.07645082821730569, + "grad_norm": 4.066893100738525, + "learning_rate": 0.00019490585222080648, + "loss": 0.4449, + "step": 1980 + }, + { + "epoch": 0.07683694351133248, + "grad_norm": 4.854077339172363, + "learning_rate": 0.0001948801112012047, + "loss": 0.4961, + "step": 1990 + }, + { + "epoch": 0.07722305880535928, + "grad_norm": 2.5722827911376953, + "learning_rate": 0.0001948543701816029, + "loss": 0.3743, + "step": 2000 + }, + { + "epoch": 0.07760917409938607, + "grad_norm": 5.842077255249023, + "learning_rate": 0.00019482862916200112, + "loss": 0.2906, + "step": 2010 + }, + { + "epoch": 0.07799528939341287, + "grad_norm": 6.163092136383057, + "learning_rate": 0.00019480288814239933, + "loss": 0.4374, + "step": 2020 + }, + { + "epoch": 0.07838140468743968, + "grad_norm": 4.589334487915039, + "learning_rate": 0.00019477714712279754, + "loss": 0.484, + "step": 2030 + }, + { + "epoch": 0.07876751998146647, + "grad_norm": 6.951212406158447, + "learning_rate": 0.00019475140610319576, + "loss": 0.5767, + "step": 2040 + }, + { + "epoch": 0.07915363527549327, + "grad_norm": 3.3662521839141846, + "learning_rate": 0.00019472566508359397, + "loss": 0.5566, + "step": 2050 + }, + { + "epoch": 0.07953975056952006, + "grad_norm": 1.6602391004562378, + "learning_rate": 0.00019469992406399218, + "loss": 0.1436, + "step": 2060 + }, + { + "epoch": 0.07992586586354686, + "grad_norm": 6.451857089996338, + "learning_rate": 0.0001946741830443904, + "loss": 0.3778, + "step": 2070 + }, + { + "epoch": 0.08031198115757365, + "grad_norm": 3.7249560356140137, + "learning_rate": 0.0001946484420247886, + "loss": 0.5391, + "step": 2080 + }, + { + "epoch": 0.08069809645160045, + "grad_norm": 4.138098239898682, + "learning_rate": 0.00019462270100518682, + "loss": 0.3598, + "step": 2090 + }, + { + "epoch": 0.08108421174562724, + "grad_norm": 3.224778175354004, + "learning_rate": 0.00019459695998558504, + "loss": 0.2967, + "step": 2100 + }, + { + "epoch": 0.08147032703965404, + "grad_norm": 1.3951358795166016, + "learning_rate": 0.00019457121896598325, + "loss": 0.2698, + "step": 2110 + }, + { + "epoch": 0.08185644233368083, + "grad_norm": 5.956802845001221, + "learning_rate": 0.00019454547794638146, + "loss": 0.451, + "step": 2120 + }, + { + "epoch": 0.08224255762770763, + "grad_norm": 3.456360101699829, + "learning_rate": 0.00019451973692677968, + "loss": 0.3365, + "step": 2130 + }, + { + "epoch": 0.08262867292173443, + 
"grad_norm": 2.9433653354644775, + "learning_rate": 0.0001944939959071779, + "loss": 0.4424, + "step": 2140 + }, + { + "epoch": 0.08301478821576122, + "grad_norm": 3.136000871658325, + "learning_rate": 0.0001944682548875761, + "loss": 0.6224, + "step": 2150 + }, + { + "epoch": 0.08340090350978802, + "grad_norm": 2.669084310531616, + "learning_rate": 0.00019444251386797432, + "loss": 0.4435, + "step": 2160 + }, + { + "epoch": 0.08378701880381483, + "grad_norm": 4.573731899261475, + "learning_rate": 0.00019441677284837253, + "loss": 0.5555, + "step": 2170 + }, + { + "epoch": 0.08417313409784162, + "grad_norm": 6.354156017303467, + "learning_rate": 0.00019439103182877074, + "loss": 0.4232, + "step": 2180 + }, + { + "epoch": 0.08455924939186842, + "grad_norm": 2.993691921234131, + "learning_rate": 0.00019436529080916895, + "loss": 0.51, + "step": 2190 + }, + { + "epoch": 0.08494536468589521, + "grad_norm": 3.6496782302856445, + "learning_rate": 0.00019433954978956717, + "loss": 0.4031, + "step": 2200 + }, + { + "epoch": 0.085331479979922, + "grad_norm": 1.9039051532745361, + "learning_rate": 0.00019431380876996538, + "loss": 0.4407, + "step": 2210 + }, + { + "epoch": 0.0857175952739488, + "grad_norm": 2.3211915493011475, + "learning_rate": 0.00019428806775036362, + "loss": 0.4057, + "step": 2220 + }, + { + "epoch": 0.0861037105679756, + "grad_norm": 4.883905410766602, + "learning_rate": 0.0001942623267307618, + "loss": 0.6223, + "step": 2230 + }, + { + "epoch": 0.08648982586200239, + "grad_norm": 2.164484977722168, + "learning_rate": 0.00019423658571116005, + "loss": 0.3141, + "step": 2240 + }, + { + "epoch": 0.08687594115602919, + "grad_norm": 2.2078909873962402, + "learning_rate": 0.00019421084469155823, + "loss": 0.3523, + "step": 2250 + }, + { + "epoch": 0.08726205645005598, + "grad_norm": 1.0987967252731323, + "learning_rate": 0.00019418510367195645, + "loss": 0.4013, + "step": 2260 + }, + { + "epoch": 0.08764817174408278, + "grad_norm": 2.418327569961548, + "learning_rate": 0.00019415936265235466, + "loss": 0.581, + "step": 2270 + }, + { + "epoch": 0.08803428703810957, + "grad_norm": 4.029652118682861, + "learning_rate": 0.00019413362163275287, + "loss": 0.5244, + "step": 2280 + }, + { + "epoch": 0.08842040233213637, + "grad_norm": 3.4661777019500732, + "learning_rate": 0.00019410788061315112, + "loss": 0.4531, + "step": 2290 + }, + { + "epoch": 0.08880651762616318, + "grad_norm": 1.0475856065750122, + "learning_rate": 0.0001940821395935493, + "loss": 0.4362, + "step": 2300 + }, + { + "epoch": 0.08919263292018997, + "grad_norm": 4.021854400634766, + "learning_rate": 0.00019405639857394754, + "loss": 0.4532, + "step": 2310 + }, + { + "epoch": 0.08957874821421677, + "grad_norm": 1.836438536643982, + "learning_rate": 0.00019403065755434573, + "loss": 0.6858, + "step": 2320 + }, + { + "epoch": 0.08996486350824356, + "grad_norm": 2.5251567363739014, + "learning_rate": 0.00019400491653474394, + "loss": 0.3619, + "step": 2330 + }, + { + "epoch": 0.09035097880227036, + "grad_norm": 3.067208766937256, + "learning_rate": 0.00019397917551514215, + "loss": 0.7376, + "step": 2340 + }, + { + "epoch": 0.09073709409629716, + "grad_norm": 0.9124518036842346, + "learning_rate": 0.00019395343449554037, + "loss": 0.4193, + "step": 2350 + }, + { + "epoch": 0.09112320939032395, + "grad_norm": 3.8170812129974365, + "learning_rate": 0.0001939276934759386, + "loss": 0.5393, + "step": 2360 + }, + { + "epoch": 0.09150932468435075, + "grad_norm": 8.19250202178955, + "learning_rate": 0.0001939019524563368, + "loss": 
0.424, + "step": 2370 + }, + { + "epoch": 0.09189543997837754, + "grad_norm": 4.459112167358398, + "learning_rate": 0.00019387621143673503, + "loss": 0.3278, + "step": 2380 + }, + { + "epoch": 0.09228155527240434, + "grad_norm": 5.578339576721191, + "learning_rate": 0.00019385047041713322, + "loss": 0.5223, + "step": 2390 + }, + { + "epoch": 0.09266767056643113, + "grad_norm": 1.3707878589630127, + "learning_rate": 0.00019382472939753143, + "loss": 0.3004, + "step": 2400 + }, + { + "epoch": 0.09305378586045793, + "grad_norm": 5.0041184425354, + "learning_rate": 0.00019379898837792967, + "loss": 0.4378, + "step": 2410 + }, + { + "epoch": 0.09343990115448472, + "grad_norm": 5.668384552001953, + "learning_rate": 0.00019377324735832786, + "loss": 0.499, + "step": 2420 + }, + { + "epoch": 0.09382601644851153, + "grad_norm": 5.605838775634766, + "learning_rate": 0.0001937475063387261, + "loss": 0.5642, + "step": 2430 + }, + { + "epoch": 0.09421213174253833, + "grad_norm": 5.055904865264893, + "learning_rate": 0.0001937217653191243, + "loss": 0.6225, + "step": 2440 + }, + { + "epoch": 0.09459824703656512, + "grad_norm": 3.1779348850250244, + "learning_rate": 0.00019369602429952253, + "loss": 0.3673, + "step": 2450 + }, + { + "epoch": 0.09498436233059192, + "grad_norm": 2.540269136428833, + "learning_rate": 0.00019367028327992071, + "loss": 0.3499, + "step": 2460 + }, + { + "epoch": 0.09537047762461871, + "grad_norm": 2.284114122390747, + "learning_rate": 0.00019364454226031893, + "loss": 0.5405, + "step": 2470 + }, + { + "epoch": 0.09575659291864551, + "grad_norm": 6.752682685852051, + "learning_rate": 0.00019361880124071717, + "loss": 0.5594, + "step": 2480 + }, + { + "epoch": 0.0961427082126723, + "grad_norm": 2.5659310817718506, + "learning_rate": 0.00019359306022111535, + "loss": 0.5872, + "step": 2490 + }, + { + "epoch": 0.0965288235066991, + "grad_norm": 4.503110885620117, + "learning_rate": 0.0001935673192015136, + "loss": 0.5171, + "step": 2500 + }, + { + "epoch": 0.0969149388007259, + "grad_norm": 1.7715080976486206, + "learning_rate": 0.00019354157818191178, + "loss": 0.6131, + "step": 2510 + }, + { + "epoch": 0.09730105409475269, + "grad_norm": 4.479000568389893, + "learning_rate": 0.00019351583716231002, + "loss": 0.4396, + "step": 2520 + }, + { + "epoch": 0.09768716938877949, + "grad_norm": 0.9424387812614441, + "learning_rate": 0.00019349009614270823, + "loss": 0.3166, + "step": 2530 + }, + { + "epoch": 0.09807328468280628, + "grad_norm": 3.792689800262451, + "learning_rate": 0.00019346435512310642, + "loss": 0.2591, + "step": 2540 + }, + { + "epoch": 0.09845939997683308, + "grad_norm": 2.9132003784179688, + "learning_rate": 0.00019343861410350466, + "loss": 0.3523, + "step": 2550 + }, + { + "epoch": 0.09884551527085988, + "grad_norm": 1.6421749591827393, + "learning_rate": 0.00019341287308390285, + "loss": 0.2805, + "step": 2560 + }, + { + "epoch": 0.09923163056488668, + "grad_norm": 1.9469813108444214, + "learning_rate": 0.0001933871320643011, + "loss": 0.3929, + "step": 2570 + }, + { + "epoch": 0.09961774585891348, + "grad_norm": 3.081062078475952, + "learning_rate": 0.00019336139104469927, + "loss": 0.569, + "step": 2580 + }, + { + "epoch": 0.10000386115294027, + "grad_norm": 4.728143215179443, + "learning_rate": 0.00019333565002509751, + "loss": 0.3443, + "step": 2590 + }, + { + "epoch": 0.10038997644696707, + "grad_norm": 2.4117422103881836, + "learning_rate": 0.00019330990900549573, + "loss": 0.4492, + "step": 2600 + }, + { + "epoch": 0.10077609174099386, + "grad_norm": 
5.794168472290039, + "learning_rate": 0.00019328416798589391, + "loss": 0.5088, + "step": 2610 + }, + { + "epoch": 0.10116220703502066, + "grad_norm": 1.0662094354629517, + "learning_rate": 0.00019325842696629215, + "loss": 0.4683, + "step": 2620 + }, + { + "epoch": 0.10154832232904745, + "grad_norm": 2.13590931892395, + "learning_rate": 0.00019323268594669034, + "loss": 0.5833, + "step": 2630 + }, + { + "epoch": 0.10193443762307425, + "grad_norm": 7.52834415435791, + "learning_rate": 0.00019320694492708858, + "loss": 0.54, + "step": 2640 + }, + { + "epoch": 0.10232055291710104, + "grad_norm": 6.155847072601318, + "learning_rate": 0.00019318120390748677, + "loss": 0.6809, + "step": 2650 + }, + { + "epoch": 0.10270666821112784, + "grad_norm": 6.527890205383301, + "learning_rate": 0.000193155462887885, + "loss": 0.5239, + "step": 2660 + }, + { + "epoch": 0.10309278350515463, + "grad_norm": 3.3918986320495605, + "learning_rate": 0.00019312972186828322, + "loss": 0.4636, + "step": 2670 + }, + { + "epoch": 0.10347889879918143, + "grad_norm": 2.0933191776275635, + "learning_rate": 0.0001931039808486814, + "loss": 0.5288, + "step": 2680 + }, + { + "epoch": 0.10386501409320824, + "grad_norm": 2.4386465549468994, + "learning_rate": 0.00019307823982907965, + "loss": 0.5496, + "step": 2690 + }, + { + "epoch": 0.10425112938723503, + "grad_norm": 2.885315418243408, + "learning_rate": 0.00019305249880947783, + "loss": 0.5928, + "step": 2700 + }, + { + "epoch": 0.10463724468126183, + "grad_norm": 4.986598968505859, + "learning_rate": 0.00019302675778987607, + "loss": 0.3513, + "step": 2710 + }, + { + "epoch": 0.10502335997528862, + "grad_norm": 2.7999277114868164, + "learning_rate": 0.0001930010167702743, + "loss": 0.4484, + "step": 2720 + }, + { + "epoch": 0.10540947526931542, + "grad_norm": 1.6467676162719727, + "learning_rate": 0.0001929752757506725, + "loss": 0.4729, + "step": 2730 + }, + { + "epoch": 0.10579559056334222, + "grad_norm": 2.168477773666382, + "learning_rate": 0.00019294953473107071, + "loss": 0.3579, + "step": 2740 + }, + { + "epoch": 0.10618170585736901, + "grad_norm": 1.5439807176589966, + "learning_rate": 0.00019292379371146893, + "loss": 0.5715, + "step": 2750 + }, + { + "epoch": 0.1065678211513958, + "grad_norm": 1.2175947427749634, + "learning_rate": 0.00019289805269186714, + "loss": 0.4059, + "step": 2760 + }, + { + "epoch": 0.1069539364454226, + "grad_norm": 2.649782419204712, + "learning_rate": 0.00019287231167226533, + "loss": 0.7864, + "step": 2770 + }, + { + "epoch": 0.1073400517394494, + "grad_norm": 3.698002815246582, + "learning_rate": 0.00019284657065266357, + "loss": 0.5107, + "step": 2780 + }, + { + "epoch": 0.10772616703347619, + "grad_norm": 1.7418729066848755, + "learning_rate": 0.00019282082963306178, + "loss": 0.332, + "step": 2790 + }, + { + "epoch": 0.10811228232750299, + "grad_norm": 2.2264151573181152, + "learning_rate": 0.00019279508861346, + "loss": 0.3944, + "step": 2800 + }, + { + "epoch": 0.10849839762152978, + "grad_norm": 1.3407092094421387, + "learning_rate": 0.0001927693475938582, + "loss": 0.4603, + "step": 2810 + }, + { + "epoch": 0.10888451291555659, + "grad_norm": 0.3719189763069153, + "learning_rate": 0.00019274360657425642, + "loss": 0.496, + "step": 2820 + }, + { + "epoch": 0.10927062820958339, + "grad_norm": 4.728814125061035, + "learning_rate": 0.00019271786555465463, + "loss": 0.4079, + "step": 2830 + }, + { + "epoch": 0.10965674350361018, + "grad_norm": 5.007620334625244, + "learning_rate": 0.00019269212453505285, + "loss": 0.5221, + 
"step": 2840 + }, + { + "epoch": 0.11004285879763698, + "grad_norm": 2.7476320266723633, + "learning_rate": 0.00019266638351545106, + "loss": 0.3705, + "step": 2850 + }, + { + "epoch": 0.11042897409166377, + "grad_norm": 2.2960126399993896, + "learning_rate": 0.00019264064249584927, + "loss": 0.6656, + "step": 2860 + }, + { + "epoch": 0.11081508938569057, + "grad_norm": 0.9589812755584717, + "learning_rate": 0.0001926149014762475, + "loss": 0.6977, + "step": 2870 + }, + { + "epoch": 0.11120120467971736, + "grad_norm": 2.274984121322632, + "learning_rate": 0.0001925891604566457, + "loss": 0.4237, + "step": 2880 + }, + { + "epoch": 0.11158731997374416, + "grad_norm": 1.8849111795425415, + "learning_rate": 0.00019256341943704391, + "loss": 0.2357, + "step": 2890 + }, + { + "epoch": 0.11197343526777095, + "grad_norm": 2.7264740467071533, + "learning_rate": 0.00019253767841744213, + "loss": 0.3422, + "step": 2900 + }, + { + "epoch": 0.11235955056179775, + "grad_norm": 3.832827568054199, + "learning_rate": 0.00019251193739784034, + "loss": 0.3861, + "step": 2910 + }, + { + "epoch": 0.11274566585582455, + "grad_norm": 2.3612313270568848, + "learning_rate": 0.00019248619637823855, + "loss": 0.3299, + "step": 2920 + }, + { + "epoch": 0.11313178114985134, + "grad_norm": 2.2509396076202393, + "learning_rate": 0.00019246045535863677, + "loss": 0.6027, + "step": 2930 + }, + { + "epoch": 0.11351789644387814, + "grad_norm": 2.7600464820861816, + "learning_rate": 0.00019243471433903498, + "loss": 0.3864, + "step": 2940 + }, + { + "epoch": 0.11390401173790494, + "grad_norm": 5.956289768218994, + "learning_rate": 0.0001924089733194332, + "loss": 0.4669, + "step": 2950 + }, + { + "epoch": 0.11429012703193174, + "grad_norm": 4.651761531829834, + "learning_rate": 0.0001923832322998314, + "loss": 0.5774, + "step": 2960 + }, + { + "epoch": 0.11467624232595854, + "grad_norm": 1.1770590543746948, + "learning_rate": 0.00019235749128022962, + "loss": 0.3951, + "step": 2970 + }, + { + "epoch": 0.11506235761998533, + "grad_norm": 0.8117956519126892, + "learning_rate": 0.00019233175026062783, + "loss": 0.3919, + "step": 2980 + }, + { + "epoch": 0.11544847291401213, + "grad_norm": 1.318812608718872, + "learning_rate": 0.00019230600924102605, + "loss": 0.2905, + "step": 2990 + }, + { + "epoch": 0.11583458820803892, + "grad_norm": 1.981382966041565, + "learning_rate": 0.00019228026822142426, + "loss": 0.5312, + "step": 3000 + }, + { + "epoch": 0.11622070350206572, + "grad_norm": 1.9083003997802734, + "learning_rate": 0.00019225452720182247, + "loss": 0.3129, + "step": 3010 + }, + { + "epoch": 0.11660681879609251, + "grad_norm": 2.7581653594970703, + "learning_rate": 0.0001922287861822207, + "loss": 0.3954, + "step": 3020 + }, + { + "epoch": 0.11699293409011931, + "grad_norm": 1.065090537071228, + "learning_rate": 0.0001922030451626189, + "loss": 0.2722, + "step": 3030 + }, + { + "epoch": 0.1173790493841461, + "grad_norm": 0.600864827632904, + "learning_rate": 0.0001921773041430171, + "loss": 0.493, + "step": 3040 + }, + { + "epoch": 0.1177651646781729, + "grad_norm": 4.4449052810668945, + "learning_rate": 0.00019215156312341533, + "loss": 0.4982, + "step": 3050 + }, + { + "epoch": 0.1181512799721997, + "grad_norm": 3.34476637840271, + "learning_rate": 0.00019212582210381354, + "loss": 0.3204, + "step": 3060 + }, + { + "epoch": 0.11853739526622649, + "grad_norm": 1.7432445287704468, + "learning_rate": 0.00019210008108421175, + "loss": 0.6601, + "step": 3070 + }, + { + "epoch": 0.1189235105602533, + "grad_norm": 
1.908324956893921, + "learning_rate": 0.00019207434006460997, + "loss": 0.5947, + "step": 3080 + }, + { + "epoch": 0.1193096258542801, + "grad_norm": 5.373056888580322, + "learning_rate": 0.00019204859904500818, + "loss": 0.5169, + "step": 3090 + }, + { + "epoch": 0.11969574114830689, + "grad_norm": 0.861535370349884, + "learning_rate": 0.0001920228580254064, + "loss": 0.3829, + "step": 3100 + }, + { + "epoch": 0.12008185644233368, + "grad_norm": 1.2700462341308594, + "learning_rate": 0.0001919971170058046, + "loss": 0.4475, + "step": 3110 + }, + { + "epoch": 0.12046797173636048, + "grad_norm": 2.9959444999694824, + "learning_rate": 0.00019197137598620282, + "loss": 0.4704, + "step": 3120 + }, + { + "epoch": 0.12085408703038727, + "grad_norm": 0.280109167098999, + "learning_rate": 0.00019194563496660103, + "loss": 0.3732, + "step": 3130 + }, + { + "epoch": 0.12124020232441407, + "grad_norm": 0.9746024012565613, + "learning_rate": 0.00019191989394699925, + "loss": 0.4693, + "step": 3140 + }, + { + "epoch": 0.12162631761844087, + "grad_norm": 1.7267721891403198, + "learning_rate": 0.00019189415292739746, + "loss": 0.4509, + "step": 3150 + }, + { + "epoch": 0.12201243291246766, + "grad_norm": 2.1759033203125, + "learning_rate": 0.00019186841190779567, + "loss": 0.428, + "step": 3160 + }, + { + "epoch": 0.12239854820649446, + "grad_norm": 1.270711064338684, + "learning_rate": 0.0001918426708881939, + "loss": 0.4262, + "step": 3170 + }, + { + "epoch": 0.12278466350052125, + "grad_norm": 3.7549123764038086, + "learning_rate": 0.0001918169298685921, + "loss": 0.4758, + "step": 3180 + }, + { + "epoch": 0.12317077879454805, + "grad_norm": 1.6550017595291138, + "learning_rate": 0.0001917911888489903, + "loss": 0.452, + "step": 3190 + }, + { + "epoch": 0.12355689408857484, + "grad_norm": 3.7151713371276855, + "learning_rate": 0.00019176544782938853, + "loss": 0.4844, + "step": 3200 + }, + { + "epoch": 0.12394300938260165, + "grad_norm": 0.5354440808296204, + "learning_rate": 0.00019173970680978674, + "loss": 0.4432, + "step": 3210 + }, + { + "epoch": 0.12432912467662845, + "grad_norm": 3.2494261264801025, + "learning_rate": 0.00019171396579018498, + "loss": 0.587, + "step": 3220 + }, + { + "epoch": 0.12471523997065524, + "grad_norm": 1.2129877805709839, + "learning_rate": 0.00019168822477058317, + "loss": 0.4662, + "step": 3230 + }, + { + "epoch": 0.12510135526468202, + "grad_norm": 3.723402500152588, + "learning_rate": 0.00019166248375098138, + "loss": 0.5261, + "step": 3240 + }, + { + "epoch": 0.12548747055870882, + "grad_norm": 1.596259593963623, + "learning_rate": 0.0001916367427313796, + "loss": 0.2802, + "step": 3250 + }, + { + "epoch": 0.12587358585273561, + "grad_norm": 5.5710320472717285, + "learning_rate": 0.0001916110017117778, + "loss": 0.5246, + "step": 3260 + }, + { + "epoch": 0.1262597011467624, + "grad_norm": 4.490183353424072, + "learning_rate": 0.00019158526069217602, + "loss": 0.4929, + "step": 3270 + }, + { + "epoch": 0.12664581644078923, + "grad_norm": 2.482572555541992, + "learning_rate": 0.00019155951967257423, + "loss": 0.3677, + "step": 3280 + }, + { + "epoch": 0.12703193173481603, + "grad_norm": 3.348520517349243, + "learning_rate": 0.00019153377865297247, + "loss": 0.6471, + "step": 3290 + }, + { + "epoch": 0.12741804702884282, + "grad_norm": 7.735306262969971, + "learning_rate": 0.00019150803763337066, + "loss": 0.6057, + "step": 3300 + }, + { + "epoch": 0.12780416232286962, + "grad_norm": 2.120649576187134, + "learning_rate": 0.00019148229661376887, + "loss": 0.5408, + 
"step": 3310 + }, + { + "epoch": 0.12819027761689641, + "grad_norm": 10.259540557861328, + "learning_rate": 0.00019145655559416709, + "loss": 0.4753, + "step": 3320 + }, + { + "epoch": 0.1285763929109232, + "grad_norm": 4.094576358795166, + "learning_rate": 0.0001914308145745653, + "loss": 0.3832, + "step": 3330 + }, + { + "epoch": 0.12896250820495, + "grad_norm": 3.1248559951782227, + "learning_rate": 0.00019140507355496354, + "loss": 0.5631, + "step": 3340 + }, + { + "epoch": 0.1293486234989768, + "grad_norm": 1.2975168228149414, + "learning_rate": 0.00019137933253536173, + "loss": 0.5158, + "step": 3350 + }, + { + "epoch": 0.1297347387930036, + "grad_norm": 3.2515244483947754, + "learning_rate": 0.00019135359151575997, + "loss": 0.4176, + "step": 3360 + }, + { + "epoch": 0.1301208540870304, + "grad_norm": 2.287757396697998, + "learning_rate": 0.00019132785049615815, + "loss": 0.5316, + "step": 3370 + }, + { + "epoch": 0.1305069693810572, + "grad_norm": 8.668967247009277, + "learning_rate": 0.00019130210947655637, + "loss": 0.6653, + "step": 3380 + }, + { + "epoch": 0.13089308467508398, + "grad_norm": 4.751536846160889, + "learning_rate": 0.00019127636845695458, + "loss": 0.4508, + "step": 3390 + }, + { + "epoch": 0.13127919996911078, + "grad_norm": 3.240792751312256, + "learning_rate": 0.0001912506274373528, + "loss": 0.441, + "step": 3400 + }, + { + "epoch": 0.13166531526313757, + "grad_norm": 2.146261215209961, + "learning_rate": 0.00019122488641775103, + "loss": 0.3394, + "step": 3410 + }, + { + "epoch": 0.13205143055716437, + "grad_norm": 2.259693145751953, + "learning_rate": 0.00019119914539814922, + "loss": 0.4348, + "step": 3420 + }, + { + "epoch": 0.13243754585119116, + "grad_norm": 1.8136098384857178, + "learning_rate": 0.00019117340437854746, + "loss": 0.4441, + "step": 3430 + }, + { + "epoch": 0.13282366114521796, + "grad_norm": 1.7324503660202026, + "learning_rate": 0.00019114766335894565, + "loss": 0.4725, + "step": 3440 + }, + { + "epoch": 0.13320977643924475, + "grad_norm": 4.709383487701416, + "learning_rate": 0.00019112192233934389, + "loss": 0.5383, + "step": 3450 + }, + { + "epoch": 0.13359589173327155, + "grad_norm": 0.3468118906021118, + "learning_rate": 0.00019109618131974207, + "loss": 0.3228, + "step": 3460 + }, + { + "epoch": 0.13398200702729834, + "grad_norm": 6.66448974609375, + "learning_rate": 0.00019107044030014029, + "loss": 0.4128, + "step": 3470 + }, + { + "epoch": 0.13436812232132514, + "grad_norm": 1.6971935033798218, + "learning_rate": 0.00019104469928053853, + "loss": 0.5061, + "step": 3480 + }, + { + "epoch": 0.13475423761535194, + "grad_norm": 0.9180198311805725, + "learning_rate": 0.0001910189582609367, + "loss": 0.4445, + "step": 3490 + }, + { + "epoch": 0.13514035290937873, + "grad_norm": 3.1244235038757324, + "learning_rate": 0.00019099321724133495, + "loss": 0.4099, + "step": 3500 + }, + { + "epoch": 0.13552646820340553, + "grad_norm": 2.7192864418029785, + "learning_rate": 0.00019096747622173314, + "loss": 0.5004, + "step": 3510 + }, + { + "epoch": 0.13591258349743232, + "grad_norm": 2.0905699729919434, + "learning_rate": 0.00019094173520213138, + "loss": 0.4277, + "step": 3520 + }, + { + "epoch": 0.13629869879145912, + "grad_norm": 3.3753092288970947, + "learning_rate": 0.0001909159941825296, + "loss": 0.4129, + "step": 3530 + }, + { + "epoch": 0.13668481408548594, + "grad_norm": 4.199211120605469, + "learning_rate": 0.00019089025316292778, + "loss": 0.5734, + "step": 3540 + }, + { + "epoch": 0.13707092937951273, + "grad_norm": 
2.0411245822906494, + "learning_rate": 0.00019086451214332602, + "loss": 0.3018, + "step": 3550 + }, + { + "epoch": 0.13745704467353953, + "grad_norm": 17.236717224121094, + "learning_rate": 0.0001908387711237242, + "loss": 0.4527, + "step": 3560 + }, + { + "epoch": 0.13784315996756633, + "grad_norm": 1.4575644731521606, + "learning_rate": 0.00019081303010412245, + "loss": 0.3773, + "step": 3570 + }, + { + "epoch": 0.13822927526159312, + "grad_norm": 3.926090717315674, + "learning_rate": 0.00019078728908452063, + "loss": 0.5316, + "step": 3580 + }, + { + "epoch": 0.13861539055561992, + "grad_norm": 3.1841864585876465, + "learning_rate": 0.00019076154806491887, + "loss": 0.3705, + "step": 3590 + }, + { + "epoch": 0.1390015058496467, + "grad_norm": 4.08506441116333, + "learning_rate": 0.00019073580704531709, + "loss": 0.4941, + "step": 3600 + }, + { + "epoch": 0.1393876211436735, + "grad_norm": 3.063154458999634, + "learning_rate": 0.00019071006602571527, + "loss": 0.4435, + "step": 3610 + }, + { + "epoch": 0.1397737364377003, + "grad_norm": 6.122230529785156, + "learning_rate": 0.0001906843250061135, + "loss": 0.5067, + "step": 3620 + }, + { + "epoch": 0.1401598517317271, + "grad_norm": 3.3089540004730225, + "learning_rate": 0.0001906585839865117, + "loss": 0.4329, + "step": 3630 + }, + { + "epoch": 0.1405459670257539, + "grad_norm": 1.7245008945465088, + "learning_rate": 0.00019063284296690994, + "loss": 0.4502, + "step": 3640 + }, + { + "epoch": 0.1409320823197807, + "grad_norm": 1.7759568691253662, + "learning_rate": 0.00019060710194730813, + "loss": 0.2379, + "step": 3650 + }, + { + "epoch": 0.14131819761380748, + "grad_norm": 0.432452529668808, + "learning_rate": 0.00019058136092770637, + "loss": 0.4277, + "step": 3660 + }, + { + "epoch": 0.14170431290783428, + "grad_norm": 3.311952829360962, + "learning_rate": 0.00019055561990810458, + "loss": 0.4558, + "step": 3670 + }, + { + "epoch": 0.14209042820186107, + "grad_norm": 1.9942964315414429, + "learning_rate": 0.00019052987888850277, + "loss": 0.3349, + "step": 3680 + }, + { + "epoch": 0.14247654349588787, + "grad_norm": 6.226424217224121, + "learning_rate": 0.000190504137868901, + "loss": 0.5809, + "step": 3690 + }, + { + "epoch": 0.14286265878991466, + "grad_norm": 6.223634719848633, + "learning_rate": 0.0001904783968492992, + "loss": 0.5788, + "step": 3700 + }, + { + "epoch": 0.14324877408394146, + "grad_norm": 0.7370914220809937, + "learning_rate": 0.00019045265582969743, + "loss": 0.4834, + "step": 3710 + }, + { + "epoch": 0.14363488937796826, + "grad_norm": 1.745880365371704, + "learning_rate": 0.00019042691481009565, + "loss": 0.6995, + "step": 3720 + }, + { + "epoch": 0.14402100467199505, + "grad_norm": 0.8839595913887024, + "learning_rate": 0.00019040117379049386, + "loss": 0.3526, + "step": 3730 + }, + { + "epoch": 0.14440711996602185, + "grad_norm": 1.1224008798599243, + "learning_rate": 0.00019037543277089207, + "loss": 0.3558, + "step": 3740 + }, + { + "epoch": 0.14479323526004864, + "grad_norm": 1.0473041534423828, + "learning_rate": 0.00019034969175129026, + "loss": 0.2465, + "step": 3750 + }, + { + "epoch": 0.14517935055407544, + "grad_norm": 3.83192777633667, + "learning_rate": 0.0001903239507316885, + "loss": 0.4832, + "step": 3760 + }, + { + "epoch": 0.14556546584810223, + "grad_norm": 3.323885440826416, + "learning_rate": 0.00019029820971208669, + "loss": 0.4924, + "step": 3770 + }, + { + "epoch": 0.14595158114212903, + "grad_norm": 3.2334187030792236, + "learning_rate": 0.00019027246869248493, + "loss": 0.5053, + 
"step": 3780 + }, + { + "epoch": 0.14633769643615582, + "grad_norm": 2.280498743057251, + "learning_rate": 0.00019024672767288314, + "loss": 0.554, + "step": 3790 + }, + { + "epoch": 0.14672381173018265, + "grad_norm": 4.546648979187012, + "learning_rate": 0.00019022098665328135, + "loss": 0.3999, + "step": 3800 + }, + { + "epoch": 0.14710992702420944, + "grad_norm": 0.6303244829177856, + "learning_rate": 0.00019019524563367957, + "loss": 0.4481, + "step": 3810 + }, + { + "epoch": 0.14749604231823624, + "grad_norm": 2.605196475982666, + "learning_rate": 0.00019016950461407775, + "loss": 0.3561, + "step": 3820 + }, + { + "epoch": 0.14788215761226303, + "grad_norm": 3.0562639236450195, + "learning_rate": 0.000190143763594476, + "loss": 0.5903, + "step": 3830 + }, + { + "epoch": 0.14826827290628983, + "grad_norm": 11.164155006408691, + "learning_rate": 0.0001901180225748742, + "loss": 0.4299, + "step": 3840 + }, + { + "epoch": 0.14865438820031662, + "grad_norm": 4.996811866760254, + "learning_rate": 0.00019009228155527242, + "loss": 0.4423, + "step": 3850 + }, + { + "epoch": 0.14904050349434342, + "grad_norm": 2.627272844314575, + "learning_rate": 0.00019006654053567063, + "loss": 0.4875, + "step": 3860 + }, + { + "epoch": 0.1494266187883702, + "grad_norm": 2.6532809734344482, + "learning_rate": 0.00019004079951606885, + "loss": 0.5221, + "step": 3870 + }, + { + "epoch": 0.149812734082397, + "grad_norm": 5.821976661682129, + "learning_rate": 0.00019001505849646706, + "loss": 0.4793, + "step": 3880 + }, + { + "epoch": 0.1501988493764238, + "grad_norm": 2.888029098510742, + "learning_rate": 0.00018998931747686524, + "loss": 0.5784, + "step": 3890 + }, + { + "epoch": 0.1505849646704506, + "grad_norm": 0.9147624969482422, + "learning_rate": 0.00018996357645726349, + "loss": 0.5533, + "step": 3900 + }, + { + "epoch": 0.1509710799644774, + "grad_norm": 2.6088199615478516, + "learning_rate": 0.0001899378354376617, + "loss": 0.5028, + "step": 3910 + }, + { + "epoch": 0.1513571952585042, + "grad_norm": 3.8208296298980713, + "learning_rate": 0.0001899120944180599, + "loss": 0.4934, + "step": 3920 + }, + { + "epoch": 0.15174331055253099, + "grad_norm": 2.8711328506469727, + "learning_rate": 0.00018988635339845813, + "loss": 0.4417, + "step": 3930 + }, + { + "epoch": 0.15212942584655778, + "grad_norm": 2.922855854034424, + "learning_rate": 0.00018986061237885634, + "loss": 0.5303, + "step": 3940 + }, + { + "epoch": 0.15251554114058458, + "grad_norm": 2.52575945854187, + "learning_rate": 0.00018983487135925455, + "loss": 0.397, + "step": 3950 + }, + { + "epoch": 0.15290165643461137, + "grad_norm": 3.3369996547698975, + "learning_rate": 0.00018980913033965277, + "loss": 0.4172, + "step": 3960 + }, + { + "epoch": 0.15328777172863817, + "grad_norm": 1.7678214311599731, + "learning_rate": 0.00018978338932005098, + "loss": 0.3122, + "step": 3970 + }, + { + "epoch": 0.15367388702266496, + "grad_norm": 3.3293211460113525, + "learning_rate": 0.0001897576483004492, + "loss": 0.6864, + "step": 3980 + }, + { + "epoch": 0.15406000231669176, + "grad_norm": 1.4911530017852783, + "learning_rate": 0.0001897319072808474, + "loss": 0.3888, + "step": 3990 + }, + { + "epoch": 0.15444611761071855, + "grad_norm": 1.4884055852890015, + "learning_rate": 0.00018970616626124562, + "loss": 0.3952, + "step": 4000 + }, + { + "epoch": 0.15483223290474535, + "grad_norm": 1.2745383977890015, + "learning_rate": 0.00018968042524164383, + "loss": 0.3647, + "step": 4010 + }, + { + "epoch": 0.15521834819877214, + "grad_norm": 
7.799386024475098, + "learning_rate": 0.00018965468422204205, + "loss": 0.5554, + "step": 4020 + }, + { + "epoch": 0.15560446349279894, + "grad_norm": 2.4778294563293457, + "learning_rate": 0.00018962894320244026, + "loss": 0.662, + "step": 4030 + }, + { + "epoch": 0.15599057878682573, + "grad_norm": 0.8415629267692566, + "learning_rate": 0.00018960320218283847, + "loss": 0.4317, + "step": 4040 + }, + { + "epoch": 0.15637669408085253, + "grad_norm": 4.507715702056885, + "learning_rate": 0.00018957746116323669, + "loss": 0.4512, + "step": 4050 + }, + { + "epoch": 0.15676280937487935, + "grad_norm": 3.5790421962738037, + "learning_rate": 0.0001895517201436349, + "loss": 0.4022, + "step": 4060 + }, + { + "epoch": 0.15714892466890615, + "grad_norm": 3.7266156673431396, + "learning_rate": 0.0001895259791240331, + "loss": 0.3945, + "step": 4070 + }, + { + "epoch": 0.15753503996293294, + "grad_norm": 7.909580230712891, + "learning_rate": 0.00018950023810443133, + "loss": 0.3726, + "step": 4080 + }, + { + "epoch": 0.15792115525695974, + "grad_norm": 2.2439534664154053, + "learning_rate": 0.00018947449708482954, + "loss": 0.4157, + "step": 4090 + }, + { + "epoch": 0.15830727055098653, + "grad_norm": 1.6076972484588623, + "learning_rate": 0.00018944875606522775, + "loss": 0.2363, + "step": 4100 + }, + { + "epoch": 0.15869338584501333, + "grad_norm": 3.7495157718658447, + "learning_rate": 0.00018942301504562596, + "loss": 0.4908, + "step": 4110 + }, + { + "epoch": 0.15907950113904012, + "grad_norm": 0.2942291796207428, + "learning_rate": 0.00018939727402602418, + "loss": 0.4915, + "step": 4120 + }, + { + "epoch": 0.15946561643306692, + "grad_norm": 1.3951829671859741, + "learning_rate": 0.0001893715330064224, + "loss": 0.4585, + "step": 4130 + }, + { + "epoch": 0.15985173172709372, + "grad_norm": 0.4405671954154968, + "learning_rate": 0.0001893457919868206, + "loss": 0.2839, + "step": 4140 + }, + { + "epoch": 0.1602378470211205, + "grad_norm": 1.0917588472366333, + "learning_rate": 0.00018932005096721882, + "loss": 0.401, + "step": 4150 + }, + { + "epoch": 0.1606239623151473, + "grad_norm": 1.6183397769927979, + "learning_rate": 0.00018929430994761703, + "loss": 0.5555, + "step": 4160 + }, + { + "epoch": 0.1610100776091741, + "grad_norm": 2.0909583568573, + "learning_rate": 0.00018926856892801524, + "loss": 0.52, + "step": 4170 + }, + { + "epoch": 0.1613961929032009, + "grad_norm": 2.901456356048584, + "learning_rate": 0.00018924282790841346, + "loss": 0.603, + "step": 4180 + }, + { + "epoch": 0.1617823081972277, + "grad_norm": 7.230431079864502, + "learning_rate": 0.00018921708688881167, + "loss": 0.6189, + "step": 4190 + }, + { + "epoch": 0.1621684234912545, + "grad_norm": 6.773900508880615, + "learning_rate": 0.00018919134586920988, + "loss": 0.2494, + "step": 4200 + }, + { + "epoch": 0.16255453878528128, + "grad_norm": 0.8557988405227661, + "learning_rate": 0.0001891656048496081, + "loss": 0.2617, + "step": 4210 + }, + { + "epoch": 0.16294065407930808, + "grad_norm": 1.3747268915176392, + "learning_rate": 0.00018913986383000634, + "loss": 0.4189, + "step": 4220 + }, + { + "epoch": 0.16332676937333487, + "grad_norm": 4.072261810302734, + "learning_rate": 0.00018911412281040452, + "loss": 0.5473, + "step": 4230 + }, + { + "epoch": 0.16371288466736167, + "grad_norm": 2.7210185527801514, + "learning_rate": 0.00018908838179080274, + "loss": 0.3501, + "step": 4240 + }, + { + "epoch": 0.16409899996138846, + "grad_norm": 2.276454448699951, + "learning_rate": 0.00018906264077120095, + "loss": 0.3078, + 
"step": 4250 + }, + { + "epoch": 0.16448511525541526, + "grad_norm": 3.586536169052124, + "learning_rate": 0.00018903689975159916, + "loss": 0.3856, + "step": 4260 + }, + { + "epoch": 0.16487123054944205, + "grad_norm": 2.199673891067505, + "learning_rate": 0.00018901115873199738, + "loss": 0.3677, + "step": 4270 + }, + { + "epoch": 0.16525734584346885, + "grad_norm": 2.8410561084747314, + "learning_rate": 0.0001889854177123956, + "loss": 0.6101, + "step": 4280 + }, + { + "epoch": 0.16564346113749565, + "grad_norm": 3.9638853073120117, + "learning_rate": 0.00018895967669279383, + "loss": 0.5066, + "step": 4290 + }, + { + "epoch": 0.16602957643152244, + "grad_norm": 1.2070738077163696, + "learning_rate": 0.00018893393567319202, + "loss": 0.385, + "step": 4300 + }, + { + "epoch": 0.16641569172554924, + "grad_norm": 1.0531187057495117, + "learning_rate": 0.00018890819465359023, + "loss": 0.3608, + "step": 4310 + }, + { + "epoch": 0.16680180701957603, + "grad_norm": 1.1998246908187866, + "learning_rate": 0.00018888245363398844, + "loss": 0.4624, + "step": 4320 + }, + { + "epoch": 0.16718792231360285, + "grad_norm": 2.126063346862793, + "learning_rate": 0.00018885671261438666, + "loss": 0.6076, + "step": 4330 + }, + { + "epoch": 0.16757403760762965, + "grad_norm": 1.5854765176773071, + "learning_rate": 0.0001888309715947849, + "loss": 0.4817, + "step": 4340 + }, + { + "epoch": 0.16796015290165645, + "grad_norm": 6.630712509155273, + "learning_rate": 0.00018880523057518308, + "loss": 0.4098, + "step": 4350 + }, + { + "epoch": 0.16834626819568324, + "grad_norm": 2.060789108276367, + "learning_rate": 0.00018877948955558132, + "loss": 0.3523, + "step": 4360 + }, + { + "epoch": 0.16873238348971004, + "grad_norm": 2.2551252841949463, + "learning_rate": 0.0001887537485359795, + "loss": 0.3095, + "step": 4370 + }, + { + "epoch": 0.16911849878373683, + "grad_norm": 3.736640453338623, + "learning_rate": 0.00018872800751637772, + "loss": 0.3812, + "step": 4380 + }, + { + "epoch": 0.16950461407776363, + "grad_norm": 1.9971100091934204, + "learning_rate": 0.00018870226649677594, + "loss": 0.3422, + "step": 4390 + }, + { + "epoch": 0.16989072937179042, + "grad_norm": 3.6577255725860596, + "learning_rate": 0.00018867652547717415, + "loss": 0.7857, + "step": 4400 + }, + { + "epoch": 0.17027684466581722, + "grad_norm": 2.166538715362549, + "learning_rate": 0.0001886507844575724, + "loss": 0.5596, + "step": 4410 + }, + { + "epoch": 0.170662959959844, + "grad_norm": 2.0177736282348633, + "learning_rate": 0.00018862504343797058, + "loss": 0.3197, + "step": 4420 + }, + { + "epoch": 0.1710490752538708, + "grad_norm": 0.29447808861732483, + "learning_rate": 0.00018859930241836882, + "loss": 0.5284, + "step": 4430 + }, + { + "epoch": 0.1714351905478976, + "grad_norm": 2.17985200881958, + "learning_rate": 0.000188573561398767, + "loss": 0.5188, + "step": 4440 + }, + { + "epoch": 0.1718213058419244, + "grad_norm": 2.87449049949646, + "learning_rate": 0.00018854782037916522, + "loss": 0.554, + "step": 4450 + }, + { + "epoch": 0.1722074211359512, + "grad_norm": 1.8865265846252441, + "learning_rate": 0.00018852207935956343, + "loss": 0.4338, + "step": 4460 + }, + { + "epoch": 0.172593536429978, + "grad_norm": 2.042337417602539, + "learning_rate": 0.00018849633833996164, + "loss": 0.3924, + "step": 4470 + }, + { + "epoch": 0.17297965172400478, + "grad_norm": 1.4254354238510132, + "learning_rate": 0.00018847059732035988, + "loss": 0.2607, + "step": 4480 + }, + { + "epoch": 0.17336576701803158, + "grad_norm": 
2.611560344696045, + "learning_rate": 0.00018844485630075807, + "loss": 0.4967, + "step": 4490 + }, + { + "epoch": 0.17375188231205838, + "grad_norm": 1.1008936166763306, + "learning_rate": 0.0001884191152811563, + "loss": 0.4109, + "step": 4500 + }, + { + "epoch": 0.17413799760608517, + "grad_norm": 0.8280178308486938, + "learning_rate": 0.0001883933742615545, + "loss": 0.6632, + "step": 4510 + }, + { + "epoch": 0.17452411290011197, + "grad_norm": 2.226020336151123, + "learning_rate": 0.0001883676332419527, + "loss": 0.4777, + "step": 4520 + }, + { + "epoch": 0.17491022819413876, + "grad_norm": 1.6062042713165283, + "learning_rate": 0.00018834189222235095, + "loss": 0.4671, + "step": 4530 + }, + { + "epoch": 0.17529634348816556, + "grad_norm": 3.9853012561798096, + "learning_rate": 0.00018831615120274914, + "loss": 0.4843, + "step": 4540 + }, + { + "epoch": 0.17568245878219235, + "grad_norm": 0.30268657207489014, + "learning_rate": 0.00018829041018314738, + "loss": 0.3922, + "step": 4550 + }, + { + "epoch": 0.17606857407621915, + "grad_norm": 6.283960342407227, + "learning_rate": 0.00018826466916354556, + "loss": 0.6106, + "step": 4560 + }, + { + "epoch": 0.17645468937024594, + "grad_norm": 1.4164658784866333, + "learning_rate": 0.0001882389281439438, + "loss": 0.3014, + "step": 4570 + }, + { + "epoch": 0.17684080466427274, + "grad_norm": 4.847668170928955, + "learning_rate": 0.000188213187124342, + "loss": 0.5216, + "step": 4580 + }, + { + "epoch": 0.17722691995829956, + "grad_norm": 3.683180332183838, + "learning_rate": 0.0001881874461047402, + "loss": 0.3268, + "step": 4590 + }, + { + "epoch": 0.17761303525232636, + "grad_norm": 1.053144097328186, + "learning_rate": 0.00018816170508513844, + "loss": 0.5229, + "step": 4600 + }, + { + "epoch": 0.17799915054635315, + "grad_norm": 0.29438719153404236, + "learning_rate": 0.00018813596406553663, + "loss": 0.4523, + "step": 4610 + }, + { + "epoch": 0.17838526584037995, + "grad_norm": 1.5682024955749512, + "learning_rate": 0.00018811022304593487, + "loss": 0.4367, + "step": 4620 + }, + { + "epoch": 0.17877138113440674, + "grad_norm": 1.462189793586731, + "learning_rate": 0.00018808448202633306, + "loss": 0.5086, + "step": 4630 + }, + { + "epoch": 0.17915749642843354, + "grad_norm": 0.7927210927009583, + "learning_rate": 0.0001880587410067313, + "loss": 0.4654, + "step": 4640 + }, + { + "epoch": 0.17954361172246033, + "grad_norm": 1.4543548822402954, + "learning_rate": 0.0001880329999871295, + "loss": 0.5005, + "step": 4650 + }, + { + "epoch": 0.17992972701648713, + "grad_norm": 1.5814868211746216, + "learning_rate": 0.00018800725896752772, + "loss": 0.4127, + "step": 4660 + }, + { + "epoch": 0.18031584231051392, + "grad_norm": 1.9244798421859741, + "learning_rate": 0.00018798151794792594, + "loss": 0.3796, + "step": 4670 + }, + { + "epoch": 0.18070195760454072, + "grad_norm": 1.8725996017456055, + "learning_rate": 0.00018795577692832412, + "loss": 0.4112, + "step": 4680 + }, + { + "epoch": 0.18108807289856751, + "grad_norm": 2.8138442039489746, + "learning_rate": 0.00018793003590872236, + "loss": 0.6117, + "step": 4690 + }, + { + "epoch": 0.1814741881925943, + "grad_norm": 3.4465060234069824, + "learning_rate": 0.00018790429488912055, + "loss": 0.4223, + "step": 4700 + }, + { + "epoch": 0.1818603034866211, + "grad_norm": 4.431785583496094, + "learning_rate": 0.0001878785538695188, + "loss": 0.54, + "step": 4710 + }, + { + "epoch": 0.1822464187806479, + "grad_norm": 6.951846599578857, + "learning_rate": 0.000187852812849917, + "loss": 0.3702, + 
"step": 4720 + }, + { + "epoch": 0.1826325340746747, + "grad_norm": 1.0188024044036865, + "learning_rate": 0.00018782707183031522, + "loss": 0.2715, + "step": 4730 + }, + { + "epoch": 0.1830186493687015, + "grad_norm": 0.3875834047794342, + "learning_rate": 0.00018780133081071343, + "loss": 0.4208, + "step": 4740 + }, + { + "epoch": 0.1834047646627283, + "grad_norm": 2.7475740909576416, + "learning_rate": 0.00018777558979111162, + "loss": 0.3613, + "step": 4750 + }, + { + "epoch": 0.18379087995675508, + "grad_norm": 2.553227186203003, + "learning_rate": 0.00018774984877150986, + "loss": 0.4781, + "step": 4760 + }, + { + "epoch": 0.18417699525078188, + "grad_norm": 2.005154609680176, + "learning_rate": 0.00018772410775190804, + "loss": 0.3805, + "step": 4770 + }, + { + "epoch": 0.18456311054480867, + "grad_norm": 0.7380127310752869, + "learning_rate": 0.00018769836673230628, + "loss": 0.3679, + "step": 4780 + }, + { + "epoch": 0.18494922583883547, + "grad_norm": 3.6547505855560303, + "learning_rate": 0.0001876726257127045, + "loss": 0.4502, + "step": 4790 + }, + { + "epoch": 0.18533534113286226, + "grad_norm": 2.232980728149414, + "learning_rate": 0.0001876468846931027, + "loss": 0.4628, + "step": 4800 + }, + { + "epoch": 0.18572145642688906, + "grad_norm": 6.521275043487549, + "learning_rate": 0.00018762114367350092, + "loss": 0.4765, + "step": 4810 + }, + { + "epoch": 0.18610757172091585, + "grad_norm": 1.6310979127883911, + "learning_rate": 0.0001875954026538991, + "loss": 0.4039, + "step": 4820 + }, + { + "epoch": 0.18649368701494265, + "grad_norm": 1.1469775438308716, + "learning_rate": 0.00018756966163429735, + "loss": 0.4195, + "step": 4830 + }, + { + "epoch": 0.18687980230896944, + "grad_norm": 0.7688332200050354, + "learning_rate": 0.00018754392061469556, + "loss": 0.264, + "step": 4840 + }, + { + "epoch": 0.18726591760299627, + "grad_norm": 3.3422155380249023, + "learning_rate": 0.00018751817959509378, + "loss": 0.5275, + "step": 4850 + }, + { + "epoch": 0.18765203289702306, + "grad_norm": 1.517876386642456, + "learning_rate": 0.000187492438575492, + "loss": 0.4567, + "step": 4860 + }, + { + "epoch": 0.18803814819104986, + "grad_norm": 1.2196050882339478, + "learning_rate": 0.0001874666975558902, + "loss": 0.4231, + "step": 4870 + }, + { + "epoch": 0.18842426348507665, + "grad_norm": 1.3325402736663818, + "learning_rate": 0.00018744095653628842, + "loss": 0.6325, + "step": 4880 + }, + { + "epoch": 0.18881037877910345, + "grad_norm": 6.098769664764404, + "learning_rate": 0.0001874152155166866, + "loss": 0.576, + "step": 4890 + }, + { + "epoch": 0.18919649407313024, + "grad_norm": 2.602363348007202, + "learning_rate": 0.00018738947449708484, + "loss": 0.3237, + "step": 4900 + }, + { + "epoch": 0.18958260936715704, + "grad_norm": 0.970106303691864, + "learning_rate": 0.00018736373347748306, + "loss": 0.409, + "step": 4910 + }, + { + "epoch": 0.18996872466118384, + "grad_norm": 3.2592012882232666, + "learning_rate": 0.00018733799245788127, + "loss": 0.408, + "step": 4920 + }, + { + "epoch": 0.19035483995521063, + "grad_norm": 0.31132128834724426, + "learning_rate": 0.00018731225143827948, + "loss": 0.2446, + "step": 4930 + }, + { + "epoch": 0.19074095524923743, + "grad_norm": 5.321741104125977, + "learning_rate": 0.0001872865104186777, + "loss": 0.4604, + "step": 4940 + }, + { + "epoch": 0.19112707054326422, + "grad_norm": 1.1165122985839844, + "learning_rate": 0.0001872607693990759, + "loss": 0.3605, + "step": 4950 + }, + { + "epoch": 0.19151318583729102, + "grad_norm": 
0.8274110555648804, + "learning_rate": 0.0001872350283794741, + "loss": 0.2669, + "step": 4960 + }, + { + "epoch": 0.1918993011313178, + "grad_norm": 2.8668346405029297, + "learning_rate": 0.00018720928735987234, + "loss": 0.4055, + "step": 4970 + }, + { + "epoch": 0.1922854164253446, + "grad_norm": 3.411841630935669, + "learning_rate": 0.00018718354634027055, + "loss": 0.5989, + "step": 4980 + }, + { + "epoch": 0.1926715317193714, + "grad_norm": 0.18740829825401306, + "learning_rate": 0.00018715780532066876, + "loss": 0.3805, + "step": 4990 + }, + { + "epoch": 0.1930576470133982, + "grad_norm": 1.0823473930358887, + "learning_rate": 0.00018713206430106698, + "loss": 0.2854, + "step": 5000 + }, + { + "epoch": 0.193443762307425, + "grad_norm": 1.9816405773162842, + "learning_rate": 0.0001871063232814652, + "loss": 0.3771, + "step": 5010 + }, + { + "epoch": 0.1938298776014518, + "grad_norm": 5.267081260681152, + "learning_rate": 0.0001870805822618634, + "loss": 0.3085, + "step": 5020 + }, + { + "epoch": 0.19421599289547858, + "grad_norm": 5.706038475036621, + "learning_rate": 0.00018705484124226162, + "loss": 0.484, + "step": 5030 + }, + { + "epoch": 0.19460210818950538, + "grad_norm": 1.3357723951339722, + "learning_rate": 0.00018702910022265983, + "loss": 0.2161, + "step": 5040 + }, + { + "epoch": 0.19498822348353217, + "grad_norm": 1.0626447200775146, + "learning_rate": 0.00018700335920305804, + "loss": 0.3491, + "step": 5050 + }, + { + "epoch": 0.19537433877755897, + "grad_norm": 2.441228151321411, + "learning_rate": 0.00018697761818345626, + "loss": 0.3975, + "step": 5060 + }, + { + "epoch": 0.19576045407158577, + "grad_norm": 2.6739327907562256, + "learning_rate": 0.00018695187716385447, + "loss": 0.3418, + "step": 5070 + }, + { + "epoch": 0.19614656936561256, + "grad_norm": 2.3216919898986816, + "learning_rate": 0.00018692613614425268, + "loss": 0.6265, + "step": 5080 + }, + { + "epoch": 0.19653268465963936, + "grad_norm": 3.9119021892547607, + "learning_rate": 0.0001869003951246509, + "loss": 0.2982, + "step": 5090 + }, + { + "epoch": 0.19691879995366615, + "grad_norm": 5.744061470031738, + "learning_rate": 0.0001868746541050491, + "loss": 0.4048, + "step": 5100 + }, + { + "epoch": 0.19730491524769297, + "grad_norm": 8.512910842895508, + "learning_rate": 0.00018684891308544732, + "loss": 0.3598, + "step": 5110 + }, + { + "epoch": 0.19769103054171977, + "grad_norm": 1.6382296085357666, + "learning_rate": 0.00018682317206584554, + "loss": 0.2121, + "step": 5120 + }, + { + "epoch": 0.19807714583574657, + "grad_norm": 2.1593070030212402, + "learning_rate": 0.00018679743104624375, + "loss": 0.4914, + "step": 5130 + }, + { + "epoch": 0.19846326112977336, + "grad_norm": 3.067112445831299, + "learning_rate": 0.00018677169002664196, + "loss": 0.4171, + "step": 5140 + }, + { + "epoch": 0.19884937642380016, + "grad_norm": 1.9954415559768677, + "learning_rate": 0.00018674594900704018, + "loss": 0.5161, + "step": 5150 + }, + { + "epoch": 0.19923549171782695, + "grad_norm": 2.793346643447876, + "learning_rate": 0.0001867202079874384, + "loss": 0.4159, + "step": 5160 + }, + { + "epoch": 0.19962160701185375, + "grad_norm": 4.087403774261475, + "learning_rate": 0.0001866944669678366, + "loss": 0.3339, + "step": 5170 + }, + { + "epoch": 0.20000772230588054, + "grad_norm": 2.10153865814209, + "learning_rate": 0.00018666872594823482, + "loss": 0.4352, + "step": 5180 + }, + { + "epoch": 0.20039383759990734, + "grad_norm": 2.947117805480957, + "learning_rate": 0.00018664298492863303, + "loss": 0.305, + 
"step": 5190 + }, + { + "epoch": 0.20077995289393413, + "grad_norm": 1.2496302127838135, + "learning_rate": 0.00018661724390903124, + "loss": 0.4578, + "step": 5200 + }, + { + "epoch": 0.20116606818796093, + "grad_norm": 0.5246118903160095, + "learning_rate": 0.00018659150288942946, + "loss": 0.7531, + "step": 5210 + }, + { + "epoch": 0.20155218348198772, + "grad_norm": 4.099668502807617, + "learning_rate": 0.00018656576186982767, + "loss": 0.3809, + "step": 5220 + }, + { + "epoch": 0.20193829877601452, + "grad_norm": 4.237419128417969, + "learning_rate": 0.00018654002085022588, + "loss": 0.3169, + "step": 5230 + }, + { + "epoch": 0.20232441407004131, + "grad_norm": 1.6228466033935547, + "learning_rate": 0.0001865142798306241, + "loss": 0.5832, + "step": 5240 + }, + { + "epoch": 0.2027105293640681, + "grad_norm": 4.567386627197266, + "learning_rate": 0.0001864885388110223, + "loss": 0.2177, + "step": 5250 + }, + { + "epoch": 0.2030966446580949, + "grad_norm": 1.4991040229797363, + "learning_rate": 0.00018646279779142052, + "loss": 0.3851, + "step": 5260 + }, + { + "epoch": 0.2034827599521217, + "grad_norm": 2.127082586288452, + "learning_rate": 0.00018643705677181874, + "loss": 0.4721, + "step": 5270 + }, + { + "epoch": 0.2038688752461485, + "grad_norm": 2.9149303436279297, + "learning_rate": 0.00018641131575221695, + "loss": 0.2556, + "step": 5280 + }, + { + "epoch": 0.2042549905401753, + "grad_norm": 0.06375914812088013, + "learning_rate": 0.00018638557473261516, + "loss": 0.3599, + "step": 5290 + }, + { + "epoch": 0.20464110583420209, + "grad_norm": 3.338331699371338, + "learning_rate": 0.00018635983371301338, + "loss": 0.4062, + "step": 5300 + }, + { + "epoch": 0.20502722112822888, + "grad_norm": 4.006681442260742, + "learning_rate": 0.0001863340926934116, + "loss": 0.4538, + "step": 5310 + }, + { + "epoch": 0.20541333642225568, + "grad_norm": 1.1406009197235107, + "learning_rate": 0.0001863083516738098, + "loss": 0.8432, + "step": 5320 + }, + { + "epoch": 0.20579945171628247, + "grad_norm": 9.281437873840332, + "learning_rate": 0.00018628261065420802, + "loss": 0.4538, + "step": 5330 + }, + { + "epoch": 0.20618556701030927, + "grad_norm": 3.1884214878082275, + "learning_rate": 0.00018625686963460626, + "loss": 0.3361, + "step": 5340 + }, + { + "epoch": 0.20657168230433606, + "grad_norm": 1.4311977624893188, + "learning_rate": 0.00018623112861500444, + "loss": 0.5519, + "step": 5350 + }, + { + "epoch": 0.20695779759836286, + "grad_norm": 3.574361801147461, + "learning_rate": 0.00018620538759540266, + "loss": 0.518, + "step": 5360 + }, + { + "epoch": 0.20734391289238968, + "grad_norm": 3.0186073780059814, + "learning_rate": 0.00018617964657580087, + "loss": 0.4204, + "step": 5370 + }, + { + "epoch": 0.20773002818641648, + "grad_norm": 2.832859754562378, + "learning_rate": 0.00018615390555619908, + "loss": 0.5736, + "step": 5380 + }, + { + "epoch": 0.20811614348044327, + "grad_norm": 2.2258200645446777, + "learning_rate": 0.0001861281645365973, + "loss": 0.8194, + "step": 5390 + }, + { + "epoch": 0.20850225877447007, + "grad_norm": 1.0975148677825928, + "learning_rate": 0.0001861024235169955, + "loss": 0.5235, + "step": 5400 + }, + { + "epoch": 0.20888837406849686, + "grad_norm": 2.597329616546631, + "learning_rate": 0.00018607668249739375, + "loss": 0.2798, + "step": 5410 + }, + { + "epoch": 0.20927448936252366, + "grad_norm": 1.3780876398086548, + "learning_rate": 0.00018605094147779194, + "loss": 0.4046, + "step": 5420 + }, + { + "epoch": 0.20966060465655045, + "grad_norm": 
2.409886598587036, + "learning_rate": 0.00018602520045819018, + "loss": 0.3243, + "step": 5430 + }, + { + "epoch": 0.21004671995057725, + "grad_norm": 1.0368077754974365, + "learning_rate": 0.00018599945943858836, + "loss": 0.4469, + "step": 5440 + }, + { + "epoch": 0.21043283524460404, + "grad_norm": 2.961658000946045, + "learning_rate": 0.00018597371841898658, + "loss": 0.5104, + "step": 5450 + }, + { + "epoch": 0.21081895053863084, + "grad_norm": 1.1599836349487305, + "learning_rate": 0.00018594797739938482, + "loss": 0.3422, + "step": 5460 + }, + { + "epoch": 0.21120506583265763, + "grad_norm": 3.293682336807251, + "learning_rate": 0.000185922236379783, + "loss": 0.3556, + "step": 5470 + }, + { + "epoch": 0.21159118112668443, + "grad_norm": 1.6923863887786865, + "learning_rate": 0.00018589649536018124, + "loss": 0.3084, + "step": 5480 + }, + { + "epoch": 0.21197729642071123, + "grad_norm": 3.7289531230926514, + "learning_rate": 0.00018587075434057943, + "loss": 0.4668, + "step": 5490 + }, + { + "epoch": 0.21236341171473802, + "grad_norm": 1.3744993209838867, + "learning_rate": 0.00018584501332097767, + "loss": 0.2984, + "step": 5500 + }, + { + "epoch": 0.21274952700876482, + "grad_norm": 1.4377775192260742, + "learning_rate": 0.00018581927230137586, + "loss": 0.2622, + "step": 5510 + }, + { + "epoch": 0.2131356423027916, + "grad_norm": 4.957859992980957, + "learning_rate": 0.00018579353128177407, + "loss": 0.5561, + "step": 5520 + }, + { + "epoch": 0.2135217575968184, + "grad_norm": 3.2645647525787354, + "learning_rate": 0.0001857677902621723, + "loss": 0.59, + "step": 5530 + }, + { + "epoch": 0.2139078728908452, + "grad_norm": 1.1365091800689697, + "learning_rate": 0.0001857420492425705, + "loss": 0.443, + "step": 5540 + }, + { + "epoch": 0.214293988184872, + "grad_norm": 3.187476396560669, + "learning_rate": 0.00018571630822296874, + "loss": 0.2612, + "step": 5550 + }, + { + "epoch": 0.2146801034788988, + "grad_norm": 2.6851940155029297, + "learning_rate": 0.00018569056720336692, + "loss": 0.4543, + "step": 5560 + }, + { + "epoch": 0.2150662187729256, + "grad_norm": 2.2613587379455566, + "learning_rate": 0.00018566482618376516, + "loss": 0.3185, + "step": 5570 + }, + { + "epoch": 0.21545233406695238, + "grad_norm": 1.292475700378418, + "learning_rate": 0.00018563908516416335, + "loss": 0.2794, + "step": 5580 + }, + { + "epoch": 0.21583844936097918, + "grad_norm": 2.0878446102142334, + "learning_rate": 0.00018561334414456156, + "loss": 0.3908, + "step": 5590 + }, + { + "epoch": 0.21622456465500597, + "grad_norm": 8.058819770812988, + "learning_rate": 0.0001855876031249598, + "loss": 0.6282, + "step": 5600 + }, + { + "epoch": 0.21661067994903277, + "grad_norm": 1.8231629133224487, + "learning_rate": 0.000185561862105358, + "loss": 0.4973, + "step": 5610 + }, + { + "epoch": 0.21699679524305956, + "grad_norm": 3.947242259979248, + "learning_rate": 0.00018553612108575623, + "loss": 0.4598, + "step": 5620 + }, + { + "epoch": 0.21738291053708636, + "grad_norm": 3.3258073329925537, + "learning_rate": 0.00018551038006615442, + "loss": 0.5266, + "step": 5630 + }, + { + "epoch": 0.21776902583111318, + "grad_norm": 2.301485300064087, + "learning_rate": 0.00018548463904655266, + "loss": 0.4339, + "step": 5640 + }, + { + "epoch": 0.21815514112513998, + "grad_norm": 4.4706878662109375, + "learning_rate": 0.00018545889802695087, + "loss": 0.5233, + "step": 5650 + }, + { + "epoch": 0.21854125641916677, + "grad_norm": 1.1203399896621704, + "learning_rate": 0.00018543315700734906, + "loss": 0.4547, + 
"step": 5660 + }, + { + "epoch": 0.21892737171319357, + "grad_norm": 0.3744584918022156, + "learning_rate": 0.0001854074159877473, + "loss": 0.2524, + "step": 5670 + }, + { + "epoch": 0.21931348700722036, + "grad_norm": 2.7888870239257812, + "learning_rate": 0.00018538167496814548, + "loss": 0.411, + "step": 5680 + }, + { + "epoch": 0.21969960230124716, + "grad_norm": 4.9972429275512695, + "learning_rate": 0.00018535593394854372, + "loss": 0.6359, + "step": 5690 + }, + { + "epoch": 0.22008571759527396, + "grad_norm": 1.1321420669555664, + "learning_rate": 0.0001853301929289419, + "loss": 0.4068, + "step": 5700 + }, + { + "epoch": 0.22047183288930075, + "grad_norm": 1.9291785955429077, + "learning_rate": 0.00018530445190934015, + "loss": 0.5428, + "step": 5710 + }, + { + "epoch": 0.22085794818332755, + "grad_norm": 0.8663263916969299, + "learning_rate": 0.00018527871088973836, + "loss": 0.4662, + "step": 5720 + }, + { + "epoch": 0.22124406347735434, + "grad_norm": 3.039782762527466, + "learning_rate": 0.00018525296987013655, + "loss": 0.3045, + "step": 5730 + }, + { + "epoch": 0.22163017877138114, + "grad_norm": 1.3552179336547852, + "learning_rate": 0.0001852272288505348, + "loss": 0.3411, + "step": 5740 + }, + { + "epoch": 0.22201629406540793, + "grad_norm": 1.4136948585510254, + "learning_rate": 0.00018520148783093298, + "loss": 0.5517, + "step": 5750 + }, + { + "epoch": 0.22240240935943473, + "grad_norm": 2.463942766189575, + "learning_rate": 0.00018517574681133122, + "loss": 0.4681, + "step": 5760 + }, + { + "epoch": 0.22278852465346152, + "grad_norm": 0.9063917994499207, + "learning_rate": 0.0001851500057917294, + "loss": 0.4537, + "step": 5770 + }, + { + "epoch": 0.22317463994748832, + "grad_norm": 2.352678060531616, + "learning_rate": 0.00018512426477212764, + "loss": 0.4245, + "step": 5780 + }, + { + "epoch": 0.2235607552415151, + "grad_norm": 2.0424869060516357, + "learning_rate": 0.00018509852375252586, + "loss": 0.2892, + "step": 5790 + }, + { + "epoch": 0.2239468705355419, + "grad_norm": 2.7604904174804688, + "learning_rate": 0.00018507278273292404, + "loss": 0.3606, + "step": 5800 + }, + { + "epoch": 0.2243329858295687, + "grad_norm": 2.827798366546631, + "learning_rate": 0.00018504704171332228, + "loss": 0.3212, + "step": 5810 + }, + { + "epoch": 0.2247191011235955, + "grad_norm": 3.1988680362701416, + "learning_rate": 0.00018502130069372047, + "loss": 0.5649, + "step": 5820 + }, + { + "epoch": 0.2251052164176223, + "grad_norm": 1.8216092586517334, + "learning_rate": 0.0001849955596741187, + "loss": 0.2871, + "step": 5830 + }, + { + "epoch": 0.2254913317116491, + "grad_norm": 2.7595627307891846, + "learning_rate": 0.00018496981865451692, + "loss": 0.665, + "step": 5840 + }, + { + "epoch": 0.22587744700567589, + "grad_norm": 1.2395098209381104, + "learning_rate": 0.00018494407763491514, + "loss": 0.2504, + "step": 5850 + }, + { + "epoch": 0.22626356229970268, + "grad_norm": 0.6991098523139954, + "learning_rate": 0.00018491833661531335, + "loss": 0.2263, + "step": 5860 + }, + { + "epoch": 0.22664967759372948, + "grad_norm": 11.053647994995117, + "learning_rate": 0.00018489259559571156, + "loss": 0.5919, + "step": 5870 + }, + { + "epoch": 0.22703579288775627, + "grad_norm": 2.8663880825042725, + "learning_rate": 0.00018486685457610978, + "loss": 0.3399, + "step": 5880 + }, + { + "epoch": 0.22742190818178307, + "grad_norm": 1.4995262622833252, + "learning_rate": 0.00018484111355650796, + "loss": 0.4474, + "step": 5890 + }, + { + "epoch": 0.2278080234758099, + "grad_norm": 
3.275681972503662, + "learning_rate": 0.0001848153725369062, + "loss": 0.4347, + "step": 5900 + }, + { + "epoch": 0.22819413876983669, + "grad_norm": 14.772253036499023, + "learning_rate": 0.00018478963151730442, + "loss": 0.3705, + "step": 5910 + }, + { + "epoch": 0.22858025406386348, + "grad_norm": 3.184976816177368, + "learning_rate": 0.00018476389049770263, + "loss": 0.3866, + "step": 5920 + }, + { + "epoch": 0.22896636935789028, + "grad_norm": 2.310765504837036, + "learning_rate": 0.00018473814947810084, + "loss": 0.2717, + "step": 5930 + }, + { + "epoch": 0.22935248465191707, + "grad_norm": 2.061189889907837, + "learning_rate": 0.00018471240845849906, + "loss": 0.2054, + "step": 5940 + }, + { + "epoch": 0.22973859994594387, + "grad_norm": 10.815469741821289, + "learning_rate": 0.00018468666743889727, + "loss": 0.5868, + "step": 5950 + }, + { + "epoch": 0.23012471523997066, + "grad_norm": 1.7080497741699219, + "learning_rate": 0.00018466092641929548, + "loss": 0.236, + "step": 5960 + }, + { + "epoch": 0.23051083053399746, + "grad_norm": 7.389080047607422, + "learning_rate": 0.0001846351853996937, + "loss": 0.2752, + "step": 5970 + }, + { + "epoch": 0.23089694582802425, + "grad_norm": 2.9860422611236572, + "learning_rate": 0.0001846094443800919, + "loss": 0.3436, + "step": 5980 + }, + { + "epoch": 0.23128306112205105, + "grad_norm": 13.12328815460205, + "learning_rate": 0.00018458370336049012, + "loss": 0.3952, + "step": 5990 + }, + { + "epoch": 0.23166917641607784, + "grad_norm": 3.7130823135375977, + "learning_rate": 0.00018455796234088834, + "loss": 0.3658, + "step": 6000 + }, + { + "epoch": 0.23205529171010464, + "grad_norm": 1.8329843282699585, + "learning_rate": 0.00018453222132128655, + "loss": 0.4172, + "step": 6010 + }, + { + "epoch": 0.23244140700413143, + "grad_norm": 1.3583799600601196, + "learning_rate": 0.00018450648030168476, + "loss": 0.4005, + "step": 6020 + }, + { + "epoch": 0.23282752229815823, + "grad_norm": 3.1711816787719727, + "learning_rate": 0.00018448073928208297, + "loss": 0.3674, + "step": 6030 + }, + { + "epoch": 0.23321363759218502, + "grad_norm": 1.576937198638916, + "learning_rate": 0.0001844549982624812, + "loss": 0.3444, + "step": 6040 + }, + { + "epoch": 0.23359975288621182, + "grad_norm": 3.922267436981201, + "learning_rate": 0.0001844292572428794, + "loss": 0.5939, + "step": 6050 + }, + { + "epoch": 0.23398586818023862, + "grad_norm": 2.9851067066192627, + "learning_rate": 0.00018440351622327761, + "loss": 0.2387, + "step": 6060 + }, + { + "epoch": 0.2343719834742654, + "grad_norm": 2.1216888427734375, + "learning_rate": 0.00018437777520367583, + "loss": 0.3836, + "step": 6070 + }, + { + "epoch": 0.2347580987682922, + "grad_norm": 2.9788095951080322, + "learning_rate": 0.00018435203418407404, + "loss": 0.474, + "step": 6080 + }, + { + "epoch": 0.235144214062319, + "grad_norm": 1.0204919576644897, + "learning_rate": 0.00018432629316447225, + "loss": 0.2837, + "step": 6090 + }, + { + "epoch": 0.2355303293563458, + "grad_norm": 0.9091696739196777, + "learning_rate": 0.00018430055214487047, + "loss": 0.6203, + "step": 6100 + }, + { + "epoch": 0.2359164446503726, + "grad_norm": 0.25899162888526917, + "learning_rate": 0.00018427481112526868, + "loss": 0.4759, + "step": 6110 + }, + { + "epoch": 0.2363025599443994, + "grad_norm": 1.8625538349151611, + "learning_rate": 0.0001842490701056669, + "loss": 0.2992, + "step": 6120 + }, + { + "epoch": 0.23668867523842618, + "grad_norm": 1.586521863937378, + "learning_rate": 0.0001842233290860651, + "loss": 0.6122, + 
"step": 6130 + }, + { + "epoch": 0.23707479053245298, + "grad_norm": 2.387650966644287, + "learning_rate": 0.00018419758806646332, + "loss": 0.3276, + "step": 6140 + }, + { + "epoch": 0.23746090582647977, + "grad_norm": 4.840515613555908, + "learning_rate": 0.00018417184704686153, + "loss": 0.6295, + "step": 6150 + }, + { + "epoch": 0.2378470211205066, + "grad_norm": 1.70024836063385, + "learning_rate": 0.00018414610602725975, + "loss": 0.2047, + "step": 6160 + }, + { + "epoch": 0.2382331364145334, + "grad_norm": 2.791619062423706, + "learning_rate": 0.00018412036500765796, + "loss": 0.4364, + "step": 6170 + }, + { + "epoch": 0.2386192517085602, + "grad_norm": 3.710066318511963, + "learning_rate": 0.00018409462398805617, + "loss": 0.4564, + "step": 6180 + }, + { + "epoch": 0.23900536700258698, + "grad_norm": 2.564347982406616, + "learning_rate": 0.0001840688829684544, + "loss": 0.3156, + "step": 6190 + }, + { + "epoch": 0.23939148229661378, + "grad_norm": 2.3921267986297607, + "learning_rate": 0.0001840431419488526, + "loss": 0.3483, + "step": 6200 + }, + { + "epoch": 0.23977759759064057, + "grad_norm": 1.4785810708999634, + "learning_rate": 0.00018401740092925081, + "loss": 0.4338, + "step": 6210 + }, + { + "epoch": 0.24016371288466737, + "grad_norm": 3.624790906906128, + "learning_rate": 0.00018399165990964903, + "loss": 0.7156, + "step": 6220 + }, + { + "epoch": 0.24054982817869416, + "grad_norm": 3.942161798477173, + "learning_rate": 0.00018396591889004724, + "loss": 0.3932, + "step": 6230 + }, + { + "epoch": 0.24093594347272096, + "grad_norm": 3.2236740589141846, + "learning_rate": 0.00018394017787044545, + "loss": 0.3933, + "step": 6240 + }, + { + "epoch": 0.24132205876674775, + "grad_norm": 2.5040500164031982, + "learning_rate": 0.00018391443685084367, + "loss": 0.5711, + "step": 6250 + }, + { + "epoch": 0.24170817406077455, + "grad_norm": 1.9934203624725342, + "learning_rate": 0.00018388869583124188, + "loss": 0.3074, + "step": 6260 + }, + { + "epoch": 0.24209428935480135, + "grad_norm": 3.702509641647339, + "learning_rate": 0.0001838629548116401, + "loss": 0.3454, + "step": 6270 + }, + { + "epoch": 0.24248040464882814, + "grad_norm": 2.076802968978882, + "learning_rate": 0.0001838372137920383, + "loss": 0.3044, + "step": 6280 + }, + { + "epoch": 0.24286651994285494, + "grad_norm": 5.798679351806641, + "learning_rate": 0.00018381147277243652, + "loss": 0.3396, + "step": 6290 + }, + { + "epoch": 0.24325263523688173, + "grad_norm": 4.698869705200195, + "learning_rate": 0.00018378573175283473, + "loss": 0.3735, + "step": 6300 + }, + { + "epoch": 0.24363875053090853, + "grad_norm": 3.029979705810547, + "learning_rate": 0.00018375999073323295, + "loss": 0.3891, + "step": 6310 + }, + { + "epoch": 0.24402486582493532, + "grad_norm": 2.5507185459136963, + "learning_rate": 0.00018373424971363116, + "loss": 0.4854, + "step": 6320 + }, + { + "epoch": 0.24441098111896212, + "grad_norm": 3.2052571773529053, + "learning_rate": 0.00018370850869402937, + "loss": 0.6789, + "step": 6330 + }, + { + "epoch": 0.2447970964129889, + "grad_norm": 1.9265435934066772, + "learning_rate": 0.00018368276767442761, + "loss": 0.4505, + "step": 6340 + }, + { + "epoch": 0.2451832117070157, + "grad_norm": 0.8391959071159363, + "learning_rate": 0.0001836570266548258, + "loss": 0.3432, + "step": 6350 + }, + { + "epoch": 0.2455693270010425, + "grad_norm": 3.4653851985931396, + "learning_rate": 0.00018363128563522401, + "loss": 0.3571, + "step": 6360 + }, + { + "epoch": 0.2459554422950693, + "grad_norm": 
2.3033368587493896, + "learning_rate": 0.00018360554461562223, + "loss": 0.3625, + "step": 6370 + }, + { + "epoch": 0.2463415575890961, + "grad_norm": 1.659408450126648, + "learning_rate": 0.00018357980359602044, + "loss": 0.5311, + "step": 6380 + }, + { + "epoch": 0.2467276728831229, + "grad_norm": 1.1839714050292969, + "learning_rate": 0.00018355406257641865, + "loss": 0.3905, + "step": 6390 + }, + { + "epoch": 0.24711378817714968, + "grad_norm": 0.49230822920799255, + "learning_rate": 0.00018352832155681687, + "loss": 0.4021, + "step": 6400 + }, + { + "epoch": 0.24749990347117648, + "grad_norm": 4.451594829559326, + "learning_rate": 0.0001835025805372151, + "loss": 0.4504, + "step": 6410 + }, + { + "epoch": 0.2478860187652033, + "grad_norm": 1.0058324337005615, + "learning_rate": 0.0001834768395176133, + "loss": 0.2636, + "step": 6420 + }, + { + "epoch": 0.2482721340592301, + "grad_norm": 2.7853894233703613, + "learning_rate": 0.0001834510984980115, + "loss": 0.47, + "step": 6430 + }, + { + "epoch": 0.2486582493532569, + "grad_norm": 2.730095148086548, + "learning_rate": 0.00018342535747840972, + "loss": 0.3941, + "step": 6440 + }, + { + "epoch": 0.2490443646472837, + "grad_norm": 2.4993178844451904, + "learning_rate": 0.00018339961645880793, + "loss": 0.5777, + "step": 6450 + }, + { + "epoch": 0.24943047994131048, + "grad_norm": 2.361525297164917, + "learning_rate": 0.00018337387543920617, + "loss": 0.3798, + "step": 6460 + }, + { + "epoch": 0.24981659523533728, + "grad_norm": 2.5558526515960693, + "learning_rate": 0.00018334813441960436, + "loss": 0.3113, + "step": 6470 + }, + { + "epoch": 0.25020271052936405, + "grad_norm": 0.8033503890037537, + "learning_rate": 0.0001833223934000026, + "loss": 0.5254, + "step": 6480 + }, + { + "epoch": 0.25058882582339087, + "grad_norm": 2.721090078353882, + "learning_rate": 0.0001832966523804008, + "loss": 0.393, + "step": 6490 + }, + { + "epoch": 0.25097494111741764, + "grad_norm": 1.7147916555404663, + "learning_rate": 0.000183270911360799, + "loss": 0.3225, + "step": 6500 + }, + { + "epoch": 0.25136105641144446, + "grad_norm": 2.388347864151001, + "learning_rate": 0.00018324517034119721, + "loss": 0.3519, + "step": 6510 + }, + { + "epoch": 0.25174717170547123, + "grad_norm": 2.470891237258911, + "learning_rate": 0.00018321942932159543, + "loss": 0.4384, + "step": 6520 + }, + { + "epoch": 0.25213328699949805, + "grad_norm": 1.4743351936340332, + "learning_rate": 0.00018319368830199367, + "loss": 0.2464, + "step": 6530 + }, + { + "epoch": 0.2525194022935248, + "grad_norm": 1.5889122486114502, + "learning_rate": 0.00018316794728239185, + "loss": 0.3149, + "step": 6540 + }, + { + "epoch": 0.25290551758755164, + "grad_norm": 4.900819778442383, + "learning_rate": 0.0001831422062627901, + "loss": 0.3978, + "step": 6550 + }, + { + "epoch": 0.25329163288157847, + "grad_norm": 5.22566556930542, + "learning_rate": 0.00018311646524318828, + "loss": 0.4473, + "step": 6560 + }, + { + "epoch": 0.25367774817560523, + "grad_norm": 4.7480363845825195, + "learning_rate": 0.0001830907242235865, + "loss": 0.3976, + "step": 6570 + }, + { + "epoch": 0.25406386346963206, + "grad_norm": 1.4711374044418335, + "learning_rate": 0.0001830649832039847, + "loss": 0.5183, + "step": 6580 + }, + { + "epoch": 0.2544499787636588, + "grad_norm": 2.237309217453003, + "learning_rate": 0.00018303924218438292, + "loss": 0.2171, + "step": 6590 + }, + { + "epoch": 0.25483609405768565, + "grad_norm": 4.107303619384766, + "learning_rate": 0.00018301350116478116, + "loss": 0.3918, + "step": 
6600 + }, + { + "epoch": 0.2552222093517124, + "grad_norm": 4.7285003662109375, + "learning_rate": 0.00018298776014517935, + "loss": 0.2042, + "step": 6610 + }, + { + "epoch": 0.25560832464573924, + "grad_norm": 2.1333792209625244, + "learning_rate": 0.0001829620191255776, + "loss": 0.3502, + "step": 6620 + }, + { + "epoch": 0.255994439939766, + "grad_norm": 3.062173843383789, + "learning_rate": 0.00018293627810597577, + "loss": 0.3949, + "step": 6630 + }, + { + "epoch": 0.25638055523379283, + "grad_norm": 1.538854956626892, + "learning_rate": 0.00018291053708637401, + "loss": 0.4613, + "step": 6640 + }, + { + "epoch": 0.2567666705278196, + "grad_norm": 2.546586751937866, + "learning_rate": 0.00018288479606677223, + "loss": 0.5868, + "step": 6650 + }, + { + "epoch": 0.2571527858218464, + "grad_norm": 2.7282049655914307, + "learning_rate": 0.00018285905504717041, + "loss": 0.4186, + "step": 6660 + }, + { + "epoch": 0.2575389011158732, + "grad_norm": 3.204634189605713, + "learning_rate": 0.00018283331402756865, + "loss": 0.4072, + "step": 6670 + }, + { + "epoch": 0.2579250164099, + "grad_norm": 2.421846866607666, + "learning_rate": 0.00018280757300796684, + "loss": 0.306, + "step": 6680 + }, + { + "epoch": 0.2583111317039268, + "grad_norm": 4.243416786193848, + "learning_rate": 0.00018278183198836508, + "loss": 0.2631, + "step": 6690 + }, + { + "epoch": 0.2586972469979536, + "grad_norm": 1.0495362281799316, + "learning_rate": 0.00018275609096876327, + "loss": 0.3488, + "step": 6700 + }, + { + "epoch": 0.25908336229198037, + "grad_norm": 1.915279746055603, + "learning_rate": 0.0001827303499491615, + "loss": 0.2589, + "step": 6710 + }, + { + "epoch": 0.2594694775860072, + "grad_norm": 3.724299192428589, + "learning_rate": 0.00018270460892955972, + "loss": 0.5118, + "step": 6720 + }, + { + "epoch": 0.25985559288003396, + "grad_norm": 2.832204580307007, + "learning_rate": 0.0001826788679099579, + "loss": 0.2508, + "step": 6730 + }, + { + "epoch": 0.2602417081740608, + "grad_norm": 1.1942508220672607, + "learning_rate": 0.00018265312689035615, + "loss": 0.4328, + "step": 6740 + }, + { + "epoch": 0.26062782346808755, + "grad_norm": 1.0741711854934692, + "learning_rate": 0.00018262738587075433, + "loss": 0.3514, + "step": 6750 + }, + { + "epoch": 0.2610139387621144, + "grad_norm": 2.9918277263641357, + "learning_rate": 0.00018260164485115257, + "loss": 0.3528, + "step": 6760 + }, + { + "epoch": 0.26140005405614114, + "grad_norm": 1.3773655891418457, + "learning_rate": 0.0001825759038315508, + "loss": 0.365, + "step": 6770 + }, + { + "epoch": 0.26178616935016796, + "grad_norm": 3.5288615226745605, + "learning_rate": 0.000182550162811949, + "loss": 0.3645, + "step": 6780 + }, + { + "epoch": 0.26217228464419473, + "grad_norm": 1.2178785800933838, + "learning_rate": 0.00018252442179234721, + "loss": 0.3742, + "step": 6790 + }, + { + "epoch": 0.26255839993822155, + "grad_norm": 2.7981081008911133, + "learning_rate": 0.0001824986807727454, + "loss": 0.6174, + "step": 6800 + }, + { + "epoch": 0.2629445152322484, + "grad_norm": 1.6766215562820435, + "learning_rate": 0.00018247293975314364, + "loss": 0.3028, + "step": 6810 + }, + { + "epoch": 0.26333063052627514, + "grad_norm": 3.7797629833221436, + "learning_rate": 0.00018244719873354183, + "loss": 0.2633, + "step": 6820 + }, + { + "epoch": 0.26371674582030197, + "grad_norm": 7.794743537902832, + "learning_rate": 0.00018242145771394007, + "loss": 0.3586, + "step": 6830 + }, + { + "epoch": 0.26410286111432874, + "grad_norm": 0.5704814195632935, + 
"learning_rate": 0.00018239571669433828, + "loss": 0.3506, + "step": 6840 + }, + { + "epoch": 0.26448897640835556, + "grad_norm": 5.771059513092041, + "learning_rate": 0.0001823699756747365, + "loss": 0.3881, + "step": 6850 + }, + { + "epoch": 0.2648750917023823, + "grad_norm": 2.723592519760132, + "learning_rate": 0.0001823442346551347, + "loss": 0.3955, + "step": 6860 + }, + { + "epoch": 0.26526120699640915, + "grad_norm": 1.5448215007781982, + "learning_rate": 0.0001823184936355329, + "loss": 0.495, + "step": 6870 + }, + { + "epoch": 0.2656473222904359, + "grad_norm": 2.2980363368988037, + "learning_rate": 0.00018229275261593113, + "loss": 0.2695, + "step": 6880 + }, + { + "epoch": 0.26603343758446274, + "grad_norm": 1.959811806678772, + "learning_rate": 0.00018226701159632932, + "loss": 0.383, + "step": 6890 + }, + { + "epoch": 0.2664195528784895, + "grad_norm": 2.1491482257843018, + "learning_rate": 0.00018224127057672756, + "loss": 0.5655, + "step": 6900 + }, + { + "epoch": 0.26680566817251633, + "grad_norm": 6.472841262817383, + "learning_rate": 0.00018221552955712577, + "loss": 0.4757, + "step": 6910 + }, + { + "epoch": 0.2671917834665431, + "grad_norm": 7.878561496734619, + "learning_rate": 0.000182189788537524, + "loss": 0.3944, + "step": 6920 + }, + { + "epoch": 0.2675778987605699, + "grad_norm": 0.052701435983181, + "learning_rate": 0.0001821640475179222, + "loss": 0.382, + "step": 6930 + }, + { + "epoch": 0.2679640140545967, + "grad_norm": 2.294677972793579, + "learning_rate": 0.00018213830649832039, + "loss": 0.2932, + "step": 6940 + }, + { + "epoch": 0.2683501293486235, + "grad_norm": 1.6058757305145264, + "learning_rate": 0.00018211256547871863, + "loss": 0.4438, + "step": 6950 + }, + { + "epoch": 0.2687362446426503, + "grad_norm": 4.003495693206787, + "learning_rate": 0.00018208682445911684, + "loss": 0.5945, + "step": 6960 + }, + { + "epoch": 0.2691223599366771, + "grad_norm": 1.423017144203186, + "learning_rate": 0.00018206108343951505, + "loss": 0.4356, + "step": 6970 + }, + { + "epoch": 0.26950847523070387, + "grad_norm": 2.206341028213501, + "learning_rate": 0.00018203534241991327, + "loss": 0.344, + "step": 6980 + }, + { + "epoch": 0.2698945905247307, + "grad_norm": 0.6644784212112427, + "learning_rate": 0.00018200960140031148, + "loss": 0.4988, + "step": 6990 + }, + { + "epoch": 0.27028070581875746, + "grad_norm": 2.4569833278656006, + "learning_rate": 0.0001819838603807097, + "loss": 0.3689, + "step": 7000 + }, + { + "epoch": 0.2706668211127843, + "grad_norm": 1.554567575454712, + "learning_rate": 0.00018195811936110788, + "loss": 0.4684, + "step": 7010 + }, + { + "epoch": 0.27105293640681105, + "grad_norm": 3.2556328773498535, + "learning_rate": 0.00018193237834150612, + "loss": 0.611, + "step": 7020 + }, + { + "epoch": 0.2714390517008379, + "grad_norm": 2.9123427867889404, + "learning_rate": 0.00018190663732190433, + "loss": 0.4278, + "step": 7030 + }, + { + "epoch": 0.27182516699486464, + "grad_norm": 2.159273862838745, + "learning_rate": 0.00018188089630230255, + "loss": 0.2384, + "step": 7040 + }, + { + "epoch": 0.27221128228889147, + "grad_norm": 3.4977822303771973, + "learning_rate": 0.00018185515528270076, + "loss": 0.5459, + "step": 7050 + }, + { + "epoch": 0.27259739758291823, + "grad_norm": 1.1822031736373901, + "learning_rate": 0.00018182941426309897, + "loss": 0.4364, + "step": 7060 + }, + { + "epoch": 0.27298351287694506, + "grad_norm": 2.4467339515686035, + "learning_rate": 0.00018180367324349719, + "loss": 0.5198, + "step": 7070 + }, + { + "epoch": 
0.2733696281709719, + "grad_norm": 1.0406467914581299, + "learning_rate": 0.0001817779322238954, + "loss": 0.2797, + "step": 7080 + }, + { + "epoch": 0.27375574346499865, + "grad_norm": 1.925830602645874, + "learning_rate": 0.0001817521912042936, + "loss": 0.4898, + "step": 7090 + }, + { + "epoch": 0.27414185875902547, + "grad_norm": 3.0385682582855225, + "learning_rate": 0.00018172645018469183, + "loss": 0.3867, + "step": 7100 + }, + { + "epoch": 0.27452797405305224, + "grad_norm": 1.5285695791244507, + "learning_rate": 0.00018170070916509004, + "loss": 0.4233, + "step": 7110 + }, + { + "epoch": 0.27491408934707906, + "grad_norm": 1.266693115234375, + "learning_rate": 0.00018167496814548825, + "loss": 0.4724, + "step": 7120 + }, + { + "epoch": 0.27530020464110583, + "grad_norm": 3.371323585510254, + "learning_rate": 0.00018164922712588647, + "loss": 0.533, + "step": 7130 + }, + { + "epoch": 0.27568631993513265, + "grad_norm": 2.662691116333008, + "learning_rate": 0.00018162348610628468, + "loss": 0.3134, + "step": 7140 + }, + { + "epoch": 0.2760724352291594, + "grad_norm": 1.8977057933807373, + "learning_rate": 0.0001815977450866829, + "loss": 0.3038, + "step": 7150 + }, + { + "epoch": 0.27645855052318624, + "grad_norm": 3.1027894020080566, + "learning_rate": 0.0001815720040670811, + "loss": 0.5074, + "step": 7160 + }, + { + "epoch": 0.276844665817213, + "grad_norm": 1.2112785577774048, + "learning_rate": 0.00018154626304747932, + "loss": 0.324, + "step": 7170 + }, + { + "epoch": 0.27723078111123983, + "grad_norm": 1.6500996351242065, + "learning_rate": 0.00018152052202787753, + "loss": 0.2856, + "step": 7180 + }, + { + "epoch": 0.2776168964052666, + "grad_norm": 3.215747833251953, + "learning_rate": 0.00018149478100827575, + "loss": 0.4522, + "step": 7190 + }, + { + "epoch": 0.2780030116992934, + "grad_norm": 4.8541059494018555, + "learning_rate": 0.00018146903998867396, + "loss": 0.4106, + "step": 7200 + }, + { + "epoch": 0.2783891269933202, + "grad_norm": 2.3697152137756348, + "learning_rate": 0.00018144329896907217, + "loss": 0.2673, + "step": 7210 + }, + { + "epoch": 0.278775242287347, + "grad_norm": 2.9693639278411865, + "learning_rate": 0.00018141755794947039, + "loss": 0.3949, + "step": 7220 + }, + { + "epoch": 0.2791613575813738, + "grad_norm": 2.691817283630371, + "learning_rate": 0.0001813918169298686, + "loss": 0.3427, + "step": 7230 + }, + { + "epoch": 0.2795474728754006, + "grad_norm": 5.197331428527832, + "learning_rate": 0.0001813660759102668, + "loss": 0.4331, + "step": 7240 + }, + { + "epoch": 0.27993358816942737, + "grad_norm": 1.5799933671951294, + "learning_rate": 0.00018134033489066503, + "loss": 0.3543, + "step": 7250 + }, + { + "epoch": 0.2803197034634542, + "grad_norm": 1.3614271879196167, + "learning_rate": 0.00018131459387106324, + "loss": 0.5289, + "step": 7260 + }, + { + "epoch": 0.28070581875748096, + "grad_norm": 2.2942802906036377, + "learning_rate": 0.00018128885285146145, + "loss": 0.4318, + "step": 7270 + }, + { + "epoch": 0.2810919340515078, + "grad_norm": 1.1805604696273804, + "learning_rate": 0.00018126311183185967, + "loss": 0.4754, + "step": 7280 + }, + { + "epoch": 0.28147804934553455, + "grad_norm": 0.5108867883682251, + "learning_rate": 0.00018123737081225788, + "loss": 0.4517, + "step": 7290 + }, + { + "epoch": 0.2818641646395614, + "grad_norm": 1.1736596822738647, + "learning_rate": 0.0001812116297926561, + "loss": 0.4538, + "step": 7300 + }, + { + "epoch": 0.28225027993358814, + "grad_norm": 5.497414588928223, + "learning_rate": 
0.0001811858887730543, + "loss": 0.5116, + "step": 7310 + }, + { + "epoch": 0.28263639522761497, + "grad_norm": 1.1347368955612183, + "learning_rate": 0.00018116014775345252, + "loss": 0.3848, + "step": 7320 + }, + { + "epoch": 0.28302251052164173, + "grad_norm": 2.740715742111206, + "learning_rate": 0.00018113440673385073, + "loss": 0.3456, + "step": 7330 + }, + { + "epoch": 0.28340862581566856, + "grad_norm": 1.3853389024734497, + "learning_rate": 0.00018110866571424897, + "loss": 0.3398, + "step": 7340 + }, + { + "epoch": 0.2837947411096954, + "grad_norm": 7.493706703186035, + "learning_rate": 0.00018108292469464716, + "loss": 0.2726, + "step": 7350 + }, + { + "epoch": 0.28418085640372215, + "grad_norm": 1.81704843044281, + "learning_rate": 0.00018105718367504537, + "loss": 0.3818, + "step": 7360 + }, + { + "epoch": 0.28456697169774897, + "grad_norm": 2.4877755641937256, + "learning_rate": 0.00018103144265544359, + "loss": 0.3499, + "step": 7370 + }, + { + "epoch": 0.28495308699177574, + "grad_norm": 1.3704471588134766, + "learning_rate": 0.0001810057016358418, + "loss": 0.2346, + "step": 7380 + }, + { + "epoch": 0.28533920228580256, + "grad_norm": 2.664745569229126, + "learning_rate": 0.00018097996061624, + "loss": 0.4041, + "step": 7390 + }, + { + "epoch": 0.28572531757982933, + "grad_norm": 3.6539089679718018, + "learning_rate": 0.00018095421959663823, + "loss": 0.2885, + "step": 7400 + }, + { + "epoch": 0.28611143287385615, + "grad_norm": 0.8653857707977295, + "learning_rate": 0.00018092847857703647, + "loss": 0.3849, + "step": 7410 + }, + { + "epoch": 0.2864975481678829, + "grad_norm": 2.6319446563720703, + "learning_rate": 0.00018090273755743465, + "loss": 0.2728, + "step": 7420 + }, + { + "epoch": 0.28688366346190974, + "grad_norm": 2.3457818031311035, + "learning_rate": 0.00018087699653783287, + "loss": 0.446, + "step": 7430 + }, + { + "epoch": 0.2872697787559365, + "grad_norm": 0.8546158671379089, + "learning_rate": 0.00018085125551823108, + "loss": 0.2898, + "step": 7440 + }, + { + "epoch": 0.28765589404996333, + "grad_norm": 0.45937278866767883, + "learning_rate": 0.0001808255144986293, + "loss": 0.583, + "step": 7450 + }, + { + "epoch": 0.2880420093439901, + "grad_norm": 1.7129520177841187, + "learning_rate": 0.00018079977347902753, + "loss": 0.4908, + "step": 7460 + }, + { + "epoch": 0.2884281246380169, + "grad_norm": 4.106715679168701, + "learning_rate": 0.00018077403245942572, + "loss": 0.3373, + "step": 7470 + }, + { + "epoch": 0.2888142399320437, + "grad_norm": 3.8112800121307373, + "learning_rate": 0.00018074829143982396, + "loss": 0.392, + "step": 7480 + }, + { + "epoch": 0.2892003552260705, + "grad_norm": 0.5382593274116516, + "learning_rate": 0.00018072255042022215, + "loss": 0.2929, + "step": 7490 + }, + { + "epoch": 0.2895864705200973, + "grad_norm": 2.50888991355896, + "learning_rate": 0.00018069680940062036, + "loss": 0.3361, + "step": 7500 + }, + { + "epoch": 0.2899725858141241, + "grad_norm": 3.3544275760650635, + "learning_rate": 0.00018067106838101857, + "loss": 0.388, + "step": 7510 + }, + { + "epoch": 0.2903587011081509, + "grad_norm": 1.192386507987976, + "learning_rate": 0.00018064532736141679, + "loss": 0.4427, + "step": 7520 + }, + { + "epoch": 0.2907448164021777, + "grad_norm": 1.5527079105377197, + "learning_rate": 0.00018061958634181503, + "loss": 0.4023, + "step": 7530 + }, + { + "epoch": 0.29113093169620446, + "grad_norm": 0.67446368932724, + "learning_rate": 0.0001805938453222132, + "loss": 0.4949, + "step": 7540 + }, + { + "epoch": 
0.2915170469902313, + "grad_norm": 1.6349838972091675, + "learning_rate": 0.00018056810430261145, + "loss": 0.3811, + "step": 7550 + }, + { + "epoch": 0.29190316228425806, + "grad_norm": 1.4848904609680176, + "learning_rate": 0.00018054236328300964, + "loss": 0.3851, + "step": 7560 + }, + { + "epoch": 0.2922892775782849, + "grad_norm": 0.9933151006698608, + "learning_rate": 0.00018051662226340785, + "loss": 0.4699, + "step": 7570 + }, + { + "epoch": 0.29267539287231165, + "grad_norm": 1.1026233434677124, + "learning_rate": 0.00018049088124380607, + "loss": 0.3287, + "step": 7580 + }, + { + "epoch": 0.29306150816633847, + "grad_norm": 1.232954740524292, + "learning_rate": 0.00018046514022420428, + "loss": 0.3722, + "step": 7590 + }, + { + "epoch": 0.2934476234603653, + "grad_norm": 3.8303146362304688, + "learning_rate": 0.00018043939920460252, + "loss": 0.2985, + "step": 7600 + }, + { + "epoch": 0.29383373875439206, + "grad_norm": 1.9358845949172974, + "learning_rate": 0.0001804136581850007, + "loss": 0.4361, + "step": 7610 + }, + { + "epoch": 0.2942198540484189, + "grad_norm": 1.8905962705612183, + "learning_rate": 0.00018038791716539895, + "loss": 0.2835, + "step": 7620 + }, + { + "epoch": 0.29460596934244565, + "grad_norm": 1.9965651035308838, + "learning_rate": 0.00018036217614579713, + "loss": 0.5387, + "step": 7630 + }, + { + "epoch": 0.2949920846364725, + "grad_norm": 4.204270839691162, + "learning_rate": 0.00018033643512619535, + "loss": 0.3498, + "step": 7640 + }, + { + "epoch": 0.29537819993049924, + "grad_norm": 1.4732340574264526, + "learning_rate": 0.00018031069410659359, + "loss": 0.315, + "step": 7650 + }, + { + "epoch": 0.29576431522452606, + "grad_norm": 1.0233594179153442, + "learning_rate": 0.00018028495308699177, + "loss": 0.1536, + "step": 7660 + }, + { + "epoch": 0.29615043051855283, + "grad_norm": 3.1531457901000977, + "learning_rate": 0.00018025921206739, + "loss": 0.3793, + "step": 7670 + }, + { + "epoch": 0.29653654581257965, + "grad_norm": 0.8080945014953613, + "learning_rate": 0.0001802334710477882, + "loss": 0.5589, + "step": 7680 + }, + { + "epoch": 0.2969226611066064, + "grad_norm": 3.1202728748321533, + "learning_rate": 0.00018020773002818644, + "loss": 0.4652, + "step": 7690 + }, + { + "epoch": 0.29730877640063325, + "grad_norm": 2.5934784412384033, + "learning_rate": 0.00018018198900858463, + "loss": 0.4921, + "step": 7700 + }, + { + "epoch": 0.29769489169466, + "grad_norm": 2.858642101287842, + "learning_rate": 0.00018015624798898284, + "loss": 0.2732, + "step": 7710 + }, + { + "epoch": 0.29808100698868684, + "grad_norm": 3.621229887008667, + "learning_rate": 0.00018013050696938108, + "loss": 0.5639, + "step": 7720 + }, + { + "epoch": 0.2984671222827136, + "grad_norm": 3.7943220138549805, + "learning_rate": 0.00018010476594977926, + "loss": 0.3177, + "step": 7730 + }, + { + "epoch": 0.2988532375767404, + "grad_norm": 1.6371623277664185, + "learning_rate": 0.0001800790249301775, + "loss": 0.4211, + "step": 7740 + }, + { + "epoch": 0.2992393528707672, + "grad_norm": 1.9557713270187378, + "learning_rate": 0.0001800532839105757, + "loss": 0.4351, + "step": 7750 + }, + { + "epoch": 0.299625468164794, + "grad_norm": 2.684964895248413, + "learning_rate": 0.00018002754289097393, + "loss": 0.39, + "step": 7760 + }, + { + "epoch": 0.3000115834588208, + "grad_norm": 1.7401316165924072, + "learning_rate": 0.00018000180187137215, + "loss": 0.2844, + "step": 7770 + }, + { + "epoch": 0.3003976987528476, + "grad_norm": 0.6305844187736511, + "learning_rate": 
0.00017997606085177033, + "loss": 0.2472, + "step": 7780 + }, + { + "epoch": 0.3007838140468744, + "grad_norm": 2.2880289554595947, + "learning_rate": 0.00017995031983216857, + "loss": 0.3952, + "step": 7790 + }, + { + "epoch": 0.3011699293409012, + "grad_norm": 3.423980951309204, + "learning_rate": 0.00017992457881256676, + "loss": 0.4459, + "step": 7800 + }, + { + "epoch": 0.30155604463492797, + "grad_norm": 0.6920475363731384, + "learning_rate": 0.000179898837792965, + "loss": 0.2909, + "step": 7810 + }, + { + "epoch": 0.3019421599289548, + "grad_norm": 0.8905349373817444, + "learning_rate": 0.00017987309677336318, + "loss": 0.346, + "step": 7820 + }, + { + "epoch": 0.30232827522298156, + "grad_norm": 1.8836702108383179, + "learning_rate": 0.00017984735575376143, + "loss": 0.4038, + "step": 7830 + }, + { + "epoch": 0.3027143905170084, + "grad_norm": 2.6712753772735596, + "learning_rate": 0.00017982161473415964, + "loss": 0.3452, + "step": 7840 + }, + { + "epoch": 0.30310050581103515, + "grad_norm": 2.344122886657715, + "learning_rate": 0.00017979587371455785, + "loss": 0.5091, + "step": 7850 + }, + { + "epoch": 0.30348662110506197, + "grad_norm": 3.734415054321289, + "learning_rate": 0.00017977013269495607, + "loss": 0.3893, + "step": 7860 + }, + { + "epoch": 0.3038727363990888, + "grad_norm": 1.70572829246521, + "learning_rate": 0.00017974439167535425, + "loss": 0.4829, + "step": 7870 + }, + { + "epoch": 0.30425885169311556, + "grad_norm": 1.779189109802246, + "learning_rate": 0.0001797186506557525, + "loss": 0.5361, + "step": 7880 + }, + { + "epoch": 0.3046449669871424, + "grad_norm": 2.888803482055664, + "learning_rate": 0.00017969290963615068, + "loss": 0.4305, + "step": 7890 + }, + { + "epoch": 0.30503108228116915, + "grad_norm": 1.2247655391693115, + "learning_rate": 0.00017966716861654892, + "loss": 0.3817, + "step": 7900 + }, + { + "epoch": 0.305417197575196, + "grad_norm": 2.995152473449707, + "learning_rate": 0.00017964142759694713, + "loss": 0.4669, + "step": 7910 + }, + { + "epoch": 0.30580331286922274, + "grad_norm": 8.049060821533203, + "learning_rate": 0.00017961568657734535, + "loss": 0.6706, + "step": 7920 + }, + { + "epoch": 0.30618942816324957, + "grad_norm": 2.1181435585021973, + "learning_rate": 0.00017958994555774356, + "loss": 0.4353, + "step": 7930 + }, + { + "epoch": 0.30657554345727633, + "grad_norm": 8.394509315490723, + "learning_rate": 0.00017956420453814174, + "loss": 0.3497, + "step": 7940 + }, + { + "epoch": 0.30696165875130316, + "grad_norm": 2.5140750408172607, + "learning_rate": 0.00017953846351853998, + "loss": 0.5774, + "step": 7950 + }, + { + "epoch": 0.3073477740453299, + "grad_norm": 2.720942974090576, + "learning_rate": 0.0001795127224989382, + "loss": 0.4457, + "step": 7960 + }, + { + "epoch": 0.30773388933935675, + "grad_norm": 1.8155667781829834, + "learning_rate": 0.0001794869814793364, + "loss": 0.4155, + "step": 7970 + }, + { + "epoch": 0.3081200046333835, + "grad_norm": 1.9989752769470215, + "learning_rate": 0.00017946124045973462, + "loss": 0.3233, + "step": 7980 + }, + { + "epoch": 0.30850611992741034, + "grad_norm": 0.7483557462692261, + "learning_rate": 0.00017943549944013284, + "loss": 0.2932, + "step": 7990 + }, + { + "epoch": 0.3088922352214371, + "grad_norm": 0.5750642418861389, + "learning_rate": 0.00017940975842053105, + "loss": 0.401, + "step": 8000 + }, + { + "epoch": 0.30927835051546393, + "grad_norm": 1.2084500789642334, + "learning_rate": 0.00017938401740092924, + "loss": 0.3705, + "step": 8010 + }, + { + "epoch": 
0.3096644658094907, + "grad_norm": 1.833434820175171, + "learning_rate": 0.00017935827638132748, + "loss": 0.3507, + "step": 8020 + }, + { + "epoch": 0.3100505811035175, + "grad_norm": 3.147508382797241, + "learning_rate": 0.0001793325353617257, + "loss": 0.3255, + "step": 8030 + }, + { + "epoch": 0.3104366963975443, + "grad_norm": 2.150932788848877, + "learning_rate": 0.0001793067943421239, + "loss": 0.3401, + "step": 8040 + }, + { + "epoch": 0.3108228116915711, + "grad_norm": 3.3340635299682617, + "learning_rate": 0.00017928105332252212, + "loss": 0.3606, + "step": 8050 + }, + { + "epoch": 0.3112089269855979, + "grad_norm": 5.173205375671387, + "learning_rate": 0.00017925531230292033, + "loss": 0.1695, + "step": 8060 + }, + { + "epoch": 0.3115950422796247, + "grad_norm": 1.0863877534866333, + "learning_rate": 0.00017922957128331854, + "loss": 0.3038, + "step": 8070 + }, + { + "epoch": 0.31198115757365147, + "grad_norm": 1.5977118015289307, + "learning_rate": 0.00017920383026371676, + "loss": 0.2291, + "step": 8080 + }, + { + "epoch": 0.3123672728676783, + "grad_norm": 4.040243625640869, + "learning_rate": 0.00017917808924411497, + "loss": 0.8538, + "step": 8090 + }, + { + "epoch": 0.31275338816170506, + "grad_norm": 1.5926854610443115, + "learning_rate": 0.00017915234822451318, + "loss": 0.4733, + "step": 8100 + }, + { + "epoch": 0.3131395034557319, + "grad_norm": 1.0959421396255493, + "learning_rate": 0.0001791266072049114, + "loss": 0.6076, + "step": 8110 + }, + { + "epoch": 0.3135256187497587, + "grad_norm": 2.786085367202759, + "learning_rate": 0.0001791008661853096, + "loss": 0.3229, + "step": 8120 + }, + { + "epoch": 0.3139117340437855, + "grad_norm": 2.2573914527893066, + "learning_rate": 0.00017907512516570782, + "loss": 0.3676, + "step": 8130 + }, + { + "epoch": 0.3142978493378123, + "grad_norm": 2.271852493286133, + "learning_rate": 0.00017904938414610604, + "loss": 0.6275, + "step": 8140 + }, + { + "epoch": 0.31468396463183906, + "grad_norm": 1.9762821197509766, + "learning_rate": 0.00017902364312650425, + "loss": 0.2232, + "step": 8150 + }, + { + "epoch": 0.3150700799258659, + "grad_norm": 2.9960873126983643, + "learning_rate": 0.00017899790210690246, + "loss": 0.4739, + "step": 8160 + }, + { + "epoch": 0.31545619521989265, + "grad_norm": 1.142216682434082, + "learning_rate": 0.00017897216108730068, + "loss": 0.5983, + "step": 8170 + }, + { + "epoch": 0.3158423105139195, + "grad_norm": 1.7127768993377686, + "learning_rate": 0.0001789464200676989, + "loss": 0.4131, + "step": 8180 + }, + { + "epoch": 0.31622842580794625, + "grad_norm": 1.579793095588684, + "learning_rate": 0.0001789206790480971, + "loss": 0.3119, + "step": 8190 + }, + { + "epoch": 0.31661454110197307, + "grad_norm": 0.9647886157035828, + "learning_rate": 0.00017889493802849532, + "loss": 0.398, + "step": 8200 + }, + { + "epoch": 0.31700065639599984, + "grad_norm": 3.435312032699585, + "learning_rate": 0.00017886919700889353, + "loss": 0.405, + "step": 8210 + }, + { + "epoch": 0.31738677169002666, + "grad_norm": 2.1500205993652344, + "learning_rate": 0.00017884345598929174, + "loss": 0.2519, + "step": 8220 + }, + { + "epoch": 0.3177728869840534, + "grad_norm": 1.3107216358184814, + "learning_rate": 0.00017881771496968996, + "loss": 0.2846, + "step": 8230 + }, + { + "epoch": 0.31815900227808025, + "grad_norm": 0.1899029165506363, + "learning_rate": 0.00017879197395008817, + "loss": 0.4597, + "step": 8240 + }, + { + "epoch": 0.318545117572107, + "grad_norm": 2.329299211502075, + "learning_rate": 
0.00017876623293048638, + "loss": 0.6523, + "step": 8250 + }, + { + "epoch": 0.31893123286613384, + "grad_norm": 0.41523978114128113, + "learning_rate": 0.0001787404919108846, + "loss": 0.3266, + "step": 8260 + }, + { + "epoch": 0.3193173481601606, + "grad_norm": 0.7914639711380005, + "learning_rate": 0.0001787147508912828, + "loss": 0.4029, + "step": 8270 + }, + { + "epoch": 0.31970346345418743, + "grad_norm": 0.6159287691116333, + "learning_rate": 0.00017868900987168102, + "loss": 0.4426, + "step": 8280 + }, + { + "epoch": 0.3200895787482142, + "grad_norm": 1.3690640926361084, + "learning_rate": 0.00017866326885207924, + "loss": 0.2974, + "step": 8290 + }, + { + "epoch": 0.320475694042241, + "grad_norm": 0.8592869639396667, + "learning_rate": 0.00017863752783247745, + "loss": 0.232, + "step": 8300 + }, + { + "epoch": 0.3208618093362678, + "grad_norm": 0.43169018626213074, + "learning_rate": 0.00017861178681287566, + "loss": 0.4033, + "step": 8310 + }, + { + "epoch": 0.3212479246302946, + "grad_norm": 0.8405828475952148, + "learning_rate": 0.00017858604579327388, + "loss": 0.3339, + "step": 8320 + }, + { + "epoch": 0.3216340399243214, + "grad_norm": 2.3412604331970215, + "learning_rate": 0.0001785603047736721, + "loss": 0.2781, + "step": 8330 + }, + { + "epoch": 0.3220201552183482, + "grad_norm": 2.412045478820801, + "learning_rate": 0.0001785345637540703, + "loss": 0.4346, + "step": 8340 + }, + { + "epoch": 0.32240627051237497, + "grad_norm": 3.626305341720581, + "learning_rate": 0.00017850882273446852, + "loss": 0.327, + "step": 8350 + }, + { + "epoch": 0.3227923858064018, + "grad_norm": 0.5645825266838074, + "learning_rate": 0.00017848308171486673, + "loss": 0.234, + "step": 8360 + }, + { + "epoch": 0.32317850110042856, + "grad_norm": 4.27307653427124, + "learning_rate": 0.00017845734069526494, + "loss": 0.5493, + "step": 8370 + }, + { + "epoch": 0.3235646163944554, + "grad_norm": 0.4511154890060425, + "learning_rate": 0.00017843159967566316, + "loss": 0.3501, + "step": 8380 + }, + { + "epoch": 0.3239507316884822, + "grad_norm": 0.314996600151062, + "learning_rate": 0.00017840585865606137, + "loss": 0.3544, + "step": 8390 + }, + { + "epoch": 0.324336846982509, + "grad_norm": 1.6546530723571777, + "learning_rate": 0.00017838011763645958, + "loss": 0.2455, + "step": 8400 + }, + { + "epoch": 0.3247229622765358, + "grad_norm": 3.2812252044677734, + "learning_rate": 0.0001783543766168578, + "loss": 0.3333, + "step": 8410 + }, + { + "epoch": 0.32510907757056257, + "grad_norm": 3.5717616081237793, + "learning_rate": 0.000178328635597256, + "loss": 0.4679, + "step": 8420 + }, + { + "epoch": 0.3254951928645894, + "grad_norm": 1.12017023563385, + "learning_rate": 0.00017830289457765422, + "loss": 0.3481, + "step": 8430 + }, + { + "epoch": 0.32588130815861616, + "grad_norm": 1.869462490081787, + "learning_rate": 0.00017827715355805244, + "loss": 0.4566, + "step": 8440 + }, + { + "epoch": 0.326267423452643, + "grad_norm": 1.4613149166107178, + "learning_rate": 0.00017825141253845065, + "loss": 0.5456, + "step": 8450 + }, + { + "epoch": 0.32665353874666975, + "grad_norm": 0.6842670440673828, + "learning_rate": 0.0001782256715188489, + "loss": 0.2776, + "step": 8460 + }, + { + "epoch": 0.32703965404069657, + "grad_norm": 2.9485504627227783, + "learning_rate": 0.00017819993049924708, + "loss": 0.3204, + "step": 8470 + }, + { + "epoch": 0.32742576933472334, + "grad_norm": 3.2084853649139404, + "learning_rate": 0.0001781741894796453, + "loss": 0.3391, + "step": 8480 + }, + { + "epoch": 
0.32781188462875016, + "grad_norm": 1.9732774496078491, + "learning_rate": 0.0001781484484600435, + "loss": 0.3283, + "step": 8490 + }, + { + "epoch": 0.32819799992277693, + "grad_norm": 0.6378610134124756, + "learning_rate": 0.00017812270744044172, + "loss": 0.4519, + "step": 8500 + }, + { + "epoch": 0.32858411521680375, + "grad_norm": 4.108947277069092, + "learning_rate": 0.00017809696642083993, + "loss": 0.4933, + "step": 8510 + }, + { + "epoch": 0.3289702305108305, + "grad_norm": 2.7623212337493896, + "learning_rate": 0.00017807122540123814, + "loss": 0.4197, + "step": 8520 + }, + { + "epoch": 0.32935634580485734, + "grad_norm": 1.8904645442962646, + "learning_rate": 0.00017804548438163638, + "loss": 0.325, + "step": 8530 + }, + { + "epoch": 0.3297424610988841, + "grad_norm": 0.5131659507751465, + "learning_rate": 0.00017801974336203457, + "loss": 0.2403, + "step": 8540 + }, + { + "epoch": 0.33012857639291093, + "grad_norm": 2.965916633605957, + "learning_rate": 0.0001779940023424328, + "loss": 0.4597, + "step": 8550 + }, + { + "epoch": 0.3305146916869377, + "grad_norm": 1.5409698486328125, + "learning_rate": 0.000177968261322831, + "loss": 0.4594, + "step": 8560 + }, + { + "epoch": 0.3309008069809645, + "grad_norm": 1.1746805906295776, + "learning_rate": 0.0001779425203032292, + "loss": 0.4581, + "step": 8570 + }, + { + "epoch": 0.3312869222749913, + "grad_norm": 4.493356227874756, + "learning_rate": 0.00017791677928362745, + "loss": 0.4699, + "step": 8580 + }, + { + "epoch": 0.3316730375690181, + "grad_norm": 3.506526470184326, + "learning_rate": 0.00017789103826402564, + "loss": 0.3974, + "step": 8590 + }, + { + "epoch": 0.3320591528630449, + "grad_norm": 2.3893234729766846, + "learning_rate": 0.00017786529724442388, + "loss": 0.2823, + "step": 8600 + }, + { + "epoch": 0.3324452681570717, + "grad_norm": 1.6228163242340088, + "learning_rate": 0.00017783955622482206, + "loss": 0.4199, + "step": 8610 + }, + { + "epoch": 0.3328313834510985, + "grad_norm": 3.2869131565093994, + "learning_rate": 0.0001778138152052203, + "loss": 0.3173, + "step": 8620 + }, + { + "epoch": 0.3332174987451253, + "grad_norm": 5.547116279602051, + "learning_rate": 0.0001777880741856185, + "loss": 0.4584, + "step": 8630 + }, + { + "epoch": 0.33360361403915206, + "grad_norm": 1.3338594436645508, + "learning_rate": 0.0001777623331660167, + "loss": 0.4235, + "step": 8640 + }, + { + "epoch": 0.3339897293331789, + "grad_norm": 1.9165093898773193, + "learning_rate": 0.00017773659214641494, + "loss": 0.2989, + "step": 8650 + }, + { + "epoch": 0.3343758446272057, + "grad_norm": 1.968935251235962, + "learning_rate": 0.00017771085112681313, + "loss": 0.4194, + "step": 8660 + }, + { + "epoch": 0.3347619599212325, + "grad_norm": 9.66997241973877, + "learning_rate": 0.00017768511010721137, + "loss": 0.5818, + "step": 8670 + }, + { + "epoch": 0.3351480752152593, + "grad_norm": 2.3636281490325928, + "learning_rate": 0.00017765936908760956, + "loss": 0.3317, + "step": 8680 + }, + { + "epoch": 0.33553419050928607, + "grad_norm": 3.3569977283477783, + "learning_rate": 0.0001776336280680078, + "loss": 0.4388, + "step": 8690 + }, + { + "epoch": 0.3359203058033129, + "grad_norm": 1.2452306747436523, + "learning_rate": 0.00017760788704840598, + "loss": 0.1368, + "step": 8700 + }, + { + "epoch": 0.33630642109733966, + "grad_norm": 0.0380173958837986, + "learning_rate": 0.0001775821460288042, + "loss": 0.3264, + "step": 8710 + }, + { + "epoch": 0.3366925363913665, + "grad_norm": 1.5271002054214478, + "learning_rate": 
0.00017755640500920244, + "loss": 0.2943, + "step": 8720 + }, + { + "epoch": 0.33707865168539325, + "grad_norm": 0.9701687693595886, + "learning_rate": 0.00017753066398960062, + "loss": 0.353, + "step": 8730 + }, + { + "epoch": 0.33746476697942007, + "grad_norm": 1.9296154975891113, + "learning_rate": 0.00017750492296999886, + "loss": 0.3776, + "step": 8740 + }, + { + "epoch": 0.33785088227344684, + "grad_norm": 1.2136276960372925, + "learning_rate": 0.00017747918195039705, + "loss": 0.5126, + "step": 8750 + }, + { + "epoch": 0.33823699756747366, + "grad_norm": 1.7323212623596191, + "learning_rate": 0.0001774534409307953, + "loss": 0.3477, + "step": 8760 + }, + { + "epoch": 0.33862311286150043, + "grad_norm": 1.164534091949463, + "learning_rate": 0.0001774276999111935, + "loss": 0.4053, + "step": 8770 + }, + { + "epoch": 0.33900922815552725, + "grad_norm": 0.42989471554756165, + "learning_rate": 0.0001774019588915917, + "loss": 0.3026, + "step": 8780 + }, + { + "epoch": 0.339395343449554, + "grad_norm": 2.357590436935425, + "learning_rate": 0.00017737621787198993, + "loss": 0.3869, + "step": 8790 + }, + { + "epoch": 0.33978145874358084, + "grad_norm": 1.9374550580978394, + "learning_rate": 0.00017735047685238812, + "loss": 0.2975, + "step": 8800 + }, + { + "epoch": 0.3401675740376076, + "grad_norm": 4.8107428550720215, + "learning_rate": 0.00017732473583278636, + "loss": 0.3959, + "step": 8810 + }, + { + "epoch": 0.34055368933163443, + "grad_norm": 1.938700556755066, + "learning_rate": 0.00017729899481318454, + "loss": 0.3726, + "step": 8820 + }, + { + "epoch": 0.3409398046256612, + "grad_norm": 3.147167682647705, + "learning_rate": 0.00017727325379358278, + "loss": 0.1828, + "step": 8830 + }, + { + "epoch": 0.341325919919688, + "grad_norm": 1.8921313285827637, + "learning_rate": 0.000177247512773981, + "loss": 0.2038, + "step": 8840 + }, + { + "epoch": 0.3417120352137148, + "grad_norm": 0.9098349213600159, + "learning_rate": 0.00017722177175437918, + "loss": 0.2853, + "step": 8850 + }, + { + "epoch": 0.3420981505077416, + "grad_norm": 2.4006853103637695, + "learning_rate": 0.00017719603073477742, + "loss": 0.6054, + "step": 8860 + }, + { + "epoch": 0.3424842658017684, + "grad_norm": 1.9303867816925049, + "learning_rate": 0.0001771702897151756, + "loss": 0.3507, + "step": 8870 + }, + { + "epoch": 0.3428703810957952, + "grad_norm": 0.49361029267311096, + "learning_rate": 0.00017714454869557385, + "loss": 0.4661, + "step": 8880 + }, + { + "epoch": 0.343256496389822, + "grad_norm": 2.542618751525879, + "learning_rate": 0.00017711880767597204, + "loss": 0.6924, + "step": 8890 + }, + { + "epoch": 0.3436426116838488, + "grad_norm": 0.5868918895721436, + "learning_rate": 0.00017709306665637028, + "loss": 0.4507, + "step": 8900 + }, + { + "epoch": 0.3440287269778756, + "grad_norm": 2.4685137271881104, + "learning_rate": 0.0001770673256367685, + "loss": 0.4538, + "step": 8910 + }, + { + "epoch": 0.3444148422719024, + "grad_norm": 2.6662702560424805, + "learning_rate": 0.00017704158461716668, + "loss": 0.6181, + "step": 8920 + }, + { + "epoch": 0.3448009575659292, + "grad_norm": 1.705103874206543, + "learning_rate": 0.00017701584359756492, + "loss": 0.481, + "step": 8930 + }, + { + "epoch": 0.345187072859956, + "grad_norm": 2.0710952281951904, + "learning_rate": 0.0001769901025779631, + "loss": 0.4357, + "step": 8940 + }, + { + "epoch": 0.3455731881539828, + "grad_norm": 3.487117290496826, + "learning_rate": 0.00017696436155836134, + "loss": 0.4572, + "step": 8950 + }, + { + "epoch": 
0.34595930344800957, + "grad_norm": 3.03472900390625, + "learning_rate": 0.00017693862053875956, + "loss": 0.4437, + "step": 8960 + }, + { + "epoch": 0.3463454187420364, + "grad_norm": 1.310692548751831, + "learning_rate": 0.00017691287951915777, + "loss": 0.4218, + "step": 8970 + }, + { + "epoch": 0.34673153403606316, + "grad_norm": 4.131219387054443, + "learning_rate": 0.00017688713849955598, + "loss": 0.445, + "step": 8980 + }, + { + "epoch": 0.34711764933009, + "grad_norm": 1.4199285507202148, + "learning_rate": 0.00017686139747995417, + "loss": 0.3093, + "step": 8990 + }, + { + "epoch": 0.34750376462411675, + "grad_norm": 1.9338914155960083, + "learning_rate": 0.0001768356564603524, + "loss": 0.275, + "step": 9000 + }, + { + "epoch": 0.3478898799181436, + "grad_norm": 3.425877332687378, + "learning_rate": 0.0001768099154407506, + "loss": 0.4621, + "step": 9010 + }, + { + "epoch": 0.34827599521217034, + "grad_norm": 3.6350486278533936, + "learning_rate": 0.00017678417442114884, + "loss": 0.3313, + "step": 9020 + }, + { + "epoch": 0.34866211050619716, + "grad_norm": 3.339202880859375, + "learning_rate": 0.00017675843340154705, + "loss": 0.5958, + "step": 9030 + }, + { + "epoch": 0.34904822580022393, + "grad_norm": 4.148682117462158, + "learning_rate": 0.00017673269238194526, + "loss": 0.2384, + "step": 9040 + }, + { + "epoch": 0.34943434109425076, + "grad_norm": 0.9697182178497314, + "learning_rate": 0.00017670695136234348, + "loss": 0.3119, + "step": 9050 + }, + { + "epoch": 0.3498204563882775, + "grad_norm": 0.53201824426651, + "learning_rate": 0.0001766812103427417, + "loss": 0.4339, + "step": 9060 + }, + { + "epoch": 0.35020657168230435, + "grad_norm": 0.9727185368537903, + "learning_rate": 0.0001766554693231399, + "loss": 0.3289, + "step": 9070 + }, + { + "epoch": 0.3505926869763311, + "grad_norm": 4.32904052734375, + "learning_rate": 0.00017662972830353812, + "loss": 0.4673, + "step": 9080 + }, + { + "epoch": 0.35097880227035794, + "grad_norm": 2.511558771133423, + "learning_rate": 0.00017660398728393633, + "loss": 0.2257, + "step": 9090 + }, + { + "epoch": 0.3513649175643847, + "grad_norm": 1.8378714323043823, + "learning_rate": 0.00017657824626433454, + "loss": 0.3977, + "step": 9100 + }, + { + "epoch": 0.3517510328584115, + "grad_norm": 1.3297137022018433, + "learning_rate": 0.00017655250524473276, + "loss": 0.3541, + "step": 9110 + }, + { + "epoch": 0.3521371481524383, + "grad_norm": 3.253089666366577, + "learning_rate": 0.00017652676422513097, + "loss": 0.6326, + "step": 9120 + }, + { + "epoch": 0.3525232634464651, + "grad_norm": 0.9691923260688782, + "learning_rate": 0.00017650102320552918, + "loss": 0.2206, + "step": 9130 + }, + { + "epoch": 0.3529093787404919, + "grad_norm": 1.570204496383667, + "learning_rate": 0.0001764752821859274, + "loss": 0.2769, + "step": 9140 + }, + { + "epoch": 0.3532954940345187, + "grad_norm": 1.9307161569595337, + "learning_rate": 0.0001764495411663256, + "loss": 0.3149, + "step": 9150 + }, + { + "epoch": 0.3536816093285455, + "grad_norm": 2.783297300338745, + "learning_rate": 0.00017642380014672382, + "loss": 0.3912, + "step": 9160 + }, + { + "epoch": 0.3540677246225723, + "grad_norm": 2.193371057510376, + "learning_rate": 0.00017639805912712204, + "loss": 0.3782, + "step": 9170 + }, + { + "epoch": 0.3544538399165991, + "grad_norm": 2.3460335731506348, + "learning_rate": 0.00017637231810752025, + "loss": 0.5051, + "step": 9180 + }, + { + "epoch": 0.3548399552106259, + "grad_norm": 2.4668326377868652, + "learning_rate": 0.00017634657708791846, 
+ "loss": 0.2899, + "step": 9190 + }, + { + "epoch": 0.3552260705046527, + "grad_norm": 2.004683017730713, + "learning_rate": 0.00017632083606831668, + "loss": 0.3137, + "step": 9200 + }, + { + "epoch": 0.3556121857986795, + "grad_norm": 6.333971977233887, + "learning_rate": 0.0001762950950487149, + "loss": 0.5027, + "step": 9210 + }, + { + "epoch": 0.3559983010927063, + "grad_norm": 1.7840352058410645, + "learning_rate": 0.0001762693540291131, + "loss": 0.3988, + "step": 9220 + }, + { + "epoch": 0.35638441638673307, + "grad_norm": 0.9257024526596069, + "learning_rate": 0.00017624361300951132, + "loss": 0.3662, + "step": 9230 + }, + { + "epoch": 0.3567705316807599, + "grad_norm": 2.582887887954712, + "learning_rate": 0.00017621787198990953, + "loss": 0.2863, + "step": 9240 + }, + { + "epoch": 0.35715664697478666, + "grad_norm": 3.119943380355835, + "learning_rate": 0.00017619213097030774, + "loss": 0.4041, + "step": 9250 + }, + { + "epoch": 0.3575427622688135, + "grad_norm": 2.2561371326446533, + "learning_rate": 0.00017616638995070596, + "loss": 0.3969, + "step": 9260 + }, + { + "epoch": 0.35792887756284025, + "grad_norm": 2.104891538619995, + "learning_rate": 0.00017614064893110417, + "loss": 0.3216, + "step": 9270 + }, + { + "epoch": 0.3583149928568671, + "grad_norm": 1.6922805309295654, + "learning_rate": 0.00017611490791150238, + "loss": 0.3828, + "step": 9280 + }, + { + "epoch": 0.35870110815089384, + "grad_norm": 1.0928469896316528, + "learning_rate": 0.0001760891668919006, + "loss": 0.3225, + "step": 9290 + }, + { + "epoch": 0.35908722344492067, + "grad_norm": 2.4089863300323486, + "learning_rate": 0.0001760634258722988, + "loss": 0.4143, + "step": 9300 + }, + { + "epoch": 0.35947333873894743, + "grad_norm": 0.5562119483947754, + "learning_rate": 0.00017603768485269702, + "loss": 0.4597, + "step": 9310 + }, + { + "epoch": 0.35985945403297426, + "grad_norm": 1.3904486894607544, + "learning_rate": 0.00017601194383309524, + "loss": 0.4462, + "step": 9320 + }, + { + "epoch": 0.360245569327001, + "grad_norm": 2.1393306255340576, + "learning_rate": 0.00017598620281349345, + "loss": 0.2613, + "step": 9330 + }, + { + "epoch": 0.36063168462102785, + "grad_norm": 1.3657029867172241, + "learning_rate": 0.00017596046179389166, + "loss": 0.4968, + "step": 9340 + }, + { + "epoch": 0.3610177999150546, + "grad_norm": 2.424880027770996, + "learning_rate": 0.00017593472077428988, + "loss": 0.5982, + "step": 9350 + }, + { + "epoch": 0.36140391520908144, + "grad_norm": 6.178807735443115, + "learning_rate": 0.0001759089797546881, + "loss": 0.5355, + "step": 9360 + }, + { + "epoch": 0.3617900305031082, + "grad_norm": 1.5572419166564941, + "learning_rate": 0.0001758832387350863, + "loss": 0.4435, + "step": 9370 + }, + { + "epoch": 0.36217614579713503, + "grad_norm": 0.46649104356765747, + "learning_rate": 0.00017585749771548452, + "loss": 0.352, + "step": 9380 + }, + { + "epoch": 0.3625622610911618, + "grad_norm": 1.9611142873764038, + "learning_rate": 0.00017583175669588276, + "loss": 0.2684, + "step": 9390 + }, + { + "epoch": 0.3629483763851886, + "grad_norm": 1.7648595571517944, + "learning_rate": 0.00017580601567628094, + "loss": 0.3186, + "step": 9400 + }, + { + "epoch": 0.3633344916792154, + "grad_norm": 1.7970843315124512, + "learning_rate": 0.00017578027465667916, + "loss": 0.5339, + "step": 9410 + }, + { + "epoch": 0.3637206069732422, + "grad_norm": 3.084897994995117, + "learning_rate": 0.00017575453363707737, + "loss": 0.5143, + "step": 9420 + }, + { + "epoch": 0.36410672226726903, + 
"grad_norm": 1.440626621246338, + "learning_rate": 0.00017572879261747558, + "loss": 0.4067, + "step": 9430 + }, + { + "epoch": 0.3644928375612958, + "grad_norm": 0.44918450713157654, + "learning_rate": 0.0001757030515978738, + "loss": 0.2306, + "step": 9440 + }, + { + "epoch": 0.3648789528553226, + "grad_norm": 2.617272138595581, + "learning_rate": 0.000175677310578272, + "loss": 0.3166, + "step": 9450 + }, + { + "epoch": 0.3652650681493494, + "grad_norm": 2.575073719024658, + "learning_rate": 0.00017565156955867025, + "loss": 0.6645, + "step": 9460 + }, + { + "epoch": 0.3656511834433762, + "grad_norm": 0.9430664777755737, + "learning_rate": 0.00017562582853906844, + "loss": 0.2753, + "step": 9470 + }, + { + "epoch": 0.366037298737403, + "grad_norm": 1.9400445222854614, + "learning_rate": 0.00017560008751946665, + "loss": 0.4689, + "step": 9480 + }, + { + "epoch": 0.3664234140314298, + "grad_norm": 4.0443220138549805, + "learning_rate": 0.00017557434649986486, + "loss": 0.5373, + "step": 9490 + }, + { + "epoch": 0.3668095293254566, + "grad_norm": 3.4999184608459473, + "learning_rate": 0.00017554860548026308, + "loss": 0.3412, + "step": 9500 + }, + { + "epoch": 0.3671956446194834, + "grad_norm": 2.2023515701293945, + "learning_rate": 0.0001755228644606613, + "loss": 0.3385, + "step": 9510 + }, + { + "epoch": 0.36758175991351016, + "grad_norm": 1.213641881942749, + "learning_rate": 0.0001754971234410595, + "loss": 0.4785, + "step": 9520 + }, + { + "epoch": 0.367967875207537, + "grad_norm": 0.4178420603275299, + "learning_rate": 0.00017547138242145774, + "loss": 0.2605, + "step": 9530 + }, + { + "epoch": 0.36835399050156376, + "grad_norm": 2.676564931869507, + "learning_rate": 0.00017544564140185593, + "loss": 0.5297, + "step": 9540 + }, + { + "epoch": 0.3687401057955906, + "grad_norm": 0.8604353070259094, + "learning_rate": 0.00017541990038225414, + "loss": 0.3983, + "step": 9550 + }, + { + "epoch": 0.36912622108961735, + "grad_norm": 1.298893690109253, + "learning_rate": 0.00017539415936265236, + "loss": 0.3229, + "step": 9560 + }, + { + "epoch": 0.36951233638364417, + "grad_norm": 4.109025478363037, + "learning_rate": 0.00017536841834305057, + "loss": 0.519, + "step": 9570 + }, + { + "epoch": 0.36989845167767094, + "grad_norm": 3.440915584564209, + "learning_rate": 0.0001753426773234488, + "loss": 0.4061, + "step": 9580 + }, + { + "epoch": 0.37028456697169776, + "grad_norm": 0.1484186202287674, + "learning_rate": 0.000175316936303847, + "loss": 0.3443, + "step": 9590 + }, + { + "epoch": 0.3706706822657245, + "grad_norm": 3.114328145980835, + "learning_rate": 0.00017529119528424524, + "loss": 0.2451, + "step": 9600 + }, + { + "epoch": 0.37105679755975135, + "grad_norm": 1.8218796253204346, + "learning_rate": 0.00017526545426464342, + "loss": 0.2511, + "step": 9610 + }, + { + "epoch": 0.3714429128537781, + "grad_norm": 1.0732795000076294, + "learning_rate": 0.00017523971324504164, + "loss": 0.1581, + "step": 9620 + }, + { + "epoch": 0.37182902814780494, + "grad_norm": 1.0567959547042847, + "learning_rate": 0.00017521397222543985, + "loss": 0.1924, + "step": 9630 + }, + { + "epoch": 0.3722151434418317, + "grad_norm": 0.3467637896537781, + "learning_rate": 0.00017518823120583806, + "loss": 0.3571, + "step": 9640 + }, + { + "epoch": 0.37260125873585853, + "grad_norm": 2.6293838024139404, + "learning_rate": 0.0001751624901862363, + "loss": 0.3282, + "step": 9650 + }, + { + "epoch": 0.3729873740298853, + "grad_norm": 1.159696102142334, + "learning_rate": 0.0001751367491666345, + "loss": 0.2636, 
+ "step": 9660 + }, + { + "epoch": 0.3733734893239121, + "grad_norm": 0.6884826421737671, + "learning_rate": 0.00017511100814703273, + "loss": 0.2842, + "step": 9670 + }, + { + "epoch": 0.3737596046179389, + "grad_norm": 3.789825201034546, + "learning_rate": 0.00017508526712743091, + "loss": 0.599, + "step": 9680 + }, + { + "epoch": 0.3741457199119657, + "grad_norm": 1.0705493688583374, + "learning_rate": 0.00017505952610782913, + "loss": 0.1746, + "step": 9690 + }, + { + "epoch": 0.37453183520599254, + "grad_norm": 1.8735803365707397, + "learning_rate": 0.00017503378508822734, + "loss": 0.3259, + "step": 9700 + }, + { + "epoch": 0.3749179505000193, + "grad_norm": 1.2987112998962402, + "learning_rate": 0.00017500804406862555, + "loss": 0.5738, + "step": 9710 + }, + { + "epoch": 0.3753040657940461, + "grad_norm": 1.5362507104873657, + "learning_rate": 0.0001749823030490238, + "loss": 0.3815, + "step": 9720 + }, + { + "epoch": 0.3756901810880729, + "grad_norm": 0.1640123724937439, + "learning_rate": 0.00017495656202942198, + "loss": 0.3672, + "step": 9730 + }, + { + "epoch": 0.3760762963820997, + "grad_norm": 0.6714594960212708, + "learning_rate": 0.00017493082100982022, + "loss": 0.2849, + "step": 9740 + }, + { + "epoch": 0.3764624116761265, + "grad_norm": 4.330246448516846, + "learning_rate": 0.0001749050799902184, + "loss": 0.4023, + "step": 9750 + }, + { + "epoch": 0.3768485269701533, + "grad_norm": 0.8616659641265869, + "learning_rate": 0.00017487933897061665, + "loss": 0.4434, + "step": 9760 + }, + { + "epoch": 0.3772346422641801, + "grad_norm": 2.6581578254699707, + "learning_rate": 0.00017485359795101486, + "loss": 0.4854, + "step": 9770 + }, + { + "epoch": 0.3776207575582069, + "grad_norm": 1.8269850015640259, + "learning_rate": 0.00017482785693141305, + "loss": 0.6033, + "step": 9780 + }, + { + "epoch": 0.37800687285223367, + "grad_norm": 2.256073236465454, + "learning_rate": 0.0001748021159118113, + "loss": 0.5317, + "step": 9790 + }, + { + "epoch": 0.3783929881462605, + "grad_norm": 0.8793076872825623, + "learning_rate": 0.00017477637489220947, + "loss": 0.3883, + "step": 9800 + }, + { + "epoch": 0.37877910344028726, + "grad_norm": 1.71831214427948, + "learning_rate": 0.00017475063387260772, + "loss": 0.2473, + "step": 9810 + }, + { + "epoch": 0.3791652187343141, + "grad_norm": 3.4802069664001465, + "learning_rate": 0.0001747248928530059, + "loss": 0.4847, + "step": 9820 + }, + { + "epoch": 0.37955133402834085, + "grad_norm": 5.419053077697754, + "learning_rate": 0.00017469915183340414, + "loss": 0.3668, + "step": 9830 + }, + { + "epoch": 0.37993744932236767, + "grad_norm": 1.567060112953186, + "learning_rate": 0.00017467341081380236, + "loss": 0.3342, + "step": 9840 + }, + { + "epoch": 0.38032356461639444, + "grad_norm": 3.0100274085998535, + "learning_rate": 0.00017464766979420054, + "loss": 0.476, + "step": 9850 + }, + { + "epoch": 0.38070967991042126, + "grad_norm": 0.7659344673156738, + "learning_rate": 0.00017462192877459878, + "loss": 0.2608, + "step": 9860 + }, + { + "epoch": 0.38109579520444803, + "grad_norm": 3.9540984630584717, + "learning_rate": 0.00017459618775499697, + "loss": 0.763, + "step": 9870 + }, + { + "epoch": 0.38148191049847485, + "grad_norm": 0.8768689036369324, + "learning_rate": 0.0001745704467353952, + "loss": 0.3365, + "step": 9880 + }, + { + "epoch": 0.3818680257925016, + "grad_norm": 0.9985928535461426, + "learning_rate": 0.00017454470571579342, + "loss": 0.3116, + "step": 9890 + }, + { + "epoch": 0.38225414108652844, + "grad_norm": 
2.0326671600341797, + "learning_rate": 0.00017451896469619163, + "loss": 0.289, + "step": 9900 + }, + { + "epoch": 0.3826402563805552, + "grad_norm": 3.2696290016174316, + "learning_rate": 0.00017449322367658985, + "loss": 0.4097, + "step": 9910 + }, + { + "epoch": 0.38302637167458203, + "grad_norm": 3.048860788345337, + "learning_rate": 0.00017446748265698803, + "loss": 0.5181, + "step": 9920 + }, + { + "epoch": 0.3834124869686088, + "grad_norm": 1.7899913787841797, + "learning_rate": 0.00017444174163738627, + "loss": 0.2166, + "step": 9930 + }, + { + "epoch": 0.3837986022626356, + "grad_norm": 3.6762959957122803, + "learning_rate": 0.00017441600061778446, + "loss": 0.4971, + "step": 9940 + }, + { + "epoch": 0.3841847175566624, + "grad_norm": 0.9108519554138184, + "learning_rate": 0.0001743902595981827, + "loss": 0.4974, + "step": 9950 + }, + { + "epoch": 0.3845708328506892, + "grad_norm": 4.062527656555176, + "learning_rate": 0.00017436451857858091, + "loss": 0.4448, + "step": 9960 + }, + { + "epoch": 0.38495694814471604, + "grad_norm": 3.230902671813965, + "learning_rate": 0.00017433877755897913, + "loss": 0.2977, + "step": 9970 + }, + { + "epoch": 0.3853430634387428, + "grad_norm": 3.8190758228302, + "learning_rate": 0.00017431303653937734, + "loss": 0.4887, + "step": 9980 + }, + { + "epoch": 0.38572917873276963, + "grad_norm": 0.9079695343971252, + "learning_rate": 0.00017428729551977553, + "loss": 0.271, + "step": 9990 + }, + { + "epoch": 0.3861152940267964, + "grad_norm": 3.3730807304382324, + "learning_rate": 0.00017426155450017377, + "loss": 0.3782, + "step": 10000 + }, + { + "epoch": 0.3865014093208232, + "grad_norm": 1.07533860206604, + "learning_rate": 0.00017423581348057195, + "loss": 0.3905, + "step": 10010 + }, + { + "epoch": 0.38688752461485, + "grad_norm": 1.3856415748596191, + "learning_rate": 0.0001742100724609702, + "loss": 0.3757, + "step": 10020 + }, + { + "epoch": 0.3872736399088768, + "grad_norm": 5.751671314239502, + "learning_rate": 0.0001741843314413684, + "loss": 0.6657, + "step": 10030 + }, + { + "epoch": 0.3876597552029036, + "grad_norm": 0.6837680339813232, + "learning_rate": 0.00017415859042176662, + "loss": 0.2318, + "step": 10040 + }, + { + "epoch": 0.3880458704969304, + "grad_norm": 2.770787239074707, + "learning_rate": 0.00017413284940216483, + "loss": 0.3706, + "step": 10050 + }, + { + "epoch": 0.38843198579095717, + "grad_norm": 2.3058855533599854, + "learning_rate": 0.00017410710838256302, + "loss": 0.1641, + "step": 10060 + }, + { + "epoch": 0.388818101084984, + "grad_norm": 1.894718885421753, + "learning_rate": 0.00017408136736296126, + "loss": 0.4752, + "step": 10070 + }, + { + "epoch": 0.38920421637901076, + "grad_norm": 1.8346868753433228, + "learning_rate": 0.00017405562634335947, + "loss": 0.5007, + "step": 10080 + }, + { + "epoch": 0.3895903316730376, + "grad_norm": 5.277680397033691, + "learning_rate": 0.0001740298853237577, + "loss": 0.4399, + "step": 10090 + }, + { + "epoch": 0.38997644696706435, + "grad_norm": 1.306093692779541, + "learning_rate": 0.0001740041443041559, + "loss": 0.371, + "step": 10100 + }, + { + "epoch": 0.3903625622610912, + "grad_norm": 3.0306456089019775, + "learning_rate": 0.00017397840328455411, + "loss": 0.2515, + "step": 10110 + }, + { + "epoch": 0.39074867755511794, + "grad_norm": 0.7951543927192688, + "learning_rate": 0.00017395266226495233, + "loss": 0.3775, + "step": 10120 + }, + { + "epoch": 0.39113479284914476, + "grad_norm": 5.185150146484375, + "learning_rate": 0.00017392692124535051, + "loss": 0.3591, + 
"step": 10130 + }, + { + "epoch": 0.39152090814317153, + "grad_norm": 1.1718593835830688, + "learning_rate": 0.00017390118022574875, + "loss": 0.5484, + "step": 10140 + }, + { + "epoch": 0.39190702343719835, + "grad_norm": 1.6352128982543945, + "learning_rate": 0.00017387543920614697, + "loss": 0.2817, + "step": 10150 + }, + { + "epoch": 0.3922931387312251, + "grad_norm": 2.4863786697387695, + "learning_rate": 0.00017384969818654518, + "loss": 0.4027, + "step": 10160 + }, + { + "epoch": 0.39267925402525194, + "grad_norm": 2.069805383682251, + "learning_rate": 0.0001738239571669434, + "loss": 0.3559, + "step": 10170 + }, + { + "epoch": 0.3930653693192787, + "grad_norm": 1.671980619430542, + "learning_rate": 0.0001737982161473416, + "loss": 0.4405, + "step": 10180 + }, + { + "epoch": 0.39345148461330554, + "grad_norm": 4.298947334289551, + "learning_rate": 0.00017377247512773982, + "loss": 0.3005, + "step": 10190 + }, + { + "epoch": 0.3938375999073323, + "grad_norm": 0.4142851233482361, + "learning_rate": 0.000173746734108138, + "loss": 0.4248, + "step": 10200 + }, + { + "epoch": 0.3942237152013591, + "grad_norm": 3.5962865352630615, + "learning_rate": 0.00017372099308853625, + "loss": 0.27, + "step": 10210 + }, + { + "epoch": 0.39460983049538595, + "grad_norm": 2.20154070854187, + "learning_rate": 0.00017369525206893446, + "loss": 0.2858, + "step": 10220 + }, + { + "epoch": 0.3949959457894127, + "grad_norm": 0.2400553673505783, + "learning_rate": 0.00017366951104933267, + "loss": 0.2806, + "step": 10230 + }, + { + "epoch": 0.39538206108343954, + "grad_norm": 1.817741870880127, + "learning_rate": 0.0001736437700297309, + "loss": 0.3647, + "step": 10240 + }, + { + "epoch": 0.3957681763774663, + "grad_norm": 4.890044689178467, + "learning_rate": 0.0001736180290101291, + "loss": 0.4435, + "step": 10250 + }, + { + "epoch": 0.39615429167149313, + "grad_norm": 0.3407624065876007, + "learning_rate": 0.00017359228799052731, + "loss": 0.4857, + "step": 10260 + }, + { + "epoch": 0.3965404069655199, + "grad_norm": 2.4883463382720947, + "learning_rate": 0.00017356654697092553, + "loss": 0.2667, + "step": 10270 + }, + { + "epoch": 0.3969265222595467, + "grad_norm": 2.343823194503784, + "learning_rate": 0.00017354080595132374, + "loss": 0.3711, + "step": 10280 + }, + { + "epoch": 0.3973126375535735, + "grad_norm": 0.2056214064359665, + "learning_rate": 0.00017351506493172195, + "loss": 0.2695, + "step": 10290 + }, + { + "epoch": 0.3976987528476003, + "grad_norm": 0.20321065187454224, + "learning_rate": 0.00017348932391212017, + "loss": 0.3079, + "step": 10300 + }, + { + "epoch": 0.3980848681416271, + "grad_norm": 0.7993821501731873, + "learning_rate": 0.00017346358289251838, + "loss": 0.3599, + "step": 10310 + }, + { + "epoch": 0.3984709834356539, + "grad_norm": 2.0987348556518555, + "learning_rate": 0.0001734378418729166, + "loss": 0.3259, + "step": 10320 + }, + { + "epoch": 0.39885709872968067, + "grad_norm": 2.474246025085449, + "learning_rate": 0.0001734121008533148, + "loss": 0.3398, + "step": 10330 + }, + { + "epoch": 0.3992432140237075, + "grad_norm": 2.341064214706421, + "learning_rate": 0.00017338635983371302, + "loss": 0.5264, + "step": 10340 + }, + { + "epoch": 0.39962932931773426, + "grad_norm": 1.587437629699707, + "learning_rate": 0.00017336061881411123, + "loss": 0.4228, + "step": 10350 + }, + { + "epoch": 0.4000154446117611, + "grad_norm": 0.6692029237747192, + "learning_rate": 0.00017333487779450945, + "loss": 0.3576, + "step": 10360 + }, + { + "epoch": 0.40040155990578785, + "grad_norm": 
2.088212251663208, + "learning_rate": 0.00017330913677490766, + "loss": 0.3096, + "step": 10370 + }, + { + "epoch": 0.4007876751998147, + "grad_norm": 1.5051954984664917, + "learning_rate": 0.00017328339575530587, + "loss": 0.3753, + "step": 10380 + }, + { + "epoch": 0.40117379049384144, + "grad_norm": 2.02595591545105, + "learning_rate": 0.0001732576547357041, + "loss": 0.3339, + "step": 10390 + }, + { + "epoch": 0.40155990578786827, + "grad_norm": 1.3062909841537476, + "learning_rate": 0.0001732319137161023, + "loss": 0.4301, + "step": 10400 + }, + { + "epoch": 0.40194602108189503, + "grad_norm": 2.5890421867370605, + "learning_rate": 0.00017320617269650051, + "loss": 0.3047, + "step": 10410 + }, + { + "epoch": 0.40233213637592186, + "grad_norm": 1.5994844436645508, + "learning_rate": 0.00017318043167689873, + "loss": 0.4158, + "step": 10420 + }, + { + "epoch": 0.4027182516699486, + "grad_norm": 0.5470211505889893, + "learning_rate": 0.00017315469065729694, + "loss": 0.4513, + "step": 10430 + }, + { + "epoch": 0.40310436696397545, + "grad_norm": 2.216935634613037, + "learning_rate": 0.00017312894963769515, + "loss": 0.5123, + "step": 10440 + }, + { + "epoch": 0.4034904822580022, + "grad_norm": 2.354724645614624, + "learning_rate": 0.00017310320861809337, + "loss": 0.2804, + "step": 10450 + }, + { + "epoch": 0.40387659755202904, + "grad_norm": 4.514159202575684, + "learning_rate": 0.00017307746759849158, + "loss": 0.3317, + "step": 10460 + }, + { + "epoch": 0.4042627128460558, + "grad_norm": 0.9874318242073059, + "learning_rate": 0.0001730517265788898, + "loss": 0.1948, + "step": 10470 + }, + { + "epoch": 0.40464882814008263, + "grad_norm": 2.0725696086883545, + "learning_rate": 0.000173025985559288, + "loss": 0.3627, + "step": 10480 + }, + { + "epoch": 0.40503494343410945, + "grad_norm": 2.4061577320098877, + "learning_rate": 0.00017300024453968622, + "loss": 0.3074, + "step": 10490 + }, + { + "epoch": 0.4054210587281362, + "grad_norm": 1.3369660377502441, + "learning_rate": 0.00017297450352008443, + "loss": 0.533, + "step": 10500 + }, + { + "epoch": 0.40580717402216304, + "grad_norm": 1.2730306386947632, + "learning_rate": 0.00017294876250048265, + "loss": 0.4688, + "step": 10510 + }, + { + "epoch": 0.4061932893161898, + "grad_norm": 0.6753021478652954, + "learning_rate": 0.00017292302148088086, + "loss": 0.4427, + "step": 10520 + }, + { + "epoch": 0.40657940461021663, + "grad_norm": 1.7279945611953735, + "learning_rate": 0.0001728972804612791, + "loss": 0.4921, + "step": 10530 + }, + { + "epoch": 0.4069655199042434, + "grad_norm": 0.9288708567619324, + "learning_rate": 0.0001728715394416773, + "loss": 0.363, + "step": 10540 + }, + { + "epoch": 0.4073516351982702, + "grad_norm": 0.5325084924697876, + "learning_rate": 0.0001728457984220755, + "loss": 0.4095, + "step": 10550 + }, + { + "epoch": 0.407737750492297, + "grad_norm": 1.2030489444732666, + "learning_rate": 0.0001728200574024737, + "loss": 0.3499, + "step": 10560 + }, + { + "epoch": 0.4081238657863238, + "grad_norm": 3.8157269954681396, + "learning_rate": 0.00017279431638287193, + "loss": 0.1622, + "step": 10570 + }, + { + "epoch": 0.4085099810803506, + "grad_norm": 0.6373336911201477, + "learning_rate": 0.00017276857536327017, + "loss": 0.4657, + "step": 10580 + }, + { + "epoch": 0.4088960963743774, + "grad_norm": 2.2850074768066406, + "learning_rate": 0.00017274283434366835, + "loss": 0.3585, + "step": 10590 + }, + { + "epoch": 0.40928221166840417, + "grad_norm": 0.8831659555435181, + "learning_rate": 0.0001727170933240666, + 
"loss": 0.293, + "step": 10600 + }, + { + "epoch": 0.409668326962431, + "grad_norm": 5.1165995597839355, + "learning_rate": 0.00017269135230446478, + "loss": 0.6539, + "step": 10610 + }, + { + "epoch": 0.41005444225645776, + "grad_norm": 4.901204586029053, + "learning_rate": 0.000172665611284863, + "loss": 0.4628, + "step": 10620 + }, + { + "epoch": 0.4104405575504846, + "grad_norm": 2.1492419242858887, + "learning_rate": 0.0001726398702652612, + "loss": 0.277, + "step": 10630 + }, + { + "epoch": 0.41082667284451135, + "grad_norm": 3.56510853767395, + "learning_rate": 0.00017261412924565942, + "loss": 0.4696, + "step": 10640 + }, + { + "epoch": 0.4112127881385382, + "grad_norm": 2.054769992828369, + "learning_rate": 0.00017258838822605766, + "loss": 0.4093, + "step": 10650 + }, + { + "epoch": 0.41159890343256494, + "grad_norm": 2.133474826812744, + "learning_rate": 0.00017256264720645585, + "loss": 0.3604, + "step": 10660 + }, + { + "epoch": 0.41198501872659177, + "grad_norm": 2.5062367916107178, + "learning_rate": 0.0001725369061868541, + "loss": 0.3916, + "step": 10670 + }, + { + "epoch": 0.41237113402061853, + "grad_norm": 0.431570827960968, + "learning_rate": 0.00017251116516725227, + "loss": 0.4048, + "step": 10680 + }, + { + "epoch": 0.41275724931464536, + "grad_norm": 1.2092580795288086, + "learning_rate": 0.0001724854241476505, + "loss": 0.602, + "step": 10690 + }, + { + "epoch": 0.4131433646086721, + "grad_norm": 2.712398052215576, + "learning_rate": 0.00017245968312804873, + "loss": 0.4172, + "step": 10700 + }, + { + "epoch": 0.41352947990269895, + "grad_norm": 3.914670467376709, + "learning_rate": 0.0001724339421084469, + "loss": 0.3843, + "step": 10710 + }, + { + "epoch": 0.4139155951967257, + "grad_norm": 1.7062132358551025, + "learning_rate": 0.00017240820108884515, + "loss": 0.343, + "step": 10720 + }, + { + "epoch": 0.41430171049075254, + "grad_norm": 0.5837095379829407, + "learning_rate": 0.00017238246006924334, + "loss": 0.3872, + "step": 10730 + }, + { + "epoch": 0.41468782578477936, + "grad_norm": 1.098900556564331, + "learning_rate": 0.00017235671904964158, + "loss": 0.2062, + "step": 10740 + }, + { + "epoch": 0.41507394107880613, + "grad_norm": 1.2533438205718994, + "learning_rate": 0.00017233097803003977, + "loss": 0.141, + "step": 10750 + }, + { + "epoch": 0.41546005637283295, + "grad_norm": 0.8688085079193115, + "learning_rate": 0.00017230523701043798, + "loss": 0.3686, + "step": 10760 + }, + { + "epoch": 0.4158461716668597, + "grad_norm": 1.868402361869812, + "learning_rate": 0.00017227949599083622, + "loss": 0.449, + "step": 10770 + }, + { + "epoch": 0.41623228696088654, + "grad_norm": 0.7168850898742676, + "learning_rate": 0.0001722537549712344, + "loss": 0.2317, + "step": 10780 + }, + { + "epoch": 0.4166184022549133, + "grad_norm": 3.1062309741973877, + "learning_rate": 0.00017222801395163265, + "loss": 0.4655, + "step": 10790 + }, + { + "epoch": 0.41700451754894013, + "grad_norm": 2.7296605110168457, + "learning_rate": 0.00017220227293203083, + "loss": 0.3934, + "step": 10800 + }, + { + "epoch": 0.4173906328429669, + "grad_norm": 2.3148224353790283, + "learning_rate": 0.00017217653191242907, + "loss": 0.2367, + "step": 10810 + }, + { + "epoch": 0.4177767481369937, + "grad_norm": 0.7049677968025208, + "learning_rate": 0.00017215079089282726, + "loss": 0.3157, + "step": 10820 + }, + { + "epoch": 0.4181628634310205, + "grad_norm": 3.3960344791412354, + "learning_rate": 0.00017212504987322547, + "loss": 0.4945, + "step": 10830 + }, + { + "epoch": 
0.4185489787250473, + "grad_norm": 2.606316566467285, + "learning_rate": 0.0001720993088536237, + "loss": 0.4056, + "step": 10840 + }, + { + "epoch": 0.4189350940190741, + "grad_norm": 1.7469319105148315, + "learning_rate": 0.0001720735678340219, + "loss": 0.4176, + "step": 10850 + }, + { + "epoch": 0.4193212093131009, + "grad_norm": 0.8538552522659302, + "learning_rate": 0.00017204782681442014, + "loss": 0.3025, + "step": 10860 + }, + { + "epoch": 0.4197073246071277, + "grad_norm": 1.9576159715652466, + "learning_rate": 0.00017202208579481833, + "loss": 0.5626, + "step": 10870 + }, + { + "epoch": 0.4200934399011545, + "grad_norm": 0.8435356616973877, + "learning_rate": 0.00017199634477521657, + "loss": 0.2397, + "step": 10880 + }, + { + "epoch": 0.42047955519518126, + "grad_norm": 1.3026552200317383, + "learning_rate": 0.00017197060375561478, + "loss": 0.4793, + "step": 10890 + }, + { + "epoch": 0.4208656704892081, + "grad_norm": 1.8935116529464722, + "learning_rate": 0.00017194486273601297, + "loss": 0.2459, + "step": 10900 + }, + { + "epoch": 0.42125178578323486, + "grad_norm": 0.7297415137290955, + "learning_rate": 0.0001719191217164112, + "loss": 0.4115, + "step": 10910 + }, + { + "epoch": 0.4216379010772617, + "grad_norm": 2.730445146560669, + "learning_rate": 0.0001718933806968094, + "loss": 0.3467, + "step": 10920 + }, + { + "epoch": 0.42202401637128845, + "grad_norm": 1.5462249517440796, + "learning_rate": 0.00017186763967720763, + "loss": 0.2319, + "step": 10930 + }, + { + "epoch": 0.42241013166531527, + "grad_norm": 2.173388957977295, + "learning_rate": 0.00017184189865760582, + "loss": 0.3664, + "step": 10940 + }, + { + "epoch": 0.42279624695934204, + "grad_norm": 0.9086957573890686, + "learning_rate": 0.00017181615763800406, + "loss": 0.3928, + "step": 10950 + }, + { + "epoch": 0.42318236225336886, + "grad_norm": 1.6344754695892334, + "learning_rate": 0.00017179041661840227, + "loss": 0.32, + "step": 10960 + }, + { + "epoch": 0.4235684775473956, + "grad_norm": 3.7620887756347656, + "learning_rate": 0.00017176467559880049, + "loss": 0.3998, + "step": 10970 + }, + { + "epoch": 0.42395459284142245, + "grad_norm": 2.3914058208465576, + "learning_rate": 0.0001717389345791987, + "loss": 0.3003, + "step": 10980 + }, + { + "epoch": 0.4243407081354492, + "grad_norm": 1.1183325052261353, + "learning_rate": 0.00017171319355959689, + "loss": 0.2408, + "step": 10990 + }, + { + "epoch": 0.42472682342947604, + "grad_norm": 1.5570834875106812, + "learning_rate": 0.00017168745253999513, + "loss": 0.2638, + "step": 11000 + }, + { + "epoch": 0.42511293872350286, + "grad_norm": 1.4825866222381592, + "learning_rate": 0.0001716617115203933, + "loss": 0.2774, + "step": 11010 + }, + { + "epoch": 0.42549905401752963, + "grad_norm": 1.5424071550369263, + "learning_rate": 0.00017163597050079155, + "loss": 0.3636, + "step": 11020 + }, + { + "epoch": 0.42588516931155646, + "grad_norm": 3.9182989597320557, + "learning_rate": 0.00017161022948118977, + "loss": 0.5319, + "step": 11030 + }, + { + "epoch": 0.4262712846055832, + "grad_norm": 3.7870359420776367, + "learning_rate": 0.00017158448846158798, + "loss": 0.316, + "step": 11040 + }, + { + "epoch": 0.42665739989961005, + "grad_norm": 3.6943869590759277, + "learning_rate": 0.0001715587474419862, + "loss": 0.4336, + "step": 11050 + }, + { + "epoch": 0.4270435151936368, + "grad_norm": 1.1681898832321167, + "learning_rate": 0.00017153300642238438, + "loss": 0.328, + "step": 11060 + }, + { + "epoch": 0.42742963048766364, + "grad_norm": 3.6428277492523193, + 
"learning_rate": 0.00017150726540278262, + "loss": 0.2609, + "step": 11070 + }, + { + "epoch": 0.4278157457816904, + "grad_norm": 0.8946434259414673, + "learning_rate": 0.00017148152438318083, + "loss": 0.5445, + "step": 11080 + }, + { + "epoch": 0.4282018610757172, + "grad_norm": 1.8038333654403687, + "learning_rate": 0.00017145578336357905, + "loss": 0.3548, + "step": 11090 + }, + { + "epoch": 0.428587976369744, + "grad_norm": 2.3430778980255127, + "learning_rate": 0.00017143004234397726, + "loss": 0.4831, + "step": 11100 + }, + { + "epoch": 0.4289740916637708, + "grad_norm": 1.3243132829666138, + "learning_rate": 0.00017140430132437547, + "loss": 0.5759, + "step": 11110 + }, + { + "epoch": 0.4293602069577976, + "grad_norm": 1.2575668096542358, + "learning_rate": 0.00017137856030477369, + "loss": 0.3289, + "step": 11120 + }, + { + "epoch": 0.4297463222518244, + "grad_norm": 1.5581884384155273, + "learning_rate": 0.00017135281928517187, + "loss": 0.309, + "step": 11130 + }, + { + "epoch": 0.4301324375458512, + "grad_norm": 2.736063241958618, + "learning_rate": 0.0001713270782655701, + "loss": 0.3964, + "step": 11140 + }, + { + "epoch": 0.430518552839878, + "grad_norm": 1.3930561542510986, + "learning_rate": 0.00017130133724596833, + "loss": 0.2277, + "step": 11150 + }, + { + "epoch": 0.43090466813390477, + "grad_norm": 1.7932826280593872, + "learning_rate": 0.00017127559622636654, + "loss": 0.2907, + "step": 11160 + }, + { + "epoch": 0.4312907834279316, + "grad_norm": 1.7017295360565186, + "learning_rate": 0.00017124985520676475, + "loss": 0.2986, + "step": 11170 + }, + { + "epoch": 0.43167689872195836, + "grad_norm": 1.3543587923049927, + "learning_rate": 0.00017122411418716297, + "loss": 0.4872, + "step": 11180 + }, + { + "epoch": 0.4320630140159852, + "grad_norm": 2.3927829265594482, + "learning_rate": 0.00017119837316756118, + "loss": 0.5281, + "step": 11190 + }, + { + "epoch": 0.43244912931001195, + "grad_norm": 2.916257619857788, + "learning_rate": 0.0001711726321479594, + "loss": 0.5364, + "step": 11200 + }, + { + "epoch": 0.43283524460403877, + "grad_norm": 4.717250823974609, + "learning_rate": 0.0001711468911283576, + "loss": 0.3744, + "step": 11210 + }, + { + "epoch": 0.43322135989806554, + "grad_norm": 1.806577444076538, + "learning_rate": 0.00017112115010875582, + "loss": 0.3377, + "step": 11220 + }, + { + "epoch": 0.43360747519209236, + "grad_norm": 3.186603546142578, + "learning_rate": 0.00017109540908915403, + "loss": 0.3421, + "step": 11230 + }, + { + "epoch": 0.43399359048611913, + "grad_norm": 5.388319969177246, + "learning_rate": 0.00017106966806955225, + "loss": 0.3589, + "step": 11240 + }, + { + "epoch": 0.43437970578014595, + "grad_norm": 1.368312954902649, + "learning_rate": 0.00017104392704995046, + "loss": 0.2677, + "step": 11250 + }, + { + "epoch": 0.4347658210741727, + "grad_norm": 0.9010117053985596, + "learning_rate": 0.00017101818603034867, + "loss": 0.3411, + "step": 11260 + }, + { + "epoch": 0.43515193636819954, + "grad_norm": 0.46370139718055725, + "learning_rate": 0.00017099244501074689, + "loss": 0.3531, + "step": 11270 + }, + { + "epoch": 0.43553805166222637, + "grad_norm": 2.778857469558716, + "learning_rate": 0.0001709667039911451, + "loss": 0.3953, + "step": 11280 + }, + { + "epoch": 0.43592416695625313, + "grad_norm": 0.45829036831855774, + "learning_rate": 0.0001709409629715433, + "loss": 0.3117, + "step": 11290 + }, + { + "epoch": 0.43631028225027996, + "grad_norm": 2.2053589820861816, + "learning_rate": 0.00017091522195194153, + "loss": 0.5104, + 
"step": 11300 + }, + { + "epoch": 0.4366963975443067, + "grad_norm": 5.166933059692383, + "learning_rate": 0.00017088948093233974, + "loss": 0.6913, + "step": 11310 + }, + { + "epoch": 0.43708251283833355, + "grad_norm": 1.5593189001083374, + "learning_rate": 0.00017086373991273795, + "loss": 0.305, + "step": 11320 + }, + { + "epoch": 0.4374686281323603, + "grad_norm": 2.01481556892395, + "learning_rate": 0.00017083799889313617, + "loss": 0.4576, + "step": 11330 + }, + { + "epoch": 0.43785474342638714, + "grad_norm": 2.200463056564331, + "learning_rate": 0.00017081225787353438, + "loss": 0.6491, + "step": 11340 + }, + { + "epoch": 0.4382408587204139, + "grad_norm": 0.8904009461402893, + "learning_rate": 0.0001707865168539326, + "loss": 0.134, + "step": 11350 + }, + { + "epoch": 0.43862697401444073, + "grad_norm": 0.7481307983398438, + "learning_rate": 0.0001707607758343308, + "loss": 0.5552, + "step": 11360 + }, + { + "epoch": 0.4390130893084675, + "grad_norm": 1.0893138647079468, + "learning_rate": 0.00017073503481472902, + "loss": 0.2369, + "step": 11370 + }, + { + "epoch": 0.4393992046024943, + "grad_norm": 3.3567726612091064, + "learning_rate": 0.00017070929379512723, + "loss": 0.1916, + "step": 11380 + }, + { + "epoch": 0.4397853198965211, + "grad_norm": 0.5970168709754944, + "learning_rate": 0.00017068355277552545, + "loss": 0.6096, + "step": 11390 + }, + { + "epoch": 0.4401714351905479, + "grad_norm": 2.880949020385742, + "learning_rate": 0.00017065781175592366, + "loss": 0.3566, + "step": 11400 + }, + { + "epoch": 0.4405575504845747, + "grad_norm": 0.5541375279426575, + "learning_rate": 0.00017063207073632187, + "loss": 0.4147, + "step": 11410 + }, + { + "epoch": 0.4409436657786015, + "grad_norm": 4.519477844238281, + "learning_rate": 0.00017060632971672009, + "loss": 0.4912, + "step": 11420 + }, + { + "epoch": 0.44132978107262827, + "grad_norm": 1.9959009885787964, + "learning_rate": 0.0001705805886971183, + "loss": 0.295, + "step": 11430 + }, + { + "epoch": 0.4417158963666551, + "grad_norm": 3.843033790588379, + "learning_rate": 0.0001705548476775165, + "loss": 0.2451, + "step": 11440 + }, + { + "epoch": 0.44210201166068186, + "grad_norm": 1.0480101108551025, + "learning_rate": 0.00017052910665791473, + "loss": 0.3113, + "step": 11450 + }, + { + "epoch": 0.4424881269547087, + "grad_norm": 2.0960068702697754, + "learning_rate": 0.00017050336563831294, + "loss": 0.5959, + "step": 11460 + }, + { + "epoch": 0.44287424224873545, + "grad_norm": 1.3062267303466797, + "learning_rate": 0.00017047762461871115, + "loss": 0.3812, + "step": 11470 + }, + { + "epoch": 0.4432603575427623, + "grad_norm": 2.2563138008117676, + "learning_rate": 0.00017045188359910937, + "loss": 0.303, + "step": 11480 + }, + { + "epoch": 0.44364647283678904, + "grad_norm": 1.518556833267212, + "learning_rate": 0.00017042614257950758, + "loss": 0.4349, + "step": 11490 + }, + { + "epoch": 0.44403258813081586, + "grad_norm": 5.704294681549072, + "learning_rate": 0.0001704004015599058, + "loss": 0.3162, + "step": 11500 + }, + { + "epoch": 0.44441870342484263, + "grad_norm": 3.3146274089813232, + "learning_rate": 0.000170374660540304, + "loss": 0.644, + "step": 11510 + }, + { + "epoch": 0.44480481871886945, + "grad_norm": 2.285374879837036, + "learning_rate": 0.00017034891952070222, + "loss": 0.3718, + "step": 11520 + }, + { + "epoch": 0.4451909340128963, + "grad_norm": 0.07299748063087463, + "learning_rate": 0.00017032317850110043, + "loss": 0.3093, + "step": 11530 + }, + { + "epoch": 0.44557704930692305, + "grad_norm": 
4.159457683563232, + "learning_rate": 0.00017029743748149865, + "loss": 0.4074, + "step": 11540 + }, + { + "epoch": 0.44596316460094987, + "grad_norm": 2.2241604328155518, + "learning_rate": 0.00017027169646189686, + "loss": 0.2908, + "step": 11550 + }, + { + "epoch": 0.44634927989497664, + "grad_norm": 2.2086968421936035, + "learning_rate": 0.00017024595544229507, + "loss": 0.3352, + "step": 11560 + }, + { + "epoch": 0.44673539518900346, + "grad_norm": 0.2843379080295563, + "learning_rate": 0.00017022021442269329, + "loss": 0.2548, + "step": 11570 + }, + { + "epoch": 0.4471215104830302, + "grad_norm": 0.9805948734283447, + "learning_rate": 0.00017019447340309153, + "loss": 0.4134, + "step": 11580 + }, + { + "epoch": 0.44750762577705705, + "grad_norm": 2.3669955730438232, + "learning_rate": 0.0001701687323834897, + "loss": 0.3803, + "step": 11590 + }, + { + "epoch": 0.4478937410710838, + "grad_norm": 2.4062774181365967, + "learning_rate": 0.00017014299136388792, + "loss": 0.4391, + "step": 11600 + }, + { + "epoch": 0.44827985636511064, + "grad_norm": 0.4205828607082367, + "learning_rate": 0.00017011725034428614, + "loss": 0.2793, + "step": 11610 + }, + { + "epoch": 0.4486659716591374, + "grad_norm": 1.3258132934570312, + "learning_rate": 0.00017009150932468435, + "loss": 0.4219, + "step": 11620 + }, + { + "epoch": 0.44905208695316423, + "grad_norm": 3.250332832336426, + "learning_rate": 0.00017006576830508256, + "loss": 0.3638, + "step": 11630 + }, + { + "epoch": 0.449438202247191, + "grad_norm": 1.9546891450881958, + "learning_rate": 0.00017004002728548078, + "loss": 0.5473, + "step": 11640 + }, + { + "epoch": 0.4498243175412178, + "grad_norm": 0.6501532793045044, + "learning_rate": 0.00017001428626587902, + "loss": 0.197, + "step": 11650 + }, + { + "epoch": 0.4502104328352446, + "grad_norm": 0.7169322967529297, + "learning_rate": 0.0001699885452462772, + "loss": 0.2846, + "step": 11660 + }, + { + "epoch": 0.4505965481292714, + "grad_norm": 1.4476008415222168, + "learning_rate": 0.00016996280422667545, + "loss": 0.3601, + "step": 11670 + }, + { + "epoch": 0.4509826634232982, + "grad_norm": 1.562485933303833, + "learning_rate": 0.00016993706320707363, + "loss": 0.2876, + "step": 11680 + }, + { + "epoch": 0.451368778717325, + "grad_norm": 4.490612983703613, + "learning_rate": 0.00016991132218747184, + "loss": 0.5445, + "step": 11690 + }, + { + "epoch": 0.45175489401135177, + "grad_norm": 0.9045882225036621, + "learning_rate": 0.00016988558116787009, + "loss": 0.149, + "step": 11700 + }, + { + "epoch": 0.4521410093053786, + "grad_norm": 2.2920546531677246, + "learning_rate": 0.00016985984014826827, + "loss": 0.3922, + "step": 11710 + }, + { + "epoch": 0.45252712459940536, + "grad_norm": 4.459114074707031, + "learning_rate": 0.0001698340991286665, + "loss": 0.3386, + "step": 11720 + }, + { + "epoch": 0.4529132398934322, + "grad_norm": 3.0614171028137207, + "learning_rate": 0.0001698083581090647, + "loss": 0.3618, + "step": 11730 + }, + { + "epoch": 0.45329935518745895, + "grad_norm": 1.0597162246704102, + "learning_rate": 0.00016978261708946294, + "loss": 0.2012, + "step": 11740 + }, + { + "epoch": 0.4536854704814858, + "grad_norm": 0.9142243266105652, + "learning_rate": 0.00016975687606986112, + "loss": 0.3716, + "step": 11750 + }, + { + "epoch": 0.45407158577551254, + "grad_norm": 1.7402280569076538, + "learning_rate": 0.00016973113505025934, + "loss": 0.3046, + "step": 11760 + }, + { + "epoch": 0.45445770106953937, + "grad_norm": 0.7389079928398132, + "learning_rate": 
0.00016970539403065758, + "loss": 0.287, + "step": 11770 + }, + { + "epoch": 0.45484381636356613, + "grad_norm": 2.0596721172332764, + "learning_rate": 0.00016967965301105576, + "loss": 0.5124, + "step": 11780 + }, + { + "epoch": 0.45522993165759296, + "grad_norm": 0.26743578910827637, + "learning_rate": 0.000169653911991454, + "loss": 0.6725, + "step": 11790 + }, + { + "epoch": 0.4556160469516198, + "grad_norm": 2.812441825866699, + "learning_rate": 0.0001696281709718522, + "loss": 0.4545, + "step": 11800 + }, + { + "epoch": 0.45600216224564655, + "grad_norm": 2.9857542514801025, + "learning_rate": 0.00016960242995225043, + "loss": 0.2865, + "step": 11810 + }, + { + "epoch": 0.45638827753967337, + "grad_norm": 0.4560181200504303, + "learning_rate": 0.00016957668893264862, + "loss": 0.1623, + "step": 11820 + }, + { + "epoch": 0.45677439283370014, + "grad_norm": 2.2492222785949707, + "learning_rate": 0.00016955094791304683, + "loss": 0.2813, + "step": 11830 + }, + { + "epoch": 0.45716050812772696, + "grad_norm": 1.0125524997711182, + "learning_rate": 0.00016952520689344507, + "loss": 0.2224, + "step": 11840 + }, + { + "epoch": 0.45754662342175373, + "grad_norm": 0.4849410057067871, + "learning_rate": 0.00016949946587384326, + "loss": 0.4838, + "step": 11850 + }, + { + "epoch": 0.45793273871578055, + "grad_norm": 2.313490390777588, + "learning_rate": 0.0001694737248542415, + "loss": 0.3855, + "step": 11860 + }, + { + "epoch": 0.4583188540098073, + "grad_norm": 2.3526558876037598, + "learning_rate": 0.00016944798383463968, + "loss": 0.3081, + "step": 11870 + }, + { + "epoch": 0.45870496930383414, + "grad_norm": 0.5723626613616943, + "learning_rate": 0.00016942224281503792, + "loss": 0.3126, + "step": 11880 + }, + { + "epoch": 0.4590910845978609, + "grad_norm": 4.130553722381592, + "learning_rate": 0.00016939650179543614, + "loss": 0.2295, + "step": 11890 + }, + { + "epoch": 0.45947719989188773, + "grad_norm": 1.3537687063217163, + "learning_rate": 0.00016937076077583432, + "loss": 0.3488, + "step": 11900 + }, + { + "epoch": 0.4598633151859145, + "grad_norm": 1.843441128730774, + "learning_rate": 0.00016934501975623256, + "loss": 0.6298, + "step": 11910 + }, + { + "epoch": 0.4602494304799413, + "grad_norm": 4.685494422912598, + "learning_rate": 0.00016931927873663075, + "loss": 0.4929, + "step": 11920 + }, + { + "epoch": 0.4606355457739681, + "grad_norm": 1.5470925569534302, + "learning_rate": 0.000169293537717029, + "loss": 0.3214, + "step": 11930 + }, + { + "epoch": 0.4610216610679949, + "grad_norm": 2.0928761959075928, + "learning_rate": 0.00016926779669742718, + "loss": 0.317, + "step": 11940 + }, + { + "epoch": 0.4614077763620217, + "grad_norm": 1.0828526020050049, + "learning_rate": 0.00016924205567782542, + "loss": 0.4204, + "step": 11950 + }, + { + "epoch": 0.4617938916560485, + "grad_norm": 2.2898383140563965, + "learning_rate": 0.00016921631465822363, + "loss": 0.4149, + "step": 11960 + }, + { + "epoch": 0.4621800069500753, + "grad_norm": 1.1590880155563354, + "learning_rate": 0.00016919057363862182, + "loss": 0.2372, + "step": 11970 + }, + { + "epoch": 0.4625661222441021, + "grad_norm": 0.6889861226081848, + "learning_rate": 0.00016916483261902006, + "loss": 0.4905, + "step": 11980 + }, + { + "epoch": 0.46295223753812886, + "grad_norm": 0.7974410653114319, + "learning_rate": 0.00016913909159941824, + "loss": 0.3451, + "step": 11990 + }, + { + "epoch": 0.4633383528321557, + "grad_norm": 3.254754066467285, + "learning_rate": 0.00016911335057981648, + "loss": 0.4018, + "step": 12000 + 
}, + { + "epoch": 0.46372446812618245, + "grad_norm": 2.660983085632324, + "learning_rate": 0.0001690876095602147, + "loss": 0.2112, + "step": 12010 + }, + { + "epoch": 0.4641105834202093, + "grad_norm": 1.1055381298065186, + "learning_rate": 0.0001690618685406129, + "loss": 0.2994, + "step": 12020 + }, + { + "epoch": 0.46449669871423604, + "grad_norm": 0.26787269115448, + "learning_rate": 0.00016903612752101112, + "loss": 0.2744, + "step": 12030 + }, + { + "epoch": 0.46488281400826287, + "grad_norm": 0.8832791447639465, + "learning_rate": 0.0001690103865014093, + "loss": 0.4357, + "step": 12040 + }, + { + "epoch": 0.4652689293022897, + "grad_norm": 1.0007046461105347, + "learning_rate": 0.00016898464548180755, + "loss": 0.2873, + "step": 12050 + }, + { + "epoch": 0.46565504459631646, + "grad_norm": 2.0366733074188232, + "learning_rate": 0.00016895890446220574, + "loss": 0.3016, + "step": 12060 + }, + { + "epoch": 0.4660411598903433, + "grad_norm": 2.557812213897705, + "learning_rate": 0.00016893316344260398, + "loss": 0.6375, + "step": 12070 + }, + { + "epoch": 0.46642727518437005, + "grad_norm": 1.9911783933639526, + "learning_rate": 0.0001689074224230022, + "loss": 0.5568, + "step": 12080 + }, + { + "epoch": 0.4668133904783969, + "grad_norm": 0.6905809044837952, + "learning_rate": 0.0001688816814034004, + "loss": 0.3237, + "step": 12090 + }, + { + "epoch": 0.46719950577242364, + "grad_norm": 2.3139610290527344, + "learning_rate": 0.00016885594038379862, + "loss": 0.3656, + "step": 12100 + }, + { + "epoch": 0.46758562106645046, + "grad_norm": 1.580295443534851, + "learning_rate": 0.0001688301993641968, + "loss": 0.3134, + "step": 12110 + }, + { + "epoch": 0.46797173636047723, + "grad_norm": 1.1115995645523071, + "learning_rate": 0.00016880445834459504, + "loss": 0.6167, + "step": 12120 + }, + { + "epoch": 0.46835785165450405, + "grad_norm": 3.0928075313568115, + "learning_rate": 0.00016877871732499323, + "loss": 0.3922, + "step": 12130 + }, + { + "epoch": 0.4687439669485308, + "grad_norm": 2.277163028717041, + "learning_rate": 0.00016875297630539147, + "loss": 0.2395, + "step": 12140 + }, + { + "epoch": 0.46913008224255764, + "grad_norm": 1.6578807830810547, + "learning_rate": 0.00016872723528578968, + "loss": 0.3502, + "step": 12150 + }, + { + "epoch": 0.4695161975365844, + "grad_norm": 2.9669971466064453, + "learning_rate": 0.0001687014942661879, + "loss": 0.3278, + "step": 12160 + }, + { + "epoch": 0.46990231283061124, + "grad_norm": 3.344914674758911, + "learning_rate": 0.0001686757532465861, + "loss": 0.4856, + "step": 12170 + }, + { + "epoch": 0.470288428124638, + "grad_norm": 1.512459635734558, + "learning_rate": 0.00016865001222698432, + "loss": 0.3113, + "step": 12180 + }, + { + "epoch": 0.4706745434186648, + "grad_norm": 1.8372429609298706, + "learning_rate": 0.00016862427120738254, + "loss": 0.3534, + "step": 12190 + }, + { + "epoch": 0.4710606587126916, + "grad_norm": 2.5016984939575195, + "learning_rate": 0.00016859853018778075, + "loss": 0.2129, + "step": 12200 + }, + { + "epoch": 0.4714467740067184, + "grad_norm": 2.083526134490967, + "learning_rate": 0.00016857278916817896, + "loss": 0.3011, + "step": 12210 + }, + { + "epoch": 0.4718328893007452, + "grad_norm": 3.3518013954162598, + "learning_rate": 0.00016854704814857718, + "loss": 0.2652, + "step": 12220 + }, + { + "epoch": 0.472219004594772, + "grad_norm": 0.34668633341789246, + "learning_rate": 0.0001685213071289754, + "loss": 0.3655, + "step": 12230 + }, + { + "epoch": 0.4726051198887988, + "grad_norm": 
1.0674203634262085, + "learning_rate": 0.0001684955661093736, + "loss": 0.3217, + "step": 12240 + }, + { + "epoch": 0.4729912351828256, + "grad_norm": 2.9859087467193604, + "learning_rate": 0.00016846982508977182, + "loss": 0.447, + "step": 12250 + }, + { + "epoch": 0.47337735047685237, + "grad_norm": 0.33996835350990295, + "learning_rate": 0.00016844408407017003, + "loss": 0.1915, + "step": 12260 + }, + { + "epoch": 0.4737634657708792, + "grad_norm": 0.10328155755996704, + "learning_rate": 0.00016841834305056824, + "loss": 0.2588, + "step": 12270 + }, + { + "epoch": 0.47414958106490596, + "grad_norm": 2.0321199893951416, + "learning_rate": 0.00016839260203096646, + "loss": 0.4788, + "step": 12280 + }, + { + "epoch": 0.4745356963589328, + "grad_norm": 1.5690089464187622, + "learning_rate": 0.00016836686101136467, + "loss": 0.5198, + "step": 12290 + }, + { + "epoch": 0.47492181165295955, + "grad_norm": 1.989465355873108, + "learning_rate": 0.00016834111999176288, + "loss": 0.2554, + "step": 12300 + }, + { + "epoch": 0.47530792694698637, + "grad_norm": 1.574174404144287, + "learning_rate": 0.0001683153789721611, + "loss": 0.3703, + "step": 12310 + }, + { + "epoch": 0.4756940422410132, + "grad_norm": 2.726776599884033, + "learning_rate": 0.0001682896379525593, + "loss": 0.7426, + "step": 12320 + }, + { + "epoch": 0.47608015753503996, + "grad_norm": 0.3101334571838379, + "learning_rate": 0.00016826389693295752, + "loss": 0.1861, + "step": 12330 + }, + { + "epoch": 0.4764662728290668, + "grad_norm": 5.30327033996582, + "learning_rate": 0.00016823815591335574, + "loss": 0.4261, + "step": 12340 + }, + { + "epoch": 0.47685238812309355, + "grad_norm": 0.5553661584854126, + "learning_rate": 0.00016821241489375395, + "loss": 0.7326, + "step": 12350 + }, + { + "epoch": 0.4772385034171204, + "grad_norm": 2.2244138717651367, + "learning_rate": 0.00016818667387415216, + "loss": 0.5406, + "step": 12360 + }, + { + "epoch": 0.47762461871114714, + "grad_norm": 0.5314281582832336, + "learning_rate": 0.00016816093285455038, + "loss": 0.3765, + "step": 12370 + }, + { + "epoch": 0.47801073400517397, + "grad_norm": 3.644477128982544, + "learning_rate": 0.0001681351918349486, + "loss": 0.5519, + "step": 12380 + }, + { + "epoch": 0.47839684929920073, + "grad_norm": 1.0556128025054932, + "learning_rate": 0.0001681094508153468, + "loss": 0.2128, + "step": 12390 + }, + { + "epoch": 0.47878296459322756, + "grad_norm": 2.7807135581970215, + "learning_rate": 0.00016808370979574502, + "loss": 0.1564, + "step": 12400 + }, + { + "epoch": 0.4791690798872543, + "grad_norm": 0.7862847447395325, + "learning_rate": 0.00016805796877614323, + "loss": 0.35, + "step": 12410 + }, + { + "epoch": 0.47955519518128115, + "grad_norm": 2.7445156574249268, + "learning_rate": 0.00016803222775654144, + "loss": 0.4167, + "step": 12420 + }, + { + "epoch": 0.4799413104753079, + "grad_norm": 0.6173526644706726, + "learning_rate": 0.00016800648673693966, + "loss": 0.3669, + "step": 12430 + }, + { + "epoch": 0.48032742576933474, + "grad_norm": 0.3762228488922119, + "learning_rate": 0.00016798074571733787, + "loss": 0.179, + "step": 12440 + }, + { + "epoch": 0.4807135410633615, + "grad_norm": 1.2806499004364014, + "learning_rate": 0.00016795500469773608, + "loss": 0.3926, + "step": 12450 + }, + { + "epoch": 0.48109965635738833, + "grad_norm": 1.0579105615615845, + "learning_rate": 0.0001679292636781343, + "loss": 0.3329, + "step": 12460 + }, + { + "epoch": 0.4814857716514151, + "grad_norm": 1.6502699851989746, + "learning_rate": 
0.0001679035226585325, + "loss": 0.3149, + "step": 12470 + }, + { + "epoch": 0.4818718869454419, + "grad_norm": 0.8876189589500427, + "learning_rate": 0.00016787778163893072, + "loss": 0.3262, + "step": 12480 + }, + { + "epoch": 0.4822580022394687, + "grad_norm": 2.4398770332336426, + "learning_rate": 0.00016785204061932894, + "loss": 0.3834, + "step": 12490 + }, + { + "epoch": 0.4826441175334955, + "grad_norm": 1.275148868560791, + "learning_rate": 0.00016782629959972715, + "loss": 0.3749, + "step": 12500 + }, + { + "epoch": 0.4830302328275223, + "grad_norm": 2.1397783756256104, + "learning_rate": 0.0001678005585801254, + "loss": 0.4726, + "step": 12510 + }, + { + "epoch": 0.4834163481215491, + "grad_norm": 1.6431002616882324, + "learning_rate": 0.00016777481756052358, + "loss": 0.3537, + "step": 12520 + }, + { + "epoch": 0.48380246341557587, + "grad_norm": 1.1193108558654785, + "learning_rate": 0.0001677490765409218, + "loss": 0.3182, + "step": 12530 + }, + { + "epoch": 0.4841885787096027, + "grad_norm": 1.365897297859192, + "learning_rate": 0.00016772333552132, + "loss": 0.3455, + "step": 12540 + }, + { + "epoch": 0.48457469400362946, + "grad_norm": 0.9035172462463379, + "learning_rate": 0.00016769759450171822, + "loss": 0.2268, + "step": 12550 + }, + { + "epoch": 0.4849608092976563, + "grad_norm": 0.15492293238639832, + "learning_rate": 0.00016767185348211643, + "loss": 0.319, + "step": 12560 + }, + { + "epoch": 0.4853469245916831, + "grad_norm": 0.19394727051258087, + "learning_rate": 0.00016764611246251464, + "loss": 0.2073, + "step": 12570 + }, + { + "epoch": 0.48573303988570987, + "grad_norm": 1.4406816959381104, + "learning_rate": 0.00016762037144291288, + "loss": 0.4538, + "step": 12580 + }, + { + "epoch": 0.4861191551797367, + "grad_norm": 2.741548538208008, + "learning_rate": 0.00016759463042331107, + "loss": 0.536, + "step": 12590 + }, + { + "epoch": 0.48650527047376346, + "grad_norm": 0.5203917622566223, + "learning_rate": 0.00016756888940370928, + "loss": 0.2484, + "step": 12600 + }, + { + "epoch": 0.4868913857677903, + "grad_norm": 0.9666195511817932, + "learning_rate": 0.0001675431483841075, + "loss": 0.3834, + "step": 12610 + }, + { + "epoch": 0.48727750106181705, + "grad_norm": 1.7210304737091064, + "learning_rate": 0.0001675174073645057, + "loss": 0.3223, + "step": 12620 + }, + { + "epoch": 0.4876636163558439, + "grad_norm": 0.6125622987747192, + "learning_rate": 0.00016749166634490392, + "loss": 0.394, + "step": 12630 + }, + { + "epoch": 0.48804973164987064, + "grad_norm": 1.9612951278686523, + "learning_rate": 0.00016746592532530214, + "loss": 0.3367, + "step": 12640 + }, + { + "epoch": 0.48843584694389747, + "grad_norm": 1.5395468473434448, + "learning_rate": 0.00016744018430570038, + "loss": 0.5441, + "step": 12650 + }, + { + "epoch": 0.48882196223792423, + "grad_norm": 0.8706358671188354, + "learning_rate": 0.00016741444328609856, + "loss": 0.422, + "step": 12660 + }, + { + "epoch": 0.48920807753195106, + "grad_norm": 4.338102340698242, + "learning_rate": 0.00016738870226649678, + "loss": 0.3932, + "step": 12670 + }, + { + "epoch": 0.4895941928259778, + "grad_norm": 0.9501354694366455, + "learning_rate": 0.000167362961246895, + "loss": 0.563, + "step": 12680 + }, + { + "epoch": 0.48998030812000465, + "grad_norm": 1.3146884441375732, + "learning_rate": 0.0001673372202272932, + "loss": 0.5116, + "step": 12690 + }, + { + "epoch": 0.4903664234140314, + "grad_norm": 2.144622564315796, + "learning_rate": 0.00016731147920769144, + "loss": 0.4792, + "step": 12700 + }, + { 
+ "epoch": 0.49075253870805824, + "grad_norm": 0.9871418476104736, + "learning_rate": 0.00016728573818808963, + "loss": 0.3802, + "step": 12710 + }, + { + "epoch": 0.491138654002085, + "grad_norm": 1.710766315460205, + "learning_rate": 0.00016725999716848787, + "loss": 0.2859, + "step": 12720 + }, + { + "epoch": 0.49152476929611183, + "grad_norm": 2.528146505355835, + "learning_rate": 0.00016723425614888606, + "loss": 0.4565, + "step": 12730 + }, + { + "epoch": 0.4919108845901386, + "grad_norm": 0.8471786379814148, + "learning_rate": 0.00016720851512928427, + "loss": 0.3455, + "step": 12740 + }, + { + "epoch": 0.4922969998841654, + "grad_norm": 2.6623692512512207, + "learning_rate": 0.00016718277410968248, + "loss": 0.3687, + "step": 12750 + }, + { + "epoch": 0.4926831151781922, + "grad_norm": 2.9324758052825928, + "learning_rate": 0.0001671570330900807, + "loss": 0.2652, + "step": 12760 + }, + { + "epoch": 0.493069230472219, + "grad_norm": 1.7955294847488403, + "learning_rate": 0.00016713129207047894, + "loss": 0.3156, + "step": 12770 + }, + { + "epoch": 0.4934553457662458, + "grad_norm": 0.9923033118247986, + "learning_rate": 0.00016710555105087712, + "loss": 0.3042, + "step": 12780 + }, + { + "epoch": 0.4938414610602726, + "grad_norm": 0.9309022426605225, + "learning_rate": 0.00016707981003127536, + "loss": 0.2742, + "step": 12790 + }, + { + "epoch": 0.49422757635429937, + "grad_norm": 4.864802360534668, + "learning_rate": 0.00016705406901167355, + "loss": 0.6145, + "step": 12800 + }, + { + "epoch": 0.4946136916483262, + "grad_norm": 2.4508230686187744, + "learning_rate": 0.00016702832799207176, + "loss": 0.4218, + "step": 12810 + }, + { + "epoch": 0.49499980694235296, + "grad_norm": 2.0317444801330566, + "learning_rate": 0.00016700258697246998, + "loss": 0.5682, + "step": 12820 + }, + { + "epoch": 0.4953859222363798, + "grad_norm": 4.89669942855835, + "learning_rate": 0.0001669768459528682, + "loss": 0.5654, + "step": 12830 + }, + { + "epoch": 0.4957720375304066, + "grad_norm": 0.602165162563324, + "learning_rate": 0.00016695110493326643, + "loss": 0.2719, + "step": 12840 + }, + { + "epoch": 0.4961581528244334, + "grad_norm": 1.1574476957321167, + "learning_rate": 0.00016692536391366462, + "loss": 0.3872, + "step": 12850 + }, + { + "epoch": 0.4965442681184602, + "grad_norm": 0.4792019724845886, + "learning_rate": 0.00016689962289406286, + "loss": 0.2671, + "step": 12860 + }, + { + "epoch": 0.49693038341248696, + "grad_norm": 1.4611676931381226, + "learning_rate": 0.00016687388187446104, + "loss": 0.3378, + "step": 12870 + }, + { + "epoch": 0.4973164987065138, + "grad_norm": 1.183975338935852, + "learning_rate": 0.00016684814085485928, + "loss": 0.2645, + "step": 12880 + }, + { + "epoch": 0.49770261400054056, + "grad_norm": 2.1447482109069824, + "learning_rate": 0.0001668223998352575, + "loss": 0.1678, + "step": 12890 + }, + { + "epoch": 0.4980887292945674, + "grad_norm": 1.5829964876174927, + "learning_rate": 0.00016679665881565568, + "loss": 0.4113, + "step": 12900 + }, + { + "epoch": 0.49847484458859415, + "grad_norm": 1.329871654510498, + "learning_rate": 0.00016677091779605392, + "loss": 0.2639, + "step": 12910 + }, + { + "epoch": 0.49886095988262097, + "grad_norm": 4.797327518463135, + "learning_rate": 0.0001667451767764521, + "loss": 0.3396, + "step": 12920 + }, + { + "epoch": 0.49924707517664774, + "grad_norm": 2.5864250659942627, + "learning_rate": 0.00016671943575685035, + "loss": 0.4143, + "step": 12930 + }, + { + "epoch": 0.49963319047067456, + "grad_norm": 
0.05073557794094086, + "learning_rate": 0.00016669369473724854, + "loss": 0.231, + "step": 12940 + }, + { + "epoch": 0.5000193057647013, + "grad_norm": 0.9530317783355713, + "learning_rate": 0.00016666795371764678, + "loss": 0.3702, + "step": 12950 + }, + { + "epoch": 0.5004054210587281, + "grad_norm": 1.3573989868164062, + "learning_rate": 0.000166642212698045, + "loss": 0.2095, + "step": 12960 + }, + { + "epoch": 0.500791536352755, + "grad_norm": 3.2758514881134033, + "learning_rate": 0.00016661647167844318, + "loss": 0.3901, + "step": 12970 + }, + { + "epoch": 0.5011776516467817, + "grad_norm": 2.359602212905884, + "learning_rate": 0.00016659073065884142, + "loss": 0.6232, + "step": 12980 + }, + { + "epoch": 0.5015637669408085, + "grad_norm": 0.5743809938430786, + "learning_rate": 0.0001665649896392396, + "loss": 0.431, + "step": 12990 + }, + { + "epoch": 0.5019498822348353, + "grad_norm": 2.229215621948242, + "learning_rate": 0.00016653924861963784, + "loss": 0.33, + "step": 13000 + }, + { + "epoch": 0.5023359975288622, + "grad_norm": 3.1233408451080322, + "learning_rate": 0.00016651350760003606, + "loss": 0.3527, + "step": 13010 + }, + { + "epoch": 0.5027221128228889, + "grad_norm": 0.4846508204936981, + "learning_rate": 0.00016648776658043427, + "loss": 0.4514, + "step": 13020 + }, + { + "epoch": 0.5031082281169157, + "grad_norm": 0.807447612285614, + "learning_rate": 0.00016646202556083248, + "loss": 0.3643, + "step": 13030 + }, + { + "epoch": 0.5034943434109425, + "grad_norm": 3.226194143295288, + "learning_rate": 0.00016643628454123067, + "loss": 0.4803, + "step": 13040 + }, + { + "epoch": 0.5038804587049693, + "grad_norm": 6.127805233001709, + "learning_rate": 0.0001664105435216289, + "loss": 0.3501, + "step": 13050 + }, + { + "epoch": 0.5042665739989961, + "grad_norm": 1.3367782831192017, + "learning_rate": 0.0001663848025020271, + "loss": 0.2259, + "step": 13060 + }, + { + "epoch": 0.5046526892930229, + "grad_norm": 1.112602710723877, + "learning_rate": 0.00016635906148242534, + "loss": 0.3868, + "step": 13070 + }, + { + "epoch": 0.5050388045870496, + "grad_norm": 3.188282012939453, + "learning_rate": 0.00016633332046282355, + "loss": 0.2783, + "step": 13080 + }, + { + "epoch": 0.5054249198810765, + "grad_norm": 1.5972063541412354, + "learning_rate": 0.00016630757944322176, + "loss": 0.4661, + "step": 13090 + }, + { + "epoch": 0.5058110351751033, + "grad_norm": 0.8210055232048035, + "learning_rate": 0.00016628183842361998, + "loss": 0.2536, + "step": 13100 + }, + { + "epoch": 0.50619715046913, + "grad_norm": 2.279244899749756, + "learning_rate": 0.00016625609740401816, + "loss": 0.3158, + "step": 13110 + }, + { + "epoch": 0.5065832657631569, + "grad_norm": 2.9017488956451416, + "learning_rate": 0.0001662303563844164, + "loss": 0.4209, + "step": 13120 + }, + { + "epoch": 0.5069693810571837, + "grad_norm": 0.9567920565605164, + "learning_rate": 0.0001662046153648146, + "loss": 0.3869, + "step": 13130 + }, + { + "epoch": 0.5073554963512105, + "grad_norm": 1.3605408668518066, + "learning_rate": 0.00016617887434521283, + "loss": 0.3529, + "step": 13140 + }, + { + "epoch": 0.5077416116452372, + "grad_norm": 0.502921998500824, + "learning_rate": 0.00016615313332561104, + "loss": 0.2584, + "step": 13150 + }, + { + "epoch": 0.5081277269392641, + "grad_norm": 2.9092366695404053, + "learning_rate": 0.00016612739230600926, + "loss": 0.3494, + "step": 13160 + }, + { + "epoch": 0.5085138422332909, + "grad_norm": 0.28300145268440247, + "learning_rate": 0.00016610165128640747, + "loss": 
0.3514, + "step": 13170 + }, + { + "epoch": 0.5088999575273176, + "grad_norm": 1.326134204864502, + "learning_rate": 0.00016607591026680566, + "loss": 0.1593, + "step": 13180 + }, + { + "epoch": 0.5092860728213444, + "grad_norm": 2.1484436988830566, + "learning_rate": 0.0001660501692472039, + "loss": 0.4468, + "step": 13190 + }, + { + "epoch": 0.5096721881153713, + "grad_norm": 1.9255646467208862, + "learning_rate": 0.0001660244282276021, + "loss": 0.3937, + "step": 13200 + }, + { + "epoch": 0.5100583034093981, + "grad_norm": 0.01497764140367508, + "learning_rate": 0.00016599868720800032, + "loss": 0.2795, + "step": 13210 + }, + { + "epoch": 0.5104444187034248, + "grad_norm": 0.992023766040802, + "learning_rate": 0.00016597294618839854, + "loss": 0.2389, + "step": 13220 + }, + { + "epoch": 0.5108305339974516, + "grad_norm": 1.517337441444397, + "learning_rate": 0.00016594720516879675, + "loss": 0.2821, + "step": 13230 + }, + { + "epoch": 0.5112166492914785, + "grad_norm": 2.355637311935425, + "learning_rate": 0.00016592146414919496, + "loss": 0.5727, + "step": 13240 + }, + { + "epoch": 0.5116027645855052, + "grad_norm": 2.450536012649536, + "learning_rate": 0.00016589572312959315, + "loss": 0.4887, + "step": 13250 + }, + { + "epoch": 0.511988879879532, + "grad_norm": 1.2966598272323608, + "learning_rate": 0.0001658699821099914, + "loss": 0.3605, + "step": 13260 + }, + { + "epoch": 0.5123749951735588, + "grad_norm": 1.3981765508651733, + "learning_rate": 0.0001658442410903896, + "loss": 0.5329, + "step": 13270 + }, + { + "epoch": 0.5127611104675857, + "grad_norm": 0.7260739803314209, + "learning_rate": 0.00016581850007078782, + "loss": 0.3447, + "step": 13280 + }, + { + "epoch": 0.5131472257616124, + "grad_norm": 1.1348093748092651, + "learning_rate": 0.00016579275905118603, + "loss": 0.3086, + "step": 13290 + }, + { + "epoch": 0.5135333410556392, + "grad_norm": 1.3015291690826416, + "learning_rate": 0.00016576701803158424, + "loss": 0.3411, + "step": 13300 + }, + { + "epoch": 0.513919456349666, + "grad_norm": 2.352766990661621, + "learning_rate": 0.00016574127701198246, + "loss": 0.1617, + "step": 13310 + }, + { + "epoch": 0.5143055716436928, + "grad_norm": 2.0096113681793213, + "learning_rate": 0.00016571553599238067, + "loss": 0.3168, + "step": 13320 + }, + { + "epoch": 0.5146916869377196, + "grad_norm": 0.8163488507270813, + "learning_rate": 0.00016568979497277888, + "loss": 0.3318, + "step": 13330 + }, + { + "epoch": 0.5150778022317464, + "grad_norm": 0.8218249082565308, + "learning_rate": 0.0001656640539531771, + "loss": 0.2303, + "step": 13340 + }, + { + "epoch": 0.5154639175257731, + "grad_norm": 3.5414462089538574, + "learning_rate": 0.0001656383129335753, + "loss": 0.2295, + "step": 13350 + }, + { + "epoch": 0.5158500328198, + "grad_norm": 2.176178216934204, + "learning_rate": 0.00016561257191397352, + "loss": 0.5767, + "step": 13360 + }, + { + "epoch": 0.5162361481138268, + "grad_norm": 1.4649319648742676, + "learning_rate": 0.00016558683089437174, + "loss": 0.174, + "step": 13370 + }, + { + "epoch": 0.5166222634078536, + "grad_norm": 2.107895851135254, + "learning_rate": 0.00016556108987476995, + "loss": 0.3444, + "step": 13380 + }, + { + "epoch": 0.5170083787018804, + "grad_norm": 1.3116638660430908, + "learning_rate": 0.00016553534885516816, + "loss": 0.2462, + "step": 13390 + }, + { + "epoch": 0.5173944939959072, + "grad_norm": 2.867553949356079, + "learning_rate": 0.00016550960783556638, + "loss": 0.3106, + "step": 13400 + }, + { + "epoch": 0.517780609289934, + "grad_norm": 
3.4331533908843994, + "learning_rate": 0.0001654838668159646, + "loss": 0.5359, + "step": 13410 + }, + { + "epoch": 0.5181667245839607, + "grad_norm": 1.8306528329849243, + "learning_rate": 0.0001654581257963628, + "loss": 0.4225, + "step": 13420 + }, + { + "epoch": 0.5185528398779876, + "grad_norm": 1.242026448249817, + "learning_rate": 0.00016543238477676102, + "loss": 0.2679, + "step": 13430 + }, + { + "epoch": 0.5189389551720144, + "grad_norm": 0.5788571834564209, + "learning_rate": 0.00016540664375715923, + "loss": 0.369, + "step": 13440 + }, + { + "epoch": 0.5193250704660411, + "grad_norm": 0.22553350031375885, + "learning_rate": 0.00016538090273755744, + "loss": 0.422, + "step": 13450 + }, + { + "epoch": 0.5197111857600679, + "grad_norm": 1.6932384967803955, + "learning_rate": 0.00016535516171795566, + "loss": 0.3127, + "step": 13460 + }, + { + "epoch": 0.5200973010540948, + "grad_norm": 1.3747683763504028, + "learning_rate": 0.00016532942069835387, + "loss": 0.2751, + "step": 13470 + }, + { + "epoch": 0.5204834163481216, + "grad_norm": 2.0508596897125244, + "learning_rate": 0.00016530367967875208, + "loss": 0.4951, + "step": 13480 + }, + { + "epoch": 0.5208695316421483, + "grad_norm": 1.044503092765808, + "learning_rate": 0.0001652779386591503, + "loss": 0.2329, + "step": 13490 + }, + { + "epoch": 0.5212556469361751, + "grad_norm": 1.6400004625320435, + "learning_rate": 0.0001652521976395485, + "loss": 0.1846, + "step": 13500 + }, + { + "epoch": 0.521641762230202, + "grad_norm": 2.234170913696289, + "learning_rate": 0.00016522645661994672, + "loss": 0.5005, + "step": 13510 + }, + { + "epoch": 0.5220278775242287, + "grad_norm": 1.8167870044708252, + "learning_rate": 0.00016520071560034493, + "loss": 0.4629, + "step": 13520 + }, + { + "epoch": 0.5224139928182555, + "grad_norm": 1.7200794219970703, + "learning_rate": 0.00016517497458074315, + "loss": 0.4546, + "step": 13530 + }, + { + "epoch": 0.5228001081122823, + "grad_norm": 2.845244884490967, + "learning_rate": 0.00016514923356114136, + "loss": 0.2211, + "step": 13540 + }, + { + "epoch": 0.5231862234063092, + "grad_norm": 5.3078389167785645, + "learning_rate": 0.00016512349254153957, + "loss": 0.4903, + "step": 13550 + }, + { + "epoch": 0.5235723387003359, + "grad_norm": 8.109561920166016, + "learning_rate": 0.0001650977515219378, + "loss": 0.3624, + "step": 13560 + }, + { + "epoch": 0.5239584539943627, + "grad_norm": 0.537749171257019, + "learning_rate": 0.000165072010502336, + "loss": 0.2262, + "step": 13570 + }, + { + "epoch": 0.5243445692883895, + "grad_norm": 2.8182802200317383, + "learning_rate": 0.00016504626948273421, + "loss": 0.4173, + "step": 13580 + }, + { + "epoch": 0.5247306845824163, + "grad_norm": 0.6623479127883911, + "learning_rate": 0.00016502052846313243, + "loss": 0.2955, + "step": 13590 + }, + { + "epoch": 0.5251167998764431, + "grad_norm": 3.027707099914551, + "learning_rate": 0.00016499478744353064, + "loss": 0.5272, + "step": 13600 + }, + { + "epoch": 0.5255029151704699, + "grad_norm": 1.6830018758773804, + "learning_rate": 0.00016496904642392885, + "loss": 0.2169, + "step": 13610 + }, + { + "epoch": 0.5258890304644968, + "grad_norm": 3.0182113647460938, + "learning_rate": 0.00016494330540432707, + "loss": 0.5747, + "step": 13620 + }, + { + "epoch": 0.5262751457585235, + "grad_norm": 1.5487585067749023, + "learning_rate": 0.00016491756438472528, + "loss": 0.2585, + "step": 13630 + }, + { + "epoch": 0.5266612610525503, + "grad_norm": 1.2525122165679932, + "learning_rate": 0.0001648918233651235, + "loss": 
0.3622, + "step": 13640 + }, + { + "epoch": 0.5270473763465771, + "grad_norm": 1.9910658597946167, + "learning_rate": 0.00016486608234552174, + "loss": 0.2049, + "step": 13650 + }, + { + "epoch": 0.5274334916406039, + "grad_norm": 0.7254251837730408, + "learning_rate": 0.00016484034132591992, + "loss": 0.3101, + "step": 13660 + }, + { + "epoch": 0.5278196069346307, + "grad_norm": 0.9839001893997192, + "learning_rate": 0.00016481460030631813, + "loss": 0.4723, + "step": 13670 + }, + { + "epoch": 0.5282057222286575, + "grad_norm": 2.176529884338379, + "learning_rate": 0.00016478885928671635, + "loss": 0.3669, + "step": 13680 + }, + { + "epoch": 0.5285918375226842, + "grad_norm": 2.534996509552002, + "learning_rate": 0.00016476311826711456, + "loss": 0.551, + "step": 13690 + }, + { + "epoch": 0.5289779528167111, + "grad_norm": 2.0100669860839844, + "learning_rate": 0.0001647373772475128, + "loss": 0.445, + "step": 13700 + }, + { + "epoch": 0.5293640681107379, + "grad_norm": 0.7759265899658203, + "learning_rate": 0.000164711636227911, + "loss": 0.2839, + "step": 13710 + }, + { + "epoch": 0.5297501834047647, + "grad_norm": 3.398287057876587, + "learning_rate": 0.00016468589520830923, + "loss": 0.3497, + "step": 13720 + }, + { + "epoch": 0.5301362986987914, + "grad_norm": 2.6792221069335938, + "learning_rate": 0.00016466015418870741, + "loss": 0.3435, + "step": 13730 + }, + { + "epoch": 0.5305224139928183, + "grad_norm": 0.7382081747055054, + "learning_rate": 0.00016463441316910563, + "loss": 0.367, + "step": 13740 + }, + { + "epoch": 0.5309085292868451, + "grad_norm": 0.9496407508850098, + "learning_rate": 0.00016460867214950384, + "loss": 0.305, + "step": 13750 + }, + { + "epoch": 0.5312946445808718, + "grad_norm": 1.2950342893600464, + "learning_rate": 0.00016458293112990205, + "loss": 0.2769, + "step": 13760 + }, + { + "epoch": 0.5316807598748986, + "grad_norm": 1.1744359731674194, + "learning_rate": 0.0001645571901103003, + "loss": 0.4776, + "step": 13770 + }, + { + "epoch": 0.5320668751689255, + "grad_norm": 1.1507617235183716, + "learning_rate": 0.00016453144909069848, + "loss": 0.4486, + "step": 13780 + }, + { + "epoch": 0.5324529904629522, + "grad_norm": 3.200432300567627, + "learning_rate": 0.00016450570807109672, + "loss": 0.4144, + "step": 13790 + }, + { + "epoch": 0.532839105756979, + "grad_norm": 0.991581916809082, + "learning_rate": 0.0001644799670514949, + "loss": 0.2314, + "step": 13800 + }, + { + "epoch": 0.5332252210510058, + "grad_norm": 4.800248622894287, + "learning_rate": 0.00016445422603189312, + "loss": 0.4601, + "step": 13810 + }, + { + "epoch": 0.5336113363450327, + "grad_norm": 1.2141329050064087, + "learning_rate": 0.00016442848501229136, + "loss": 0.257, + "step": 13820 + }, + { + "epoch": 0.5339974516390594, + "grad_norm": 0.8803738951683044, + "learning_rate": 0.00016440274399268955, + "loss": 0.4645, + "step": 13830 + }, + { + "epoch": 0.5343835669330862, + "grad_norm": 1.2020646333694458, + "learning_rate": 0.0001643770029730878, + "loss": 0.3751, + "step": 13840 + }, + { + "epoch": 0.534769682227113, + "grad_norm": 0.9887505173683167, + "learning_rate": 0.00016435126195348597, + "loss": 0.2266, + "step": 13850 + }, + { + "epoch": 0.5351557975211398, + "grad_norm": 0.38067731261253357, + "learning_rate": 0.00016432552093388421, + "loss": 0.3482, + "step": 13860 + }, + { + "epoch": 0.5355419128151666, + "grad_norm": 5.429462909698486, + "learning_rate": 0.0001642997799142824, + "loss": 0.4055, + "step": 13870 + }, + { + "epoch": 0.5359280281091934, + "grad_norm": 
2.197861909866333, + "learning_rate": 0.00016427403889468061, + "loss": 0.1701, + "step": 13880 + }, + { + "epoch": 0.5363141434032203, + "grad_norm": 1.467132568359375, + "learning_rate": 0.00016424829787507885, + "loss": 0.3536, + "step": 13890 + }, + { + "epoch": 0.536700258697247, + "grad_norm": 2.0702550411224365, + "learning_rate": 0.00016422255685547704, + "loss": 0.5644, + "step": 13900 + }, + { + "epoch": 0.5370863739912738, + "grad_norm": 1.4855132102966309, + "learning_rate": 0.00016419681583587528, + "loss": 0.4068, + "step": 13910 + }, + { + "epoch": 0.5374724892853006, + "grad_norm": 0.6586676239967346, + "learning_rate": 0.00016417107481627347, + "loss": 0.3709, + "step": 13920 + }, + { + "epoch": 0.5378586045793274, + "grad_norm": 0.026774466037750244, + "learning_rate": 0.0001641453337966717, + "loss": 0.2635, + "step": 13930 + }, + { + "epoch": 0.5382447198733542, + "grad_norm": 0.9848103523254395, + "learning_rate": 0.0001641195927770699, + "loss": 0.249, + "step": 13940 + }, + { + "epoch": 0.538630835167381, + "grad_norm": 3.77512526512146, + "learning_rate": 0.0001640938517574681, + "loss": 0.5356, + "step": 13950 + }, + { + "epoch": 0.5390169504614077, + "grad_norm": 5.543573379516602, + "learning_rate": 0.00016406811073786635, + "loss": 0.6471, + "step": 13960 + }, + { + "epoch": 0.5394030657554346, + "grad_norm": 0.8161652684211731, + "learning_rate": 0.00016404236971826453, + "loss": 0.5691, + "step": 13970 + }, + { + "epoch": 0.5397891810494614, + "grad_norm": 1.5539859533309937, + "learning_rate": 0.00016401662869866277, + "loss": 0.4406, + "step": 13980 + }, + { + "epoch": 0.5401752963434882, + "grad_norm": 1.3035658597946167, + "learning_rate": 0.00016399088767906096, + "loss": 0.3086, + "step": 13990 + }, + { + "epoch": 0.5405614116375149, + "grad_norm": 0.9168418645858765, + "learning_rate": 0.0001639651466594592, + "loss": 0.1581, + "step": 14000 + }, + { + "epoch": 0.5409475269315418, + "grad_norm": 1.0382287502288818, + "learning_rate": 0.00016393940563985741, + "loss": 0.4723, + "step": 14010 + }, + { + "epoch": 0.5413336422255686, + "grad_norm": 2.896981716156006, + "learning_rate": 0.0001639136646202556, + "loss": 0.2999, + "step": 14020 + }, + { + "epoch": 0.5417197575195953, + "grad_norm": 0.7354179620742798, + "learning_rate": 0.00016388792360065384, + "loss": 0.4853, + "step": 14030 + }, + { + "epoch": 0.5421058728136221, + "grad_norm": 3.221067190170288, + "learning_rate": 0.00016386218258105203, + "loss": 0.3622, + "step": 14040 + }, + { + "epoch": 0.542491988107649, + "grad_norm": 6.591146469116211, + "learning_rate": 0.00016383644156145027, + "loss": 0.5803, + "step": 14050 + }, + { + "epoch": 0.5428781034016757, + "grad_norm": 3.1521377563476562, + "learning_rate": 0.00016381070054184845, + "loss": 0.3267, + "step": 14060 + }, + { + "epoch": 0.5432642186957025, + "grad_norm": 1.7890762090682983, + "learning_rate": 0.0001637849595222467, + "loss": 0.4584, + "step": 14070 + }, + { + "epoch": 0.5436503339897293, + "grad_norm": 1.6599558591842651, + "learning_rate": 0.0001637592185026449, + "loss": 0.298, + "step": 14080 + }, + { + "epoch": 0.5440364492837562, + "grad_norm": 3.521927833557129, + "learning_rate": 0.00016373347748304312, + "loss": 0.3743, + "step": 14090 + }, + { + "epoch": 0.5444225645777829, + "grad_norm": 3.8942599296569824, + "learning_rate": 0.00016370773646344133, + "loss": 0.3254, + "step": 14100 + }, + { + "epoch": 0.5448086798718097, + "grad_norm": 2.8547496795654297, + "learning_rate": 0.00016368199544383952, + "loss": 
0.4073, + "step": 14110 + }, + { + "epoch": 0.5451947951658365, + "grad_norm": 1.0060430765151978, + "learning_rate": 0.00016365625442423776, + "loss": 0.1631, + "step": 14120 + }, + { + "epoch": 0.5455809104598633, + "grad_norm": 2.2001001834869385, + "learning_rate": 0.00016363051340463595, + "loss": 0.2854, + "step": 14130 + }, + { + "epoch": 0.5459670257538901, + "grad_norm": 1.3699944019317627, + "learning_rate": 0.0001636047723850342, + "loss": 0.456, + "step": 14140 + }, + { + "epoch": 0.5463531410479169, + "grad_norm": 2.1481733322143555, + "learning_rate": 0.0001635790313654324, + "loss": 0.4085, + "step": 14150 + }, + { + "epoch": 0.5467392563419438, + "grad_norm": 0.40439683198928833, + "learning_rate": 0.00016355329034583061, + "loss": 0.3932, + "step": 14160 + }, + { + "epoch": 0.5471253716359705, + "grad_norm": 2.2773404121398926, + "learning_rate": 0.00016352754932622883, + "loss": 0.2926, + "step": 14170 + }, + { + "epoch": 0.5475114869299973, + "grad_norm": 2.2974839210510254, + "learning_rate": 0.000163501808306627, + "loss": 0.2419, + "step": 14180 + }, + { + "epoch": 0.5478976022240241, + "grad_norm": 1.0429989099502563, + "learning_rate": 0.00016347606728702525, + "loss": 0.2559, + "step": 14190 + }, + { + "epoch": 0.5482837175180509, + "grad_norm": 0.8988879919052124, + "learning_rate": 0.00016345032626742347, + "loss": 0.1677, + "step": 14200 + }, + { + "epoch": 0.5486698328120777, + "grad_norm": 1.2740018367767334, + "learning_rate": 0.00016342458524782168, + "loss": 0.2452, + "step": 14210 + }, + { + "epoch": 0.5490559481061045, + "grad_norm": 1.789467692375183, + "learning_rate": 0.0001633988442282199, + "loss": 0.2272, + "step": 14220 + }, + { + "epoch": 0.5494420634001312, + "grad_norm": 3.2136781215667725, + "learning_rate": 0.0001633731032086181, + "loss": 0.4026, + "step": 14230 + }, + { + "epoch": 0.5498281786941581, + "grad_norm": 2.4747092723846436, + "learning_rate": 0.00016334736218901632, + "loss": 0.2371, + "step": 14240 + }, + { + "epoch": 0.5502142939881849, + "grad_norm": 1.5639567375183105, + "learning_rate": 0.0001633216211694145, + "loss": 0.2801, + "step": 14250 + }, + { + "epoch": 0.5506004092822117, + "grad_norm": 3.9598312377929688, + "learning_rate": 0.00016329588014981275, + "loss": 0.2583, + "step": 14260 + }, + { + "epoch": 0.5509865245762384, + "grad_norm": 1.5873563289642334, + "learning_rate": 0.00016327013913021096, + "loss": 0.2729, + "step": 14270 + }, + { + "epoch": 0.5513726398702653, + "grad_norm": 2.2313668727874756, + "learning_rate": 0.00016324439811060917, + "loss": 0.191, + "step": 14280 + }, + { + "epoch": 0.5517587551642921, + "grad_norm": 1.6087117195129395, + "learning_rate": 0.0001632186570910074, + "loss": 0.2698, + "step": 14290 + }, + { + "epoch": 0.5521448704583188, + "grad_norm": 6.5459675788879395, + "learning_rate": 0.0001631929160714056, + "loss": 0.3632, + "step": 14300 + }, + { + "epoch": 0.5525309857523456, + "grad_norm": 1.2121779918670654, + "learning_rate": 0.00016316717505180381, + "loss": 0.4541, + "step": 14310 + }, + { + "epoch": 0.5529171010463725, + "grad_norm": 2.7277257442474365, + "learning_rate": 0.00016314143403220203, + "loss": 0.1489, + "step": 14320 + }, + { + "epoch": 0.5533032163403993, + "grad_norm": 2.2566685676574707, + "learning_rate": 0.00016311569301260024, + "loss": 0.1838, + "step": 14330 + }, + { + "epoch": 0.553689331634426, + "grad_norm": 0.44783294200897217, + "learning_rate": 0.00016308995199299845, + "loss": 0.4745, + "step": 14340 + }, + { + "epoch": 0.5540754469284528, + 
"grad_norm": 1.0200363397598267, + "learning_rate": 0.00016306421097339667, + "loss": 0.1251, + "step": 14350 + }, + { + "epoch": 0.5544615622224797, + "grad_norm": 1.1761879920959473, + "learning_rate": 0.00016303846995379488, + "loss": 0.6837, + "step": 14360 + }, + { + "epoch": 0.5548476775165064, + "grad_norm": 1.8275704383850098, + "learning_rate": 0.0001630127289341931, + "loss": 0.3968, + "step": 14370 + }, + { + "epoch": 0.5552337928105332, + "grad_norm": 0.7219232320785522, + "learning_rate": 0.0001629869879145913, + "loss": 0.3278, + "step": 14380 + }, + { + "epoch": 0.55561990810456, + "grad_norm": 1.9161540269851685, + "learning_rate": 0.00016296124689498952, + "loss": 0.5071, + "step": 14390 + }, + { + "epoch": 0.5560060233985868, + "grad_norm": 2.4773502349853516, + "learning_rate": 0.00016293550587538773, + "loss": 0.3268, + "step": 14400 + }, + { + "epoch": 0.5563921386926136, + "grad_norm": 1.526877760887146, + "learning_rate": 0.00016290976485578595, + "loss": 0.284, + "step": 14410 + }, + { + "epoch": 0.5567782539866404, + "grad_norm": 2.082036018371582, + "learning_rate": 0.00016288402383618416, + "loss": 0.4194, + "step": 14420 + }, + { + "epoch": 0.5571643692806673, + "grad_norm": 4.1033477783203125, + "learning_rate": 0.00016285828281658237, + "loss": 0.329, + "step": 14430 + }, + { + "epoch": 0.557550484574694, + "grad_norm": 3.344879388809204, + "learning_rate": 0.0001628325417969806, + "loss": 0.3599, + "step": 14440 + }, + { + "epoch": 0.5579365998687208, + "grad_norm": 2.6200602054595947, + "learning_rate": 0.0001628068007773788, + "loss": 0.5152, + "step": 14450 + }, + { + "epoch": 0.5583227151627476, + "grad_norm": 1.05362868309021, + "learning_rate": 0.000162781059757777, + "loss": 0.4454, + "step": 14460 + }, + { + "epoch": 0.5587088304567744, + "grad_norm": 2.557406187057495, + "learning_rate": 0.00016275531873817523, + "loss": 0.3779, + "step": 14470 + }, + { + "epoch": 0.5590949457508012, + "grad_norm": 0.8478209376335144, + "learning_rate": 0.00016272957771857344, + "loss": 0.4289, + "step": 14480 + }, + { + "epoch": 0.559481061044828, + "grad_norm": 3.543574094772339, + "learning_rate": 0.00016270383669897165, + "loss": 0.37, + "step": 14490 + }, + { + "epoch": 0.5598671763388547, + "grad_norm": 0.21068768203258514, + "learning_rate": 0.00016267809567936987, + "loss": 0.2602, + "step": 14500 + }, + { + "epoch": 0.5602532916328816, + "grad_norm": 1.1703628301620483, + "learning_rate": 0.00016265235465976808, + "loss": 0.3684, + "step": 14510 + }, + { + "epoch": 0.5606394069269084, + "grad_norm": 1.4498575925827026, + "learning_rate": 0.0001626266136401663, + "loss": 0.4089, + "step": 14520 + }, + { + "epoch": 0.5610255222209352, + "grad_norm": 1.617297887802124, + "learning_rate": 0.0001626008726205645, + "loss": 0.2759, + "step": 14530 + }, + { + "epoch": 0.5614116375149619, + "grad_norm": 0.8708978891372681, + "learning_rate": 0.00016257513160096272, + "loss": 0.2523, + "step": 14540 + }, + { + "epoch": 0.5617977528089888, + "grad_norm": 1.098026156425476, + "learning_rate": 0.00016254939058136093, + "loss": 0.3507, + "step": 14550 + }, + { + "epoch": 0.5621838681030156, + "grad_norm": 3.0867936611175537, + "learning_rate": 0.00016252364956175915, + "loss": 0.3563, + "step": 14560 + }, + { + "epoch": 0.5625699833970423, + "grad_norm": 1.4829964637756348, + "learning_rate": 0.00016249790854215736, + "loss": 0.3586, + "step": 14570 + }, + { + "epoch": 0.5629560986910691, + "grad_norm": 4.029405117034912, + "learning_rate": 0.00016247216752255557, + 
"loss": 0.5198, + "step": 14580 + }, + { + "epoch": 0.563342213985096, + "grad_norm": 2.5473573207855225, + "learning_rate": 0.00016244642650295379, + "loss": 0.3818, + "step": 14590 + }, + { + "epoch": 0.5637283292791228, + "grad_norm": 1.387529730796814, + "learning_rate": 0.000162420685483352, + "loss": 0.3453, + "step": 14600 + }, + { + "epoch": 0.5641144445731495, + "grad_norm": 3.2525246143341064, + "learning_rate": 0.0001623949444637502, + "loss": 0.6096, + "step": 14610 + }, + { + "epoch": 0.5645005598671763, + "grad_norm": 1.23868989944458, + "learning_rate": 0.00016236920344414843, + "loss": 0.2785, + "step": 14620 + }, + { + "epoch": 0.5648866751612032, + "grad_norm": 1.763416051864624, + "learning_rate": 0.00016234346242454667, + "loss": 0.2313, + "step": 14630 + }, + { + "epoch": 0.5652727904552299, + "grad_norm": 2.6030027866363525, + "learning_rate": 0.00016231772140494485, + "loss": 0.5002, + "step": 14640 + }, + { + "epoch": 0.5656589057492567, + "grad_norm": 4.345195770263672, + "learning_rate": 0.00016229198038534307, + "loss": 0.3433, + "step": 14650 + }, + { + "epoch": 0.5660450210432835, + "grad_norm": 1.8660558462142944, + "learning_rate": 0.00016226623936574128, + "loss": 0.325, + "step": 14660 + }, + { + "epoch": 0.5664311363373103, + "grad_norm": 2.504354953765869, + "learning_rate": 0.0001622404983461395, + "loss": 0.3013, + "step": 14670 + }, + { + "epoch": 0.5668172516313371, + "grad_norm": 1.715135097503662, + "learning_rate": 0.0001622147573265377, + "loss": 0.2396, + "step": 14680 + }, + { + "epoch": 0.5672033669253639, + "grad_norm": 0.5195931792259216, + "learning_rate": 0.00016218901630693592, + "loss": 0.4992, + "step": 14690 + }, + { + "epoch": 0.5675894822193908, + "grad_norm": 1.076095461845398, + "learning_rate": 0.00016216327528733416, + "loss": 0.4488, + "step": 14700 + }, + { + "epoch": 0.5679755975134175, + "grad_norm": 0.42350637912750244, + "learning_rate": 0.00016213753426773235, + "loss": 0.2798, + "step": 14710 + }, + { + "epoch": 0.5683617128074443, + "grad_norm": 2.8514647483825684, + "learning_rate": 0.00016211179324813056, + "loss": 0.3108, + "step": 14720 + }, + { + "epoch": 0.5687478281014711, + "grad_norm": 1.4496532678604126, + "learning_rate": 0.00016208605222852877, + "loss": 0.4197, + "step": 14730 + }, + { + "epoch": 0.5691339433954979, + "grad_norm": 2.513998031616211, + "learning_rate": 0.00016206031120892699, + "loss": 0.4931, + "step": 14740 + }, + { + "epoch": 0.5695200586895247, + "grad_norm": 1.5905802249908447, + "learning_rate": 0.0001620345701893252, + "loss": 0.2175, + "step": 14750 + }, + { + "epoch": 0.5699061739835515, + "grad_norm": 0.4657856523990631, + "learning_rate": 0.0001620088291697234, + "loss": 0.4753, + "step": 14760 + }, + { + "epoch": 0.5702922892775782, + "grad_norm": 1.4188594818115234, + "learning_rate": 0.00016198308815012165, + "loss": 0.4849, + "step": 14770 + }, + { + "epoch": 0.5706784045716051, + "grad_norm": 0.6742203235626221, + "learning_rate": 0.00016195734713051984, + "loss": 0.3451, + "step": 14780 + }, + { + "epoch": 0.5710645198656319, + "grad_norm": 1.521262526512146, + "learning_rate": 0.00016193160611091805, + "loss": 0.6253, + "step": 14790 + }, + { + "epoch": 0.5714506351596587, + "grad_norm": 0.8657771348953247, + "learning_rate": 0.00016190586509131627, + "loss": 0.3664, + "step": 14800 + }, + { + "epoch": 0.5718367504536854, + "grad_norm": 1.0131505727767944, + "learning_rate": 0.00016188012407171448, + "loss": 0.3395, + "step": 14810 + }, + { + "epoch": 0.5722228657477123, + 
"grad_norm": 0.8506319522857666, + "learning_rate": 0.00016185438305211272, + "loss": 0.2769, + "step": 14820 + }, + { + "epoch": 0.5726089810417391, + "grad_norm": 3.1080141067504883, + "learning_rate": 0.0001618286420325109, + "loss": 0.3185, + "step": 14830 + }, + { + "epoch": 0.5729950963357658, + "grad_norm": 0.8805003762245178, + "learning_rate": 0.00016180290101290915, + "loss": 0.3089, + "step": 14840 + }, + { + "epoch": 0.5733812116297926, + "grad_norm": 3.9470136165618896, + "learning_rate": 0.00016177715999330733, + "loss": 0.4552, + "step": 14850 + }, + { + "epoch": 0.5737673269238195, + "grad_norm": 1.10677969455719, + "learning_rate": 0.00016175141897370557, + "loss": 0.4624, + "step": 14860 + }, + { + "epoch": 0.5741534422178463, + "grad_norm": 1.185539960861206, + "learning_rate": 0.00016172567795410376, + "loss": 0.3144, + "step": 14870 + }, + { + "epoch": 0.574539557511873, + "grad_norm": 2.484386920928955, + "learning_rate": 0.00016169993693450197, + "loss": 0.3409, + "step": 14880 + }, + { + "epoch": 0.5749256728058998, + "grad_norm": 2.1621437072753906, + "learning_rate": 0.0001616741959149002, + "loss": 0.4421, + "step": 14890 + }, + { + "epoch": 0.5753117880999267, + "grad_norm": 1.3359025716781616, + "learning_rate": 0.0001616484548952984, + "loss": 0.2826, + "step": 14900 + }, + { + "epoch": 0.5756979033939534, + "grad_norm": 2.828157901763916, + "learning_rate": 0.00016162271387569664, + "loss": 0.4615, + "step": 14910 + }, + { + "epoch": 0.5760840186879802, + "grad_norm": 2.0543019771575928, + "learning_rate": 0.00016159697285609483, + "loss": 0.4307, + "step": 14920 + }, + { + "epoch": 0.5764701339820071, + "grad_norm": 0.3085225522518158, + "learning_rate": 0.00016157123183649307, + "loss": 0.317, + "step": 14930 + }, + { + "epoch": 0.5768562492760339, + "grad_norm": 1.459349274635315, + "learning_rate": 0.00016154549081689125, + "loss": 0.3928, + "step": 14940 + }, + { + "epoch": 0.5772423645700606, + "grad_norm": 0.6684612035751343, + "learning_rate": 0.00016151974979728947, + "loss": 0.4129, + "step": 14950 + }, + { + "epoch": 0.5776284798640874, + "grad_norm": 2.428311586380005, + "learning_rate": 0.0001614940087776877, + "loss": 0.4163, + "step": 14960 + }, + { + "epoch": 0.5780145951581143, + "grad_norm": 1.8885403871536255, + "learning_rate": 0.0001614682677580859, + "loss": 0.4311, + "step": 14970 + }, + { + "epoch": 0.578400710452141, + "grad_norm": 3.9598031044006348, + "learning_rate": 0.00016144252673848413, + "loss": 0.3103, + "step": 14980 + }, + { + "epoch": 0.5787868257461678, + "grad_norm": 1.872383713722229, + "learning_rate": 0.00016141678571888232, + "loss": 0.3592, + "step": 14990 + }, + { + "epoch": 0.5791729410401946, + "grad_norm": 1.023526668548584, + "learning_rate": 0.00016139104469928056, + "loss": 0.4185, + "step": 15000 + }, + { + "epoch": 0.5795590563342214, + "grad_norm": 1.5721429586410522, + "learning_rate": 0.00016136530367967877, + "loss": 0.4017, + "step": 15010 + }, + { + "epoch": 0.5799451716282482, + "grad_norm": 3.502350091934204, + "learning_rate": 0.00016133956266007696, + "loss": 0.397, + "step": 15020 + }, + { + "epoch": 0.580331286922275, + "grad_norm": 2.415985345840454, + "learning_rate": 0.0001613138216404752, + "loss": 0.4041, + "step": 15030 + }, + { + "epoch": 0.5807174022163017, + "grad_norm": 0.5441868901252747, + "learning_rate": 0.00016128808062087339, + "loss": 0.2395, + "step": 15040 + }, + { + "epoch": 0.5811035175103286, + "grad_norm": 2.453216552734375, + "learning_rate": 0.00016126233960127163, + 
"loss": 0.1586, + "step": 15050 + }, + { + "epoch": 0.5814896328043554, + "grad_norm": 3.108646869659424, + "learning_rate": 0.0001612365985816698, + "loss": 0.3996, + "step": 15060 + }, + { + "epoch": 0.5818757480983822, + "grad_norm": 0.7707905173301697, + "learning_rate": 0.00016121085756206805, + "loss": 0.1756, + "step": 15070 + }, + { + "epoch": 0.5822618633924089, + "grad_norm": 0.42857447266578674, + "learning_rate": 0.00016118511654246627, + "loss": 0.258, + "step": 15080 + }, + { + "epoch": 0.5826479786864358, + "grad_norm": 0.7148373126983643, + "learning_rate": 0.00016115937552286445, + "loss": 0.3755, + "step": 15090 + }, + { + "epoch": 0.5830340939804626, + "grad_norm": 0.04789021611213684, + "learning_rate": 0.0001611336345032627, + "loss": 0.2087, + "step": 15100 + }, + { + "epoch": 0.5834202092744893, + "grad_norm": 5.012516975402832, + "learning_rate": 0.00016110789348366088, + "loss": 0.5406, + "step": 15110 + }, + { + "epoch": 0.5838063245685161, + "grad_norm": 1.4139299392700195, + "learning_rate": 0.00016108215246405912, + "loss": 0.407, + "step": 15120 + }, + { + "epoch": 0.584192439862543, + "grad_norm": 0.8637074828147888, + "learning_rate": 0.00016105641144445733, + "loss": 0.2987, + "step": 15130 + }, + { + "epoch": 0.5845785551565698, + "grad_norm": 0.9556403160095215, + "learning_rate": 0.00016103067042485555, + "loss": 0.4319, + "step": 15140 + }, + { + "epoch": 0.5849646704505965, + "grad_norm": 2.071455955505371, + "learning_rate": 0.00016100492940525376, + "loss": 0.4824, + "step": 15150 + }, + { + "epoch": 0.5853507857446233, + "grad_norm": 3.8130764961242676, + "learning_rate": 0.00016097918838565195, + "loss": 0.4749, + "step": 15160 + }, + { + "epoch": 0.5857369010386502, + "grad_norm": 1.290738582611084, + "learning_rate": 0.00016095344736605019, + "loss": 0.4486, + "step": 15170 + }, + { + "epoch": 0.5861230163326769, + "grad_norm": 0.9964671730995178, + "learning_rate": 0.00016092770634644837, + "loss": 0.1514, + "step": 15180 + }, + { + "epoch": 0.5865091316267037, + "grad_norm": 0.5267524123191833, + "learning_rate": 0.0001609019653268466, + "loss": 0.2298, + "step": 15190 + }, + { + "epoch": 0.5868952469207306, + "grad_norm": 2.028960704803467, + "learning_rate": 0.00016087622430724483, + "loss": 0.2925, + "step": 15200 + }, + { + "epoch": 0.5872813622147574, + "grad_norm": 0.8423904776573181, + "learning_rate": 0.00016085048328764304, + "loss": 0.4221, + "step": 15210 + }, + { + "epoch": 0.5876674775087841, + "grad_norm": 1.9663047790527344, + "learning_rate": 0.00016082474226804125, + "loss": 0.3595, + "step": 15220 + }, + { + "epoch": 0.5880535928028109, + "grad_norm": 1.2221906185150146, + "learning_rate": 0.00016079900124843944, + "loss": 0.3174, + "step": 15230 + }, + { + "epoch": 0.5884397080968378, + "grad_norm": 2.138437032699585, + "learning_rate": 0.00016077326022883768, + "loss": 0.2993, + "step": 15240 + }, + { + "epoch": 0.5888258233908645, + "grad_norm": 1.8036224842071533, + "learning_rate": 0.00016074751920923586, + "loss": 0.3897, + "step": 15250 + }, + { + "epoch": 0.5892119386848913, + "grad_norm": 2.3102879524230957, + "learning_rate": 0.0001607217781896341, + "loss": 0.3713, + "step": 15260 + }, + { + "epoch": 0.5895980539789181, + "grad_norm": 1.40048348903656, + "learning_rate": 0.00016069603717003232, + "loss": 0.2701, + "step": 15270 + }, + { + "epoch": 0.589984169272945, + "grad_norm": 1.0581787824630737, + "learning_rate": 0.00016067029615043053, + "loss": 0.2529, + "step": 15280 + }, + { + "epoch": 0.5903702845669717, 
+ "grad_norm": 0.668211042881012, + "learning_rate": 0.00016064455513082875, + "loss": 0.221, + "step": 15290 + }, + { + "epoch": 0.5907563998609985, + "grad_norm": 0.7950372099876404, + "learning_rate": 0.00016061881411122696, + "loss": 0.2405, + "step": 15300 + }, + { + "epoch": 0.5911425151550252, + "grad_norm": 1.8531723022460938, + "learning_rate": 0.00016059307309162517, + "loss": 0.3423, + "step": 15310 + }, + { + "epoch": 0.5915286304490521, + "grad_norm": 0.2071121335029602, + "learning_rate": 0.00016056733207202339, + "loss": 0.2923, + "step": 15320 + }, + { + "epoch": 0.5919147457430789, + "grad_norm": 2.4298369884490967, + "learning_rate": 0.0001605415910524216, + "loss": 0.531, + "step": 15330 + }, + { + "epoch": 0.5923008610371057, + "grad_norm": 3.2297933101654053, + "learning_rate": 0.0001605158500328198, + "loss": 0.2563, + "step": 15340 + }, + { + "epoch": 0.5926869763311324, + "grad_norm": 1.533225178718567, + "learning_rate": 0.00016049010901321803, + "loss": 0.2712, + "step": 15350 + }, + { + "epoch": 0.5930730916251593, + "grad_norm": 3.6168954372406006, + "learning_rate": 0.00016046436799361624, + "loss": 0.6428, + "step": 15360 + }, + { + "epoch": 0.5934592069191861, + "grad_norm": 0.8912101984024048, + "learning_rate": 0.00016043862697401445, + "loss": 0.2882, + "step": 15370 + }, + { + "epoch": 0.5938453222132128, + "grad_norm": 0.6782923936843872, + "learning_rate": 0.00016041288595441267, + "loss": 0.3451, + "step": 15380 + }, + { + "epoch": 0.5942314375072396, + "grad_norm": 2.7575254440307617, + "learning_rate": 0.00016038714493481088, + "loss": 0.272, + "step": 15390 + }, + { + "epoch": 0.5946175528012665, + "grad_norm": 1.8348017930984497, + "learning_rate": 0.0001603614039152091, + "loss": 0.286, + "step": 15400 + }, + { + "epoch": 0.5950036680952933, + "grad_norm": 3.1459157466888428, + "learning_rate": 0.0001603356628956073, + "loss": 0.2986, + "step": 15410 + }, + { + "epoch": 0.59538978338932, + "grad_norm": 2.0769810676574707, + "learning_rate": 0.00016030992187600552, + "loss": 0.5512, + "step": 15420 + }, + { + "epoch": 0.5957758986833468, + "grad_norm": 0.5503840446472168, + "learning_rate": 0.00016028418085640373, + "loss": 0.4117, + "step": 15430 + }, + { + "epoch": 0.5961620139773737, + "grad_norm": 1.9759409427642822, + "learning_rate": 0.00016025843983680194, + "loss": 0.3619, + "step": 15440 + }, + { + "epoch": 0.5965481292714004, + "grad_norm": 2.2451424598693848, + "learning_rate": 0.00016023269881720016, + "loss": 0.2798, + "step": 15450 + }, + { + "epoch": 0.5969342445654272, + "grad_norm": 1.8537431955337524, + "learning_rate": 0.00016020695779759837, + "loss": 0.3739, + "step": 15460 + }, + { + "epoch": 0.5973203598594541, + "grad_norm": 1.7667044401168823, + "learning_rate": 0.00016018121677799658, + "loss": 0.3694, + "step": 15470 + }, + { + "epoch": 0.5977064751534809, + "grad_norm": 0.8955661654472351, + "learning_rate": 0.0001601554757583948, + "loss": 0.2036, + "step": 15480 + }, + { + "epoch": 0.5980925904475076, + "grad_norm": 0.9526143074035645, + "learning_rate": 0.000160129734738793, + "loss": 0.3728, + "step": 15490 + }, + { + "epoch": 0.5984787057415344, + "grad_norm": 0.5816594958305359, + "learning_rate": 0.00016010399371919122, + "loss": 0.3263, + "step": 15500 + }, + { + "epoch": 0.5988648210355613, + "grad_norm": 0.6841669678688049, + "learning_rate": 0.00016007825269958944, + "loss": 0.3252, + "step": 15510 + }, + { + "epoch": 0.599250936329588, + "grad_norm": 0.9375134706497192, + "learning_rate": 
0.00016005251167998765, + "loss": 0.2207, + "step": 15520 + }, + { + "epoch": 0.5996370516236148, + "grad_norm": 0.39535248279571533, + "learning_rate": 0.00016002677066038586, + "loss": 0.3566, + "step": 15530 + }, + { + "epoch": 0.6000231669176416, + "grad_norm": 0.5440202951431274, + "learning_rate": 0.00016000102964078408, + "loss": 0.2886, + "step": 15540 + }, + { + "epoch": 0.6004092822116684, + "grad_norm": 0.45111024379730225, + "learning_rate": 0.0001599752886211823, + "loss": 0.2697, + "step": 15550 + }, + { + "epoch": 0.6007953975056952, + "grad_norm": 2.372063398361206, + "learning_rate": 0.0001599495476015805, + "loss": 0.4716, + "step": 15560 + }, + { + "epoch": 0.601181512799722, + "grad_norm": 0.5841318368911743, + "learning_rate": 0.00015992380658197872, + "loss": 0.5611, + "step": 15570 + }, + { + "epoch": 0.6015676280937488, + "grad_norm": 0.323010116815567, + "learning_rate": 0.00015989806556237693, + "loss": 0.2733, + "step": 15580 + }, + { + "epoch": 0.6019537433877756, + "grad_norm": 1.4498323202133179, + "learning_rate": 0.00015987232454277514, + "loss": 0.4063, + "step": 15590 + }, + { + "epoch": 0.6023398586818024, + "grad_norm": 0.47180086374282837, + "learning_rate": 0.00015984658352317336, + "loss": 0.3322, + "step": 15600 + }, + { + "epoch": 0.6027259739758292, + "grad_norm": 1.2303547859191895, + "learning_rate": 0.00015982084250357157, + "loss": 0.2985, + "step": 15610 + }, + { + "epoch": 0.6031120892698559, + "grad_norm": 2.5056209564208984, + "learning_rate": 0.00015979510148396978, + "loss": 0.4903, + "step": 15620 + }, + { + "epoch": 0.6034982045638828, + "grad_norm": 1.344814419746399, + "learning_rate": 0.00015976936046436802, + "loss": 0.3806, + "step": 15630 + }, + { + "epoch": 0.6038843198579096, + "grad_norm": 3.2931411266326904, + "learning_rate": 0.0001597436194447662, + "loss": 0.3291, + "step": 15640 + }, + { + "epoch": 0.6042704351519363, + "grad_norm": 0.3108818829059601, + "learning_rate": 0.00015971787842516442, + "loss": 0.2866, + "step": 15650 + }, + { + "epoch": 0.6046565504459631, + "grad_norm": 2.6437489986419678, + "learning_rate": 0.00015969213740556264, + "loss": 0.2853, + "step": 15660 + }, + { + "epoch": 0.60504266573999, + "grad_norm": 0.6996239423751831, + "learning_rate": 0.00015966639638596085, + "loss": 0.2947, + "step": 15670 + }, + { + "epoch": 0.6054287810340168, + "grad_norm": 0.9377492070198059, + "learning_rate": 0.00015964065536635906, + "loss": 0.5502, + "step": 15680 + }, + { + "epoch": 0.6058148963280435, + "grad_norm": 0.3067781627178192, + "learning_rate": 0.00015961491434675728, + "loss": 0.2916, + "step": 15690 + }, + { + "epoch": 0.6062010116220703, + "grad_norm": 1.6191383600234985, + "learning_rate": 0.00015958917332715552, + "loss": 0.2536, + "step": 15700 + }, + { + "epoch": 0.6065871269160972, + "grad_norm": 0.5139639973640442, + "learning_rate": 0.0001595634323075537, + "loss": 0.2809, + "step": 15710 + }, + { + "epoch": 0.6069732422101239, + "grad_norm": 1.6476198434829712, + "learning_rate": 0.00015953769128795192, + "loss": 0.2859, + "step": 15720 + }, + { + "epoch": 0.6073593575041507, + "grad_norm": 3.895970106124878, + "learning_rate": 0.00015951195026835013, + "loss": 0.5254, + "step": 15730 + }, + { + "epoch": 0.6077454727981776, + "grad_norm": 1.1022089719772339, + "learning_rate": 0.00015948620924874834, + "loss": 0.3254, + "step": 15740 + }, + { + "epoch": 0.6081315880922044, + "grad_norm": 1.3811163902282715, + "learning_rate": 0.00015946046822914656, + "loss": 0.3259, + "step": 15750 + }, + { 
+ "epoch": 0.6085177033862311, + "grad_norm": 1.0810881853103638, + "learning_rate": 0.00015943472720954477, + "loss": 0.3689, + "step": 15760 + }, + { + "epoch": 0.6089038186802579, + "grad_norm": 2.1972954273223877, + "learning_rate": 0.000159408986189943, + "loss": 0.3255, + "step": 15770 + }, + { + "epoch": 0.6092899339742848, + "grad_norm": 3.945215940475464, + "learning_rate": 0.0001593832451703412, + "loss": 0.4317, + "step": 15780 + }, + { + "epoch": 0.6096760492683115, + "grad_norm": 0.5246737599372864, + "learning_rate": 0.0001593575041507394, + "loss": 0.5895, + "step": 15790 + }, + { + "epoch": 0.6100621645623383, + "grad_norm": 0.0722908228635788, + "learning_rate": 0.00015933176313113762, + "loss": 0.2078, + "step": 15800 + }, + { + "epoch": 0.6104482798563651, + "grad_norm": 1.888529658317566, + "learning_rate": 0.00015930602211153584, + "loss": 0.3629, + "step": 15810 + }, + { + "epoch": 0.610834395150392, + "grad_norm": 0.7190912365913391, + "learning_rate": 0.00015928028109193408, + "loss": 0.4715, + "step": 15820 + }, + { + "epoch": 0.6112205104444187, + "grad_norm": 1.1624583005905151, + "learning_rate": 0.00015925454007233226, + "loss": 0.3678, + "step": 15830 + }, + { + "epoch": 0.6116066257384455, + "grad_norm": 1.4570399522781372, + "learning_rate": 0.0001592287990527305, + "loss": 0.3008, + "step": 15840 + }, + { + "epoch": 0.6119927410324723, + "grad_norm": 0.45386505126953125, + "learning_rate": 0.0001592030580331287, + "loss": 0.2919, + "step": 15850 + }, + { + "epoch": 0.6123788563264991, + "grad_norm": 1.9031087160110474, + "learning_rate": 0.0001591773170135269, + "loss": 0.4336, + "step": 15860 + }, + { + "epoch": 0.6127649716205259, + "grad_norm": 0.9156181216239929, + "learning_rate": 0.00015915157599392512, + "loss": 0.2451, + "step": 15870 + }, + { + "epoch": 0.6131510869145527, + "grad_norm": 1.8031158447265625, + "learning_rate": 0.00015912583497432333, + "loss": 0.259, + "step": 15880 + }, + { + "epoch": 0.6135372022085794, + "grad_norm": 1.4521692991256714, + "learning_rate": 0.00015910009395472157, + "loss": 0.279, + "step": 15890 + }, + { + "epoch": 0.6139233175026063, + "grad_norm": 1.5064165592193604, + "learning_rate": 0.00015907435293511976, + "loss": 0.34, + "step": 15900 + }, + { + "epoch": 0.6143094327966331, + "grad_norm": 0.588637113571167, + "learning_rate": 0.000159048611915518, + "loss": 0.4886, + "step": 15910 + }, + { + "epoch": 0.6146955480906598, + "grad_norm": 0.3159797191619873, + "learning_rate": 0.00015902287089591618, + "loss": 0.3265, + "step": 15920 + }, + { + "epoch": 0.6150816633846866, + "grad_norm": 3.3988165855407715, + "learning_rate": 0.0001589971298763144, + "loss": 0.4917, + "step": 15930 + }, + { + "epoch": 0.6154677786787135, + "grad_norm": 0.5139709711074829, + "learning_rate": 0.00015897138885671264, + "loss": 0.2175, + "step": 15940 + }, + { + "epoch": 0.6158538939727403, + "grad_norm": 3.6877944469451904, + "learning_rate": 0.00015894564783711082, + "loss": 0.4674, + "step": 15950 + }, + { + "epoch": 0.616240009266767, + "grad_norm": 1.6468040943145752, + "learning_rate": 0.00015891990681750906, + "loss": 0.4375, + "step": 15960 + }, + { + "epoch": 0.6166261245607938, + "grad_norm": 0.47542962431907654, + "learning_rate": 0.00015889416579790725, + "loss": 0.3593, + "step": 15970 + }, + { + "epoch": 0.6170122398548207, + "grad_norm": 2.210597038269043, + "learning_rate": 0.0001588684247783055, + "loss": 0.3356, + "step": 15980 + }, + { + "epoch": 0.6173983551488474, + "grad_norm": 2.0030908584594727, + 
"learning_rate": 0.00015884268375870368, + "loss": 0.3367, + "step": 15990 + }, + { + "epoch": 0.6177844704428742, + "grad_norm": 3.438887119293213, + "learning_rate": 0.0001588169427391019, + "loss": 0.4583, + "step": 16000 + }, + { + "epoch": 0.6181705857369011, + "grad_norm": 2.88147234916687, + "learning_rate": 0.00015879120171950013, + "loss": 0.3837, + "step": 16010 + }, + { + "epoch": 0.6185567010309279, + "grad_norm": 0.9327366948127747, + "learning_rate": 0.00015876546069989832, + "loss": 0.236, + "step": 16020 + }, + { + "epoch": 0.6189428163249546, + "grad_norm": 2.205355405807495, + "learning_rate": 0.00015873971968029656, + "loss": 0.2957, + "step": 16030 + }, + { + "epoch": 0.6193289316189814, + "grad_norm": 2.3861300945281982, + "learning_rate": 0.00015871397866069474, + "loss": 0.427, + "step": 16040 + }, + { + "epoch": 0.6197150469130083, + "grad_norm": 0.3276061415672302, + "learning_rate": 0.00015868823764109298, + "loss": 0.2301, + "step": 16050 + }, + { + "epoch": 0.620101162207035, + "grad_norm": 0.6325292587280273, + "learning_rate": 0.00015866249662149117, + "loss": 0.3915, + "step": 16060 + }, + { + "epoch": 0.6204872775010618, + "grad_norm": 1.1546003818511963, + "learning_rate": 0.0001586367556018894, + "loss": 0.4081, + "step": 16070 + }, + { + "epoch": 0.6208733927950886, + "grad_norm": 1.7260868549346924, + "learning_rate": 0.00015861101458228762, + "loss": 0.4642, + "step": 16080 + }, + { + "epoch": 0.6212595080891155, + "grad_norm": 0.30561816692352295, + "learning_rate": 0.0001585852735626858, + "loss": 0.2065, + "step": 16090 + }, + { + "epoch": 0.6216456233831422, + "grad_norm": 2.440007448196411, + "learning_rate": 0.00015855953254308405, + "loss": 0.3851, + "step": 16100 + }, + { + "epoch": 0.622031738677169, + "grad_norm": 4.06764554977417, + "learning_rate": 0.00015853379152348224, + "loss": 0.3796, + "step": 16110 + }, + { + "epoch": 0.6224178539711958, + "grad_norm": 4.970936298370361, + "learning_rate": 0.00015850805050388048, + "loss": 0.3393, + "step": 16120 + }, + { + "epoch": 0.6228039692652226, + "grad_norm": 2.0721356868743896, + "learning_rate": 0.0001584823094842787, + "loss": 0.2991, + "step": 16130 + }, + { + "epoch": 0.6231900845592494, + "grad_norm": 1.8130602836608887, + "learning_rate": 0.0001584565684646769, + "loss": 0.3132, + "step": 16140 + }, + { + "epoch": 0.6235761998532762, + "grad_norm": 1.1093038320541382, + "learning_rate": 0.00015843082744507512, + "loss": 0.3144, + "step": 16150 + }, + { + "epoch": 0.6239623151473029, + "grad_norm": 2.227937936782837, + "learning_rate": 0.0001584050864254733, + "loss": 0.4955, + "step": 16160 + }, + { + "epoch": 0.6243484304413298, + "grad_norm": 1.2541782855987549, + "learning_rate": 0.00015837934540587154, + "loss": 0.225, + "step": 16170 + }, + { + "epoch": 0.6247345457353566, + "grad_norm": 1.8113441467285156, + "learning_rate": 0.00015835360438626973, + "loss": 0.5393, + "step": 16180 + }, + { + "epoch": 0.6251206610293834, + "grad_norm": 2.198061466217041, + "learning_rate": 0.00015832786336666797, + "loss": 0.2225, + "step": 16190 + }, + { + "epoch": 0.6255067763234101, + "grad_norm": 2.4241714477539062, + "learning_rate": 0.00015830212234706618, + "loss": 0.3294, + "step": 16200 + }, + { + "epoch": 0.625892891617437, + "grad_norm": 3.0632903575897217, + "learning_rate": 0.0001582763813274644, + "loss": 0.3776, + "step": 16210 + }, + { + "epoch": 0.6262790069114638, + "grad_norm": 1.9433149099349976, + "learning_rate": 0.0001582506403078626, + "loss": 0.2964, + "step": 16220 + }, + 
{ + "epoch": 0.6266651222054905, + "grad_norm": 1.6328935623168945, + "learning_rate": 0.0001582248992882608, + "loss": 0.2169, + "step": 16230 + }, + { + "epoch": 0.6270512374995174, + "grad_norm": 0.926477313041687, + "learning_rate": 0.00015819915826865904, + "loss": 0.271, + "step": 16240 + }, + { + "epoch": 0.6274373527935442, + "grad_norm": 3.4526686668395996, + "learning_rate": 0.00015817341724905722, + "loss": 0.1836, + "step": 16250 + }, + { + "epoch": 0.627823468087571, + "grad_norm": 0.8149943947792053, + "learning_rate": 0.00015814767622945546, + "loss": 0.2494, + "step": 16260 + }, + { + "epoch": 0.6282095833815977, + "grad_norm": 0.8609616756439209, + "learning_rate": 0.00015812193520985368, + "loss": 0.279, + "step": 16270 + }, + { + "epoch": 0.6285956986756246, + "grad_norm": 0.9432594180107117, + "learning_rate": 0.0001580961941902519, + "loss": 0.3485, + "step": 16280 + }, + { + "epoch": 0.6289818139696514, + "grad_norm": 2.8844246864318848, + "learning_rate": 0.0001580704531706501, + "loss": 0.6163, + "step": 16290 + }, + { + "epoch": 0.6293679292636781, + "grad_norm": 3.4325804710388184, + "learning_rate": 0.0001580447121510483, + "loss": 0.3392, + "step": 16300 + }, + { + "epoch": 0.6297540445577049, + "grad_norm": 0.5320155620574951, + "learning_rate": 0.00015801897113144653, + "loss": 0.4664, + "step": 16310 + }, + { + "epoch": 0.6301401598517318, + "grad_norm": 1.075914740562439, + "learning_rate": 0.00015799323011184474, + "loss": 0.3023, + "step": 16320 + }, + { + "epoch": 0.6305262751457585, + "grad_norm": 1.3115136623382568, + "learning_rate": 0.00015796748909224296, + "loss": 0.1741, + "step": 16330 + }, + { + "epoch": 0.6309123904397853, + "grad_norm": 0.18413056433200836, + "learning_rate": 0.00015794174807264117, + "loss": 0.1966, + "step": 16340 + }, + { + "epoch": 0.6312985057338121, + "grad_norm": 1.8707069158554077, + "learning_rate": 0.00015791600705303938, + "loss": 0.3175, + "step": 16350 + }, + { + "epoch": 0.631684621027839, + "grad_norm": 2.2436699867248535, + "learning_rate": 0.0001578902660334376, + "loss": 0.2619, + "step": 16360 + }, + { + "epoch": 0.6320707363218657, + "grad_norm": 2.6100945472717285, + "learning_rate": 0.00015786452501383578, + "loss": 0.2874, + "step": 16370 + }, + { + "epoch": 0.6324568516158925, + "grad_norm": 1.455538034439087, + "learning_rate": 0.00015783878399423402, + "loss": 0.3956, + "step": 16380 + }, + { + "epoch": 0.6328429669099193, + "grad_norm": 1.1950361728668213, + "learning_rate": 0.00015781304297463224, + "loss": 0.3406, + "step": 16390 + }, + { + "epoch": 0.6332290822039461, + "grad_norm": 0.6905789971351624, + "learning_rate": 0.00015778730195503045, + "loss": 0.2788, + "step": 16400 + }, + { + "epoch": 0.6336151974979729, + "grad_norm": 1.8803350925445557, + "learning_rate": 0.00015776156093542866, + "loss": 0.5509, + "step": 16410 + }, + { + "epoch": 0.6340013127919997, + "grad_norm": 4.088913440704346, + "learning_rate": 0.00015773581991582688, + "loss": 0.5238, + "step": 16420 + }, + { + "epoch": 0.6343874280860264, + "grad_norm": 2.9464988708496094, + "learning_rate": 0.0001577100788962251, + "loss": 0.4721, + "step": 16430 + }, + { + "epoch": 0.6347735433800533, + "grad_norm": 2.005481719970703, + "learning_rate": 0.0001576843378766233, + "loss": 0.323, + "step": 16440 + }, + { + "epoch": 0.6351596586740801, + "grad_norm": 0.1693512350320816, + "learning_rate": 0.00015765859685702152, + "loss": 0.3459, + "step": 16450 + }, + { + "epoch": 0.6355457739681069, + "grad_norm": 1.6552183628082275, + 
"learning_rate": 0.00015763285583741973, + "loss": 0.4299, + "step": 16460 + }, + { + "epoch": 0.6359318892621336, + "grad_norm": 0.8498923182487488, + "learning_rate": 0.00015760711481781794, + "loss": 0.3665, + "step": 16470 + }, + { + "epoch": 0.6363180045561605, + "grad_norm": 1.098840594291687, + "learning_rate": 0.00015758137379821616, + "loss": 0.318, + "step": 16480 + }, + { + "epoch": 0.6367041198501873, + "grad_norm": 2.69606876373291, + "learning_rate": 0.00015755563277861437, + "loss": 0.3566, + "step": 16490 + }, + { + "epoch": 0.637090235144214, + "grad_norm": 1.4099249839782715, + "learning_rate": 0.00015752989175901258, + "loss": 0.2658, + "step": 16500 + }, + { + "epoch": 0.6374763504382409, + "grad_norm": 0.10336513817310333, + "learning_rate": 0.0001575041507394108, + "loss": 0.4608, + "step": 16510 + }, + { + "epoch": 0.6378624657322677, + "grad_norm": 2.224609136581421, + "learning_rate": 0.000157478409719809, + "loss": 0.2875, + "step": 16520 + }, + { + "epoch": 0.6382485810262944, + "grad_norm": 1.3182893991470337, + "learning_rate": 0.00015745266870020722, + "loss": 0.2972, + "step": 16530 + }, + { + "epoch": 0.6386346963203212, + "grad_norm": 0.8028392195701599, + "learning_rate": 0.00015742692768060544, + "loss": 0.3553, + "step": 16540 + }, + { + "epoch": 0.6390208116143481, + "grad_norm": 2.6714046001434326, + "learning_rate": 0.00015740118666100365, + "loss": 0.3965, + "step": 16550 + }, + { + "epoch": 0.6394069269083749, + "grad_norm": 0.6173273921012878, + "learning_rate": 0.00015737544564140186, + "loss": 0.4278, + "step": 16560 + }, + { + "epoch": 0.6397930422024016, + "grad_norm": 0.9547831416130066, + "learning_rate": 0.00015734970462180008, + "loss": 0.3854, + "step": 16570 + }, + { + "epoch": 0.6401791574964284, + "grad_norm": 1.1336010694503784, + "learning_rate": 0.0001573239636021983, + "loss": 0.3505, + "step": 16580 + }, + { + "epoch": 0.6405652727904553, + "grad_norm": 4.911902904510498, + "learning_rate": 0.0001572982225825965, + "loss": 0.6624, + "step": 16590 + }, + { + "epoch": 0.640951388084482, + "grad_norm": 2.472303628921509, + "learning_rate": 0.00015727248156299472, + "loss": 0.4479, + "step": 16600 + }, + { + "epoch": 0.6413375033785088, + "grad_norm": 1.7077019214630127, + "learning_rate": 0.00015724674054339293, + "loss": 0.3985, + "step": 16610 + }, + { + "epoch": 0.6417236186725356, + "grad_norm": 4.096541404724121, + "learning_rate": 0.00015722099952379114, + "loss": 0.4763, + "step": 16620 + }, + { + "epoch": 0.6421097339665625, + "grad_norm": 1.5275769233703613, + "learning_rate": 0.00015719525850418936, + "loss": 0.3792, + "step": 16630 + }, + { + "epoch": 0.6424958492605892, + "grad_norm": 0.33548179268836975, + "learning_rate": 0.00015716951748458757, + "loss": 0.3276, + "step": 16640 + }, + { + "epoch": 0.642881964554616, + "grad_norm": 4.142831325531006, + "learning_rate": 0.00015714377646498578, + "loss": 0.4906, + "step": 16650 + }, + { + "epoch": 0.6432680798486428, + "grad_norm": 1.5129270553588867, + "learning_rate": 0.000157118035445384, + "loss": 0.3077, + "step": 16660 + }, + { + "epoch": 0.6436541951426696, + "grad_norm": 2.0287647247314453, + "learning_rate": 0.0001570922944257822, + "loss": 0.43, + "step": 16670 + }, + { + "epoch": 0.6440403104366964, + "grad_norm": 2.1278579235076904, + "learning_rate": 0.00015706655340618042, + "loss": 0.3822, + "step": 16680 + }, + { + "epoch": 0.6444264257307232, + "grad_norm": 0.7621383666992188, + "learning_rate": 0.00015704081238657864, + "loss": 0.2556, + "step": 16690 + 
}, + { + "epoch": 0.6448125410247499, + "grad_norm": 1.888422966003418, + "learning_rate": 0.00015701507136697685, + "loss": 0.4687, + "step": 16700 + }, + { + "epoch": 0.6451986563187768, + "grad_norm": 2.09405517578125, + "learning_rate": 0.00015698933034737506, + "loss": 0.4447, + "step": 16710 + }, + { + "epoch": 0.6455847716128036, + "grad_norm": 0.8639073967933655, + "learning_rate": 0.00015696358932777328, + "loss": 0.224, + "step": 16720 + }, + { + "epoch": 0.6459708869068304, + "grad_norm": 1.1472411155700684, + "learning_rate": 0.0001569378483081715, + "loss": 0.315, + "step": 16730 + }, + { + "epoch": 0.6463570022008571, + "grad_norm": 2.4987192153930664, + "learning_rate": 0.0001569121072885697, + "loss": 0.3707, + "step": 16740 + }, + { + "epoch": 0.646743117494884, + "grad_norm": 1.661458134651184, + "learning_rate": 0.00015688636626896792, + "loss": 0.4027, + "step": 16750 + }, + { + "epoch": 0.6471292327889108, + "grad_norm": 1.5494720935821533, + "learning_rate": 0.00015686062524936613, + "loss": 0.3737, + "step": 16760 + }, + { + "epoch": 0.6475153480829375, + "grad_norm": 0.3376433253288269, + "learning_rate": 0.00015683488422976437, + "loss": 0.3468, + "step": 16770 + }, + { + "epoch": 0.6479014633769644, + "grad_norm": 0.6496528387069702, + "learning_rate": 0.00015680914321016256, + "loss": 0.2857, + "step": 16780 + }, + { + "epoch": 0.6482875786709912, + "grad_norm": 1.957741618156433, + "learning_rate": 0.00015678340219056077, + "loss": 0.5089, + "step": 16790 + }, + { + "epoch": 0.648673693965018, + "grad_norm": 3.980466842651367, + "learning_rate": 0.00015675766117095898, + "loss": 0.3558, + "step": 16800 + }, + { + "epoch": 0.6490598092590447, + "grad_norm": 3.2516696453094482, + "learning_rate": 0.0001567319201513572, + "loss": 0.6214, + "step": 16810 + }, + { + "epoch": 0.6494459245530716, + "grad_norm": 0.6847260594367981, + "learning_rate": 0.00015670617913175544, + "loss": 0.3681, + "step": 16820 + }, + { + "epoch": 0.6498320398470984, + "grad_norm": 3.0918118953704834, + "learning_rate": 0.00015668043811215362, + "loss": 0.3608, + "step": 16830 + }, + { + "epoch": 0.6502181551411251, + "grad_norm": 1.1295204162597656, + "learning_rate": 0.00015665469709255186, + "loss": 0.3189, + "step": 16840 + }, + { + "epoch": 0.6506042704351519, + "grad_norm": 1.803222417831421, + "learning_rate": 0.00015662895607295005, + "loss": 0.3138, + "step": 16850 + }, + { + "epoch": 0.6509903857291788, + "grad_norm": 3.157122850418091, + "learning_rate": 0.00015660321505334826, + "loss": 0.4342, + "step": 16860 + }, + { + "epoch": 0.6513765010232055, + "grad_norm": 2.6584184169769287, + "learning_rate": 0.00015657747403374648, + "loss": 0.211, + "step": 16870 + }, + { + "epoch": 0.6517626163172323, + "grad_norm": 0.763903021812439, + "learning_rate": 0.0001565517330141447, + "loss": 0.2873, + "step": 16880 + }, + { + "epoch": 0.6521487316112591, + "grad_norm": 2.5033602714538574, + "learning_rate": 0.00015652599199454293, + "loss": 0.4475, + "step": 16890 + }, + { + "epoch": 0.652534846905286, + "grad_norm": 2.088690757751465, + "learning_rate": 0.00015650025097494112, + "loss": 0.3022, + "step": 16900 + }, + { + "epoch": 0.6529209621993127, + "grad_norm": 1.596064567565918, + "learning_rate": 0.00015647450995533936, + "loss": 0.3771, + "step": 16910 + }, + { + "epoch": 0.6533070774933395, + "grad_norm": 1.2658660411834717, + "learning_rate": 0.00015644876893573754, + "loss": 0.4793, + "step": 16920 + }, + { + "epoch": 0.6536931927873663, + "grad_norm": 1.5343844890594482, + 
"learning_rate": 0.00015642302791613576, + "loss": 0.5026, + "step": 16930 + }, + { + "epoch": 0.6540793080813931, + "grad_norm": 0.4736674129962921, + "learning_rate": 0.000156397286896534, + "loss": 0.2269, + "step": 16940 + }, + { + "epoch": 0.6544654233754199, + "grad_norm": 0.05510171130299568, + "learning_rate": 0.00015637154587693218, + "loss": 0.2398, + "step": 16950 + }, + { + "epoch": 0.6548515386694467, + "grad_norm": 0.641941249370575, + "learning_rate": 0.00015634580485733042, + "loss": 0.3862, + "step": 16960 + }, + { + "epoch": 0.6552376539634734, + "grad_norm": 1.5418890714645386, + "learning_rate": 0.0001563200638377286, + "loss": 0.2688, + "step": 16970 + }, + { + "epoch": 0.6556237692575003, + "grad_norm": 3.46284818649292, + "learning_rate": 0.00015629432281812685, + "loss": 0.5199, + "step": 16980 + }, + { + "epoch": 0.6560098845515271, + "grad_norm": 0.3225530683994293, + "learning_rate": 0.00015626858179852504, + "loss": 0.5035, + "step": 16990 + }, + { + "epoch": 0.6563959998455539, + "grad_norm": 0.9385218620300293, + "learning_rate": 0.00015624284077892325, + "loss": 0.2618, + "step": 17000 + }, + { + "epoch": 0.6567821151395806, + "grad_norm": 0.8849124312400818, + "learning_rate": 0.0001562170997593215, + "loss": 0.3711, + "step": 17010 + }, + { + "epoch": 0.6571682304336075, + "grad_norm": 2.2706375122070312, + "learning_rate": 0.00015619135873971968, + "loss": 0.2666, + "step": 17020 + }, + { + "epoch": 0.6575543457276343, + "grad_norm": 2.1923744678497314, + "learning_rate": 0.00015616561772011792, + "loss": 0.2038, + "step": 17030 + }, + { + "epoch": 0.657940461021661, + "grad_norm": 1.2356051206588745, + "learning_rate": 0.0001561398767005161, + "loss": 0.4103, + "step": 17040 + }, + { + "epoch": 0.6583265763156879, + "grad_norm": 1.583095669746399, + "learning_rate": 0.00015611413568091434, + "loss": 0.4164, + "step": 17050 + }, + { + "epoch": 0.6587126916097147, + "grad_norm": 1.0823155641555786, + "learning_rate": 0.00015608839466131253, + "loss": 0.3898, + "step": 17060 + }, + { + "epoch": 0.6590988069037415, + "grad_norm": 1.9568531513214111, + "learning_rate": 0.00015606265364171074, + "loss": 0.3553, + "step": 17070 + }, + { + "epoch": 0.6594849221977682, + "grad_norm": 3.576362371444702, + "learning_rate": 0.00015603691262210898, + "loss": 0.2693, + "step": 17080 + }, + { + "epoch": 0.6598710374917951, + "grad_norm": 0.2432270646095276, + "learning_rate": 0.00015601117160250717, + "loss": 0.2993, + "step": 17090 + }, + { + "epoch": 0.6602571527858219, + "grad_norm": 1.5935213565826416, + "learning_rate": 0.0001559854305829054, + "loss": 0.3295, + "step": 17100 + }, + { + "epoch": 0.6606432680798486, + "grad_norm": 0.09780561178922653, + "learning_rate": 0.0001559596895633036, + "loss": 0.2891, + "step": 17110 + }, + { + "epoch": 0.6610293833738754, + "grad_norm": 0.5332283973693848, + "learning_rate": 0.00015593394854370184, + "loss": 0.372, + "step": 17120 + }, + { + "epoch": 0.6614154986679023, + "grad_norm": 1.1921123266220093, + "learning_rate": 0.00015590820752410005, + "loss": 0.3155, + "step": 17130 + }, + { + "epoch": 0.661801613961929, + "grad_norm": 0.35267120599746704, + "learning_rate": 0.00015588246650449823, + "loss": 0.3795, + "step": 17140 + }, + { + "epoch": 0.6621877292559558, + "grad_norm": 0.4876207411289215, + "learning_rate": 0.00015585672548489648, + "loss": 0.2717, + "step": 17150 + }, + { + "epoch": 0.6625738445499826, + "grad_norm": 0.9866208434104919, + "learning_rate": 0.00015583098446529466, + "loss": 0.4121, + "step": 
17160 + }, + { + "epoch": 0.6629599598440095, + "grad_norm": 3.0264835357666016, + "learning_rate": 0.0001558052434456929, + "loss": 0.5356, + "step": 17170 + }, + { + "epoch": 0.6633460751380362, + "grad_norm": 2.4786953926086426, + "learning_rate": 0.0001557795024260911, + "loss": 0.2086, + "step": 17180 + }, + { + "epoch": 0.663732190432063, + "grad_norm": 2.3706555366516113, + "learning_rate": 0.00015575376140648933, + "loss": 0.5224, + "step": 17190 + }, + { + "epoch": 0.6641183057260898, + "grad_norm": 2.6375296115875244, + "learning_rate": 0.00015572802038688754, + "loss": 0.3625, + "step": 17200 + }, + { + "epoch": 0.6645044210201166, + "grad_norm": 0.5592703819274902, + "learning_rate": 0.00015570227936728573, + "loss": 0.3831, + "step": 17210 + }, + { + "epoch": 0.6648905363141434, + "grad_norm": 2.309683322906494, + "learning_rate": 0.00015567653834768397, + "loss": 0.4039, + "step": 17220 + }, + { + "epoch": 0.6652766516081702, + "grad_norm": 2.3134100437164307, + "learning_rate": 0.00015565079732808215, + "loss": 0.1904, + "step": 17230 + }, + { + "epoch": 0.665662766902197, + "grad_norm": 2.232910633087158, + "learning_rate": 0.0001556250563084804, + "loss": 0.34, + "step": 17240 + }, + { + "epoch": 0.6660488821962238, + "grad_norm": 0.4798373579978943, + "learning_rate": 0.0001555993152888786, + "loss": 0.3143, + "step": 17250 + }, + { + "epoch": 0.6664349974902506, + "grad_norm": 2.071753740310669, + "learning_rate": 0.00015557357426927682, + "loss": 0.2645, + "step": 17260 + }, + { + "epoch": 0.6668211127842774, + "grad_norm": 2.1930956840515137, + "learning_rate": 0.00015554783324967504, + "loss": 0.4144, + "step": 17270 + }, + { + "epoch": 0.6672072280783041, + "grad_norm": 1.7874137163162231, + "learning_rate": 0.00015552209223007325, + "loss": 0.273, + "step": 17280 + }, + { + "epoch": 0.667593343372331, + "grad_norm": 1.264596939086914, + "learning_rate": 0.00015549635121047146, + "loss": 0.4186, + "step": 17290 + }, + { + "epoch": 0.6679794586663578, + "grad_norm": 0.5612212419509888, + "learning_rate": 0.00015547061019086965, + "loss": 0.2802, + "step": 17300 + }, + { + "epoch": 0.6683655739603845, + "grad_norm": 1.3782585859298706, + "learning_rate": 0.0001554448691712679, + "loss": 0.3712, + "step": 17310 + }, + { + "epoch": 0.6687516892544114, + "grad_norm": 1.5178605318069458, + "learning_rate": 0.0001554191281516661, + "loss": 0.1694, + "step": 17320 + }, + { + "epoch": 0.6691378045484382, + "grad_norm": 2.1221604347229004, + "learning_rate": 0.00015539338713206432, + "loss": 0.4418, + "step": 17330 + }, + { + "epoch": 0.669523919842465, + "grad_norm": 1.570734977722168, + "learning_rate": 0.00015536764611246253, + "loss": 0.4037, + "step": 17340 + }, + { + "epoch": 0.6699100351364917, + "grad_norm": 0.6928157806396484, + "learning_rate": 0.00015534190509286074, + "loss": 0.5293, + "step": 17350 + }, + { + "epoch": 0.6702961504305186, + "grad_norm": 0.8526401519775391, + "learning_rate": 0.00015531616407325895, + "loss": 0.348, + "step": 17360 + }, + { + "epoch": 0.6706822657245454, + "grad_norm": 1.7482202053070068, + "learning_rate": 0.00015529042305365714, + "loss": 0.352, + "step": 17370 + }, + { + "epoch": 0.6710683810185721, + "grad_norm": 1.724870204925537, + "learning_rate": 0.00015526468203405538, + "loss": 0.3589, + "step": 17380 + }, + { + "epoch": 0.6714544963125989, + "grad_norm": 3.125180721282959, + "learning_rate": 0.0001552389410144536, + "loss": 0.3063, + "step": 17390 + }, + { + "epoch": 0.6718406116066258, + "grad_norm": 2.0817360877990723, 
+ "learning_rate": 0.0001552131999948518, + "loss": 0.2217, + "step": 17400 + }, + { + "epoch": 0.6722267269006525, + "grad_norm": 0.12367000430822372, + "learning_rate": 0.00015518745897525002, + "loss": 0.1691, + "step": 17410 + }, + { + "epoch": 0.6726128421946793, + "grad_norm": 0.23093344271183014, + "learning_rate": 0.00015516171795564823, + "loss": 0.2496, + "step": 17420 + }, + { + "epoch": 0.6729989574887061, + "grad_norm": 3.1588997840881348, + "learning_rate": 0.00015513597693604645, + "loss": 0.2868, + "step": 17430 + }, + { + "epoch": 0.673385072782733, + "grad_norm": 1.471999168395996, + "learning_rate": 0.00015511023591644466, + "loss": 0.2785, + "step": 17440 + }, + { + "epoch": 0.6737711880767597, + "grad_norm": 0.4500691294670105, + "learning_rate": 0.00015508449489684287, + "loss": 0.3218, + "step": 17450 + }, + { + "epoch": 0.6741573033707865, + "grad_norm": 2.65533709526062, + "learning_rate": 0.0001550587538772411, + "loss": 0.3194, + "step": 17460 + }, + { + "epoch": 0.6745434186648133, + "grad_norm": 0.45398348569869995, + "learning_rate": 0.0001550330128576393, + "loss": 0.199, + "step": 17470 + }, + { + "epoch": 0.6749295339588401, + "grad_norm": 0.21518200635910034, + "learning_rate": 0.00015500727183803751, + "loss": 0.3043, + "step": 17480 + }, + { + "epoch": 0.6753156492528669, + "grad_norm": 0.13117246329784393, + "learning_rate": 0.00015498153081843573, + "loss": 0.1872, + "step": 17490 + }, + { + "epoch": 0.6757017645468937, + "grad_norm": 0.4857695996761322, + "learning_rate": 0.00015495578979883394, + "loss": 0.5992, + "step": 17500 + }, + { + "epoch": 0.6760878798409204, + "grad_norm": 2.4992752075195312, + "learning_rate": 0.00015493004877923215, + "loss": 0.5057, + "step": 17510 + }, + { + "epoch": 0.6764739951349473, + "grad_norm": 1.9614732265472412, + "learning_rate": 0.00015490430775963037, + "loss": 0.3169, + "step": 17520 + }, + { + "epoch": 0.6768601104289741, + "grad_norm": 0.14168275892734528, + "learning_rate": 0.00015487856674002858, + "loss": 0.271, + "step": 17530 + }, + { + "epoch": 0.6772462257230009, + "grad_norm": 4.064804553985596, + "learning_rate": 0.0001548528257204268, + "loss": 0.3316, + "step": 17540 + }, + { + "epoch": 0.6776323410170277, + "grad_norm": 3.2959964275360107, + "learning_rate": 0.000154827084700825, + "loss": 0.5148, + "step": 17550 + }, + { + "epoch": 0.6780184563110545, + "grad_norm": 0.6234021186828613, + "learning_rate": 0.00015480134368122322, + "loss": 0.362, + "step": 17560 + }, + { + "epoch": 0.6784045716050813, + "grad_norm": 0.642573356628418, + "learning_rate": 0.00015477560266162143, + "loss": 0.2982, + "step": 17570 + }, + { + "epoch": 0.678790686899108, + "grad_norm": 1.098667025566101, + "learning_rate": 0.00015474986164201965, + "loss": 0.1875, + "step": 17580 + }, + { + "epoch": 0.6791768021931349, + "grad_norm": 2.378192186355591, + "learning_rate": 0.00015472412062241786, + "loss": 0.2533, + "step": 17590 + }, + { + "epoch": 0.6795629174871617, + "grad_norm": 1.1783161163330078, + "learning_rate": 0.00015469837960281607, + "loss": 0.402, + "step": 17600 + }, + { + "epoch": 0.6799490327811885, + "grad_norm": 0.1861846148967743, + "learning_rate": 0.0001546726385832143, + "loss": 0.2481, + "step": 17610 + }, + { + "epoch": 0.6803351480752152, + "grad_norm": 0.5785403847694397, + "learning_rate": 0.0001546468975636125, + "loss": 0.375, + "step": 17620 + }, + { + "epoch": 0.6807212633692421, + "grad_norm": 1.9201544523239136, + "learning_rate": 0.00015462115654401071, + "loss": 0.2757, + "step": 
17630 + }, + { + "epoch": 0.6811073786632689, + "grad_norm": 2.461735963821411, + "learning_rate": 0.00015459541552440893, + "loss": 0.2193, + "step": 17640 + }, + { + "epoch": 0.6814934939572956, + "grad_norm": 2.007638454437256, + "learning_rate": 0.00015456967450480714, + "loss": 0.3561, + "step": 17650 + }, + { + "epoch": 0.6818796092513224, + "grad_norm": 1.3581938743591309, + "learning_rate": 0.00015454393348520535, + "loss": 0.3736, + "step": 17660 + }, + { + "epoch": 0.6822657245453493, + "grad_norm": 0.5637246966362, + "learning_rate": 0.00015451819246560357, + "loss": 0.3116, + "step": 17670 + }, + { + "epoch": 0.682651839839376, + "grad_norm": 1.409740924835205, + "learning_rate": 0.00015449245144600178, + "loss": 0.3281, + "step": 17680 + }, + { + "epoch": 0.6830379551334028, + "grad_norm": 3.2064149379730225, + "learning_rate": 0.0001544667104264, + "loss": 0.4427, + "step": 17690 + }, + { + "epoch": 0.6834240704274296, + "grad_norm": 0.9369992613792419, + "learning_rate": 0.0001544409694067982, + "loss": 0.3424, + "step": 17700 + }, + { + "epoch": 0.6838101857214565, + "grad_norm": 2.4149889945983887, + "learning_rate": 0.00015441522838719642, + "loss": 0.4074, + "step": 17710 + }, + { + "epoch": 0.6841963010154832, + "grad_norm": 0.688360869884491, + "learning_rate": 0.00015438948736759463, + "loss": 0.2205, + "step": 17720 + }, + { + "epoch": 0.68458241630951, + "grad_norm": 2.1444098949432373, + "learning_rate": 0.00015436374634799285, + "loss": 0.5942, + "step": 17730 + }, + { + "epoch": 0.6849685316035368, + "grad_norm": 1.8053444623947144, + "learning_rate": 0.00015433800532839106, + "loss": 0.1665, + "step": 17740 + }, + { + "epoch": 0.6853546468975636, + "grad_norm": 3.5637879371643066, + "learning_rate": 0.0001543122643087893, + "loss": 0.4972, + "step": 17750 + }, + { + "epoch": 0.6857407621915904, + "grad_norm": 1.846845030784607, + "learning_rate": 0.0001542865232891875, + "loss": 0.4105, + "step": 17760 + }, + { + "epoch": 0.6861268774856172, + "grad_norm": 2.2459189891815186, + "learning_rate": 0.0001542607822695857, + "loss": 0.253, + "step": 17770 + }, + { + "epoch": 0.686512992779644, + "grad_norm": 2.3160414695739746, + "learning_rate": 0.00015423504124998391, + "loss": 0.1905, + "step": 17780 + }, + { + "epoch": 0.6868991080736708, + "grad_norm": 1.2804152965545654, + "learning_rate": 0.00015420930023038213, + "loss": 0.2283, + "step": 17790 + }, + { + "epoch": 0.6872852233676976, + "grad_norm": 1.7174758911132812, + "learning_rate": 0.00015418355921078034, + "loss": 0.49, + "step": 17800 + }, + { + "epoch": 0.6876713386617244, + "grad_norm": 3.057098627090454, + "learning_rate": 0.00015415781819117855, + "loss": 0.4398, + "step": 17810 + }, + { + "epoch": 0.6880574539557512, + "grad_norm": 0.9112808704376221, + "learning_rate": 0.0001541320771715768, + "loss": 0.203, + "step": 17820 + }, + { + "epoch": 0.688443569249778, + "grad_norm": 2.899599313735962, + "learning_rate": 0.00015410633615197498, + "loss": 0.4212, + "step": 17830 + }, + { + "epoch": 0.6888296845438048, + "grad_norm": 1.8084157705307007, + "learning_rate": 0.0001540805951323732, + "loss": 0.4948, + "step": 17840 + }, + { + "epoch": 0.6892157998378315, + "grad_norm": 1.2151083946228027, + "learning_rate": 0.0001540548541127714, + "loss": 0.4254, + "step": 17850 + }, + { + "epoch": 0.6896019151318584, + "grad_norm": 1.20271897315979, + "learning_rate": 0.00015402911309316962, + "loss": 0.1753, + "step": 17860 + }, + { + "epoch": 0.6899880304258852, + "grad_norm": 0.7688419222831726, + 
"learning_rate": 0.00015400337207356783, + "loss": 0.4066, + "step": 17870 + }, + { + "epoch": 0.690374145719912, + "grad_norm": 0.8648087978363037, + "learning_rate": 0.00015397763105396605, + "loss": 0.5405, + "step": 17880 + }, + { + "epoch": 0.6907602610139387, + "grad_norm": 1.5501036643981934, + "learning_rate": 0.0001539518900343643, + "loss": 0.3761, + "step": 17890 + }, + { + "epoch": 0.6911463763079656, + "grad_norm": 0.5476267337799072, + "learning_rate": 0.00015392614901476247, + "loss": 0.2256, + "step": 17900 + }, + { + "epoch": 0.6915324916019924, + "grad_norm": 2.0248584747314453, + "learning_rate": 0.0001539004079951607, + "loss": 0.5449, + "step": 17910 + }, + { + "epoch": 0.6919186068960191, + "grad_norm": 1.688596248626709, + "learning_rate": 0.0001538746669755589, + "loss": 0.262, + "step": 17920 + }, + { + "epoch": 0.6923047221900459, + "grad_norm": 0.42646175622940063, + "learning_rate": 0.00015384892595595711, + "loss": 0.3714, + "step": 17930 + }, + { + "epoch": 0.6926908374840728, + "grad_norm": 0.9620506167411804, + "learning_rate": 0.00015382318493635535, + "loss": 0.272, + "step": 17940 + }, + { + "epoch": 0.6930769527780996, + "grad_norm": 1.6859287023544312, + "learning_rate": 0.00015379744391675354, + "loss": 0.3123, + "step": 17950 + }, + { + "epoch": 0.6934630680721263, + "grad_norm": 0.6281775832176208, + "learning_rate": 0.00015377170289715178, + "loss": 0.2808, + "step": 17960 + }, + { + "epoch": 0.6938491833661531, + "grad_norm": 3.756242036819458, + "learning_rate": 0.00015374596187754997, + "loss": 0.4971, + "step": 17970 + }, + { + "epoch": 0.69423529866018, + "grad_norm": 0.6022955775260925, + "learning_rate": 0.0001537202208579482, + "loss": 0.2918, + "step": 17980 + }, + { + "epoch": 0.6946214139542067, + "grad_norm": 0.6843704581260681, + "learning_rate": 0.0001536944798383464, + "loss": 0.476, + "step": 17990 + }, + { + "epoch": 0.6950075292482335, + "grad_norm": 1.0234850645065308, + "learning_rate": 0.0001536687388187446, + "loss": 0.2101, + "step": 18000 + }, + { + "epoch": 0.6953936445422603, + "grad_norm": 1.0228936672210693, + "learning_rate": 0.00015364299779914285, + "loss": 0.3958, + "step": 18010 + }, + { + "epoch": 0.6957797598362871, + "grad_norm": 1.1152328252792358, + "learning_rate": 0.00015361725677954103, + "loss": 0.2967, + "step": 18020 + }, + { + "epoch": 0.6961658751303139, + "grad_norm": 1.7190260887145996, + "learning_rate": 0.00015359151575993927, + "loss": 0.5281, + "step": 18030 + }, + { + "epoch": 0.6965519904243407, + "grad_norm": 0.6654171943664551, + "learning_rate": 0.00015356577474033746, + "loss": 0.3467, + "step": 18040 + }, + { + "epoch": 0.6969381057183675, + "grad_norm": 0.3305549621582031, + "learning_rate": 0.0001535400337207357, + "loss": 0.3576, + "step": 18050 + }, + { + "epoch": 0.6973242210123943, + "grad_norm": 0.3116997480392456, + "learning_rate": 0.0001535142927011339, + "loss": 0.5372, + "step": 18060 + }, + { + "epoch": 0.6977103363064211, + "grad_norm": 0.4224954843521118, + "learning_rate": 0.0001534885516815321, + "loss": 0.3866, + "step": 18070 + }, + { + "epoch": 0.6980964516004479, + "grad_norm": 4.249162197113037, + "learning_rate": 0.00015346281066193034, + "loss": 0.4236, + "step": 18080 + }, + { + "epoch": 0.6984825668944747, + "grad_norm": 1.109113335609436, + "learning_rate": 0.00015343706964232853, + "loss": 0.2882, + "step": 18090 + }, + { + "epoch": 0.6988686821885015, + "grad_norm": 1.3546028137207031, + "learning_rate": 0.00015341132862272677, + "loss": 0.3432, + "step": 18100 + 
}, + { + "epoch": 0.6992547974825283, + "grad_norm": 2.943016290664673, + "learning_rate": 0.00015338558760312495, + "loss": 0.3287, + "step": 18110 + }, + { + "epoch": 0.699640912776555, + "grad_norm": 1.1259021759033203, + "learning_rate": 0.0001533598465835232, + "loss": 0.4613, + "step": 18120 + }, + { + "epoch": 0.7000270280705819, + "grad_norm": 1.4867910146713257, + "learning_rate": 0.0001533341055639214, + "loss": 0.4309, + "step": 18130 + }, + { + "epoch": 0.7004131433646087, + "grad_norm": 2.6913414001464844, + "learning_rate": 0.0001533083645443196, + "loss": 0.2154, + "step": 18140 + }, + { + "epoch": 0.7007992586586355, + "grad_norm": 1.495466947555542, + "learning_rate": 0.00015328262352471783, + "loss": 0.3207, + "step": 18150 + }, + { + "epoch": 0.7011853739526622, + "grad_norm": 1.023193120956421, + "learning_rate": 0.00015325688250511602, + "loss": 0.2067, + "step": 18160 + }, + { + "epoch": 0.7015714892466891, + "grad_norm": 1.603235125541687, + "learning_rate": 0.00015323114148551426, + "loss": 0.4577, + "step": 18170 + }, + { + "epoch": 0.7019576045407159, + "grad_norm": 0.5976241230964661, + "learning_rate": 0.00015320540046591245, + "loss": 0.2282, + "step": 18180 + }, + { + "epoch": 0.7023437198347426, + "grad_norm": 2.561659574508667, + "learning_rate": 0.0001531796594463107, + "loss": 0.4045, + "step": 18190 + }, + { + "epoch": 0.7027298351287694, + "grad_norm": 1.3893495798110962, + "learning_rate": 0.0001531539184267089, + "loss": 0.2419, + "step": 18200 + }, + { + "epoch": 0.7031159504227963, + "grad_norm": 0.7786352038383484, + "learning_rate": 0.00015312817740710709, + "loss": 0.1653, + "step": 18210 + }, + { + "epoch": 0.703502065716823, + "grad_norm": 0.6525956988334656, + "learning_rate": 0.00015310243638750533, + "loss": 0.5418, + "step": 18220 + }, + { + "epoch": 0.7038881810108498, + "grad_norm": 0.38933584094047546, + "learning_rate": 0.0001530766953679035, + "loss": 0.2952, + "step": 18230 + }, + { + "epoch": 0.7042742963048766, + "grad_norm": 2.0752692222595215, + "learning_rate": 0.00015305095434830175, + "loss": 0.211, + "step": 18240 + }, + { + "epoch": 0.7046604115989035, + "grad_norm": 0.9095730781555176, + "learning_rate": 0.00015302521332869997, + "loss": 0.2723, + "step": 18250 + }, + { + "epoch": 0.7050465268929302, + "grad_norm": 1.6840119361877441, + "learning_rate": 0.00015299947230909818, + "loss": 0.3362, + "step": 18260 + }, + { + "epoch": 0.705432642186957, + "grad_norm": 2.0353269577026367, + "learning_rate": 0.0001529737312894964, + "loss": 0.2407, + "step": 18270 + }, + { + "epoch": 0.7058187574809838, + "grad_norm": 3.0865590572357178, + "learning_rate": 0.00015294799026989458, + "loss": 0.3426, + "step": 18280 + }, + { + "epoch": 0.7062048727750107, + "grad_norm": 1.6488090753555298, + "learning_rate": 0.00015292224925029282, + "loss": 0.4275, + "step": 18290 + }, + { + "epoch": 0.7065909880690374, + "grad_norm": 0.5494143962860107, + "learning_rate": 0.000152896508230691, + "loss": 0.412, + "step": 18300 + }, + { + "epoch": 0.7069771033630642, + "grad_norm": 3.111301898956299, + "learning_rate": 0.00015287076721108925, + "loss": 0.4615, + "step": 18310 + }, + { + "epoch": 0.707363218657091, + "grad_norm": 1.74229097366333, + "learning_rate": 0.00015284502619148746, + "loss": 0.3194, + "step": 18320 + }, + { + "epoch": 0.7077493339511178, + "grad_norm": 1.8455474376678467, + "learning_rate": 0.00015281928517188567, + "loss": 0.2817, + "step": 18330 + }, + { + "epoch": 0.7081354492451446, + "grad_norm": 1.778723120689392, + 
"learning_rate": 0.0001527935441522839, + "loss": 0.441, + "step": 18340 + }, + { + "epoch": 0.7085215645391714, + "grad_norm": 0.7885593771934509, + "learning_rate": 0.00015276780313268207, + "loss": 0.3121, + "step": 18350 + }, + { + "epoch": 0.7089076798331982, + "grad_norm": 1.9262609481811523, + "learning_rate": 0.0001527420621130803, + "loss": 0.3271, + "step": 18360 + }, + { + "epoch": 0.709293795127225, + "grad_norm": 0.10278096795082092, + "learning_rate": 0.0001527163210934785, + "loss": 0.2602, + "step": 18370 + }, + { + "epoch": 0.7096799104212518, + "grad_norm": 1.2394765615463257, + "learning_rate": 0.00015269058007387674, + "loss": 0.2641, + "step": 18380 + }, + { + "epoch": 0.7100660257152785, + "grad_norm": 2.0335285663604736, + "learning_rate": 0.00015266483905427495, + "loss": 0.1926, + "step": 18390 + }, + { + "epoch": 0.7104521410093054, + "grad_norm": 7.205105781555176, + "learning_rate": 0.00015263909803467317, + "loss": 0.4772, + "step": 18400 + }, + { + "epoch": 0.7108382563033322, + "grad_norm": 1.1549599170684814, + "learning_rate": 0.00015261335701507138, + "loss": 0.2775, + "step": 18410 + }, + { + "epoch": 0.711224371597359, + "grad_norm": 2.8363780975341797, + "learning_rate": 0.00015258761599546957, + "loss": 0.3533, + "step": 18420 + }, + { + "epoch": 0.7116104868913857, + "grad_norm": 0.2606666684150696, + "learning_rate": 0.0001525618749758678, + "loss": 0.1421, + "step": 18430 + }, + { + "epoch": 0.7119966021854126, + "grad_norm": 1.2934225797653198, + "learning_rate": 0.00015253613395626602, + "loss": 0.3066, + "step": 18440 + }, + { + "epoch": 0.7123827174794394, + "grad_norm": 3.8246026039123535, + "learning_rate": 0.00015251039293666423, + "loss": 0.534, + "step": 18450 + }, + { + "epoch": 0.7127688327734661, + "grad_norm": 2.2535433769226074, + "learning_rate": 0.00015248465191706245, + "loss": 0.4795, + "step": 18460 + }, + { + "epoch": 0.7131549480674929, + "grad_norm": 1.6749187707901, + "learning_rate": 0.00015245891089746066, + "loss": 0.4091, + "step": 18470 + }, + { + "epoch": 0.7135410633615198, + "grad_norm": 1.1795039176940918, + "learning_rate": 0.00015243316987785887, + "loss": 0.2612, + "step": 18480 + }, + { + "epoch": 0.7139271786555466, + "grad_norm": 1.9280221462249756, + "learning_rate": 0.00015240742885825709, + "loss": 0.2068, + "step": 18490 + }, + { + "epoch": 0.7143132939495733, + "grad_norm": 1.1188548803329468, + "learning_rate": 0.0001523816878386553, + "loss": 0.3193, + "step": 18500 + }, + { + "epoch": 0.7146994092436001, + "grad_norm": 0.2429720014333725, + "learning_rate": 0.0001523559468190535, + "loss": 0.3129, + "step": 18510 + }, + { + "epoch": 0.715085524537627, + "grad_norm": 4.09410285949707, + "learning_rate": 0.00015233020579945173, + "loss": 0.2437, + "step": 18520 + }, + { + "epoch": 0.7154716398316537, + "grad_norm": 2.3252813816070557, + "learning_rate": 0.00015230446477984994, + "loss": 0.4649, + "step": 18530 + }, + { + "epoch": 0.7158577551256805, + "grad_norm": 0.5725727677345276, + "learning_rate": 0.00015227872376024815, + "loss": 0.3291, + "step": 18540 + }, + { + "epoch": 0.7162438704197073, + "grad_norm": 0.9253637194633484, + "learning_rate": 0.00015225298274064637, + "loss": 0.3486, + "step": 18550 + }, + { + "epoch": 0.7166299857137342, + "grad_norm": 2.3353309631347656, + "learning_rate": 0.00015222724172104458, + "loss": 0.253, + "step": 18560 + }, + { + "epoch": 0.7170161010077609, + "grad_norm": 0.7312389016151428, + "learning_rate": 0.0001522015007014428, + "loss": 0.2817, + "step": 18570 + 
}, + { + "epoch": 0.7174022163017877, + "grad_norm": 0.6564128994941711, + "learning_rate": 0.000152175759681841, + "loss": 0.2896, + "step": 18580 + }, + { + "epoch": 0.7177883315958145, + "grad_norm": 3.4619979858398438, + "learning_rate": 0.00015215001866223922, + "loss": 0.5028, + "step": 18590 + }, + { + "epoch": 0.7181744468898413, + "grad_norm": 6.910060882568359, + "learning_rate": 0.00015212427764263743, + "loss": 0.2467, + "step": 18600 + }, + { + "epoch": 0.7185605621838681, + "grad_norm": 2.022186279296875, + "learning_rate": 0.00015209853662303565, + "loss": 0.3406, + "step": 18610 + }, + { + "epoch": 0.7189466774778949, + "grad_norm": 1.2240760326385498, + "learning_rate": 0.00015207279560343386, + "loss": 0.3391, + "step": 18620 + }, + { + "epoch": 0.7193327927719217, + "grad_norm": 0.7356148958206177, + "learning_rate": 0.00015204705458383207, + "loss": 0.2679, + "step": 18630 + }, + { + "epoch": 0.7197189080659485, + "grad_norm": 0.963387131690979, + "learning_rate": 0.00015202131356423029, + "loss": 0.4088, + "step": 18640 + }, + { + "epoch": 0.7201050233599753, + "grad_norm": 3.0437800884246826, + "learning_rate": 0.0001519955725446285, + "loss": 0.2591, + "step": 18650 + }, + { + "epoch": 0.720491138654002, + "grad_norm": 2.5874569416046143, + "learning_rate": 0.0001519698315250267, + "loss": 0.2377, + "step": 18660 + }, + { + "epoch": 0.7208772539480289, + "grad_norm": 2.3215808868408203, + "learning_rate": 0.00015194409050542493, + "loss": 0.5335, + "step": 18670 + }, + { + "epoch": 0.7212633692420557, + "grad_norm": 1.9501638412475586, + "learning_rate": 0.00015191834948582314, + "loss": 0.4657, + "step": 18680 + }, + { + "epoch": 0.7216494845360825, + "grad_norm": 1.8396021127700806, + "learning_rate": 0.00015189260846622135, + "loss": 0.2719, + "step": 18690 + }, + { + "epoch": 0.7220355998301092, + "grad_norm": 0.9337745904922485, + "learning_rate": 0.00015186686744661957, + "loss": 0.3672, + "step": 18700 + }, + { + "epoch": 0.7224217151241361, + "grad_norm": 1.892098069190979, + "learning_rate": 0.00015184112642701778, + "loss": 0.6085, + "step": 18710 + }, + { + "epoch": 0.7228078304181629, + "grad_norm": 1.051630973815918, + "learning_rate": 0.000151815385407416, + "loss": 0.2422, + "step": 18720 + }, + { + "epoch": 0.7231939457121896, + "grad_norm": 0.8714147210121155, + "learning_rate": 0.0001517896443878142, + "loss": 0.4046, + "step": 18730 + }, + { + "epoch": 0.7235800610062164, + "grad_norm": 0.5002617835998535, + "learning_rate": 0.00015176390336821242, + "loss": 0.3708, + "step": 18740 + }, + { + "epoch": 0.7239661763002433, + "grad_norm": 1.3960262537002563, + "learning_rate": 0.00015173816234861066, + "loss": 0.3206, + "step": 18750 + }, + { + "epoch": 0.7243522915942701, + "grad_norm": 0.7899012565612793, + "learning_rate": 0.00015171242132900885, + "loss": 0.2875, + "step": 18760 + }, + { + "epoch": 0.7247384068882968, + "grad_norm": 0.7216291427612305, + "learning_rate": 0.00015168668030940706, + "loss": 0.3885, + "step": 18770 + }, + { + "epoch": 0.7251245221823236, + "grad_norm": 0.36028966307640076, + "learning_rate": 0.00015166093928980527, + "loss": 0.3542, + "step": 18780 + }, + { + "epoch": 0.7255106374763505, + "grad_norm": 1.378724455833435, + "learning_rate": 0.00015163519827020349, + "loss": 0.2149, + "step": 18790 + }, + { + "epoch": 0.7258967527703772, + "grad_norm": 1.544819712638855, + "learning_rate": 0.0001516094572506017, + "loss": 0.278, + "step": 18800 + }, + { + "epoch": 0.726282868064404, + "grad_norm": 0.5819025039672852, + 
"learning_rate": 0.0001515837162309999, + "loss": 0.2161, + "step": 18810 + }, + { + "epoch": 0.7266689833584308, + "grad_norm": 0.8350955843925476, + "learning_rate": 0.00015155797521139815, + "loss": 0.4134, + "step": 18820 + }, + { + "epoch": 0.7270550986524577, + "grad_norm": 2.4110195636749268, + "learning_rate": 0.00015153223419179634, + "loss": 0.3928, + "step": 18830 + }, + { + "epoch": 0.7274412139464844, + "grad_norm": 0.7913835048675537, + "learning_rate": 0.00015150649317219455, + "loss": 0.2336, + "step": 18840 + }, + { + "epoch": 0.7278273292405112, + "grad_norm": 2.13431978225708, + "learning_rate": 0.00015148075215259277, + "loss": 0.4758, + "step": 18850 + }, + { + "epoch": 0.7282134445345381, + "grad_norm": 1.2756295204162598, + "learning_rate": 0.00015145501113299098, + "loss": 0.3139, + "step": 18860 + }, + { + "epoch": 0.7285995598285648, + "grad_norm": 0.3661370277404785, + "learning_rate": 0.0001514292701133892, + "loss": 0.3913, + "step": 18870 + }, + { + "epoch": 0.7289856751225916, + "grad_norm": 1.2238267660140991, + "learning_rate": 0.0001514035290937874, + "loss": 0.1893, + "step": 18880 + }, + { + "epoch": 0.7293717904166184, + "grad_norm": 2.8876595497131348, + "learning_rate": 0.00015137778807418565, + "loss": 0.2567, + "step": 18890 + }, + { + "epoch": 0.7297579057106452, + "grad_norm": 1.248967170715332, + "learning_rate": 0.00015135204705458383, + "loss": 0.2004, + "step": 18900 + }, + { + "epoch": 0.730144021004672, + "grad_norm": 0.9446873068809509, + "learning_rate": 0.00015132630603498205, + "loss": 0.386, + "step": 18910 + }, + { + "epoch": 0.7305301362986988, + "grad_norm": 4.592974662780762, + "learning_rate": 0.00015130056501538026, + "loss": 0.4549, + "step": 18920 + }, + { + "epoch": 0.7309162515927256, + "grad_norm": 0.8000105619430542, + "learning_rate": 0.00015127482399577847, + "loss": 0.2795, + "step": 18930 + }, + { + "epoch": 0.7313023668867524, + "grad_norm": 0.5600059628486633, + "learning_rate": 0.0001512490829761767, + "loss": 0.3682, + "step": 18940 + }, + { + "epoch": 0.7316884821807792, + "grad_norm": 0.4072086811065674, + "learning_rate": 0.0001512233419565749, + "loss": 0.3186, + "step": 18950 + }, + { + "epoch": 0.732074597474806, + "grad_norm": 3.387422561645508, + "learning_rate": 0.00015119760093697314, + "loss": 0.3195, + "step": 18960 + }, + { + "epoch": 0.7324607127688327, + "grad_norm": 0.5224191546440125, + "learning_rate": 0.00015117185991737133, + "loss": 0.1792, + "step": 18970 + }, + { + "epoch": 0.7328468280628596, + "grad_norm": 0.5431543588638306, + "learning_rate": 0.00015114611889776954, + "loss": 0.2775, + "step": 18980 + }, + { + "epoch": 0.7332329433568864, + "grad_norm": 0.13088488578796387, + "learning_rate": 0.00015112037787816775, + "loss": 0.3511, + "step": 18990 + }, + { + "epoch": 0.7336190586509131, + "grad_norm": 0.6414417624473572, + "learning_rate": 0.00015109463685856597, + "loss": 0.4042, + "step": 19000 + }, + { + "epoch": 0.7340051739449399, + "grad_norm": 0.2358855903148651, + "learning_rate": 0.0001510688958389642, + "loss": 0.2567, + "step": 19010 + }, + { + "epoch": 0.7343912892389668, + "grad_norm": 1.343703031539917, + "learning_rate": 0.0001510431548193624, + "loss": 0.3658, + "step": 19020 + }, + { + "epoch": 0.7347774045329936, + "grad_norm": 2.5982301235198975, + "learning_rate": 0.00015101741379976063, + "loss": 0.3664, + "step": 19030 + }, + { + "epoch": 0.7351635198270203, + "grad_norm": 1.3639850616455078, + "learning_rate": 0.00015099167278015882, + "loss": 0.1809, + "step": 
19040 + }, + { + "epoch": 0.7355496351210471, + "grad_norm": 1.322572946548462, + "learning_rate": 0.00015096593176055703, + "loss": 0.213, + "step": 19050 + }, + { + "epoch": 0.735935750415074, + "grad_norm": 0.6858059763908386, + "learning_rate": 0.00015094019074095527, + "loss": 0.1494, + "step": 19060 + }, + { + "epoch": 0.7363218657091007, + "grad_norm": 3.9815866947174072, + "learning_rate": 0.00015091444972135346, + "loss": 0.4879, + "step": 19070 + }, + { + "epoch": 0.7367079810031275, + "grad_norm": 0.5155348777770996, + "learning_rate": 0.0001508887087017517, + "loss": 0.1951, + "step": 19080 + }, + { + "epoch": 0.7370940962971543, + "grad_norm": 1.1120082139968872, + "learning_rate": 0.00015086296768214988, + "loss": 0.3156, + "step": 19090 + }, + { + "epoch": 0.7374802115911812, + "grad_norm": 2.1396732330322266, + "learning_rate": 0.00015083722666254813, + "loss": 0.3528, + "step": 19100 + }, + { + "epoch": 0.7378663268852079, + "grad_norm": 1.8543074131011963, + "learning_rate": 0.0001508114856429463, + "loss": 0.2778, + "step": 19110 + }, + { + "epoch": 0.7382524421792347, + "grad_norm": 0.2955397963523865, + "learning_rate": 0.00015078574462334452, + "loss": 0.3267, + "step": 19120 + }, + { + "epoch": 0.7386385574732616, + "grad_norm": 0.6277685165405273, + "learning_rate": 0.00015076000360374277, + "loss": 0.3316, + "step": 19130 + }, + { + "epoch": 0.7390246727672883, + "grad_norm": 1.4339113235473633, + "learning_rate": 0.00015073426258414095, + "loss": 0.3227, + "step": 19140 + }, + { + "epoch": 0.7394107880613151, + "grad_norm": 2.74206805229187, + "learning_rate": 0.0001507085215645392, + "loss": 0.2086, + "step": 19150 + }, + { + "epoch": 0.7397969033553419, + "grad_norm": 1.0108954906463623, + "learning_rate": 0.00015068278054493738, + "loss": 0.2697, + "step": 19160 + }, + { + "epoch": 0.7401830186493688, + "grad_norm": 1.726650357246399, + "learning_rate": 0.00015065703952533562, + "loss": 0.1312, + "step": 19170 + }, + { + "epoch": 0.7405691339433955, + "grad_norm": 1.8226735591888428, + "learning_rate": 0.0001506312985057338, + "loss": 0.4488, + "step": 19180 + }, + { + "epoch": 0.7409552492374223, + "grad_norm": 1.4812517166137695, + "learning_rate": 0.00015060555748613205, + "loss": 0.472, + "step": 19190 + }, + { + "epoch": 0.741341364531449, + "grad_norm": 1.184036374092102, + "learning_rate": 0.00015057981646653026, + "loss": 0.1342, + "step": 19200 + }, + { + "epoch": 0.7417274798254759, + "grad_norm": 2.925368309020996, + "learning_rate": 0.00015055407544692844, + "loss": 0.4114, + "step": 19210 + }, + { + "epoch": 0.7421135951195027, + "grad_norm": 1.4891862869262695, + "learning_rate": 0.00015052833442732669, + "loss": 0.3454, + "step": 19220 + }, + { + "epoch": 0.7424997104135295, + "grad_norm": 2.9221529960632324, + "learning_rate": 0.00015050259340772487, + "loss": 0.4622, + "step": 19230 + }, + { + "epoch": 0.7428858257075562, + "grad_norm": 1.3214635848999023, + "learning_rate": 0.0001504768523881231, + "loss": 0.2783, + "step": 19240 + }, + { + "epoch": 0.7432719410015831, + "grad_norm": 1.1919734477996826, + "learning_rate": 0.00015045111136852133, + "loss": 0.3667, + "step": 19250 + }, + { + "epoch": 0.7436580562956099, + "grad_norm": 1.7075424194335938, + "learning_rate": 0.00015042537034891954, + "loss": 0.1867, + "step": 19260 + }, + { + "epoch": 0.7440441715896366, + "grad_norm": 0.6810876727104187, + "learning_rate": 0.00015039962932931775, + "loss": 0.2231, + "step": 19270 + }, + { + "epoch": 0.7444302868836634, + "grad_norm": 
1.3421598672866821, + "learning_rate": 0.00015037388830971594, + "loss": 0.2138, + "step": 19280 + }, + { + "epoch": 0.7448164021776903, + "grad_norm": 1.5983080863952637, + "learning_rate": 0.00015034814729011418, + "loss": 0.2799, + "step": 19290 + }, + { + "epoch": 0.7452025174717171, + "grad_norm": 2.0319182872772217, + "learning_rate": 0.00015032240627051236, + "loss": 0.213, + "step": 19300 + }, + { + "epoch": 0.7455886327657438, + "grad_norm": 0.6433222889900208, + "learning_rate": 0.0001502966652509106, + "loss": 0.3604, + "step": 19310 + }, + { + "epoch": 0.7459747480597706, + "grad_norm": 1.1373825073242188, + "learning_rate": 0.00015027092423130882, + "loss": 0.3007, + "step": 19320 + }, + { + "epoch": 0.7463608633537975, + "grad_norm": 1.7230875492095947, + "learning_rate": 0.00015024518321170703, + "loss": 0.2471, + "step": 19330 + }, + { + "epoch": 0.7467469786478242, + "grad_norm": 1.73224937915802, + "learning_rate": 0.00015021944219210524, + "loss": 0.4877, + "step": 19340 + }, + { + "epoch": 0.747133093941851, + "grad_norm": 0.9023095369338989, + "learning_rate": 0.00015019370117250343, + "loss": 0.352, + "step": 19350 + }, + { + "epoch": 0.7475192092358778, + "grad_norm": 1.898983120918274, + "learning_rate": 0.00015016796015290167, + "loss": 0.3981, + "step": 19360 + }, + { + "epoch": 0.7479053245299047, + "grad_norm": 1.8544923067092896, + "learning_rate": 0.00015014221913329986, + "loss": 0.3015, + "step": 19370 + }, + { + "epoch": 0.7482914398239314, + "grad_norm": 1.1380795240402222, + "learning_rate": 0.0001501164781136981, + "loss": 0.3173, + "step": 19380 + }, + { + "epoch": 0.7486775551179582, + "grad_norm": 0.5071800351142883, + "learning_rate": 0.0001500907370940963, + "loss": 0.46, + "step": 19390 + }, + { + "epoch": 0.7490636704119851, + "grad_norm": 0.30739355087280273, + "learning_rate": 0.00015006499607449452, + "loss": 0.4942, + "step": 19400 + }, + { + "epoch": 0.7494497857060118, + "grad_norm": 1.1223585605621338, + "learning_rate": 0.00015003925505489274, + "loss": 0.3118, + "step": 19410 + }, + { + "epoch": 0.7498359010000386, + "grad_norm": 1.019545555114746, + "learning_rate": 0.00015001351403529092, + "loss": 0.3115, + "step": 19420 + }, + { + "epoch": 0.7502220162940654, + "grad_norm": 0.4567502439022064, + "learning_rate": 0.00014998777301568916, + "loss": 0.2932, + "step": 19430 + }, + { + "epoch": 0.7506081315880923, + "grad_norm": 1.669258952140808, + "learning_rate": 0.00014996203199608738, + "loss": 0.2889, + "step": 19440 + }, + { + "epoch": 0.750994246882119, + "grad_norm": 0.2787584364414215, + "learning_rate": 0.0001499362909764856, + "loss": 0.3482, + "step": 19450 + }, + { + "epoch": 0.7513803621761458, + "grad_norm": 1.6648303270339966, + "learning_rate": 0.0001499105499568838, + "loss": 0.3136, + "step": 19460 + }, + { + "epoch": 0.7517664774701726, + "grad_norm": 2.6357266902923584, + "learning_rate": 0.00014988480893728202, + "loss": 0.4643, + "step": 19470 + }, + { + "epoch": 0.7521525927641994, + "grad_norm": 1.8017394542694092, + "learning_rate": 0.00014985906791768023, + "loss": 0.469, + "step": 19480 + }, + { + "epoch": 0.7525387080582262, + "grad_norm": 1.8467847108840942, + "learning_rate": 0.00014983332689807842, + "loss": 0.1495, + "step": 19490 + }, + { + "epoch": 0.752924823352253, + "grad_norm": 2.9446980953216553, + "learning_rate": 0.00014980758587847666, + "loss": 0.3162, + "step": 19500 + }, + { + "epoch": 0.7533109386462797, + "grad_norm": 1.4076721668243408, + "learning_rate": 0.00014978184485887487, + "loss": 
0.2356, + "step": 19510 + }, + { + "epoch": 0.7536970539403066, + "grad_norm": 1.2669463157653809, + "learning_rate": 0.00014975610383927308, + "loss": 0.3056, + "step": 19520 + }, + { + "epoch": 0.7540831692343334, + "grad_norm": 1.1390401124954224, + "learning_rate": 0.0001497303628196713, + "loss": 0.2785, + "step": 19530 + }, + { + "epoch": 0.7544692845283602, + "grad_norm": 3.095099925994873, + "learning_rate": 0.0001497046218000695, + "loss": 0.489, + "step": 19540 + }, + { + "epoch": 0.7548553998223869, + "grad_norm": 1.5737907886505127, + "learning_rate": 0.00014967888078046772, + "loss": 0.3981, + "step": 19550 + }, + { + "epoch": 0.7552415151164138, + "grad_norm": 1.4953045845031738, + "learning_rate": 0.00014965313976086594, + "loss": 0.3153, + "step": 19560 + }, + { + "epoch": 0.7556276304104406, + "grad_norm": 0.7709154486656189, + "learning_rate": 0.00014962739874126415, + "loss": 0.2904, + "step": 19570 + }, + { + "epoch": 0.7560137457044673, + "grad_norm": 2.118950366973877, + "learning_rate": 0.00014960165772166236, + "loss": 0.3736, + "step": 19580 + }, + { + "epoch": 0.7563998609984941, + "grad_norm": 2.9463138580322266, + "learning_rate": 0.00014957591670206058, + "loss": 0.3186, + "step": 19590 + }, + { + "epoch": 0.756785976292521, + "grad_norm": 1.3784689903259277, + "learning_rate": 0.0001495501756824588, + "loss": 0.3589, + "step": 19600 + }, + { + "epoch": 0.7571720915865477, + "grad_norm": 2.35467267036438, + "learning_rate": 0.000149524434662857, + "loss": 0.1282, + "step": 19610 + }, + { + "epoch": 0.7575582068805745, + "grad_norm": 0.7167999148368835, + "learning_rate": 0.00014949869364325522, + "loss": 0.3775, + "step": 19620 + }, + { + "epoch": 0.7579443221746013, + "grad_norm": 1.242785096168518, + "learning_rate": 0.00014947295262365343, + "loss": 0.3814, + "step": 19630 + }, + { + "epoch": 0.7583304374686282, + "grad_norm": 2.7875797748565674, + "learning_rate": 0.00014944721160405164, + "loss": 0.4764, + "step": 19640 + }, + { + "epoch": 0.7587165527626549, + "grad_norm": 1.2169462442398071, + "learning_rate": 0.00014942147058444986, + "loss": 0.3132, + "step": 19650 + }, + { + "epoch": 0.7591026680566817, + "grad_norm": 3.146204948425293, + "learning_rate": 0.00014939572956484807, + "loss": 0.248, + "step": 19660 + }, + { + "epoch": 0.7594887833507086, + "grad_norm": 2.2177276611328125, + "learning_rate": 0.00014936998854524628, + "loss": 0.3755, + "step": 19670 + }, + { + "epoch": 0.7598748986447353, + "grad_norm": 0.8063843250274658, + "learning_rate": 0.0001493442475256445, + "loss": 0.6341, + "step": 19680 + }, + { + "epoch": 0.7602610139387621, + "grad_norm": 0.18064215779304504, + "learning_rate": 0.0001493185065060427, + "loss": 0.3565, + "step": 19690 + }, + { + "epoch": 0.7606471292327889, + "grad_norm": 1.4401954412460327, + "learning_rate": 0.00014929276548644092, + "loss": 0.1873, + "step": 19700 + }, + { + "epoch": 0.7610332445268158, + "grad_norm": 0.7269515991210938, + "learning_rate": 0.00014926702446683914, + "loss": 0.2765, + "step": 19710 + }, + { + "epoch": 0.7614193598208425, + "grad_norm": 1.2779995203018188, + "learning_rate": 0.00014924128344723735, + "loss": 0.3633, + "step": 19720 + }, + { + "epoch": 0.7618054751148693, + "grad_norm": 1.3330426216125488, + "learning_rate": 0.00014921554242763556, + "loss": 0.3736, + "step": 19730 + }, + { + "epoch": 0.7621915904088961, + "grad_norm": 1.4269347190856934, + "learning_rate": 0.00014918980140803378, + "loss": 0.2572, + "step": 19740 + }, + { + "epoch": 0.7625777057029229, + 
"grad_norm": 0.9075976610183716, + "learning_rate": 0.000149164060388432, + "loss": 0.3609, + "step": 19750 + }, + { + "epoch": 0.7629638209969497, + "grad_norm": 1.7566559314727783, + "learning_rate": 0.0001491383193688302, + "loss": 0.3134, + "step": 19760 + }, + { + "epoch": 0.7633499362909765, + "grad_norm": 2.4620914459228516, + "learning_rate": 0.00014911257834922842, + "loss": 0.3559, + "step": 19770 + }, + { + "epoch": 0.7637360515850032, + "grad_norm": 0.9474597573280334, + "learning_rate": 0.00014908683732962663, + "loss": 0.3821, + "step": 19780 + }, + { + "epoch": 0.7641221668790301, + "grad_norm": 0.6298363208770752, + "learning_rate": 0.00014906109631002484, + "loss": 0.2801, + "step": 19790 + }, + { + "epoch": 0.7645082821730569, + "grad_norm": 0.213288351893425, + "learning_rate": 0.00014903535529042306, + "loss": 0.368, + "step": 19800 + }, + { + "epoch": 0.7648943974670837, + "grad_norm": 1.4412375688552856, + "learning_rate": 0.00014900961427082127, + "loss": 0.3972, + "step": 19810 + }, + { + "epoch": 0.7652805127611104, + "grad_norm": 1.6808812618255615, + "learning_rate": 0.00014898387325121948, + "loss": 0.5302, + "step": 19820 + }, + { + "epoch": 0.7656666280551373, + "grad_norm": 0.922726571559906, + "learning_rate": 0.0001489581322316177, + "loss": 0.3299, + "step": 19830 + }, + { + "epoch": 0.7660527433491641, + "grad_norm": 0.5058152079582214, + "learning_rate": 0.0001489323912120159, + "loss": 0.4485, + "step": 19840 + }, + { + "epoch": 0.7664388586431908, + "grad_norm": 1.3025776147842407, + "learning_rate": 0.00014890665019241412, + "loss": 0.266, + "step": 19850 + }, + { + "epoch": 0.7668249739372176, + "grad_norm": 2.0282516479492188, + "learning_rate": 0.00014888090917281234, + "loss": 0.7752, + "step": 19860 + }, + { + "epoch": 0.7672110892312445, + "grad_norm": 0.6557582020759583, + "learning_rate": 0.00014885516815321058, + "loss": 0.256, + "step": 19870 + }, + { + "epoch": 0.7675972045252712, + "grad_norm": 0.13688494265079498, + "learning_rate": 0.00014882942713360876, + "loss": 0.263, + "step": 19880 + }, + { + "epoch": 0.767983319819298, + "grad_norm": 0.04091642051935196, + "learning_rate": 0.00014880368611400698, + "loss": 0.276, + "step": 19890 + }, + { + "epoch": 0.7683694351133248, + "grad_norm": 3.054969072341919, + "learning_rate": 0.0001487779450944052, + "loss": 0.5198, + "step": 19900 + }, + { + "epoch": 0.7687555504073517, + "grad_norm": 0.5707372426986694, + "learning_rate": 0.0001487522040748034, + "loss": 0.2715, + "step": 19910 + }, + { + "epoch": 0.7691416657013784, + "grad_norm": 0.477830708026886, + "learning_rate": 0.00014872646305520162, + "loss": 0.2517, + "step": 19920 + }, + { + "epoch": 0.7695277809954052, + "grad_norm": 1.0550785064697266, + "learning_rate": 0.00014870072203559983, + "loss": 0.2552, + "step": 19930 + }, + { + "epoch": 0.7699138962894321, + "grad_norm": 1.9678715467453003, + "learning_rate": 0.00014867498101599807, + "loss": 0.4487, + "step": 19940 + }, + { + "epoch": 0.7703000115834588, + "grad_norm": 0.4506283104419708, + "learning_rate": 0.00014864923999639626, + "loss": 0.2632, + "step": 19950 + }, + { + "epoch": 0.7706861268774856, + "grad_norm": 1.8445035219192505, + "learning_rate": 0.0001486234989767945, + "loss": 0.3315, + "step": 19960 + }, + { + "epoch": 0.7710722421715124, + "grad_norm": 2.6394076347351074, + "learning_rate": 0.00014859775795719268, + "loss": 0.3216, + "step": 19970 + }, + { + "epoch": 0.7714583574655393, + "grad_norm": 0.6717782020568848, + "learning_rate": 0.0001485720169375909, 
+ "loss": 0.3531, + "step": 19980 + }, + { + "epoch": 0.771844472759566, + "grad_norm": 0.5959204435348511, + "learning_rate": 0.0001485462759179891, + "loss": 0.2095, + "step": 19990 + }, + { + "epoch": 0.7722305880535928, + "grad_norm": 1.874375581741333, + "learning_rate": 0.00014852053489838732, + "loss": 0.4845, + "step": 20000 + }, + { + "epoch": 0.7726167033476196, + "grad_norm": 2.6810474395751953, + "learning_rate": 0.00014849479387878556, + "loss": 0.471, + "step": 20010 + }, + { + "epoch": 0.7730028186416464, + "grad_norm": 0.5498594045639038, + "learning_rate": 0.00014846905285918375, + "loss": 0.2265, + "step": 20020 + }, + { + "epoch": 0.7733889339356732, + "grad_norm": 0.7658601999282837, + "learning_rate": 0.000148443311839582, + "loss": 0.4459, + "step": 20030 + }, + { + "epoch": 0.7737750492297, + "grad_norm": 0.5597706437110901, + "learning_rate": 0.00014841757081998018, + "loss": 0.4317, + "step": 20040 + }, + { + "epoch": 0.7741611645237267, + "grad_norm": 0.740342915058136, + "learning_rate": 0.0001483918298003784, + "loss": 0.4158, + "step": 20050 + }, + { + "epoch": 0.7745472798177536, + "grad_norm": 0.6069484949111938, + "learning_rate": 0.00014836608878077663, + "loss": 0.2363, + "step": 20060 + }, + { + "epoch": 0.7749333951117804, + "grad_norm": 0.9331381916999817, + "learning_rate": 0.00014834034776117482, + "loss": 0.2063, + "step": 20070 + }, + { + "epoch": 0.7753195104058072, + "grad_norm": 1.298399806022644, + "learning_rate": 0.00014831460674157306, + "loss": 0.319, + "step": 20080 + }, + { + "epoch": 0.7757056256998339, + "grad_norm": 1.887229323387146, + "learning_rate": 0.00014828886572197124, + "loss": 0.3805, + "step": 20090 + }, + { + "epoch": 0.7760917409938608, + "grad_norm": 0.5734463930130005, + "learning_rate": 0.00014826312470236948, + "loss": 0.392, + "step": 20100 + }, + { + "epoch": 0.7764778562878876, + "grad_norm": 1.10410475730896, + "learning_rate": 0.00014823738368276767, + "loss": 0.3222, + "step": 20110 + }, + { + "epoch": 0.7768639715819143, + "grad_norm": 3.1354923248291016, + "learning_rate": 0.00014821164266316588, + "loss": 0.3434, + "step": 20120 + }, + { + "epoch": 0.7772500868759411, + "grad_norm": 1.9921913146972656, + "learning_rate": 0.00014818590164356412, + "loss": 0.222, + "step": 20130 + }, + { + "epoch": 0.777636202169968, + "grad_norm": 0.38615912199020386, + "learning_rate": 0.0001481601606239623, + "loss": 0.1859, + "step": 20140 + }, + { + "epoch": 0.7780223174639948, + "grad_norm": 0.919252336025238, + "learning_rate": 0.00014813441960436055, + "loss": 0.3889, + "step": 20150 + }, + { + "epoch": 0.7784084327580215, + "grad_norm": 0.9474624991416931, + "learning_rate": 0.00014810867858475874, + "loss": 0.3086, + "step": 20160 + }, + { + "epoch": 0.7787945480520484, + "grad_norm": 1.1992007493972778, + "learning_rate": 0.00014808293756515698, + "loss": 0.3949, + "step": 20170 + }, + { + "epoch": 0.7791806633460752, + "grad_norm": 2.2405364513397217, + "learning_rate": 0.00014805719654555516, + "loss": 0.4963, + "step": 20180 + }, + { + "epoch": 0.7795667786401019, + "grad_norm": 0.9163286685943604, + "learning_rate": 0.00014803145552595338, + "loss": 0.4022, + "step": 20190 + }, + { + "epoch": 0.7799528939341287, + "grad_norm": 0.995309591293335, + "learning_rate": 0.00014800571450635162, + "loss": 0.244, + "step": 20200 + }, + { + "epoch": 0.7803390092281556, + "grad_norm": 3.0966508388519287, + "learning_rate": 0.0001479799734867498, + "loss": 0.4165, + "step": 20210 + }, + { + "epoch": 0.7807251245221823, + 
"grad_norm": 1.7679264545440674, + "learning_rate": 0.00014795423246714804, + "loss": 0.3264, + "step": 20220 + }, + { + "epoch": 0.7811112398162091, + "grad_norm": 3.405724048614502, + "learning_rate": 0.00014792849144754623, + "loss": 0.5292, + "step": 20230 + }, + { + "epoch": 0.7814973551102359, + "grad_norm": 0.5335774421691895, + "learning_rate": 0.00014790275042794447, + "loss": 0.2901, + "step": 20240 + }, + { + "epoch": 0.7818834704042628, + "grad_norm": 2.332176446914673, + "learning_rate": 0.00014787700940834268, + "loss": 0.3116, + "step": 20250 + }, + { + "epoch": 0.7822695856982895, + "grad_norm": 1.226844072341919, + "learning_rate": 0.00014785126838874087, + "loss": 0.332, + "step": 20260 + }, + { + "epoch": 0.7826557009923163, + "grad_norm": 0.8310544490814209, + "learning_rate": 0.0001478255273691391, + "loss": 0.3929, + "step": 20270 + }, + { + "epoch": 0.7830418162863431, + "grad_norm": 4.065925121307373, + "learning_rate": 0.0001477997863495373, + "loss": 0.3044, + "step": 20280 + }, + { + "epoch": 0.7834279315803699, + "grad_norm": 1.3661054372787476, + "learning_rate": 0.00014777404532993554, + "loss": 0.2396, + "step": 20290 + }, + { + "epoch": 0.7838140468743967, + "grad_norm": 2.047938346862793, + "learning_rate": 0.00014774830431033372, + "loss": 0.2954, + "step": 20300 + }, + { + "epoch": 0.7842001621684235, + "grad_norm": 2.099759817123413, + "learning_rate": 0.00014772256329073196, + "loss": 0.1987, + "step": 20310 + }, + { + "epoch": 0.7845862774624502, + "grad_norm": 4.439823627471924, + "learning_rate": 0.00014769682227113018, + "loss": 0.2449, + "step": 20320 + }, + { + "epoch": 0.7849723927564771, + "grad_norm": 1.894250750541687, + "learning_rate": 0.00014767108125152836, + "loss": 0.3335, + "step": 20330 + }, + { + "epoch": 0.7853585080505039, + "grad_norm": 2.2916924953460693, + "learning_rate": 0.0001476453402319266, + "loss": 0.3828, + "step": 20340 + }, + { + "epoch": 0.7857446233445307, + "grad_norm": 3.7847397327423096, + "learning_rate": 0.0001476195992123248, + "loss": 0.2142, + "step": 20350 + }, + { + "epoch": 0.7861307386385574, + "grad_norm": 2.7507593631744385, + "learning_rate": 0.00014759385819272303, + "loss": 0.39, + "step": 20360 + }, + { + "epoch": 0.7865168539325843, + "grad_norm": 2.500195264816284, + "learning_rate": 0.00014756811717312124, + "loss": 0.4163, + "step": 20370 + }, + { + "epoch": 0.7869029692266111, + "grad_norm": 1.3699760437011719, + "learning_rate": 0.00014754237615351946, + "loss": 0.2407, + "step": 20380 + }, + { + "epoch": 0.7872890845206378, + "grad_norm": 0.9736176133155823, + "learning_rate": 0.00014751663513391767, + "loss": 0.3644, + "step": 20390 + }, + { + "epoch": 0.7876751998146646, + "grad_norm": 0.9103217720985413, + "learning_rate": 0.00014749089411431588, + "loss": 0.1304, + "step": 20400 + }, + { + "epoch": 0.7880613151086915, + "grad_norm": 0.7736025452613831, + "learning_rate": 0.0001474651530947141, + "loss": 0.3848, + "step": 20410 + }, + { + "epoch": 0.7884474304027183, + "grad_norm": 1.6167576313018799, + "learning_rate": 0.00014743941207511228, + "loss": 0.3469, + "step": 20420 + }, + { + "epoch": 0.788833545696745, + "grad_norm": 0.1359117031097412, + "learning_rate": 0.00014741367105551052, + "loss": 0.28, + "step": 20430 + }, + { + "epoch": 0.7892196609907719, + "grad_norm": 0.20857305824756622, + "learning_rate": 0.00014738793003590874, + "loss": 0.2406, + "step": 20440 + }, + { + "epoch": 0.7896057762847987, + "grad_norm": 0.9618992805480957, + "learning_rate": 0.00014736218901630695, + 
"loss": 0.243, + "step": 20450 + }, + { + "epoch": 0.7899918915788254, + "grad_norm": 0.4663112163543701, + "learning_rate": 0.00014733644799670516, + "loss": 0.214, + "step": 20460 + }, + { + "epoch": 0.7903780068728522, + "grad_norm": 1.3922615051269531, + "learning_rate": 0.00014731070697710338, + "loss": 0.4601, + "step": 20470 + }, + { + "epoch": 0.7907641221668791, + "grad_norm": 1.5706521272659302, + "learning_rate": 0.0001472849659575016, + "loss": 0.2187, + "step": 20480 + }, + { + "epoch": 0.7911502374609058, + "grad_norm": 1.9315848350524902, + "learning_rate": 0.00014725922493789978, + "loss": 0.2691, + "step": 20490 + }, + { + "epoch": 0.7915363527549326, + "grad_norm": 2.6131045818328857, + "learning_rate": 0.00014723348391829802, + "loss": 0.335, + "step": 20500 + }, + { + "epoch": 0.7919224680489594, + "grad_norm": 2.6561343669891357, + "learning_rate": 0.00014720774289869623, + "loss": 0.2166, + "step": 20510 + }, + { + "epoch": 0.7923085833429863, + "grad_norm": 1.247574806213379, + "learning_rate": 0.00014718200187909444, + "loss": 0.4276, + "step": 20520 + }, + { + "epoch": 0.792694698637013, + "grad_norm": 0.7353020310401917, + "learning_rate": 0.00014715626085949266, + "loss": 0.3442, + "step": 20530 + }, + { + "epoch": 0.7930808139310398, + "grad_norm": 0.1100919172167778, + "learning_rate": 0.00014713051983989087, + "loss": 0.21, + "step": 20540 + }, + { + "epoch": 0.7934669292250666, + "grad_norm": 0.6608699560165405, + "learning_rate": 0.00014710477882028908, + "loss": 0.2593, + "step": 20550 + }, + { + "epoch": 0.7938530445190934, + "grad_norm": 1.0959700345993042, + "learning_rate": 0.0001470790378006873, + "loss": 0.1864, + "step": 20560 + }, + { + "epoch": 0.7942391598131202, + "grad_norm": 1.469408392906189, + "learning_rate": 0.0001470532967810855, + "loss": 0.251, + "step": 20570 + }, + { + "epoch": 0.794625275107147, + "grad_norm": 1.4239304065704346, + "learning_rate": 0.00014702755576148372, + "loss": 0.1782, + "step": 20580 + }, + { + "epoch": 0.7950113904011737, + "grad_norm": 0.5389681458473206, + "learning_rate": 0.00014700181474188194, + "loss": 0.3228, + "step": 20590 + }, + { + "epoch": 0.7953975056952006, + "grad_norm": 0.4744633436203003, + "learning_rate": 0.00014697607372228015, + "loss": 0.5448, + "step": 20600 + }, + { + "epoch": 0.7957836209892274, + "grad_norm": 1.1155211925506592, + "learning_rate": 0.00014695033270267836, + "loss": 0.3341, + "step": 20610 + }, + { + "epoch": 0.7961697362832542, + "grad_norm": 1.4301745891571045, + "learning_rate": 0.00014692459168307658, + "loss": 0.2376, + "step": 20620 + }, + { + "epoch": 0.7965558515772809, + "grad_norm": 2.0889878273010254, + "learning_rate": 0.0001468988506634748, + "loss": 0.252, + "step": 20630 + }, + { + "epoch": 0.7969419668713078, + "grad_norm": 1.420873761177063, + "learning_rate": 0.000146873109643873, + "loss": 0.4033, + "step": 20640 + }, + { + "epoch": 0.7973280821653346, + "grad_norm": 1.9909567832946777, + "learning_rate": 0.00014684736862427122, + "loss": 0.5206, + "step": 20650 + }, + { + "epoch": 0.7977141974593613, + "grad_norm": 1.3584216833114624, + "learning_rate": 0.00014682162760466943, + "loss": 0.2643, + "step": 20660 + }, + { + "epoch": 0.7981003127533881, + "grad_norm": 1.5488578081130981, + "learning_rate": 0.00014679588658506764, + "loss": 0.4098, + "step": 20670 + }, + { + "epoch": 0.798486428047415, + "grad_norm": 2.0128777027130127, + "learning_rate": 0.00014677014556546586, + "loss": 0.2498, + "step": 20680 + }, + { + "epoch": 0.7988725433414418, + 
"grad_norm": 0.6591671705245972, + "learning_rate": 0.00014674440454586407, + "loss": 0.3131, + "step": 20690 + }, + { + "epoch": 0.7992586586354685, + "grad_norm": 0.8321843147277832, + "learning_rate": 0.00014671866352626228, + "loss": 0.3849, + "step": 20700 + }, + { + "epoch": 0.7996447739294954, + "grad_norm": 1.4672768115997314, + "learning_rate": 0.0001466929225066605, + "loss": 0.3056, + "step": 20710 + }, + { + "epoch": 0.8000308892235222, + "grad_norm": 1.5972867012023926, + "learning_rate": 0.0001466671814870587, + "loss": 0.2699, + "step": 20720 + }, + { + "epoch": 0.8004170045175489, + "grad_norm": 0.685972273349762, + "learning_rate": 0.00014664144046745692, + "loss": 0.2598, + "step": 20730 + }, + { + "epoch": 0.8008031198115757, + "grad_norm": 1.2639611959457397, + "learning_rate": 0.00014661569944785514, + "loss": 0.2158, + "step": 20740 + }, + { + "epoch": 0.8011892351056026, + "grad_norm": 2.0752620697021484, + "learning_rate": 0.00014658995842825335, + "loss": 0.2782, + "step": 20750 + }, + { + "epoch": 0.8015753503996293, + "grad_norm": 4.334362983703613, + "learning_rate": 0.00014656421740865156, + "loss": 0.5083, + "step": 20760 + }, + { + "epoch": 0.8019614656936561, + "grad_norm": 2.233436107635498, + "learning_rate": 0.00014653847638904978, + "loss": 0.4378, + "step": 20770 + }, + { + "epoch": 0.8023475809876829, + "grad_norm": 0.5392621755599976, + "learning_rate": 0.000146512735369448, + "loss": 0.4467, + "step": 20780 + }, + { + "epoch": 0.8027336962817098, + "grad_norm": 0.3768058121204376, + "learning_rate": 0.0001464869943498462, + "loss": 0.3542, + "step": 20790 + }, + { + "epoch": 0.8031198115757365, + "grad_norm": 0.5712292194366455, + "learning_rate": 0.00014646125333024442, + "loss": 0.281, + "step": 20800 + }, + { + "epoch": 0.8035059268697633, + "grad_norm": 0.8674315214157104, + "learning_rate": 0.00014643551231064263, + "loss": 0.2945, + "step": 20810 + }, + { + "epoch": 0.8038920421637901, + "grad_norm": 1.6206015348434448, + "learning_rate": 0.00014640977129104084, + "loss": 0.2627, + "step": 20820 + }, + { + "epoch": 0.804278157457817, + "grad_norm": 1.8807138204574585, + "learning_rate": 0.00014638403027143906, + "loss": 0.3768, + "step": 20830 + }, + { + "epoch": 0.8046642727518437, + "grad_norm": 0.9721212387084961, + "learning_rate": 0.00014635828925183727, + "loss": 0.4511, + "step": 20840 + }, + { + "epoch": 0.8050503880458705, + "grad_norm": 0.569038987159729, + "learning_rate": 0.00014633254823223548, + "loss": 0.2235, + "step": 20850 + }, + { + "epoch": 0.8054365033398972, + "grad_norm": 0.5981199741363525, + "learning_rate": 0.0001463068072126337, + "loss": 0.1413, + "step": 20860 + }, + { + "epoch": 0.8058226186339241, + "grad_norm": 3.696936845779419, + "learning_rate": 0.00014628106619303194, + "loss": 0.3779, + "step": 20870 + }, + { + "epoch": 0.8062087339279509, + "grad_norm": 1.5039314031600952, + "learning_rate": 0.00014625532517343012, + "loss": 0.2117, + "step": 20880 + }, + { + "epoch": 0.8065948492219777, + "grad_norm": 1.7800476551055908, + "learning_rate": 0.00014622958415382834, + "loss": 0.2343, + "step": 20890 + }, + { + "epoch": 0.8069809645160044, + "grad_norm": 0.18587611615657806, + "learning_rate": 0.00014620384313422655, + "loss": 0.2229, + "step": 20900 + }, + { + "epoch": 0.8073670798100313, + "grad_norm": 3.5351600646972656, + "learning_rate": 0.00014617810211462476, + "loss": 0.3632, + "step": 20910 + }, + { + "epoch": 0.8077531951040581, + "grad_norm": 3.9036381244659424, + "learning_rate": 
0.00014615236109502298, + "loss": 0.2941, + "step": 20920 + }, + { + "epoch": 0.8081393103980848, + "grad_norm": 1.9554537534713745, + "learning_rate": 0.0001461266200754212, + "loss": 0.2685, + "step": 20930 + }, + { + "epoch": 0.8085254256921116, + "grad_norm": 1.0424940586090088, + "learning_rate": 0.00014610087905581943, + "loss": 0.2617, + "step": 20940 + }, + { + "epoch": 0.8089115409861385, + "grad_norm": 6.593061923980713, + "learning_rate": 0.00014607513803621762, + "loss": 0.5034, + "step": 20950 + }, + { + "epoch": 0.8092976562801653, + "grad_norm": 11.373255729675293, + "learning_rate": 0.00014604939701661583, + "loss": 0.2694, + "step": 20960 + }, + { + "epoch": 0.809683771574192, + "grad_norm": 1.4843833446502686, + "learning_rate": 0.00014602365599701404, + "loss": 0.4526, + "step": 20970 + }, + { + "epoch": 0.8100698868682189, + "grad_norm": 3.6086366176605225, + "learning_rate": 0.00014599791497741225, + "loss": 0.4536, + "step": 20980 + }, + { + "epoch": 0.8104560021622457, + "grad_norm": 0.6381124258041382, + "learning_rate": 0.00014597217395781047, + "loss": 0.2918, + "step": 20990 + }, + { + "epoch": 0.8108421174562724, + "grad_norm": 1.9507087469100952, + "learning_rate": 0.00014594643293820868, + "loss": 0.1625, + "step": 21000 + }, + { + "epoch": 0.8112282327502992, + "grad_norm": 0.954914391040802, + "learning_rate": 0.00014592069191860692, + "loss": 0.4204, + "step": 21010 + }, + { + "epoch": 0.8116143480443261, + "grad_norm": 2.633601427078247, + "learning_rate": 0.0001458949508990051, + "loss": 0.4017, + "step": 21020 + }, + { + "epoch": 0.8120004633383529, + "grad_norm": 0.7492280602455139, + "learning_rate": 0.00014586920987940332, + "loss": 0.3209, + "step": 21030 + }, + { + "epoch": 0.8123865786323796, + "grad_norm": 1.1670303344726562, + "learning_rate": 0.00014584346885980153, + "loss": 0.3361, + "step": 21040 + }, + { + "epoch": 0.8127726939264064, + "grad_norm": 0.8462283611297607, + "learning_rate": 0.00014581772784019975, + "loss": 0.3156, + "step": 21050 + }, + { + "epoch": 0.8131588092204333, + "grad_norm": 2.151671886444092, + "learning_rate": 0.000145791986820598, + "loss": 0.2081, + "step": 21060 + }, + { + "epoch": 0.81354492451446, + "grad_norm": 1.0742170810699463, + "learning_rate": 0.00014576624580099617, + "loss": 0.2247, + "step": 21070 + }, + { + "epoch": 0.8139310398084868, + "grad_norm": 1.2256931066513062, + "learning_rate": 0.00014574050478139442, + "loss": 0.3542, + "step": 21080 + }, + { + "epoch": 0.8143171551025136, + "grad_norm": 3.740055561065674, + "learning_rate": 0.0001457147637617926, + "loss": 0.1856, + "step": 21090 + }, + { + "epoch": 0.8147032703965404, + "grad_norm": 0.03290783613920212, + "learning_rate": 0.00014568902274219081, + "loss": 0.2616, + "step": 21100 + }, + { + "epoch": 0.8150893856905672, + "grad_norm": 0.13995541632175446, + "learning_rate": 0.00014566328172258903, + "loss": 0.2107, + "step": 21110 + }, + { + "epoch": 0.815475500984594, + "grad_norm": 0.44371533393859863, + "learning_rate": 0.00014563754070298724, + "loss": 0.3091, + "step": 21120 + }, + { + "epoch": 0.8158616162786207, + "grad_norm": 2.7269155979156494, + "learning_rate": 0.00014561179968338548, + "loss": 0.5674, + "step": 21130 + }, + { + "epoch": 0.8162477315726476, + "grad_norm": 0.7148515582084656, + "learning_rate": 0.00014558605866378367, + "loss": 0.4721, + "step": 21140 + }, + { + "epoch": 0.8166338468666744, + "grad_norm": 1.0939961671829224, + "learning_rate": 0.0001455603176441819, + "loss": 0.2666, + "step": 21150 + }, + { + 
"epoch": 0.8170199621607012, + "grad_norm": 1.7923939228057861, + "learning_rate": 0.0001455345766245801, + "loss": 0.2634, + "step": 21160 + }, + { + "epoch": 0.8174060774547279, + "grad_norm": 0.725130021572113, + "learning_rate": 0.00014550883560497834, + "loss": 0.1968, + "step": 21170 + }, + { + "epoch": 0.8177921927487548, + "grad_norm": 0.443892240524292, + "learning_rate": 0.00014548309458537655, + "loss": 0.556, + "step": 21180 + }, + { + "epoch": 0.8181783080427816, + "grad_norm": 1.3551362752914429, + "learning_rate": 0.00014545735356577473, + "loss": 0.4115, + "step": 21190 + }, + { + "epoch": 0.8185644233368083, + "grad_norm": 0.6360037922859192, + "learning_rate": 0.00014543161254617297, + "loss": 0.3176, + "step": 21200 + }, + { + "epoch": 0.8189505386308351, + "grad_norm": 2.634549140930176, + "learning_rate": 0.00014540587152657116, + "loss": 0.3662, + "step": 21210 + }, + { + "epoch": 0.819336653924862, + "grad_norm": 3.267479181289673, + "learning_rate": 0.0001453801305069694, + "loss": 0.2925, + "step": 21220 + }, + { + "epoch": 0.8197227692188888, + "grad_norm": 1.3607991933822632, + "learning_rate": 0.0001453543894873676, + "loss": 0.1795, + "step": 21230 + }, + { + "epoch": 0.8201088845129155, + "grad_norm": 0.6499636769294739, + "learning_rate": 0.00014532864846776583, + "loss": 0.4451, + "step": 21240 + }, + { + "epoch": 0.8204949998069424, + "grad_norm": 0.7486141920089722, + "learning_rate": 0.00014530290744816404, + "loss": 0.3982, + "step": 21250 + }, + { + "epoch": 0.8208811151009692, + "grad_norm": 0.6481244564056396, + "learning_rate": 0.00014527716642856223, + "loss": 0.3358, + "step": 21260 + }, + { + "epoch": 0.8212672303949959, + "grad_norm": 1.0736982822418213, + "learning_rate": 0.00014525142540896047, + "loss": 0.5264, + "step": 21270 + }, + { + "epoch": 0.8216533456890227, + "grad_norm": 2.0467801094055176, + "learning_rate": 0.00014522568438935865, + "loss": 0.3645, + "step": 21280 + }, + { + "epoch": 0.8220394609830496, + "grad_norm": 2.671499013900757, + "learning_rate": 0.0001451999433697569, + "loss": 0.4779, + "step": 21290 + }, + { + "epoch": 0.8224255762770764, + "grad_norm": 1.4449695348739624, + "learning_rate": 0.00014517420235015508, + "loss": 0.3555, + "step": 21300 + }, + { + "epoch": 0.8228116915711031, + "grad_norm": 1.7484570741653442, + "learning_rate": 0.00014514846133055332, + "loss": 0.2921, + "step": 21310 + }, + { + "epoch": 0.8231978068651299, + "grad_norm": 0.9985783100128174, + "learning_rate": 0.00014512272031095153, + "loss": 0.1861, + "step": 21320 + }, + { + "epoch": 0.8235839221591568, + "grad_norm": 2.0824766159057617, + "learning_rate": 0.00014509697929134972, + "loss": 0.3582, + "step": 21330 + }, + { + "epoch": 0.8239700374531835, + "grad_norm": 0.8448216915130615, + "learning_rate": 0.00014507123827174796, + "loss": 0.3674, + "step": 21340 + }, + { + "epoch": 0.8243561527472103, + "grad_norm": 2.027111053466797, + "learning_rate": 0.00014504549725214615, + "loss": 0.3297, + "step": 21350 + }, + { + "epoch": 0.8247422680412371, + "grad_norm": 1.560604214668274, + "learning_rate": 0.0001450197562325444, + "loss": 0.3303, + "step": 21360 + }, + { + "epoch": 0.825128383335264, + "grad_norm": 2.179563045501709, + "learning_rate": 0.0001449940152129426, + "loss": 0.1704, + "step": 21370 + }, + { + "epoch": 0.8255144986292907, + "grad_norm": 1.6268993616104126, + "learning_rate": 0.00014496827419334081, + "loss": 0.3316, + "step": 21380 + }, + { + "epoch": 0.8259006139233175, + "grad_norm": 0.8986232280731201, + 
"learning_rate": 0.00014494253317373903, + "loss": 0.3361, + "step": 21390 + }, + { + "epoch": 0.8262867292173443, + "grad_norm": 0.8017566204071045, + "learning_rate": 0.00014491679215413721, + "loss": 0.3992, + "step": 21400 + }, + { + "epoch": 0.8266728445113711, + "grad_norm": 0.879162073135376, + "learning_rate": 0.00014489105113453545, + "loss": 0.3747, + "step": 21410 + }, + { + "epoch": 0.8270589598053979, + "grad_norm": 1.948309302330017, + "learning_rate": 0.00014486531011493364, + "loss": 0.2323, + "step": 21420 + }, + { + "epoch": 0.8274450750994247, + "grad_norm": 1.34186851978302, + "learning_rate": 0.00014483956909533188, + "loss": 0.3856, + "step": 21430 + }, + { + "epoch": 0.8278311903934514, + "grad_norm": 1.3884105682373047, + "learning_rate": 0.0001448138280757301, + "loss": 0.3044, + "step": 21440 + }, + { + "epoch": 0.8282173056874783, + "grad_norm": 1.3283358812332153, + "learning_rate": 0.0001447880870561283, + "loss": 0.3624, + "step": 21450 + }, + { + "epoch": 0.8286034209815051, + "grad_norm": 13.829493522644043, + "learning_rate": 0.00014476234603652652, + "loss": 0.2217, + "step": 21460 + }, + { + "epoch": 0.8289895362755318, + "grad_norm": 3.4602255821228027, + "learning_rate": 0.0001447366050169247, + "loss": 0.4964, + "step": 21470 + }, + { + "epoch": 0.8293756515695587, + "grad_norm": 0.42417749762535095, + "learning_rate": 0.00014471086399732295, + "loss": 0.2262, + "step": 21480 + }, + { + "epoch": 0.8297617668635855, + "grad_norm": 3.1674726009368896, + "learning_rate": 0.00014468512297772113, + "loss": 0.3327, + "step": 21490 + }, + { + "epoch": 0.8301478821576123, + "grad_norm": 0.7226410508155823, + "learning_rate": 0.00014465938195811937, + "loss": 0.3152, + "step": 21500 + }, + { + "epoch": 0.830533997451639, + "grad_norm": 0.7477544546127319, + "learning_rate": 0.0001446336409385176, + "loss": 0.3676, + "step": 21510 + }, + { + "epoch": 0.8309201127456659, + "grad_norm": 1.6237748861312866, + "learning_rate": 0.0001446078999189158, + "loss": 0.336, + "step": 21520 + }, + { + "epoch": 0.8313062280396927, + "grad_norm": 2.8118655681610107, + "learning_rate": 0.00014458215889931401, + "loss": 0.3513, + "step": 21530 + }, + { + "epoch": 0.8316923433337194, + "grad_norm": 2.6571335792541504, + "learning_rate": 0.0001445564178797122, + "loss": 0.4008, + "step": 21540 + }, + { + "epoch": 0.8320784586277462, + "grad_norm": 0.7042214870452881, + "learning_rate": 0.00014453067686011044, + "loss": 0.3433, + "step": 21550 + }, + { + "epoch": 0.8324645739217731, + "grad_norm": 0.1551884561777115, + "learning_rate": 0.00014450493584050865, + "loss": 0.1748, + "step": 21560 + }, + { + "epoch": 0.8328506892157999, + "grad_norm": 1.2595586776733398, + "learning_rate": 0.00014447919482090687, + "loss": 0.2567, + "step": 21570 + }, + { + "epoch": 0.8332368045098266, + "grad_norm": 3.800837516784668, + "learning_rate": 0.00014445345380130508, + "loss": 0.31, + "step": 21580 + }, + { + "epoch": 0.8336229198038534, + "grad_norm": 2.2269585132598877, + "learning_rate": 0.0001444277127817033, + "loss": 0.5146, + "step": 21590 + }, + { + "epoch": 0.8340090350978803, + "grad_norm": 0.5263709425926208, + "learning_rate": 0.0001444019717621015, + "loss": 0.2369, + "step": 21600 + }, + { + "epoch": 0.834395150391907, + "grad_norm": 0.27564361691474915, + "learning_rate": 0.00014437623074249972, + "loss": 0.308, + "step": 21610 + }, + { + "epoch": 0.8347812656859338, + "grad_norm": 4.639162540435791, + "learning_rate": 0.00014435048972289793, + "loss": 0.3806, + "step": 21620 + 
}, + { + "epoch": 0.8351673809799606, + "grad_norm": 0.9607310891151428, + "learning_rate": 0.00014432474870329615, + "loss": 0.2245, + "step": 21630 + }, + { + "epoch": 0.8355534962739875, + "grad_norm": 4.01082706451416, + "learning_rate": 0.00014429900768369436, + "loss": 0.3934, + "step": 21640 + }, + { + "epoch": 0.8359396115680142, + "grad_norm": 0.9401382803916931, + "learning_rate": 0.00014427326666409257, + "loss": 0.4726, + "step": 21650 + }, + { + "epoch": 0.836325726862041, + "grad_norm": 2.1189887523651123, + "learning_rate": 0.0001442475256444908, + "loss": 0.5434, + "step": 21660 + }, + { + "epoch": 0.8367118421560678, + "grad_norm": 2.370849132537842, + "learning_rate": 0.000144221784624889, + "loss": 0.3885, + "step": 21670 + }, + { + "epoch": 0.8370979574500946, + "grad_norm": 0.595461368560791, + "learning_rate": 0.00014419604360528721, + "loss": 0.3811, + "step": 21680 + }, + { + "epoch": 0.8374840727441214, + "grad_norm": 0.9013121128082275, + "learning_rate": 0.00014417030258568543, + "loss": 0.2406, + "step": 21690 + }, + { + "epoch": 0.8378701880381482, + "grad_norm": 1.3803203105926514, + "learning_rate": 0.00014414456156608364, + "loss": 0.2197, + "step": 21700 + }, + { + "epoch": 0.8382563033321749, + "grad_norm": 1.6163750886917114, + "learning_rate": 0.00014411882054648185, + "loss": 0.2622, + "step": 21710 + }, + { + "epoch": 0.8386424186262018, + "grad_norm": 3.604384660720825, + "learning_rate": 0.00014409307952688007, + "loss": 0.3688, + "step": 21720 + }, + { + "epoch": 0.8390285339202286, + "grad_norm": 1.4415024518966675, + "learning_rate": 0.00014406733850727828, + "loss": 0.2375, + "step": 21730 + }, + { + "epoch": 0.8394146492142553, + "grad_norm": 1.4819844961166382, + "learning_rate": 0.0001440415974876765, + "loss": 0.4065, + "step": 21740 + }, + { + "epoch": 0.8398007645082822, + "grad_norm": 1.3991562128067017, + "learning_rate": 0.0001440158564680747, + "loss": 0.2942, + "step": 21750 + }, + { + "epoch": 0.840186879802309, + "grad_norm": 2.022538185119629, + "learning_rate": 0.00014399011544847292, + "loss": 0.2699, + "step": 21760 + }, + { + "epoch": 0.8405729950963358, + "grad_norm": 2.418179512023926, + "learning_rate": 0.00014396437442887113, + "loss": 0.3481, + "step": 21770 + }, + { + "epoch": 0.8409591103903625, + "grad_norm": 0.930482029914856, + "learning_rate": 0.00014393863340926935, + "loss": 0.3257, + "step": 21780 + }, + { + "epoch": 0.8413452256843894, + "grad_norm": 3.616676092147827, + "learning_rate": 0.00014391289238966756, + "loss": 0.3844, + "step": 21790 + }, + { + "epoch": 0.8417313409784162, + "grad_norm": 1.7993167638778687, + "learning_rate": 0.00014388715137006577, + "loss": 0.4569, + "step": 21800 + }, + { + "epoch": 0.8421174562724429, + "grad_norm": 1.9243824481964111, + "learning_rate": 0.000143861410350464, + "loss": 0.282, + "step": 21810 + }, + { + "epoch": 0.8425035715664697, + "grad_norm": 1.6578466892242432, + "learning_rate": 0.0001438356693308622, + "loss": 0.314, + "step": 21820 + }, + { + "epoch": 0.8428896868604966, + "grad_norm": 1.4833110570907593, + "learning_rate": 0.00014380992831126041, + "loss": 0.2698, + "step": 21830 + }, + { + "epoch": 0.8432758021545234, + "grad_norm": 1.9081813097000122, + "learning_rate": 0.00014378418729165863, + "loss": 0.2392, + "step": 21840 + }, + { + "epoch": 0.8436619174485501, + "grad_norm": 1.8436548709869385, + "learning_rate": 0.00014375844627205684, + "loss": 0.1906, + "step": 21850 + }, + { + "epoch": 0.8440480327425769, + "grad_norm": 4.679655075073242, + 
"learning_rate": 0.00014373270525245505, + "loss": 0.6446, + "step": 21860 + }, + { + "epoch": 0.8444341480366038, + "grad_norm": 1.8216800689697266, + "learning_rate": 0.0001437069642328533, + "loss": 0.3272, + "step": 21870 + }, + { + "epoch": 0.8448202633306305, + "grad_norm": 1.0107386112213135, + "learning_rate": 0.00014368122321325148, + "loss": 0.3003, + "step": 21880 + }, + { + "epoch": 0.8452063786246573, + "grad_norm": 0.9573041796684265, + "learning_rate": 0.0001436554821936497, + "loss": 0.1757, + "step": 21890 + }, + { + "epoch": 0.8455924939186841, + "grad_norm": 0.9367936253547668, + "learning_rate": 0.0001436297411740479, + "loss": 0.2166, + "step": 21900 + }, + { + "epoch": 0.845978609212711, + "grad_norm": 3.1247951984405518, + "learning_rate": 0.00014360400015444612, + "loss": 0.3488, + "step": 21910 + }, + { + "epoch": 0.8463647245067377, + "grad_norm": 3.9438281059265137, + "learning_rate": 0.00014357825913484433, + "loss": 0.4498, + "step": 21920 + }, + { + "epoch": 0.8467508398007645, + "grad_norm": 0.909572958946228, + "learning_rate": 0.00014355251811524255, + "loss": 0.2698, + "step": 21930 + }, + { + "epoch": 0.8471369550947913, + "grad_norm": 2.6619715690612793, + "learning_rate": 0.0001435267770956408, + "loss": 0.4204, + "step": 21940 + }, + { + "epoch": 0.8475230703888181, + "grad_norm": 0.6143421530723572, + "learning_rate": 0.00014350103607603897, + "loss": 0.3573, + "step": 21950 + }, + { + "epoch": 0.8479091856828449, + "grad_norm": 0.3222682476043701, + "learning_rate": 0.0001434752950564372, + "loss": 0.2172, + "step": 21960 + }, + { + "epoch": 0.8482953009768717, + "grad_norm": 1.772538185119629, + "learning_rate": 0.0001434495540368354, + "loss": 0.4203, + "step": 21970 + }, + { + "epoch": 0.8486814162708984, + "grad_norm": 1.6327133178710938, + "learning_rate": 0.0001434238130172336, + "loss": 0.2153, + "step": 21980 + }, + { + "epoch": 0.8490675315649253, + "grad_norm": 1.0445518493652344, + "learning_rate": 0.00014339807199763183, + "loss": 0.3392, + "step": 21990 + }, + { + "epoch": 0.8494536468589521, + "grad_norm": 3.6096575260162354, + "learning_rate": 0.00014337233097803004, + "loss": 0.2691, + "step": 22000 + }, + { + "epoch": 0.8498397621529789, + "grad_norm": 1.4343204498291016, + "learning_rate": 0.00014334658995842828, + "loss": 0.3118, + "step": 22010 + }, + { + "epoch": 0.8502258774470057, + "grad_norm": 1.0348806381225586, + "learning_rate": 0.00014332084893882647, + "loss": 0.2886, + "step": 22020 + }, + { + "epoch": 0.8506119927410325, + "grad_norm": 0.5164201855659485, + "learning_rate": 0.00014329510791922468, + "loss": 0.2943, + "step": 22030 + }, + { + "epoch": 0.8509981080350593, + "grad_norm": 1.8109897375106812, + "learning_rate": 0.0001432693668996229, + "loss": 0.4021, + "step": 22040 + }, + { + "epoch": 0.851384223329086, + "grad_norm": 2.7065579891204834, + "learning_rate": 0.0001432436258800211, + "loss": 0.371, + "step": 22050 + }, + { + "epoch": 0.8517703386231129, + "grad_norm": 2.3028764724731445, + "learning_rate": 0.00014321788486041935, + "loss": 0.5026, + "step": 22060 + }, + { + "epoch": 0.8521564539171397, + "grad_norm": 1.3945609331130981, + "learning_rate": 0.00014319214384081753, + "loss": 0.4444, + "step": 22070 + }, + { + "epoch": 0.8525425692111664, + "grad_norm": 2.407951593399048, + "learning_rate": 0.00014316640282121577, + "loss": 0.4465, + "step": 22080 + }, + { + "epoch": 0.8529286845051932, + "grad_norm": 4.120944976806641, + "learning_rate": 0.00014314066180161396, + "loss": 0.3142, + "step": 22090 
+ }, + { + "epoch": 0.8533147997992201, + "grad_norm": 1.8841919898986816, + "learning_rate": 0.00014311492078201217, + "loss": 0.3609, + "step": 22100 + }, + { + "epoch": 0.8537009150932469, + "grad_norm": 5.1519951820373535, + "learning_rate": 0.00014308917976241039, + "loss": 0.3062, + "step": 22110 + }, + { + "epoch": 0.8540870303872736, + "grad_norm": 2.7280924320220947, + "learning_rate": 0.0001430634387428086, + "loss": 0.3678, + "step": 22120 + }, + { + "epoch": 0.8544731456813004, + "grad_norm": 0.23237809538841248, + "learning_rate": 0.00014303769772320684, + "loss": 0.2979, + "step": 22130 + }, + { + "epoch": 0.8548592609753273, + "grad_norm": 1.0587934255599976, + "learning_rate": 0.00014301195670360503, + "loss": 0.5672, + "step": 22140 + }, + { + "epoch": 0.855245376269354, + "grad_norm": 1.854447603225708, + "learning_rate": 0.00014298621568400327, + "loss": 0.3657, + "step": 22150 + }, + { + "epoch": 0.8556314915633808, + "grad_norm": 0.9766449332237244, + "learning_rate": 0.00014296047466440145, + "loss": 0.3219, + "step": 22160 + }, + { + "epoch": 0.8560176068574076, + "grad_norm": 1.7281047105789185, + "learning_rate": 0.00014293473364479967, + "loss": 0.3485, + "step": 22170 + }, + { + "epoch": 0.8564037221514345, + "grad_norm": 1.8366886377334595, + "learning_rate": 0.0001429089926251979, + "loss": 0.2929, + "step": 22180 + }, + { + "epoch": 0.8567898374454612, + "grad_norm": 0.8708136677742004, + "learning_rate": 0.0001428832516055961, + "loss": 0.3633, + "step": 22190 + }, + { + "epoch": 0.857175952739488, + "grad_norm": 1.5010342597961426, + "learning_rate": 0.00014285751058599433, + "loss": 0.3646, + "step": 22200 + }, + { + "epoch": 0.8575620680335148, + "grad_norm": 1.7438324689865112, + "learning_rate": 0.00014283176956639252, + "loss": 0.3093, + "step": 22210 + }, + { + "epoch": 0.8579481833275416, + "grad_norm": 1.5954341888427734, + "learning_rate": 0.00014280602854679076, + "loss": 0.2435, + "step": 22220 + }, + { + "epoch": 0.8583342986215684, + "grad_norm": 2.5279555320739746, + "learning_rate": 0.00014278028752718895, + "loss": 0.2526, + "step": 22230 + }, + { + "epoch": 0.8587204139155952, + "grad_norm": 3.4773006439208984, + "learning_rate": 0.00014275454650758716, + "loss": 0.3763, + "step": 22240 + }, + { + "epoch": 0.8591065292096219, + "grad_norm": 0.25110548734664917, + "learning_rate": 0.0001427288054879854, + "loss": 0.2265, + "step": 22250 + }, + { + "epoch": 0.8594926445036488, + "grad_norm": 2.3060946464538574, + "learning_rate": 0.00014270306446838359, + "loss": 0.3756, + "step": 22260 + }, + { + "epoch": 0.8598787597976756, + "grad_norm": 2.206308364868164, + "learning_rate": 0.00014267732344878183, + "loss": 0.295, + "step": 22270 + }, + { + "epoch": 0.8602648750917024, + "grad_norm": 1.1059858798980713, + "learning_rate": 0.00014265158242918, + "loss": 0.2382, + "step": 22280 + }, + { + "epoch": 0.8606509903857292, + "grad_norm": 0.045407798141241074, + "learning_rate": 0.00014262584140957825, + "loss": 0.1725, + "step": 22290 + }, + { + "epoch": 0.861037105679756, + "grad_norm": 0.2532581686973572, + "learning_rate": 0.00014260010038997644, + "loss": 0.3089, + "step": 22300 + }, + { + "epoch": 0.8614232209737828, + "grad_norm": 0.8851459622383118, + "learning_rate": 0.00014257435937037468, + "loss": 0.2721, + "step": 22310 + }, + { + "epoch": 0.8618093362678095, + "grad_norm": 2.9988598823547363, + "learning_rate": 0.0001425486183507729, + "loss": 0.3854, + "step": 22320 + }, + { + "epoch": 0.8621954515618364, + "grad_norm": 
1.888629674911499, + "learning_rate": 0.00014252287733117108, + "loss": 0.4472, + "step": 22330 + }, + { + "epoch": 0.8625815668558632, + "grad_norm": 0.9517232179641724, + "learning_rate": 0.00014249713631156932, + "loss": 0.3094, + "step": 22340 + }, + { + "epoch": 0.86296768214989, + "grad_norm": 1.4752097129821777, + "learning_rate": 0.0001424713952919675, + "loss": 0.2649, + "step": 22350 + }, + { + "epoch": 0.8633537974439167, + "grad_norm": 1.642285704612732, + "learning_rate": 0.00014244565427236575, + "loss": 0.4418, + "step": 22360 + }, + { + "epoch": 0.8637399127379436, + "grad_norm": 2.2177469730377197, + "learning_rate": 0.00014241991325276396, + "loss": 0.6865, + "step": 22370 + }, + { + "epoch": 0.8641260280319704, + "grad_norm": 0.9089158773422241, + "learning_rate": 0.00014239417223316217, + "loss": 0.1842, + "step": 22380 + }, + { + "epoch": 0.8645121433259971, + "grad_norm": 1.108091115951538, + "learning_rate": 0.00014236843121356039, + "loss": 0.3181, + "step": 22390 + }, + { + "epoch": 0.8648982586200239, + "grad_norm": 3.1317670345306396, + "learning_rate": 0.00014234269019395857, + "loss": 0.4798, + "step": 22400 + }, + { + "epoch": 0.8652843739140508, + "grad_norm": 1.0352108478546143, + "learning_rate": 0.0001423169491743568, + "loss": 0.3473, + "step": 22410 + }, + { + "epoch": 0.8656704892080775, + "grad_norm": 0.48546215891838074, + "learning_rate": 0.000142291208154755, + "loss": 0.2731, + "step": 22420 + }, + { + "epoch": 0.8660566045021043, + "grad_norm": 1.1608140468597412, + "learning_rate": 0.00014226546713515324, + "loss": 0.3202, + "step": 22430 + }, + { + "epoch": 0.8664427197961311, + "grad_norm": 0.19237665832042694, + "learning_rate": 0.00014223972611555145, + "loss": 0.4985, + "step": 22440 + }, + { + "epoch": 0.866828835090158, + "grad_norm": 0.12056539207696915, + "learning_rate": 0.00014221398509594967, + "loss": 0.2071, + "step": 22450 + }, + { + "epoch": 0.8672149503841847, + "grad_norm": 1.416548252105713, + "learning_rate": 0.00014218824407634788, + "loss": 0.2572, + "step": 22460 + }, + { + "epoch": 0.8676010656782115, + "grad_norm": 0.816148042678833, + "learning_rate": 0.00014216250305674607, + "loss": 0.2368, + "step": 22470 + }, + { + "epoch": 0.8679871809722383, + "grad_norm": 3.2394118309020996, + "learning_rate": 0.0001421367620371443, + "loss": 0.3768, + "step": 22480 + }, + { + "epoch": 0.8683732962662651, + "grad_norm": 0.7187336087226868, + "learning_rate": 0.00014211102101754252, + "loss": 0.3297, + "step": 22490 + }, + { + "epoch": 0.8687594115602919, + "grad_norm": 0.5154927372932434, + "learning_rate": 0.00014208527999794073, + "loss": 0.3301, + "step": 22500 + }, + { + "epoch": 0.8691455268543187, + "grad_norm": 1.0461368560791016, + "learning_rate": 0.00014205953897833895, + "loss": 0.335, + "step": 22510 + }, + { + "epoch": 0.8695316421483454, + "grad_norm": 0.9720495343208313, + "learning_rate": 0.00014203379795873716, + "loss": 0.2405, + "step": 22520 + }, + { + "epoch": 0.8699177574423723, + "grad_norm": 2.147216558456421, + "learning_rate": 0.00014200805693913537, + "loss": 0.3291, + "step": 22530 + }, + { + "epoch": 0.8703038727363991, + "grad_norm": 1.162614345550537, + "learning_rate": 0.00014198231591953356, + "loss": 0.2705, + "step": 22540 + }, + { + "epoch": 0.8706899880304259, + "grad_norm": 0.5020268559455872, + "learning_rate": 0.0001419565748999318, + "loss": 0.4403, + "step": 22550 + }, + { + "epoch": 0.8710761033244527, + "grad_norm": 3.198425531387329, + "learning_rate": 0.00014193083388033, + "loss": 
0.2244, + "step": 22560 + }, + { + "epoch": 0.8714622186184795, + "grad_norm": 2.4578161239624023, + "learning_rate": 0.00014190509286072823, + "loss": 0.3993, + "step": 22570 + }, + { + "epoch": 0.8718483339125063, + "grad_norm": 1.4559924602508545, + "learning_rate": 0.00014187935184112644, + "loss": 0.2631, + "step": 22580 + }, + { + "epoch": 0.872234449206533, + "grad_norm": 0.6834856271743774, + "learning_rate": 0.00014185361082152465, + "loss": 0.3183, + "step": 22590 + }, + { + "epoch": 0.8726205645005599, + "grad_norm": 3.9718177318573, + "learning_rate": 0.00014182786980192287, + "loss": 0.439, + "step": 22600 + }, + { + "epoch": 0.8730066797945867, + "grad_norm": 1.7797685861587524, + "learning_rate": 0.00014180212878232105, + "loss": 0.2938, + "step": 22610 + }, + { + "epoch": 0.8733927950886134, + "grad_norm": 2.137479543685913, + "learning_rate": 0.0001417763877627193, + "loss": 0.4733, + "step": 22620 + }, + { + "epoch": 0.8737789103826402, + "grad_norm": 0.7577596306800842, + "learning_rate": 0.0001417506467431175, + "loss": 0.3599, + "step": 22630 + }, + { + "epoch": 0.8741650256766671, + "grad_norm": 0.8944536447525024, + "learning_rate": 0.00014172490572351572, + "loss": 0.3393, + "step": 22640 + }, + { + "epoch": 0.8745511409706939, + "grad_norm": 1.59170663356781, + "learning_rate": 0.00014169916470391393, + "loss": 0.3578, + "step": 22650 + }, + { + "epoch": 0.8749372562647206, + "grad_norm": 0.9852517247200012, + "learning_rate": 0.00014167342368431215, + "loss": 0.4203, + "step": 22660 + }, + { + "epoch": 0.8753233715587474, + "grad_norm": 1.0319880247116089, + "learning_rate": 0.00014164768266471036, + "loss": 0.3332, + "step": 22670 + }, + { + "epoch": 0.8757094868527743, + "grad_norm": 3.713357925415039, + "learning_rate": 0.00014162194164510857, + "loss": 0.412, + "step": 22680 + }, + { + "epoch": 0.876095602146801, + "grad_norm": 1.3197567462921143, + "learning_rate": 0.00014159620062550679, + "loss": 0.286, + "step": 22690 + }, + { + "epoch": 0.8764817174408278, + "grad_norm": 3.604928493499756, + "learning_rate": 0.000141570459605905, + "loss": 0.4206, + "step": 22700 + }, + { + "epoch": 0.8768678327348546, + "grad_norm": 3.1074795722961426, + "learning_rate": 0.0001415447185863032, + "loss": 0.2261, + "step": 22710 + }, + { + "epoch": 0.8772539480288815, + "grad_norm": 2.855581760406494, + "learning_rate": 0.00014151897756670143, + "loss": 0.6825, + "step": 22720 + }, + { + "epoch": 0.8776400633229082, + "grad_norm": 3.5756995677948, + "learning_rate": 0.00014149323654709964, + "loss": 0.4283, + "step": 22730 + }, + { + "epoch": 0.878026178616935, + "grad_norm": 1.4255709648132324, + "learning_rate": 0.00014146749552749785, + "loss": 0.3243, + "step": 22740 + }, + { + "epoch": 0.8784122939109618, + "grad_norm": 0.3095746338367462, + "learning_rate": 0.00014144175450789607, + "loss": 0.1052, + "step": 22750 + }, + { + "epoch": 0.8787984092049886, + "grad_norm": 1.4129611253738403, + "learning_rate": 0.00014141601348829428, + "loss": 0.2746, + "step": 22760 + }, + { + "epoch": 0.8791845244990154, + "grad_norm": 0.6448315382003784, + "learning_rate": 0.0001413902724686925, + "loss": 0.3312, + "step": 22770 + }, + { + "epoch": 0.8795706397930422, + "grad_norm": 1.6328849792480469, + "learning_rate": 0.0001413645314490907, + "loss": 0.2346, + "step": 22780 + }, + { + "epoch": 0.879956755087069, + "grad_norm": 1.0974128246307373, + "learning_rate": 0.00014133879042948892, + "loss": 0.2446, + "step": 22790 + }, + { + "epoch": 0.8803428703810958, + "grad_norm": 
2.3657541275024414, + "learning_rate": 0.00014131304940988713, + "loss": 0.3439, + "step": 22800 + }, + { + "epoch": 0.8807289856751226, + "grad_norm": 0.8959445953369141, + "learning_rate": 0.00014128730839028535, + "loss": 0.2896, + "step": 22810 + }, + { + "epoch": 0.8811151009691494, + "grad_norm": 1.5202107429504395, + "learning_rate": 0.00014126156737068356, + "loss": 0.1951, + "step": 22820 + }, + { + "epoch": 0.8815012162631762, + "grad_norm": 1.3710687160491943, + "learning_rate": 0.00014123582635108177, + "loss": 0.3193, + "step": 22830 + }, + { + "epoch": 0.881887331557203, + "grad_norm": 2.18868088722229, + "learning_rate": 0.00014121008533147999, + "loss": 0.3535, + "step": 22840 + }, + { + "epoch": 0.8822734468512298, + "grad_norm": 0.8251023888587952, + "learning_rate": 0.0001411843443118782, + "loss": 0.2536, + "step": 22850 + }, + { + "epoch": 0.8826595621452565, + "grad_norm": 1.0674525499343872, + "learning_rate": 0.0001411586032922764, + "loss": 0.3482, + "step": 22860 + }, + { + "epoch": 0.8830456774392834, + "grad_norm": 2.1199145317077637, + "learning_rate": 0.00014113286227267463, + "loss": 0.201, + "step": 22870 + }, + { + "epoch": 0.8834317927333102, + "grad_norm": 1.1958723068237305, + "learning_rate": 0.00014110712125307284, + "loss": 0.2813, + "step": 22880 + }, + { + "epoch": 0.883817908027337, + "grad_norm": 1.7805982828140259, + "learning_rate": 0.00014108138023347105, + "loss": 0.5317, + "step": 22890 + }, + { + "epoch": 0.8842040233213637, + "grad_norm": 1.9648222923278809, + "learning_rate": 0.00014105563921386926, + "loss": 0.3022, + "step": 22900 + }, + { + "epoch": 0.8845901386153906, + "grad_norm": 0.9053369164466858, + "learning_rate": 0.00014102989819426748, + "loss": 0.2886, + "step": 22910 + }, + { + "epoch": 0.8849762539094174, + "grad_norm": 1.251861333847046, + "learning_rate": 0.0001410041571746657, + "loss": 0.2632, + "step": 22920 + }, + { + "epoch": 0.8853623692034441, + "grad_norm": 3.8411691188812256, + "learning_rate": 0.0001409784161550639, + "loss": 0.3056, + "step": 22930 + }, + { + "epoch": 0.8857484844974709, + "grad_norm": 0.5969072580337524, + "learning_rate": 0.00014095267513546212, + "loss": 0.3806, + "step": 22940 + }, + { + "epoch": 0.8861345997914978, + "grad_norm": 4.688140392303467, + "learning_rate": 0.00014092693411586033, + "loss": 0.3318, + "step": 22950 + }, + { + "epoch": 0.8865207150855245, + "grad_norm": 4.2694993019104, + "learning_rate": 0.00014090119309625854, + "loss": 0.34, + "step": 22960 + }, + { + "epoch": 0.8869068303795513, + "grad_norm": 2.4169955253601074, + "learning_rate": 0.00014087545207665676, + "loss": 0.466, + "step": 22970 + }, + { + "epoch": 0.8872929456735781, + "grad_norm": 0.044384077191352844, + "learning_rate": 0.00014084971105705497, + "loss": 0.3196, + "step": 22980 + }, + { + "epoch": 0.887679060967605, + "grad_norm": 0.6931707262992859, + "learning_rate": 0.0001408239700374532, + "loss": 0.2166, + "step": 22990 + }, + { + "epoch": 0.8880651762616317, + "grad_norm": 3.195596933364868, + "learning_rate": 0.0001407982290178514, + "loss": 0.404, + "step": 23000 + }, + { + "epoch": 0.8884512915556585, + "grad_norm": 2.055058002471924, + "learning_rate": 0.0001407724879982496, + "loss": 0.3685, + "step": 23010 + }, + { + "epoch": 0.8888374068496853, + "grad_norm": 0.7302665114402771, + "learning_rate": 0.00014074674697864782, + "loss": 0.2247, + "step": 23020 + }, + { + "epoch": 0.8892235221437121, + "grad_norm": 1.1183364391326904, + "learning_rate": 0.00014072100595904604, + "loss": 0.3699, 
+ "step": 23030 + }, + { + "epoch": 0.8896096374377389, + "grad_norm": 1.040553092956543, + "learning_rate": 0.00014069526493944425, + "loss": 0.1791, + "step": 23040 + }, + { + "epoch": 0.8899957527317657, + "grad_norm": 1.5321402549743652, + "learning_rate": 0.00014066952391984246, + "loss": 0.2533, + "step": 23050 + }, + { + "epoch": 0.8903818680257926, + "grad_norm": 0.298433780670166, + "learning_rate": 0.0001406437829002407, + "loss": 0.2291, + "step": 23060 + }, + { + "epoch": 0.8907679833198193, + "grad_norm": 2.563689947128296, + "learning_rate": 0.0001406180418806389, + "loss": 0.4175, + "step": 23070 + }, + { + "epoch": 0.8911540986138461, + "grad_norm": 3.0614495277404785, + "learning_rate": 0.00014059230086103713, + "loss": 0.2875, + "step": 23080 + }, + { + "epoch": 0.8915402139078729, + "grad_norm": 0.4387970268726349, + "learning_rate": 0.00014056655984143532, + "loss": 0.2982, + "step": 23090 + }, + { + "epoch": 0.8919263292018997, + "grad_norm": 2.5590367317199707, + "learning_rate": 0.00014054081882183353, + "loss": 0.2858, + "step": 23100 + }, + { + "epoch": 0.8923124444959265, + "grad_norm": 0.6369298696517944, + "learning_rate": 0.00014051507780223174, + "loss": 0.2889, + "step": 23110 + }, + { + "epoch": 0.8926985597899533, + "grad_norm": 0.5210187435150146, + "learning_rate": 0.00014048933678262996, + "loss": 0.2442, + "step": 23120 + }, + { + "epoch": 0.89308467508398, + "grad_norm": 3.5073516368865967, + "learning_rate": 0.0001404635957630282, + "loss": 0.1733, + "step": 23130 + }, + { + "epoch": 0.8934707903780069, + "grad_norm": 2.1915247440338135, + "learning_rate": 0.00014043785474342638, + "loss": 0.4282, + "step": 23140 + }, + { + "epoch": 0.8938569056720337, + "grad_norm": 0.8182128071784973, + "learning_rate": 0.00014041211372382462, + "loss": 0.5501, + "step": 23150 + }, + { + "epoch": 0.8942430209660605, + "grad_norm": 0.4261817932128906, + "learning_rate": 0.0001403863727042228, + "loss": 0.2192, + "step": 23160 + }, + { + "epoch": 0.8946291362600872, + "grad_norm": 1.244523525238037, + "learning_rate": 0.00014036063168462102, + "loss": 0.2909, + "step": 23170 + }, + { + "epoch": 0.8950152515541141, + "grad_norm": 1.1659152507781982, + "learning_rate": 0.00014033489066501926, + "loss": 0.3119, + "step": 23180 + }, + { + "epoch": 0.8954013668481409, + "grad_norm": 0.4217310845851898, + "learning_rate": 0.00014030914964541745, + "loss": 0.2454, + "step": 23190 + }, + { + "epoch": 0.8957874821421676, + "grad_norm": 1.5685316324234009, + "learning_rate": 0.0001402834086258157, + "loss": 0.3609, + "step": 23200 + }, + { + "epoch": 0.8961735974361944, + "grad_norm": 2.6524040699005127, + "learning_rate": 0.00014025766760621388, + "loss": 0.2508, + "step": 23210 + }, + { + "epoch": 0.8965597127302213, + "grad_norm": 2.4932234287261963, + "learning_rate": 0.00014023192658661212, + "loss": 0.4047, + "step": 23220 + }, + { + "epoch": 0.896945828024248, + "grad_norm": 0.5093832015991211, + "learning_rate": 0.0001402061855670103, + "loss": 0.1639, + "step": 23230 + }, + { + "epoch": 0.8973319433182748, + "grad_norm": 1.1632994413375854, + "learning_rate": 0.00014018044454740852, + "loss": 0.2457, + "step": 23240 + }, + { + "epoch": 0.8977180586123016, + "grad_norm": 2.181727647781372, + "learning_rate": 0.00014015470352780676, + "loss": 0.3784, + "step": 23250 + }, + { + "epoch": 0.8981041739063285, + "grad_norm": 3.8110599517822266, + "learning_rate": 0.00014012896250820494, + "loss": 0.282, + "step": 23260 + }, + { + "epoch": 0.8984902892003552, + "grad_norm": 
2.8994619846343994, + "learning_rate": 0.00014010322148860318, + "loss": 0.3801, + "step": 23270 + }, + { + "epoch": 0.898876404494382, + "grad_norm": 1.2624458074569702, + "learning_rate": 0.00014007748046900137, + "loss": 0.3718, + "step": 23280 + }, + { + "epoch": 0.8992625197884088, + "grad_norm": 1.5995053052902222, + "learning_rate": 0.0001400517394493996, + "loss": 0.1828, + "step": 23290 + }, + { + "epoch": 0.8996486350824356, + "grad_norm": 2.251941680908203, + "learning_rate": 0.0001400259984297978, + "loss": 0.4434, + "step": 23300 + }, + { + "epoch": 0.9000347503764624, + "grad_norm": 1.1319392919540405, + "learning_rate": 0.000140000257410196, + "loss": 0.2479, + "step": 23310 + }, + { + "epoch": 0.9004208656704892, + "grad_norm": 2.075227737426758, + "learning_rate": 0.00013997451639059425, + "loss": 0.3079, + "step": 23320 + }, + { + "epoch": 0.9008069809645161, + "grad_norm": 0.6504748463630676, + "learning_rate": 0.00013994877537099244, + "loss": 0.2899, + "step": 23330 + }, + { + "epoch": 0.9011930962585428, + "grad_norm": 2.1623177528381348, + "learning_rate": 0.00013992303435139068, + "loss": 0.2651, + "step": 23340 + }, + { + "epoch": 0.9015792115525696, + "grad_norm": 2.159290313720703, + "learning_rate": 0.00013989729333178886, + "loss": 0.2975, + "step": 23350 + }, + { + "epoch": 0.9019653268465964, + "grad_norm": 0.7650458216667175, + "learning_rate": 0.0001398715523121871, + "loss": 0.2699, + "step": 23360 + }, + { + "epoch": 0.9023514421406232, + "grad_norm": 4.838365077972412, + "learning_rate": 0.00013984581129258532, + "loss": 0.4633, + "step": 23370 + }, + { + "epoch": 0.90273755743465, + "grad_norm": 4.598055362701416, + "learning_rate": 0.0001398200702729835, + "loss": 0.5255, + "step": 23380 + }, + { + "epoch": 0.9031236727286768, + "grad_norm": 0.9883280396461487, + "learning_rate": 0.00013979432925338174, + "loss": 0.5096, + "step": 23390 + }, + { + "epoch": 0.9035097880227035, + "grad_norm": 2.1574087142944336, + "learning_rate": 0.00013976858823377993, + "loss": 0.2999, + "step": 23400 + }, + { + "epoch": 0.9038959033167304, + "grad_norm": 1.7071588039398193, + "learning_rate": 0.00013974284721417817, + "loss": 0.4066, + "step": 23410 + }, + { + "epoch": 0.9042820186107572, + "grad_norm": 1.5206272602081299, + "learning_rate": 0.00013971710619457636, + "loss": 0.2841, + "step": 23420 + }, + { + "epoch": 0.904668133904784, + "grad_norm": 0.8649633526802063, + "learning_rate": 0.0001396913651749746, + "loss": 0.2263, + "step": 23430 + }, + { + "epoch": 0.9050542491988107, + "grad_norm": 0.35130754113197327, + "learning_rate": 0.0001396656241553728, + "loss": 0.3575, + "step": 23440 + }, + { + "epoch": 0.9054403644928376, + "grad_norm": 0.6659330725669861, + "learning_rate": 0.000139639883135771, + "loss": 0.2895, + "step": 23450 + }, + { + "epoch": 0.9058264797868644, + "grad_norm": 1.1387370824813843, + "learning_rate": 0.00013961414211616924, + "loss": 0.3574, + "step": 23460 + }, + { + "epoch": 0.9062125950808911, + "grad_norm": 1.8786828517913818, + "learning_rate": 0.00013958840109656742, + "loss": 0.5127, + "step": 23470 + }, + { + "epoch": 0.9065987103749179, + "grad_norm": 1.1299179792404175, + "learning_rate": 0.00013956266007696566, + "loss": 0.2215, + "step": 23480 + }, + { + "epoch": 0.9069848256689448, + "grad_norm": 1.1256846189498901, + "learning_rate": 0.00013953691905736388, + "loss": 0.302, + "step": 23490 + }, + { + "epoch": 0.9073709409629716, + "grad_norm": 0.8697860836982727, + "learning_rate": 0.0001395111780377621, + "loss": 
0.2846, + "step": 23500 + }, + { + "epoch": 0.9077570562569983, + "grad_norm": 1.4939324855804443, + "learning_rate": 0.0001394854370181603, + "loss": 0.2376, + "step": 23510 + }, + { + "epoch": 0.9081431715510251, + "grad_norm": 0.5483170747756958, + "learning_rate": 0.00013945969599855852, + "loss": 0.2546, + "step": 23520 + }, + { + "epoch": 0.908529286845052, + "grad_norm": 1.0681931972503662, + "learning_rate": 0.00013943395497895673, + "loss": 0.2236, + "step": 23530 + }, + { + "epoch": 0.9089154021390787, + "grad_norm": 1.9246234893798828, + "learning_rate": 0.00013940821395935492, + "loss": 0.3332, + "step": 23540 + }, + { + "epoch": 0.9093015174331055, + "grad_norm": 6.114970684051514, + "learning_rate": 0.00013938247293975316, + "loss": 0.3265, + "step": 23550 + }, + { + "epoch": 0.9096876327271323, + "grad_norm": 2.369112968444824, + "learning_rate": 0.00013935673192015137, + "loss": 0.3105, + "step": 23560 + }, + { + "epoch": 0.9100737480211591, + "grad_norm": 4.402872562408447, + "learning_rate": 0.00013933099090054958, + "loss": 0.3496, + "step": 23570 + }, + { + "epoch": 0.9104598633151859, + "grad_norm": 0.6064890027046204, + "learning_rate": 0.0001393052498809478, + "loss": 0.1322, + "step": 23580 + }, + { + "epoch": 0.9108459786092127, + "grad_norm": 0.41702714562416077, + "learning_rate": 0.000139279508861346, + "loss": 0.0886, + "step": 23590 + }, + { + "epoch": 0.9112320939032396, + "grad_norm": 1.1597472429275513, + "learning_rate": 0.00013925376784174422, + "loss": 0.1967, + "step": 23600 + }, + { + "epoch": 0.9116182091972663, + "grad_norm": 1.1049001216888428, + "learning_rate": 0.0001392280268221424, + "loss": 0.2802, + "step": 23610 + }, + { + "epoch": 0.9120043244912931, + "grad_norm": 0.7986807227134705, + "learning_rate": 0.00013920228580254065, + "loss": 0.1872, + "step": 23620 + }, + { + "epoch": 0.9123904397853199, + "grad_norm": 0.548693060874939, + "learning_rate": 0.00013917654478293886, + "loss": 0.3561, + "step": 23630 + }, + { + "epoch": 0.9127765550793467, + "grad_norm": 1.5944240093231201, + "learning_rate": 0.00013915080376333708, + "loss": 0.4619, + "step": 23640 + }, + { + "epoch": 0.9131626703733735, + "grad_norm": 1.9891632795333862, + "learning_rate": 0.0001391250627437353, + "loss": 0.3883, + "step": 23650 + }, + { + "epoch": 0.9135487856674003, + "grad_norm": 0.4564145803451538, + "learning_rate": 0.0001390993217241335, + "loss": 0.233, + "step": 23660 + }, + { + "epoch": 0.913934900961427, + "grad_norm": 1.1683684587478638, + "learning_rate": 0.00013907358070453172, + "loss": 0.4692, + "step": 23670 + }, + { + "epoch": 0.9143210162554539, + "grad_norm": 5.883500099182129, + "learning_rate": 0.00013904783968492993, + "loss": 0.2134, + "step": 23680 + }, + { + "epoch": 0.9147071315494807, + "grad_norm": 0.7426010370254517, + "learning_rate": 0.00013902209866532814, + "loss": 0.3608, + "step": 23690 + }, + { + "epoch": 0.9150932468435075, + "grad_norm": 1.5476068258285522, + "learning_rate": 0.00013899635764572636, + "loss": 0.2194, + "step": 23700 + }, + { + "epoch": 0.9154793621375342, + "grad_norm": 1.5702605247497559, + "learning_rate": 0.00013897061662612457, + "loss": 0.1731, + "step": 23710 + }, + { + "epoch": 0.9158654774315611, + "grad_norm": 2.336073637008667, + "learning_rate": 0.00013894487560652278, + "loss": 0.2703, + "step": 23720 + }, + { + "epoch": 0.9162515927255879, + "grad_norm": 0.4154629111289978, + "learning_rate": 0.000138919134586921, + "loss": 0.2601, + "step": 23730 + }, + { + "epoch": 0.9166377080196146, + 
"grad_norm": 1.9994091987609863, + "learning_rate": 0.0001388933935673192, + "loss": 0.4536, + "step": 23740 + }, + { + "epoch": 0.9170238233136414, + "grad_norm": 0.4610597491264343, + "learning_rate": 0.00013886765254771742, + "loss": 0.2726, + "step": 23750 + }, + { + "epoch": 0.9174099386076683, + "grad_norm": 2.19671893119812, + "learning_rate": 0.00013884191152811564, + "loss": 0.3418, + "step": 23760 + }, + { + "epoch": 0.917796053901695, + "grad_norm": 0.619023323059082, + "learning_rate": 0.00013881617050851385, + "loss": 0.2761, + "step": 23770 + }, + { + "epoch": 0.9181821691957218, + "grad_norm": 1.667083978652954, + "learning_rate": 0.00013879042948891206, + "loss": 0.2341, + "step": 23780 + }, + { + "epoch": 0.9185682844897486, + "grad_norm": 0.349020391702652, + "learning_rate": 0.00013876468846931028, + "loss": 0.244, + "step": 23790 + }, + { + "epoch": 0.9189543997837755, + "grad_norm": 3.2495415210723877, + "learning_rate": 0.0001387389474497085, + "loss": 0.2454, + "step": 23800 + }, + { + "epoch": 0.9193405150778022, + "grad_norm": 0.7900146842002869, + "learning_rate": 0.0001387132064301067, + "loss": 0.3209, + "step": 23810 + }, + { + "epoch": 0.919726630371829, + "grad_norm": 1.2435237169265747, + "learning_rate": 0.00013868746541050492, + "loss": 0.3719, + "step": 23820 + }, + { + "epoch": 0.9201127456658558, + "grad_norm": 0.7372536659240723, + "learning_rate": 0.00013866172439090313, + "loss": 0.2984, + "step": 23830 + }, + { + "epoch": 0.9204988609598826, + "grad_norm": 2.814180374145508, + "learning_rate": 0.00013863598337130134, + "loss": 0.5081, + "step": 23840 + }, + { + "epoch": 0.9208849762539094, + "grad_norm": 3.5411558151245117, + "learning_rate": 0.00013861024235169956, + "loss": 0.4475, + "step": 23850 + }, + { + "epoch": 0.9212710915479362, + "grad_norm": 0.41628485918045044, + "learning_rate": 0.00013858450133209777, + "loss": 0.1679, + "step": 23860 + }, + { + "epoch": 0.9216572068419631, + "grad_norm": 0.7951272130012512, + "learning_rate": 0.00013855876031249598, + "loss": 0.4346, + "step": 23870 + }, + { + "epoch": 0.9220433221359898, + "grad_norm": 0.6857497692108154, + "learning_rate": 0.0001385330192928942, + "loss": 0.2262, + "step": 23880 + }, + { + "epoch": 0.9224294374300166, + "grad_norm": 2.732487678527832, + "learning_rate": 0.0001385072782732924, + "loss": 0.3198, + "step": 23890 + }, + { + "epoch": 0.9228155527240434, + "grad_norm": 0.18741728365421295, + "learning_rate": 0.00013848153725369062, + "loss": 0.2409, + "step": 23900 + }, + { + "epoch": 0.9232016680180702, + "grad_norm": 0.46343281865119934, + "learning_rate": 0.00013845579623408884, + "loss": 0.2379, + "step": 23910 + }, + { + "epoch": 0.923587783312097, + "grad_norm": 1.7090940475463867, + "learning_rate": 0.00013843005521448705, + "loss": 0.2274, + "step": 23920 + }, + { + "epoch": 0.9239738986061238, + "grad_norm": 0.2000303715467453, + "learning_rate": 0.00013840431419488526, + "loss": 0.5171, + "step": 23930 + }, + { + "epoch": 0.9243600139001505, + "grad_norm": 1.6152868270874023, + "learning_rate": 0.00013837857317528348, + "loss": 0.1348, + "step": 23940 + }, + { + "epoch": 0.9247461291941774, + "grad_norm": 1.5346245765686035, + "learning_rate": 0.0001383528321556817, + "loss": 0.3708, + "step": 23950 + }, + { + "epoch": 0.9251322444882042, + "grad_norm": 2.1073787212371826, + "learning_rate": 0.0001383270911360799, + "loss": 0.3694, + "step": 23960 + }, + { + "epoch": 0.925518359782231, + "grad_norm": 12.8298921585083, + "learning_rate": 0.00013830135011647812, 
+ "loss": 0.269, + "step": 23970 + }, + { + "epoch": 0.9259044750762577, + "grad_norm": 0.43689021468162537, + "learning_rate": 0.00013827560909687633, + "loss": 0.3099, + "step": 23980 + }, + { + "epoch": 0.9262905903702846, + "grad_norm": 2.084096908569336, + "learning_rate": 0.00013824986807727457, + "loss": 0.4423, + "step": 23990 + }, + { + "epoch": 0.9266767056643114, + "grad_norm": 0.9367966651916504, + "learning_rate": 0.00013822412705767276, + "loss": 0.2202, + "step": 24000 + }, + { + "epoch": 0.9270628209583381, + "grad_norm": 0.14286178350448608, + "learning_rate": 0.00013819838603807097, + "loss": 0.2302, + "step": 24010 + }, + { + "epoch": 0.9274489362523649, + "grad_norm": 0.7110779285430908, + "learning_rate": 0.00013817264501846918, + "loss": 0.3598, + "step": 24020 + }, + { + "epoch": 0.9278350515463918, + "grad_norm": 2.352980136871338, + "learning_rate": 0.0001381469039988674, + "loss": 0.4493, + "step": 24030 + }, + { + "epoch": 0.9282211668404186, + "grad_norm": 2.2235450744628906, + "learning_rate": 0.0001381211629792656, + "loss": 0.547, + "step": 24040 + }, + { + "epoch": 0.9286072821344453, + "grad_norm": 2.4419260025024414, + "learning_rate": 0.00013809542195966382, + "loss": 0.4612, + "step": 24050 + }, + { + "epoch": 0.9289933974284721, + "grad_norm": 1.3784935474395752, + "learning_rate": 0.00013806968094006206, + "loss": 0.3996, + "step": 24060 + }, + { + "epoch": 0.929379512722499, + "grad_norm": 1.019810676574707, + "learning_rate": 0.00013804393992046025, + "loss": 0.5654, + "step": 24070 + }, + { + "epoch": 0.9297656280165257, + "grad_norm": 2.399096965789795, + "learning_rate": 0.00013801819890085846, + "loss": 0.3335, + "step": 24080 + }, + { + "epoch": 0.9301517433105525, + "grad_norm": 0.28834161162376404, + "learning_rate": 0.00013799245788125668, + "loss": 0.4946, + "step": 24090 + }, + { + "epoch": 0.9305378586045794, + "grad_norm": 0.10426662117242813, + "learning_rate": 0.0001379667168616549, + "loss": 0.3275, + "step": 24100 + }, + { + "epoch": 0.9309239738986061, + "grad_norm": 2.7809340953826904, + "learning_rate": 0.0001379409758420531, + "loss": 0.3549, + "step": 24110 + }, + { + "epoch": 0.9313100891926329, + "grad_norm": 1.8708065748214722, + "learning_rate": 0.00013791523482245132, + "loss": 0.1429, + "step": 24120 + }, + { + "epoch": 0.9316962044866597, + "grad_norm": 1.9992274045944214, + "learning_rate": 0.00013788949380284956, + "loss": 0.2818, + "step": 24130 + }, + { + "epoch": 0.9320823197806866, + "grad_norm": 1.3665111064910889, + "learning_rate": 0.00013786375278324774, + "loss": 0.3565, + "step": 24140 + }, + { + "epoch": 0.9324684350747133, + "grad_norm": 3.106234550476074, + "learning_rate": 0.00013783801176364596, + "loss": 0.3706, + "step": 24150 + }, + { + "epoch": 0.9328545503687401, + "grad_norm": 1.2186559438705444, + "learning_rate": 0.00013781227074404417, + "loss": 0.1616, + "step": 24160 + }, + { + "epoch": 0.9332406656627669, + "grad_norm": 0.41551148891448975, + "learning_rate": 0.00013778652972444238, + "loss": 0.3073, + "step": 24170 + }, + { + "epoch": 0.9336267809567937, + "grad_norm": 2.015069007873535, + "learning_rate": 0.00013776078870484062, + "loss": 0.3317, + "step": 24180 + }, + { + "epoch": 0.9340128962508205, + "grad_norm": 1.9990328550338745, + "learning_rate": 0.0001377350476852388, + "loss": 0.4937, + "step": 24190 + }, + { + "epoch": 0.9343990115448473, + "grad_norm": 2.351898670196533, + "learning_rate": 0.00013770930666563705, + "loss": 0.3994, + "step": 24200 + }, + { + "epoch": 
0.934785126838874, + "grad_norm": 1.4670008420944214, + "learning_rate": 0.00013768356564603524, + "loss": 0.2905, + "step": 24210 + }, + { + "epoch": 0.9351712421329009, + "grad_norm": 0.9890618920326233, + "learning_rate": 0.00013765782462643345, + "loss": 0.2512, + "step": 24220 + }, + { + "epoch": 0.9355573574269277, + "grad_norm": 0.3020402491092682, + "learning_rate": 0.00013763208360683166, + "loss": 0.3701, + "step": 24230 + }, + { + "epoch": 0.9359434727209545, + "grad_norm": 0.42218661308288574, + "learning_rate": 0.00013760634258722988, + "loss": 0.3395, + "step": 24240 + }, + { + "epoch": 0.9363295880149812, + "grad_norm": 1.5767306089401245, + "learning_rate": 0.00013758060156762812, + "loss": 0.3941, + "step": 24250 + }, + { + "epoch": 0.9367157033090081, + "grad_norm": 0.5416197180747986, + "learning_rate": 0.0001375548605480263, + "loss": 0.2915, + "step": 24260 + }, + { + "epoch": 0.9371018186030349, + "grad_norm": 1.175347924232483, + "learning_rate": 0.00013752911952842454, + "loss": 0.1284, + "step": 24270 + }, + { + "epoch": 0.9374879338970616, + "grad_norm": 0.8719255924224854, + "learning_rate": 0.00013750337850882273, + "loss": 0.3144, + "step": 24280 + }, + { + "epoch": 0.9378740491910884, + "grad_norm": 0.2711631655693054, + "learning_rate": 0.00013747763748922097, + "loss": 0.1561, + "step": 24290 + }, + { + "epoch": 0.9382601644851153, + "grad_norm": 3.2228004932403564, + "learning_rate": 0.00013745189646961918, + "loss": 0.5494, + "step": 24300 + }, + { + "epoch": 0.9386462797791421, + "grad_norm": 0.05647150054574013, + "learning_rate": 0.00013742615545001737, + "loss": 0.3571, + "step": 24310 + }, + { + "epoch": 0.9390323950731688, + "grad_norm": 0.8459005951881409, + "learning_rate": 0.0001374004144304156, + "loss": 0.1948, + "step": 24320 + }, + { + "epoch": 0.9394185103671956, + "grad_norm": 0.7044252157211304, + "learning_rate": 0.0001373746734108138, + "loss": 0.2511, + "step": 24330 + }, + { + "epoch": 0.9398046256612225, + "grad_norm": 0.979590654373169, + "learning_rate": 0.00013734893239121204, + "loss": 0.2158, + "step": 24340 + }, + { + "epoch": 0.9401907409552492, + "grad_norm": 1.0585628747940063, + "learning_rate": 0.00013732319137161022, + "loss": 0.1339, + "step": 24350 + }, + { + "epoch": 0.940576856249276, + "grad_norm": 0.6700488328933716, + "learning_rate": 0.00013729745035200846, + "loss": 0.2056, + "step": 24360 + }, + { + "epoch": 0.9409629715433029, + "grad_norm": 1.6656709909439087, + "learning_rate": 0.00013727170933240668, + "loss": 0.2957, + "step": 24370 + }, + { + "epoch": 0.9413490868373297, + "grad_norm": 2.963427782058716, + "learning_rate": 0.00013724596831280486, + "loss": 0.2673, + "step": 24380 + }, + { + "epoch": 0.9417352021313564, + "grad_norm": 0.8103615641593933, + "learning_rate": 0.0001372202272932031, + "loss": 0.372, + "step": 24390 + }, + { + "epoch": 0.9421213174253832, + "grad_norm": 2.3593873977661133, + "learning_rate": 0.0001371944862736013, + "loss": 0.431, + "step": 24400 + }, + { + "epoch": 0.9425074327194101, + "grad_norm": 2.8472931385040283, + "learning_rate": 0.00013716874525399953, + "loss": 0.4584, + "step": 24410 + }, + { + "epoch": 0.9428935480134368, + "grad_norm": 1.2765402793884277, + "learning_rate": 0.00013714300423439772, + "loss": 0.2543, + "step": 24420 + }, + { + "epoch": 0.9432796633074636, + "grad_norm": 1.4226797819137573, + "learning_rate": 0.00013711726321479596, + "loss": 0.2914, + "step": 24430 + }, + { + "epoch": 0.9436657786014904, + "grad_norm": 6.906572341918945, + 
"learning_rate": 0.00013709152219519417, + "loss": 0.4415, + "step": 24440 + }, + { + "epoch": 0.9440518938955172, + "grad_norm": 1.8387972116470337, + "learning_rate": 0.00013706578117559236, + "loss": 0.3018, + "step": 24450 + }, + { + "epoch": 0.944438009189544, + "grad_norm": 0.7259104251861572, + "learning_rate": 0.0001370400401559906, + "loss": 0.2356, + "step": 24460 + }, + { + "epoch": 0.9448241244835708, + "grad_norm": 0.6452949643135071, + "learning_rate": 0.00013701429913638878, + "loss": 0.2382, + "step": 24470 + }, + { + "epoch": 0.9452102397775975, + "grad_norm": 4.259208679199219, + "learning_rate": 0.00013698855811678702, + "loss": 0.4736, + "step": 24480 + }, + { + "epoch": 0.9455963550716244, + "grad_norm": 2.7305455207824707, + "learning_rate": 0.00013696281709718524, + "loss": 0.5034, + "step": 24490 + }, + { + "epoch": 0.9459824703656512, + "grad_norm": 0.6123724579811096, + "learning_rate": 0.00013693707607758345, + "loss": 0.3638, + "step": 24500 + }, + { + "epoch": 0.946368585659678, + "grad_norm": 0.9821889400482178, + "learning_rate": 0.00013691133505798166, + "loss": 0.3081, + "step": 24510 + }, + { + "epoch": 0.9467547009537047, + "grad_norm": 2.0870277881622314, + "learning_rate": 0.00013688559403837985, + "loss": 0.3116, + "step": 24520 + }, + { + "epoch": 0.9471408162477316, + "grad_norm": 2.495162010192871, + "learning_rate": 0.0001368598530187781, + "loss": 0.3879, + "step": 24530 + }, + { + "epoch": 0.9475269315417584, + "grad_norm": 1.4834142923355103, + "learning_rate": 0.00013683411199917628, + "loss": 0.1975, + "step": 24540 + }, + { + "epoch": 0.9479130468357851, + "grad_norm": 0.10831606388092041, + "learning_rate": 0.00013680837097957452, + "loss": 0.3504, + "step": 24550 + }, + { + "epoch": 0.9482991621298119, + "grad_norm": 1.6975635290145874, + "learning_rate": 0.00013678262995997273, + "loss": 0.3305, + "step": 24560 + }, + { + "epoch": 0.9486852774238388, + "grad_norm": 1.982422947883606, + "learning_rate": 0.00013675688894037094, + "loss": 0.3693, + "step": 24570 + }, + { + "epoch": 0.9490713927178656, + "grad_norm": 0.3487630784511566, + "learning_rate": 0.00013673114792076916, + "loss": 0.1912, + "step": 24580 + }, + { + "epoch": 0.9494575080118923, + "grad_norm": 0.4546245336532593, + "learning_rate": 0.00013670540690116734, + "loss": 0.2519, + "step": 24590 + }, + { + "epoch": 0.9498436233059191, + "grad_norm": 0.42953622341156006, + "learning_rate": 0.00013667966588156558, + "loss": 0.1967, + "step": 24600 + }, + { + "epoch": 0.950229738599946, + "grad_norm": 0.8197507858276367, + "learning_rate": 0.00013665392486196377, + "loss": 0.3057, + "step": 24610 + }, + { + "epoch": 0.9506158538939727, + "grad_norm": 0.8627083897590637, + "learning_rate": 0.000136628183842362, + "loss": 0.1382, + "step": 24620 + }, + { + "epoch": 0.9510019691879995, + "grad_norm": 1.0003200769424438, + "learning_rate": 0.00013660244282276022, + "loss": 0.1608, + "step": 24630 + }, + { + "epoch": 0.9513880844820264, + "grad_norm": 0.4473998546600342, + "learning_rate": 0.00013657670180315844, + "loss": 0.2226, + "step": 24640 + }, + { + "epoch": 0.9517741997760532, + "grad_norm": 1.9413338899612427, + "learning_rate": 0.00013655096078355665, + "loss": 0.2323, + "step": 24650 + }, + { + "epoch": 0.9521603150700799, + "grad_norm": 1.3250267505645752, + "learning_rate": 0.00013652521976395483, + "loss": 0.1617, + "step": 24660 + }, + { + "epoch": 0.9525464303641067, + "grad_norm": 1.756535530090332, + "learning_rate": 0.00013649947874435308, + "loss": 0.2727, + 
"step": 24670 + }, + { + "epoch": 0.9529325456581336, + "grad_norm": 1.1905356645584106, + "learning_rate": 0.0001364737377247513, + "loss": 0.3169, + "step": 24680 + }, + { + "epoch": 0.9533186609521603, + "grad_norm": 0.8116361498832703, + "learning_rate": 0.0001364479967051495, + "loss": 0.3999, + "step": 24690 + }, + { + "epoch": 0.9537047762461871, + "grad_norm": 0.8906353712081909, + "learning_rate": 0.00013642225568554772, + "loss": 0.3499, + "step": 24700 + }, + { + "epoch": 0.9540908915402139, + "grad_norm": 0.7422589659690857, + "learning_rate": 0.00013639651466594593, + "loss": 0.3544, + "step": 24710 + }, + { + "epoch": 0.9544770068342407, + "grad_norm": 0.21925519406795502, + "learning_rate": 0.00013637077364634414, + "loss": 0.1957, + "step": 24720 + }, + { + "epoch": 0.9548631221282675, + "grad_norm": 3.2993857860565186, + "learning_rate": 0.00013634503262674236, + "loss": 0.2625, + "step": 24730 + }, + { + "epoch": 0.9552492374222943, + "grad_norm": 1.4352943897247314, + "learning_rate": 0.00013631929160714057, + "loss": 0.1655, + "step": 24740 + }, + { + "epoch": 0.955635352716321, + "grad_norm": 1.4417182207107544, + "learning_rate": 0.00013629355058753878, + "loss": 0.2442, + "step": 24750 + }, + { + "epoch": 0.9560214680103479, + "grad_norm": 0.5024278163909912, + "learning_rate": 0.000136267809567937, + "loss": 0.2035, + "step": 24760 + }, + { + "epoch": 0.9564075833043747, + "grad_norm": 1.553472638130188, + "learning_rate": 0.0001362420685483352, + "loss": 0.22, + "step": 24770 + }, + { + "epoch": 0.9567936985984015, + "grad_norm": 1.655155897140503, + "learning_rate": 0.00013621632752873342, + "loss": 0.4051, + "step": 24780 + }, + { + "epoch": 0.9571798138924282, + "grad_norm": 0.4435586631298065, + "learning_rate": 0.00013619058650913164, + "loss": 0.3032, + "step": 24790 + }, + { + "epoch": 0.9575659291864551, + "grad_norm": 0.38589465618133545, + "learning_rate": 0.00013616484548952985, + "loss": 0.2062, + "step": 24800 + }, + { + "epoch": 0.9579520444804819, + "grad_norm": 0.6986583471298218, + "learning_rate": 0.00013613910446992806, + "loss": 0.6371, + "step": 24810 + }, + { + "epoch": 0.9583381597745086, + "grad_norm": 1.69257652759552, + "learning_rate": 0.00013611336345032627, + "loss": 0.2425, + "step": 24820 + }, + { + "epoch": 0.9587242750685354, + "grad_norm": 0.6136781573295593, + "learning_rate": 0.0001360876224307245, + "loss": 0.313, + "step": 24830 + }, + { + "epoch": 0.9591103903625623, + "grad_norm": 0.9019533395767212, + "learning_rate": 0.0001360618814111227, + "loss": 0.3723, + "step": 24840 + }, + { + "epoch": 0.9594965056565891, + "grad_norm": 4.601387977600098, + "learning_rate": 0.00013603614039152091, + "loss": 0.4228, + "step": 24850 + }, + { + "epoch": 0.9598826209506158, + "grad_norm": 0.35048994421958923, + "learning_rate": 0.00013601039937191913, + "loss": 0.2423, + "step": 24860 + }, + { + "epoch": 0.9602687362446426, + "grad_norm": 1.7177300453186035, + "learning_rate": 0.00013598465835231734, + "loss": 0.3995, + "step": 24870 + }, + { + "epoch": 0.9606548515386695, + "grad_norm": 1.3341178894042969, + "learning_rate": 0.00013595891733271555, + "loss": 0.3422, + "step": 24880 + }, + { + "epoch": 0.9610409668326962, + "grad_norm": 1.1859056949615479, + "learning_rate": 0.00013593317631311377, + "loss": 0.4204, + "step": 24890 + }, + { + "epoch": 0.961427082126723, + "grad_norm": 1.9831022024154663, + "learning_rate": 0.00013590743529351198, + "loss": 0.3623, + "step": 24900 + }, + { + "epoch": 0.9618131974207499, + "grad_norm": 
0.8704162240028381, + "learning_rate": 0.0001358816942739102, + "loss": 0.2664, + "step": 24910 + }, + { + "epoch": 0.9621993127147767, + "grad_norm": 2.2464160919189453, + "learning_rate": 0.0001358559532543084, + "loss": 0.3628, + "step": 24920 + }, + { + "epoch": 0.9625854280088034, + "grad_norm": 2.5081570148468018, + "learning_rate": 0.00013583021223470662, + "loss": 0.314, + "step": 24930 + }, + { + "epoch": 0.9629715433028302, + "grad_norm": 4.44802713394165, + "learning_rate": 0.00013580447121510483, + "loss": 0.4593, + "step": 24940 + }, + { + "epoch": 0.9633576585968571, + "grad_norm": 2.0449907779693604, + "learning_rate": 0.00013577873019550305, + "loss": 0.3941, + "step": 24950 + }, + { + "epoch": 0.9637437738908838, + "grad_norm": 2.090315818786621, + "learning_rate": 0.00013575298917590126, + "loss": 0.3713, + "step": 24960 + }, + { + "epoch": 0.9641298891849106, + "grad_norm": 1.0789872407913208, + "learning_rate": 0.00013572724815629947, + "loss": 0.2411, + "step": 24970 + }, + { + "epoch": 0.9645160044789374, + "grad_norm": 2.0463109016418457, + "learning_rate": 0.0001357015071366977, + "loss": 0.2935, + "step": 24980 + }, + { + "epoch": 0.9649021197729643, + "grad_norm": 1.331299901008606, + "learning_rate": 0.00013567576611709593, + "loss": 0.2162, + "step": 24990 + }, + { + "epoch": 0.965288235066991, + "grad_norm": 3.3949038982391357, + "learning_rate": 0.00013565002509749411, + "loss": 0.4593, + "step": 25000 + }, + { + "epoch": 0.9656743503610178, + "grad_norm": 2.4510934352874756, + "learning_rate": 0.00013562428407789233, + "loss": 0.2476, + "step": 25010 + }, + { + "epoch": 0.9660604656550446, + "grad_norm": 1.1333917379379272, + "learning_rate": 0.00013559854305829054, + "loss": 0.3035, + "step": 25020 + }, + { + "epoch": 0.9664465809490714, + "grad_norm": 2.5525829792022705, + "learning_rate": 0.00013557280203868875, + "loss": 0.4498, + "step": 25030 + }, + { + "epoch": 0.9668326962430982, + "grad_norm": 1.4862838983535767, + "learning_rate": 0.00013554706101908697, + "loss": 0.3109, + "step": 25040 + }, + { + "epoch": 0.967218811537125, + "grad_norm": 1.0053727626800537, + "learning_rate": 0.00013552131999948518, + "loss": 0.3029, + "step": 25050 + }, + { + "epoch": 0.9676049268311517, + "grad_norm": 1.4033957719802856, + "learning_rate": 0.00013549557897988342, + "loss": 0.3096, + "step": 25060 + }, + { + "epoch": 0.9679910421251786, + "grad_norm": 2.1944756507873535, + "learning_rate": 0.0001354698379602816, + "loss": 0.3428, + "step": 25070 + }, + { + "epoch": 0.9683771574192054, + "grad_norm": 3.330143928527832, + "learning_rate": 0.00013544409694067982, + "loss": 0.4026, + "step": 25080 + }, + { + "epoch": 0.9687632727132321, + "grad_norm": 1.9337730407714844, + "learning_rate": 0.00013541835592107803, + "loss": 0.5343, + "step": 25090 + }, + { + "epoch": 0.9691493880072589, + "grad_norm": 4.203855514526367, + "learning_rate": 0.00013539261490147625, + "loss": 0.4461, + "step": 25100 + }, + { + "epoch": 0.9695355033012858, + "grad_norm": 0.6582885980606079, + "learning_rate": 0.0001353668738818745, + "loss": 0.2719, + "step": 25110 + }, + { + "epoch": 0.9699216185953126, + "grad_norm": 0.8788600564002991, + "learning_rate": 0.00013534113286227267, + "loss": 0.4335, + "step": 25120 + }, + { + "epoch": 0.9703077338893393, + "grad_norm": 1.3793160915374756, + "learning_rate": 0.00013531539184267091, + "loss": 0.3126, + "step": 25130 + }, + { + "epoch": 0.9706938491833662, + "grad_norm": 3.5996806621551514, + "learning_rate": 0.0001352896508230691, + "loss": 
0.5171, + "step": 25140 + }, + { + "epoch": 0.971079964477393, + "grad_norm": 1.6220872402191162, + "learning_rate": 0.00013526390980346731, + "loss": 0.3992, + "step": 25150 + }, + { + "epoch": 0.9714660797714197, + "grad_norm": 1.8351634740829468, + "learning_rate": 0.00013523816878386553, + "loss": 0.3159, + "step": 25160 + }, + { + "epoch": 0.9718521950654465, + "grad_norm": 0.6400974988937378, + "learning_rate": 0.00013521242776426374, + "loss": 0.3187, + "step": 25170 + }, + { + "epoch": 0.9722383103594734, + "grad_norm": 1.3507485389709473, + "learning_rate": 0.00013518668674466198, + "loss": 0.3626, + "step": 25180 + }, + { + "epoch": 0.9726244256535002, + "grad_norm": 1.4778717756271362, + "learning_rate": 0.00013516094572506017, + "loss": 0.3467, + "step": 25190 + }, + { + "epoch": 0.9730105409475269, + "grad_norm": 0.4346179664134979, + "learning_rate": 0.0001351352047054584, + "loss": 0.1995, + "step": 25200 + }, + { + "epoch": 0.9733966562415537, + "grad_norm": 2.8404130935668945, + "learning_rate": 0.0001351094636858566, + "loss": 0.4765, + "step": 25210 + }, + { + "epoch": 0.9737827715355806, + "grad_norm": 0.044492240995168686, + "learning_rate": 0.0001350837226662548, + "loss": 0.3047, + "step": 25220 + }, + { + "epoch": 0.9741688868296073, + "grad_norm": 2.258355140686035, + "learning_rate": 0.00013505798164665302, + "loss": 0.234, + "step": 25230 + }, + { + "epoch": 0.9745550021236341, + "grad_norm": 0.23794110119342804, + "learning_rate": 0.00013503224062705123, + "loss": 0.5178, + "step": 25240 + }, + { + "epoch": 0.9749411174176609, + "grad_norm": 0.5849624872207642, + "learning_rate": 0.00013500649960744947, + "loss": 0.3419, + "step": 25250 + }, + { + "epoch": 0.9753272327116878, + "grad_norm": 1.535228967666626, + "learning_rate": 0.00013498075858784766, + "loss": 0.275, + "step": 25260 + }, + { + "epoch": 0.9757133480057145, + "grad_norm": 4.943759441375732, + "learning_rate": 0.0001349550175682459, + "loss": 0.241, + "step": 25270 + }, + { + "epoch": 0.9760994632997413, + "grad_norm": 1.3046916723251343, + "learning_rate": 0.0001349292765486441, + "loss": 0.2453, + "step": 25280 + }, + { + "epoch": 0.976485578593768, + "grad_norm": 0.20991156995296478, + "learning_rate": 0.0001349035355290423, + "loss": 0.392, + "step": 25290 + }, + { + "epoch": 0.9768716938877949, + "grad_norm": 1.2106267213821411, + "learning_rate": 0.00013487779450944054, + "loss": 0.284, + "step": 25300 + }, + { + "epoch": 0.9772578091818217, + "grad_norm": 0.28197771310806274, + "learning_rate": 0.00013485205348983873, + "loss": 0.2596, + "step": 25310 + }, + { + "epoch": 0.9776439244758485, + "grad_norm": 0.8538393378257751, + "learning_rate": 0.00013482631247023697, + "loss": 0.2519, + "step": 25320 + }, + { + "epoch": 0.9780300397698752, + "grad_norm": 1.9520586729049683, + "learning_rate": 0.00013480057145063515, + "loss": 0.2581, + "step": 25330 + }, + { + "epoch": 0.9784161550639021, + "grad_norm": 0.7613987326622009, + "learning_rate": 0.0001347748304310334, + "loss": 0.1549, + "step": 25340 + }, + { + "epoch": 0.9788022703579289, + "grad_norm": 0.2045626938343048, + "learning_rate": 0.00013474908941143158, + "loss": 0.2587, + "step": 25350 + }, + { + "epoch": 0.9791883856519557, + "grad_norm": 1.634488821029663, + "learning_rate": 0.0001347233483918298, + "loss": 0.2266, + "step": 25360 + }, + { + "epoch": 0.9795745009459824, + "grad_norm": 0.6475266814231873, + "learning_rate": 0.00013469760737222803, + "loss": 0.2548, + "step": 25370 + }, + { + "epoch": 0.9799606162400093, + 
"grad_norm": 0.25982800126075745, + "learning_rate": 0.00013467186635262622, + "loss": 0.3398, + "step": 25380 + }, + { + "epoch": 0.9803467315340361, + "grad_norm": 2.3229706287384033, + "learning_rate": 0.00013464612533302446, + "loss": 0.3468, + "step": 25390 + }, + { + "epoch": 0.9807328468280628, + "grad_norm": 0.6465128660202026, + "learning_rate": 0.00013462038431342265, + "loss": 0.2716, + "step": 25400 + }, + { + "epoch": 0.9811189621220897, + "grad_norm": 1.413368821144104, + "learning_rate": 0.0001345946432938209, + "loss": 0.2508, + "step": 25410 + }, + { + "epoch": 0.9815050774161165, + "grad_norm": 0.33577996492385864, + "learning_rate": 0.00013456890227421907, + "loss": 0.3323, + "step": 25420 + }, + { + "epoch": 0.9818911927101432, + "grad_norm": 0.7601230144500732, + "learning_rate": 0.0001345431612546173, + "loss": 0.2682, + "step": 25430 + }, + { + "epoch": 0.98227730800417, + "grad_norm": 4.296112060546875, + "learning_rate": 0.00013451742023501553, + "loss": 0.2886, + "step": 25440 + }, + { + "epoch": 0.9826634232981969, + "grad_norm": 1.2557302713394165, + "learning_rate": 0.00013449167921541371, + "loss": 0.2863, + "step": 25450 + }, + { + "epoch": 0.9830495385922237, + "grad_norm": 0.6168705821037292, + "learning_rate": 0.00013446593819581195, + "loss": 0.195, + "step": 25460 + }, + { + "epoch": 0.9834356538862504, + "grad_norm": 1.9064022302627563, + "learning_rate": 0.00013444019717621014, + "loss": 0.2898, + "step": 25470 + }, + { + "epoch": 0.9838217691802772, + "grad_norm": 0.3549353778362274, + "learning_rate": 0.00013441445615660838, + "loss": 0.1761, + "step": 25480 + }, + { + "epoch": 0.9842078844743041, + "grad_norm": 0.6308786869049072, + "learning_rate": 0.0001343887151370066, + "loss": 0.2169, + "step": 25490 + }, + { + "epoch": 0.9845939997683308, + "grad_norm": 0.9449920058250427, + "learning_rate": 0.0001343629741174048, + "loss": 0.2952, + "step": 25500 + }, + { + "epoch": 0.9849801150623576, + "grad_norm": 1.6993355751037598, + "learning_rate": 0.00013433723309780302, + "loss": 0.3745, + "step": 25510 + }, + { + "epoch": 0.9853662303563844, + "grad_norm": 0.8093920350074768, + "learning_rate": 0.0001343114920782012, + "loss": 0.1725, + "step": 25520 + }, + { + "epoch": 0.9857523456504113, + "grad_norm": 1.4968618154525757, + "learning_rate": 0.00013428575105859945, + "loss": 0.2843, + "step": 25530 + }, + { + "epoch": 0.986138460944438, + "grad_norm": 0.37341028451919556, + "learning_rate": 0.00013426001003899763, + "loss": 0.2462, + "step": 25540 + }, + { + "epoch": 0.9865245762384648, + "grad_norm": 2.0109541416168213, + "learning_rate": 0.00013423426901939587, + "loss": 0.4046, + "step": 25550 + }, + { + "epoch": 0.9869106915324916, + "grad_norm": 2.540151357650757, + "learning_rate": 0.0001342085279997941, + "loss": 0.4331, + "step": 25560 + }, + { + "epoch": 0.9872968068265184, + "grad_norm": 0.9178367257118225, + "learning_rate": 0.0001341827869801923, + "loss": 0.3849, + "step": 25570 + }, + { + "epoch": 0.9876829221205452, + "grad_norm": 1.4229514598846436, + "learning_rate": 0.00013415704596059051, + "loss": 0.326, + "step": 25580 + }, + { + "epoch": 0.988069037414572, + "grad_norm": 0.7699927091598511, + "learning_rate": 0.0001341313049409887, + "loss": 0.237, + "step": 25590 + }, + { + "epoch": 0.9884551527085987, + "grad_norm": 0.3460877239704132, + "learning_rate": 0.00013410556392138694, + "loss": 0.2679, + "step": 25600 + }, + { + "epoch": 0.9888412680026256, + "grad_norm": 0.25363796949386597, + "learning_rate": 
0.00013407982290178515, + "loss": 0.1746, + "step": 25610 + }, + { + "epoch": 0.9892273832966524, + "grad_norm": 1.5607961416244507, + "learning_rate": 0.00013405408188218337, + "loss": 0.6328, + "step": 25620 + }, + { + "epoch": 0.9896134985906792, + "grad_norm": 2.3751626014709473, + "learning_rate": 0.00013402834086258158, + "loss": 0.3989, + "step": 25630 + }, + { + "epoch": 0.9899996138847059, + "grad_norm": 0.20423173904418945, + "learning_rate": 0.0001340025998429798, + "loss": 0.2836, + "step": 25640 + }, + { + "epoch": 0.9903857291787328, + "grad_norm": 1.4207524061203003, + "learning_rate": 0.000133976858823378, + "loss": 0.348, + "step": 25650 + }, + { + "epoch": 0.9907718444727596, + "grad_norm": 0.12217597663402557, + "learning_rate": 0.0001339511178037762, + "loss": 0.3515, + "step": 25660 + }, + { + "epoch": 0.9911579597667863, + "grad_norm": 0.9259626269340515, + "learning_rate": 0.00013392537678417443, + "loss": 0.3039, + "step": 25670 + }, + { + "epoch": 0.9915440750608132, + "grad_norm": 0.5700181722640991, + "learning_rate": 0.00013389963576457265, + "loss": 0.3266, + "step": 25680 + }, + { + "epoch": 0.99193019035484, + "grad_norm": 1.9731560945510864, + "learning_rate": 0.00013387389474497086, + "loss": 0.2534, + "step": 25690 + }, + { + "epoch": 0.9923163056488667, + "grad_norm": 1.3714967966079712, + "learning_rate": 0.00013384815372536907, + "loss": 0.298, + "step": 25700 + }, + { + "epoch": 0.9927024209428935, + "grad_norm": 1.2377171516418457, + "learning_rate": 0.0001338224127057673, + "loss": 0.2948, + "step": 25710 + }, + { + "epoch": 0.9930885362369204, + "grad_norm": 0.4711095690727234, + "learning_rate": 0.0001337966716861655, + "loss": 0.2295, + "step": 25720 + }, + { + "epoch": 0.9934746515309472, + "grad_norm": 0.46715909242630005, + "learning_rate": 0.00013377093066656369, + "loss": 0.3331, + "step": 25730 + }, + { + "epoch": 0.9938607668249739, + "grad_norm": 1.1080710887908936, + "learning_rate": 0.00013374518964696193, + "loss": 0.4324, + "step": 25740 + }, + { + "epoch": 0.9942468821190007, + "grad_norm": 2.3581650257110596, + "learning_rate": 0.00013371944862736014, + "loss": 0.372, + "step": 25750 + }, + { + "epoch": 0.9946329974130276, + "grad_norm": 2.448678970336914, + "learning_rate": 0.00013369370760775835, + "loss": 0.3396, + "step": 25760 + }, + { + "epoch": 0.9950191127070543, + "grad_norm": 1.4270198345184326, + "learning_rate": 0.00013366796658815657, + "loss": 0.4511, + "step": 25770 + }, + { + "epoch": 0.9954052280010811, + "grad_norm": 0.6360304951667786, + "learning_rate": 0.00013364222556855478, + "loss": 0.2427, + "step": 25780 + }, + { + "epoch": 0.9957913432951079, + "grad_norm": 2.1653332710266113, + "learning_rate": 0.000133616484548953, + "loss": 0.3057, + "step": 25790 + }, + { + "epoch": 0.9961774585891348, + "grad_norm": 2.952923536300659, + "learning_rate": 0.0001335907435293512, + "loss": 0.3076, + "step": 25800 + }, + { + "epoch": 0.9965635738831615, + "grad_norm": 0.5913527607917786, + "learning_rate": 0.00013356500250974942, + "loss": 0.399, + "step": 25810 + }, + { + "epoch": 0.9969496891771883, + "grad_norm": 0.4653400480747223, + "learning_rate": 0.00013353926149014763, + "loss": 0.2452, + "step": 25820 + }, + { + "epoch": 0.9973358044712151, + "grad_norm": 6.321722984313965, + "learning_rate": 0.00013351352047054585, + "loss": 0.3644, + "step": 25830 + }, + { + "epoch": 0.9977219197652419, + "grad_norm": 1.9944865703582764, + "learning_rate": 0.00013348777945094406, + "loss": 0.2915, + "step": 25840 + }, + { + 
"epoch": 0.9981080350592687, + "grad_norm": 1.1486843824386597, + "learning_rate": 0.00013346203843134227, + "loss": 0.3787, + "step": 25850 + }, + { + "epoch": 0.9984941503532955, + "grad_norm": 0.11532440781593323, + "learning_rate": 0.0001334362974117405, + "loss": 0.3053, + "step": 25860 + }, + { + "epoch": 0.9988802656473222, + "grad_norm": 1.3178479671478271, + "learning_rate": 0.0001334105563921387, + "loss": 0.2917, + "step": 25870 + }, + { + "epoch": 0.9992663809413491, + "grad_norm": 0.3511134088039398, + "learning_rate": 0.0001333848153725369, + "loss": 0.2657, + "step": 25880 + }, + { + "epoch": 0.9996524962353759, + "grad_norm": 1.2761729955673218, + "learning_rate": 0.00013335907435293513, + "loss": 0.3913, + "step": 25890 + }, + { + "epoch": 1.0000386115294027, + "grad_norm": 2.540947437286377, + "learning_rate": 0.00013333333333333334, + "loss": 0.3016, + "step": 25900 + }, + { + "epoch": 1.0004247268234294, + "grad_norm": 0.5865538120269775, + "learning_rate": 0.00013330759231373155, + "loss": 0.4184, + "step": 25910 + }, + { + "epoch": 1.0008108421174562, + "grad_norm": 2.7305166721343994, + "learning_rate": 0.00013328185129412977, + "loss": 0.3529, + "step": 25920 + }, + { + "epoch": 1.0011969574114832, + "grad_norm": 1.4203829765319824, + "learning_rate": 0.00013325611027452798, + "loss": 0.3345, + "step": 25930 + }, + { + "epoch": 1.00158307270551, + "grad_norm": 0.7687380313873291, + "learning_rate": 0.0001332303692549262, + "loss": 0.2622, + "step": 25940 + }, + { + "epoch": 1.0019691879995367, + "grad_norm": 0.4958217144012451, + "learning_rate": 0.0001332046282353244, + "loss": 0.3935, + "step": 25950 + }, + { + "epoch": 1.0023553032935635, + "grad_norm": 0.27102500200271606, + "learning_rate": 0.00013317888721572262, + "loss": 0.2642, + "step": 25960 + }, + { + "epoch": 1.0027414185875902, + "grad_norm": 0.6760912537574768, + "learning_rate": 0.00013315314619612083, + "loss": 0.1937, + "step": 25970 + }, + { + "epoch": 1.003127533881617, + "grad_norm": 2.5647270679473877, + "learning_rate": 0.00013312740517651905, + "loss": 0.3344, + "step": 25980 + }, + { + "epoch": 1.0035136491756438, + "grad_norm": 0.4810403883457184, + "learning_rate": 0.00013310166415691726, + "loss": 0.1783, + "step": 25990 + }, + { + "epoch": 1.0038997644696706, + "grad_norm": 2.5404248237609863, + "learning_rate": 0.00013307592313731547, + "loss": 0.3979, + "step": 26000 + }, + { + "epoch": 1.0042858797636975, + "grad_norm": 1.1135408878326416, + "learning_rate": 0.00013305018211771369, + "loss": 0.3681, + "step": 26010 + }, + { + "epoch": 1.0046719950577243, + "grad_norm": 1.2810723781585693, + "learning_rate": 0.0001330244410981119, + "loss": 0.183, + "step": 26020 + }, + { + "epoch": 1.005058110351751, + "grad_norm": 3.3486454486846924, + "learning_rate": 0.0001329987000785101, + "loss": 0.2489, + "step": 26030 + }, + { + "epoch": 1.0054442256457778, + "grad_norm": 0.7915325164794922, + "learning_rate": 0.00013297295905890833, + "loss": 0.3577, + "step": 26040 + }, + { + "epoch": 1.0058303409398046, + "grad_norm": 0.969727098941803, + "learning_rate": 0.00013294721803930654, + "loss": 0.2243, + "step": 26050 + }, + { + "epoch": 1.0062164562338314, + "grad_norm": 1.8932983875274658, + "learning_rate": 0.00013292147701970475, + "loss": 0.245, + "step": 26060 + }, + { + "epoch": 1.0066025715278581, + "grad_norm": 1.2421804666519165, + "learning_rate": 0.00013289573600010297, + "loss": 0.2087, + "step": 26070 + }, + { + "epoch": 1.006988686821885, + "grad_norm": 1.316405177116394, + 
"learning_rate": 0.00013286999498050118, + "loss": 0.2864, + "step": 26080 + }, + { + "epoch": 1.007374802115912, + "grad_norm": 1.9196691513061523, + "learning_rate": 0.0001328442539608994, + "loss": 0.2098, + "step": 26090 + }, + { + "epoch": 1.0077609174099387, + "grad_norm": 0.7522671222686768, + "learning_rate": 0.0001328185129412976, + "loss": 0.3092, + "step": 26100 + }, + { + "epoch": 1.0081470327039654, + "grad_norm": 0.322963684797287, + "learning_rate": 0.00013279277192169585, + "loss": 0.2452, + "step": 26110 + }, + { + "epoch": 1.0085331479979922, + "grad_norm": 2.243734121322632, + "learning_rate": 0.00013276703090209403, + "loss": 0.4387, + "step": 26120 + }, + { + "epoch": 1.008919263292019, + "grad_norm": 1.0588726997375488, + "learning_rate": 0.00013274128988249225, + "loss": 0.3326, + "step": 26130 + }, + { + "epoch": 1.0093053785860457, + "grad_norm": 0.7018478512763977, + "learning_rate": 0.00013271554886289046, + "loss": 0.4248, + "step": 26140 + }, + { + "epoch": 1.0096914938800725, + "grad_norm": 3.692521810531616, + "learning_rate": 0.00013268980784328867, + "loss": 0.2452, + "step": 26150 + }, + { + "epoch": 1.0100776091740993, + "grad_norm": 2.21842885017395, + "learning_rate": 0.00013266406682368689, + "loss": 0.3315, + "step": 26160 + }, + { + "epoch": 1.0104637244681263, + "grad_norm": 1.2321841716766357, + "learning_rate": 0.0001326383258040851, + "loss": 0.1042, + "step": 26170 + }, + { + "epoch": 1.010849839762153, + "grad_norm": 1.6151124238967896, + "learning_rate": 0.00013261258478448334, + "loss": 0.1934, + "step": 26180 + }, + { + "epoch": 1.0112359550561798, + "grad_norm": 0.0949881374835968, + "learning_rate": 0.00013258684376488153, + "loss": 0.1628, + "step": 26190 + }, + { + "epoch": 1.0116220703502066, + "grad_norm": 1.7337597608566284, + "learning_rate": 0.00013256110274527977, + "loss": 0.3275, + "step": 26200 + }, + { + "epoch": 1.0120081856442333, + "grad_norm": 2.1338372230529785, + "learning_rate": 0.00013253536172567795, + "loss": 0.3677, + "step": 26210 + }, + { + "epoch": 1.01239430093826, + "grad_norm": 1.803187370300293, + "learning_rate": 0.00013250962070607617, + "loss": 0.2886, + "step": 26220 + }, + { + "epoch": 1.0127804162322869, + "grad_norm": 2.022825002670288, + "learning_rate": 0.00013248387968647438, + "loss": 0.2618, + "step": 26230 + }, + { + "epoch": 1.0131665315263139, + "grad_norm": 0.48369279503822327, + "learning_rate": 0.0001324581386668726, + "loss": 0.4638, + "step": 26240 + }, + { + "epoch": 1.0135526468203406, + "grad_norm": 3.2530572414398193, + "learning_rate": 0.00013243239764727083, + "loss": 0.3824, + "step": 26250 + }, + { + "epoch": 1.0139387621143674, + "grad_norm": 1.0877609252929688, + "learning_rate": 0.00013240665662766902, + "loss": 0.1732, + "step": 26260 + }, + { + "epoch": 1.0143248774083942, + "grad_norm": 1.5585906505584717, + "learning_rate": 0.00013238091560806726, + "loss": 0.2044, + "step": 26270 + }, + { + "epoch": 1.014710992702421, + "grad_norm": 1.5326491594314575, + "learning_rate": 0.00013235517458846545, + "loss": 0.1759, + "step": 26280 + }, + { + "epoch": 1.0150971079964477, + "grad_norm": 4.376593589782715, + "learning_rate": 0.00013232943356886366, + "loss": 0.6032, + "step": 26290 + }, + { + "epoch": 1.0154832232904745, + "grad_norm": 0.8953253030776978, + "learning_rate": 0.0001323036925492619, + "loss": 0.4057, + "step": 26300 + }, + { + "epoch": 1.0158693385845012, + "grad_norm": 1.271932601928711, + "learning_rate": 0.00013227795152966009, + "loss": 0.1802, + "step": 26310 + 
}, + { + "epoch": 1.0162554538785282, + "grad_norm": 2.311713457107544, + "learning_rate": 0.00013225221051005833, + "loss": 0.3368, + "step": 26320 + }, + { + "epoch": 1.016641569172555, + "grad_norm": 1.386100172996521, + "learning_rate": 0.0001322264694904565, + "loss": 0.3101, + "step": 26330 + }, + { + "epoch": 1.0170276844665818, + "grad_norm": 1.371382236480713, + "learning_rate": 0.00013220072847085475, + "loss": 0.3804, + "step": 26340 + }, + { + "epoch": 1.0174137997606085, + "grad_norm": 0.7098391652107239, + "learning_rate": 0.00013217498745125294, + "loss": 0.2513, + "step": 26350 + }, + { + "epoch": 1.0177999150546353, + "grad_norm": 3.5874531269073486, + "learning_rate": 0.00013214924643165115, + "loss": 0.2467, + "step": 26360 + }, + { + "epoch": 1.018186030348662, + "grad_norm": 1.9853413105010986, + "learning_rate": 0.0001321235054120494, + "loss": 0.2255, + "step": 26370 + }, + { + "epoch": 1.0185721456426888, + "grad_norm": 0.5550156831741333, + "learning_rate": 0.00013209776439244758, + "loss": 0.1558, + "step": 26380 + }, + { + "epoch": 1.0189582609367156, + "grad_norm": 1.92972731590271, + "learning_rate": 0.00013207202337284582, + "loss": 0.4843, + "step": 26390 + }, + { + "epoch": 1.0193443762307426, + "grad_norm": 2.331674814224243, + "learning_rate": 0.000132046282353244, + "loss": 0.2652, + "step": 26400 + }, + { + "epoch": 1.0197304915247694, + "grad_norm": 0.809916615486145, + "learning_rate": 0.00013202054133364225, + "loss": 0.1824, + "step": 26410 + }, + { + "epoch": 1.0201166068187961, + "grad_norm": 2.6432926654815674, + "learning_rate": 0.00013199480031404046, + "loss": 0.2712, + "step": 26420 + }, + { + "epoch": 1.020502722112823, + "grad_norm": 1.6016955375671387, + "learning_rate": 0.00013196905929443865, + "loss": 0.1629, + "step": 26430 + }, + { + "epoch": 1.0208888374068497, + "grad_norm": 0.22881706058979034, + "learning_rate": 0.00013194331827483689, + "loss": 0.2977, + "step": 26440 + }, + { + "epoch": 1.0212749527008764, + "grad_norm": 1.3551994562149048, + "learning_rate": 0.00013191757725523507, + "loss": 0.1207, + "step": 26450 + }, + { + "epoch": 1.0216610679949032, + "grad_norm": 0.19062986969947815, + "learning_rate": 0.0001318918362356333, + "loss": 0.2201, + "step": 26460 + }, + { + "epoch": 1.0220471832889302, + "grad_norm": 2.014047145843506, + "learning_rate": 0.0001318660952160315, + "loss": 0.4991, + "step": 26470 + }, + { + "epoch": 1.022433298582957, + "grad_norm": 0.39503228664398193, + "learning_rate": 0.00013184035419642974, + "loss": 0.3509, + "step": 26480 + }, + { + "epoch": 1.0228194138769837, + "grad_norm": 4.807271957397461, + "learning_rate": 0.00013181461317682795, + "loss": 0.3324, + "step": 26490 + }, + { + "epoch": 1.0232055291710105, + "grad_norm": 1.4785593748092651, + "learning_rate": 0.00013178887215722614, + "loss": 0.269, + "step": 26500 + }, + { + "epoch": 1.0235916444650373, + "grad_norm": 1.851137399673462, + "learning_rate": 0.00013176313113762438, + "loss": 0.2739, + "step": 26510 + }, + { + "epoch": 1.023977759759064, + "grad_norm": 0.6200979351997375, + "learning_rate": 0.00013173739011802257, + "loss": 0.3135, + "step": 26520 + }, + { + "epoch": 1.0243638750530908, + "grad_norm": 2.411592960357666, + "learning_rate": 0.0001317116490984208, + "loss": 0.1675, + "step": 26530 + }, + { + "epoch": 1.0247499903471176, + "grad_norm": 0.6965230703353882, + "learning_rate": 0.000131685908078819, + "loss": 0.2762, + "step": 26540 + }, + { + "epoch": 1.0251361056411445, + "grad_norm": 1.3620637655258179, + 
"learning_rate": 0.00013166016705921723, + "loss": 0.319, + "step": 26550 + }, + { + "epoch": 1.0255222209351713, + "grad_norm": 1.4016404151916504, + "learning_rate": 0.00013163442603961545, + "loss": 0.1871, + "step": 26560 + }, + { + "epoch": 1.025908336229198, + "grad_norm": 1.234718680381775, + "learning_rate": 0.00013160868502001363, + "loss": 0.1301, + "step": 26570 + }, + { + "epoch": 1.0262944515232248, + "grad_norm": 1.4657102823257446, + "learning_rate": 0.00013158294400041187, + "loss": 0.3814, + "step": 26580 + }, + { + "epoch": 1.0266805668172516, + "grad_norm": 1.606948733329773, + "learning_rate": 0.00013155720298081006, + "loss": 0.2226, + "step": 26590 + }, + { + "epoch": 1.0270666821112784, + "grad_norm": 0.4280283749103546, + "learning_rate": 0.0001315314619612083, + "loss": 0.1506, + "step": 26600 + }, + { + "epoch": 1.0274527974053052, + "grad_norm": 0.141262486577034, + "learning_rate": 0.0001315057209416065, + "loss": 0.1476, + "step": 26610 + }, + { + "epoch": 1.027838912699332, + "grad_norm": 2.7536983489990234, + "learning_rate": 0.00013147997992200473, + "loss": 0.2558, + "step": 26620 + }, + { + "epoch": 1.028225027993359, + "grad_norm": 1.052965760231018, + "learning_rate": 0.00013145423890240294, + "loss": 0.3989, + "step": 26630 + }, + { + "epoch": 1.0286111432873857, + "grad_norm": 0.3157159388065338, + "learning_rate": 0.00013142849788280112, + "loss": 0.2656, + "step": 26640 + }, + { + "epoch": 1.0289972585814124, + "grad_norm": 2.080801248550415, + "learning_rate": 0.00013140275686319937, + "loss": 0.3183, + "step": 26650 + }, + { + "epoch": 1.0293833738754392, + "grad_norm": 1.255540370941162, + "learning_rate": 0.00013137701584359755, + "loss": 0.4555, + "step": 26660 + }, + { + "epoch": 1.029769489169466, + "grad_norm": 1.0504742860794067, + "learning_rate": 0.0001313512748239958, + "loss": 0.17, + "step": 26670 + }, + { + "epoch": 1.0301556044634927, + "grad_norm": 1.8295503854751587, + "learning_rate": 0.000131325533804394, + "loss": 0.2202, + "step": 26680 + }, + { + "epoch": 1.0305417197575195, + "grad_norm": 0.9758415222167969, + "learning_rate": 0.00013129979278479222, + "loss": 0.1187, + "step": 26690 + }, + { + "epoch": 1.0309278350515463, + "grad_norm": 0.6076366901397705, + "learning_rate": 0.00013127405176519043, + "loss": 0.0668, + "step": 26700 + }, + { + "epoch": 1.0313139503455733, + "grad_norm": 0.7663784027099609, + "learning_rate": 0.00013124831074558865, + "loss": 0.2115, + "step": 26710 + }, + { + "epoch": 1.0317000656396, + "grad_norm": 1.814332365989685, + "learning_rate": 0.00013122256972598686, + "loss": 0.0901, + "step": 26720 + }, + { + "epoch": 1.0320861809336268, + "grad_norm": 2.80830454826355, + "learning_rate": 0.00013119682870638504, + "loss": 0.213, + "step": 26730 + }, + { + "epoch": 1.0324722962276536, + "grad_norm": 1.324601411819458, + "learning_rate": 0.00013117108768678328, + "loss": 0.6125, + "step": 26740 + }, + { + "epoch": 1.0328584115216803, + "grad_norm": 1.3301643133163452, + "learning_rate": 0.0001311453466671815, + "loss": 0.1986, + "step": 26750 + }, + { + "epoch": 1.033244526815707, + "grad_norm": 5.361929893493652, + "learning_rate": 0.0001311196056475797, + "loss": 0.5023, + "step": 26760 + }, + { + "epoch": 1.0336306421097339, + "grad_norm": 0.7855739593505859, + "learning_rate": 0.00013109386462797792, + "loss": 0.2549, + "step": 26770 + }, + { + "epoch": 1.0340167574037609, + "grad_norm": 0.05219104886054993, + "learning_rate": 0.00013106812360837614, + "loss": 0.2942, + "step": 26780 + }, + { + 
"epoch": 1.0344028726977876, + "grad_norm": 0.7680227160453796, + "learning_rate": 0.00013104238258877435, + "loss": 0.2909, + "step": 26790 + }, + { + "epoch": 1.0347889879918144, + "grad_norm": 0.559930682182312, + "learning_rate": 0.00013101664156917256, + "loss": 0.2876, + "step": 26800 + }, + { + "epoch": 1.0351751032858412, + "grad_norm": 0.8780495524406433, + "learning_rate": 0.00013099090054957078, + "loss": 0.2837, + "step": 26810 + }, + { + "epoch": 1.035561218579868, + "grad_norm": 2.0212693214416504, + "learning_rate": 0.000130965159529969, + "loss": 0.3008, + "step": 26820 + }, + { + "epoch": 1.0359473338738947, + "grad_norm": 2.2967641353607178, + "learning_rate": 0.0001309394185103672, + "loss": 0.2797, + "step": 26830 + }, + { + "epoch": 1.0363334491679215, + "grad_norm": 0.33352091908454895, + "learning_rate": 0.00013091367749076542, + "loss": 0.4785, + "step": 26840 + }, + { + "epoch": 1.0367195644619482, + "grad_norm": 1.5413645505905151, + "learning_rate": 0.00013088793647116363, + "loss": 0.3248, + "step": 26850 + }, + { + "epoch": 1.0371056797559752, + "grad_norm": 0.36845988035202026, + "learning_rate": 0.00013086219545156184, + "loss": 0.1723, + "step": 26860 + }, + { + "epoch": 1.037491795050002, + "grad_norm": 1.196103811264038, + "learning_rate": 0.00013083645443196006, + "loss": 0.3623, + "step": 26870 + }, + { + "epoch": 1.0378779103440288, + "grad_norm": 2.4172215461730957, + "learning_rate": 0.00013081071341235827, + "loss": 0.3428, + "step": 26880 + }, + { + "epoch": 1.0382640256380555, + "grad_norm": 1.585368275642395, + "learning_rate": 0.00013078497239275648, + "loss": 0.2791, + "step": 26890 + }, + { + "epoch": 1.0386501409320823, + "grad_norm": 2.8708138465881348, + "learning_rate": 0.0001307592313731547, + "loss": 0.3386, + "step": 26900 + }, + { + "epoch": 1.039036256226109, + "grad_norm": 1.1256765127182007, + "learning_rate": 0.0001307334903535529, + "loss": 0.2992, + "step": 26910 + }, + { + "epoch": 1.0394223715201358, + "grad_norm": 0.9846695065498352, + "learning_rate": 0.00013070774933395112, + "loss": 0.2659, + "step": 26920 + }, + { + "epoch": 1.0398084868141626, + "grad_norm": 1.5781525373458862, + "learning_rate": 0.00013068200831434934, + "loss": 0.2825, + "step": 26930 + }, + { + "epoch": 1.0401946021081896, + "grad_norm": 0.6615175008773804, + "learning_rate": 0.00013065626729474755, + "loss": 0.3608, + "step": 26940 + }, + { + "epoch": 1.0405807174022164, + "grad_norm": 1.2707170248031616, + "learning_rate": 0.00013063052627514576, + "loss": 0.3183, + "step": 26950 + }, + { + "epoch": 1.0409668326962431, + "grad_norm": 2.6396381855010986, + "learning_rate": 0.00013060478525554398, + "loss": 0.284, + "step": 26960 + }, + { + "epoch": 1.04135294799027, + "grad_norm": 1.5508041381835938, + "learning_rate": 0.0001305790442359422, + "loss": 0.2861, + "step": 26970 + }, + { + "epoch": 1.0417390632842967, + "grad_norm": 0.32720935344696045, + "learning_rate": 0.0001305533032163404, + "loss": 0.2784, + "step": 26980 + }, + { + "epoch": 1.0421251785783234, + "grad_norm": 2.3695240020751953, + "learning_rate": 0.00013052756219673862, + "loss": 0.3058, + "step": 26990 + }, + { + "epoch": 1.0425112938723502, + "grad_norm": 1.5404917001724243, + "learning_rate": 0.00013050182117713683, + "loss": 0.3636, + "step": 27000 + }, + { + "epoch": 1.0428974091663772, + "grad_norm": 1.3176417350769043, + "learning_rate": 0.00013047608015753504, + "loss": 0.2995, + "step": 27010 + }, + { + "epoch": 1.043283524460404, + "grad_norm": 1.5777894258499146, + 
"learning_rate": 0.00013045033913793326, + "loss": 0.3244, + "step": 27020 + }, + { + "epoch": 1.0436696397544307, + "grad_norm": 2.209652900695801, + "learning_rate": 0.00013042459811833147, + "loss": 0.2716, + "step": 27030 + }, + { + "epoch": 1.0440557550484575, + "grad_norm": 1.7991529703140259, + "learning_rate": 0.00013039885709872968, + "loss": 0.2022, + "step": 27040 + }, + { + "epoch": 1.0444418703424843, + "grad_norm": 0.3741607069969177, + "learning_rate": 0.0001303731160791279, + "loss": 0.3808, + "step": 27050 + }, + { + "epoch": 1.044827985636511, + "grad_norm": 0.023122821003198624, + "learning_rate": 0.0001303473750595261, + "loss": 0.3209, + "step": 27060 + }, + { + "epoch": 1.0452141009305378, + "grad_norm": 2.0373151302337646, + "learning_rate": 0.00013032163403992432, + "loss": 0.3105, + "step": 27070 + }, + { + "epoch": 1.0456002162245646, + "grad_norm": 0.5242247581481934, + "learning_rate": 0.00013029589302032254, + "loss": 0.2589, + "step": 27080 + }, + { + "epoch": 1.0459863315185915, + "grad_norm": 0.8598466515541077, + "learning_rate": 0.00013027015200072075, + "loss": 0.3025, + "step": 27090 + }, + { + "epoch": 1.0463724468126183, + "grad_norm": 0.19895502924919128, + "learning_rate": 0.00013024441098111896, + "loss": 0.3278, + "step": 27100 + }, + { + "epoch": 1.046758562106645, + "grad_norm": 1.9250338077545166, + "learning_rate": 0.0001302186699615172, + "loss": 0.3272, + "step": 27110 + }, + { + "epoch": 1.0471446774006719, + "grad_norm": 0.6927512288093567, + "learning_rate": 0.0001301929289419154, + "loss": 0.3256, + "step": 27120 + }, + { + "epoch": 1.0475307926946986, + "grad_norm": 0.5463778376579285, + "learning_rate": 0.0001301671879223136, + "loss": 0.2726, + "step": 27130 + }, + { + "epoch": 1.0479169079887254, + "grad_norm": 0.24690699577331543, + "learning_rate": 0.00013014144690271182, + "loss": 0.1247, + "step": 27140 + }, + { + "epoch": 1.0483030232827522, + "grad_norm": 7.875885009765625, + "learning_rate": 0.00013011570588311003, + "loss": 0.24, + "step": 27150 + }, + { + "epoch": 1.048689138576779, + "grad_norm": 2.0821642875671387, + "learning_rate": 0.00013008996486350824, + "loss": 0.2695, + "step": 27160 + }, + { + "epoch": 1.049075253870806, + "grad_norm": 1.0324410200119019, + "learning_rate": 0.00013006422384390646, + "loss": 0.3156, + "step": 27170 + }, + { + "epoch": 1.0494613691648327, + "grad_norm": 1.3675347566604614, + "learning_rate": 0.0001300384828243047, + "loss": 0.2366, + "step": 27180 + }, + { + "epoch": 1.0498474844588594, + "grad_norm": 0.4514729082584381, + "learning_rate": 0.00013001274180470288, + "loss": 0.1541, + "step": 27190 + }, + { + "epoch": 1.0502335997528862, + "grad_norm": 1.0098782777786255, + "learning_rate": 0.0001299870007851011, + "loss": 0.3059, + "step": 27200 + }, + { + "epoch": 1.050619715046913, + "grad_norm": 0.07944436371326447, + "learning_rate": 0.0001299612597654993, + "loss": 0.1882, + "step": 27210 + }, + { + "epoch": 1.0510058303409398, + "grad_norm": 0.8160178661346436, + "learning_rate": 0.00012993551874589752, + "loss": 0.2657, + "step": 27220 + }, + { + "epoch": 1.0513919456349665, + "grad_norm": 3.881469964981079, + "learning_rate": 0.00012990977772629574, + "loss": 0.4586, + "step": 27230 + }, + { + "epoch": 1.0517780609289935, + "grad_norm": 1.7851450443267822, + "learning_rate": 0.00012988403670669395, + "loss": 0.2945, + "step": 27240 + }, + { + "epoch": 1.0521641762230203, + "grad_norm": 4.176274299621582, + "learning_rate": 0.0001298582956870922, + "loss": 0.3819, + "step": 
27250 + }, + { + "epoch": 1.052550291517047, + "grad_norm": 1.43479585647583, + "learning_rate": 0.00012983255466749038, + "loss": 0.3651, + "step": 27260 + }, + { + "epoch": 1.0529364068110738, + "grad_norm": 0.4839598536491394, + "learning_rate": 0.0001298068136478886, + "loss": 0.186, + "step": 27270 + }, + { + "epoch": 1.0533225221051006, + "grad_norm": 0.8487644791603088, + "learning_rate": 0.0001297810726282868, + "loss": 0.0851, + "step": 27280 + }, + { + "epoch": 1.0537086373991273, + "grad_norm": 3.5912392139434814, + "learning_rate": 0.00012975533160868502, + "loss": 0.3962, + "step": 27290 + }, + { + "epoch": 1.0540947526931541, + "grad_norm": 2.4018168449401855, + "learning_rate": 0.00012972959058908326, + "loss": 0.4, + "step": 27300 + }, + { + "epoch": 1.0544808679871809, + "grad_norm": 3.8987746238708496, + "learning_rate": 0.00012970384956948144, + "loss": 0.3081, + "step": 27310 + }, + { + "epoch": 1.0548669832812079, + "grad_norm": 0.4471427798271179, + "learning_rate": 0.00012967810854987968, + "loss": 0.2227, + "step": 27320 + }, + { + "epoch": 1.0552530985752346, + "grad_norm": 0.6207703351974487, + "learning_rate": 0.00012965236753027787, + "loss": 0.2066, + "step": 27330 + }, + { + "epoch": 1.0556392138692614, + "grad_norm": 1.4381637573242188, + "learning_rate": 0.00012962662651067608, + "loss": 0.2598, + "step": 27340 + }, + { + "epoch": 1.0560253291632882, + "grad_norm": 1.6057437658309937, + "learning_rate": 0.0001296008854910743, + "loss": 0.4003, + "step": 27350 + }, + { + "epoch": 1.056411444457315, + "grad_norm": 1.583121418952942, + "learning_rate": 0.0001295751444714725, + "loss": 0.2231, + "step": 27360 + }, + { + "epoch": 1.0567975597513417, + "grad_norm": 0.9260556101799011, + "learning_rate": 0.00012954940345187075, + "loss": 0.3066, + "step": 27370 + }, + { + "epoch": 1.0571836750453685, + "grad_norm": 0.530800461769104, + "learning_rate": 0.00012952366243226894, + "loss": 0.2063, + "step": 27380 + }, + { + "epoch": 1.0575697903393952, + "grad_norm": 0.3256929814815521, + "learning_rate": 0.00012949792141266718, + "loss": 0.2279, + "step": 27390 + }, + { + "epoch": 1.0579559056334222, + "grad_norm": 1.2628639936447144, + "learning_rate": 0.00012947218039306536, + "loss": 0.179, + "step": 27400 + }, + { + "epoch": 1.058342020927449, + "grad_norm": 1.168662190437317, + "learning_rate": 0.0001294464393734636, + "loss": 0.2862, + "step": 27410 + }, + { + "epoch": 1.0587281362214758, + "grad_norm": 3.8283517360687256, + "learning_rate": 0.00012942069835386182, + "loss": 0.3933, + "step": 27420 + }, + { + "epoch": 1.0591142515155025, + "grad_norm": 0.10550712049007416, + "learning_rate": 0.00012939495733426, + "loss": 0.2015, + "step": 27430 + }, + { + "epoch": 1.0595003668095293, + "grad_norm": 0.7177254557609558, + "learning_rate": 0.00012936921631465824, + "loss": 0.2126, + "step": 27440 + }, + { + "epoch": 1.059886482103556, + "grad_norm": 3.3265602588653564, + "learning_rate": 0.00012934347529505643, + "loss": 0.3734, + "step": 27450 + }, + { + "epoch": 1.0602725973975828, + "grad_norm": 0.9886051416397095, + "learning_rate": 0.00012931773427545467, + "loss": 0.2643, + "step": 27460 + }, + { + "epoch": 1.0606587126916098, + "grad_norm": 0.8335347175598145, + "learning_rate": 0.00012929199325585286, + "loss": 0.2044, + "step": 27470 + }, + { + "epoch": 1.0610448279856366, + "grad_norm": 1.16574227809906, + "learning_rate": 0.0001292662522362511, + "loss": 0.2316, + "step": 27480 + }, + { + "epoch": 1.0614309432796634, + "grad_norm": 0.1635606586933136, + 
"learning_rate": 0.0001292405112166493, + "loss": 0.2477, + "step": 27490 + }, + { + "epoch": 1.0618170585736901, + "grad_norm": 0.6800632476806641, + "learning_rate": 0.0001292147701970475, + "loss": 0.2831, + "step": 27500 + }, + { + "epoch": 1.062203173867717, + "grad_norm": 0.5231989622116089, + "learning_rate": 0.00012918902917744574, + "loss": 0.1621, + "step": 27510 + }, + { + "epoch": 1.0625892891617437, + "grad_norm": 6.668003559112549, + "learning_rate": 0.00012916328815784392, + "loss": 0.2736, + "step": 27520 + }, + { + "epoch": 1.0629754044557704, + "grad_norm": 0.2980963885784149, + "learning_rate": 0.00012913754713824216, + "loss": 0.2563, + "step": 27530 + }, + { + "epoch": 1.0633615197497972, + "grad_norm": 0.8144646883010864, + "learning_rate": 0.00012911180611864035, + "loss": 0.3821, + "step": 27540 + }, + { + "epoch": 1.063747635043824, + "grad_norm": 0.9781578183174133, + "learning_rate": 0.0001290860650990386, + "loss": 0.3653, + "step": 27550 + }, + { + "epoch": 1.064133750337851, + "grad_norm": 1.5652499198913574, + "learning_rate": 0.0001290603240794368, + "loss": 0.325, + "step": 27560 + }, + { + "epoch": 1.0645198656318777, + "grad_norm": 2.707165002822876, + "learning_rate": 0.000129034583059835, + "loss": 0.3129, + "step": 27570 + }, + { + "epoch": 1.0649059809259045, + "grad_norm": 0.35952532291412354, + "learning_rate": 0.00012900884204023323, + "loss": 0.2093, + "step": 27580 + }, + { + "epoch": 1.0652920962199313, + "grad_norm": 1.2863729000091553, + "learning_rate": 0.00012898310102063142, + "loss": 0.1299, + "step": 27590 + }, + { + "epoch": 1.065678211513958, + "grad_norm": 0.8279618620872498, + "learning_rate": 0.00012895736000102966, + "loss": 0.2072, + "step": 27600 + }, + { + "epoch": 1.0660643268079848, + "grad_norm": 0.4006168246269226, + "learning_rate": 0.00012893161898142787, + "loss": 0.3613, + "step": 27610 + }, + { + "epoch": 1.0664504421020116, + "grad_norm": 1.2714260816574097, + "learning_rate": 0.00012890587796182608, + "loss": 0.2013, + "step": 27620 + }, + { + "epoch": 1.0668365573960386, + "grad_norm": 1.9552396535873413, + "learning_rate": 0.0001288801369422243, + "loss": 0.2837, + "step": 27630 + }, + { + "epoch": 1.0672226726900653, + "grad_norm": 2.179871082305908, + "learning_rate": 0.00012885439592262248, + "loss": 0.1521, + "step": 27640 + }, + { + "epoch": 1.067608787984092, + "grad_norm": 1.1471878290176392, + "learning_rate": 0.00012882865490302072, + "loss": 0.2532, + "step": 27650 + }, + { + "epoch": 1.0679949032781189, + "grad_norm": 3.0032637119293213, + "learning_rate": 0.0001288029138834189, + "loss": 0.467, + "step": 27660 + }, + { + "epoch": 1.0683810185721456, + "grad_norm": 2.480180025100708, + "learning_rate": 0.00012877717286381715, + "loss": 0.3601, + "step": 27670 + }, + { + "epoch": 1.0687671338661724, + "grad_norm": 1.86027991771698, + "learning_rate": 0.00012875143184421536, + "loss": 0.3689, + "step": 27680 + }, + { + "epoch": 1.0691532491601992, + "grad_norm": 0.38396087288856506, + "learning_rate": 0.00012872569082461358, + "loss": 0.1401, + "step": 27690 + }, + { + "epoch": 1.0695393644542261, + "grad_norm": 0.8882033824920654, + "learning_rate": 0.0001286999498050118, + "loss": 0.6004, + "step": 27700 + }, + { + "epoch": 1.069925479748253, + "grad_norm": 1.2111278772354126, + "learning_rate": 0.00012867420878540998, + "loss": 0.2371, + "step": 27710 + }, + { + "epoch": 1.0703115950422797, + "grad_norm": 1.7789413928985596, + "learning_rate": 0.00012864846776580822, + "loss": 0.3142, + "step": 27720 + }, 
+ { + "epoch": 1.0706977103363065, + "grad_norm": 1.6160372495651245, + "learning_rate": 0.00012862272674620643, + "loss": 0.3444, + "step": 27730 + }, + { + "epoch": 1.0710838256303332, + "grad_norm": 3.336289167404175, + "learning_rate": 0.00012859698572660464, + "loss": 0.1214, + "step": 27740 + }, + { + "epoch": 1.07146994092436, + "grad_norm": 4.104520320892334, + "learning_rate": 0.00012857124470700286, + "loss": 0.1752, + "step": 27750 + }, + { + "epoch": 1.0718560562183868, + "grad_norm": 0.4429762363433838, + "learning_rate": 0.00012854550368740107, + "loss": 0.5001, + "step": 27760 + }, + { + "epoch": 1.0722421715124135, + "grad_norm": 0.557033121585846, + "learning_rate": 0.00012851976266779928, + "loss": 0.3267, + "step": 27770 + }, + { + "epoch": 1.0726282868064403, + "grad_norm": 1.6847301721572876, + "learning_rate": 0.00012849402164819747, + "loss": 0.2305, + "step": 27780 + }, + { + "epoch": 1.0730144021004673, + "grad_norm": 1.0504320859909058, + "learning_rate": 0.0001284682806285957, + "loss": 0.3443, + "step": 27790 + }, + { + "epoch": 1.073400517394494, + "grad_norm": 2.728804349899292, + "learning_rate": 0.00012844253960899392, + "loss": 0.4083, + "step": 27800 + }, + { + "epoch": 1.0737866326885208, + "grad_norm": 0.6164497137069702, + "learning_rate": 0.00012841679858939214, + "loss": 0.2049, + "step": 27810 + }, + { + "epoch": 1.0741727479825476, + "grad_norm": 2.1831917762756348, + "learning_rate": 0.00012839105756979035, + "loss": 0.1674, + "step": 27820 + }, + { + "epoch": 1.0745588632765743, + "grad_norm": 0.5254467129707336, + "learning_rate": 0.00012836531655018856, + "loss": 0.3456, + "step": 27830 + }, + { + "epoch": 1.0749449785706011, + "grad_norm": 2.920846700668335, + "learning_rate": 0.00012833957553058678, + "loss": 0.4071, + "step": 27840 + }, + { + "epoch": 1.0753310938646279, + "grad_norm": 0.6006580591201782, + "learning_rate": 0.00012831383451098496, + "loss": 0.16, + "step": 27850 + }, + { + "epoch": 1.0757172091586549, + "grad_norm": 1.7163684368133545, + "learning_rate": 0.0001282880934913832, + "loss": 0.1821, + "step": 27860 + }, + { + "epoch": 1.0761033244526816, + "grad_norm": 1.8286449909210205, + "learning_rate": 0.00012826235247178142, + "loss": 0.3079, + "step": 27870 + }, + { + "epoch": 1.0764894397467084, + "grad_norm": 2.5178558826446533, + "learning_rate": 0.00012823661145217963, + "loss": 0.2158, + "step": 27880 + }, + { + "epoch": 1.0768755550407352, + "grad_norm": 0.8985245227813721, + "learning_rate": 0.00012821087043257784, + "loss": 0.198, + "step": 27890 + }, + { + "epoch": 1.077261670334762, + "grad_norm": 0.11768722534179688, + "learning_rate": 0.00012818512941297606, + "loss": 0.1661, + "step": 27900 + }, + { + "epoch": 1.0776477856287887, + "grad_norm": 1.0070226192474365, + "learning_rate": 0.00012815938839337427, + "loss": 0.2843, + "step": 27910 + }, + { + "epoch": 1.0780339009228155, + "grad_norm": 1.6243773698806763, + "learning_rate": 0.00012813364737377248, + "loss": 0.3449, + "step": 27920 + }, + { + "epoch": 1.0784200162168422, + "grad_norm": 2.8661181926727295, + "learning_rate": 0.0001281079063541707, + "loss": 0.4921, + "step": 27930 + }, + { + "epoch": 1.0788061315108692, + "grad_norm": 1.5015594959259033, + "learning_rate": 0.0001280821653345689, + "loss": 0.3347, + "step": 27940 + }, + { + "epoch": 1.079192246804896, + "grad_norm": 1.7244246006011963, + "learning_rate": 0.00012805642431496712, + "loss": 0.3342, + "step": 27950 + }, + { + "epoch": 1.0795783620989228, + "grad_norm": 0.17871785163879395, + 
"learning_rate": 0.00012803068329536534, + "loss": 0.051, + "step": 27960 + }, + { + "epoch": 1.0799644773929495, + "grad_norm": 1.093429684638977, + "learning_rate": 0.00012800494227576355, + "loss": 0.2461, + "step": 27970 + }, + { + "epoch": 1.0803505926869763, + "grad_norm": 1.0775126218795776, + "learning_rate": 0.00012797920125616176, + "loss": 0.293, + "step": 27980 + }, + { + "epoch": 1.080736707981003, + "grad_norm": 2.0808680057525635, + "learning_rate": 0.00012795346023655998, + "loss": 0.2125, + "step": 27990 + }, + { + "epoch": 1.0811228232750298, + "grad_norm": 3.172473907470703, + "learning_rate": 0.0001279277192169582, + "loss": 0.3289, + "step": 28000 + }, + { + "epoch": 1.0815089385690566, + "grad_norm": 0.8227205872535706, + "learning_rate": 0.0001279019781973564, + "loss": 0.4457, + "step": 28010 + }, + { + "epoch": 1.0818950538630836, + "grad_norm": 0.4987971782684326, + "learning_rate": 0.00012787623717775462, + "loss": 0.2397, + "step": 28020 + }, + { + "epoch": 1.0822811691571104, + "grad_norm": 0.6923367381095886, + "learning_rate": 0.00012785049615815283, + "loss": 0.1833, + "step": 28030 + }, + { + "epoch": 1.0826672844511371, + "grad_norm": 0.3719552457332611, + "learning_rate": 0.00012782475513855104, + "loss": 0.4045, + "step": 28040 + }, + { + "epoch": 1.083053399745164, + "grad_norm": 0.798744261264801, + "learning_rate": 0.00012779901411894926, + "loss": 0.2218, + "step": 28050 + }, + { + "epoch": 1.0834395150391907, + "grad_norm": 0.6289515495300293, + "learning_rate": 0.00012777327309934747, + "loss": 0.2423, + "step": 28060 + }, + { + "epoch": 1.0838256303332174, + "grad_norm": 0.6853532195091248, + "learning_rate": 0.00012774753207974568, + "loss": 0.2759, + "step": 28070 + }, + { + "epoch": 1.0842117456272442, + "grad_norm": 1.3380333185195923, + "learning_rate": 0.0001277217910601439, + "loss": 0.2457, + "step": 28080 + }, + { + "epoch": 1.0845978609212712, + "grad_norm": 1.4076060056686401, + "learning_rate": 0.0001276960500405421, + "loss": 0.3065, + "step": 28090 + }, + { + "epoch": 1.084983976215298, + "grad_norm": 1.790323257446289, + "learning_rate": 0.00012767030902094032, + "loss": 0.182, + "step": 28100 + }, + { + "epoch": 1.0853700915093247, + "grad_norm": 1.9291974306106567, + "learning_rate": 0.00012764456800133854, + "loss": 0.3798, + "step": 28110 + }, + { + "epoch": 1.0857562068033515, + "grad_norm": 1.36685049533844, + "learning_rate": 0.00012761882698173675, + "loss": 0.1893, + "step": 28120 + }, + { + "epoch": 1.0861423220973783, + "grad_norm": 2.52441668510437, + "learning_rate": 0.00012759308596213496, + "loss": 0.2389, + "step": 28130 + }, + { + "epoch": 1.086528437391405, + "grad_norm": 0.9578754901885986, + "learning_rate": 0.00012756734494253318, + "loss": 0.3133, + "step": 28140 + }, + { + "epoch": 1.0869145526854318, + "grad_norm": 0.17957572638988495, + "learning_rate": 0.0001275416039229314, + "loss": 0.1708, + "step": 28150 + }, + { + "epoch": 1.0873006679794586, + "grad_norm": 1.7213740348815918, + "learning_rate": 0.0001275158629033296, + "loss": 0.3831, + "step": 28160 + }, + { + "epoch": 1.0876867832734856, + "grad_norm": 1.2460767030715942, + "learning_rate": 0.00012749012188372782, + "loss": 0.266, + "step": 28170 + }, + { + "epoch": 1.0880728985675123, + "grad_norm": 0.33691835403442383, + "learning_rate": 0.00012746438086412606, + "loss": 0.216, + "step": 28180 + }, + { + "epoch": 1.088459013861539, + "grad_norm": 0.23887981474399567, + "learning_rate": 0.00012743863984452424, + "loss": 0.2331, + "step": 28190 + }, 
+ { + "epoch": 1.0888451291555659, + "grad_norm": 4.753499984741211, + "learning_rate": 0.00012741289882492246, + "loss": 0.1845, + "step": 28200 + }, + { + "epoch": 1.0892312444495926, + "grad_norm": 1.055097222328186, + "learning_rate": 0.00012738715780532067, + "loss": 0.2978, + "step": 28210 + }, + { + "epoch": 1.0896173597436194, + "grad_norm": 2.3080852031707764, + "learning_rate": 0.00012736141678571888, + "loss": 0.1838, + "step": 28220 + }, + { + "epoch": 1.0900034750376462, + "grad_norm": 0.3733162581920624, + "learning_rate": 0.00012733567576611712, + "loss": 0.1941, + "step": 28230 + }, + { + "epoch": 1.090389590331673, + "grad_norm": 2.247748613357544, + "learning_rate": 0.0001273099347465153, + "loss": 0.2594, + "step": 28240 + }, + { + "epoch": 1.0907757056257, + "grad_norm": 1.644177794456482, + "learning_rate": 0.00012728419372691355, + "loss": 0.392, + "step": 28250 + }, + { + "epoch": 1.0911618209197267, + "grad_norm": 2.3522965908050537, + "learning_rate": 0.00012725845270731174, + "loss": 0.2198, + "step": 28260 + }, + { + "epoch": 1.0915479362137535, + "grad_norm": 0.2335210144519806, + "learning_rate": 0.00012723271168770995, + "loss": 0.3363, + "step": 28270 + }, + { + "epoch": 1.0919340515077802, + "grad_norm": 0.476607084274292, + "learning_rate": 0.00012720697066810816, + "loss": 0.1803, + "step": 28280 + }, + { + "epoch": 1.092320166801807, + "grad_norm": 2.1482882499694824, + "learning_rate": 0.00012718122964850638, + "loss": 0.2567, + "step": 28290 + }, + { + "epoch": 1.0927062820958338, + "grad_norm": 1.6457593441009521, + "learning_rate": 0.00012715548862890462, + "loss": 0.196, + "step": 28300 + }, + { + "epoch": 1.0930923973898605, + "grad_norm": 2.0742087364196777, + "learning_rate": 0.0001271297476093028, + "loss": 0.3955, + "step": 28310 + }, + { + "epoch": 1.0934785126838875, + "grad_norm": 0.1588711142539978, + "learning_rate": 0.00012710400658970104, + "loss": 0.2113, + "step": 28320 + }, + { + "epoch": 1.0938646279779143, + "grad_norm": 4.303687572479248, + "learning_rate": 0.00012707826557009923, + "loss": 0.2941, + "step": 28330 + }, + { + "epoch": 1.094250743271941, + "grad_norm": 2.0096209049224854, + "learning_rate": 0.00012705252455049744, + "loss": 0.3092, + "step": 28340 + }, + { + "epoch": 1.0946368585659678, + "grad_norm": 0.49071142077445984, + "learning_rate": 0.00012702678353089566, + "loss": 0.2387, + "step": 28350 + }, + { + "epoch": 1.0950229738599946, + "grad_norm": 0.9084739089012146, + "learning_rate": 0.00012700104251129387, + "loss": 0.2199, + "step": 28360 + }, + { + "epoch": 1.0954090891540214, + "grad_norm": 2.076706647872925, + "learning_rate": 0.0001269753014916921, + "loss": 0.3652, + "step": 28370 + }, + { + "epoch": 1.0957952044480481, + "grad_norm": 0.13036206364631653, + "learning_rate": 0.0001269495604720903, + "loss": 0.2514, + "step": 28380 + }, + { + "epoch": 1.0961813197420749, + "grad_norm": 0.146321102976799, + "learning_rate": 0.00012692381945248854, + "loss": 0.2721, + "step": 28390 + }, + { + "epoch": 1.0965674350361019, + "grad_norm": 0.8172006607055664, + "learning_rate": 0.00012689807843288672, + "loss": 0.16, + "step": 28400 + }, + { + "epoch": 1.0969535503301286, + "grad_norm": 1.099068522453308, + "learning_rate": 0.00012687233741328494, + "loss": 0.2489, + "step": 28410 + }, + { + "epoch": 1.0973396656241554, + "grad_norm": 0.6757088899612427, + "learning_rate": 0.00012684659639368318, + "loss": 0.1449, + "step": 28420 + }, + { + "epoch": 1.0977257809181822, + "grad_norm": 0.11124458909034729, + 
"learning_rate": 0.00012682085537408136, + "loss": 0.2202, + "step": 28430 + }, + { + "epoch": 1.098111896212209, + "grad_norm": 2.357466220855713, + "learning_rate": 0.0001267951143544796, + "loss": 0.2669, + "step": 28440 + }, + { + "epoch": 1.0984980115062357, + "grad_norm": 4.569977760314941, + "learning_rate": 0.0001267693733348778, + "loss": 0.3054, + "step": 28450 + }, + { + "epoch": 1.0988841268002625, + "grad_norm": 1.150667667388916, + "learning_rate": 0.00012674363231527603, + "loss": 0.1519, + "step": 28460 + }, + { + "epoch": 1.0992702420942893, + "grad_norm": 2.016101360321045, + "learning_rate": 0.00012671789129567421, + "loss": 0.2307, + "step": 28470 + }, + { + "epoch": 1.0996563573883162, + "grad_norm": 1.2213127613067627, + "learning_rate": 0.00012669215027607243, + "loss": 0.2847, + "step": 28480 + }, + { + "epoch": 1.100042472682343, + "grad_norm": 2.8080902099609375, + "learning_rate": 0.00012666640925647067, + "loss": 0.2295, + "step": 28490 + }, + { + "epoch": 1.1004285879763698, + "grad_norm": 1.4878045320510864, + "learning_rate": 0.00012664066823686885, + "loss": 0.1497, + "step": 28500 + }, + { + "epoch": 1.1008147032703965, + "grad_norm": 0.7453703880310059, + "learning_rate": 0.0001266149272172671, + "loss": 0.2052, + "step": 28510 + }, + { + "epoch": 1.1012008185644233, + "grad_norm": 0.2775499224662781, + "learning_rate": 0.00012658918619766528, + "loss": 0.1576, + "step": 28520 + }, + { + "epoch": 1.10158693385845, + "grad_norm": 1.0527644157409668, + "learning_rate": 0.00012656344517806352, + "loss": 0.2957, + "step": 28530 + }, + { + "epoch": 1.1019730491524768, + "grad_norm": 0.6511454582214355, + "learning_rate": 0.0001265377041584617, + "loss": 0.2336, + "step": 28540 + }, + { + "epoch": 1.1023591644465038, + "grad_norm": 0.32867324352264404, + "learning_rate": 0.00012651196313885992, + "loss": 0.1745, + "step": 28550 + }, + { + "epoch": 1.1027452797405306, + "grad_norm": 2.4408578872680664, + "learning_rate": 0.00012648622211925816, + "loss": 0.2195, + "step": 28560 + }, + { + "epoch": 1.1031313950345574, + "grad_norm": 1.4876518249511719, + "learning_rate": 0.00012646048109965635, + "loss": 0.5123, + "step": 28570 + }, + { + "epoch": 1.1035175103285841, + "grad_norm": 1.9403778314590454, + "learning_rate": 0.0001264347400800546, + "loss": 0.2783, + "step": 28580 + }, + { + "epoch": 1.103903625622611, + "grad_norm": 1.215280532836914, + "learning_rate": 0.00012640899906045277, + "loss": 0.2661, + "step": 28590 + }, + { + "epoch": 1.1042897409166377, + "grad_norm": 0.7179967164993286, + "learning_rate": 0.00012638325804085102, + "loss": 0.1756, + "step": 28600 + }, + { + "epoch": 1.1046758562106644, + "grad_norm": 1.9051718711853027, + "learning_rate": 0.00012635751702124923, + "loss": 0.2724, + "step": 28610 + }, + { + "epoch": 1.1050619715046912, + "grad_norm": 1.5659642219543457, + "learning_rate": 0.00012633177600164744, + "loss": 0.3891, + "step": 28620 + }, + { + "epoch": 1.1054480867987182, + "grad_norm": 0.9210501313209534, + "learning_rate": 0.00012630603498204566, + "loss": 0.1369, + "step": 28630 + }, + { + "epoch": 1.105834202092745, + "grad_norm": 0.5428475141525269, + "learning_rate": 0.00012628029396244384, + "loss": 0.1173, + "step": 28640 + }, + { + "epoch": 1.1062203173867717, + "grad_norm": 1.7399749755859375, + "learning_rate": 0.00012625455294284208, + "loss": 0.3005, + "step": 28650 + }, + { + "epoch": 1.1066064326807985, + "grad_norm": 0.09703828394412994, + "learning_rate": 0.00012622881192324027, + "loss": 0.3346, + "step": 
28660 + }, + { + "epoch": 1.1069925479748253, + "grad_norm": 0.5834600329399109, + "learning_rate": 0.0001262030709036385, + "loss": 0.2689, + "step": 28670 + }, + { + "epoch": 1.107378663268852, + "grad_norm": 1.0677303075790405, + "learning_rate": 0.00012617732988403672, + "loss": 0.5609, + "step": 28680 + }, + { + "epoch": 1.1077647785628788, + "grad_norm": 1.6462419033050537, + "learning_rate": 0.00012615158886443493, + "loss": 0.1991, + "step": 28690 + }, + { + "epoch": 1.1081508938569056, + "grad_norm": 0.933779776096344, + "learning_rate": 0.00012612584784483315, + "loss": 0.2757, + "step": 28700 + }, + { + "epoch": 1.1085370091509326, + "grad_norm": 1.3413206338882446, + "learning_rate": 0.00012610010682523133, + "loss": 0.1798, + "step": 28710 + }, + { + "epoch": 1.1089231244449593, + "grad_norm": 4.479143142700195, + "learning_rate": 0.00012607436580562957, + "loss": 0.1777, + "step": 28720 + }, + { + "epoch": 1.109309239738986, + "grad_norm": 1.5768260955810547, + "learning_rate": 0.0001260486247860278, + "loss": 0.1967, + "step": 28730 + }, + { + "epoch": 1.1096953550330129, + "grad_norm": 2.1206741333007812, + "learning_rate": 0.000126022883766426, + "loss": 0.2399, + "step": 28740 + }, + { + "epoch": 1.1100814703270396, + "grad_norm": 1.4531667232513428, + "learning_rate": 0.00012599714274682421, + "loss": 0.3464, + "step": 28750 + }, + { + "epoch": 1.1104675856210664, + "grad_norm": 1.7988258600234985, + "learning_rate": 0.00012597140172722243, + "loss": 0.285, + "step": 28760 + }, + { + "epoch": 1.1108537009150932, + "grad_norm": 1.094808578491211, + "learning_rate": 0.00012594566070762064, + "loss": 0.2194, + "step": 28770 + }, + { + "epoch": 1.1112398162091202, + "grad_norm": 1.3884358406066895, + "learning_rate": 0.00012591991968801883, + "loss": 0.4267, + "step": 28780 + }, + { + "epoch": 1.111625931503147, + "grad_norm": 2.743480920791626, + "learning_rate": 0.00012589417866841707, + "loss": 0.3333, + "step": 28790 + }, + { + "epoch": 1.1120120467971737, + "grad_norm": 1.0373203754425049, + "learning_rate": 0.00012586843764881528, + "loss": 0.3941, + "step": 28800 + }, + { + "epoch": 1.1123981620912005, + "grad_norm": 2.018101692199707, + "learning_rate": 0.0001258426966292135, + "loss": 0.2928, + "step": 28810 + }, + { + "epoch": 1.1127842773852272, + "grad_norm": 2.567119836807251, + "learning_rate": 0.0001258169556096117, + "loss": 0.3597, + "step": 28820 + }, + { + "epoch": 1.113170392679254, + "grad_norm": 1.1235183477401733, + "learning_rate": 0.00012579121459000992, + "loss": 0.1807, + "step": 28830 + }, + { + "epoch": 1.1135565079732808, + "grad_norm": 1.3740451335906982, + "learning_rate": 0.00012576547357040813, + "loss": 0.2425, + "step": 28840 + }, + { + "epoch": 1.1139426232673075, + "grad_norm": 1.3751258850097656, + "learning_rate": 0.00012573973255080632, + "loss": 0.2116, + "step": 28850 + }, + { + "epoch": 1.1143287385613343, + "grad_norm": 0.9605401158332825, + "learning_rate": 0.00012571399153120456, + "loss": 0.2144, + "step": 28860 + }, + { + "epoch": 1.1147148538553613, + "grad_norm": 0.5127251148223877, + "learning_rate": 0.00012568825051160277, + "loss": 0.1902, + "step": 28870 + }, + { + "epoch": 1.115100969149388, + "grad_norm": 2.6720705032348633, + "learning_rate": 0.000125662509492001, + "loss": 0.3286, + "step": 28880 + }, + { + "epoch": 1.1154870844434148, + "grad_norm": 3.7196574211120605, + "learning_rate": 0.0001256367684723992, + "loss": 0.211, + "step": 28890 + }, + { + "epoch": 1.1158731997374416, + "grad_norm": 
0.18796740472316742, + "learning_rate": 0.00012561102745279741, + "loss": 0.3351, + "step": 28900 + }, + { + "epoch": 1.1162593150314684, + "grad_norm": 1.3164410591125488, + "learning_rate": 0.00012558528643319563, + "loss": 0.1771, + "step": 28910 + }, + { + "epoch": 1.1166454303254951, + "grad_norm": 0.9552701115608215, + "learning_rate": 0.00012555954541359384, + "loss": 0.3302, + "step": 28920 + }, + { + "epoch": 1.117031545619522, + "grad_norm": 1.0072277784347534, + "learning_rate": 0.00012553380439399205, + "loss": 0.2936, + "step": 28930 + }, + { + "epoch": 1.1174176609135489, + "grad_norm": 0.24307872354984283, + "learning_rate": 0.00012550806337439027, + "loss": 0.2034, + "step": 28940 + }, + { + "epoch": 1.1178037762075757, + "grad_norm": 2.985166311264038, + "learning_rate": 0.00012548232235478848, + "loss": 0.2628, + "step": 28950 + }, + { + "epoch": 1.1181898915016024, + "grad_norm": 0.46555295586586, + "learning_rate": 0.0001254565813351867, + "loss": 0.1904, + "step": 28960 + }, + { + "epoch": 1.1185760067956292, + "grad_norm": 0.7813409566879272, + "learning_rate": 0.0001254308403155849, + "loss": 0.33, + "step": 28970 + }, + { + "epoch": 1.118962122089656, + "grad_norm": 2.4459455013275146, + "learning_rate": 0.00012540509929598312, + "loss": 0.3183, + "step": 28980 + }, + { + "epoch": 1.1193482373836827, + "grad_norm": 0.5164415240287781, + "learning_rate": 0.00012537935827638133, + "loss": 0.3959, + "step": 28990 + }, + { + "epoch": 1.1197343526777095, + "grad_norm": 0.3853105306625366, + "learning_rate": 0.00012535361725677955, + "loss": 0.1426, + "step": 29000 + }, + { + "epoch": 1.1201204679717365, + "grad_norm": 0.5817530751228333, + "learning_rate": 0.00012532787623717776, + "loss": 0.2218, + "step": 29010 + }, + { + "epoch": 1.1205065832657632, + "grad_norm": 1.264248251914978, + "learning_rate": 0.00012530213521757597, + "loss": 0.3002, + "step": 29020 + }, + { + "epoch": 1.12089269855979, + "grad_norm": 1.999251127243042, + "learning_rate": 0.0001252763941979742, + "loss": 0.3573, + "step": 29030 + }, + { + "epoch": 1.1212788138538168, + "grad_norm": 1.760797381401062, + "learning_rate": 0.0001252506531783724, + "loss": 0.3829, + "step": 29040 + }, + { + "epoch": 1.1216649291478435, + "grad_norm": 1.4757565259933472, + "learning_rate": 0.00012522491215877061, + "loss": 0.3407, + "step": 29050 + }, + { + "epoch": 1.1220510444418703, + "grad_norm": 0.08838029205799103, + "learning_rate": 0.00012519917113916883, + "loss": 0.1899, + "step": 29060 + }, + { + "epoch": 1.122437159735897, + "grad_norm": 0.6416037678718567, + "learning_rate": 0.00012517343011956704, + "loss": 0.277, + "step": 29070 + }, + { + "epoch": 1.1228232750299239, + "grad_norm": 2.9282822608947754, + "learning_rate": 0.00012514768909996525, + "loss": 0.3001, + "step": 29080 + }, + { + "epoch": 1.1232093903239506, + "grad_norm": 2.5984582901000977, + "learning_rate": 0.00012512194808036347, + "loss": 0.2326, + "step": 29090 + }, + { + "epoch": 1.1235955056179776, + "grad_norm": 1.0622142553329468, + "learning_rate": 0.00012509620706076168, + "loss": 0.1494, + "step": 29100 + }, + { + "epoch": 1.1239816209120044, + "grad_norm": 1.5386018753051758, + "learning_rate": 0.0001250704660411599, + "loss": 0.3392, + "step": 29110 + }, + { + "epoch": 1.1243677362060311, + "grad_norm": 0.8901385068893433, + "learning_rate": 0.0001250447250215581, + "loss": 0.2651, + "step": 29120 + }, + { + "epoch": 1.124753851500058, + "grad_norm": 2.0237483978271484, + "learning_rate": 0.00012501898400195632, + "loss": 
0.3764, + "step": 29130 + }, + { + "epoch": 1.1251399667940847, + "grad_norm": 1.8989384174346924, + "learning_rate": 0.00012499324298235453, + "loss": 0.2713, + "step": 29140 + }, + { + "epoch": 1.1255260820881114, + "grad_norm": 2.704643487930298, + "learning_rate": 0.00012496750196275275, + "loss": 0.1362, + "step": 29150 + }, + { + "epoch": 1.1259121973821382, + "grad_norm": 1.2598273754119873, + "learning_rate": 0.00012494176094315096, + "loss": 0.331, + "step": 29160 + }, + { + "epoch": 1.1262983126761652, + "grad_norm": 2.2073826789855957, + "learning_rate": 0.00012491601992354917, + "loss": 0.209, + "step": 29170 + }, + { + "epoch": 1.126684427970192, + "grad_norm": 0.8338522911071777, + "learning_rate": 0.0001248902789039474, + "loss": 0.2583, + "step": 29180 + }, + { + "epoch": 1.1270705432642187, + "grad_norm": 0.49807825684547424, + "learning_rate": 0.0001248645378843456, + "loss": 0.2702, + "step": 29190 + }, + { + "epoch": 1.1274566585582455, + "grad_norm": 2.2561802864074707, + "learning_rate": 0.00012483879686474381, + "loss": 0.2667, + "step": 29200 + }, + { + "epoch": 1.1278427738522723, + "grad_norm": 0.6450731754302979, + "learning_rate": 0.00012481305584514203, + "loss": 0.2592, + "step": 29210 + }, + { + "epoch": 1.128228889146299, + "grad_norm": 1.214436650276184, + "learning_rate": 0.00012478731482554024, + "loss": 0.2285, + "step": 29220 + }, + { + "epoch": 1.1286150044403258, + "grad_norm": 1.2677173614501953, + "learning_rate": 0.00012476157380593848, + "loss": 0.1732, + "step": 29230 + }, + { + "epoch": 1.1290011197343528, + "grad_norm": 2.034266471862793, + "learning_rate": 0.00012473583278633667, + "loss": 0.3679, + "step": 29240 + }, + { + "epoch": 1.1293872350283796, + "grad_norm": 2.005913496017456, + "learning_rate": 0.00012471009176673488, + "loss": 0.2703, + "step": 29250 + }, + { + "epoch": 1.1297733503224063, + "grad_norm": 1.818928599357605, + "learning_rate": 0.0001246843507471331, + "loss": 0.3355, + "step": 29260 + }, + { + "epoch": 1.130159465616433, + "grad_norm": 0.5393241047859192, + "learning_rate": 0.0001246586097275313, + "loss": 0.1043, + "step": 29270 + }, + { + "epoch": 1.1305455809104599, + "grad_norm": 0.5508402585983276, + "learning_rate": 0.00012463286870792952, + "loss": 0.2538, + "step": 29280 + }, + { + "epoch": 1.1309316962044866, + "grad_norm": 1.1734035015106201, + "learning_rate": 0.00012460712768832773, + "loss": 0.3082, + "step": 29290 + }, + { + "epoch": 1.1313178114985134, + "grad_norm": 0.977611243724823, + "learning_rate": 0.00012458138666872597, + "loss": 0.1962, + "step": 29300 + }, + { + "epoch": 1.1317039267925402, + "grad_norm": 0.9720492362976074, + "learning_rate": 0.00012455564564912416, + "loss": 0.3462, + "step": 29310 + }, + { + "epoch": 1.132090042086567, + "grad_norm": 1.20888352394104, + "learning_rate": 0.00012452990462952237, + "loss": 0.1229, + "step": 29320 + }, + { + "epoch": 1.132476157380594, + "grad_norm": 0.7969954609870911, + "learning_rate": 0.0001245041636099206, + "loss": 0.4213, + "step": 29330 + }, + { + "epoch": 1.1328622726746207, + "grad_norm": 0.07595942914485931, + "learning_rate": 0.0001244784225903188, + "loss": 0.2148, + "step": 29340 + }, + { + "epoch": 1.1332483879686475, + "grad_norm": 0.15456156432628632, + "learning_rate": 0.000124452681570717, + "loss": 0.4713, + "step": 29350 + }, + { + "epoch": 1.1336345032626742, + "grad_norm": 1.232366681098938, + "learning_rate": 0.00012442694055111523, + "loss": 0.2446, + "step": 29360 + }, + { + "epoch": 1.134020618556701, + "grad_norm": 
2.3669209480285645, + "learning_rate": 0.00012440119953151347, + "loss": 0.3025, + "step": 29370 + }, + { + "epoch": 1.1344067338507278, + "grad_norm": 4.639179229736328, + "learning_rate": 0.00012437545851191165, + "loss": 0.337, + "step": 29380 + }, + { + "epoch": 1.1347928491447545, + "grad_norm": 0.700533926486969, + "learning_rate": 0.0001243497174923099, + "loss": 0.1747, + "step": 29390 + }, + { + "epoch": 1.1351789644387815, + "grad_norm": 0.5738794803619385, + "learning_rate": 0.00012432397647270808, + "loss": 0.357, + "step": 29400 + }, + { + "epoch": 1.1355650797328083, + "grad_norm": 2.620095729827881, + "learning_rate": 0.0001242982354531063, + "loss": 0.2885, + "step": 29410 + }, + { + "epoch": 1.135951195026835, + "grad_norm": 1.5040203332901, + "learning_rate": 0.00012427249443350453, + "loss": 0.2481, + "step": 29420 + }, + { + "epoch": 1.1363373103208618, + "grad_norm": 0.7409051060676575, + "learning_rate": 0.00012424675341390272, + "loss": 0.3365, + "step": 29430 + }, + { + "epoch": 1.1367234256148886, + "grad_norm": 0.6730226874351501, + "learning_rate": 0.00012422101239430096, + "loss": 0.1508, + "step": 29440 + }, + { + "epoch": 1.1371095409089154, + "grad_norm": 2.1389102935791016, + "learning_rate": 0.00012419527137469915, + "loss": 0.3832, + "step": 29450 + }, + { + "epoch": 1.1374956562029421, + "grad_norm": 0.5423761606216431, + "learning_rate": 0.0001241695303550974, + "loss": 0.2942, + "step": 29460 + }, + { + "epoch": 1.1378817714969691, + "grad_norm": 2.6076724529266357, + "learning_rate": 0.00012414378933549557, + "loss": 0.2291, + "step": 29470 + }, + { + "epoch": 1.1382678867909959, + "grad_norm": 1.0197224617004395, + "learning_rate": 0.0001241180483158938, + "loss": 0.2309, + "step": 29480 + }, + { + "epoch": 1.1386540020850227, + "grad_norm": 1.4430413246154785, + "learning_rate": 0.00012409230729629203, + "loss": 0.258, + "step": 29490 + }, + { + "epoch": 1.1390401173790494, + "grad_norm": 1.43483304977417, + "learning_rate": 0.0001240665662766902, + "loss": 0.1931, + "step": 29500 + }, + { + "epoch": 1.1394262326730762, + "grad_norm": 0.7181301116943359, + "learning_rate": 0.00012404082525708845, + "loss": 0.2675, + "step": 29510 + }, + { + "epoch": 1.139812347967103, + "grad_norm": 2.2020421028137207, + "learning_rate": 0.00012401508423748664, + "loss": 0.5094, + "step": 29520 + }, + { + "epoch": 1.1401984632611297, + "grad_norm": 0.35750746726989746, + "learning_rate": 0.00012398934321788488, + "loss": 0.3825, + "step": 29530 + }, + { + "epoch": 1.1405845785551565, + "grad_norm": 2.1792123317718506, + "learning_rate": 0.0001239636021982831, + "loss": 0.4259, + "step": 29540 + }, + { + "epoch": 1.1409706938491833, + "grad_norm": 1.2699453830718994, + "learning_rate": 0.00012393786117868128, + "loss": 0.2524, + "step": 29550 + }, + { + "epoch": 1.1413568091432102, + "grad_norm": 4.232237339019775, + "learning_rate": 0.00012391212015907952, + "loss": 0.4191, + "step": 29560 + }, + { + "epoch": 1.141742924437237, + "grad_norm": 1.5009098052978516, + "learning_rate": 0.0001238863791394777, + "loss": 0.5748, + "step": 29570 + }, + { + "epoch": 1.1421290397312638, + "grad_norm": 0.8117336630821228, + "learning_rate": 0.00012386063811987595, + "loss": 0.2309, + "step": 29580 + }, + { + "epoch": 1.1425151550252906, + "grad_norm": 0.6417378187179565, + "learning_rate": 0.00012383489710027413, + "loss": 0.3011, + "step": 29590 + }, + { + "epoch": 1.1429012703193173, + "grad_norm": 0.19958554208278656, + "learning_rate": 0.00012380915608067237, + "loss": 
0.4943, + "step": 29600 + }, + { + "epoch": 1.143287385613344, + "grad_norm": 1.7980111837387085, + "learning_rate": 0.0001237834150610706, + "loss": 0.2498, + "step": 29610 + }, + { + "epoch": 1.1436735009073709, + "grad_norm": 0.7506774663925171, + "learning_rate": 0.00012375767404146877, + "loss": 0.1622, + "step": 29620 + }, + { + "epoch": 1.1440596162013978, + "grad_norm": 0.4346953332424164, + "learning_rate": 0.000123731933021867, + "loss": 0.2608, + "step": 29630 + }, + { + "epoch": 1.1444457314954246, + "grad_norm": 1.4491907358169556, + "learning_rate": 0.0001237061920022652, + "loss": 0.4674, + "step": 29640 + }, + { + "epoch": 1.1448318467894514, + "grad_norm": 0.26328304409980774, + "learning_rate": 0.00012368045098266344, + "loss": 0.4287, + "step": 29650 + }, + { + "epoch": 1.1452179620834781, + "grad_norm": 0.6601302027702332, + "learning_rate": 0.00012365470996306163, + "loss": 0.3235, + "step": 29660 + }, + { + "epoch": 1.145604077377505, + "grad_norm": 3.9714503288269043, + "learning_rate": 0.00012362896894345987, + "loss": 0.2969, + "step": 29670 + }, + { + "epoch": 1.1459901926715317, + "grad_norm": 2.6341910362243652, + "learning_rate": 0.00012360322792385808, + "loss": 0.2771, + "step": 29680 + }, + { + "epoch": 1.1463763079655584, + "grad_norm": 0.04610513150691986, + "learning_rate": 0.00012357748690425627, + "loss": 0.2286, + "step": 29690 + }, + { + "epoch": 1.1467624232595852, + "grad_norm": 0.18892113864421844, + "learning_rate": 0.0001235517458846545, + "loss": 0.1821, + "step": 29700 + }, + { + "epoch": 1.147148538553612, + "grad_norm": 2.186973810195923, + "learning_rate": 0.0001235260048650527, + "loss": 0.2582, + "step": 29710 + }, + { + "epoch": 1.147534653847639, + "grad_norm": 0.23074299097061157, + "learning_rate": 0.00012350026384545093, + "loss": 0.3134, + "step": 29720 + }, + { + "epoch": 1.1479207691416657, + "grad_norm": 4.021410942077637, + "learning_rate": 0.00012347452282584915, + "loss": 0.3161, + "step": 29730 + }, + { + "epoch": 1.1483068844356925, + "grad_norm": 1.0262142419815063, + "learning_rate": 0.00012344878180624736, + "loss": 0.2034, + "step": 29740 + }, + { + "epoch": 1.1486929997297193, + "grad_norm": 1.4075149297714233, + "learning_rate": 0.00012342304078664557, + "loss": 0.2799, + "step": 29750 + }, + { + "epoch": 1.149079115023746, + "grad_norm": 0.9609633684158325, + "learning_rate": 0.00012339729976704376, + "loss": 0.1358, + "step": 29760 + }, + { + "epoch": 1.1494652303177728, + "grad_norm": 2.0923006534576416, + "learning_rate": 0.000123371558747442, + "loss": 0.224, + "step": 29770 + }, + { + "epoch": 1.1498513456117996, + "grad_norm": 0.4723201096057892, + "learning_rate": 0.00012334581772784019, + "loss": 0.1231, + "step": 29780 + }, + { + "epoch": 1.1502374609058266, + "grad_norm": 0.2070016860961914, + "learning_rate": 0.00012332007670823843, + "loss": 0.2998, + "step": 29790 + }, + { + "epoch": 1.1506235761998533, + "grad_norm": 0.3871285021305084, + "learning_rate": 0.00012329433568863664, + "loss": 0.2776, + "step": 29800 + }, + { + "epoch": 1.15100969149388, + "grad_norm": 0.6765030026435852, + "learning_rate": 0.00012326859466903485, + "loss": 0.6047, + "step": 29810 + }, + { + "epoch": 1.1513958067879069, + "grad_norm": 0.22907795011997223, + "learning_rate": 0.00012324285364943307, + "loss": 0.1573, + "step": 29820 + }, + { + "epoch": 1.1517819220819336, + "grad_norm": 0.3838706612586975, + "learning_rate": 0.00012321711262983128, + "loss": 0.1877, + "step": 29830 + }, + { + "epoch": 1.1521680373759604, + 
"grad_norm": 1.6967968940734863, + "learning_rate": 0.0001231913716102295, + "loss": 0.2314, + "step": 29840 + }, + { + "epoch": 1.1525541526699872, + "grad_norm": 0.6754477024078369, + "learning_rate": 0.00012316563059062768, + "loss": 0.1997, + "step": 29850 + }, + { + "epoch": 1.1529402679640142, + "grad_norm": 0.8980739712715149, + "learning_rate": 0.00012313988957102592, + "loss": 0.2729, + "step": 29860 + }, + { + "epoch": 1.153326383258041, + "grad_norm": 2.7968523502349854, + "learning_rate": 0.00012311414855142413, + "loss": 0.2805, + "step": 29870 + }, + { + "epoch": 1.1537124985520677, + "grad_norm": 1.5867468118667603, + "learning_rate": 0.00012308840753182235, + "loss": 0.3489, + "step": 29880 + }, + { + "epoch": 1.1540986138460945, + "grad_norm": 3.077193021774292, + "learning_rate": 0.00012306266651222056, + "loss": 0.3397, + "step": 29890 + }, + { + "epoch": 1.1544847291401212, + "grad_norm": 0.49896860122680664, + "learning_rate": 0.00012303692549261877, + "loss": 0.1215, + "step": 29900 + }, + { + "epoch": 1.154870844434148, + "grad_norm": 0.9598873257637024, + "learning_rate": 0.00012301118447301699, + "loss": 0.1618, + "step": 29910 + }, + { + "epoch": 1.1552569597281748, + "grad_norm": 1.2659916877746582, + "learning_rate": 0.0001229854434534152, + "loss": 0.1623, + "step": 29920 + }, + { + "epoch": 1.1556430750222015, + "grad_norm": 1.4009841680526733, + "learning_rate": 0.0001229597024338134, + "loss": 0.2205, + "step": 29930 + }, + { + "epoch": 1.1560291903162283, + "grad_norm": 0.5847800970077515, + "learning_rate": 0.00012293396141421163, + "loss": 0.3173, + "step": 29940 + }, + { + "epoch": 1.1564153056102553, + "grad_norm": 1.2930784225463867, + "learning_rate": 0.00012290822039460984, + "loss": 0.3052, + "step": 29950 + }, + { + "epoch": 1.156801420904282, + "grad_norm": 0.7307919859886169, + "learning_rate": 0.00012288247937500805, + "loss": 0.2293, + "step": 29960 + }, + { + "epoch": 1.1571875361983088, + "grad_norm": 0.2120385468006134, + "learning_rate": 0.00012285673835540627, + "loss": 0.2006, + "step": 29970 + }, + { + "epoch": 1.1575736514923356, + "grad_norm": 1.0932674407958984, + "learning_rate": 0.00012283099733580448, + "loss": 0.1716, + "step": 29980 + }, + { + "epoch": 1.1579597667863624, + "grad_norm": 0.7607210874557495, + "learning_rate": 0.0001228052563162027, + "loss": 0.346, + "step": 29990 + }, + { + "epoch": 1.1583458820803891, + "grad_norm": 4.455061435699463, + "learning_rate": 0.0001227795152966009, + "loss": 0.2588, + "step": 30000 + }, + { + "epoch": 1.158731997374416, + "grad_norm": 2.0812416076660156, + "learning_rate": 0.00012275377427699912, + "loss": 0.2958, + "step": 30010 + }, + { + "epoch": 1.159118112668443, + "grad_norm": 1.8883334398269653, + "learning_rate": 0.00012272803325739733, + "loss": 0.1687, + "step": 30020 + }, + { + "epoch": 1.1595042279624697, + "grad_norm": 1.3935341835021973, + "learning_rate": 0.00012270229223779555, + "loss": 0.2276, + "step": 30030 + }, + { + "epoch": 1.1598903432564964, + "grad_norm": 2.5193631649017334, + "learning_rate": 0.00012267655121819376, + "loss": 0.3598, + "step": 30040 + }, + { + "epoch": 1.1602764585505232, + "grad_norm": 1.9172290563583374, + "learning_rate": 0.00012265081019859197, + "loss": 0.3294, + "step": 30050 + }, + { + "epoch": 1.16066257384455, + "grad_norm": 1.2220836877822876, + "learning_rate": 0.00012262506917899019, + "loss": 0.2053, + "step": 30060 + }, + { + "epoch": 1.1610486891385767, + "grad_norm": 1.9804691076278687, + "learning_rate": 
0.0001225993281593884, + "loss": 0.2091, + "step": 30070 + }, + { + "epoch": 1.1614348044326035, + "grad_norm": 1.2194398641586304, + "learning_rate": 0.0001225735871397866, + "loss": 0.2535, + "step": 30080 + }, + { + "epoch": 1.1618209197266305, + "grad_norm": 0.7828000783920288, + "learning_rate": 0.00012254784612018483, + "loss": 0.436, + "step": 30090 + }, + { + "epoch": 1.1622070350206573, + "grad_norm": 0.26130637526512146, + "learning_rate": 0.00012252210510058304, + "loss": 0.3351, + "step": 30100 + }, + { + "epoch": 1.162593150314684, + "grad_norm": 0.9175068736076355, + "learning_rate": 0.00012249636408098125, + "loss": 0.0682, + "step": 30110 + }, + { + "epoch": 1.1629792656087108, + "grad_norm": 0.35828933119773865, + "learning_rate": 0.00012247062306137947, + "loss": 0.3081, + "step": 30120 + }, + { + "epoch": 1.1633653809027376, + "grad_norm": 1.2477439641952515, + "learning_rate": 0.00012244488204177768, + "loss": 0.2522, + "step": 30130 + }, + { + "epoch": 1.1637514961967643, + "grad_norm": 1.0479830503463745, + "learning_rate": 0.0001224191410221759, + "loss": 0.3183, + "step": 30140 + }, + { + "epoch": 1.164137611490791, + "grad_norm": 0.9295257329940796, + "learning_rate": 0.0001223934000025741, + "loss": 0.149, + "step": 30150 + }, + { + "epoch": 1.1645237267848179, + "grad_norm": 1.4081065654754639, + "learning_rate": 0.00012236765898297232, + "loss": 0.2403, + "step": 30160 + }, + { + "epoch": 1.1649098420788446, + "grad_norm": 0.6170324087142944, + "learning_rate": 0.00012234191796337053, + "loss": 0.3526, + "step": 30170 + }, + { + "epoch": 1.1652959573728716, + "grad_norm": 3.095670461654663, + "learning_rate": 0.00012231617694376875, + "loss": 0.2873, + "step": 30180 + }, + { + "epoch": 1.1656820726668984, + "grad_norm": 1.453447937965393, + "learning_rate": 0.00012229043592416696, + "loss": 0.2753, + "step": 30190 + }, + { + "epoch": 1.1660681879609252, + "grad_norm": 1.008033275604248, + "learning_rate": 0.00012226469490456517, + "loss": 0.2926, + "step": 30200 + }, + { + "epoch": 1.166454303254952, + "grad_norm": 2.122175693511963, + "learning_rate": 0.00012223895388496339, + "loss": 0.2432, + "step": 30210 + }, + { + "epoch": 1.1668404185489787, + "grad_norm": 1.4835058450698853, + "learning_rate": 0.0001222132128653616, + "loss": 0.2891, + "step": 30220 + }, + { + "epoch": 1.1672265338430055, + "grad_norm": 1.7386225461959839, + "learning_rate": 0.00012218747184575984, + "loss": 0.2371, + "step": 30230 + }, + { + "epoch": 1.1676126491370322, + "grad_norm": 2.1769731044769287, + "learning_rate": 0.00012216173082615803, + "loss": 0.2798, + "step": 30240 + }, + { + "epoch": 1.1679987644310592, + "grad_norm": 3.3941900730133057, + "learning_rate": 0.00012213598980655624, + "loss": 0.4021, + "step": 30250 + }, + { + "epoch": 1.168384879725086, + "grad_norm": 1.099238395690918, + "learning_rate": 0.00012211024878695445, + "loss": 0.2908, + "step": 30260 + }, + { + "epoch": 1.1687709950191127, + "grad_norm": 0.6718109250068665, + "learning_rate": 0.00012208450776735267, + "loss": 0.1791, + "step": 30270 + }, + { + "epoch": 1.1691571103131395, + "grad_norm": 0.25414201617240906, + "learning_rate": 0.00012205876674775088, + "loss": 0.1322, + "step": 30280 + }, + { + "epoch": 1.1695432256071663, + "grad_norm": 2.1115262508392334, + "learning_rate": 0.00012203302572814909, + "loss": 0.3538, + "step": 30290 + }, + { + "epoch": 1.169929340901193, + "grad_norm": 2.3652501106262207, + "learning_rate": 0.00012200728470854732, + "loss": 0.3229, + "step": 30300 + }, + { + 
"epoch": 1.1703154561952198, + "grad_norm": 1.4749270677566528, + "learning_rate": 0.00012198154368894552, + "loss": 0.1251, + "step": 30310 + }, + { + "epoch": 1.1707015714892468, + "grad_norm": 0.6566292643547058, + "learning_rate": 0.00012195580266934375, + "loss": 0.2527, + "step": 30320 + }, + { + "epoch": 1.1710876867832736, + "grad_norm": 1.9602152109146118, + "learning_rate": 0.00012193006164974195, + "loss": 0.1851, + "step": 30330 + }, + { + "epoch": 1.1714738020773003, + "grad_norm": 1.6631299257278442, + "learning_rate": 0.00012190432063014017, + "loss": 0.4715, + "step": 30340 + }, + { + "epoch": 1.171859917371327, + "grad_norm": 1.1554430723190308, + "learning_rate": 0.00012187857961053839, + "loss": 0.361, + "step": 30350 + }, + { + "epoch": 1.1722460326653539, + "grad_norm": 2.5738513469696045, + "learning_rate": 0.00012185283859093659, + "loss": 0.3661, + "step": 30360 + }, + { + "epoch": 1.1726321479593806, + "grad_norm": 0.5713154077529907, + "learning_rate": 0.00012182709757133481, + "loss": 0.2468, + "step": 30370 + }, + { + "epoch": 1.1730182632534074, + "grad_norm": 0.7371454834938049, + "learning_rate": 0.00012180135655173301, + "loss": 0.2949, + "step": 30380 + }, + { + "epoch": 1.1734043785474342, + "grad_norm": 2.5442118644714355, + "learning_rate": 0.00012177561553213124, + "loss": 0.2305, + "step": 30390 + }, + { + "epoch": 1.173790493841461, + "grad_norm": 1.684951663017273, + "learning_rate": 0.00012174987451252944, + "loss": 0.2535, + "step": 30400 + }, + { + "epoch": 1.174176609135488, + "grad_norm": 1.6874382495880127, + "learning_rate": 0.00012172413349292767, + "loss": 0.4069, + "step": 30410 + }, + { + "epoch": 1.1745627244295147, + "grad_norm": 0.46226370334625244, + "learning_rate": 0.00012169839247332588, + "loss": 0.221, + "step": 30420 + }, + { + "epoch": 1.1749488397235415, + "grad_norm": 3.5687646865844727, + "learning_rate": 0.00012167265145372408, + "loss": 0.2749, + "step": 30430 + }, + { + "epoch": 1.1753349550175682, + "grad_norm": 2.9427647590637207, + "learning_rate": 0.0001216469104341223, + "loss": 0.2621, + "step": 30440 + }, + { + "epoch": 1.175721070311595, + "grad_norm": 1.3569320440292358, + "learning_rate": 0.0001216211694145205, + "loss": 0.1711, + "step": 30450 + }, + { + "epoch": 1.1761071856056218, + "grad_norm": 0.5906672477722168, + "learning_rate": 0.00012159542839491873, + "loss": 0.3111, + "step": 30460 + }, + { + "epoch": 1.1764933008996485, + "grad_norm": 1.7809525728225708, + "learning_rate": 0.00012156968737531693, + "loss": 0.198, + "step": 30470 + }, + { + "epoch": 1.1768794161936755, + "grad_norm": 1.5865052938461304, + "learning_rate": 0.00012154394635571516, + "loss": 0.3589, + "step": 30480 + }, + { + "epoch": 1.1772655314877023, + "grad_norm": 0.4852294623851776, + "learning_rate": 0.00012151820533611337, + "loss": 0.203, + "step": 30490 + }, + { + "epoch": 1.177651646781729, + "grad_norm": 4.437458515167236, + "learning_rate": 0.00012149246431651158, + "loss": 0.2886, + "step": 30500 + }, + { + "epoch": 1.1780377620757558, + "grad_norm": 2.204751491546631, + "learning_rate": 0.0001214667232969098, + "loss": 0.313, + "step": 30510 + }, + { + "epoch": 1.1784238773697826, + "grad_norm": 0.9356504678726196, + "learning_rate": 0.000121440982277308, + "loss": 0.2813, + "step": 30520 + }, + { + "epoch": 1.1788099926638094, + "grad_norm": 0.06744952499866486, + "learning_rate": 0.00012141524125770622, + "loss": 0.2009, + "step": 30530 + }, + { + "epoch": 1.1791961079578361, + "grad_norm": 0.49779242277145386, + 
"learning_rate": 0.00012138950023810445, + "loss": 0.1822, + "step": 30540 + }, + { + "epoch": 1.1795822232518631, + "grad_norm": 1.1115593910217285, + "learning_rate": 0.00012136375921850265, + "loss": 0.4164, + "step": 30550 + }, + { + "epoch": 1.17996833854589, + "grad_norm": 0.2939944267272949, + "learning_rate": 0.00012133801819890086, + "loss": 0.1356, + "step": 30560 + }, + { + "epoch": 1.1803544538399167, + "grad_norm": 0.15905381739139557, + "learning_rate": 0.00012131227717929908, + "loss": 0.3479, + "step": 30570 + }, + { + "epoch": 1.1807405691339434, + "grad_norm": 3.146277666091919, + "learning_rate": 0.00012128653615969729, + "loss": 0.2722, + "step": 30580 + }, + { + "epoch": 1.1811266844279702, + "grad_norm": 0.60884690284729, + "learning_rate": 0.00012126079514009549, + "loss": 0.1778, + "step": 30590 + }, + { + "epoch": 1.181512799721997, + "grad_norm": 0.053204573690891266, + "learning_rate": 0.00012123505412049372, + "loss": 0.1397, + "step": 30600 + }, + { + "epoch": 1.1818989150160237, + "grad_norm": 1.2908227443695068, + "learning_rate": 0.00012120931310089194, + "loss": 0.1552, + "step": 30610 + }, + { + "epoch": 1.1822850303100505, + "grad_norm": 1.489575743675232, + "learning_rate": 0.00012118357208129014, + "loss": 0.2965, + "step": 30620 + }, + { + "epoch": 1.1826711456040773, + "grad_norm": 2.4103262424468994, + "learning_rate": 0.00012115783106168837, + "loss": 0.2947, + "step": 30630 + }, + { + "epoch": 1.1830572608981043, + "grad_norm": 0.3685878813266754, + "learning_rate": 0.00012113209004208657, + "loss": 0.2144, + "step": 30640 + }, + { + "epoch": 1.183443376192131, + "grad_norm": 0.29953858256340027, + "learning_rate": 0.00012110634902248478, + "loss": 0.2281, + "step": 30650 + }, + { + "epoch": 1.1838294914861578, + "grad_norm": 0.4792311191558838, + "learning_rate": 0.00012108060800288298, + "loss": 0.2727, + "step": 30660 + }, + { + "epoch": 1.1842156067801846, + "grad_norm": 0.17143972218036652, + "learning_rate": 0.00012105486698328121, + "loss": 0.2096, + "step": 30670 + }, + { + "epoch": 1.1846017220742113, + "grad_norm": 0.43678683042526245, + "learning_rate": 0.00012102912596367944, + "loss": 0.3818, + "step": 30680 + }, + { + "epoch": 1.184987837368238, + "grad_norm": 2.0908610820770264, + "learning_rate": 0.00012100338494407764, + "loss": 0.2496, + "step": 30690 + }, + { + "epoch": 1.1853739526622649, + "grad_norm": 1.5331153869628906, + "learning_rate": 0.00012097764392447586, + "loss": 0.2772, + "step": 30700 + }, + { + "epoch": 1.1857600679562919, + "grad_norm": 0.6948639154434204, + "learning_rate": 0.00012095190290487406, + "loss": 0.3896, + "step": 30710 + }, + { + "epoch": 1.1861461832503186, + "grad_norm": 2.3802030086517334, + "learning_rate": 0.00012092616188527228, + "loss": 0.1998, + "step": 30720 + }, + { + "epoch": 1.1865322985443454, + "grad_norm": 2.1358511447906494, + "learning_rate": 0.0001209004208656705, + "loss": 0.2865, + "step": 30730 + }, + { + "epoch": 1.1869184138383722, + "grad_norm": 0.8761110305786133, + "learning_rate": 0.0001208746798460687, + "loss": 0.2574, + "step": 30740 + }, + { + "epoch": 1.187304529132399, + "grad_norm": 0.6293401718139648, + "learning_rate": 0.00012084893882646693, + "loss": 0.4006, + "step": 30750 + }, + { + "epoch": 1.1876906444264257, + "grad_norm": 2.4534378051757812, + "learning_rate": 0.00012082319780686513, + "loss": 0.2281, + "step": 30760 + }, + { + "epoch": 1.1880767597204525, + "grad_norm": 0.38392508029937744, + "learning_rate": 0.00012079745678726336, + "loss": 0.3055, + 
"step": 30770 + }, + { + "epoch": 1.1884628750144794, + "grad_norm": 1.278108835220337, + "learning_rate": 0.00012077171576766156, + "loss": 0.2376, + "step": 30780 + }, + { + "epoch": 1.1888489903085062, + "grad_norm": 0.2575186789035797, + "learning_rate": 0.00012074597474805977, + "loss": 0.256, + "step": 30790 + }, + { + "epoch": 1.189235105602533, + "grad_norm": 4.043684959411621, + "learning_rate": 0.000120720233728458, + "loss": 0.3601, + "step": 30800 + }, + { + "epoch": 1.1896212208965598, + "grad_norm": 0.2561960220336914, + "learning_rate": 0.0001206944927088562, + "loss": 0.2698, + "step": 30810 + }, + { + "epoch": 1.1900073361905865, + "grad_norm": 0.3477579355239868, + "learning_rate": 0.00012066875168925442, + "loss": 0.3442, + "step": 30820 + }, + { + "epoch": 1.1903934514846133, + "grad_norm": 1.0357879400253296, + "learning_rate": 0.00012064301066965262, + "loss": 0.2561, + "step": 30830 + }, + { + "epoch": 1.19077956677864, + "grad_norm": 0.14653460681438446, + "learning_rate": 0.00012061726965005085, + "loss": 0.3072, + "step": 30840 + }, + { + "epoch": 1.1911656820726668, + "grad_norm": 1.2516767978668213, + "learning_rate": 0.00012059152863044906, + "loss": 0.4203, + "step": 30850 + }, + { + "epoch": 1.1915517973666936, + "grad_norm": 0.41542065143585205, + "learning_rate": 0.00012056578761084726, + "loss": 0.2018, + "step": 30860 + }, + { + "epoch": 1.1919379126607206, + "grad_norm": 1.7187318801879883, + "learning_rate": 0.00012054004659124549, + "loss": 0.097, + "step": 30870 + }, + { + "epoch": 1.1923240279547473, + "grad_norm": 0.3913117051124573, + "learning_rate": 0.00012051430557164369, + "loss": 0.1655, + "step": 30880 + }, + { + "epoch": 1.1927101432487741, + "grad_norm": 3.708519697189331, + "learning_rate": 0.00012048856455204192, + "loss": 0.2315, + "step": 30890 + }, + { + "epoch": 1.1930962585428009, + "grad_norm": 1.506726861000061, + "learning_rate": 0.00012046282353244012, + "loss": 0.1712, + "step": 30900 + }, + { + "epoch": 1.1934823738368276, + "grad_norm": 1.5069276094436646, + "learning_rate": 0.00012043708251283834, + "loss": 0.399, + "step": 30910 + }, + { + "epoch": 1.1938684891308544, + "grad_norm": 0.09606973081827164, + "learning_rate": 0.00012041134149323656, + "loss": 0.1226, + "step": 30920 + }, + { + "epoch": 1.1942546044248812, + "grad_norm": 1.7927659749984741, + "learning_rate": 0.00012038560047363476, + "loss": 0.0975, + "step": 30930 + }, + { + "epoch": 1.1946407197189082, + "grad_norm": 2.2988364696502686, + "learning_rate": 0.00012035985945403298, + "loss": 0.2742, + "step": 30940 + }, + { + "epoch": 1.195026835012935, + "grad_norm": 0.4412599205970764, + "learning_rate": 0.00012033411843443118, + "loss": 0.2008, + "step": 30950 + }, + { + "epoch": 1.1954129503069617, + "grad_norm": 3.227698564529419, + "learning_rate": 0.00012030837741482941, + "loss": 0.2797, + "step": 30960 + }, + { + "epoch": 1.1957990656009885, + "grad_norm": 1.3904643058776855, + "learning_rate": 0.00012028263639522761, + "loss": 0.1636, + "step": 30970 + }, + { + "epoch": 1.1961851808950152, + "grad_norm": 1.6495708227157593, + "learning_rate": 0.00012025689537562584, + "loss": 0.2768, + "step": 30980 + }, + { + "epoch": 1.196571296189042, + "grad_norm": 0.3142000436782837, + "learning_rate": 0.00012023115435602405, + "loss": 0.1797, + "step": 30990 + }, + { + "epoch": 1.1969574114830688, + "grad_norm": 1.562090516090393, + "learning_rate": 0.00012020541333642225, + "loss": 0.3175, + "step": 31000 + }, + { + "epoch": 1.1973435267770955, + "grad_norm": 
1.3837881088256836, + "learning_rate": 0.00012017967231682048, + "loss": 0.3137, + "step": 31010 + }, + { + "epoch": 1.1977296420711223, + "grad_norm": 1.1495468616485596, + "learning_rate": 0.00012015393129721868, + "loss": 0.232, + "step": 31020 + }, + { + "epoch": 1.1981157573651493, + "grad_norm": 1.562514305114746, + "learning_rate": 0.0001201281902776169, + "loss": 0.3455, + "step": 31030 + }, + { + "epoch": 1.198501872659176, + "grad_norm": 1.626610517501831, + "learning_rate": 0.00012010244925801513, + "loss": 0.2382, + "step": 31040 + }, + { + "epoch": 1.1988879879532028, + "grad_norm": 2.5863585472106934, + "learning_rate": 0.00012007670823841333, + "loss": 0.288, + "step": 31050 + }, + { + "epoch": 1.1992741032472296, + "grad_norm": 0.9751160740852356, + "learning_rate": 0.00012005096721881154, + "loss": 0.3236, + "step": 31060 + }, + { + "epoch": 1.1996602185412564, + "grad_norm": 1.7428686618804932, + "learning_rate": 0.00012002522619920974, + "loss": 0.3707, + "step": 31070 + }, + { + "epoch": 1.2000463338352831, + "grad_norm": 1.0963342189788818, + "learning_rate": 0.00011999948517960797, + "loss": 0.2991, + "step": 31080 + }, + { + "epoch": 1.20043244912931, + "grad_norm": 2.0942695140838623, + "learning_rate": 0.00011997374416000617, + "loss": 0.2611, + "step": 31090 + }, + { + "epoch": 1.200818564423337, + "grad_norm": 0.11989244073629379, + "learning_rate": 0.0001199480031404044, + "loss": 0.2071, + "step": 31100 + }, + { + "epoch": 1.2012046797173637, + "grad_norm": 0.44892773032188416, + "learning_rate": 0.00011992226212080262, + "loss": 0.2105, + "step": 31110 + }, + { + "epoch": 1.2015907950113904, + "grad_norm": 0.0861232578754425, + "learning_rate": 0.00011989652110120082, + "loss": 0.1906, + "step": 31120 + }, + { + "epoch": 1.2019769103054172, + "grad_norm": 1.9162683486938477, + "learning_rate": 0.00011987078008159904, + "loss": 0.1929, + "step": 31130 + }, + { + "epoch": 1.202363025599444, + "grad_norm": 1.3750224113464355, + "learning_rate": 0.00011984503906199725, + "loss": 0.3072, + "step": 31140 + }, + { + "epoch": 1.2027491408934707, + "grad_norm": 0.49282577633857727, + "learning_rate": 0.00011981929804239546, + "loss": 0.3082, + "step": 31150 + }, + { + "epoch": 1.2031352561874975, + "grad_norm": 1.0578473806381226, + "learning_rate": 0.00011979355702279366, + "loss": 0.2498, + "step": 31160 + }, + { + "epoch": 1.2035213714815245, + "grad_norm": 0.6070756316184998, + "learning_rate": 0.00011976781600319189, + "loss": 0.2324, + "step": 31170 + }, + { + "epoch": 1.2039074867755513, + "grad_norm": 3.2918875217437744, + "learning_rate": 0.00011974207498359012, + "loss": 0.4218, + "step": 31180 + }, + { + "epoch": 1.204293602069578, + "grad_norm": 0.3284684419631958, + "learning_rate": 0.00011971633396398832, + "loss": 0.2713, + "step": 31190 + }, + { + "epoch": 1.2046797173636048, + "grad_norm": 0.8733110427856445, + "learning_rate": 0.00011969059294438653, + "loss": 0.328, + "step": 31200 + }, + { + "epoch": 1.2050658326576316, + "grad_norm": 1.787016749382019, + "learning_rate": 0.00011966485192478474, + "loss": 0.2115, + "step": 31210 + }, + { + "epoch": 1.2054519479516583, + "grad_norm": 0.7539357542991638, + "learning_rate": 0.00011963911090518296, + "loss": 0.3585, + "step": 31220 + }, + { + "epoch": 1.205838063245685, + "grad_norm": 0.32271450757980347, + "learning_rate": 0.00011961336988558118, + "loss": 0.2472, + "step": 31230 + }, + { + "epoch": 1.2062241785397119, + "grad_norm": 0.85898756980896, + "learning_rate": 0.00011958762886597938, + 
"loss": 0.2367, + "step": 31240 + }, + { + "epoch": 1.2066102938337386, + "grad_norm": 0.6451173424720764, + "learning_rate": 0.00011956188784637761, + "loss": 0.2309, + "step": 31250 + }, + { + "epoch": 1.2069964091277656, + "grad_norm": 2.1043314933776855, + "learning_rate": 0.00011953614682677581, + "loss": 0.316, + "step": 31260 + }, + { + "epoch": 1.2073825244217924, + "grad_norm": 2.9022254943847656, + "learning_rate": 0.00011951040580717404, + "loss": 0.3161, + "step": 31270 + }, + { + "epoch": 1.2077686397158192, + "grad_norm": 0.7839256525039673, + "learning_rate": 0.00011948466478757224, + "loss": 0.4014, + "step": 31280 + }, + { + "epoch": 1.208154755009846, + "grad_norm": 1.024190902709961, + "learning_rate": 0.00011945892376797045, + "loss": 0.2333, + "step": 31290 + }, + { + "epoch": 1.2085408703038727, + "grad_norm": 1.433605670928955, + "learning_rate": 0.00011943318274836868, + "loss": 0.2014, + "step": 31300 + }, + { + "epoch": 1.2089269855978995, + "grad_norm": 0.7208861112594604, + "learning_rate": 0.00011940744172876688, + "loss": 0.1786, + "step": 31310 + }, + { + "epoch": 1.2093131008919262, + "grad_norm": 2.5464839935302734, + "learning_rate": 0.0001193817007091651, + "loss": 0.3646, + "step": 31320 + }, + { + "epoch": 1.2096992161859532, + "grad_norm": 1.6425135135650635, + "learning_rate": 0.0001193559596895633, + "loss": 0.2603, + "step": 31330 + }, + { + "epoch": 1.21008533147998, + "grad_norm": 1.7573461532592773, + "learning_rate": 0.00011933021866996153, + "loss": 0.315, + "step": 31340 + }, + { + "epoch": 1.2104714467740068, + "grad_norm": 1.884445071220398, + "learning_rate": 0.00011930447765035974, + "loss": 0.2326, + "step": 31350 + }, + { + "epoch": 1.2108575620680335, + "grad_norm": 1.2781214714050293, + "learning_rate": 0.00011927873663075794, + "loss": 0.2248, + "step": 31360 + }, + { + "epoch": 1.2112436773620603, + "grad_norm": 0.5210689902305603, + "learning_rate": 0.00011925299561115617, + "loss": 0.2517, + "step": 31370 + }, + { + "epoch": 1.211629792656087, + "grad_norm": 0.4265996515750885, + "learning_rate": 0.00011922725459155437, + "loss": 0.257, + "step": 31380 + }, + { + "epoch": 1.2120159079501138, + "grad_norm": 0.7833511233329773, + "learning_rate": 0.0001192015135719526, + "loss": 0.3117, + "step": 31390 + }, + { + "epoch": 1.2124020232441408, + "grad_norm": 5.922067165374756, + "learning_rate": 0.0001191757725523508, + "loss": 0.2949, + "step": 31400 + }, + { + "epoch": 1.2127881385381676, + "grad_norm": 3.2208797931671143, + "learning_rate": 0.00011915003153274902, + "loss": 0.3616, + "step": 31410 + }, + { + "epoch": 1.2131742538321943, + "grad_norm": 1.6283681392669678, + "learning_rate": 0.00011912429051314724, + "loss": 0.1951, + "step": 31420 + }, + { + "epoch": 1.2135603691262211, + "grad_norm": 1.2450298070907593, + "learning_rate": 0.00011909854949354544, + "loss": 0.2313, + "step": 31430 + }, + { + "epoch": 1.2139464844202479, + "grad_norm": 0.8394368290901184, + "learning_rate": 0.00011907280847394366, + "loss": 0.2993, + "step": 31440 + }, + { + "epoch": 1.2143325997142747, + "grad_norm": 0.6099762320518494, + "learning_rate": 0.00011904706745434186, + "loss": 0.1527, + "step": 31450 + }, + { + "epoch": 1.2147187150083014, + "grad_norm": 0.7743226885795593, + "learning_rate": 0.00011902132643474009, + "loss": 0.2608, + "step": 31460 + }, + { + "epoch": 1.2151048303023282, + "grad_norm": 3.343604326248169, + "learning_rate": 0.00011899558541513829, + "loss": 0.3031, + "step": 31470 + }, + { + "epoch": 1.215490945596355, + 
"grad_norm": 1.016830563545227, + "learning_rate": 0.00011896984439553652, + "loss": 0.2146, + "step": 31480 + }, + { + "epoch": 1.215877060890382, + "grad_norm": 1.0264688730239868, + "learning_rate": 0.00011894410337593473, + "loss": 0.2905, + "step": 31490 + }, + { + "epoch": 1.2162631761844087, + "grad_norm": 1.5718705654144287, + "learning_rate": 0.00011891836235633293, + "loss": 0.2928, + "step": 31500 + }, + { + "epoch": 1.2166492914784355, + "grad_norm": 0.9886181950569153, + "learning_rate": 0.00011889262133673116, + "loss": 0.2129, + "step": 31510 + }, + { + "epoch": 1.2170354067724622, + "grad_norm": 0.6496497392654419, + "learning_rate": 0.00011886688031712936, + "loss": 0.2388, + "step": 31520 + }, + { + "epoch": 1.217421522066489, + "grad_norm": 2.0419809818267822, + "learning_rate": 0.00011884113929752758, + "loss": 0.1835, + "step": 31530 + }, + { + "epoch": 1.2178076373605158, + "grad_norm": 0.41370299458503723, + "learning_rate": 0.00011881539827792581, + "loss": 0.3099, + "step": 31540 + }, + { + "epoch": 1.2181937526545425, + "grad_norm": 0.6608594655990601, + "learning_rate": 0.00011878965725832401, + "loss": 0.4235, + "step": 31550 + }, + { + "epoch": 1.2185798679485695, + "grad_norm": 0.4206163287162781, + "learning_rate": 0.00011876391623872222, + "loss": 0.2384, + "step": 31560 + }, + { + "epoch": 1.2189659832425963, + "grad_norm": 0.2976207137107849, + "learning_rate": 0.00011873817521912042, + "loss": 0.4792, + "step": 31570 + }, + { + "epoch": 1.219352098536623, + "grad_norm": 2.237607002258301, + "learning_rate": 0.00011871243419951865, + "loss": 0.4694, + "step": 31580 + }, + { + "epoch": 1.2197382138306498, + "grad_norm": 2.1085827350616455, + "learning_rate": 0.00011868669317991685, + "loss": 0.3475, + "step": 31590 + }, + { + "epoch": 1.2201243291246766, + "grad_norm": 1.5770317316055298, + "learning_rate": 0.00011866095216031508, + "loss": 0.3676, + "step": 31600 + }, + { + "epoch": 1.2205104444187034, + "grad_norm": 1.7337925434112549, + "learning_rate": 0.0001186352111407133, + "loss": 0.2663, + "step": 31610 + }, + { + "epoch": 1.2208965597127301, + "grad_norm": 0.23846319317817688, + "learning_rate": 0.0001186094701211115, + "loss": 0.1916, + "step": 31620 + }, + { + "epoch": 1.2212826750067571, + "grad_norm": 1.1316229104995728, + "learning_rate": 0.00011858372910150972, + "loss": 0.1924, + "step": 31630 + }, + { + "epoch": 1.221668790300784, + "grad_norm": 0.7642451524734497, + "learning_rate": 0.00011855798808190792, + "loss": 0.3786, + "step": 31640 + }, + { + "epoch": 1.2220549055948107, + "grad_norm": 1.975182056427002, + "learning_rate": 0.00011853224706230614, + "loss": 0.2646, + "step": 31650 + }, + { + "epoch": 1.2224410208888374, + "grad_norm": 1.5172406435012817, + "learning_rate": 0.00011850650604270437, + "loss": 0.2824, + "step": 31660 + }, + { + "epoch": 1.2228271361828642, + "grad_norm": 1.5113869905471802, + "learning_rate": 0.00011848076502310257, + "loss": 0.2959, + "step": 31670 + }, + { + "epoch": 1.223213251476891, + "grad_norm": 2.380364418029785, + "learning_rate": 0.0001184550240035008, + "loss": 0.2425, + "step": 31680 + }, + { + "epoch": 1.2235993667709177, + "grad_norm": 0.11588902771472931, + "learning_rate": 0.000118429282983899, + "loss": 0.1665, + "step": 31690 + }, + { + "epoch": 1.2239854820649445, + "grad_norm": 2.045466184616089, + "learning_rate": 0.00011840354196429721, + "loss": 0.2478, + "step": 31700 + }, + { + "epoch": 1.2243715973589713, + "grad_norm": 4.201779365539551, + "learning_rate": 
0.00011837780094469542, + "loss": 0.3295, + "step": 31710 + }, + { + "epoch": 1.2247577126529983, + "grad_norm": 1.1080553531646729, + "learning_rate": 0.00011835205992509364, + "loss": 0.2102, + "step": 31720 + }, + { + "epoch": 1.225143827947025, + "grad_norm": 0.3263179659843445, + "learning_rate": 0.00011832631890549186, + "loss": 0.2822, + "step": 31730 + }, + { + "epoch": 1.2255299432410518, + "grad_norm": 0.7899855971336365, + "learning_rate": 0.00011830057788589006, + "loss": 0.1462, + "step": 31740 + }, + { + "epoch": 1.2259160585350786, + "grad_norm": 1.330854058265686, + "learning_rate": 0.00011827483686628829, + "loss": 0.3883, + "step": 31750 + }, + { + "epoch": 1.2263021738291053, + "grad_norm": 0.40446966886520386, + "learning_rate": 0.00011824909584668649, + "loss": 0.1592, + "step": 31760 + }, + { + "epoch": 1.226688289123132, + "grad_norm": 0.49187901616096497, + "learning_rate": 0.0001182233548270847, + "loss": 0.0698, + "step": 31770 + }, + { + "epoch": 1.2270744044171589, + "grad_norm": 4.227303504943848, + "learning_rate": 0.00011819761380748292, + "loss": 0.3945, + "step": 31780 + }, + { + "epoch": 1.2274605197111859, + "grad_norm": 2.520578622817993, + "learning_rate": 0.00011817187278788113, + "loss": 0.4552, + "step": 31790 + }, + { + "epoch": 1.2278466350052126, + "grad_norm": 0.9486772418022156, + "learning_rate": 0.00011814613176827936, + "loss": 0.232, + "step": 31800 + }, + { + "epoch": 1.2282327502992394, + "grad_norm": 0.8336694836616516, + "learning_rate": 0.00011812039074867756, + "loss": 0.183, + "step": 31810 + }, + { + "epoch": 1.2286188655932662, + "grad_norm": 1.576710820198059, + "learning_rate": 0.00011809464972907578, + "loss": 0.4116, + "step": 31820 + }, + { + "epoch": 1.229004980887293, + "grad_norm": 0.2388358861207962, + "learning_rate": 0.00011806890870947398, + "loss": 0.3509, + "step": 31830 + }, + { + "epoch": 1.2293910961813197, + "grad_norm": 1.054880142211914, + "learning_rate": 0.00011804316768987221, + "loss": 0.1669, + "step": 31840 + }, + { + "epoch": 1.2297772114753465, + "grad_norm": 0.7959414720535278, + "learning_rate": 0.00011801742667027042, + "loss": 0.2073, + "step": 31850 + }, + { + "epoch": 1.2301633267693735, + "grad_norm": 2.480940103530884, + "learning_rate": 0.00011799168565066862, + "loss": 0.3589, + "step": 31860 + }, + { + "epoch": 1.2305494420634002, + "grad_norm": 0.9381177425384521, + "learning_rate": 0.00011796594463106685, + "loss": 0.2576, + "step": 31870 + }, + { + "epoch": 1.230935557357427, + "grad_norm": 9.548282623291016, + "learning_rate": 0.00011794020361146505, + "loss": 0.1755, + "step": 31880 + }, + { + "epoch": 1.2313216726514538, + "grad_norm": 0.4922927916049957, + "learning_rate": 0.00011791446259186328, + "loss": 0.2173, + "step": 31890 + }, + { + "epoch": 1.2317077879454805, + "grad_norm": 1.2846627235412598, + "learning_rate": 0.00011788872157226148, + "loss": 0.2039, + "step": 31900 + }, + { + "epoch": 1.2320939032395073, + "grad_norm": 0.9321876764297485, + "learning_rate": 0.0001178629805526597, + "loss": 0.2418, + "step": 31910 + }, + { + "epoch": 1.232480018533534, + "grad_norm": 1.373787760734558, + "learning_rate": 0.00011783723953305792, + "loss": 0.3065, + "step": 31920 + }, + { + "epoch": 1.2328661338275608, + "grad_norm": 1.915208339691162, + "learning_rate": 0.00011781149851345612, + "loss": 0.292, + "step": 31930 + }, + { + "epoch": 1.2332522491215876, + "grad_norm": 1.533010482788086, + "learning_rate": 0.00011778575749385434, + "loss": 0.2234, + "step": 31940 + }, + { + 
"epoch": 1.2336383644156146, + "grad_norm": 0.5740505456924438, + "learning_rate": 0.00011776001647425254, + "loss": 0.2513, + "step": 31950 + }, + { + "epoch": 1.2340244797096414, + "grad_norm": 3.164320230484009, + "learning_rate": 0.00011773427545465077, + "loss": 0.4233, + "step": 31960 + }, + { + "epoch": 1.2344105950036681, + "grad_norm": 0.8309218287467957, + "learning_rate": 0.00011770853443504897, + "loss": 0.4629, + "step": 31970 + }, + { + "epoch": 1.2347967102976949, + "grad_norm": 2.0942423343658447, + "learning_rate": 0.0001176827934154472, + "loss": 0.3381, + "step": 31980 + }, + { + "epoch": 1.2351828255917217, + "grad_norm": 0.4961155354976654, + "learning_rate": 0.00011765705239584541, + "loss": 0.1265, + "step": 31990 + }, + { + "epoch": 1.2355689408857484, + "grad_norm": 1.6777870655059814, + "learning_rate": 0.00011763131137624361, + "loss": 0.2503, + "step": 32000 + }, + { + "epoch": 1.2359550561797752, + "grad_norm": 0.6016990542411804, + "learning_rate": 0.00011760557035664184, + "loss": 0.3944, + "step": 32010 + }, + { + "epoch": 1.2363411714738022, + "grad_norm": 1.9123533964157104, + "learning_rate": 0.00011757982933704004, + "loss": 0.463, + "step": 32020 + }, + { + "epoch": 1.236727286767829, + "grad_norm": 1.5328630208969116, + "learning_rate": 0.00011755408831743826, + "loss": 0.3368, + "step": 32030 + }, + { + "epoch": 1.2371134020618557, + "grad_norm": 1.0984220504760742, + "learning_rate": 0.00011752834729783649, + "loss": 0.1987, + "step": 32040 + }, + { + "epoch": 1.2374995173558825, + "grad_norm": 3.1570327281951904, + "learning_rate": 0.00011750260627823469, + "loss": 0.454, + "step": 32050 + }, + { + "epoch": 1.2378856326499093, + "grad_norm": 3.7589874267578125, + "learning_rate": 0.0001174768652586329, + "loss": 0.2953, + "step": 32060 + }, + { + "epoch": 1.238271747943936, + "grad_norm": 1.3289847373962402, + "learning_rate": 0.0001174511242390311, + "loss": 0.3058, + "step": 32070 + }, + { + "epoch": 1.2386578632379628, + "grad_norm": 0.9872431755065918, + "learning_rate": 0.00011742538321942933, + "loss": 0.1312, + "step": 32080 + }, + { + "epoch": 1.2390439785319898, + "grad_norm": 1.799133062362671, + "learning_rate": 0.00011739964219982753, + "loss": 0.3271, + "step": 32090 + }, + { + "epoch": 1.2394300938260165, + "grad_norm": 0.13501858711242676, + "learning_rate": 0.00011737390118022576, + "loss": 0.4462, + "step": 32100 + }, + { + "epoch": 1.2398162091200433, + "grad_norm": 1.53009033203125, + "learning_rate": 0.00011734816016062398, + "loss": 0.1311, + "step": 32110 + }, + { + "epoch": 1.24020232441407, + "grad_norm": 0.5737781524658203, + "learning_rate": 0.00011732241914102218, + "loss": 0.1705, + "step": 32120 + }, + { + "epoch": 1.2405884397080968, + "grad_norm": 2.095947027206421, + "learning_rate": 0.0001172966781214204, + "loss": 0.2195, + "step": 32130 + }, + { + "epoch": 1.2409745550021236, + "grad_norm": 2.2158639430999756, + "learning_rate": 0.0001172709371018186, + "loss": 0.2964, + "step": 32140 + }, + { + "epoch": 1.2413606702961504, + "grad_norm": 0.3154670000076294, + "learning_rate": 0.00011724519608221682, + "loss": 0.245, + "step": 32150 + }, + { + "epoch": 1.2417467855901771, + "grad_norm": 1.4467942714691162, + "learning_rate": 0.00011721945506261505, + "loss": 0.3469, + "step": 32160 + }, + { + "epoch": 1.242132900884204, + "grad_norm": 0.24252896010875702, + "learning_rate": 0.00011719371404301325, + "loss": 0.2217, + "step": 32170 + }, + { + "epoch": 1.242519016178231, + "grad_norm": 2.4256465435028076, + 
"learning_rate": 0.00011716797302341148, + "loss": 0.4707, + "step": 32180 + }, + { + "epoch": 1.2429051314722577, + "grad_norm": 0.5917278528213501, + "learning_rate": 0.00011714223200380968, + "loss": 0.2284, + "step": 32190 + }, + { + "epoch": 1.2432912467662844, + "grad_norm": 1.2977256774902344, + "learning_rate": 0.00011711649098420789, + "loss": 0.3612, + "step": 32200 + }, + { + "epoch": 1.2436773620603112, + "grad_norm": 1.296558141708374, + "learning_rate": 0.00011709074996460609, + "loss": 0.2231, + "step": 32210 + }, + { + "epoch": 1.244063477354338, + "grad_norm": 1.6559596061706543, + "learning_rate": 0.00011706500894500432, + "loss": 0.2636, + "step": 32220 + }, + { + "epoch": 1.2444495926483647, + "grad_norm": 1.6342560052871704, + "learning_rate": 0.00011703926792540254, + "loss": 0.1276, + "step": 32230 + }, + { + "epoch": 1.2448357079423915, + "grad_norm": 1.1173146963119507, + "learning_rate": 0.00011701352690580074, + "loss": 0.1719, + "step": 32240 + }, + { + "epoch": 1.2452218232364185, + "grad_norm": 0.29269275069236755, + "learning_rate": 0.00011698778588619897, + "loss": 0.3323, + "step": 32250 + }, + { + "epoch": 1.2456079385304453, + "grad_norm": 2.518568515777588, + "learning_rate": 0.00011696204486659717, + "loss": 0.3302, + "step": 32260 + }, + { + "epoch": 1.245994053824472, + "grad_norm": 2.535940647125244, + "learning_rate": 0.00011693630384699538, + "loss": 0.1488, + "step": 32270 + }, + { + "epoch": 1.2463801691184988, + "grad_norm": 0.384199321269989, + "learning_rate": 0.00011691056282739358, + "loss": 0.0957, + "step": 32280 + }, + { + "epoch": 1.2467662844125256, + "grad_norm": 3.7460570335388184, + "learning_rate": 0.00011688482180779181, + "loss": 0.311, + "step": 32290 + }, + { + "epoch": 1.2471523997065523, + "grad_norm": 1.285152792930603, + "learning_rate": 0.00011685908078819004, + "loss": 0.1872, + "step": 32300 + }, + { + "epoch": 1.247538515000579, + "grad_norm": 0.6118050217628479, + "learning_rate": 0.00011683333976858823, + "loss": 0.1635, + "step": 32310 + }, + { + "epoch": 1.2479246302946059, + "grad_norm": 2.9890856742858887, + "learning_rate": 0.00011680759874898646, + "loss": 0.2773, + "step": 32320 + }, + { + "epoch": 1.2483107455886326, + "grad_norm": 0.15415504574775696, + "learning_rate": 0.00011678185772938466, + "loss": 0.3014, + "step": 32330 + }, + { + "epoch": 1.2486968608826596, + "grad_norm": 1.2717432975769043, + "learning_rate": 0.00011675611670978287, + "loss": 0.2442, + "step": 32340 + }, + { + "epoch": 1.2490829761766864, + "grad_norm": 0.19885335862636566, + "learning_rate": 0.0001167303756901811, + "loss": 0.1894, + "step": 32350 + }, + { + "epoch": 1.2494690914707132, + "grad_norm": 1.425176978111267, + "learning_rate": 0.0001167046346705793, + "loss": 0.3008, + "step": 32360 + }, + { + "epoch": 1.24985520676474, + "grad_norm": 1.9033544063568115, + "learning_rate": 0.00011667889365097753, + "loss": 0.3072, + "step": 32370 + }, + { + "epoch": 1.2502413220587667, + "grad_norm": 0.6677396297454834, + "learning_rate": 0.00011665315263137573, + "loss": 0.2378, + "step": 32380 + }, + { + "epoch": 1.2506274373527935, + "grad_norm": 0.4491410255432129, + "learning_rate": 0.00011662741161177396, + "loss": 0.1071, + "step": 32390 + }, + { + "epoch": 1.2510135526468202, + "grad_norm": 2.7697031497955322, + "learning_rate": 0.00011660167059217215, + "loss": 0.3276, + "step": 32400 + }, + { + "epoch": 1.2513996679408472, + "grad_norm": 1.4374775886535645, + "learning_rate": 0.00011657592957257037, + "loss": 0.2459, + "step": 
32410 + }, + { + "epoch": 1.251785783234874, + "grad_norm": 1.5245740413665771, + "learning_rate": 0.0001165501885529686, + "loss": 0.3841, + "step": 32420 + }, + { + "epoch": 1.2521718985289008, + "grad_norm": 0.5069687366485596, + "learning_rate": 0.0001165244475333668, + "loss": 0.1425, + "step": 32430 + }, + { + "epoch": 1.2525580138229275, + "grad_norm": 0.6181765198707581, + "learning_rate": 0.00011649870651376502, + "loss": 0.4184, + "step": 32440 + }, + { + "epoch": 1.2529441291169543, + "grad_norm": 2.007375717163086, + "learning_rate": 0.00011647296549416322, + "loss": 0.1754, + "step": 32450 + }, + { + "epoch": 1.253330244410981, + "grad_norm": 0.27261993288993835, + "learning_rate": 0.00011644722447456145, + "loss": 0.3608, + "step": 32460 + }, + { + "epoch": 1.2537163597050078, + "grad_norm": 0.3452116549015045, + "learning_rate": 0.00011642148345495965, + "loss": 0.1913, + "step": 32470 + }, + { + "epoch": 1.2541024749990348, + "grad_norm": 2.553738594055176, + "learning_rate": 0.00011639574243535787, + "loss": 0.2221, + "step": 32480 + }, + { + "epoch": 1.2544885902930616, + "grad_norm": 0.795706033706665, + "learning_rate": 0.00011637000141575609, + "loss": 0.2348, + "step": 32490 + }, + { + "epoch": 1.2548747055870884, + "grad_norm": 0.519059956073761, + "learning_rate": 0.00011634426039615429, + "loss": 0.2874, + "step": 32500 + }, + { + "epoch": 1.2552608208811151, + "grad_norm": 2.7050392627716064, + "learning_rate": 0.00011631851937655251, + "loss": 0.3821, + "step": 32510 + }, + { + "epoch": 1.255646936175142, + "grad_norm": 2.09036922454834, + "learning_rate": 0.00011629277835695071, + "loss": 0.2562, + "step": 32520 + }, + { + "epoch": 1.2560330514691687, + "grad_norm": 1.156534194946289, + "learning_rate": 0.00011626703733734894, + "loss": 0.1373, + "step": 32530 + }, + { + "epoch": 1.2564191667631954, + "grad_norm": 2.5214719772338867, + "learning_rate": 0.00011624129631774715, + "loss": 0.2955, + "step": 32540 + }, + { + "epoch": 1.2568052820572224, + "grad_norm": 2.71368408203125, + "learning_rate": 0.00011621555529814537, + "loss": 0.2832, + "step": 32550 + }, + { + "epoch": 1.257191397351249, + "grad_norm": 1.488390564918518, + "learning_rate": 0.00011618981427854358, + "loss": 0.2369, + "step": 32560 + }, + { + "epoch": 1.257577512645276, + "grad_norm": 1.7705353498458862, + "learning_rate": 0.00011616407325894178, + "loss": 0.4351, + "step": 32570 + }, + { + "epoch": 1.2579636279393027, + "grad_norm": 4.3224406242370605, + "learning_rate": 0.00011613833223934001, + "loss": 0.3365, + "step": 32580 + }, + { + "epoch": 1.2583497432333295, + "grad_norm": 0.9157351851463318, + "learning_rate": 0.00011611259121973821, + "loss": 0.2882, + "step": 32590 + }, + { + "epoch": 1.2587358585273563, + "grad_norm": 2.9030823707580566, + "learning_rate": 0.00011608685020013643, + "loss": 0.343, + "step": 32600 + }, + { + "epoch": 1.259121973821383, + "grad_norm": 0.807424783706665, + "learning_rate": 0.00011606110918053466, + "loss": 0.2413, + "step": 32610 + }, + { + "epoch": 1.2595080891154098, + "grad_norm": 1.8564451932907104, + "learning_rate": 0.00011603536816093286, + "loss": 0.5403, + "step": 32620 + }, + { + "epoch": 1.2598942044094366, + "grad_norm": 0.8169263005256653, + "learning_rate": 0.00011600962714133107, + "loss": 0.3522, + "step": 32630 + }, + { + "epoch": 1.2602803197034635, + "grad_norm": 0.9638017416000366, + "learning_rate": 0.00011598388612172927, + "loss": 0.3295, + "step": 32640 + }, + { + "epoch": 1.2606664349974903, + "grad_norm": 
1.8613898754119873, + "learning_rate": 0.0001159581451021275, + "loss": 0.3107, + "step": 32650 + }, + { + "epoch": 1.261052550291517, + "grad_norm": 2.582638740539551, + "learning_rate": 0.00011593240408252573, + "loss": 0.2767, + "step": 32660 + }, + { + "epoch": 1.2614386655855439, + "grad_norm": 1.8227603435516357, + "learning_rate": 0.00011590666306292393, + "loss": 0.2324, + "step": 32670 + }, + { + "epoch": 1.2618247808795706, + "grad_norm": 0.3827721178531647, + "learning_rate": 0.00011588092204332215, + "loss": 0.4926, + "step": 32680 + }, + { + "epoch": 1.2622108961735974, + "grad_norm": 1.5523405075073242, + "learning_rate": 0.00011585518102372035, + "loss": 0.4475, + "step": 32690 + }, + { + "epoch": 1.2625970114676242, + "grad_norm": 0.25254619121551514, + "learning_rate": 0.00011582944000411857, + "loss": 0.2333, + "step": 32700 + }, + { + "epoch": 1.2629831267616511, + "grad_norm": 0.5530809164047241, + "learning_rate": 0.00011580369898451677, + "loss": 0.2781, + "step": 32710 + }, + { + "epoch": 1.2633692420556777, + "grad_norm": 0.3202857971191406, + "learning_rate": 0.000115777957964915, + "loss": 0.2596, + "step": 32720 + }, + { + "epoch": 1.2637553573497047, + "grad_norm": 0.25663653016090393, + "learning_rate": 0.00011575221694531322, + "loss": 0.2096, + "step": 32730 + }, + { + "epoch": 1.2641414726437314, + "grad_norm": 2.423585891723633, + "learning_rate": 0.00011572647592571142, + "loss": 0.3064, + "step": 32740 + }, + { + "epoch": 1.2645275879377582, + "grad_norm": 0.2529740631580353, + "learning_rate": 0.00011570073490610965, + "loss": 0.1426, + "step": 32750 + }, + { + "epoch": 1.264913703231785, + "grad_norm": 0.5238136053085327, + "learning_rate": 0.00011567499388650785, + "loss": 0.1598, + "step": 32760 + }, + { + "epoch": 1.2652998185258117, + "grad_norm": 0.4495049715042114, + "learning_rate": 0.00011564925286690606, + "loss": 0.1712, + "step": 32770 + }, + { + "epoch": 1.2656859338198387, + "grad_norm": 0.3863711953163147, + "learning_rate": 0.00011562351184730426, + "loss": 0.1015, + "step": 32780 + }, + { + "epoch": 1.2660720491138653, + "grad_norm": 1.3139948844909668, + "learning_rate": 0.00011559777082770249, + "loss": 0.3982, + "step": 32790 + }, + { + "epoch": 1.2664581644078923, + "grad_norm": 0.40500277280807495, + "learning_rate": 0.00011557202980810071, + "loss": 0.4211, + "step": 32800 + }, + { + "epoch": 1.266844279701919, + "grad_norm": 0.9749487042427063, + "learning_rate": 0.00011554628878849891, + "loss": 0.2773, + "step": 32810 + }, + { + "epoch": 1.2672303949959458, + "grad_norm": 1.1194144487380981, + "learning_rate": 0.00011552054776889714, + "loss": 0.2771, + "step": 32820 + }, + { + "epoch": 1.2676165102899726, + "grad_norm": 1.4098531007766724, + "learning_rate": 0.00011549480674929534, + "loss": 0.2044, + "step": 32830 + }, + { + "epoch": 1.2680026255839993, + "grad_norm": 1.3184692859649658, + "learning_rate": 0.00011546906572969355, + "loss": 0.2633, + "step": 32840 + }, + { + "epoch": 1.268388740878026, + "grad_norm": 0.9776495099067688, + "learning_rate": 0.00011544332471009178, + "loss": 0.3212, + "step": 32850 + }, + { + "epoch": 1.2687748561720529, + "grad_norm": 1.1577869653701782, + "learning_rate": 0.00011541758369048998, + "loss": 0.4271, + "step": 32860 + }, + { + "epoch": 1.2691609714660799, + "grad_norm": 2.2742063999176025, + "learning_rate": 0.00011539184267088821, + "loss": 0.2044, + "step": 32870 + }, + { + "epoch": 1.2695470867601066, + "grad_norm": 1.2156758308410645, + "learning_rate": 0.00011536610165128641, + 
"loss": 0.2394, + "step": 32880 + }, + { + "epoch": 1.2699332020541334, + "grad_norm": 1.9160290956497192, + "learning_rate": 0.00011534036063168463, + "loss": 0.3733, + "step": 32890 + }, + { + "epoch": 1.2703193173481602, + "grad_norm": 1.3484338521957397, + "learning_rate": 0.00011531461961208283, + "loss": 0.3946, + "step": 32900 + }, + { + "epoch": 1.270705432642187, + "grad_norm": 1.0480244159698486, + "learning_rate": 0.00011528887859248105, + "loss": 0.166, + "step": 32910 + }, + { + "epoch": 1.2710915479362137, + "grad_norm": 1.1980483531951904, + "learning_rate": 0.00011526313757287927, + "loss": 0.3134, + "step": 32920 + }, + { + "epoch": 1.2714776632302405, + "grad_norm": 1.2777379751205444, + "learning_rate": 0.00011523739655327747, + "loss": 0.2615, + "step": 32930 + }, + { + "epoch": 1.2718637785242675, + "grad_norm": 1.559792399406433, + "learning_rate": 0.0001152116555336757, + "loss": 0.2996, + "step": 32940 + }, + { + "epoch": 1.272249893818294, + "grad_norm": 1.9589471817016602, + "learning_rate": 0.0001151859145140739, + "loss": 0.2683, + "step": 32950 + }, + { + "epoch": 1.272636009112321, + "grad_norm": 0.23336051404476166, + "learning_rate": 0.00011516017349447213, + "loss": 0.3306, + "step": 32960 + }, + { + "epoch": 1.2730221244063478, + "grad_norm": 1.5898360013961792, + "learning_rate": 0.00011513443247487034, + "loss": 0.2891, + "step": 32970 + }, + { + "epoch": 1.2734082397003745, + "grad_norm": 0.5046207904815674, + "learning_rate": 0.00011510869145526854, + "loss": 0.1843, + "step": 32980 + }, + { + "epoch": 1.2737943549944013, + "grad_norm": 0.16588473320007324, + "learning_rate": 0.00011508295043566677, + "loss": 0.1065, + "step": 32990 + }, + { + "epoch": 1.274180470288428, + "grad_norm": 1.5499800443649292, + "learning_rate": 0.00011505720941606497, + "loss": 0.2795, + "step": 33000 + }, + { + "epoch": 1.274566585582455, + "grad_norm": 0.5019121766090393, + "learning_rate": 0.0001150314683964632, + "loss": 0.1857, + "step": 33010 + }, + { + "epoch": 1.2749527008764816, + "grad_norm": 0.258952260017395, + "learning_rate": 0.0001150057273768614, + "loss": 0.11, + "step": 33020 + }, + { + "epoch": 1.2753388161705086, + "grad_norm": 0.8540348410606384, + "learning_rate": 0.00011497998635725962, + "loss": 0.1852, + "step": 33030 + }, + { + "epoch": 1.2757249314645354, + "grad_norm": 0.08689398318529129, + "learning_rate": 0.00011495424533765783, + "loss": 0.2225, + "step": 33040 + }, + { + "epoch": 1.2761110467585621, + "grad_norm": 0.42253822088241577, + "learning_rate": 0.00011492850431805605, + "loss": 0.0751, + "step": 33050 + }, + { + "epoch": 1.276497162052589, + "grad_norm": 1.2964017391204834, + "learning_rate": 0.00011490276329845426, + "loss": 0.2384, + "step": 33060 + }, + { + "epoch": 1.2768832773466157, + "grad_norm": 0.5337836146354675, + "learning_rate": 0.00011487702227885246, + "loss": 0.1415, + "step": 33070 + }, + { + "epoch": 1.2772693926406424, + "grad_norm": 2.7771682739257812, + "learning_rate": 0.00011485128125925069, + "loss": 0.378, + "step": 33080 + }, + { + "epoch": 1.2776555079346692, + "grad_norm": 1.5107232332229614, + "learning_rate": 0.00011482554023964889, + "loss": 0.2482, + "step": 33090 + }, + { + "epoch": 1.2780416232286962, + "grad_norm": 0.6885499358177185, + "learning_rate": 0.00011479979922004711, + "loss": 0.2122, + "step": 33100 + }, + { + "epoch": 1.278427738522723, + "grad_norm": 0.9016557335853577, + "learning_rate": 0.00011477405820044533, + "loss": 0.2841, + "step": 33110 + }, + { + "epoch": 1.2788138538167497, 
+ "grad_norm": 1.9532525539398193, + "learning_rate": 0.00011474831718084354, + "loss": 0.2281, + "step": 33120 + }, + { + "epoch": 1.2791999691107765, + "grad_norm": 2.1078782081604004, + "learning_rate": 0.00011472257616124175, + "loss": 0.2836, + "step": 33130 + }, + { + "epoch": 1.2795860844048033, + "grad_norm": 0.19830390810966492, + "learning_rate": 0.00011469683514163995, + "loss": 0.388, + "step": 33140 + }, + { + "epoch": 1.27997219969883, + "grad_norm": 0.17538850009441376, + "learning_rate": 0.00011467109412203818, + "loss": 0.3274, + "step": 33150 + }, + { + "epoch": 1.2803583149928568, + "grad_norm": 0.7402139902114868, + "learning_rate": 0.00011464535310243641, + "loss": 0.1979, + "step": 33160 + }, + { + "epoch": 1.2807444302868838, + "grad_norm": 0.2097146362066269, + "learning_rate": 0.00011461961208283461, + "loss": 0.2464, + "step": 33170 + }, + { + "epoch": 1.2811305455809103, + "grad_norm": 1.2441083192825317, + "learning_rate": 0.00011459387106323283, + "loss": 0.266, + "step": 33180 + }, + { + "epoch": 1.2815166608749373, + "grad_norm": 2.518852710723877, + "learning_rate": 0.00011456813004363103, + "loss": 0.253, + "step": 33190 + }, + { + "epoch": 1.281902776168964, + "grad_norm": 0.8078998327255249, + "learning_rate": 0.00011454238902402925, + "loss": 0.2361, + "step": 33200 + }, + { + "epoch": 1.2822888914629909, + "grad_norm": 1.2297371625900269, + "learning_rate": 0.00011451664800442745, + "loss": 0.1974, + "step": 33210 + }, + { + "epoch": 1.2826750067570176, + "grad_norm": 0.4303855895996094, + "learning_rate": 0.00011449090698482567, + "loss": 0.3563, + "step": 33220 + }, + { + "epoch": 1.2830611220510444, + "grad_norm": 1.3215210437774658, + "learning_rate": 0.0001144651659652239, + "loss": 0.2818, + "step": 33230 + }, + { + "epoch": 1.2834472373450712, + "grad_norm": 1.546265959739685, + "learning_rate": 0.0001144394249456221, + "loss": 0.5778, + "step": 33240 + }, + { + "epoch": 1.283833352639098, + "grad_norm": 0.8895953297615051, + "learning_rate": 0.00011441368392602033, + "loss": 0.2485, + "step": 33250 + }, + { + "epoch": 1.284219467933125, + "grad_norm": 0.7534870505332947, + "learning_rate": 0.00011438794290641853, + "loss": 0.2825, + "step": 33260 + }, + { + "epoch": 1.2846055832271517, + "grad_norm": 0.052820973098278046, + "learning_rate": 0.00011436220188681674, + "loss": 0.2191, + "step": 33270 + }, + { + "epoch": 1.2849916985211784, + "grad_norm": 0.9264475107192993, + "learning_rate": 0.00011433646086721494, + "loss": 0.181, + "step": 33280 + }, + { + "epoch": 1.2853778138152052, + "grad_norm": 0.2128441333770752, + "learning_rate": 0.00011431071984761317, + "loss": 0.1819, + "step": 33290 + }, + { + "epoch": 1.285763929109232, + "grad_norm": 0.5400950312614441, + "learning_rate": 0.0001142849788280114, + "loss": 0.4316, + "step": 33300 + }, + { + "epoch": 1.2861500444032588, + "grad_norm": 0.8033271431922913, + "learning_rate": 0.00011425923780840959, + "loss": 0.2146, + "step": 33310 + }, + { + "epoch": 1.2865361596972855, + "grad_norm": 2.012575149536133, + "learning_rate": 0.00011423349678880782, + "loss": 0.4335, + "step": 33320 + }, + { + "epoch": 1.2869222749913125, + "grad_norm": 0.7352376580238342, + "learning_rate": 0.00011420775576920602, + "loss": 0.2124, + "step": 33330 + }, + { + "epoch": 1.2873083902853393, + "grad_norm": 0.769036591053009, + "learning_rate": 0.00011418201474960423, + "loss": 0.3602, + "step": 33340 + }, + { + "epoch": 1.287694505579366, + "grad_norm": 0.250592976808548, + "learning_rate": 
0.00011415627373000246, + "loss": 0.1692, + "step": 33350 + }, + { + "epoch": 1.2880806208733928, + "grad_norm": 2.43820858001709, + "learning_rate": 0.00011413053271040066, + "loss": 0.2777, + "step": 33360 + }, + { + "epoch": 1.2884667361674196, + "grad_norm": 1.3179954290390015, + "learning_rate": 0.00011410479169079889, + "loss": 0.1794, + "step": 33370 + }, + { + "epoch": 1.2888528514614463, + "grad_norm": 1.0040466785430908, + "learning_rate": 0.00011407905067119709, + "loss": 0.3037, + "step": 33380 + }, + { + "epoch": 1.2892389667554731, + "grad_norm": 5.296288013458252, + "learning_rate": 0.00011405330965159531, + "loss": 0.2904, + "step": 33390 + }, + { + "epoch": 1.2896250820495, + "grad_norm": 0.4267273247241974, + "learning_rate": 0.00011402756863199351, + "loss": 0.2263, + "step": 33400 + }, + { + "epoch": 1.2900111973435266, + "grad_norm": 0.8817713260650635, + "learning_rate": 0.00011400182761239173, + "loss": 0.2715, + "step": 33410 + }, + { + "epoch": 1.2903973126375536, + "grad_norm": 2.7891275882720947, + "learning_rate": 0.00011397608659278995, + "loss": 0.2781, + "step": 33420 + }, + { + "epoch": 1.2907834279315804, + "grad_norm": 0.3013952374458313, + "learning_rate": 0.00011395034557318815, + "loss": 0.2563, + "step": 33430 + }, + { + "epoch": 1.2911695432256072, + "grad_norm": 1.766413927078247, + "learning_rate": 0.00011392460455358638, + "loss": 0.1854, + "step": 33440 + }, + { + "epoch": 1.291555658519634, + "grad_norm": 0.25331103801727295, + "learning_rate": 0.00011389886353398458, + "loss": 0.1206, + "step": 33450 + }, + { + "epoch": 1.2919417738136607, + "grad_norm": 0.036400288343429565, + "learning_rate": 0.0001138731225143828, + "loss": 0.4707, + "step": 33460 + }, + { + "epoch": 1.2923278891076875, + "grad_norm": 1.5620888471603394, + "learning_rate": 0.00011384738149478102, + "loss": 0.3312, + "step": 33470 + }, + { + "epoch": 1.2927140044017142, + "grad_norm": 0.6670392155647278, + "learning_rate": 0.00011382164047517922, + "loss": 0.2341, + "step": 33480 + }, + { + "epoch": 1.2931001196957412, + "grad_norm": 2.3108737468719482, + "learning_rate": 0.00011379589945557745, + "loss": 0.3843, + "step": 33490 + }, + { + "epoch": 1.293486234989768, + "grad_norm": 0.8025147318840027, + "learning_rate": 0.00011377015843597565, + "loss": 0.1982, + "step": 33500 + }, + { + "epoch": 1.2938723502837948, + "grad_norm": 1.7835719585418701, + "learning_rate": 0.00011374441741637387, + "loss": 0.3285, + "step": 33510 + }, + { + "epoch": 1.2942584655778215, + "grad_norm": 2.041508913040161, + "learning_rate": 0.00011371867639677207, + "loss": 0.2044, + "step": 33520 + }, + { + "epoch": 1.2946445808718483, + "grad_norm": 1.103378415107727, + "learning_rate": 0.0001136929353771703, + "loss": 0.1682, + "step": 33530 + }, + { + "epoch": 1.295030696165875, + "grad_norm": 0.057376351207494736, + "learning_rate": 0.00011366719435756851, + "loss": 0.1642, + "step": 33540 + }, + { + "epoch": 1.2954168114599018, + "grad_norm": 0.6539410948753357, + "learning_rate": 0.00011364145333796671, + "loss": 0.1549, + "step": 33550 + }, + { + "epoch": 1.2958029267539288, + "grad_norm": 1.250543236732483, + "learning_rate": 0.00011361571231836494, + "loss": 0.3764, + "step": 33560 + }, + { + "epoch": 1.2961890420479556, + "grad_norm": 0.23697887361049652, + "learning_rate": 0.00011358997129876314, + "loss": 0.3999, + "step": 33570 + }, + { + "epoch": 1.2965751573419824, + "grad_norm": 0.9318505525588989, + "learning_rate": 0.00011356423027916137, + "loss": 0.4156, + "step": 33580 + }, + { 
+ "epoch": 1.2969612726360091, + "grad_norm": 1.3910777568817139, + "learning_rate": 0.00011353848925955957, + "loss": 0.3455, + "step": 33590 + }, + { + "epoch": 1.297347387930036, + "grad_norm": 1.6764451265335083, + "learning_rate": 0.00011351274823995779, + "loss": 0.1884, + "step": 33600 + }, + { + "epoch": 1.2977335032240627, + "grad_norm": 0.9300051927566528, + "learning_rate": 0.000113487007220356, + "loss": 0.119, + "step": 33610 + }, + { + "epoch": 1.2981196185180894, + "grad_norm": 2.447462558746338, + "learning_rate": 0.00011346126620075422, + "loss": 0.4403, + "step": 33620 + }, + { + "epoch": 1.2985057338121164, + "grad_norm": 1.216407060623169, + "learning_rate": 0.00011343552518115243, + "loss": 0.2415, + "step": 33630 + }, + { + "epoch": 1.298891849106143, + "grad_norm": 2.968648910522461, + "learning_rate": 0.00011340978416155063, + "loss": 0.2899, + "step": 33640 + }, + { + "epoch": 1.29927796440017, + "grad_norm": 0.6649970412254333, + "learning_rate": 0.00011338404314194886, + "loss": 0.3809, + "step": 33650 + }, + { + "epoch": 1.2996640796941967, + "grad_norm": 1.7277917861938477, + "learning_rate": 0.00011335830212234709, + "loss": 0.3308, + "step": 33660 + }, + { + "epoch": 1.3000501949882235, + "grad_norm": 1.3269709348678589, + "learning_rate": 0.00011333256110274529, + "loss": 0.3682, + "step": 33670 + }, + { + "epoch": 1.3004363102822503, + "grad_norm": 0.20609407126903534, + "learning_rate": 0.0001133068200831435, + "loss": 0.1379, + "step": 33680 + }, + { + "epoch": 1.300822425576277, + "grad_norm": 0.6592215299606323, + "learning_rate": 0.00011328107906354171, + "loss": 0.2746, + "step": 33690 + }, + { + "epoch": 1.3012085408703038, + "grad_norm": 1.903635859489441, + "learning_rate": 0.00011325533804393993, + "loss": 0.4729, + "step": 33700 + }, + { + "epoch": 1.3015946561643306, + "grad_norm": 0.8432504534721375, + "learning_rate": 0.00011322959702433813, + "loss": 0.2835, + "step": 33710 + }, + { + "epoch": 1.3019807714583576, + "grad_norm": 0.9862542152404785, + "learning_rate": 0.00011320385600473635, + "loss": 0.1823, + "step": 33720 + }, + { + "epoch": 1.3023668867523843, + "grad_norm": 3.845738649368286, + "learning_rate": 0.00011317811498513458, + "loss": 0.2542, + "step": 33730 + }, + { + "epoch": 1.302753002046411, + "grad_norm": 0.6317747235298157, + "learning_rate": 0.00011315237396553278, + "loss": 0.22, + "step": 33740 + }, + { + "epoch": 1.3031391173404379, + "grad_norm": 2.5221354961395264, + "learning_rate": 0.000113126632945931, + "loss": 0.2253, + "step": 33750 + }, + { + "epoch": 1.3035252326344646, + "grad_norm": 1.3326247930526733, + "learning_rate": 0.0001131008919263292, + "loss": 0.2021, + "step": 33760 + }, + { + "epoch": 1.3039113479284914, + "grad_norm": 1.148047685623169, + "learning_rate": 0.00011307515090672742, + "loss": 0.3987, + "step": 33770 + }, + { + "epoch": 1.3042974632225182, + "grad_norm": 0.19721268117427826, + "learning_rate": 0.00011304940988712562, + "loss": 0.2642, + "step": 33780 + }, + { + "epoch": 1.3046835785165452, + "grad_norm": 1.4060617685317993, + "learning_rate": 0.00011302366886752385, + "loss": 0.2736, + "step": 33790 + }, + { + "epoch": 1.305069693810572, + "grad_norm": 1.0736548900604248, + "learning_rate": 0.00011299792784792207, + "loss": 0.2448, + "step": 33800 + }, + { + "epoch": 1.3054558091045987, + "grad_norm": 4.352476119995117, + "learning_rate": 0.00011297218682832027, + "loss": 0.383, + "step": 33810 + }, + { + "epoch": 1.3058419243986255, + "grad_norm": 0.2249228060245514, + 
"learning_rate": 0.0001129464458087185, + "loss": 0.14, + "step": 33820 + }, + { + "epoch": 1.3062280396926522, + "grad_norm": 0.4820781946182251, + "learning_rate": 0.0001129207047891167, + "loss": 0.248, + "step": 33830 + }, + { + "epoch": 1.306614154986679, + "grad_norm": 2.2983391284942627, + "learning_rate": 0.00011289496376951491, + "loss": 0.2608, + "step": 33840 + }, + { + "epoch": 1.3070002702807058, + "grad_norm": 1.3315671682357788, + "learning_rate": 0.00011286922274991314, + "loss": 0.1727, + "step": 33850 + }, + { + "epoch": 1.3073863855747327, + "grad_norm": 2.060299873352051, + "learning_rate": 0.00011284348173031134, + "loss": 0.3097, + "step": 33860 + }, + { + "epoch": 1.3077725008687593, + "grad_norm": 2.096285581588745, + "learning_rate": 0.00011281774071070957, + "loss": 0.2305, + "step": 33870 + }, + { + "epoch": 1.3081586161627863, + "grad_norm": 0.4997636675834656, + "learning_rate": 0.00011279199969110777, + "loss": 0.1993, + "step": 33880 + }, + { + "epoch": 1.308544731456813, + "grad_norm": 0.58636474609375, + "learning_rate": 0.00011276625867150599, + "loss": 0.1873, + "step": 33890 + }, + { + "epoch": 1.3089308467508398, + "grad_norm": 0.9128592610359192, + "learning_rate": 0.00011274051765190419, + "loss": 0.1885, + "step": 33900 + }, + { + "epoch": 1.3093169620448666, + "grad_norm": 2.228043794631958, + "learning_rate": 0.0001127147766323024, + "loss": 0.3649, + "step": 33910 + }, + { + "epoch": 1.3097030773388934, + "grad_norm": 1.069002389907837, + "learning_rate": 0.00011268903561270063, + "loss": 0.5454, + "step": 33920 + }, + { + "epoch": 1.3100891926329201, + "grad_norm": 0.6207597851753235, + "learning_rate": 0.00011266329459309883, + "loss": 0.2329, + "step": 33930 + }, + { + "epoch": 1.3104753079269469, + "grad_norm": 1.262247920036316, + "learning_rate": 0.00011263755357349706, + "loss": 0.3437, + "step": 33940 + }, + { + "epoch": 1.3108614232209739, + "grad_norm": 1.7429994344711304, + "learning_rate": 0.00011261181255389526, + "loss": 0.228, + "step": 33950 + }, + { + "epoch": 1.3112475385150006, + "grad_norm": 0.646900475025177, + "learning_rate": 0.00011258607153429349, + "loss": 0.3739, + "step": 33960 + }, + { + "epoch": 1.3116336538090274, + "grad_norm": 1.8228782415390015, + "learning_rate": 0.0001125603305146917, + "loss": 0.2325, + "step": 33970 + }, + { + "epoch": 1.3120197691030542, + "grad_norm": 3.539228916168213, + "learning_rate": 0.0001125345894950899, + "loss": 0.194, + "step": 33980 + }, + { + "epoch": 1.312405884397081, + "grad_norm": 1.2801135778427124, + "learning_rate": 0.00011250884847548813, + "loss": 0.3069, + "step": 33990 + }, + { + "epoch": 1.3127919996911077, + "grad_norm": 3.6265695095062256, + "learning_rate": 0.00011248310745588633, + "loss": 0.3113, + "step": 34000 + }, + { + "epoch": 1.3131781149851345, + "grad_norm": 0.07370063662528992, + "learning_rate": 0.00011245736643628455, + "loss": 0.1449, + "step": 34010 + }, + { + "epoch": 1.3135642302791615, + "grad_norm": 1.0295637845993042, + "learning_rate": 0.00011243162541668275, + "loss": 0.22, + "step": 34020 + }, + { + "epoch": 1.313950345573188, + "grad_norm": 0.8803662061691284, + "learning_rate": 0.00011240588439708098, + "loss": 0.1368, + "step": 34030 + }, + { + "epoch": 1.314336460867215, + "grad_norm": 1.6597707271575928, + "learning_rate": 0.00011238014337747919, + "loss": 0.3038, + "step": 34040 + }, + { + "epoch": 1.3147225761612418, + "grad_norm": 2.115492343902588, + "learning_rate": 0.00011235440235787739, + "loss": 0.1754, + "step": 34050 + }, + { 
+ "epoch": 1.3151086914552685, + "grad_norm": 0.8143919706344604, + "learning_rate": 0.00011232866133827562, + "loss": 0.3764, + "step": 34060 + }, + { + "epoch": 1.3154948067492953, + "grad_norm": 0.14369767904281616, + "learning_rate": 0.00011230292031867382, + "loss": 0.1742, + "step": 34070 + }, + { + "epoch": 1.315880922043322, + "grad_norm": 1.0129845142364502, + "learning_rate": 0.00011227717929907205, + "loss": 0.1458, + "step": 34080 + }, + { + "epoch": 1.316267037337349, + "grad_norm": 2.7300291061401367, + "learning_rate": 0.00011225143827947025, + "loss": 0.3939, + "step": 34090 + }, + { + "epoch": 1.3166531526313756, + "grad_norm": 0.20205609500408173, + "learning_rate": 0.00011222569725986847, + "loss": 0.29, + "step": 34100 + }, + { + "epoch": 1.3170392679254026, + "grad_norm": 1.8928464651107788, + "learning_rate": 0.00011219995624026669, + "loss": 0.1742, + "step": 34110 + }, + { + "epoch": 1.3174253832194294, + "grad_norm": 0.2639687955379486, + "learning_rate": 0.00011217421522066488, + "loss": 0.1745, + "step": 34120 + }, + { + "epoch": 1.3178114985134561, + "grad_norm": 0.5906389355659485, + "learning_rate": 0.00011214847420106311, + "loss": 0.2134, + "step": 34130 + }, + { + "epoch": 1.318197613807483, + "grad_norm": 0.9190629720687866, + "learning_rate": 0.00011212273318146131, + "loss": 0.2547, + "step": 34140 + }, + { + "epoch": 1.3185837291015097, + "grad_norm": 0.5749151110649109, + "learning_rate": 0.00011209699216185954, + "loss": 0.1688, + "step": 34150 + }, + { + "epoch": 1.3189698443955364, + "grad_norm": 0.82295823097229, + "learning_rate": 0.00011207125114225777, + "loss": 0.2884, + "step": 34160 + }, + { + "epoch": 1.3193559596895632, + "grad_norm": 0.07816460728645325, + "learning_rate": 0.00011204551012265597, + "loss": 0.2418, + "step": 34170 + }, + { + "epoch": 1.3197420749835902, + "grad_norm": 0.6417407393455505, + "learning_rate": 0.00011201976910305418, + "loss": 0.2557, + "step": 34180 + }, + { + "epoch": 1.320128190277617, + "grad_norm": 6.093267440795898, + "learning_rate": 0.00011199402808345238, + "loss": 0.3088, + "step": 34190 + }, + { + "epoch": 1.3205143055716437, + "grad_norm": 1.8861887454986572, + "learning_rate": 0.0001119682870638506, + "loss": 0.2204, + "step": 34200 + }, + { + "epoch": 1.3209004208656705, + "grad_norm": 2.3272714614868164, + "learning_rate": 0.0001119425460442488, + "loss": 0.2236, + "step": 34210 + }, + { + "epoch": 1.3212865361596973, + "grad_norm": 0.9608810544013977, + "learning_rate": 0.00011191680502464703, + "loss": 0.1897, + "step": 34220 + }, + { + "epoch": 1.321672651453724, + "grad_norm": 1.2157350778579712, + "learning_rate": 0.00011189106400504526, + "loss": 0.1526, + "step": 34230 + }, + { + "epoch": 1.3220587667477508, + "grad_norm": 1.6684671640396118, + "learning_rate": 0.00011186532298544346, + "loss": 0.3394, + "step": 34240 + }, + { + "epoch": 1.3224448820417778, + "grad_norm": 2.0432374477386475, + "learning_rate": 0.00011183958196584167, + "loss": 0.2183, + "step": 34250 + }, + { + "epoch": 1.3228309973358043, + "grad_norm": 0.9436892867088318, + "learning_rate": 0.00011181384094623988, + "loss": 0.2947, + "step": 34260 + }, + { + "epoch": 1.3232171126298313, + "grad_norm": 0.23260092735290527, + "learning_rate": 0.0001117880999266381, + "loss": 0.114, + "step": 34270 + }, + { + "epoch": 1.323603227923858, + "grad_norm": 1.2291594743728638, + "learning_rate": 0.0001117623589070363, + "loss": 0.3145, + "step": 34280 + }, + { + "epoch": 1.3239893432178849, + "grad_norm": 0.41411107778549194, + 
"learning_rate": 0.00011173661788743452, + "loss": 0.2937, + "step": 34290 + }, + { + "epoch": 1.3243754585119116, + "grad_norm": 2.354405164718628, + "learning_rate": 0.00011171087686783275, + "loss": 0.3933, + "step": 34300 + }, + { + "epoch": 1.3247615738059384, + "grad_norm": 2.6997978687286377, + "learning_rate": 0.00011168513584823095, + "loss": 0.1494, + "step": 34310 + }, + { + "epoch": 1.3251476890999654, + "grad_norm": 2.8430919647216797, + "learning_rate": 0.00011165939482862916, + "loss": 0.2869, + "step": 34320 + }, + { + "epoch": 1.325533804393992, + "grad_norm": 1.1737356185913086, + "learning_rate": 0.00011163365380902738, + "loss": 0.2792, + "step": 34330 + }, + { + "epoch": 1.325919919688019, + "grad_norm": 4.123973846435547, + "learning_rate": 0.00011160791278942559, + "loss": 0.5211, + "step": 34340 + }, + { + "epoch": 1.3263060349820457, + "grad_norm": 0.8862038850784302, + "learning_rate": 0.00011158217176982382, + "loss": 0.2976, + "step": 34350 + }, + { + "epoch": 1.3266921502760725, + "grad_norm": 1.8690590858459473, + "learning_rate": 0.00011155643075022202, + "loss": 0.2485, + "step": 34360 + }, + { + "epoch": 1.3270782655700992, + "grad_norm": 2.885589599609375, + "learning_rate": 0.00011153068973062024, + "loss": 0.3084, + "step": 34370 + }, + { + "epoch": 1.327464380864126, + "grad_norm": 0.9898788928985596, + "learning_rate": 0.00011150494871101844, + "loss": 0.261, + "step": 34380 + }, + { + "epoch": 1.3278504961581528, + "grad_norm": 0.6879653930664062, + "learning_rate": 0.00011147920769141667, + "loss": 0.2082, + "step": 34390 + }, + { + "epoch": 1.3282366114521795, + "grad_norm": 1.2619003057479858, + "learning_rate": 0.00011145346667181487, + "loss": 0.2402, + "step": 34400 + }, + { + "epoch": 1.3286227267462065, + "grad_norm": 1.1212007999420166, + "learning_rate": 0.00011142772565221308, + "loss": 0.3525, + "step": 34410 + }, + { + "epoch": 1.3290088420402333, + "grad_norm": 1.8431956768035889, + "learning_rate": 0.00011140198463261131, + "loss": 0.2212, + "step": 34420 + }, + { + "epoch": 1.32939495733426, + "grad_norm": 0.6185423731803894, + "learning_rate": 0.00011137624361300951, + "loss": 0.2455, + "step": 34430 + }, + { + "epoch": 1.3297810726282868, + "grad_norm": 2.3791301250457764, + "learning_rate": 0.00011135050259340774, + "loss": 0.1763, + "step": 34440 + }, + { + "epoch": 1.3301671879223136, + "grad_norm": 0.4928603768348694, + "learning_rate": 0.00011132476157380594, + "loss": 0.2381, + "step": 34450 + }, + { + "epoch": 1.3305533032163404, + "grad_norm": 1.5636029243469238, + "learning_rate": 0.00011129902055420416, + "loss": 0.1368, + "step": 34460 + }, + { + "epoch": 1.3309394185103671, + "grad_norm": 0.9425283074378967, + "learning_rate": 0.00011127327953460238, + "loss": 0.2825, + "step": 34470 + }, + { + "epoch": 1.3313255338043941, + "grad_norm": 1.2257115840911865, + "learning_rate": 0.00011124753851500058, + "loss": 0.2547, + "step": 34480 + }, + { + "epoch": 1.3317116490984207, + "grad_norm": 0.9416170716285706, + "learning_rate": 0.0001112217974953988, + "loss": 0.2766, + "step": 34490 + }, + { + "epoch": 1.3320977643924476, + "grad_norm": 0.5123847126960754, + "learning_rate": 0.000111196056475797, + "loss": 0.4733, + "step": 34500 + }, + { + "epoch": 1.3324838796864744, + "grad_norm": 1.5581384897232056, + "learning_rate": 0.00011117031545619523, + "loss": 0.1597, + "step": 34510 + }, + { + "epoch": 1.3328699949805012, + "grad_norm": 2.377333879470825, + "learning_rate": 0.00011114457443659343, + "loss": 0.209, + "step": 
34520 + }, + { + "epoch": 1.333256110274528, + "grad_norm": 1.7840913534164429, + "learning_rate": 0.00011111883341699166, + "loss": 0.1759, + "step": 34530 + }, + { + "epoch": 1.3336422255685547, + "grad_norm": 1.1825993061065674, + "learning_rate": 0.00011109309239738987, + "loss": 0.2464, + "step": 34540 + }, + { + "epoch": 1.3340283408625815, + "grad_norm": 1.8859659433364868, + "learning_rate": 0.00011106735137778807, + "loss": 0.3539, + "step": 34550 + }, + { + "epoch": 1.3344144561566083, + "grad_norm": 1.9698175191879272, + "learning_rate": 0.0001110416103581863, + "loss": 0.3301, + "step": 34560 + }, + { + "epoch": 1.3348005714506352, + "grad_norm": 0.7649385333061218, + "learning_rate": 0.0001110158693385845, + "loss": 0.232, + "step": 34570 + }, + { + "epoch": 1.335186686744662, + "grad_norm": 0.56386399269104, + "learning_rate": 0.00011099012831898272, + "loss": 0.3425, + "step": 34580 + }, + { + "epoch": 1.3355728020386888, + "grad_norm": 2.956003189086914, + "learning_rate": 0.00011096438729938092, + "loss": 0.1518, + "step": 34590 + }, + { + "epoch": 1.3359589173327155, + "grad_norm": 2.612029552459717, + "learning_rate": 0.00011093864627977915, + "loss": 0.2765, + "step": 34600 + }, + { + "epoch": 1.3363450326267423, + "grad_norm": 0.9674397706985474, + "learning_rate": 0.00011091290526017736, + "loss": 0.303, + "step": 34610 + }, + { + "epoch": 1.336731147920769, + "grad_norm": 0.9578921794891357, + "learning_rate": 0.00011088716424057556, + "loss": 0.1405, + "step": 34620 + }, + { + "epoch": 1.3371172632147958, + "grad_norm": 2.168065071105957, + "learning_rate": 0.00011086142322097379, + "loss": 0.1914, + "step": 34630 + }, + { + "epoch": 1.3375033785088228, + "grad_norm": 1.3166526556015015, + "learning_rate": 0.00011083568220137199, + "loss": 0.4134, + "step": 34640 + }, + { + "epoch": 1.3378894938028496, + "grad_norm": 0.9082283973693848, + "learning_rate": 0.00011080994118177022, + "loss": 0.2693, + "step": 34650 + }, + { + "epoch": 1.3382756090968764, + "grad_norm": 2.203007698059082, + "learning_rate": 0.00011078420016216844, + "loss": 0.1847, + "step": 34660 + }, + { + "epoch": 1.3386617243909031, + "grad_norm": 0.8101674914360046, + "learning_rate": 0.00011075845914256664, + "loss": 0.3111, + "step": 34670 + }, + { + "epoch": 1.33904783968493, + "grad_norm": 1.9545695781707764, + "learning_rate": 0.00011073271812296486, + "loss": 0.3761, + "step": 34680 + }, + { + "epoch": 1.3394339549789567, + "grad_norm": 1.547581672668457, + "learning_rate": 0.00011070697710336306, + "loss": 0.2374, + "step": 34690 + }, + { + "epoch": 1.3398200702729834, + "grad_norm": 3.3519034385681152, + "learning_rate": 0.00011068123608376128, + "loss": 0.1957, + "step": 34700 + }, + { + "epoch": 1.3402061855670104, + "grad_norm": 1.5508599281311035, + "learning_rate": 0.00011065549506415948, + "loss": 0.4171, + "step": 34710 + }, + { + "epoch": 1.340592300861037, + "grad_norm": 1.8547546863555908, + "learning_rate": 0.00011062975404455771, + "loss": 0.1872, + "step": 34720 + }, + { + "epoch": 1.340978416155064, + "grad_norm": 1.4600756168365479, + "learning_rate": 0.00011060401302495594, + "loss": 0.3515, + "step": 34730 + }, + { + "epoch": 1.3413645314490907, + "grad_norm": 0.05774044618010521, + "learning_rate": 0.00011057827200535414, + "loss": 0.1604, + "step": 34740 + }, + { + "epoch": 1.3417506467431175, + "grad_norm": 2.8793342113494873, + "learning_rate": 0.00011055253098575235, + "loss": 0.3095, + "step": 34750 + }, + { + "epoch": 1.3421367620371443, + "grad_norm": 
2.241042375564575, + "learning_rate": 0.00011052678996615055, + "loss": 0.2511, + "step": 34760 + }, + { + "epoch": 1.342522877331171, + "grad_norm": 1.9320632219314575, + "learning_rate": 0.00011050104894654878, + "loss": 0.4493, + "step": 34770 + }, + { + "epoch": 1.3429089926251978, + "grad_norm": 1.6483882665634155, + "learning_rate": 0.000110475307926947, + "loss": 0.217, + "step": 34780 + }, + { + "epoch": 1.3432951079192246, + "grad_norm": 0.9635765552520752, + "learning_rate": 0.0001104495669073452, + "loss": 0.5458, + "step": 34790 + }, + { + "epoch": 1.3436812232132516, + "grad_norm": 1.2436567544937134, + "learning_rate": 0.00011042382588774343, + "loss": 0.2857, + "step": 34800 + }, + { + "epoch": 1.3440673385072783, + "grad_norm": 2.8082425594329834, + "learning_rate": 0.00011039808486814163, + "loss": 0.3439, + "step": 34810 + }, + { + "epoch": 1.344453453801305, + "grad_norm": 1.0430901050567627, + "learning_rate": 0.00011037234384853984, + "loss": 0.1404, + "step": 34820 + }, + { + "epoch": 1.3448395690953319, + "grad_norm": 1.7387149333953857, + "learning_rate": 0.00011034660282893806, + "loss": 0.395, + "step": 34830 + }, + { + "epoch": 1.3452256843893586, + "grad_norm": 1.2713748216629028, + "learning_rate": 0.00011032086180933627, + "loss": 0.29, + "step": 34840 + }, + { + "epoch": 1.3456117996833854, + "grad_norm": 0.26068204641342163, + "learning_rate": 0.0001102951207897345, + "loss": 0.1814, + "step": 34850 + }, + { + "epoch": 1.3459979149774122, + "grad_norm": 2.4163243770599365, + "learning_rate": 0.0001102693797701327, + "loss": 0.2445, + "step": 34860 + }, + { + "epoch": 1.3463840302714392, + "grad_norm": 2.2439687252044678, + "learning_rate": 0.00011024363875053092, + "loss": 0.3338, + "step": 34870 + }, + { + "epoch": 1.346770145565466, + "grad_norm": 0.2822403609752655, + "learning_rate": 0.00011021789773092912, + "loss": 0.1648, + "step": 34880 + }, + { + "epoch": 1.3471562608594927, + "grad_norm": 0.07319017499685287, + "learning_rate": 0.00011019215671132734, + "loss": 0.107, + "step": 34890 + }, + { + "epoch": 1.3475423761535195, + "grad_norm": 0.9809044003486633, + "learning_rate": 0.00011016641569172555, + "loss": 0.256, + "step": 34900 + }, + { + "epoch": 1.3479284914475462, + "grad_norm": 0.5016226768493652, + "learning_rate": 0.00011014067467212376, + "loss": 0.3229, + "step": 34910 + }, + { + "epoch": 1.348314606741573, + "grad_norm": 1.3026005029678345, + "learning_rate": 0.00011011493365252199, + "loss": 0.2145, + "step": 34920 + }, + { + "epoch": 1.3487007220355998, + "grad_norm": 1.0752215385437012, + "learning_rate": 0.00011008919263292019, + "loss": 0.2355, + "step": 34930 + }, + { + "epoch": 1.3490868373296268, + "grad_norm": 2.2703003883361816, + "learning_rate": 0.00011006345161331842, + "loss": 0.2079, + "step": 34940 + }, + { + "epoch": 1.3494729526236533, + "grad_norm": 1.1323810815811157, + "learning_rate": 0.00011003771059371662, + "loss": 0.1015, + "step": 34950 + }, + { + "epoch": 1.3498590679176803, + "grad_norm": 0.10813555121421814, + "learning_rate": 0.00011001196957411484, + "loss": 0.4214, + "step": 34960 + }, + { + "epoch": 1.350245183211707, + "grad_norm": 0.07815568149089813, + "learning_rate": 0.00010998622855451306, + "loss": 0.1211, + "step": 34970 + }, + { + "epoch": 1.3506312985057338, + "grad_norm": 0.6748234629631042, + "learning_rate": 0.00010996048753491126, + "loss": 0.3508, + "step": 34980 + }, + { + "epoch": 1.3510174137997606, + "grad_norm": 1.8556997776031494, + "learning_rate": 0.00010993474651530948, + 
"loss": 0.2268, + "step": 34990 + }, + { + "epoch": 1.3514035290937874, + "grad_norm": 0.8696061372756958, + "learning_rate": 0.00010990900549570768, + "loss": 0.4321, + "step": 35000 + }, + { + "epoch": 1.3517896443878141, + "grad_norm": 0.42442765831947327, + "learning_rate": 0.00010988326447610591, + "loss": 0.1944, + "step": 35010 + }, + { + "epoch": 1.352175759681841, + "grad_norm": 1.0474554300308228, + "learning_rate": 0.00010985752345650411, + "loss": 0.1342, + "step": 35020 + }, + { + "epoch": 1.3525618749758679, + "grad_norm": 0.607037365436554, + "learning_rate": 0.00010983178243690234, + "loss": 0.2965, + "step": 35030 + }, + { + "epoch": 1.3529479902698947, + "grad_norm": 1.8160990476608276, + "learning_rate": 0.00010980604141730055, + "loss": 0.3192, + "step": 35040 + }, + { + "epoch": 1.3533341055639214, + "grad_norm": 2.0026509761810303, + "learning_rate": 0.00010978030039769875, + "loss": 0.3054, + "step": 35050 + }, + { + "epoch": 1.3537202208579482, + "grad_norm": 0.9203600883483887, + "learning_rate": 0.00010975455937809698, + "loss": 0.253, + "step": 35060 + }, + { + "epoch": 1.354106336151975, + "grad_norm": 0.33198195695877075, + "learning_rate": 0.00010972881835849518, + "loss": 0.3885, + "step": 35070 + }, + { + "epoch": 1.3544924514460017, + "grad_norm": 0.3201223611831665, + "learning_rate": 0.0001097030773388934, + "loss": 0.3029, + "step": 35080 + }, + { + "epoch": 1.3548785667400285, + "grad_norm": 1.2589943408966064, + "learning_rate": 0.0001096773363192916, + "loss": 0.4243, + "step": 35090 + }, + { + "epoch": 1.3552646820340555, + "grad_norm": 1.5106219053268433, + "learning_rate": 0.00010965159529968983, + "loss": 0.2585, + "step": 35100 + }, + { + "epoch": 1.3556507973280822, + "grad_norm": 1.429799199104309, + "learning_rate": 0.00010962585428008804, + "loss": 0.1961, + "step": 35110 + }, + { + "epoch": 1.356036912622109, + "grad_norm": 2.1211297512054443, + "learning_rate": 0.00010960011326048624, + "loss": 0.4057, + "step": 35120 + }, + { + "epoch": 1.3564230279161358, + "grad_norm": 2.5154731273651123, + "learning_rate": 0.00010957437224088447, + "loss": 0.3787, + "step": 35130 + }, + { + "epoch": 1.3568091432101625, + "grad_norm": 0.4914834201335907, + "learning_rate": 0.00010954863122128267, + "loss": 0.234, + "step": 35140 + }, + { + "epoch": 1.3571952585041893, + "grad_norm": 0.26685893535614014, + "learning_rate": 0.0001095228902016809, + "loss": 0.2841, + "step": 35150 + }, + { + "epoch": 1.357581373798216, + "grad_norm": 0.15462155640125275, + "learning_rate": 0.00010949714918207912, + "loss": 0.2269, + "step": 35160 + }, + { + "epoch": 1.357967489092243, + "grad_norm": 1.3887063264846802, + "learning_rate": 0.00010947140816247732, + "loss": 0.3455, + "step": 35170 + }, + { + "epoch": 1.3583536043862696, + "grad_norm": 0.786374032497406, + "learning_rate": 0.00010944566714287554, + "loss": 0.2897, + "step": 35180 + }, + { + "epoch": 1.3587397196802966, + "grad_norm": 1.100475549697876, + "learning_rate": 0.00010941992612327374, + "loss": 0.2892, + "step": 35190 + }, + { + "epoch": 1.3591258349743234, + "grad_norm": 0.7676102519035339, + "learning_rate": 0.00010939418510367196, + "loss": 0.1942, + "step": 35200 + }, + { + "epoch": 1.3595119502683501, + "grad_norm": 0.33462053537368774, + "learning_rate": 0.00010936844408407016, + "loss": 0.2872, + "step": 35210 + }, + { + "epoch": 1.359898065562377, + "grad_norm": 0.9294387698173523, + "learning_rate": 0.00010934270306446839, + "loss": 0.2617, + "step": 35220 + }, + { + "epoch": 
1.3602841808564037, + "grad_norm": 0.3169979453086853, + "learning_rate": 0.00010931696204486662, + "loss": 0.2942, + "step": 35230 + }, + { + "epoch": 1.3606702961504304, + "grad_norm": 2.1339616775512695, + "learning_rate": 0.00010929122102526482, + "loss": 0.4448, + "step": 35240 + }, + { + "epoch": 1.3610564114444572, + "grad_norm": 0.9430062770843506, + "learning_rate": 0.00010926548000566303, + "loss": 0.2051, + "step": 35250 + }, + { + "epoch": 1.3614425267384842, + "grad_norm": 3.1187360286712646, + "learning_rate": 0.00010923973898606123, + "loss": 0.2274, + "step": 35260 + }, + { + "epoch": 1.361828642032511, + "grad_norm": 1.4727579355239868, + "learning_rate": 0.00010921399796645946, + "loss": 0.3757, + "step": 35270 + }, + { + "epoch": 1.3622147573265377, + "grad_norm": 2.157560348510742, + "learning_rate": 0.00010918825694685768, + "loss": 0.3096, + "step": 35280 + }, + { + "epoch": 1.3626008726205645, + "grad_norm": 0.33457377552986145, + "learning_rate": 0.00010916251592725588, + "loss": 0.1489, + "step": 35290 + }, + { + "epoch": 1.3629869879145913, + "grad_norm": 0.9005904197692871, + "learning_rate": 0.00010913677490765411, + "loss": 0.1826, + "step": 35300 + }, + { + "epoch": 1.363373103208618, + "grad_norm": 2.1222829818725586, + "learning_rate": 0.00010911103388805231, + "loss": 0.1965, + "step": 35310 + }, + { + "epoch": 1.3637592185026448, + "grad_norm": 1.3881357908248901, + "learning_rate": 0.00010908529286845052, + "loss": 0.1791, + "step": 35320 + }, + { + "epoch": 1.3641453337966718, + "grad_norm": 1.7574503421783447, + "learning_rate": 0.00010905955184884872, + "loss": 0.3316, + "step": 35330 + }, + { + "epoch": 1.3645314490906983, + "grad_norm": 0.1967727392911911, + "learning_rate": 0.00010903381082924695, + "loss": 0.2331, + "step": 35340 + }, + { + "epoch": 1.3649175643847253, + "grad_norm": 0.8974360823631287, + "learning_rate": 0.00010900806980964518, + "loss": 0.2589, + "step": 35350 + }, + { + "epoch": 1.365303679678752, + "grad_norm": 2.0996744632720947, + "learning_rate": 0.00010898232879004338, + "loss": 0.3663, + "step": 35360 + }, + { + "epoch": 1.3656897949727789, + "grad_norm": 0.5678316354751587, + "learning_rate": 0.0001089565877704416, + "loss": 0.1729, + "step": 35370 + }, + { + "epoch": 1.3660759102668056, + "grad_norm": 2.3381874561309814, + "learning_rate": 0.0001089308467508398, + "loss": 0.1615, + "step": 35380 + }, + { + "epoch": 1.3664620255608324, + "grad_norm": 1.0276836156845093, + "learning_rate": 0.00010890510573123802, + "loss": 0.3359, + "step": 35390 + }, + { + "epoch": 1.3668481408548594, + "grad_norm": 2.4374940395355225, + "learning_rate": 0.00010887936471163622, + "loss": 0.2435, + "step": 35400 + }, + { + "epoch": 1.367234256148886, + "grad_norm": 0.45221665501594543, + "learning_rate": 0.00010885362369203444, + "loss": 0.2555, + "step": 35410 + }, + { + "epoch": 1.367620371442913, + "grad_norm": 2.608090400695801, + "learning_rate": 0.00010882788267243267, + "loss": 0.2465, + "step": 35420 + }, + { + "epoch": 1.3680064867369397, + "grad_norm": 1.4186642169952393, + "learning_rate": 0.00010880214165283087, + "loss": 0.1674, + "step": 35430 + }, + { + "epoch": 1.3683926020309665, + "grad_norm": 0.659479022026062, + "learning_rate": 0.0001087764006332291, + "loss": 0.2926, + "step": 35440 + }, + { + "epoch": 1.3687787173249932, + "grad_norm": 0.9219567179679871, + "learning_rate": 0.0001087506596136273, + "loss": 0.2001, + "step": 35450 + }, + { + "epoch": 1.36916483261902, + "grad_norm": 0.8070804476737976, + 
"learning_rate": 0.00010872491859402551, + "loss": 0.2178, + "step": 35460 + }, + { + "epoch": 1.3695509479130468, + "grad_norm": 2.9981069564819336, + "learning_rate": 0.00010869917757442374, + "loss": 0.3079, + "step": 35470 + }, + { + "epoch": 1.3699370632070735, + "grad_norm": 0.7891242504119873, + "learning_rate": 0.00010867343655482194, + "loss": 0.2765, + "step": 35480 + }, + { + "epoch": 1.3703231785011005, + "grad_norm": 1.448637843132019, + "learning_rate": 0.00010864769553522016, + "loss": 0.3521, + "step": 35490 + }, + { + "epoch": 1.3707092937951273, + "grad_norm": 0.07628043740987778, + "learning_rate": 0.00010862195451561836, + "loss": 0.2083, + "step": 35500 + }, + { + "epoch": 1.371095409089154, + "grad_norm": 0.7549735307693481, + "learning_rate": 0.00010859621349601659, + "loss": 0.2536, + "step": 35510 + }, + { + "epoch": 1.3714815243831808, + "grad_norm": 1.3548041582107544, + "learning_rate": 0.00010857047247641479, + "loss": 0.251, + "step": 35520 + }, + { + "epoch": 1.3718676396772076, + "grad_norm": 0.530010998249054, + "learning_rate": 0.000108544731456813, + "loss": 0.1917, + "step": 35530 + }, + { + "epoch": 1.3722537549712344, + "grad_norm": 0.4148992896080017, + "learning_rate": 0.00010851899043721123, + "loss": 0.335, + "step": 35540 + }, + { + "epoch": 1.3726398702652611, + "grad_norm": 1.5118776559829712, + "learning_rate": 0.00010849324941760943, + "loss": 0.2159, + "step": 35550 + }, + { + "epoch": 1.3730259855592881, + "grad_norm": 1.036889910697937, + "learning_rate": 0.00010846750839800766, + "loss": 0.2975, + "step": 35560 + }, + { + "epoch": 1.3734121008533147, + "grad_norm": 1.724263072013855, + "learning_rate": 0.00010844176737840586, + "loss": 0.1476, + "step": 35570 + }, + { + "epoch": 1.3737982161473417, + "grad_norm": 1.599007487297058, + "learning_rate": 0.00010841602635880408, + "loss": 0.2539, + "step": 35580 + }, + { + "epoch": 1.3741843314413684, + "grad_norm": 2.9119279384613037, + "learning_rate": 0.00010839028533920228, + "loss": 0.2688, + "step": 35590 + }, + { + "epoch": 1.3745704467353952, + "grad_norm": 1.8647874593734741, + "learning_rate": 0.00010836454431960051, + "loss": 0.4158, + "step": 35600 + }, + { + "epoch": 1.374956562029422, + "grad_norm": 3.925290822982788, + "learning_rate": 0.00010833880329999872, + "loss": 0.3333, + "step": 35610 + }, + { + "epoch": 1.3753426773234487, + "grad_norm": 0.7124634385108948, + "learning_rate": 0.00010831306228039692, + "loss": 0.1069, + "step": 35620 + }, + { + "epoch": 1.3757287926174757, + "grad_norm": 1.303579330444336, + "learning_rate": 0.00010828732126079515, + "loss": 0.2898, + "step": 35630 + }, + { + "epoch": 1.3761149079115023, + "grad_norm": 3.921804189682007, + "learning_rate": 0.00010826158024119335, + "loss": 0.4212, + "step": 35640 + }, + { + "epoch": 1.3765010232055293, + "grad_norm": 1.3194564580917358, + "learning_rate": 0.00010823583922159158, + "loss": 0.2771, + "step": 35650 + }, + { + "epoch": 1.376887138499556, + "grad_norm": 1.4237637519836426, + "learning_rate": 0.00010821009820198979, + "loss": 0.2463, + "step": 35660 + }, + { + "epoch": 1.3772732537935828, + "grad_norm": 1.8165888786315918, + "learning_rate": 0.000108184357182388, + "loss": 0.291, + "step": 35670 + }, + { + "epoch": 1.3776593690876096, + "grad_norm": 1.1056426763534546, + "learning_rate": 0.00010815861616278622, + "loss": 0.2525, + "step": 35680 + }, + { + "epoch": 1.3780454843816363, + "grad_norm": 1.483189582824707, + "learning_rate": 0.00010813287514318442, + "loss": 0.1569, + "step": 35690 + 
}, + { + "epoch": 1.378431599675663, + "grad_norm": 1.0666841268539429, + "learning_rate": 0.00010810713412358264, + "loss": 0.235, + "step": 35700 + }, + { + "epoch": 1.3788177149696899, + "grad_norm": 1.0299845933914185, + "learning_rate": 0.00010808139310398084, + "loss": 0.3892, + "step": 35710 + }, + { + "epoch": 1.3792038302637168, + "grad_norm": 2.3474409580230713, + "learning_rate": 0.00010805565208437907, + "loss": 0.3417, + "step": 35720 + }, + { + "epoch": 1.3795899455577436, + "grad_norm": 1.7456315755844116, + "learning_rate": 0.0001080299110647773, + "loss": 0.2538, + "step": 35730 + }, + { + "epoch": 1.3799760608517704, + "grad_norm": 2.866103410720825, + "learning_rate": 0.0001080041700451755, + "loss": 0.1619, + "step": 35740 + }, + { + "epoch": 1.3803621761457971, + "grad_norm": 0.29136407375335693, + "learning_rate": 0.00010797842902557371, + "loss": 0.2692, + "step": 35750 + }, + { + "epoch": 1.380748291439824, + "grad_norm": 0.8046161532402039, + "learning_rate": 0.00010795268800597191, + "loss": 0.1575, + "step": 35760 + }, + { + "epoch": 1.3811344067338507, + "grad_norm": 0.6451787352561951, + "learning_rate": 0.00010792694698637014, + "loss": 0.4914, + "step": 35770 + }, + { + "epoch": 1.3815205220278775, + "grad_norm": 0.7289161086082458, + "learning_rate": 0.00010790120596676836, + "loss": 0.1895, + "step": 35780 + }, + { + "epoch": 1.3819066373219044, + "grad_norm": 0.8300430178642273, + "learning_rate": 0.00010787546494716656, + "loss": 0.3663, + "step": 35790 + }, + { + "epoch": 1.382292752615931, + "grad_norm": 0.17713364958763123, + "learning_rate": 0.00010784972392756479, + "loss": 0.3189, + "step": 35800 + }, + { + "epoch": 1.382678867909958, + "grad_norm": 0.903222918510437, + "learning_rate": 0.00010782398290796299, + "loss": 0.1577, + "step": 35810 + }, + { + "epoch": 1.3830649832039847, + "grad_norm": 0.08617932349443436, + "learning_rate": 0.0001077982418883612, + "loss": 0.2872, + "step": 35820 + }, + { + "epoch": 1.3834510984980115, + "grad_norm": 1.9590895175933838, + "learning_rate": 0.0001077725008687594, + "loss": 0.2907, + "step": 35830 + }, + { + "epoch": 1.3838372137920383, + "grad_norm": 1.2515161037445068, + "learning_rate": 0.00010774675984915763, + "loss": 0.177, + "step": 35840 + }, + { + "epoch": 1.384223329086065, + "grad_norm": 1.6171292066574097, + "learning_rate": 0.00010772101882955586, + "loss": 0.2321, + "step": 35850 + }, + { + "epoch": 1.3846094443800918, + "grad_norm": 0.13681405782699585, + "learning_rate": 0.00010769527780995406, + "loss": 0.247, + "step": 35860 + }, + { + "epoch": 1.3849955596741186, + "grad_norm": 1.1949968338012695, + "learning_rate": 0.00010766953679035228, + "loss": 0.2978, + "step": 35870 + }, + { + "epoch": 1.3853816749681456, + "grad_norm": 0.17001692950725555, + "learning_rate": 0.00010764379577075048, + "loss": 0.3241, + "step": 35880 + }, + { + "epoch": 1.3857677902621723, + "grad_norm": 0.8227952122688293, + "learning_rate": 0.0001076180547511487, + "loss": 0.3499, + "step": 35890 + }, + { + "epoch": 1.386153905556199, + "grad_norm": 1.4185482263565063, + "learning_rate": 0.0001075923137315469, + "loss": 0.3109, + "step": 35900 + }, + { + "epoch": 1.3865400208502259, + "grad_norm": 0.9533351063728333, + "learning_rate": 0.00010756657271194512, + "loss": 0.24, + "step": 35910 + }, + { + "epoch": 1.3869261361442526, + "grad_norm": 1.077789306640625, + "learning_rate": 0.00010754083169234335, + "loss": 0.2662, + "step": 35920 + }, + { + "epoch": 1.3873122514382794, + "grad_norm": 1.3528363704681396, 
+ "learning_rate": 0.00010751509067274155, + "loss": 0.1623, + "step": 35930 + }, + { + "epoch": 1.3876983667323062, + "grad_norm": 0.25122806429862976, + "learning_rate": 0.00010748934965313978, + "loss": 0.1639, + "step": 35940 + }, + { + "epoch": 1.3880844820263332, + "grad_norm": 0.9446159601211548, + "learning_rate": 0.00010746360863353798, + "loss": 0.2035, + "step": 35950 + }, + { + "epoch": 1.38847059732036, + "grad_norm": 1.2258719205856323, + "learning_rate": 0.00010743786761393619, + "loss": 0.4247, + "step": 35960 + }, + { + "epoch": 1.3888567126143867, + "grad_norm": 3.117729663848877, + "learning_rate": 0.00010741212659433442, + "loss": 0.2569, + "step": 35970 + }, + { + "epoch": 1.3892428279084135, + "grad_norm": 0.6333123445510864, + "learning_rate": 0.00010738638557473262, + "loss": 0.1843, + "step": 35980 + }, + { + "epoch": 1.3896289432024402, + "grad_norm": 1.49360191822052, + "learning_rate": 0.00010736064455513084, + "loss": 0.2318, + "step": 35990 + }, + { + "epoch": 1.390015058496467, + "grad_norm": 3.9082753658294678, + "learning_rate": 0.00010733490353552904, + "loss": 0.2037, + "step": 36000 + }, + { + "epoch": 1.3904011737904938, + "grad_norm": 0.5687323808670044, + "learning_rate": 0.00010730916251592727, + "loss": 0.0871, + "step": 36010 + }, + { + "epoch": 1.3907872890845208, + "grad_norm": 0.3480868339538574, + "learning_rate": 0.00010728342149632547, + "loss": 0.4009, + "step": 36020 + }, + { + "epoch": 1.3911734043785473, + "grad_norm": 1.325042486190796, + "learning_rate": 0.00010725768047672368, + "loss": 0.5143, + "step": 36030 + }, + { + "epoch": 1.3915595196725743, + "grad_norm": 2.114786386489868, + "learning_rate": 0.00010723193945712191, + "loss": 0.2944, + "step": 36040 + }, + { + "epoch": 1.391945634966601, + "grad_norm": 1.716272234916687, + "learning_rate": 0.00010720619843752011, + "loss": 0.3793, + "step": 36050 + }, + { + "epoch": 1.3923317502606278, + "grad_norm": 2.057535171508789, + "learning_rate": 0.00010718045741791834, + "loss": 0.199, + "step": 36060 + }, + { + "epoch": 1.3927178655546546, + "grad_norm": 2.371248483657837, + "learning_rate": 0.00010715471639831653, + "loss": 0.3276, + "step": 36070 + }, + { + "epoch": 1.3931039808486814, + "grad_norm": 0.1784186065196991, + "learning_rate": 0.00010712897537871476, + "loss": 0.0922, + "step": 36080 + }, + { + "epoch": 1.3934900961427081, + "grad_norm": 0.7923040390014648, + "learning_rate": 0.00010710323435911298, + "loss": 0.1599, + "step": 36090 + }, + { + "epoch": 1.393876211436735, + "grad_norm": 1.9013831615447998, + "learning_rate": 0.00010707749333951117, + "loss": 0.4997, + "step": 36100 + }, + { + "epoch": 1.394262326730762, + "grad_norm": 1.8659415245056152, + "learning_rate": 0.0001070517523199094, + "loss": 0.1851, + "step": 36110 + }, + { + "epoch": 1.3946484420247887, + "grad_norm": 2.5775375366210938, + "learning_rate": 0.0001070260113003076, + "loss": 0.2384, + "step": 36120 + }, + { + "epoch": 1.3950345573188154, + "grad_norm": 0.21943879127502441, + "learning_rate": 0.00010700027028070583, + "loss": 0.3996, + "step": 36130 + }, + { + "epoch": 1.3954206726128422, + "grad_norm": 1.1734743118286133, + "learning_rate": 0.00010697452926110403, + "loss": 0.1737, + "step": 36140 + }, + { + "epoch": 1.395806787906869, + "grad_norm": 0.697695791721344, + "learning_rate": 0.00010694878824150225, + "loss": 0.2778, + "step": 36150 + }, + { + "epoch": 1.3961929032008957, + "grad_norm": 3.2881579399108887, + "learning_rate": 0.00010692304722190047, + "loss": 0.3198, + "step": 
36160 + }, + { + "epoch": 1.3965790184949225, + "grad_norm": 0.1592467725276947, + "learning_rate": 0.00010689730620229868, + "loss": 0.2591, + "step": 36170 + }, + { + "epoch": 1.3969651337889495, + "grad_norm": 0.4579029083251953, + "learning_rate": 0.0001068715651826969, + "loss": 0.2984, + "step": 36180 + }, + { + "epoch": 1.3973512490829763, + "grad_norm": 0.4286015033721924, + "learning_rate": 0.0001068458241630951, + "loss": 0.4098, + "step": 36190 + }, + { + "epoch": 1.397737364377003, + "grad_norm": 1.7824127674102783, + "learning_rate": 0.00010682008314349332, + "loss": 0.2446, + "step": 36200 + }, + { + "epoch": 1.3981234796710298, + "grad_norm": 0.8584449887275696, + "learning_rate": 0.00010679434212389152, + "loss": 0.1415, + "step": 36210 + }, + { + "epoch": 1.3985095949650566, + "grad_norm": 1.1699339151382446, + "learning_rate": 0.00010676860110428975, + "loss": 0.1706, + "step": 36220 + }, + { + "epoch": 1.3988957102590833, + "grad_norm": 2.615877389907837, + "learning_rate": 0.00010674286008468796, + "loss": 0.2605, + "step": 36230 + }, + { + "epoch": 1.39928182555311, + "grad_norm": 2.182037591934204, + "learning_rate": 0.00010671711906508617, + "loss": 0.2067, + "step": 36240 + }, + { + "epoch": 1.399667940847137, + "grad_norm": 2.183263063430786, + "learning_rate": 0.00010669137804548439, + "loss": 0.4704, + "step": 36250 + }, + { + "epoch": 1.4000540561411636, + "grad_norm": 3.505791187286377, + "learning_rate": 0.00010666563702588259, + "loss": 0.3385, + "step": 36260 + }, + { + "epoch": 1.4004401714351906, + "grad_norm": 1.2262030839920044, + "learning_rate": 0.00010663989600628081, + "loss": 0.3999, + "step": 36270 + }, + { + "epoch": 1.4008262867292174, + "grad_norm": 2.4024577140808105, + "learning_rate": 0.00010661415498667904, + "loss": 0.1984, + "step": 36280 + }, + { + "epoch": 1.4012124020232442, + "grad_norm": 0.4166090786457062, + "learning_rate": 0.00010658841396707724, + "loss": 0.2448, + "step": 36290 + }, + { + "epoch": 1.401598517317271, + "grad_norm": 0.422590047121048, + "learning_rate": 0.00010656267294747547, + "loss": 0.2464, + "step": 36300 + }, + { + "epoch": 1.4019846326112977, + "grad_norm": 2.287503480911255, + "learning_rate": 0.00010653693192787367, + "loss": 0.1621, + "step": 36310 + }, + { + "epoch": 1.4023707479053245, + "grad_norm": 0.8126110434532166, + "learning_rate": 0.00010651119090827188, + "loss": 0.2097, + "step": 36320 + }, + { + "epoch": 1.4027568631993512, + "grad_norm": 0.683016836643219, + "learning_rate": 0.00010648544988867008, + "loss": 0.1512, + "step": 36330 + }, + { + "epoch": 1.4031429784933782, + "grad_norm": 1.5477893352508545, + "learning_rate": 0.00010645970886906831, + "loss": 0.2169, + "step": 36340 + }, + { + "epoch": 1.403529093787405, + "grad_norm": 2.183166265487671, + "learning_rate": 0.00010643396784946653, + "loss": 0.4307, + "step": 36350 + }, + { + "epoch": 1.4039152090814317, + "grad_norm": 1.5782747268676758, + "learning_rate": 0.00010640822682986473, + "loss": 0.291, + "step": 36360 + }, + { + "epoch": 1.4043013243754585, + "grad_norm": 1.2823392152786255, + "learning_rate": 0.00010638248581026296, + "loss": 0.315, + "step": 36370 + }, + { + "epoch": 1.4046874396694853, + "grad_norm": 2.0126500129699707, + "learning_rate": 0.00010635674479066116, + "loss": 0.2565, + "step": 36380 + }, + { + "epoch": 1.405073554963512, + "grad_norm": 2.490217447280884, + "learning_rate": 0.00010633100377105937, + "loss": 0.2665, + "step": 36390 + }, + { + "epoch": 1.4054596702575388, + "grad_norm": 
1.2054855823516846, + "learning_rate": 0.00010630526275145757, + "loss": 0.5455, + "step": 36400 + }, + { + "epoch": 1.4058457855515658, + "grad_norm": 0.7968757748603821, + "learning_rate": 0.0001062795217318558, + "loss": 0.333, + "step": 36410 + }, + { + "epoch": 1.4062319008455926, + "grad_norm": 1.1027718782424927, + "learning_rate": 0.00010625378071225403, + "loss": 0.2019, + "step": 36420 + }, + { + "epoch": 1.4066180161396193, + "grad_norm": 1.987302541732788, + "learning_rate": 0.00010622803969265223, + "loss": 0.3159, + "step": 36430 + }, + { + "epoch": 1.407004131433646, + "grad_norm": 1.5426512956619263, + "learning_rate": 0.00010620229867305045, + "loss": 0.3759, + "step": 36440 + }, + { + "epoch": 1.4073902467276729, + "grad_norm": 0.5228156447410583, + "learning_rate": 0.00010617655765344865, + "loss": 0.2465, + "step": 36450 + }, + { + "epoch": 1.4077763620216996, + "grad_norm": 0.46890121698379517, + "learning_rate": 0.00010615081663384687, + "loss": 0.302, + "step": 36460 + }, + { + "epoch": 1.4081624773157264, + "grad_norm": 2.1506495475769043, + "learning_rate": 0.0001061250756142451, + "loss": 0.2569, + "step": 36470 + }, + { + "epoch": 1.4085485926097534, + "grad_norm": 2.307468891143799, + "learning_rate": 0.0001060993345946433, + "loss": 0.2009, + "step": 36480 + }, + { + "epoch": 1.40893470790378, + "grad_norm": 0.07033026963472366, + "learning_rate": 0.00010607359357504152, + "loss": 0.1728, + "step": 36490 + }, + { + "epoch": 1.409320823197807, + "grad_norm": 0.3262972831726074, + "learning_rate": 0.00010604785255543972, + "loss": 0.2905, + "step": 36500 + }, + { + "epoch": 1.4097069384918337, + "grad_norm": 0.755646824836731, + "learning_rate": 0.00010602211153583795, + "loss": 0.3287, + "step": 36510 + }, + { + "epoch": 1.4100930537858605, + "grad_norm": 1.1180161237716675, + "learning_rate": 0.00010599637051623615, + "loss": 0.2743, + "step": 36520 + }, + { + "epoch": 1.4104791690798872, + "grad_norm": 1.2358392477035522, + "learning_rate": 0.00010597062949663436, + "loss": 0.2672, + "step": 36530 + }, + { + "epoch": 1.410865284373914, + "grad_norm": 1.4188988208770752, + "learning_rate": 0.00010594488847703259, + "loss": 0.2552, + "step": 36540 + }, + { + "epoch": 1.4112513996679408, + "grad_norm": 1.4727978706359863, + "learning_rate": 0.00010591914745743079, + "loss": 0.2256, + "step": 36550 + }, + { + "epoch": 1.4116375149619675, + "grad_norm": 0.08973213285207748, + "learning_rate": 0.00010589340643782901, + "loss": 0.224, + "step": 36560 + }, + { + "epoch": 1.4120236302559945, + "grad_norm": 0.9915102124214172, + "learning_rate": 0.00010586766541822721, + "loss": 0.195, + "step": 36570 + }, + { + "epoch": 1.4124097455500213, + "grad_norm": 0.8524800539016724, + "learning_rate": 0.00010584192439862544, + "loss": 0.1492, + "step": 36580 + }, + { + "epoch": 1.412795860844048, + "grad_norm": 1.8414466381072998, + "learning_rate": 0.00010581618337902365, + "loss": 0.3412, + "step": 36590 + }, + { + "epoch": 1.4131819761380748, + "grad_norm": 2.596547842025757, + "learning_rate": 0.00010579044235942185, + "loss": 0.3469, + "step": 36600 + }, + { + "epoch": 1.4135680914321016, + "grad_norm": 0.5367813110351562, + "learning_rate": 0.00010576470133982008, + "loss": 0.2274, + "step": 36610 + }, + { + "epoch": 1.4139542067261284, + "grad_norm": 2.70858097076416, + "learning_rate": 0.00010573896032021828, + "loss": 0.171, + "step": 36620 + }, + { + "epoch": 1.4143403220201551, + "grad_norm": 1.7077667713165283, + "learning_rate": 0.00010571321930061651, + "loss": 
0.2638, + "step": 36630 + }, + { + "epoch": 1.4147264373141821, + "grad_norm": 0.9189953804016113, + "learning_rate": 0.00010568747828101471, + "loss": 0.2283, + "step": 36640 + }, + { + "epoch": 1.4151125526082087, + "grad_norm": 2.0240087509155273, + "learning_rate": 0.00010566173726141293, + "loss": 0.3166, + "step": 36650 + }, + { + "epoch": 1.4154986679022357, + "grad_norm": 3.3304507732391357, + "learning_rate": 0.00010563599624181115, + "loss": 0.3046, + "step": 36660 + }, + { + "epoch": 1.4158847831962624, + "grad_norm": 1.955929160118103, + "learning_rate": 0.00010561025522220935, + "loss": 0.2653, + "step": 36670 + }, + { + "epoch": 1.4162708984902892, + "grad_norm": 2.2465381622314453, + "learning_rate": 0.00010558451420260757, + "loss": 0.3119, + "step": 36680 + }, + { + "epoch": 1.416657013784316, + "grad_norm": 2.108614921569824, + "learning_rate": 0.00010555877318300577, + "loss": 0.2872, + "step": 36690 + }, + { + "epoch": 1.4170431290783427, + "grad_norm": 1.35493004322052, + "learning_rate": 0.000105533032163404, + "loss": 0.1491, + "step": 36700 + }, + { + "epoch": 1.4174292443723697, + "grad_norm": 0.9102393984794617, + "learning_rate": 0.0001055072911438022, + "loss": 0.1565, + "step": 36710 + }, + { + "epoch": 1.4178153596663963, + "grad_norm": 1.7878345251083374, + "learning_rate": 0.00010548155012420043, + "loss": 0.4864, + "step": 36720 + }, + { + "epoch": 1.4182014749604233, + "grad_norm": 1.4333637952804565, + "learning_rate": 0.00010545580910459864, + "loss": 0.3246, + "step": 36730 + }, + { + "epoch": 1.41858759025445, + "grad_norm": 0.0966360941529274, + "learning_rate": 0.00010543006808499684, + "loss": 0.1758, + "step": 36740 + }, + { + "epoch": 1.4189737055484768, + "grad_norm": 0.30043545365333557, + "learning_rate": 0.00010540432706539507, + "loss": 0.1811, + "step": 36750 + }, + { + "epoch": 1.4193598208425036, + "grad_norm": 1.7705951929092407, + "learning_rate": 0.00010537858604579327, + "loss": 0.1824, + "step": 36760 + }, + { + "epoch": 1.4197459361365303, + "grad_norm": 1.2025195360183716, + "learning_rate": 0.0001053528450261915, + "loss": 0.1463, + "step": 36770 + }, + { + "epoch": 1.420132051430557, + "grad_norm": 0.3154304325580597, + "learning_rate": 0.00010532710400658972, + "loss": 0.1817, + "step": 36780 + }, + { + "epoch": 1.4205181667245839, + "grad_norm": 3.392331838607788, + "learning_rate": 0.00010530136298698792, + "loss": 0.2938, + "step": 36790 + }, + { + "epoch": 1.4209042820186109, + "grad_norm": 0.7256132364273071, + "learning_rate": 0.00010527562196738613, + "loss": 0.28, + "step": 36800 + }, + { + "epoch": 1.4212903973126376, + "grad_norm": 1.9007991552352905, + "learning_rate": 0.00010524988094778435, + "loss": 0.2306, + "step": 36810 + }, + { + "epoch": 1.4216765126066644, + "grad_norm": 3.21189546585083, + "learning_rate": 0.00010522413992818256, + "loss": 0.2945, + "step": 36820 + }, + { + "epoch": 1.4220626279006912, + "grad_norm": 0.20476600527763367, + "learning_rate": 0.00010519839890858076, + "loss": 0.3129, + "step": 36830 + }, + { + "epoch": 1.422448743194718, + "grad_norm": 1.4225107431411743, + "learning_rate": 0.00010517265788897899, + "loss": 0.2262, + "step": 36840 + }, + { + "epoch": 1.4228348584887447, + "grad_norm": 1.203728437423706, + "learning_rate": 0.00010514691686937721, + "loss": 0.176, + "step": 36850 + }, + { + "epoch": 1.4232209737827715, + "grad_norm": 0.3001759648323059, + "learning_rate": 0.00010512117584977541, + "loss": 0.218, + "step": 36860 + }, + { + "epoch": 1.4236070890767984, + 
"grad_norm": 2.3143389225006104, + "learning_rate": 0.00010509543483017363, + "loss": 0.1562, + "step": 36870 + }, + { + "epoch": 1.423993204370825, + "grad_norm": 0.5087364912033081, + "learning_rate": 0.00010506969381057184, + "loss": 0.1207, + "step": 36880 + }, + { + "epoch": 1.424379319664852, + "grad_norm": 1.6521960496902466, + "learning_rate": 0.00010504395279097005, + "loss": 0.4756, + "step": 36890 + }, + { + "epoch": 1.4247654349588788, + "grad_norm": 0.09236706793308258, + "learning_rate": 0.00010501821177136825, + "loss": 0.1755, + "step": 36900 + }, + { + "epoch": 1.4251515502529055, + "grad_norm": 0.2143094390630722, + "learning_rate": 0.00010499247075176648, + "loss": 0.2126, + "step": 36910 + }, + { + "epoch": 1.4255376655469323, + "grad_norm": 1.210170865058899, + "learning_rate": 0.00010496672973216471, + "loss": 0.1391, + "step": 36920 + }, + { + "epoch": 1.425923780840959, + "grad_norm": 0.6248244047164917, + "learning_rate": 0.00010494098871256291, + "loss": 0.2898, + "step": 36930 + }, + { + "epoch": 1.426309896134986, + "grad_norm": 2.1100337505340576, + "learning_rate": 0.00010491524769296113, + "loss": 0.1198, + "step": 36940 + }, + { + "epoch": 1.4266960114290126, + "grad_norm": 2.5673348903656006, + "learning_rate": 0.00010488950667335933, + "loss": 0.2284, + "step": 36950 + }, + { + "epoch": 1.4270821267230396, + "grad_norm": 1.868195652961731, + "learning_rate": 0.00010486376565375755, + "loss": 0.3738, + "step": 36960 + }, + { + "epoch": 1.4274682420170663, + "grad_norm": 1.0951671600341797, + "learning_rate": 0.00010483802463415577, + "loss": 0.1424, + "step": 36970 + }, + { + "epoch": 1.4278543573110931, + "grad_norm": 0.4791143536567688, + "learning_rate": 0.00010481228361455397, + "loss": 0.1445, + "step": 36980 + }, + { + "epoch": 1.4282404726051199, + "grad_norm": 3.2477540969848633, + "learning_rate": 0.0001047865425949522, + "loss": 0.2676, + "step": 36990 + }, + { + "epoch": 1.4286265878991466, + "grad_norm": 0.8082342147827148, + "learning_rate": 0.0001047608015753504, + "loss": 0.2295, + "step": 37000 + }, + { + "epoch": 1.4290127031931734, + "grad_norm": 2.3048954010009766, + "learning_rate": 0.00010473506055574863, + "loss": 0.1996, + "step": 37010 + }, + { + "epoch": 1.4293988184872002, + "grad_norm": 0.42648783326148987, + "learning_rate": 0.00010470931953614683, + "loss": 0.2549, + "step": 37020 + }, + { + "epoch": 1.4297849337812272, + "grad_norm": 1.8794362545013428, + "learning_rate": 0.00010468357851654504, + "loss": 0.1929, + "step": 37030 + }, + { + "epoch": 1.430171049075254, + "grad_norm": 0.9679039120674133, + "learning_rate": 0.00010465783749694327, + "loss": 0.1544, + "step": 37040 + }, + { + "epoch": 1.4305571643692807, + "grad_norm": 0.7789367437362671, + "learning_rate": 0.00010463209647734147, + "loss": 0.2251, + "step": 37050 + }, + { + "epoch": 1.4309432796633075, + "grad_norm": 1.1705437898635864, + "learning_rate": 0.0001046063554577397, + "loss": 0.1567, + "step": 37060 + }, + { + "epoch": 1.4313293949573342, + "grad_norm": 0.13087430596351624, + "learning_rate": 0.00010458061443813789, + "loss": 0.3237, + "step": 37070 + }, + { + "epoch": 1.431715510251361, + "grad_norm": 1.0658761262893677, + "learning_rate": 0.00010455487341853612, + "loss": 0.24, + "step": 37080 + }, + { + "epoch": 1.4321016255453878, + "grad_norm": 1.686922311782837, + "learning_rate": 0.00010452913239893433, + "loss": 0.1911, + "step": 37090 + }, + { + "epoch": 1.4324877408394148, + "grad_norm": 2.010221481323242, + "learning_rate": 
0.00010450339137933253, + "loss": 0.4569, + "step": 37100 + }, + { + "epoch": 1.4328738561334413, + "grad_norm": 0.8007562160491943, + "learning_rate": 0.00010447765035973076, + "loss": 0.1198, + "step": 37110 + }, + { + "epoch": 1.4332599714274683, + "grad_norm": 0.5455211997032166, + "learning_rate": 0.00010445190934012896, + "loss": 0.2179, + "step": 37120 + }, + { + "epoch": 1.433646086721495, + "grad_norm": 1.4133542776107788, + "learning_rate": 0.00010442616832052719, + "loss": 0.45, + "step": 37130 + }, + { + "epoch": 1.4340322020155218, + "grad_norm": 1.6218222379684448, + "learning_rate": 0.00010440042730092539, + "loss": 0.19, + "step": 37140 + }, + { + "epoch": 1.4344183173095486, + "grad_norm": 0.6425970196723938, + "learning_rate": 0.00010437468628132361, + "loss": 0.2345, + "step": 37150 + }, + { + "epoch": 1.4348044326035754, + "grad_norm": 0.4344918131828308, + "learning_rate": 0.00010434894526172183, + "loss": 0.2532, + "step": 37160 + }, + { + "epoch": 1.4351905478976021, + "grad_norm": 0.6628998517990112, + "learning_rate": 0.00010432320424212003, + "loss": 0.2281, + "step": 37170 + }, + { + "epoch": 1.435576663191629, + "grad_norm": 0.8695842027664185, + "learning_rate": 0.00010429746322251825, + "loss": 0.2796, + "step": 37180 + }, + { + "epoch": 1.435962778485656, + "grad_norm": 0.16481854021549225, + "learning_rate": 0.00010427172220291645, + "loss": 0.2333, + "step": 37190 + }, + { + "epoch": 1.4363488937796827, + "grad_norm": 0.7194454073905945, + "learning_rate": 0.00010424598118331468, + "loss": 0.1413, + "step": 37200 + }, + { + "epoch": 1.4367350090737094, + "grad_norm": 4.845508575439453, + "learning_rate": 0.00010422024016371288, + "loss": 0.3944, + "step": 37210 + }, + { + "epoch": 1.4371211243677362, + "grad_norm": 2.6386618614196777, + "learning_rate": 0.0001041944991441111, + "loss": 0.3037, + "step": 37220 + }, + { + "epoch": 1.437507239661763, + "grad_norm": 0.4089922308921814, + "learning_rate": 0.00010416875812450932, + "loss": 0.2985, + "step": 37230 + }, + { + "epoch": 1.4378933549557897, + "grad_norm": 1.456944465637207, + "learning_rate": 0.00010414301710490752, + "loss": 0.3031, + "step": 37240 + }, + { + "epoch": 1.4382794702498165, + "grad_norm": 1.301829218864441, + "learning_rate": 0.00010411727608530575, + "loss": 0.2578, + "step": 37250 + }, + { + "epoch": 1.4386655855438435, + "grad_norm": 1.2072703838348389, + "learning_rate": 0.00010409153506570395, + "loss": 0.391, + "step": 37260 + }, + { + "epoch": 1.4390517008378703, + "grad_norm": 0.5538531541824341, + "learning_rate": 0.00010406579404610217, + "loss": 0.1867, + "step": 37270 + }, + { + "epoch": 1.439437816131897, + "grad_norm": 1.0898678302764893, + "learning_rate": 0.0001040400530265004, + "loss": 0.2112, + "step": 37280 + }, + { + "epoch": 1.4398239314259238, + "grad_norm": 1.5769239664077759, + "learning_rate": 0.0001040143120068986, + "loss": 0.3121, + "step": 37290 + }, + { + "epoch": 1.4402100467199506, + "grad_norm": 0.3964422345161438, + "learning_rate": 0.00010398857098729681, + "loss": 0.196, + "step": 37300 + }, + { + "epoch": 1.4405961620139773, + "grad_norm": 1.0268182754516602, + "learning_rate": 0.00010396282996769501, + "loss": 0.1785, + "step": 37310 + }, + { + "epoch": 1.440982277308004, + "grad_norm": 1.750826358795166, + "learning_rate": 0.00010393708894809324, + "loss": 0.3654, + "step": 37320 + }, + { + "epoch": 1.441368392602031, + "grad_norm": 1.1231745481491089, + "learning_rate": 0.00010391134792849144, + "loss": 0.2594, + "step": 37330 + }, + { + "epoch": 
1.4417545078960576, + "grad_norm": 0.2897786498069763, + "learning_rate": 0.00010388560690888967, + "loss": 0.3483, + "step": 37340 + }, + { + "epoch": 1.4421406231900846, + "grad_norm": 0.07170752435922623, + "learning_rate": 0.00010385986588928789, + "loss": 0.1462, + "step": 37350 + }, + { + "epoch": 1.4425267384841114, + "grad_norm": 3.326099395751953, + "learning_rate": 0.00010383412486968609, + "loss": 0.3785, + "step": 37360 + }, + { + "epoch": 1.4429128537781382, + "grad_norm": 0.5063263773918152, + "learning_rate": 0.0001038083838500843, + "loss": 0.2974, + "step": 37370 + }, + { + "epoch": 1.443298969072165, + "grad_norm": 1.160088062286377, + "learning_rate": 0.00010378264283048252, + "loss": 0.3003, + "step": 37380 + }, + { + "epoch": 1.4436850843661917, + "grad_norm": 0.7043284177780151, + "learning_rate": 0.00010375690181088073, + "loss": 0.3193, + "step": 37390 + }, + { + "epoch": 1.4440711996602185, + "grad_norm": 0.2916620373725891, + "learning_rate": 0.00010373116079127896, + "loss": 0.235, + "step": 37400 + }, + { + "epoch": 1.4444573149542452, + "grad_norm": 2.1940219402313232, + "learning_rate": 0.00010370541977167716, + "loss": 0.2657, + "step": 37410 + }, + { + "epoch": 1.4448434302482722, + "grad_norm": 1.849794626235962, + "learning_rate": 0.00010367967875207539, + "loss": 0.3121, + "step": 37420 + }, + { + "epoch": 1.445229545542299, + "grad_norm": 1.1139589548110962, + "learning_rate": 0.00010365393773247359, + "loss": 0.2179, + "step": 37430 + }, + { + "epoch": 1.4456156608363258, + "grad_norm": 0.917142927646637, + "learning_rate": 0.0001036281967128718, + "loss": 0.2406, + "step": 37440 + }, + { + "epoch": 1.4460017761303525, + "grad_norm": 1.6973673105239868, + "learning_rate": 0.00010360245569327001, + "loss": 0.3236, + "step": 37450 + }, + { + "epoch": 1.4463878914243793, + "grad_norm": 0.3979933559894562, + "learning_rate": 0.00010357671467366823, + "loss": 0.1477, + "step": 37460 + }, + { + "epoch": 1.446774006718406, + "grad_norm": 0.8938451409339905, + "learning_rate": 0.00010355097365406645, + "loss": 0.116, + "step": 37470 + }, + { + "epoch": 1.4471601220124328, + "grad_norm": 1.068787693977356, + "learning_rate": 0.00010352523263446465, + "loss": 0.2342, + "step": 37480 + }, + { + "epoch": 1.4475462373064598, + "grad_norm": 1.1985591650009155, + "learning_rate": 0.00010349949161486288, + "loss": 0.2423, + "step": 37490 + }, + { + "epoch": 1.4479323526004866, + "grad_norm": 1.0383973121643066, + "learning_rate": 0.00010347375059526108, + "loss": 0.3059, + "step": 37500 + }, + { + "epoch": 1.4483184678945134, + "grad_norm": 0.26652297377586365, + "learning_rate": 0.0001034480095756593, + "loss": 0.262, + "step": 37510 + }, + { + "epoch": 1.4487045831885401, + "grad_norm": 1.8498083353042603, + "learning_rate": 0.0001034222685560575, + "loss": 0.2122, + "step": 37520 + }, + { + "epoch": 1.4490906984825669, + "grad_norm": 1.2896068096160889, + "learning_rate": 0.00010339652753645572, + "loss": 0.2939, + "step": 37530 + }, + { + "epoch": 1.4494768137765937, + "grad_norm": 1.423343300819397, + "learning_rate": 0.00010337078651685395, + "loss": 0.3375, + "step": 37540 + }, + { + "epoch": 1.4498629290706204, + "grad_norm": 1.8248246908187866, + "learning_rate": 0.00010334504549725215, + "loss": 0.3534, + "step": 37550 + }, + { + "epoch": 1.4502490443646474, + "grad_norm": 2.3713393211364746, + "learning_rate": 0.00010331930447765037, + "loss": 0.2795, + "step": 37560 + }, + { + "epoch": 1.450635159658674, + "grad_norm": 2.1431849002838135, + "learning_rate": 
0.00010329356345804857, + "loss": 0.2503, + "step": 37570 + }, + { + "epoch": 1.451021274952701, + "grad_norm": 1.6521297693252563, + "learning_rate": 0.0001032678224384468, + "loss": 0.1789, + "step": 37580 + }, + { + "epoch": 1.4514073902467277, + "grad_norm": 1.5589754581451416, + "learning_rate": 0.00010324208141884501, + "loss": 0.3201, + "step": 37590 + }, + { + "epoch": 1.4517935055407545, + "grad_norm": 0.4339803159236908, + "learning_rate": 0.00010321634039924321, + "loss": 0.2964, + "step": 37600 + }, + { + "epoch": 1.4521796208347812, + "grad_norm": 0.30054792761802673, + "learning_rate": 0.00010319059937964144, + "loss": 0.3477, + "step": 37610 + }, + { + "epoch": 1.452565736128808, + "grad_norm": 2.2864038944244385, + "learning_rate": 0.00010316485836003964, + "loss": 0.2849, + "step": 37620 + }, + { + "epoch": 1.4529518514228348, + "grad_norm": 1.8392651081085205, + "learning_rate": 0.00010313911734043787, + "loss": 0.2089, + "step": 37630 + }, + { + "epoch": 1.4533379667168616, + "grad_norm": 1.0444347858428955, + "learning_rate": 0.00010311337632083607, + "loss": 0.2306, + "step": 37640 + }, + { + "epoch": 1.4537240820108885, + "grad_norm": 1.265647053718567, + "learning_rate": 0.00010308763530123429, + "loss": 0.2192, + "step": 37650 + }, + { + "epoch": 1.4541101973049153, + "grad_norm": 1.0464913845062256, + "learning_rate": 0.0001030618942816325, + "loss": 0.2857, + "step": 37660 + }, + { + "epoch": 1.454496312598942, + "grad_norm": 0.5931240320205688, + "learning_rate": 0.0001030361532620307, + "loss": 0.1751, + "step": 37670 + }, + { + "epoch": 1.4548824278929688, + "grad_norm": 2.0155346393585205, + "learning_rate": 0.00010301041224242893, + "loss": 0.4124, + "step": 37680 + }, + { + "epoch": 1.4552685431869956, + "grad_norm": 2.843345880508423, + "learning_rate": 0.00010298467122282713, + "loss": 0.2119, + "step": 37690 + }, + { + "epoch": 1.4556546584810224, + "grad_norm": 1.977612018585205, + "learning_rate": 0.00010295893020322536, + "loss": 0.271, + "step": 37700 + }, + { + "epoch": 1.4560407737750491, + "grad_norm": 1.2862237691879272, + "learning_rate": 0.00010293318918362356, + "loss": 0.4147, + "step": 37710 + }, + { + "epoch": 1.4564268890690761, + "grad_norm": 0.8235340118408203, + "learning_rate": 0.00010290744816402179, + "loss": 0.1918, + "step": 37720 + }, + { + "epoch": 1.456813004363103, + "grad_norm": 1.0351753234863281, + "learning_rate": 0.00010288170714442, + "loss": 0.2343, + "step": 37730 + }, + { + "epoch": 1.4571991196571297, + "grad_norm": 0.2567160725593567, + "learning_rate": 0.0001028559661248182, + "loss": 0.2664, + "step": 37740 + }, + { + "epoch": 1.4575852349511564, + "grad_norm": 2.7498669624328613, + "learning_rate": 0.00010283022510521643, + "loss": 0.2839, + "step": 37750 + }, + { + "epoch": 1.4579713502451832, + "grad_norm": 0.7125422358512878, + "learning_rate": 0.00010280448408561463, + "loss": 0.2411, + "step": 37760 + }, + { + "epoch": 1.45835746553921, + "grad_norm": 0.720761239528656, + "learning_rate": 0.00010277874306601285, + "loss": 0.315, + "step": 37770 + }, + { + "epoch": 1.4587435808332367, + "grad_norm": 3.188563346862793, + "learning_rate": 0.00010275300204641108, + "loss": 0.3945, + "step": 37780 + }, + { + "epoch": 1.4591296961272637, + "grad_norm": 1.23422110080719, + "learning_rate": 0.00010272726102680928, + "loss": 0.1828, + "step": 37790 + }, + { + "epoch": 1.4595158114212903, + "grad_norm": 1.1572456359863281, + "learning_rate": 0.00010270152000720749, + "loss": 0.1687, + "step": 37800 + }, + { + "epoch": 
1.4599019267153173, + "grad_norm": 0.6565262079238892, + "learning_rate": 0.00010267577898760569, + "loss": 0.3145, + "step": 37810 + }, + { + "epoch": 1.460288042009344, + "grad_norm": 0.6239646673202515, + "learning_rate": 0.00010265003796800392, + "loss": 0.2381, + "step": 37820 + }, + { + "epoch": 1.4606741573033708, + "grad_norm": 0.7578912377357483, + "learning_rate": 0.00010262429694840212, + "loss": 0.2019, + "step": 37830 + }, + { + "epoch": 1.4610602725973976, + "grad_norm": 1.0978549718856812, + "learning_rate": 0.00010259855592880035, + "loss": 0.2477, + "step": 37840 + }, + { + "epoch": 1.4614463878914243, + "grad_norm": 0.3615519404411316, + "learning_rate": 0.00010257281490919857, + "loss": 0.4241, + "step": 37850 + }, + { + "epoch": 1.461832503185451, + "grad_norm": 0.10355047881603241, + "learning_rate": 0.00010254707388959677, + "loss": 0.1181, + "step": 37860 + }, + { + "epoch": 1.4622186184794779, + "grad_norm": 0.23957425355911255, + "learning_rate": 0.00010252133286999499, + "loss": 0.1675, + "step": 37870 + }, + { + "epoch": 1.4626047337735049, + "grad_norm": 1.7581062316894531, + "learning_rate": 0.00010249559185039318, + "loss": 0.241, + "step": 37880 + }, + { + "epoch": 1.4629908490675316, + "grad_norm": 1.660989761352539, + "learning_rate": 0.00010246985083079141, + "loss": 0.2099, + "step": 37890 + }, + { + "epoch": 1.4633769643615584, + "grad_norm": 1.1359142065048218, + "learning_rate": 0.00010244410981118964, + "loss": 0.3686, + "step": 37900 + }, + { + "epoch": 1.4637630796555852, + "grad_norm": 0.9489149451255798, + "learning_rate": 0.00010241836879158784, + "loss": 0.3616, + "step": 37910 + }, + { + "epoch": 1.464149194949612, + "grad_norm": 1.5209956169128418, + "learning_rate": 0.00010239262777198607, + "loss": 0.3159, + "step": 37920 + }, + { + "epoch": 1.4645353102436387, + "grad_norm": 0.848943293094635, + "learning_rate": 0.00010236688675238427, + "loss": 0.3228, + "step": 37930 + }, + { + "epoch": 1.4649214255376655, + "grad_norm": 1.1476777791976929, + "learning_rate": 0.00010234114573278248, + "loss": 0.1873, + "step": 37940 + }, + { + "epoch": 1.4653075408316925, + "grad_norm": 1.4093862771987915, + "learning_rate": 0.00010231540471318068, + "loss": 0.5554, + "step": 37950 + }, + { + "epoch": 1.465693656125719, + "grad_norm": 0.17349161207675934, + "learning_rate": 0.0001022896636935789, + "loss": 0.2992, + "step": 37960 + }, + { + "epoch": 1.466079771419746, + "grad_norm": 0.28885993361473083, + "learning_rate": 0.00010226392267397713, + "loss": 0.2618, + "step": 37970 + }, + { + "epoch": 1.4664658867137728, + "grad_norm": 1.1087830066680908, + "learning_rate": 0.00010223818165437533, + "loss": 0.2647, + "step": 37980 + }, + { + "epoch": 1.4668520020077995, + "grad_norm": 0.08338876068592072, + "learning_rate": 0.00010221244063477356, + "loss": 0.3057, + "step": 37990 + }, + { + "epoch": 1.4672381173018263, + "grad_norm": 2.159362316131592, + "learning_rate": 0.00010218669961517176, + "loss": 0.1499, + "step": 38000 + }, + { + "epoch": 1.467624232595853, + "grad_norm": 0.8207988142967224, + "learning_rate": 0.00010216095859556997, + "loss": 0.1482, + "step": 38010 + }, + { + "epoch": 1.46801034788988, + "grad_norm": 0.5458611845970154, + "learning_rate": 0.00010213521757596818, + "loss": 0.2794, + "step": 38020 + }, + { + "epoch": 1.4683964631839066, + "grad_norm": 1.6955047845840454, + "learning_rate": 0.0001021094765563664, + "loss": 0.2627, + "step": 38030 + }, + { + "epoch": 1.4687825784779336, + "grad_norm": 0.9796440601348877, + 
"learning_rate": 0.00010208373553676463, + "loss": 0.1924, + "step": 38040 + }, + { + "epoch": 1.4691686937719604, + "grad_norm": 0.9906508326530457, + "learning_rate": 0.00010205799451716282, + "loss": 0.1597, + "step": 38050 + }, + { + "epoch": 1.4695548090659871, + "grad_norm": 1.8590656518936157, + "learning_rate": 0.00010203225349756105, + "loss": 0.2874, + "step": 38060 + }, + { + "epoch": 1.469940924360014, + "grad_norm": 1.6403672695159912, + "learning_rate": 0.00010200651247795925, + "loss": 0.2926, + "step": 38070 + }, + { + "epoch": 1.4703270396540407, + "grad_norm": 0.4410895109176636, + "learning_rate": 0.00010198077145835746, + "loss": 0.1368, + "step": 38080 + }, + { + "epoch": 1.4707131549480674, + "grad_norm": 2.4955286979675293, + "learning_rate": 0.00010195503043875569, + "loss": 0.3195, + "step": 38090 + }, + { + "epoch": 1.4710992702420942, + "grad_norm": 1.1799029111862183, + "learning_rate": 0.00010192928941915389, + "loss": 0.1867, + "step": 38100 + }, + { + "epoch": 1.4714853855361212, + "grad_norm": 0.7959389090538025, + "learning_rate": 0.00010190354839955212, + "loss": 0.2988, + "step": 38110 + }, + { + "epoch": 1.471871500830148, + "grad_norm": 2.7750720977783203, + "learning_rate": 0.00010187780737995032, + "loss": 0.1886, + "step": 38120 + }, + { + "epoch": 1.4722576161241747, + "grad_norm": 1.5834373235702515, + "learning_rate": 0.00010185206636034854, + "loss": 0.3542, + "step": 38130 + }, + { + "epoch": 1.4726437314182015, + "grad_norm": 1.9757747650146484, + "learning_rate": 0.00010182632534074674, + "loss": 0.302, + "step": 38140 + }, + { + "epoch": 1.4730298467122283, + "grad_norm": 1.1752204895019531, + "learning_rate": 0.00010180058432114497, + "loss": 0.3535, + "step": 38150 + }, + { + "epoch": 1.473415962006255, + "grad_norm": 0.3877789378166199, + "learning_rate": 0.00010177484330154318, + "loss": 0.2508, + "step": 38160 + }, + { + "epoch": 1.4738020773002818, + "grad_norm": 0.13749545812606812, + "learning_rate": 0.00010174910228194138, + "loss": 0.2141, + "step": 38170 + }, + { + "epoch": 1.4741881925943088, + "grad_norm": 1.3663641214370728, + "learning_rate": 0.00010172336126233961, + "loss": 0.3231, + "step": 38180 + }, + { + "epoch": 1.4745743078883353, + "grad_norm": 1.6267393827438354, + "learning_rate": 0.00010169762024273781, + "loss": 0.3233, + "step": 38190 + }, + { + "epoch": 1.4749604231823623, + "grad_norm": 0.2993789315223694, + "learning_rate": 0.00010167187922313604, + "loss": 0.28, + "step": 38200 + }, + { + "epoch": 1.475346538476389, + "grad_norm": 0.16693222522735596, + "learning_rate": 0.00010164613820353424, + "loss": 0.188, + "step": 38210 + }, + { + "epoch": 1.4757326537704158, + "grad_norm": 0.6939979791641235, + "learning_rate": 0.00010162039718393246, + "loss": 0.263, + "step": 38220 + }, + { + "epoch": 1.4761187690644426, + "grad_norm": 0.37910985946655273, + "learning_rate": 0.00010159465616433068, + "loss": 0.1963, + "step": 38230 + }, + { + "epoch": 1.4765048843584694, + "grad_norm": 1.782188892364502, + "learning_rate": 0.00010156891514472888, + "loss": 0.3814, + "step": 38240 + }, + { + "epoch": 1.4768909996524964, + "grad_norm": 1.159278392791748, + "learning_rate": 0.0001015431741251271, + "loss": 0.2043, + "step": 38250 + }, + { + "epoch": 1.477277114946523, + "grad_norm": 1.09486985206604, + "learning_rate": 0.0001015174331055253, + "loss": 0.2128, + "step": 38260 + }, + { + "epoch": 1.47766323024055, + "grad_norm": 0.36655205488204956, + "learning_rate": 0.00010149169208592353, + "loss": 0.298, + "step": 
38270 + }, + { + "epoch": 1.4780493455345767, + "grad_norm": 0.8908851742744446, + "learning_rate": 0.00010146595106632176, + "loss": 0.3707, + "step": 38280 + }, + { + "epoch": 1.4784354608286034, + "grad_norm": 0.251338928937912, + "learning_rate": 0.00010144021004671996, + "loss": 0.2495, + "step": 38290 + }, + { + "epoch": 1.4788215761226302, + "grad_norm": 1.0613712072372437, + "learning_rate": 0.00010141446902711817, + "loss": 0.2112, + "step": 38300 + }, + { + "epoch": 1.479207691416657, + "grad_norm": 1.459799885749817, + "learning_rate": 0.00010138872800751637, + "loss": 0.2595, + "step": 38310 + }, + { + "epoch": 1.4795938067106837, + "grad_norm": 2.6898603439331055, + "learning_rate": 0.0001013629869879146, + "loss": 0.2758, + "step": 38320 + }, + { + "epoch": 1.4799799220047105, + "grad_norm": 0.19628773629665375, + "learning_rate": 0.0001013372459683128, + "loss": 0.1843, + "step": 38330 + }, + { + "epoch": 1.4803660372987375, + "grad_norm": 2.0871078968048096, + "learning_rate": 0.00010131150494871102, + "loss": 0.1661, + "step": 38340 + }, + { + "epoch": 1.4807521525927643, + "grad_norm": 0.7689336538314819, + "learning_rate": 0.00010128576392910925, + "loss": 0.157, + "step": 38350 + }, + { + "epoch": 1.481138267886791, + "grad_norm": 1.4471644163131714, + "learning_rate": 0.00010126002290950745, + "loss": 0.2159, + "step": 38360 + }, + { + "epoch": 1.4815243831808178, + "grad_norm": 2.198559522628784, + "learning_rate": 0.00010123428188990566, + "loss": 0.5609, + "step": 38370 + }, + { + "epoch": 1.4819104984748446, + "grad_norm": 0.16012130677700043, + "learning_rate": 0.00010120854087030386, + "loss": 0.1979, + "step": 38380 + }, + { + "epoch": 1.4822966137688713, + "grad_norm": 1.0222225189208984, + "learning_rate": 0.00010118279985070209, + "loss": 0.1547, + "step": 38390 + }, + { + "epoch": 1.482682729062898, + "grad_norm": 2.7192416191101074, + "learning_rate": 0.00010115705883110032, + "loss": 0.4942, + "step": 38400 + }, + { + "epoch": 1.483068844356925, + "grad_norm": 1.887128472328186, + "learning_rate": 0.00010113131781149852, + "loss": 0.177, + "step": 38410 + }, + { + "epoch": 1.4834549596509516, + "grad_norm": 2.7628560066223145, + "learning_rate": 0.00010110557679189674, + "loss": 0.2931, + "step": 38420 + }, + { + "epoch": 1.4838410749449786, + "grad_norm": 0.3852572739124298, + "learning_rate": 0.00010107983577229494, + "loss": 0.3392, + "step": 38430 + }, + { + "epoch": 1.4842271902390054, + "grad_norm": 1.047448992729187, + "learning_rate": 0.00010105409475269316, + "loss": 0.3741, + "step": 38440 + }, + { + "epoch": 1.4846133055330322, + "grad_norm": 1.4930602312088013, + "learning_rate": 0.00010102835373309136, + "loss": 0.2564, + "step": 38450 + }, + { + "epoch": 1.484999420827059, + "grad_norm": 1.3012608289718628, + "learning_rate": 0.00010100261271348958, + "loss": 0.3376, + "step": 38460 + }, + { + "epoch": 1.4853855361210857, + "grad_norm": 2.163942337036133, + "learning_rate": 0.00010097687169388781, + "loss": 0.3548, + "step": 38470 + }, + { + "epoch": 1.4857716514151125, + "grad_norm": 1.864189624786377, + "learning_rate": 0.00010095113067428601, + "loss": 0.165, + "step": 38480 + }, + { + "epoch": 1.4861577667091392, + "grad_norm": 0.5661312341690063, + "learning_rate": 0.00010092538965468424, + "loss": 0.1764, + "step": 38490 + }, + { + "epoch": 1.4865438820031662, + "grad_norm": 0.13517481088638306, + "learning_rate": 0.00010089964863508244, + "loss": 0.5223, + "step": 38500 + }, + { + "epoch": 1.486929997297193, + "grad_norm": 
0.665143609046936, + "learning_rate": 0.00010087390761548065, + "loss": 0.1943, + "step": 38510 + }, + { + "epoch": 1.4873161125912198, + "grad_norm": 1.2759610414505005, + "learning_rate": 0.00010084816659587885, + "loss": 0.3023, + "step": 38520 + }, + { + "epoch": 1.4877022278852465, + "grad_norm": 1.3209573030471802, + "learning_rate": 0.00010082242557627708, + "loss": 0.1101, + "step": 38530 + }, + { + "epoch": 1.4880883431792733, + "grad_norm": 1.2501552104949951, + "learning_rate": 0.0001007966845566753, + "loss": 0.3931, + "step": 38540 + }, + { + "epoch": 1.4884744584733, + "grad_norm": 0.6862074732780457, + "learning_rate": 0.0001007709435370735, + "loss": 0.4093, + "step": 38550 + }, + { + "epoch": 1.4888605737673268, + "grad_norm": 1.90501070022583, + "learning_rate": 0.00010074520251747173, + "loss": 0.2553, + "step": 38560 + }, + { + "epoch": 1.4892466890613538, + "grad_norm": 1.6547000408172607, + "learning_rate": 0.00010071946149786993, + "loss": 0.1558, + "step": 38570 + }, + { + "epoch": 1.4896328043553806, + "grad_norm": 0.35097751021385193, + "learning_rate": 0.00010069372047826814, + "loss": 0.2253, + "step": 38580 + }, + { + "epoch": 1.4900189196494074, + "grad_norm": 0.15141837298870087, + "learning_rate": 0.00010066797945866637, + "loss": 0.2124, + "step": 38590 + }, + { + "epoch": 1.4904050349434341, + "grad_norm": 1.7070786952972412, + "learning_rate": 0.00010064223843906457, + "loss": 0.3181, + "step": 38600 + }, + { + "epoch": 1.490791150237461, + "grad_norm": 0.5400305390357971, + "learning_rate": 0.0001006164974194628, + "loss": 0.3203, + "step": 38610 + }, + { + "epoch": 1.4911772655314877, + "grad_norm": 1.6475050449371338, + "learning_rate": 0.000100590756399861, + "loss": 0.2965, + "step": 38620 + }, + { + "epoch": 1.4915633808255144, + "grad_norm": 0.21372176706790924, + "learning_rate": 0.00010056501538025922, + "loss": 0.1456, + "step": 38630 + }, + { + "epoch": 1.4919494961195414, + "grad_norm": 0.24179309606552124, + "learning_rate": 0.00010053927436065742, + "loss": 0.2077, + "step": 38640 + }, + { + "epoch": 1.492335611413568, + "grad_norm": 1.2079945802688599, + "learning_rate": 0.00010051353334105564, + "loss": 0.1554, + "step": 38650 + }, + { + "epoch": 1.492721726707595, + "grad_norm": 1.8915836811065674, + "learning_rate": 0.00010048779232145386, + "loss": 0.3815, + "step": 38660 + }, + { + "epoch": 1.4931078420016217, + "grad_norm": 1.8128750324249268, + "learning_rate": 0.00010046205130185206, + "loss": 0.1944, + "step": 38670 + }, + { + "epoch": 1.4934939572956485, + "grad_norm": 2.0955018997192383, + "learning_rate": 0.00010043631028225029, + "loss": 0.2468, + "step": 38680 + }, + { + "epoch": 1.4938800725896753, + "grad_norm": 3.0578064918518066, + "learning_rate": 0.00010041056926264849, + "loss": 0.2186, + "step": 38690 + }, + { + "epoch": 1.494266187883702, + "grad_norm": 0.7817699909210205, + "learning_rate": 0.00010038482824304672, + "loss": 0.1154, + "step": 38700 + }, + { + "epoch": 1.4946523031777288, + "grad_norm": 6.556485176086426, + "learning_rate": 0.00010035908722344493, + "loss": 0.4752, + "step": 38710 + }, + { + "epoch": 1.4950384184717556, + "grad_norm": 2.1970055103302, + "learning_rate": 0.00010033334620384314, + "loss": 0.3234, + "step": 38720 + }, + { + "epoch": 1.4954245337657825, + "grad_norm": 1.8929531574249268, + "learning_rate": 0.00010030760518424136, + "loss": 0.1605, + "step": 38730 + }, + { + "epoch": 1.4958106490598093, + "grad_norm": 0.3992670178413391, + "learning_rate": 0.00010028186416463956, + "loss": 
0.2417, + "step": 38740 + }, + { + "epoch": 1.496196764353836, + "grad_norm": 1.7508872747421265, + "learning_rate": 0.00010025612314503778, + "loss": 0.329, + "step": 38750 + }, + { + "epoch": 1.4965828796478629, + "grad_norm": 0.24950659275054932, + "learning_rate": 0.00010023038212543598, + "loss": 0.1159, + "step": 38760 + }, + { + "epoch": 1.4969689949418896, + "grad_norm": 1.5187748670578003, + "learning_rate": 0.00010020464110583421, + "loss": 0.1827, + "step": 38770 + }, + { + "epoch": 1.4973551102359164, + "grad_norm": 1.1223959922790527, + "learning_rate": 0.00010017890008623242, + "loss": 0.1935, + "step": 38780 + }, + { + "epoch": 1.4977412255299432, + "grad_norm": 0.9355156421661377, + "learning_rate": 0.00010015315906663064, + "loss": 0.2225, + "step": 38790 + }, + { + "epoch": 1.4981273408239701, + "grad_norm": 0.4834296405315399, + "learning_rate": 0.00010012741804702885, + "loss": 0.3077, + "step": 38800 + }, + { + "epoch": 1.498513456117997, + "grad_norm": 1.0983386039733887, + "learning_rate": 0.00010010167702742705, + "loss": 0.183, + "step": 38810 + }, + { + "epoch": 1.4988995714120237, + "grad_norm": 0.8350847959518433, + "learning_rate": 0.00010007593600782528, + "loss": 0.3972, + "step": 38820 + }, + { + "epoch": 1.4992856867060504, + "grad_norm": 0.8200152516365051, + "learning_rate": 0.00010005019498822348, + "loss": 0.2043, + "step": 38830 + }, + { + "epoch": 1.4996718020000772, + "grad_norm": 0.9136185050010681, + "learning_rate": 0.0001000244539686217, + "loss": 0.1727, + "step": 38840 + }, + { + "epoch": 1.500057917294104, + "grad_norm": 0.8466988205909729, + "learning_rate": 9.999871294901992e-05, + "loss": 0.2119, + "step": 38850 + }, + { + "epoch": 1.5004440325881307, + "grad_norm": 0.4185144305229187, + "learning_rate": 9.997297192941813e-05, + "loss": 0.4046, + "step": 38860 + }, + { + "epoch": 1.5008301478821577, + "grad_norm": 2.232264518737793, + "learning_rate": 9.994723090981634e-05, + "loss": 0.304, + "step": 38870 + }, + { + "epoch": 1.5012162631761843, + "grad_norm": 0.13785889744758606, + "learning_rate": 9.992148989021456e-05, + "loss": 0.1045, + "step": 38880 + }, + { + "epoch": 1.5016023784702113, + "grad_norm": 1.6270711421966553, + "learning_rate": 9.989574887061277e-05, + "loss": 0.2791, + "step": 38890 + }, + { + "epoch": 1.501988493764238, + "grad_norm": 0.08486157655715942, + "learning_rate": 9.987000785101098e-05, + "loss": 0.178, + "step": 38900 + }, + { + "epoch": 1.5023746090582648, + "grad_norm": 1.862197995185852, + "learning_rate": 9.98442668314092e-05, + "loss": 0.2461, + "step": 38910 + }, + { + "epoch": 1.5027607243522916, + "grad_norm": 2.752070903778076, + "learning_rate": 9.981852581180741e-05, + "loss": 0.4299, + "step": 38920 + }, + { + "epoch": 1.5031468396463183, + "grad_norm": 2.08542537689209, + "learning_rate": 9.979278479220562e-05, + "loss": 0.2898, + "step": 38930 + }, + { + "epoch": 1.5035329549403453, + "grad_norm": 0.8629382848739624, + "learning_rate": 9.976704377260384e-05, + "loss": 0.226, + "step": 38940 + }, + { + "epoch": 1.5039190702343719, + "grad_norm": 0.5178211331367493, + "learning_rate": 9.974130275300205e-05, + "loss": 0.2444, + "step": 38950 + }, + { + "epoch": 1.5043051855283989, + "grad_norm": 0.25908491015434265, + "learning_rate": 9.971556173340026e-05, + "loss": 0.1643, + "step": 38960 + }, + { + "epoch": 1.5046913008224254, + "grad_norm": 1.1818209886550903, + "learning_rate": 9.968982071379848e-05, + "loss": 0.3187, + "step": 38970 + }, + { + "epoch": 1.5050774161164524, + "grad_norm": 
0.13186976313591003, + "learning_rate": 9.966407969419669e-05, + "loss": 0.1982, + "step": 38980 + }, + { + "epoch": 1.5054635314104792, + "grad_norm": 0.18049825727939606, + "learning_rate": 9.963833867459492e-05, + "loss": 0.1288, + "step": 38990 + }, + { + "epoch": 1.505849646704506, + "grad_norm": 0.30261853337287903, + "learning_rate": 9.961259765499312e-05, + "loss": 0.1704, + "step": 39000 + }, + { + "epoch": 1.5062357619985327, + "grad_norm": 2.1437973976135254, + "learning_rate": 9.958685663539133e-05, + "loss": 0.1272, + "step": 39010 + }, + { + "epoch": 1.5066218772925595, + "grad_norm": 2.2844271659851074, + "learning_rate": 9.956111561578954e-05, + "loss": 0.1314, + "step": 39020 + }, + { + "epoch": 1.5070079925865865, + "grad_norm": 1.5845297574996948, + "learning_rate": 9.953537459618776e-05, + "loss": 0.2023, + "step": 39030 + }, + { + "epoch": 1.507394107880613, + "grad_norm": 0.7256748676300049, + "learning_rate": 9.950963357658597e-05, + "loss": 0.4165, + "step": 39040 + }, + { + "epoch": 1.50778022317464, + "grad_norm": 1.7597005367279053, + "learning_rate": 9.948389255698418e-05, + "loss": 0.4209, + "step": 39050 + }, + { + "epoch": 1.5081663384686668, + "grad_norm": 1.5487171411514282, + "learning_rate": 9.945815153738241e-05, + "loss": 0.1918, + "step": 39060 + }, + { + "epoch": 1.5085524537626935, + "grad_norm": 0.1656871736049652, + "learning_rate": 9.943241051778062e-05, + "loss": 0.3513, + "step": 39070 + }, + { + "epoch": 1.5089385690567203, + "grad_norm": 0.1451992392539978, + "learning_rate": 9.940666949817882e-05, + "loss": 0.1922, + "step": 39080 + }, + { + "epoch": 1.509324684350747, + "grad_norm": 1.1572967767715454, + "learning_rate": 9.938092847857704e-05, + "loss": 0.4263, + "step": 39090 + }, + { + "epoch": 1.509710799644774, + "grad_norm": 0.8721522092819214, + "learning_rate": 9.935518745897525e-05, + "loss": 0.3499, + "step": 39100 + }, + { + "epoch": 1.5100969149388006, + "grad_norm": 2.8486688137054443, + "learning_rate": 9.932944643937346e-05, + "loss": 0.3567, + "step": 39110 + }, + { + "epoch": 1.5104830302328276, + "grad_norm": 1.1864535808563232, + "learning_rate": 9.930370541977169e-05, + "loss": 0.2546, + "step": 39120 + }, + { + "epoch": 1.5108691455268544, + "grad_norm": 0.4636247158050537, + "learning_rate": 9.92779644001699e-05, + "loss": 0.2761, + "step": 39130 + }, + { + "epoch": 1.5112552608208811, + "grad_norm": 0.6326389908790588, + "learning_rate": 9.925222338056812e-05, + "loss": 0.252, + "step": 39140 + }, + { + "epoch": 1.511641376114908, + "grad_norm": 2.1110761165618896, + "learning_rate": 9.922648236096632e-05, + "loss": 0.5427, + "step": 39150 + }, + { + "epoch": 1.5120274914089347, + "grad_norm": 1.0927456617355347, + "learning_rate": 9.920074134136453e-05, + "loss": 0.3336, + "step": 39160 + }, + { + "epoch": 1.5124136067029617, + "grad_norm": 3.6224989891052246, + "learning_rate": 9.917500032176274e-05, + "loss": 0.3689, + "step": 39170 + }, + { + "epoch": 1.5127997219969882, + "grad_norm": 2.422492265701294, + "learning_rate": 9.914925930216097e-05, + "loss": 0.3924, + "step": 39180 + }, + { + "epoch": 1.5131858372910152, + "grad_norm": 0.22901678085327148, + "learning_rate": 9.912351828255918e-05, + "loss": 0.3082, + "step": 39190 + }, + { + "epoch": 1.5135719525850417, + "grad_norm": 0.581598162651062, + "learning_rate": 9.90977772629574e-05, + "loss": 0.3007, + "step": 39200 + }, + { + "epoch": 1.5139580678790687, + "grad_norm": 0.7196664214134216, + "learning_rate": 9.907203624335561e-05, + "loss": 0.2425, + "step": 
39210 + }, + { + "epoch": 1.5143441831730955, + "grad_norm": 2.5246760845184326, + "learning_rate": 9.904629522375381e-05, + "loss": 0.3085, + "step": 39220 + }, + { + "epoch": 1.5147302984671223, + "grad_norm": 1.8397210836410522, + "learning_rate": 9.902055420415202e-05, + "loss": 0.2388, + "step": 39230 + }, + { + "epoch": 1.515116413761149, + "grad_norm": 0.5940410494804382, + "learning_rate": 9.899481318455025e-05, + "loss": 0.1184, + "step": 39240 + }, + { + "epoch": 1.5155025290551758, + "grad_norm": 1.5379250049591064, + "learning_rate": 9.896907216494846e-05, + "loss": 0.3253, + "step": 39250 + }, + { + "epoch": 1.5158886443492028, + "grad_norm": 0.8007088303565979, + "learning_rate": 9.894333114534668e-05, + "loss": 0.3057, + "step": 39260 + }, + { + "epoch": 1.5162747596432293, + "grad_norm": 0.9321600198745728, + "learning_rate": 9.891759012574489e-05, + "loss": 0.3249, + "step": 39270 + }, + { + "epoch": 1.5166608749372563, + "grad_norm": 1.5519977807998657, + "learning_rate": 9.88918491061431e-05, + "loss": 0.3368, + "step": 39280 + }, + { + "epoch": 1.517046990231283, + "grad_norm": 0.37695613503456116, + "learning_rate": 9.88661080865413e-05, + "loss": 0.1305, + "step": 39290 + }, + { + "epoch": 1.5174331055253099, + "grad_norm": 1.9956984519958496, + "learning_rate": 9.884036706693952e-05, + "loss": 0.4854, + "step": 39300 + }, + { + "epoch": 1.5178192208193366, + "grad_norm": 1.6110823154449463, + "learning_rate": 9.881462604733774e-05, + "loss": 0.2504, + "step": 39310 + }, + { + "epoch": 1.5182053361133634, + "grad_norm": 0.41702982783317566, + "learning_rate": 9.878888502773596e-05, + "loss": 0.1532, + "step": 39320 + }, + { + "epoch": 1.5185914514073904, + "grad_norm": 2.3595950603485107, + "learning_rate": 9.876314400813417e-05, + "loss": 0.309, + "step": 39330 + }, + { + "epoch": 1.518977566701417, + "grad_norm": 1.1045889854431152, + "learning_rate": 9.873740298853238e-05, + "loss": 0.2858, + "step": 39340 + }, + { + "epoch": 1.519363681995444, + "grad_norm": 1.4641762971878052, + "learning_rate": 9.87116619689306e-05, + "loss": 0.3159, + "step": 39350 + }, + { + "epoch": 1.5197497972894707, + "grad_norm": 1.0977380275726318, + "learning_rate": 9.868592094932881e-05, + "loss": 0.229, + "step": 39360 + }, + { + "epoch": 1.5201359125834975, + "grad_norm": 0.5620018839836121, + "learning_rate": 9.866017992972702e-05, + "loss": 0.2642, + "step": 39370 + }, + { + "epoch": 1.5205220278775242, + "grad_norm": 0.36996108293533325, + "learning_rate": 9.863443891012524e-05, + "loss": 0.2314, + "step": 39380 + }, + { + "epoch": 1.520908143171551, + "grad_norm": 0.9804339408874512, + "learning_rate": 9.860869789052345e-05, + "loss": 0.2399, + "step": 39390 + }, + { + "epoch": 1.521294258465578, + "grad_norm": 0.4157778024673462, + "learning_rate": 9.858295687092166e-05, + "loss": 0.3006, + "step": 39400 + }, + { + "epoch": 1.5216803737596045, + "grad_norm": 0.5548539161682129, + "learning_rate": 9.855721585131988e-05, + "loss": 0.1816, + "step": 39410 + }, + { + "epoch": 1.5220664890536315, + "grad_norm": 0.9476989507675171, + "learning_rate": 9.853147483171809e-05, + "loss": 0.3943, + "step": 39420 + }, + { + "epoch": 1.522452604347658, + "grad_norm": 0.5183500647544861, + "learning_rate": 9.85057338121163e-05, + "loss": 0.1629, + "step": 39430 + }, + { + "epoch": 1.522838719641685, + "grad_norm": 1.4146567583084106, + "learning_rate": 9.847999279251452e-05, + "loss": 0.3829, + "step": 39440 + }, + { + "epoch": 1.5232248349357118, + "grad_norm": 2.4880552291870117, + 
"learning_rate": 9.845425177291273e-05, + "loss": 0.3052, + "step": 39450 + }, + { + "epoch": 1.5236109502297386, + "grad_norm": 0.43657195568084717, + "learning_rate": 9.842851075331094e-05, + "loss": 0.1691, + "step": 39460 + }, + { + "epoch": 1.5239970655237653, + "grad_norm": 0.13798825442790985, + "learning_rate": 9.840276973370916e-05, + "loss": 0.3217, + "step": 39470 + }, + { + "epoch": 1.5243831808177921, + "grad_norm": 0.8712138533592224, + "learning_rate": 9.837702871410737e-05, + "loss": 0.2702, + "step": 39480 + }, + { + "epoch": 1.524769296111819, + "grad_norm": 1.155957579612732, + "learning_rate": 9.83512876945056e-05, + "loss": 0.2693, + "step": 39490 + }, + { + "epoch": 1.5251554114058457, + "grad_norm": 1.194615364074707, + "learning_rate": 9.83255466749038e-05, + "loss": 0.1541, + "step": 39500 + }, + { + "epoch": 1.5255415266998726, + "grad_norm": 1.8287533521652222, + "learning_rate": 9.829980565530201e-05, + "loss": 0.222, + "step": 39510 + }, + { + "epoch": 1.5259276419938994, + "grad_norm": 0.5741322636604309, + "learning_rate": 9.827406463570022e-05, + "loss": 0.2348, + "step": 39520 + }, + { + "epoch": 1.5263137572879262, + "grad_norm": 0.21659214794635773, + "learning_rate": 9.824832361609844e-05, + "loss": 0.2698, + "step": 39530 + }, + { + "epoch": 1.526699872581953, + "grad_norm": 0.953101396560669, + "learning_rate": 9.822258259649665e-05, + "loss": 0.1222, + "step": 39540 + }, + { + "epoch": 1.5270859878759797, + "grad_norm": 2.764655113220215, + "learning_rate": 9.819684157689486e-05, + "loss": 0.4735, + "step": 39550 + }, + { + "epoch": 1.5274721031700067, + "grad_norm": 0.8794540166854858, + "learning_rate": 9.817110055729309e-05, + "loss": 0.1588, + "step": 39560 + }, + { + "epoch": 1.5278582184640332, + "grad_norm": 1.0971317291259766, + "learning_rate": 9.814535953769129e-05, + "loss": 0.302, + "step": 39570 + }, + { + "epoch": 1.5282443337580602, + "grad_norm": 0.6677056550979614, + "learning_rate": 9.81196185180895e-05, + "loss": 0.3318, + "step": 39580 + }, + { + "epoch": 1.528630449052087, + "grad_norm": 1.276684045791626, + "learning_rate": 9.809387749848772e-05, + "loss": 0.2329, + "step": 39590 + }, + { + "epoch": 1.5290165643461138, + "grad_norm": 0.9192230701446533, + "learning_rate": 9.806813647888593e-05, + "loss": 0.3657, + "step": 39600 + }, + { + "epoch": 1.5294026796401405, + "grad_norm": 1.15361750125885, + "learning_rate": 9.804239545928414e-05, + "loss": 0.2462, + "step": 39610 + }, + { + "epoch": 1.5297887949341673, + "grad_norm": 1.0608477592468262, + "learning_rate": 9.801665443968237e-05, + "loss": 0.1601, + "step": 39620 + }, + { + "epoch": 1.5301749102281943, + "grad_norm": 0.07210031896829605, + "learning_rate": 9.799091342008058e-05, + "loss": 0.2454, + "step": 39630 + }, + { + "epoch": 1.5305610255222208, + "grad_norm": 0.9941250681877136, + "learning_rate": 9.796517240047878e-05, + "loss": 0.2783, + "step": 39640 + }, + { + "epoch": 1.5309471408162478, + "grad_norm": 1.3414831161499023, + "learning_rate": 9.7939431380877e-05, + "loss": 0.2342, + "step": 39650 + }, + { + "epoch": 1.5313332561102744, + "grad_norm": 1.5854885578155518, + "learning_rate": 9.791369036127521e-05, + "loss": 0.3444, + "step": 39660 + }, + { + "epoch": 1.5317193714043014, + "grad_norm": 1.068955659866333, + "learning_rate": 9.788794934167342e-05, + "loss": 0.2587, + "step": 39670 + }, + { + "epoch": 1.5321054866983281, + "grad_norm": 4.630382537841797, + "learning_rate": 9.786220832207165e-05, + "loss": 0.2821, + "step": 39680 + }, + { + "epoch": 
1.532491601992355, + "grad_norm": 1.2920769453048706, + "learning_rate": 9.783646730246986e-05, + "loss": 0.2169, + "step": 39690 + }, + { + "epoch": 1.5328777172863817, + "grad_norm": 1.6936739683151245, + "learning_rate": 9.781072628286808e-05, + "loss": 0.2538, + "step": 39700 + }, + { + "epoch": 1.5332638325804084, + "grad_norm": 4.246237277984619, + "learning_rate": 9.778498526326629e-05, + "loss": 0.305, + "step": 39710 + }, + { + "epoch": 1.5336499478744354, + "grad_norm": 2.638601779937744, + "learning_rate": 9.775924424366449e-05, + "loss": 0.2168, + "step": 39720 + }, + { + "epoch": 1.534036063168462, + "grad_norm": 1.4180443286895752, + "learning_rate": 9.77335032240627e-05, + "loss": 0.2139, + "step": 39730 + }, + { + "epoch": 1.534422178462489, + "grad_norm": 1.251378059387207, + "learning_rate": 9.770776220446093e-05, + "loss": 0.2714, + "step": 39740 + }, + { + "epoch": 1.5348082937565157, + "grad_norm": 0.5204187035560608, + "learning_rate": 9.768202118485914e-05, + "loss": 0.2375, + "step": 39750 + }, + { + "epoch": 1.5351944090505425, + "grad_norm": 0.4135078489780426, + "learning_rate": 9.765628016525736e-05, + "loss": 0.442, + "step": 39760 + }, + { + "epoch": 1.5355805243445693, + "grad_norm": 1.9822927713394165, + "learning_rate": 9.763053914565557e-05, + "loss": 0.2238, + "step": 39770 + }, + { + "epoch": 1.535966639638596, + "grad_norm": 1.6851792335510254, + "learning_rate": 9.760479812605378e-05, + "loss": 0.4886, + "step": 39780 + }, + { + "epoch": 1.536352754932623, + "grad_norm": 1.9185349941253662, + "learning_rate": 9.757905710645198e-05, + "loss": 0.4167, + "step": 39790 + }, + { + "epoch": 1.5367388702266496, + "grad_norm": 0.2348870486021042, + "learning_rate": 9.75533160868502e-05, + "loss": 0.1849, + "step": 39800 + }, + { + "epoch": 1.5371249855206766, + "grad_norm": 0.2750287652015686, + "learning_rate": 9.752757506724842e-05, + "loss": 0.4298, + "step": 39810 + }, + { + "epoch": 1.5375111008147033, + "grad_norm": 0.28703558444976807, + "learning_rate": 9.750183404764664e-05, + "loss": 0.1431, + "step": 39820 + }, + { + "epoch": 1.53789721610873, + "grad_norm": 0.743290364742279, + "learning_rate": 9.747609302804485e-05, + "loss": 0.1993, + "step": 39830 + }, + { + "epoch": 1.5382833314027569, + "grad_norm": 0.8678677082061768, + "learning_rate": 9.745035200844306e-05, + "loss": 0.1695, + "step": 39840 + }, + { + "epoch": 1.5386694466967836, + "grad_norm": 0.18160143494606018, + "learning_rate": 9.742461098884128e-05, + "loss": 0.2615, + "step": 39850 + }, + { + "epoch": 1.5390555619908106, + "grad_norm": 0.08404600620269775, + "learning_rate": 9.739886996923947e-05, + "loss": 0.2523, + "step": 39860 + }, + { + "epoch": 1.5394416772848372, + "grad_norm": 3.393118381500244, + "learning_rate": 9.73731289496377e-05, + "loss": 0.3292, + "step": 39870 + }, + { + "epoch": 1.5398277925788642, + "grad_norm": 0.3148004412651062, + "learning_rate": 9.734738793003591e-05, + "loss": 0.0808, + "step": 39880 + }, + { + "epoch": 1.5402139078728907, + "grad_norm": 0.8423801064491272, + "learning_rate": 9.732164691043413e-05, + "loss": 0.1088, + "step": 39890 + }, + { + "epoch": 1.5406000231669177, + "grad_norm": 2.407147169113159, + "learning_rate": 9.729590589083234e-05, + "loss": 0.286, + "step": 39900 + }, + { + "epoch": 1.5409861384609445, + "grad_norm": 0.8560749292373657, + "learning_rate": 9.727016487123055e-05, + "loss": 0.1674, + "step": 39910 + }, + { + "epoch": 1.5413722537549712, + "grad_norm": 0.9724945425987244, + "learning_rate": 9.724442385162877e-05, 
+ "loss": 0.232, + "step": 39920 + }, + { + "epoch": 1.541758369048998, + "grad_norm": 1.6911234855651855, + "learning_rate": 9.721868283202698e-05, + "loss": 0.3182, + "step": 39930 + }, + { + "epoch": 1.5421444843430248, + "grad_norm": 0.8703460693359375, + "learning_rate": 9.71929418124252e-05, + "loss": 0.3565, + "step": 39940 + }, + { + "epoch": 1.5425305996370517, + "grad_norm": 1.4052613973617554, + "learning_rate": 9.716720079282341e-05, + "loss": 0.2037, + "step": 39950 + }, + { + "epoch": 1.5429167149310783, + "grad_norm": 0.2802957594394684, + "learning_rate": 9.714145977322162e-05, + "loss": 0.2596, + "step": 39960 + }, + { + "epoch": 1.5433028302251053, + "grad_norm": 0.22114449739456177, + "learning_rate": 9.711571875361983e-05, + "loss": 0.2302, + "step": 39970 + }, + { + "epoch": 1.543688945519132, + "grad_norm": 0.8095982074737549, + "learning_rate": 9.708997773401805e-05, + "loss": 0.2428, + "step": 39980 + }, + { + "epoch": 1.5440750608131588, + "grad_norm": 1.0990866422653198, + "learning_rate": 9.706423671441626e-05, + "loss": 0.3669, + "step": 39990 + }, + { + "epoch": 1.5444611761071856, + "grad_norm": 0.8309730291366577, + "learning_rate": 9.703849569481447e-05, + "loss": 0.2038, + "step": 40000 + }, + { + "epoch": 1.5448472914012124, + "grad_norm": 2.09492826461792, + "learning_rate": 9.701275467521269e-05, + "loss": 0.2934, + "step": 40010 + }, + { + "epoch": 1.5452334066952393, + "grad_norm": 0.33550217747688293, + "learning_rate": 9.69870136556109e-05, + "loss": 0.1582, + "step": 40020 + }, + { + "epoch": 1.5456195219892659, + "grad_norm": 1.6839581727981567, + "learning_rate": 9.696127263600911e-05, + "loss": 0.2084, + "step": 40030 + }, + { + "epoch": 1.5460056372832929, + "grad_norm": 1.293013095855713, + "learning_rate": 9.693553161640733e-05, + "loss": 0.1871, + "step": 40040 + }, + { + "epoch": 1.5463917525773194, + "grad_norm": 0.08080088347196579, + "learning_rate": 9.690979059680554e-05, + "loss": 0.4942, + "step": 40050 + }, + { + "epoch": 1.5467778678713464, + "grad_norm": 1.7121747732162476, + "learning_rate": 9.688404957720377e-05, + "loss": 0.4819, + "step": 40060 + }, + { + "epoch": 1.5471639831653732, + "grad_norm": 0.6426690816879272, + "learning_rate": 9.685830855760197e-05, + "loss": 0.2393, + "step": 40070 + }, + { + "epoch": 1.5475500984594, + "grad_norm": 1.5436782836914062, + "learning_rate": 9.683256753800018e-05, + "loss": 0.2666, + "step": 40080 + }, + { + "epoch": 1.547936213753427, + "grad_norm": 1.9316864013671875, + "learning_rate": 9.68068265183984e-05, + "loss": 0.1629, + "step": 40090 + }, + { + "epoch": 1.5483223290474535, + "grad_norm": 0.7503604292869568, + "learning_rate": 9.678108549879661e-05, + "loss": 0.2038, + "step": 40100 + }, + { + "epoch": 1.5487084443414805, + "grad_norm": 0.9826010465621948, + "learning_rate": 9.675534447919482e-05, + "loss": 0.1219, + "step": 40110 + }, + { + "epoch": 1.549094559635507, + "grad_norm": 1.6828583478927612, + "learning_rate": 9.672960345959305e-05, + "loss": 0.2283, + "step": 40120 + }, + { + "epoch": 1.549480674929534, + "grad_norm": 1.688306212425232, + "learning_rate": 9.670386243999126e-05, + "loss": 0.1785, + "step": 40130 + }, + { + "epoch": 1.5498667902235608, + "grad_norm": 1.6113176345825195, + "learning_rate": 9.667812142038946e-05, + "loss": 0.4171, + "step": 40140 + }, + { + "epoch": 1.5502529055175875, + "grad_norm": 0.33999955654144287, + "learning_rate": 9.665238040078767e-05, + "loss": 0.1528, + "step": 40150 + }, + { + "epoch": 1.5506390208116143, + "grad_norm": 
0.7666870355606079, + "learning_rate": 9.662663938118589e-05, + "loss": 0.2258, + "step": 40160 + }, + { + "epoch": 1.551025136105641, + "grad_norm": 1.123090386390686, + "learning_rate": 9.66008983615841e-05, + "loss": 0.207, + "step": 40170 + }, + { + "epoch": 1.551411251399668, + "grad_norm": 1.4472588300704956, + "learning_rate": 9.657515734198233e-05, + "loss": 0.2083, + "step": 40180 + }, + { + "epoch": 1.5517973666936946, + "grad_norm": 0.6818589568138123, + "learning_rate": 9.654941632238054e-05, + "loss": 0.1821, + "step": 40190 + }, + { + "epoch": 1.5521834819877216, + "grad_norm": 2.31847882270813, + "learning_rate": 9.652367530277875e-05, + "loss": 0.5128, + "step": 40200 + }, + { + "epoch": 1.5525695972817484, + "grad_norm": 2.492560386657715, + "learning_rate": 9.649793428317695e-05, + "loss": 0.2461, + "step": 40210 + }, + { + "epoch": 1.5529557125757751, + "grad_norm": 0.6917121410369873, + "learning_rate": 9.647219326357517e-05, + "loss": 0.2829, + "step": 40220 + }, + { + "epoch": 1.553341827869802, + "grad_norm": 1.1944900751113892, + "learning_rate": 9.644645224397338e-05, + "loss": 0.2324, + "step": 40230 + }, + { + "epoch": 1.5537279431638287, + "grad_norm": 0.12343896180391312, + "learning_rate": 9.642071122437161e-05, + "loss": 0.1258, + "step": 40240 + }, + { + "epoch": 1.5541140584578557, + "grad_norm": 1.8493744134902954, + "learning_rate": 9.639497020476982e-05, + "loss": 0.2707, + "step": 40250 + }, + { + "epoch": 1.5545001737518822, + "grad_norm": 1.4696533679962158, + "learning_rate": 9.636922918516803e-05, + "loss": 0.199, + "step": 40260 + }, + { + "epoch": 1.5548862890459092, + "grad_norm": 0.7716092467308044, + "learning_rate": 9.634348816556625e-05, + "loss": 0.2869, + "step": 40270 + }, + { + "epoch": 1.5552724043399357, + "grad_norm": 1.3153057098388672, + "learning_rate": 9.631774714596446e-05, + "loss": 0.2164, + "step": 40280 + }, + { + "epoch": 1.5556585196339627, + "grad_norm": 1.2615609169006348, + "learning_rate": 9.629200612636266e-05, + "loss": 0.3418, + "step": 40290 + }, + { + "epoch": 1.5560446349279895, + "grad_norm": 1.7999435663223267, + "learning_rate": 9.626626510676089e-05, + "loss": 0.3062, + "step": 40300 + }, + { + "epoch": 1.5564307502220163, + "grad_norm": 1.5992902517318726, + "learning_rate": 9.62405240871591e-05, + "loss": 0.3037, + "step": 40310 + }, + { + "epoch": 1.556816865516043, + "grad_norm": 0.7515442967414856, + "learning_rate": 9.621478306755731e-05, + "loss": 0.2071, + "step": 40320 + }, + { + "epoch": 1.5572029808100698, + "grad_norm": 2.5413167476654053, + "learning_rate": 9.618904204795553e-05, + "loss": 0.569, + "step": 40330 + }, + { + "epoch": 1.5575890961040968, + "grad_norm": 2.303255319595337, + "learning_rate": 9.616330102835374e-05, + "loss": 0.201, + "step": 40340 + }, + { + "epoch": 1.5579752113981233, + "grad_norm": 1.0700573921203613, + "learning_rate": 9.613756000875195e-05, + "loss": 0.1675, + "step": 40350 + }, + { + "epoch": 1.5583613266921503, + "grad_norm": 1.3028898239135742, + "learning_rate": 9.611181898915015e-05, + "loss": 0.3423, + "step": 40360 + }, + { + "epoch": 1.558747441986177, + "grad_norm": 2.0213449001312256, + "learning_rate": 9.608607796954838e-05, + "loss": 0.161, + "step": 40370 + }, + { + "epoch": 1.5591335572802039, + "grad_norm": 0.15942837297916412, + "learning_rate": 9.60603369499466e-05, + "loss": 0.2067, + "step": 40380 + }, + { + "epoch": 1.5595196725742306, + "grad_norm": 0.8567324280738831, + "learning_rate": 9.603459593034481e-05, + "loss": 0.1156, + "step": 40390 + 
}, + { + "epoch": 1.5599057878682574, + "grad_norm": 0.598947286605835, + "learning_rate": 9.600885491074302e-05, + "loss": 0.2741, + "step": 40400 + }, + { + "epoch": 1.5602919031622844, + "grad_norm": 0.41864535212516785, + "learning_rate": 9.598311389114123e-05, + "loss": 0.0979, + "step": 40410 + }, + { + "epoch": 1.560678018456311, + "grad_norm": 1.304883599281311, + "learning_rate": 9.595737287153945e-05, + "loss": 0.1525, + "step": 40420 + }, + { + "epoch": 1.561064133750338, + "grad_norm": 1.666935682296753, + "learning_rate": 9.593163185193766e-05, + "loss": 0.3392, + "step": 40430 + }, + { + "epoch": 1.5614502490443647, + "grad_norm": 0.44640687108039856, + "learning_rate": 9.590589083233587e-05, + "loss": 0.1946, + "step": 40440 + }, + { + "epoch": 1.5618363643383915, + "grad_norm": 0.8123475313186646, + "learning_rate": 9.588014981273409e-05, + "loss": 0.2985, + "step": 40450 + }, + { + "epoch": 1.5622224796324182, + "grad_norm": 1.7682442665100098, + "learning_rate": 9.58544087931323e-05, + "loss": 0.215, + "step": 40460 + }, + { + "epoch": 1.562608594926445, + "grad_norm": 0.8545176982879639, + "learning_rate": 9.582866777353051e-05, + "loss": 0.2513, + "step": 40470 + }, + { + "epoch": 1.562994710220472, + "grad_norm": 0.10042224079370499, + "learning_rate": 9.580292675392873e-05, + "loss": 0.1792, + "step": 40480 + }, + { + "epoch": 1.5633808255144985, + "grad_norm": 1.0059372186660767, + "learning_rate": 9.577718573432694e-05, + "loss": 0.2236, + "step": 40490 + }, + { + "epoch": 1.5637669408085255, + "grad_norm": 1.2795478105545044, + "learning_rate": 9.575144471472515e-05, + "loss": 0.2728, + "step": 40500 + }, + { + "epoch": 1.564153056102552, + "grad_norm": 1.099183201789856, + "learning_rate": 9.572570369512337e-05, + "loss": 0.2855, + "step": 40510 + }, + { + "epoch": 1.564539171396579, + "grad_norm": 1.0743390321731567, + "learning_rate": 9.569996267552158e-05, + "loss": 0.2439, + "step": 40520 + }, + { + "epoch": 1.5649252866906058, + "grad_norm": 1.1840991973876953, + "learning_rate": 9.56742216559198e-05, + "loss": 0.1416, + "step": 40530 + }, + { + "epoch": 1.5653114019846326, + "grad_norm": 0.4638634920120239, + "learning_rate": 9.564848063631801e-05, + "loss": 0.254, + "step": 40540 + }, + { + "epoch": 1.5656975172786594, + "grad_norm": 3.084916830062866, + "learning_rate": 9.562273961671623e-05, + "loss": 0.2098, + "step": 40550 + }, + { + "epoch": 1.5660836325726861, + "grad_norm": 0.666347324848175, + "learning_rate": 9.559699859711443e-05, + "loss": 0.3649, + "step": 40560 + }, + { + "epoch": 1.5664697478667131, + "grad_norm": 1.1770634651184082, + "learning_rate": 9.557125757751265e-05, + "loss": 0.2069, + "step": 40570 + }, + { + "epoch": 1.5668558631607397, + "grad_norm": 0.9030371308326721, + "learning_rate": 9.554551655791086e-05, + "loss": 0.1174, + "step": 40580 + }, + { + "epoch": 1.5672419784547666, + "grad_norm": 0.053270868957042694, + "learning_rate": 9.551977553830907e-05, + "loss": 0.1304, + "step": 40590 + }, + { + "epoch": 1.5676280937487934, + "grad_norm": 2.4098777770996094, + "learning_rate": 9.549403451870729e-05, + "loss": 0.2654, + "step": 40600 + }, + { + "epoch": 1.5680142090428202, + "grad_norm": 0.7078404426574707, + "learning_rate": 9.54682934991055e-05, + "loss": 0.2203, + "step": 40610 + }, + { + "epoch": 1.568400324336847, + "grad_norm": 1.1814978122711182, + "learning_rate": 9.544255247950373e-05, + "loss": 0.2986, + "step": 40620 + }, + { + "epoch": 1.5687864396308737, + "grad_norm": 2.29439377784729, + "learning_rate": 
9.541681145990193e-05, + "loss": 0.3143, + "step": 40630 + }, + { + "epoch": 1.5691725549249007, + "grad_norm": 0.07921203970909119, + "learning_rate": 9.539107044030014e-05, + "loss": 0.151, + "step": 40640 + }, + { + "epoch": 1.5695586702189273, + "grad_norm": 0.2058558166027069, + "learning_rate": 9.536532942069835e-05, + "loss": 0.2169, + "step": 40650 + }, + { + "epoch": 1.5699447855129542, + "grad_norm": 0.4650769531726837, + "learning_rate": 9.533958840109657e-05, + "loss": 0.136, + "step": 40660 + }, + { + "epoch": 1.570330900806981, + "grad_norm": 1.0603867769241333, + "learning_rate": 9.531384738149478e-05, + "loss": 0.2396, + "step": 40670 + }, + { + "epoch": 1.5707170161010078, + "grad_norm": 2.279155731201172, + "learning_rate": 9.528810636189301e-05, + "loss": 0.2679, + "step": 40680 + }, + { + "epoch": 1.5711031313950345, + "grad_norm": 1.8186841011047363, + "learning_rate": 9.526236534229122e-05, + "loss": 0.2592, + "step": 40690 + }, + { + "epoch": 1.5714892466890613, + "grad_norm": 1.9804840087890625, + "learning_rate": 9.523662432268943e-05, + "loss": 0.3184, + "step": 40700 + }, + { + "epoch": 1.5718753619830883, + "grad_norm": 2.3474323749542236, + "learning_rate": 9.521088330308763e-05, + "loss": 0.1892, + "step": 40710 + }, + { + "epoch": 1.5722614772771148, + "grad_norm": 0.5251644253730774, + "learning_rate": 9.518514228348585e-05, + "loss": 0.1306, + "step": 40720 + }, + { + "epoch": 1.5726475925711418, + "grad_norm": 0.9444900155067444, + "learning_rate": 9.515940126388406e-05, + "loss": 0.1888, + "step": 40730 + }, + { + "epoch": 1.5730337078651684, + "grad_norm": 1.1808644533157349, + "learning_rate": 9.513366024428229e-05, + "loss": 0.1249, + "step": 40740 + }, + { + "epoch": 1.5734198231591954, + "grad_norm": 2.716831684112549, + "learning_rate": 9.51079192246805e-05, + "loss": 0.2544, + "step": 40750 + }, + { + "epoch": 1.5738059384532221, + "grad_norm": 1.257308006286621, + "learning_rate": 9.508217820507871e-05, + "loss": 0.1292, + "step": 40760 + }, + { + "epoch": 1.574192053747249, + "grad_norm": 2.080162525177002, + "learning_rate": 9.505643718547693e-05, + "loss": 0.3814, + "step": 40770 + }, + { + "epoch": 1.5745781690412757, + "grad_norm": 1.107872486114502, + "learning_rate": 9.503069616587513e-05, + "loss": 0.2074, + "step": 40780 + }, + { + "epoch": 1.5749642843353024, + "grad_norm": 1.0006227493286133, + "learning_rate": 9.500495514627334e-05, + "loss": 0.2384, + "step": 40790 + }, + { + "epoch": 1.5753503996293294, + "grad_norm": 0.3345160484313965, + "learning_rate": 9.497921412667157e-05, + "loss": 0.181, + "step": 40800 + }, + { + "epoch": 1.575736514923356, + "grad_norm": 1.2941926717758179, + "learning_rate": 9.495347310706978e-05, + "loss": 0.2312, + "step": 40810 + }, + { + "epoch": 1.576122630217383, + "grad_norm": 1.3869580030441284, + "learning_rate": 9.4927732087468e-05, + "loss": 0.2107, + "step": 40820 + }, + { + "epoch": 1.5765087455114097, + "grad_norm": 1.9948787689208984, + "learning_rate": 9.49019910678662e-05, + "loss": 0.2736, + "step": 40830 + }, + { + "epoch": 1.5768948608054365, + "grad_norm": 0.22069145739078522, + "learning_rate": 9.487625004826442e-05, + "loss": 0.3173, + "step": 40840 + }, + { + "epoch": 1.5772809760994633, + "grad_norm": 0.61549311876297, + "learning_rate": 9.485050902866262e-05, + "loss": 0.1437, + "step": 40850 + }, + { + "epoch": 1.57766709139349, + "grad_norm": 0.20520836114883423, + "learning_rate": 9.482476800906083e-05, + "loss": 0.1247, + "step": 40860 + }, + { + "epoch": 1.578053206687517, + 
"grad_norm": 2.1745588779449463, + "learning_rate": 9.479902698945906e-05, + "loss": 0.366, + "step": 40870 + }, + { + "epoch": 1.5784393219815436, + "grad_norm": 1.1127387285232544, + "learning_rate": 9.477328596985727e-05, + "loss": 0.1202, + "step": 40880 + }, + { + "epoch": 1.5788254372755706, + "grad_norm": 0.598258376121521, + "learning_rate": 9.474754495025549e-05, + "loss": 0.3917, + "step": 40890 + }, + { + "epoch": 1.5792115525695973, + "grad_norm": 1.0376511812210083, + "learning_rate": 9.47218039306537e-05, + "loss": 0.1372, + "step": 40900 + }, + { + "epoch": 1.579597667863624, + "grad_norm": 1.7164186239242554, + "learning_rate": 9.469606291105191e-05, + "loss": 0.3143, + "step": 40910 + }, + { + "epoch": 1.5799837831576509, + "grad_norm": 1.7929835319519043, + "learning_rate": 9.467032189145013e-05, + "loss": 0.2864, + "step": 40920 + }, + { + "epoch": 1.5803698984516776, + "grad_norm": 0.551222026348114, + "learning_rate": 9.464458087184834e-05, + "loss": 0.1173, + "step": 40930 + }, + { + "epoch": 1.5807560137457046, + "grad_norm": 1.5236003398895264, + "learning_rate": 9.461883985224655e-05, + "loss": 0.2031, + "step": 40940 + }, + { + "epoch": 1.5811421290397312, + "grad_norm": 0.690719723701477, + "learning_rate": 9.459309883264477e-05, + "loss": 0.1913, + "step": 40950 + }, + { + "epoch": 1.5815282443337582, + "grad_norm": 0.4199884533882141, + "learning_rate": 9.456735781304298e-05, + "loss": 0.279, + "step": 40960 + }, + { + "epoch": 1.5819143596277847, + "grad_norm": 0.7393648028373718, + "learning_rate": 9.454161679344119e-05, + "loss": 0.2315, + "step": 40970 + }, + { + "epoch": 1.5823004749218117, + "grad_norm": 0.09372472018003464, + "learning_rate": 9.45158757738394e-05, + "loss": 0.2585, + "step": 40980 + }, + { + "epoch": 1.5826865902158385, + "grad_norm": 0.37245264649391174, + "learning_rate": 9.449013475423762e-05, + "loss": 0.239, + "step": 40990 + }, + { + "epoch": 1.5830727055098652, + "grad_norm": 1.8006244897842407, + "learning_rate": 9.446439373463583e-05, + "loss": 0.3497, + "step": 41000 + }, + { + "epoch": 1.583458820803892, + "grad_norm": 1.4421888589859009, + "learning_rate": 9.443865271503405e-05, + "loss": 0.2014, + "step": 41010 + }, + { + "epoch": 1.5838449360979188, + "grad_norm": 1.8858803510665894, + "learning_rate": 9.441291169543226e-05, + "loss": 0.1816, + "step": 41020 + }, + { + "epoch": 1.5842310513919458, + "grad_norm": 1.108573317527771, + "learning_rate": 9.438717067583047e-05, + "loss": 0.1869, + "step": 41030 + }, + { + "epoch": 1.5846171666859723, + "grad_norm": 0.34841394424438477, + "learning_rate": 9.436142965622869e-05, + "loss": 0.3034, + "step": 41040 + }, + { + "epoch": 1.5850032819799993, + "grad_norm": 7.2845964431762695, + "learning_rate": 9.433568863662691e-05, + "loss": 0.2659, + "step": 41050 + }, + { + "epoch": 1.585389397274026, + "grad_norm": 0.4753032922744751, + "learning_rate": 9.430994761702511e-05, + "loss": 0.2539, + "step": 41060 + }, + { + "epoch": 1.5857755125680528, + "grad_norm": 1.590050458908081, + "learning_rate": 9.428420659742333e-05, + "loss": 0.2373, + "step": 41070 + }, + { + "epoch": 1.5861616278620796, + "grad_norm": 0.6979599595069885, + "learning_rate": 9.425846557782154e-05, + "loss": 0.1931, + "step": 41080 + }, + { + "epoch": 1.5865477431561064, + "grad_norm": 0.6384108066558838, + "learning_rate": 9.423272455821975e-05, + "loss": 0.2922, + "step": 41090 + }, + { + "epoch": 1.5869338584501334, + "grad_norm": 0.4752826392650604, + "learning_rate": 9.420698353861797e-05, + "loss": 
0.2616, + "step": 41100 + }, + { + "epoch": 1.58731997374416, + "grad_norm": 0.7333683371543884, + "learning_rate": 9.418124251901618e-05, + "loss": 0.1568, + "step": 41110 + }, + { + "epoch": 1.5877060890381869, + "grad_norm": 0.09763200581073761, + "learning_rate": 9.41555014994144e-05, + "loss": 0.1936, + "step": 41120 + }, + { + "epoch": 1.5880922043322137, + "grad_norm": 2.4087512493133545, + "learning_rate": 9.41297604798126e-05, + "loss": 0.2408, + "step": 41130 + }, + { + "epoch": 1.5884783196262404, + "grad_norm": 2.014822244644165, + "learning_rate": 9.410401946021082e-05, + "loss": 0.2883, + "step": 41140 + }, + { + "epoch": 1.5888644349202672, + "grad_norm": 1.1764864921569824, + "learning_rate": 9.407827844060903e-05, + "loss": 0.3198, + "step": 41150 + }, + { + "epoch": 1.589250550214294, + "grad_norm": 0.807905912399292, + "learning_rate": 9.405253742100725e-05, + "loss": 0.3724, + "step": 41160 + }, + { + "epoch": 1.589636665508321, + "grad_norm": 0.9179816842079163, + "learning_rate": 9.402679640140546e-05, + "loss": 0.1429, + "step": 41170 + }, + { + "epoch": 1.5900227808023475, + "grad_norm": 1.3134746551513672, + "learning_rate": 9.400105538180369e-05, + "loss": 0.2973, + "step": 41180 + }, + { + "epoch": 1.5904088960963745, + "grad_norm": 0.08131751418113708, + "learning_rate": 9.39753143622019e-05, + "loss": 0.1627, + "step": 41190 + }, + { + "epoch": 1.590795011390401, + "grad_norm": 0.23568874597549438, + "learning_rate": 9.39495733426001e-05, + "loss": 0.3159, + "step": 41200 + }, + { + "epoch": 1.591181126684428, + "grad_norm": 1.1452207565307617, + "learning_rate": 9.392383232299831e-05, + "loss": 0.1761, + "step": 41210 + }, + { + "epoch": 1.5915672419784548, + "grad_norm": 1.5560107231140137, + "learning_rate": 9.389809130339653e-05, + "loss": 0.2173, + "step": 41220 + }, + { + "epoch": 1.5919533572724816, + "grad_norm": 1.7568162679672241, + "learning_rate": 9.387235028379474e-05, + "loss": 0.1771, + "step": 41230 + }, + { + "epoch": 1.5923394725665083, + "grad_norm": 6.235257148742676, + "learning_rate": 9.384660926419297e-05, + "loss": 0.3207, + "step": 41240 + }, + { + "epoch": 1.592725587860535, + "grad_norm": 1.2336914539337158, + "learning_rate": 9.382086824459118e-05, + "loss": 0.1691, + "step": 41250 + }, + { + "epoch": 1.593111703154562, + "grad_norm": 0.06781507283449173, + "learning_rate": 9.379512722498939e-05, + "loss": 0.1485, + "step": 41260 + }, + { + "epoch": 1.5934978184485886, + "grad_norm": 0.44770199060440063, + "learning_rate": 9.37693862053876e-05, + "loss": 0.133, + "step": 41270 + }, + { + "epoch": 1.5938839337426156, + "grad_norm": 0.3812965750694275, + "learning_rate": 9.37436451857858e-05, + "loss": 0.2305, + "step": 41280 + }, + { + "epoch": 1.5942700490366424, + "grad_norm": 1.7562031745910645, + "learning_rate": 9.371790416618402e-05, + "loss": 0.265, + "step": 41290 + }, + { + "epoch": 1.5946561643306691, + "grad_norm": 0.3089204728603363, + "learning_rate": 9.369216314658225e-05, + "loss": 0.2583, + "step": 41300 + }, + { + "epoch": 1.595042279624696, + "grad_norm": 1.5295588970184326, + "learning_rate": 9.366642212698046e-05, + "loss": 0.1201, + "step": 41310 + }, + { + "epoch": 1.5954283949187227, + "grad_norm": 0.7704429626464844, + "learning_rate": 9.364068110737867e-05, + "loss": 0.1471, + "step": 41320 + }, + { + "epoch": 1.5958145102127497, + "grad_norm": 1.825547218322754, + "learning_rate": 9.361494008777689e-05, + "loss": 0.2964, + "step": 41330 + }, + { + "epoch": 1.5962006255067762, + "grad_norm": 
2.4156250953674316, + "learning_rate": 9.35891990681751e-05, + "loss": 0.4332, + "step": 41340 + }, + { + "epoch": 1.5965867408008032, + "grad_norm": 0.8711603879928589, + "learning_rate": 9.35634580485733e-05, + "loss": 0.2037, + "step": 41350 + }, + { + "epoch": 1.5969728560948298, + "grad_norm": 0.5013506412506104, + "learning_rate": 9.353771702897151e-05, + "loss": 0.1852, + "step": 41360 + }, + { + "epoch": 1.5973589713888567, + "grad_norm": 1.9529963731765747, + "learning_rate": 9.351197600936974e-05, + "loss": 0.2809, + "step": 41370 + }, + { + "epoch": 1.5977450866828835, + "grad_norm": 1.7923181056976318, + "learning_rate": 9.348623498976795e-05, + "loss": 0.3918, + "step": 41380 + }, + { + "epoch": 1.5981312019769103, + "grad_norm": 0.45643335580825806, + "learning_rate": 9.346049397016617e-05, + "loss": 0.4161, + "step": 41390 + }, + { + "epoch": 1.5985173172709373, + "grad_norm": 0.4477383494377136, + "learning_rate": 9.343475295056438e-05, + "loss": 0.202, + "step": 41400 + }, + { + "epoch": 1.5989034325649638, + "grad_norm": 1.1428645849227905, + "learning_rate": 9.340901193096259e-05, + "loss": 0.2271, + "step": 41410 + }, + { + "epoch": 1.5992895478589908, + "grad_norm": 1.8324707746505737, + "learning_rate": 9.338327091136079e-05, + "loss": 0.2261, + "step": 41420 + }, + { + "epoch": 1.5996756631530173, + "grad_norm": 1.3735613822937012, + "learning_rate": 9.335752989175902e-05, + "loss": 0.2394, + "step": 41430 + }, + { + "epoch": 1.6000617784470443, + "grad_norm": 4.176051139831543, + "learning_rate": 9.333178887215723e-05, + "loss": 0.3454, + "step": 41440 + }, + { + "epoch": 1.600447893741071, + "grad_norm": 0.1931024044752121, + "learning_rate": 9.330604785255545e-05, + "loss": 0.3191, + "step": 41450 + }, + { + "epoch": 1.6008340090350979, + "grad_norm": 0.867579996585846, + "learning_rate": 9.328030683295366e-05, + "loss": 0.1951, + "step": 41460 + }, + { + "epoch": 1.6012201243291246, + "grad_norm": 0.4601798355579376, + "learning_rate": 9.325456581335187e-05, + "loss": 0.2682, + "step": 41470 + }, + { + "epoch": 1.6016062396231514, + "grad_norm": 0.5605349540710449, + "learning_rate": 9.322882479375009e-05, + "loss": 0.1963, + "step": 41480 + }, + { + "epoch": 1.6019923549171784, + "grad_norm": 0.4736683964729309, + "learning_rate": 9.32030837741483e-05, + "loss": 0.2347, + "step": 41490 + }, + { + "epoch": 1.602378470211205, + "grad_norm": 2.145426034927368, + "learning_rate": 9.317734275454651e-05, + "loss": 0.2782, + "step": 41500 + }, + { + "epoch": 1.602764585505232, + "grad_norm": 0.34660840034484863, + "learning_rate": 9.315160173494473e-05, + "loss": 0.1966, + "step": 41510 + }, + { + "epoch": 1.6031507007992587, + "grad_norm": 1.3674156665802002, + "learning_rate": 9.312586071534294e-05, + "loss": 0.426, + "step": 41520 + }, + { + "epoch": 1.6035368160932855, + "grad_norm": 1.078046441078186, + "learning_rate": 9.310011969574115e-05, + "loss": 0.2345, + "step": 41530 + }, + { + "epoch": 1.6039229313873122, + "grad_norm": 2.233793258666992, + "learning_rate": 9.307437867613937e-05, + "loss": 0.2605, + "step": 41540 + }, + { + "epoch": 1.604309046681339, + "grad_norm": 0.9344108700752258, + "learning_rate": 9.304863765653758e-05, + "loss": 0.114, + "step": 41550 + }, + { + "epoch": 1.604695161975366, + "grad_norm": 0.29096049070358276, + "learning_rate": 9.302289663693579e-05, + "loss": 0.3319, + "step": 41560 + }, + { + "epoch": 1.6050812772693925, + "grad_norm": 0.41333675384521484, + "learning_rate": 9.2997155617334e-05, + "loss": 0.2392, + "step": 41570 
+ }, + { + "epoch": 1.6054673925634195, + "grad_norm": 1.3718006610870361, + "learning_rate": 9.297141459773222e-05, + "loss": 0.2304, + "step": 41580 + }, + { + "epoch": 1.605853507857446, + "grad_norm": 1.6650983095169067, + "learning_rate": 9.294567357813043e-05, + "loss": 0.3319, + "step": 41590 + }, + { + "epoch": 1.606239623151473, + "grad_norm": 3.4465317726135254, + "learning_rate": 9.291993255852865e-05, + "loss": 0.6168, + "step": 41600 + }, + { + "epoch": 1.6066257384454998, + "grad_norm": 1.2659804821014404, + "learning_rate": 9.289419153892687e-05, + "loss": 0.2291, + "step": 41610 + }, + { + "epoch": 1.6070118537395266, + "grad_norm": 0.4997158348560333, + "learning_rate": 9.286845051932509e-05, + "loss": 0.1619, + "step": 41620 + }, + { + "epoch": 1.6073979690335534, + "grad_norm": 0.87920743227005, + "learning_rate": 9.284270949972329e-05, + "loss": 0.1894, + "step": 41630 + }, + { + "epoch": 1.6077840843275801, + "grad_norm": 1.124824047088623, + "learning_rate": 9.28169684801215e-05, + "loss": 0.4755, + "step": 41640 + }, + { + "epoch": 1.6081701996216071, + "grad_norm": 0.2584467828273773, + "learning_rate": 9.279122746051971e-05, + "loss": 0.2281, + "step": 41650 + }, + { + "epoch": 1.6085563149156337, + "grad_norm": 0.6686498522758484, + "learning_rate": 9.276548644091793e-05, + "loss": 0.3244, + "step": 41660 + }, + { + "epoch": 1.6089424302096607, + "grad_norm": 1.991140365600586, + "learning_rate": 9.273974542131614e-05, + "loss": 0.2776, + "step": 41670 + }, + { + "epoch": 1.6093285455036874, + "grad_norm": 0.40963074564933777, + "learning_rate": 9.271400440171437e-05, + "loss": 0.1072, + "step": 41680 + }, + { + "epoch": 1.6097146607977142, + "grad_norm": 1.117253065109253, + "learning_rate": 9.268826338211258e-05, + "loss": 0.2386, + "step": 41690 + }, + { + "epoch": 1.610100776091741, + "grad_norm": 1.2031314373016357, + "learning_rate": 9.266252236251078e-05, + "loss": 0.1967, + "step": 41700 + }, + { + "epoch": 1.6104868913857677, + "grad_norm": 1.4460607767105103, + "learning_rate": 9.263678134290899e-05, + "loss": 0.2471, + "step": 41710 + }, + { + "epoch": 1.6108730066797947, + "grad_norm": 1.5548468828201294, + "learning_rate": 9.26110403233072e-05, + "loss": 0.1846, + "step": 41720 + }, + { + "epoch": 1.6112591219738213, + "grad_norm": 2.042555809020996, + "learning_rate": 9.258529930370542e-05, + "loss": 0.2791, + "step": 41730 + }, + { + "epoch": 1.6116452372678483, + "grad_norm": 0.19565679132938385, + "learning_rate": 9.255955828410365e-05, + "loss": 0.2138, + "step": 41740 + }, + { + "epoch": 1.612031352561875, + "grad_norm": 0.13099287450313568, + "learning_rate": 9.253381726450186e-05, + "loss": 0.239, + "step": 41750 + }, + { + "epoch": 1.6124174678559018, + "grad_norm": 1.7031620740890503, + "learning_rate": 9.250807624490007e-05, + "loss": 0.4618, + "step": 41760 + }, + { + "epoch": 1.6128035831499286, + "grad_norm": 1.3827983140945435, + "learning_rate": 9.248233522529827e-05, + "loss": 0.3837, + "step": 41770 + }, + { + "epoch": 1.6131896984439553, + "grad_norm": 0.29199469089508057, + "learning_rate": 9.245659420569648e-05, + "loss": 0.2054, + "step": 41780 + }, + { + "epoch": 1.6135758137379823, + "grad_norm": 0.8447692394256592, + "learning_rate": 9.24308531860947e-05, + "loss": 0.2001, + "step": 41790 + }, + { + "epoch": 1.6139619290320089, + "grad_norm": 3.2848644256591797, + "learning_rate": 9.240511216649292e-05, + "loss": 0.2177, + "step": 41800 + }, + { + "epoch": 1.6143480443260358, + "grad_norm": 0.7044230699539185, + 
"learning_rate": 9.237937114689114e-05, + "loss": 0.2592, + "step": 41810 + }, + { + "epoch": 1.6147341596200624, + "grad_norm": 0.6980583667755127, + "learning_rate": 9.235363012728935e-05, + "loss": 0.2133, + "step": 41820 + }, + { + "epoch": 1.6151202749140894, + "grad_norm": 0.6349130868911743, + "learning_rate": 9.232788910768756e-05, + "loss": 0.1238, + "step": 41830 + }, + { + "epoch": 1.6155063902081161, + "grad_norm": 3.3420283794403076, + "learning_rate": 9.230214808808576e-05, + "loss": 0.4264, + "step": 41840 + }, + { + "epoch": 1.615892505502143, + "grad_norm": 1.1701698303222656, + "learning_rate": 9.227640706848398e-05, + "loss": 0.2886, + "step": 41850 + }, + { + "epoch": 1.6162786207961697, + "grad_norm": 1.7839452028274536, + "learning_rate": 9.22506660488822e-05, + "loss": 0.2431, + "step": 41860 + }, + { + "epoch": 1.6166647360901965, + "grad_norm": 0.5988792181015015, + "learning_rate": 9.222492502928042e-05, + "loss": 0.2162, + "step": 41870 + }, + { + "epoch": 1.6170508513842234, + "grad_norm": 2.511929988861084, + "learning_rate": 9.219918400967863e-05, + "loss": 0.2862, + "step": 41880 + }, + { + "epoch": 1.61743696667825, + "grad_norm": 0.524932861328125, + "learning_rate": 9.217344299007684e-05, + "loss": 0.1984, + "step": 41890 + }, + { + "epoch": 1.617823081972277, + "grad_norm": 0.42112675309181213, + "learning_rate": 9.214770197047506e-05, + "loss": 0.3873, + "step": 41900 + }, + { + "epoch": 1.6182091972663037, + "grad_norm": 2.114396810531616, + "learning_rate": 9.212196095087327e-05, + "loss": 0.209, + "step": 41910 + }, + { + "epoch": 1.6185953125603305, + "grad_norm": 1.023512601852417, + "learning_rate": 9.209621993127147e-05, + "loss": 0.1849, + "step": 41920 + }, + { + "epoch": 1.6189814278543573, + "grad_norm": 0.4894075393676758, + "learning_rate": 9.20704789116697e-05, + "loss": 0.2624, + "step": 41930 + }, + { + "epoch": 1.619367543148384, + "grad_norm": 2.8259129524230957, + "learning_rate": 9.204473789206791e-05, + "loss": 0.1016, + "step": 41940 + }, + { + "epoch": 1.619753658442411, + "grad_norm": 0.30800989270210266, + "learning_rate": 9.201899687246612e-05, + "loss": 0.2056, + "step": 41950 + }, + { + "epoch": 1.6201397737364376, + "grad_norm": 0.9112114906311035, + "learning_rate": 9.199325585286434e-05, + "loss": 0.2935, + "step": 41960 + }, + { + "epoch": 1.6205258890304646, + "grad_norm": 0.9642273783683777, + "learning_rate": 9.196751483326255e-05, + "loss": 0.2782, + "step": 41970 + }, + { + "epoch": 1.6209120043244913, + "grad_norm": 1.4163227081298828, + "learning_rate": 9.194177381366076e-05, + "loss": 0.2259, + "step": 41980 + }, + { + "epoch": 1.621298119618518, + "grad_norm": 1.3390878438949585, + "learning_rate": 9.191603279405898e-05, + "loss": 0.2118, + "step": 41990 + }, + { + "epoch": 1.6216842349125449, + "grad_norm": 0.7193337082862854, + "learning_rate": 9.189029177445719e-05, + "loss": 0.1972, + "step": 42000 + }, + { + "epoch": 1.6220703502065716, + "grad_norm": 0.9605100154876709, + "learning_rate": 9.18645507548554e-05, + "loss": 0.2109, + "step": 42010 + }, + { + "epoch": 1.6224564655005986, + "grad_norm": 1.1802191734313965, + "learning_rate": 9.183880973525362e-05, + "loss": 0.204, + "step": 42020 + }, + { + "epoch": 1.6228425807946252, + "grad_norm": 0.9422736167907715, + "learning_rate": 9.181306871565183e-05, + "loss": 0.2316, + "step": 42030 + }, + { + "epoch": 1.6232286960886522, + "grad_norm": 2.4047484397888184, + "learning_rate": 9.178732769605004e-05, + "loss": 0.3395, + "step": 42040 + }, + { + "epoch": 
1.6236148113826787, + "grad_norm": 0.625637948513031, + "learning_rate": 9.176158667644826e-05, + "loss": 0.2339, + "step": 42050 + }, + { + "epoch": 1.6240009266767057, + "grad_norm": 1.628997564315796, + "learning_rate": 9.173584565684647e-05, + "loss": 0.2034, + "step": 42060 + }, + { + "epoch": 1.6243870419707325, + "grad_norm": 0.46910417079925537, + "learning_rate": 9.171010463724468e-05, + "loss": 0.2874, + "step": 42070 + }, + { + "epoch": 1.6247731572647592, + "grad_norm": 1.7646992206573486, + "learning_rate": 9.16843636176429e-05, + "loss": 0.2998, + "step": 42080 + }, + { + "epoch": 1.625159272558786, + "grad_norm": 1.0245219469070435, + "learning_rate": 9.165862259804111e-05, + "loss": 0.3796, + "step": 42090 + }, + { + "epoch": 1.6255453878528128, + "grad_norm": 1.175984501838684, + "learning_rate": 9.163288157843932e-05, + "loss": 0.2683, + "step": 42100 + }, + { + "epoch": 1.6259315031468398, + "grad_norm": 0.5364359617233276, + "learning_rate": 9.160714055883755e-05, + "loss": 0.185, + "step": 42110 + }, + { + "epoch": 1.6263176184408663, + "grad_norm": 2.991917610168457, + "learning_rate": 9.158139953923575e-05, + "loss": 0.4202, + "step": 42120 + }, + { + "epoch": 1.6267037337348933, + "grad_norm": 0.6997040510177612, + "learning_rate": 9.155565851963396e-05, + "loss": 0.2833, + "step": 42130 + }, + { + "epoch": 1.62708984902892, + "grad_norm": 1.2331346273422241, + "learning_rate": 9.152991750003218e-05, + "loss": 0.2001, + "step": 42140 + }, + { + "epoch": 1.6274759643229468, + "grad_norm": 0.5714563131332397, + "learning_rate": 9.150417648043039e-05, + "loss": 0.1649, + "step": 42150 + }, + { + "epoch": 1.6278620796169736, + "grad_norm": 0.18129512667655945, + "learning_rate": 9.14784354608286e-05, + "loss": 0.2042, + "step": 42160 + }, + { + "epoch": 1.6282481949110004, + "grad_norm": 1.0198371410369873, + "learning_rate": 9.145269444122682e-05, + "loss": 0.089, + "step": 42170 + }, + { + "epoch": 1.6286343102050274, + "grad_norm": 1.7465068101882935, + "learning_rate": 9.142695342162504e-05, + "loss": 0.2767, + "step": 42180 + }, + { + "epoch": 1.629020425499054, + "grad_norm": 1.5238598585128784, + "learning_rate": 9.140121240202324e-05, + "loss": 0.2682, + "step": 42190 + }, + { + "epoch": 1.629406540793081, + "grad_norm": 0.21305501461029053, + "learning_rate": 9.137547138242146e-05, + "loss": 0.1656, + "step": 42200 + }, + { + "epoch": 1.6297926560871077, + "grad_norm": 2.7441041469573975, + "learning_rate": 9.134973036281967e-05, + "loss": 0.1734, + "step": 42210 + }, + { + "epoch": 1.6301787713811344, + "grad_norm": 1.0751773118972778, + "learning_rate": 9.132398934321788e-05, + "loss": 0.3272, + "step": 42220 + }, + { + "epoch": 1.6305648866751612, + "grad_norm": 2.0907068252563477, + "learning_rate": 9.12982483236161e-05, + "loss": 0.2322, + "step": 42230 + }, + { + "epoch": 1.630951001969188, + "grad_norm": 0.8943297266960144, + "learning_rate": 9.127250730401432e-05, + "loss": 0.1908, + "step": 42240 + }, + { + "epoch": 1.631337117263215, + "grad_norm": 0.30939817428588867, + "learning_rate": 9.124676628441254e-05, + "loss": 0.2398, + "step": 42250 + }, + { + "epoch": 1.6317232325572415, + "grad_norm": 0.07228074967861176, + "learning_rate": 9.122102526481075e-05, + "loss": 0.2146, + "step": 42260 + }, + { + "epoch": 1.6321093478512685, + "grad_norm": 1.6129286289215088, + "learning_rate": 9.119528424520895e-05, + "loss": 0.2989, + "step": 42270 + }, + { + "epoch": 1.632495463145295, + "grad_norm": 3.1282570362091064, + "learning_rate": 
9.116954322560716e-05, + "loss": 0.2296, + "step": 42280 + }, + { + "epoch": 1.632881578439322, + "grad_norm": 1.9693125486373901, + "learning_rate": 9.114380220600538e-05, + "loss": 0.3122, + "step": 42290 + }, + { + "epoch": 1.6332676937333488, + "grad_norm": 0.7935388088226318, + "learning_rate": 9.11180611864036e-05, + "loss": 0.2738, + "step": 42300 + }, + { + "epoch": 1.6336538090273756, + "grad_norm": 0.6630697250366211, + "learning_rate": 9.109232016680182e-05, + "loss": 0.1349, + "step": 42310 + }, + { + "epoch": 1.6340399243214023, + "grad_norm": 0.6689760684967041, + "learning_rate": 9.106657914720003e-05, + "loss": 0.2881, + "step": 42320 + }, + { + "epoch": 1.634426039615429, + "grad_norm": 1.3075367212295532, + "learning_rate": 9.104083812759824e-05, + "loss": 0.3599, + "step": 42330 + }, + { + "epoch": 1.634812154909456, + "grad_norm": 0.6957948803901672, + "learning_rate": 9.101509710799644e-05, + "loss": 0.2562, + "step": 42340 + }, + { + "epoch": 1.6351982702034826, + "grad_norm": 0.6405411958694458, + "learning_rate": 9.098935608839466e-05, + "loss": 0.3024, + "step": 42350 + }, + { + "epoch": 1.6355843854975096, + "grad_norm": 1.4570382833480835, + "learning_rate": 9.096361506879288e-05, + "loss": 0.2415, + "step": 42360 + }, + { + "epoch": 1.6359705007915364, + "grad_norm": 0.28187355399131775, + "learning_rate": 9.09378740491911e-05, + "loss": 0.3213, + "step": 42370 + }, + { + "epoch": 1.6363566160855632, + "grad_norm": 0.5807194113731384, + "learning_rate": 9.091213302958931e-05, + "loss": 0.2513, + "step": 42380 + }, + { + "epoch": 1.63674273137959, + "grad_norm": 2.750338554382324, + "learning_rate": 9.088639200998752e-05, + "loss": 0.2667, + "step": 42390 + }, + { + "epoch": 1.6371288466736167, + "grad_norm": 2.9717495441436768, + "learning_rate": 9.086065099038574e-05, + "loss": 0.2719, + "step": 42400 + }, + { + "epoch": 1.6375149619676437, + "grad_norm": 0.5903140306472778, + "learning_rate": 9.083490997078394e-05, + "loss": 0.2861, + "step": 42410 + }, + { + "epoch": 1.6379010772616702, + "grad_norm": 0.5791400671005249, + "learning_rate": 9.080916895118215e-05, + "loss": 0.1999, + "step": 42420 + }, + { + "epoch": 1.6382871925556972, + "grad_norm": 0.5550700426101685, + "learning_rate": 9.078342793158038e-05, + "loss": 0.2856, + "step": 42430 + }, + { + "epoch": 1.638673307849724, + "grad_norm": 0.1384456902742386, + "learning_rate": 9.075768691197859e-05, + "loss": 0.1379, + "step": 42440 + }, + { + "epoch": 1.6390594231437507, + "grad_norm": 0.4201198220252991, + "learning_rate": 9.07319458923768e-05, + "loss": 0.2401, + "step": 42450 + }, + { + "epoch": 1.6394455384377775, + "grad_norm": 0.4227651059627533, + "learning_rate": 9.070620487277502e-05, + "loss": 0.1251, + "step": 42460 + }, + { + "epoch": 1.6398316537318043, + "grad_norm": 0.5025180578231812, + "learning_rate": 9.068046385317323e-05, + "loss": 0.1911, + "step": 42470 + }, + { + "epoch": 1.6402177690258313, + "grad_norm": 1.5064163208007812, + "learning_rate": 9.065472283357144e-05, + "loss": 0.2291, + "step": 42480 + }, + { + "epoch": 1.6406038843198578, + "grad_norm": 1.0776429176330566, + "learning_rate": 9.062898181396966e-05, + "loss": 0.1429, + "step": 42490 + }, + { + "epoch": 1.6409899996138848, + "grad_norm": 1.409090280532837, + "learning_rate": 9.060324079436787e-05, + "loss": 0.1768, + "step": 42500 + }, + { + "epoch": 1.6413761149079114, + "grad_norm": 1.4606170654296875, + "learning_rate": 9.057749977476608e-05, + "loss": 0.1657, + "step": 42510 + }, + { + "epoch": 
1.6417622302019383, + "grad_norm": 0.1410249024629593, + "learning_rate": 9.05517587551643e-05, + "loss": 0.1768, + "step": 42520 + }, + { + "epoch": 1.6421483454959651, + "grad_norm": 1.3056964874267578, + "learning_rate": 9.052601773556251e-05, + "loss": 0.2001, + "step": 42530 + }, + { + "epoch": 1.6425344607899919, + "grad_norm": 0.7286831140518188, + "learning_rate": 9.050027671596072e-05, + "loss": 0.2181, + "step": 42540 + }, + { + "epoch": 1.6429205760840186, + "grad_norm": 0.7450721263885498, + "learning_rate": 9.047453569635894e-05, + "loss": 0.1961, + "step": 42550 + }, + { + "epoch": 1.6433066913780454, + "grad_norm": 0.3484252691268921, + "learning_rate": 9.044879467675715e-05, + "loss": 0.1435, + "step": 42560 + }, + { + "epoch": 1.6436928066720724, + "grad_norm": 1.4743714332580566, + "learning_rate": 9.042305365715536e-05, + "loss": 0.218, + "step": 42570 + }, + { + "epoch": 1.644078921966099, + "grad_norm": 1.9126007556915283, + "learning_rate": 9.039731263755358e-05, + "loss": 0.3114, + "step": 42580 + }, + { + "epoch": 1.644465037260126, + "grad_norm": 0.1249854639172554, + "learning_rate": 9.037157161795179e-05, + "loss": 0.1675, + "step": 42590 + }, + { + "epoch": 1.6448511525541527, + "grad_norm": 0.948403537273407, + "learning_rate": 9.034583059835e-05, + "loss": 0.1956, + "step": 42600 + }, + { + "epoch": 1.6452372678481795, + "grad_norm": 1.6507424116134644, + "learning_rate": 9.032008957874823e-05, + "loss": 0.2754, + "step": 42610 + }, + { + "epoch": 1.6456233831422062, + "grad_norm": 2.7686564922332764, + "learning_rate": 9.029434855914643e-05, + "loss": 0.3409, + "step": 42620 + }, + { + "epoch": 1.646009498436233, + "grad_norm": 2.7112228870391846, + "learning_rate": 9.026860753954464e-05, + "loss": 0.3626, + "step": 42630 + }, + { + "epoch": 1.64639561373026, + "grad_norm": 1.400976538658142, + "learning_rate": 9.024286651994286e-05, + "loss": 0.199, + "step": 42640 + }, + { + "epoch": 1.6467817290242865, + "grad_norm": 1.4591180086135864, + "learning_rate": 9.021712550034107e-05, + "loss": 0.4385, + "step": 42650 + }, + { + "epoch": 1.6471678443183135, + "grad_norm": 1.386496901512146, + "learning_rate": 9.019138448073928e-05, + "loss": 0.1433, + "step": 42660 + }, + { + "epoch": 1.64755395961234, + "grad_norm": 2.054412841796875, + "learning_rate": 9.01656434611375e-05, + "loss": 0.1602, + "step": 42670 + }, + { + "epoch": 1.647940074906367, + "grad_norm": 1.5373504161834717, + "learning_rate": 9.013990244153572e-05, + "loss": 0.1329, + "step": 42680 + }, + { + "epoch": 1.6483261902003938, + "grad_norm": 2.131410598754883, + "learning_rate": 9.011416142193392e-05, + "loss": 0.2858, + "step": 42690 + }, + { + "epoch": 1.6487123054944206, + "grad_norm": 1.5870622396469116, + "learning_rate": 9.008842040233214e-05, + "loss": 0.414, + "step": 42700 + }, + { + "epoch": 1.6490984207884476, + "grad_norm": 0.46856650710105896, + "learning_rate": 9.006267938273035e-05, + "loss": 0.1262, + "step": 42710 + }, + { + "epoch": 1.6494845360824741, + "grad_norm": 1.368762731552124, + "learning_rate": 9.003693836312856e-05, + "loss": 0.2341, + "step": 42720 + }, + { + "epoch": 1.6498706513765011, + "grad_norm": 2.3937511444091797, + "learning_rate": 9.001119734352678e-05, + "loss": 0.215, + "step": 42730 + }, + { + "epoch": 1.6502567666705277, + "grad_norm": 0.9359129071235657, + "learning_rate": 8.9985456323925e-05, + "loss": 0.2173, + "step": 42740 + }, + { + "epoch": 1.6506428819645547, + "grad_norm": 1.381408452987671, + "learning_rate": 8.995971530432322e-05, + "loss": 
0.1671, + "step": 42750 + }, + { + "epoch": 1.6510289972585814, + "grad_norm": 0.5645018815994263, + "learning_rate": 8.993397428472142e-05, + "loss": 0.1943, + "step": 42760 + }, + { + "epoch": 1.6514151125526082, + "grad_norm": 1.1989009380340576, + "learning_rate": 8.990823326511963e-05, + "loss": 0.1845, + "step": 42770 + }, + { + "epoch": 1.651801227846635, + "grad_norm": 2.2969398498535156, + "learning_rate": 8.988249224551784e-05, + "loss": 0.3469, + "step": 42780 + }, + { + "epoch": 1.6521873431406617, + "grad_norm": 1.931502342224121, + "learning_rate": 8.985675122591606e-05, + "loss": 0.3857, + "step": 42790 + }, + { + "epoch": 1.6525734584346887, + "grad_norm": 1.2291436195373535, + "learning_rate": 8.983101020631428e-05, + "loss": 0.1948, + "step": 42800 + }, + { + "epoch": 1.6529595737287153, + "grad_norm": 0.3344796597957611, + "learning_rate": 8.98052691867125e-05, + "loss": 0.2707, + "step": 42810 + }, + { + "epoch": 1.6533456890227423, + "grad_norm": 1.9174389839172363, + "learning_rate": 8.977952816711071e-05, + "loss": 0.2318, + "step": 42820 + }, + { + "epoch": 1.653731804316769, + "grad_norm": 0.49493780732154846, + "learning_rate": 8.975378714750892e-05, + "loss": 0.2615, + "step": 42830 + }, + { + "epoch": 1.6541179196107958, + "grad_norm": 0.8160524964332581, + "learning_rate": 8.972804612790712e-05, + "loss": 0.1837, + "step": 42840 + }, + { + "epoch": 1.6545040349048226, + "grad_norm": 0.571782112121582, + "learning_rate": 8.970230510830534e-05, + "loss": 0.1451, + "step": 42850 + }, + { + "epoch": 1.6548901501988493, + "grad_norm": 2.636889934539795, + "learning_rate": 8.967656408870356e-05, + "loss": 0.348, + "step": 42860 + }, + { + "epoch": 1.6552762654928763, + "grad_norm": 0.4561298191547394, + "learning_rate": 8.965082306910178e-05, + "loss": 0.3301, + "step": 42870 + }, + { + "epoch": 1.6556623807869029, + "grad_norm": 0.7691421508789062, + "learning_rate": 8.962508204949999e-05, + "loss": 0.231, + "step": 42880 + }, + { + "epoch": 1.6560484960809299, + "grad_norm": 0.2073112428188324, + "learning_rate": 8.95993410298982e-05, + "loss": 0.1358, + "step": 42890 + }, + { + "epoch": 1.6564346113749564, + "grad_norm": 0.35970741510391235, + "learning_rate": 8.957360001029642e-05, + "loss": 0.3412, + "step": 42900 + }, + { + "epoch": 1.6568207266689834, + "grad_norm": 0.6349666118621826, + "learning_rate": 8.954785899069462e-05, + "loss": 0.3279, + "step": 42910 + }, + { + "epoch": 1.6572068419630102, + "grad_norm": 1.6498395204544067, + "learning_rate": 8.952211797109284e-05, + "loss": 0.3195, + "step": 42920 + }, + { + "epoch": 1.657592957257037, + "grad_norm": 1.200462818145752, + "learning_rate": 8.949637695149106e-05, + "loss": 0.1533, + "step": 42930 + }, + { + "epoch": 1.6579790725510637, + "grad_norm": 3.063417673110962, + "learning_rate": 8.947063593188927e-05, + "loss": 0.1553, + "step": 42940 + }, + { + "epoch": 1.6583651878450905, + "grad_norm": 2.632843494415283, + "learning_rate": 8.944489491228748e-05, + "loss": 0.1819, + "step": 42950 + }, + { + "epoch": 1.6587513031391175, + "grad_norm": 1.2199878692626953, + "learning_rate": 8.94191538926857e-05, + "loss": 0.3631, + "step": 42960 + }, + { + "epoch": 1.659137418433144, + "grad_norm": 4.311095237731934, + "learning_rate": 8.939341287308391e-05, + "loss": 0.2747, + "step": 42970 + }, + { + "epoch": 1.659523533727171, + "grad_norm": 1.0289263725280762, + "learning_rate": 8.936767185348211e-05, + "loss": 0.225, + "step": 42980 + }, + { + "epoch": 1.6599096490211978, + "grad_norm": 
0.17542269825935364, + "learning_rate": 8.934193083388034e-05, + "loss": 0.3562, + "step": 42990 + }, + { + "epoch": 1.6602957643152245, + "grad_norm": 1.6451623439788818, + "learning_rate": 8.931618981427855e-05, + "loss": 0.3318, + "step": 43000 + }, + { + "epoch": 1.6606818796092513, + "grad_norm": 0.6164776682853699, + "learning_rate": 8.929044879467676e-05, + "loss": 0.272, + "step": 43010 + }, + { + "epoch": 1.661067994903278, + "grad_norm": 0.8627731800079346, + "learning_rate": 8.926470777507498e-05, + "loss": 0.2037, + "step": 43020 + }, + { + "epoch": 1.661454110197305, + "grad_norm": 0.9663155674934387, + "learning_rate": 8.923896675547319e-05, + "loss": 0.2291, + "step": 43030 + }, + { + "epoch": 1.6618402254913316, + "grad_norm": 1.909785509109497, + "learning_rate": 8.92132257358714e-05, + "loss": 0.357, + "step": 43040 + }, + { + "epoch": 1.6622263407853586, + "grad_norm": 1.4348317384719849, + "learning_rate": 8.918748471626962e-05, + "loss": 0.2494, + "step": 43050 + }, + { + "epoch": 1.6626124560793853, + "grad_norm": 1.8088570833206177, + "learning_rate": 8.916174369666783e-05, + "loss": 0.1663, + "step": 43060 + }, + { + "epoch": 1.6629985713734121, + "grad_norm": 0.4020337462425232, + "learning_rate": 8.913600267706604e-05, + "loss": 0.251, + "step": 43070 + }, + { + "epoch": 1.6633846866674389, + "grad_norm": 1.4719258546829224, + "learning_rate": 8.911026165746426e-05, + "loss": 0.3267, + "step": 43080 + }, + { + "epoch": 1.6637708019614657, + "grad_norm": 0.2420385479927063, + "learning_rate": 8.908452063786247e-05, + "loss": 0.2289, + "step": 43090 + }, + { + "epoch": 1.6641569172554926, + "grad_norm": 0.4697989225387573, + "learning_rate": 8.905877961826068e-05, + "loss": 0.1597, + "step": 43100 + }, + { + "epoch": 1.6645430325495192, + "grad_norm": 0.4061245918273926, + "learning_rate": 8.90330385986589e-05, + "loss": 0.2299, + "step": 43110 + }, + { + "epoch": 1.6649291478435462, + "grad_norm": 1.4116615056991577, + "learning_rate": 8.900729757905711e-05, + "loss": 0.251, + "step": 43120 + }, + { + "epoch": 1.6653152631375727, + "grad_norm": 0.2808239161968231, + "learning_rate": 8.898155655945532e-05, + "loss": 0.2204, + "step": 43130 + }, + { + "epoch": 1.6657013784315997, + "grad_norm": 0.09532297402620316, + "learning_rate": 8.895581553985354e-05, + "loss": 0.2076, + "step": 43140 + }, + { + "epoch": 1.6660874937256265, + "grad_norm": 1.6641709804534912, + "learning_rate": 8.893007452025175e-05, + "loss": 0.1697, + "step": 43150 + }, + { + "epoch": 1.6664736090196532, + "grad_norm": 0.9248488545417786, + "learning_rate": 8.890433350064996e-05, + "loss": 0.4659, + "step": 43160 + }, + { + "epoch": 1.66685972431368, + "grad_norm": 2.1415629386901855, + "learning_rate": 8.887859248104819e-05, + "loss": 0.3056, + "step": 43170 + }, + { + "epoch": 1.6672458396077068, + "grad_norm": 0.39359986782073975, + "learning_rate": 8.885285146144639e-05, + "loss": 0.2799, + "step": 43180 + }, + { + "epoch": 1.6676319549017338, + "grad_norm": 0.09023096412420273, + "learning_rate": 8.88271104418446e-05, + "loss": 0.2777, + "step": 43190 + }, + { + "epoch": 1.6680180701957603, + "grad_norm": 1.8555763959884644, + "learning_rate": 8.880136942224282e-05, + "loss": 0.3152, + "step": 43200 + }, + { + "epoch": 1.6684041854897873, + "grad_norm": 0.18823792040348053, + "learning_rate": 8.877562840264103e-05, + "loss": 0.1625, + "step": 43210 + }, + { + "epoch": 1.668790300783814, + "grad_norm": 1.5633597373962402, + "learning_rate": 8.874988738303924e-05, + "loss": 0.3041, + "step": 
43220 + }, + { + "epoch": 1.6691764160778408, + "grad_norm": 1.6646497249603271, + "learning_rate": 8.872414636343746e-05, + "loss": 0.3719, + "step": 43230 + }, + { + "epoch": 1.6695625313718676, + "grad_norm": 1.569008469581604, + "learning_rate": 8.869840534383568e-05, + "loss": 0.1797, + "step": 43240 + }, + { + "epoch": 1.6699486466658944, + "grad_norm": 0.44778671860694885, + "learning_rate": 8.86726643242339e-05, + "loss": 0.1192, + "step": 43250 + }, + { + "epoch": 1.6703347619599214, + "grad_norm": 1.936880350112915, + "learning_rate": 8.86469233046321e-05, + "loss": 0.262, + "step": 43260 + }, + { + "epoch": 1.670720877253948, + "grad_norm": 2.0454766750335693, + "learning_rate": 8.862118228503031e-05, + "loss": 0.125, + "step": 43270 + }, + { + "epoch": 1.671106992547975, + "grad_norm": 2.3878109455108643, + "learning_rate": 8.859544126542852e-05, + "loss": 0.2755, + "step": 43280 + }, + { + "epoch": 1.6714931078420017, + "grad_norm": 2.1281206607818604, + "learning_rate": 8.856970024582674e-05, + "loss": 0.3056, + "step": 43290 + }, + { + "epoch": 1.6718792231360284, + "grad_norm": 1.1602097749710083, + "learning_rate": 8.854395922622496e-05, + "loss": 0.3131, + "step": 43300 + }, + { + "epoch": 1.6722653384300552, + "grad_norm": 1.444730520248413, + "learning_rate": 8.851821820662318e-05, + "loss": 0.2922, + "step": 43310 + }, + { + "epoch": 1.672651453724082, + "grad_norm": 1.0081762075424194, + "learning_rate": 8.849247718702139e-05, + "loss": 0.2689, + "step": 43320 + }, + { + "epoch": 1.673037569018109, + "grad_norm": 2.532080888748169, + "learning_rate": 8.846673616741959e-05, + "loss": 0.4207, + "step": 43330 + }, + { + "epoch": 1.6734236843121355, + "grad_norm": 1.1733953952789307, + "learning_rate": 8.84409951478178e-05, + "loss": 0.3253, + "step": 43340 + }, + { + "epoch": 1.6738097996061625, + "grad_norm": 0.5781744122505188, + "learning_rate": 8.841525412821602e-05, + "loss": 0.165, + "step": 43350 + }, + { + "epoch": 1.674195914900189, + "grad_norm": 2.3320493698120117, + "learning_rate": 8.838951310861424e-05, + "loss": 0.1466, + "step": 43360 + }, + { + "epoch": 1.674582030194216, + "grad_norm": 0.6514772176742554, + "learning_rate": 8.836377208901246e-05, + "loss": 0.2392, + "step": 43370 + }, + { + "epoch": 1.6749681454882428, + "grad_norm": 2.446646213531494, + "learning_rate": 8.833803106941067e-05, + "loss": 0.2237, + "step": 43380 + }, + { + "epoch": 1.6753542607822696, + "grad_norm": 1.0567893981933594, + "learning_rate": 8.831229004980888e-05, + "loss": 0.4682, + "step": 43390 + }, + { + "epoch": 1.6757403760762963, + "grad_norm": 1.559910774230957, + "learning_rate": 8.828654903020708e-05, + "loss": 0.2283, + "step": 43400 + }, + { + "epoch": 1.676126491370323, + "grad_norm": 2.8934245109558105, + "learning_rate": 8.82608080106053e-05, + "loss": 0.255, + "step": 43410 + }, + { + "epoch": 1.67651260666435, + "grad_norm": 1.592612862586975, + "learning_rate": 8.823506699100352e-05, + "loss": 0.3115, + "step": 43420 + }, + { + "epoch": 1.6768987219583766, + "grad_norm": 0.19438475370407104, + "learning_rate": 8.820932597140174e-05, + "loss": 0.2549, + "step": 43430 + }, + { + "epoch": 1.6772848372524036, + "grad_norm": 1.3219093084335327, + "learning_rate": 8.818358495179995e-05, + "loss": 0.3156, + "step": 43440 + }, + { + "epoch": 1.6776709525464304, + "grad_norm": 0.23484057188034058, + "learning_rate": 8.815784393219816e-05, + "loss": 0.1866, + "step": 43450 + }, + { + "epoch": 1.6780570678404572, + "grad_norm": 1.284556269645691, + "learning_rate": 
8.813210291259638e-05, + "loss": 0.2019, + "step": 43460 + }, + { + "epoch": 1.678443183134484, + "grad_norm": 1.4797706604003906, + "learning_rate": 8.810636189299459e-05, + "loss": 0.2423, + "step": 43470 + }, + { + "epoch": 1.6788292984285107, + "grad_norm": 1.6002583503723145, + "learning_rate": 8.808062087339279e-05, + "loss": 0.182, + "step": 43480 + }, + { + "epoch": 1.6792154137225377, + "grad_norm": 1.0503334999084473, + "learning_rate": 8.805487985379102e-05, + "loss": 0.2062, + "step": 43490 + }, + { + "epoch": 1.6796015290165642, + "grad_norm": 1.0708048343658447, + "learning_rate": 8.802913883418923e-05, + "loss": 0.1004, + "step": 43500 + }, + { + "epoch": 1.6799876443105912, + "grad_norm": 1.6162430047988892, + "learning_rate": 8.800339781458744e-05, + "loss": 0.1858, + "step": 43510 + }, + { + "epoch": 1.680373759604618, + "grad_norm": 3.5026352405548096, + "learning_rate": 8.797765679498566e-05, + "loss": 0.3431, + "step": 43520 + }, + { + "epoch": 1.6807598748986448, + "grad_norm": 1.1792393922805786, + "learning_rate": 8.795191577538387e-05, + "loss": 0.3389, + "step": 43530 + }, + { + "epoch": 1.6811459901926715, + "grad_norm": 1.0513951778411865, + "learning_rate": 8.792617475578208e-05, + "loss": 0.4476, + "step": 43540 + }, + { + "epoch": 1.6815321054866983, + "grad_norm": 1.3215738534927368, + "learning_rate": 8.79004337361803e-05, + "loss": 0.1624, + "step": 43550 + }, + { + "epoch": 1.6819182207807253, + "grad_norm": 0.4807497262954712, + "learning_rate": 8.787469271657851e-05, + "loss": 0.2777, + "step": 43560 + }, + { + "epoch": 1.6823043360747518, + "grad_norm": 1.1114505529403687, + "learning_rate": 8.784895169697672e-05, + "loss": 0.1405, + "step": 43570 + }, + { + "epoch": 1.6826904513687788, + "grad_norm": 0.8023913502693176, + "learning_rate": 8.782321067737494e-05, + "loss": 0.1744, + "step": 43580 + }, + { + "epoch": 1.6830765666628054, + "grad_norm": 1.1723856925964355, + "learning_rate": 8.779746965777315e-05, + "loss": 0.214, + "step": 43590 + }, + { + "epoch": 1.6834626819568324, + "grad_norm": 2.1813528537750244, + "learning_rate": 8.777172863817136e-05, + "loss": 0.2897, + "step": 43600 + }, + { + "epoch": 1.6838487972508591, + "grad_norm": 0.2930634319782257, + "learning_rate": 8.774598761856958e-05, + "loss": 0.121, + "step": 43610 + }, + { + "epoch": 1.6842349125448859, + "grad_norm": 0.8081830739974976, + "learning_rate": 8.772024659896779e-05, + "loss": 0.2847, + "step": 43620 + }, + { + "epoch": 1.6846210278389127, + "grad_norm": 0.8421902060508728, + "learning_rate": 8.7694505579366e-05, + "loss": 0.3151, + "step": 43630 + }, + { + "epoch": 1.6850071431329394, + "grad_norm": 1.6624525785446167, + "learning_rate": 8.766876455976421e-05, + "loss": 0.1725, + "step": 43640 + }, + { + "epoch": 1.6853932584269664, + "grad_norm": 0.6831340789794922, + "learning_rate": 8.764302354016243e-05, + "loss": 0.432, + "step": 43650 + }, + { + "epoch": 1.685779373720993, + "grad_norm": 1.0642820596694946, + "learning_rate": 8.761728252056064e-05, + "loss": 0.2035, + "step": 43660 + }, + { + "epoch": 1.68616548901502, + "grad_norm": 0.6445997357368469, + "learning_rate": 8.759154150095887e-05, + "loss": 0.2407, + "step": 43670 + }, + { + "epoch": 1.6865516043090467, + "grad_norm": 0.1020059660077095, + "learning_rate": 8.756580048135707e-05, + "loss": 0.1744, + "step": 43680 + }, + { + "epoch": 1.6869377196030735, + "grad_norm": 0.3709975481033325, + "learning_rate": 8.754005946175528e-05, + "loss": 0.3713, + "step": 43690 + }, + { + "epoch": 
1.6873238348971002, + "grad_norm": 0.5910777449607849, + "learning_rate": 8.75143184421535e-05, + "loss": 0.4291, + "step": 43700 + }, + { + "epoch": 1.687709950191127, + "grad_norm": 2.377362012863159, + "learning_rate": 8.748857742255171e-05, + "loss": 0.2871, + "step": 43710 + }, + { + "epoch": 1.688096065485154, + "grad_norm": 0.46812891960144043, + "learning_rate": 8.746283640294992e-05, + "loss": 0.1998, + "step": 43720 + }, + { + "epoch": 1.6884821807791806, + "grad_norm": 1.391098141670227, + "learning_rate": 8.743709538334813e-05, + "loss": 0.3189, + "step": 43730 + }, + { + "epoch": 1.6888682960732075, + "grad_norm": 0.11116664111614227, + "learning_rate": 8.741135436374636e-05, + "loss": 0.2538, + "step": 43740 + }, + { + "epoch": 1.6892544113672343, + "grad_norm": 0.2116546928882599, + "learning_rate": 8.738561334414456e-05, + "loss": 0.2014, + "step": 43750 + }, + { + "epoch": 1.689640526661261, + "grad_norm": 2.3815419673919678, + "learning_rate": 8.735987232454277e-05, + "loss": 0.2395, + "step": 43760 + }, + { + "epoch": 1.6900266419552878, + "grad_norm": 0.11581381410360336, + "learning_rate": 8.733413130494099e-05, + "loss": 0.2699, + "step": 43770 + }, + { + "epoch": 1.6904127572493146, + "grad_norm": 0.25713813304901123, + "learning_rate": 8.73083902853392e-05, + "loss": 0.1651, + "step": 43780 + }, + { + "epoch": 1.6907988725433416, + "grad_norm": 0.3496115803718567, + "learning_rate": 8.728264926573741e-05, + "loss": 0.3175, + "step": 43790 + }, + { + "epoch": 1.6911849878373681, + "grad_norm": 0.4966486394405365, + "learning_rate": 8.725690824613564e-05, + "loss": 0.258, + "step": 43800 + }, + { + "epoch": 1.6915711031313951, + "grad_norm": 1.2760952711105347, + "learning_rate": 8.723116722653385e-05, + "loss": 0.0882, + "step": 43810 + }, + { + "epoch": 1.6919572184254217, + "grad_norm": 1.0095762014389038, + "learning_rate": 8.720542620693207e-05, + "loss": 0.2171, + "step": 43820 + }, + { + "epoch": 1.6923433337194487, + "grad_norm": 3.596276044845581, + "learning_rate": 8.717968518733027e-05, + "loss": 0.238, + "step": 43830 + }, + { + "epoch": 1.6927294490134754, + "grad_norm": 0.3974495530128479, + "learning_rate": 8.715394416772848e-05, + "loss": 0.1459, + "step": 43840 + }, + { + "epoch": 1.6931155643075022, + "grad_norm": 1.1426684856414795, + "learning_rate": 8.71282031481267e-05, + "loss": 0.297, + "step": 43850 + }, + { + "epoch": 1.693501679601529, + "grad_norm": 0.887981116771698, + "learning_rate": 8.710246212852492e-05, + "loss": 0.1999, + "step": 43860 + }, + { + "epoch": 1.6938877948955557, + "grad_norm": 2.5697977542877197, + "learning_rate": 8.707672110892313e-05, + "loss": 0.3186, + "step": 43870 + }, + { + "epoch": 1.6942739101895827, + "grad_norm": 1.0131279230117798, + "learning_rate": 8.705098008932135e-05, + "loss": 0.182, + "step": 43880 + }, + { + "epoch": 1.6946600254836093, + "grad_norm": 0.8319138288497925, + "learning_rate": 8.702523906971956e-05, + "loss": 0.2148, + "step": 43890 + }, + { + "epoch": 1.6950461407776363, + "grad_norm": 0.7270296216011047, + "learning_rate": 8.699949805011776e-05, + "loss": 0.1471, + "step": 43900 + }, + { + "epoch": 1.695432256071663, + "grad_norm": 1.2435482740402222, + "learning_rate": 8.697375703051597e-05, + "loss": 0.1446, + "step": 43910 + }, + { + "epoch": 1.6958183713656898, + "grad_norm": 0.5655059218406677, + "learning_rate": 8.69480160109142e-05, + "loss": 0.2765, + "step": 43920 + }, + { + "epoch": 1.6962044866597166, + "grad_norm": 1.7254856824874878, + "learning_rate": 
8.692227499131241e-05, + "loss": 0.3449, + "step": 43930 + }, + { + "epoch": 1.6965906019537433, + "grad_norm": 0.5344254374504089, + "learning_rate": 8.689653397171063e-05, + "loss": 0.3847, + "step": 43940 + }, + { + "epoch": 1.6969767172477703, + "grad_norm": 0.7520522475242615, + "learning_rate": 8.687079295210884e-05, + "loss": 0.2785, + "step": 43950 + }, + { + "epoch": 1.6973628325417969, + "grad_norm": 2.708897352218628, + "learning_rate": 8.684505193250705e-05, + "loss": 0.3298, + "step": 43960 + }, + { + "epoch": 1.6977489478358239, + "grad_norm": 0.06953504681587219, + "learning_rate": 8.681931091290525e-05, + "loss": 0.094, + "step": 43970 + }, + { + "epoch": 1.6981350631298504, + "grad_norm": 0.586360514163971, + "learning_rate": 8.679356989330347e-05, + "loss": 0.2969, + "step": 43980 + }, + { + "epoch": 1.6985211784238774, + "grad_norm": 0.5543690919876099, + "learning_rate": 8.67678288737017e-05, + "loss": 0.1414, + "step": 43990 + }, + { + "epoch": 1.6989072937179042, + "grad_norm": 1.7767741680145264, + "learning_rate": 8.674208785409991e-05, + "loss": 0.3374, + "step": 44000 + }, + { + "epoch": 1.699293409011931, + "grad_norm": 1.463431477546692, + "learning_rate": 8.671634683449812e-05, + "loss": 0.2547, + "step": 44010 + }, + { + "epoch": 1.699679524305958, + "grad_norm": 0.3490108251571655, + "learning_rate": 8.669060581489633e-05, + "loss": 0.2969, + "step": 44020 + }, + { + "epoch": 1.7000656395999845, + "grad_norm": 0.4525464177131653, + "learning_rate": 8.666486479529455e-05, + "loss": 0.2982, + "step": 44030 + }, + { + "epoch": 1.7004517548940115, + "grad_norm": 0.3723921775817871, + "learning_rate": 8.663912377569276e-05, + "loss": 0.2818, + "step": 44040 + }, + { + "epoch": 1.700837870188038, + "grad_norm": 0.5301186442375183, + "learning_rate": 8.661338275609097e-05, + "loss": 0.1871, + "step": 44050 + }, + { + "epoch": 1.701223985482065, + "grad_norm": 2.890627861022949, + "learning_rate": 8.658764173648919e-05, + "loss": 0.3608, + "step": 44060 + }, + { + "epoch": 1.7016101007760918, + "grad_norm": 1.4061124324798584, + "learning_rate": 8.65619007168874e-05, + "loss": 0.203, + "step": 44070 + }, + { + "epoch": 1.7019962160701185, + "grad_norm": 2.882411241531372, + "learning_rate": 8.653615969728561e-05, + "loss": 0.2203, + "step": 44080 + }, + { + "epoch": 1.7023823313641453, + "grad_norm": 1.3079861402511597, + "learning_rate": 8.651041867768383e-05, + "loss": 0.1199, + "step": 44090 + }, + { + "epoch": 1.702768446658172, + "grad_norm": 4.284554481506348, + "learning_rate": 8.648467765808204e-05, + "loss": 0.2187, + "step": 44100 + }, + { + "epoch": 1.703154561952199, + "grad_norm": 0.14506328105926514, + "learning_rate": 8.645893663848025e-05, + "loss": 0.2822, + "step": 44110 + }, + { + "epoch": 1.7035406772462256, + "grad_norm": 0.48547646403312683, + "learning_rate": 8.643319561887847e-05, + "loss": 0.2239, + "step": 44120 + }, + { + "epoch": 1.7039267925402526, + "grad_norm": 1.2987794876098633, + "learning_rate": 8.640745459927668e-05, + "loss": 0.1554, + "step": 44130 + }, + { + "epoch": 1.7043129078342794, + "grad_norm": 0.25391751527786255, + "learning_rate": 8.63817135796749e-05, + "loss": 0.2635, + "step": 44140 + }, + { + "epoch": 1.7046990231283061, + "grad_norm": 1.7552175521850586, + "learning_rate": 8.635597256007311e-05, + "loss": 0.2762, + "step": 44150 + }, + { + "epoch": 1.705085138422333, + "grad_norm": 0.8608360290527344, + "learning_rate": 8.633023154047132e-05, + "loss": 0.2384, + "step": 44160 + }, + { + "epoch": 
1.7054712537163597, + "grad_norm": 2.4618284702301025, + "learning_rate": 8.630449052086955e-05, + "loss": 0.3383, + "step": 44170 + }, + { + "epoch": 1.7058573690103866, + "grad_norm": 0.9636523127555847, + "learning_rate": 8.627874950126775e-05, + "loss": 0.2036, + "step": 44180 + }, + { + "epoch": 1.7062434843044132, + "grad_norm": 0.3806803226470947, + "learning_rate": 8.625300848166596e-05, + "loss": 0.1378, + "step": 44190 + }, + { + "epoch": 1.7066295995984402, + "grad_norm": 0.31704220175743103, + "learning_rate": 8.622726746206417e-05, + "loss": 0.2171, + "step": 44200 + }, + { + "epoch": 1.7070157148924667, + "grad_norm": 2.3355181217193604, + "learning_rate": 8.620152644246239e-05, + "loss": 0.2983, + "step": 44210 + }, + { + "epoch": 1.7074018301864937, + "grad_norm": 1.3819530010223389, + "learning_rate": 8.61757854228606e-05, + "loss": 0.3944, + "step": 44220 + }, + { + "epoch": 1.7077879454805205, + "grad_norm": 0.5096393823623657, + "learning_rate": 8.615004440325883e-05, + "loss": 0.3142, + "step": 44230 + }, + { + "epoch": 1.7081740607745473, + "grad_norm": 0.7113396525382996, + "learning_rate": 8.612430338365704e-05, + "loss": 0.1644, + "step": 44240 + }, + { + "epoch": 1.708560176068574, + "grad_norm": 0.3259173333644867, + "learning_rate": 8.609856236405524e-05, + "loss": 0.208, + "step": 44250 + }, + { + "epoch": 1.7089462913626008, + "grad_norm": 0.1341869980096817, + "learning_rate": 8.607282134445345e-05, + "loss": 0.1642, + "step": 44260 + }, + { + "epoch": 1.7093324066566278, + "grad_norm": 1.0947731733322144, + "learning_rate": 8.604708032485167e-05, + "loss": 0.3079, + "step": 44270 + }, + { + "epoch": 1.7097185219506543, + "grad_norm": 1.065469741821289, + "learning_rate": 8.602133930524988e-05, + "loss": 0.2845, + "step": 44280 + }, + { + "epoch": 1.7101046372446813, + "grad_norm": 0.23346304893493652, + "learning_rate": 8.59955982856481e-05, + "loss": 0.1428, + "step": 44290 + }, + { + "epoch": 1.710490752538708, + "grad_norm": 0.9997304677963257, + "learning_rate": 8.596985726604632e-05, + "loss": 0.2313, + "step": 44300 + }, + { + "epoch": 1.7108768678327348, + "grad_norm": 0.2540823519229889, + "learning_rate": 8.594411624644453e-05, + "loss": 0.131, + "step": 44310 + }, + { + "epoch": 1.7112629831267616, + "grad_norm": 0.35752159357070923, + "learning_rate": 8.591837522684273e-05, + "loss": 0.2414, + "step": 44320 + }, + { + "epoch": 1.7116490984207884, + "grad_norm": 0.9997861385345459, + "learning_rate": 8.589263420724095e-05, + "loss": 0.2726, + "step": 44330 + }, + { + "epoch": 1.7120352137148154, + "grad_norm": 0.1516636461019516, + "learning_rate": 8.586689318763916e-05, + "loss": 0.0939, + "step": 44340 + }, + { + "epoch": 1.712421329008842, + "grad_norm": 1.5101124048233032, + "learning_rate": 8.584115216803737e-05, + "loss": 0.1331, + "step": 44350 + }, + { + "epoch": 1.712807444302869, + "grad_norm": 1.8988938331604004, + "learning_rate": 8.58154111484356e-05, + "loss": 0.104, + "step": 44360 + }, + { + "epoch": 1.7131935595968957, + "grad_norm": 0.50355464220047, + "learning_rate": 8.578967012883381e-05, + "loss": 0.2283, + "step": 44370 + }, + { + "epoch": 1.7135796748909224, + "grad_norm": 1.3266645669937134, + "learning_rate": 8.576392910923203e-05, + "loss": 0.2639, + "step": 44380 + }, + { + "epoch": 1.7139657901849492, + "grad_norm": 0.5714776515960693, + "learning_rate": 8.573818808963023e-05, + "loss": 0.3018, + "step": 44390 + }, + { + "epoch": 1.714351905478976, + "grad_norm": 2.2676408290863037, + "learning_rate": 
8.571244707002844e-05, + "loss": 0.1851, + "step": 44400 + }, + { + "epoch": 1.714738020773003, + "grad_norm": 1.338866114616394, + "learning_rate": 8.568670605042665e-05, + "loss": 0.198, + "step": 44410 + }, + { + "epoch": 1.7151241360670295, + "grad_norm": 0.7841195464134216, + "learning_rate": 8.566096503082488e-05, + "loss": 0.1689, + "step": 44420 + }, + { + "epoch": 1.7155102513610565, + "grad_norm": 2.0101919174194336, + "learning_rate": 8.56352240112231e-05, + "loss": 0.278, + "step": 44430 + }, + { + "epoch": 1.715896366655083, + "grad_norm": 0.3661075830459595, + "learning_rate": 8.560948299162131e-05, + "loss": 0.19, + "step": 44440 + }, + { + "epoch": 1.71628248194911, + "grad_norm": 1.3221279382705688, + "learning_rate": 8.558374197201952e-05, + "loss": 0.2621, + "step": 44450 + }, + { + "epoch": 1.7166685972431368, + "grad_norm": 1.1006388664245605, + "learning_rate": 8.555800095241773e-05, + "loss": 0.1661, + "step": 44460 + }, + { + "epoch": 1.7170547125371636, + "grad_norm": 0.6990749835968018, + "learning_rate": 8.553225993281593e-05, + "loss": 0.1234, + "step": 44470 + }, + { + "epoch": 1.7174408278311903, + "grad_norm": 1.1236909627914429, + "learning_rate": 8.550651891321416e-05, + "loss": 0.3528, + "step": 44480 + }, + { + "epoch": 1.717826943125217, + "grad_norm": 1.3502060174942017, + "learning_rate": 8.548077789361237e-05, + "loss": 0.1035, + "step": 44490 + }, + { + "epoch": 1.718213058419244, + "grad_norm": 0.6443360447883606, + "learning_rate": 8.545503687401059e-05, + "loss": 0.257, + "step": 44500 + }, + { + "epoch": 1.7185991737132706, + "grad_norm": 1.864953875541687, + "learning_rate": 8.54292958544088e-05, + "loss": 0.1791, + "step": 44510 + }, + { + "epoch": 1.7189852890072976, + "grad_norm": 0.8403190970420837, + "learning_rate": 8.540355483480701e-05, + "loss": 0.1907, + "step": 44520 + }, + { + "epoch": 1.7193714043013244, + "grad_norm": 2.308983325958252, + "learning_rate": 8.537781381520523e-05, + "loss": 0.3784, + "step": 44530 + }, + { + "epoch": 1.7197575195953512, + "grad_norm": 0.3229024112224579, + "learning_rate": 8.535207279560343e-05, + "loss": 0.2405, + "step": 44540 + }, + { + "epoch": 1.720143634889378, + "grad_norm": 0.5669896602630615, + "learning_rate": 8.532633177600165e-05, + "loss": 0.2103, + "step": 44550 + }, + { + "epoch": 1.7205297501834047, + "grad_norm": 0.9105948805809021, + "learning_rate": 8.530059075639987e-05, + "loss": 0.1816, + "step": 44560 + }, + { + "epoch": 1.7209158654774317, + "grad_norm": 0.4213886260986328, + "learning_rate": 8.527484973679808e-05, + "loss": 0.1735, + "step": 44570 + }, + { + "epoch": 1.7213019807714582, + "grad_norm": 2.9891350269317627, + "learning_rate": 8.52491087171963e-05, + "loss": 0.3105, + "step": 44580 + }, + { + "epoch": 1.7216880960654852, + "grad_norm": 2.2383408546447754, + "learning_rate": 8.52233676975945e-05, + "loss": 0.2677, + "step": 44590 + }, + { + "epoch": 1.722074211359512, + "grad_norm": 0.128885418176651, + "learning_rate": 8.519762667799272e-05, + "loss": 0.2, + "step": 44600 + }, + { + "epoch": 1.7224603266535388, + "grad_norm": 1.021690845489502, + "learning_rate": 8.517188565839093e-05, + "loss": 0.3636, + "step": 44610 + }, + { + "epoch": 1.7228464419475655, + "grad_norm": 3.629401445388794, + "learning_rate": 8.514614463878915e-05, + "loss": 0.3223, + "step": 44620 + }, + { + "epoch": 1.7232325572415923, + "grad_norm": 2.4377548694610596, + "learning_rate": 8.512040361918736e-05, + "loss": 0.2448, + "step": 44630 + }, + { + "epoch": 1.7236186725356193, + 
"grad_norm": 0.6085236668586731, + "learning_rate": 8.509466259958557e-05, + "loss": 0.2202, + "step": 44640 + }, + { + "epoch": 1.7240047878296458, + "grad_norm": 1.4205585718154907, + "learning_rate": 8.506892157998379e-05, + "loss": 0.1697, + "step": 44650 + }, + { + "epoch": 1.7243909031236728, + "grad_norm": 2.6096320152282715, + "learning_rate": 8.5043180560382e-05, + "loss": 0.1657, + "step": 44660 + }, + { + "epoch": 1.7247770184176994, + "grad_norm": 0.155767560005188, + "learning_rate": 8.501743954078021e-05, + "loss": 0.162, + "step": 44670 + }, + { + "epoch": 1.7251631337117264, + "grad_norm": 0.30843400955200195, + "learning_rate": 8.499169852117843e-05, + "loss": 0.2844, + "step": 44680 + }, + { + "epoch": 1.7255492490057531, + "grad_norm": 0.7207142114639282, + "learning_rate": 8.496595750157664e-05, + "loss": 0.3617, + "step": 44690 + }, + { + "epoch": 1.72593536429978, + "grad_norm": 0.6151508092880249, + "learning_rate": 8.494021648197485e-05, + "loss": 0.1202, + "step": 44700 + }, + { + "epoch": 1.7263214795938067, + "grad_norm": 1.712505578994751, + "learning_rate": 8.491447546237307e-05, + "loss": 0.3624, + "step": 44710 + }, + { + "epoch": 1.7267075948878334, + "grad_norm": 2.3895373344421387, + "learning_rate": 8.488873444277128e-05, + "loss": 0.3347, + "step": 44720 + }, + { + "epoch": 1.7270937101818604, + "grad_norm": 0.3279499411582947, + "learning_rate": 8.48629934231695e-05, + "loss": 0.1821, + "step": 44730 + }, + { + "epoch": 1.727479825475887, + "grad_norm": 0.9812091588973999, + "learning_rate": 8.48372524035677e-05, + "loss": 0.3173, + "step": 44740 + }, + { + "epoch": 1.727865940769914, + "grad_norm": 0.36781829595565796, + "learning_rate": 8.481151138396592e-05, + "loss": 0.2766, + "step": 44750 + }, + { + "epoch": 1.7282520560639407, + "grad_norm": 2.1118052005767822, + "learning_rate": 8.478577036436413e-05, + "loss": 0.4954, + "step": 44760 + }, + { + "epoch": 1.7286381713579675, + "grad_norm": 0.7159673571586609, + "learning_rate": 8.476002934476235e-05, + "loss": 0.3877, + "step": 44770 + }, + { + "epoch": 1.7290242866519943, + "grad_norm": 0.16327527165412903, + "learning_rate": 8.473428832516056e-05, + "loss": 0.2975, + "step": 44780 + }, + { + "epoch": 1.729410401946021, + "grad_norm": 1.2890655994415283, + "learning_rate": 8.470854730555877e-05, + "loss": 0.2056, + "step": 44790 + }, + { + "epoch": 1.729796517240048, + "grad_norm": 0.7911620140075684, + "learning_rate": 8.4682806285957e-05, + "loss": 0.2162, + "step": 44800 + }, + { + "epoch": 1.7301826325340746, + "grad_norm": 0.1271449327468872, + "learning_rate": 8.465706526635521e-05, + "loss": 0.1821, + "step": 44810 + }, + { + "epoch": 1.7305687478281016, + "grad_norm": 1.1765756607055664, + "learning_rate": 8.463132424675341e-05, + "loss": 0.1826, + "step": 44820 + }, + { + "epoch": 1.7309548631221283, + "grad_norm": 1.5291017293930054, + "learning_rate": 8.460558322715163e-05, + "loss": 0.2125, + "step": 44830 + }, + { + "epoch": 1.731340978416155, + "grad_norm": 1.1815464496612549, + "learning_rate": 8.457984220754984e-05, + "loss": 0.1488, + "step": 44840 + }, + { + "epoch": 1.7317270937101819, + "grad_norm": 2.127589464187622, + "learning_rate": 8.455410118794805e-05, + "loss": 0.3758, + "step": 44850 + }, + { + "epoch": 1.7321132090042086, + "grad_norm": 0.5369740724563599, + "learning_rate": 8.452836016834628e-05, + "loss": 0.2725, + "step": 44860 + }, + { + "epoch": 1.7324993242982356, + "grad_norm": 1.038955807685852, + "learning_rate": 8.450261914874449e-05, + "loss": 0.233, + 
"step": 44870 + }, + { + "epoch": 1.7328854395922622, + "grad_norm": 1.5458402633666992, + "learning_rate": 8.44768781291427e-05, + "loss": 0.3846, + "step": 44880 + }, + { + "epoch": 1.7332715548862891, + "grad_norm": 0.8488010168075562, + "learning_rate": 8.44511371095409e-05, + "loss": 0.2022, + "step": 44890 + }, + { + "epoch": 1.7336576701803157, + "grad_norm": 2.4855663776397705, + "learning_rate": 8.442539608993912e-05, + "loss": 0.2071, + "step": 44900 + }, + { + "epoch": 1.7340437854743427, + "grad_norm": 5.918638229370117, + "learning_rate": 8.439965507033733e-05, + "loss": 0.1962, + "step": 44910 + }, + { + "epoch": 1.7344299007683694, + "grad_norm": 1.9365994930267334, + "learning_rate": 8.437391405073556e-05, + "loss": 0.1313, + "step": 44920 + }, + { + "epoch": 1.7348160160623962, + "grad_norm": 1.1711100339889526, + "learning_rate": 8.434817303113377e-05, + "loss": 0.2938, + "step": 44930 + }, + { + "epoch": 1.735202131356423, + "grad_norm": 1.0350059270858765, + "learning_rate": 8.432243201153199e-05, + "loss": 0.1813, + "step": 44940 + }, + { + "epoch": 1.7355882466504498, + "grad_norm": 0.9831900596618652, + "learning_rate": 8.42966909919302e-05, + "loss": 0.1572, + "step": 44950 + }, + { + "epoch": 1.7359743619444767, + "grad_norm": 2.4830219745635986, + "learning_rate": 8.42709499723284e-05, + "loss": 0.2408, + "step": 44960 + }, + { + "epoch": 1.7363604772385033, + "grad_norm": 2.513667345046997, + "learning_rate": 8.424520895272661e-05, + "loss": 0.3392, + "step": 44970 + }, + { + "epoch": 1.7367465925325303, + "grad_norm": 1.1947827339172363, + "learning_rate": 8.421946793312484e-05, + "loss": 0.1314, + "step": 44980 + }, + { + "epoch": 1.737132707826557, + "grad_norm": 0.9299411773681641, + "learning_rate": 8.419372691352305e-05, + "loss": 0.1691, + "step": 44990 + }, + { + "epoch": 1.7375188231205838, + "grad_norm": 2.9388837814331055, + "learning_rate": 8.416798589392127e-05, + "loss": 0.389, + "step": 45000 + }, + { + "epoch": 1.7379049384146106, + "grad_norm": 0.20148181915283203, + "learning_rate": 8.414224487431948e-05, + "loss": 0.2589, + "step": 45010 + }, + { + "epoch": 1.7382910537086373, + "grad_norm": 1.1276886463165283, + "learning_rate": 8.411650385471769e-05, + "loss": 0.2772, + "step": 45020 + }, + { + "epoch": 1.7386771690026643, + "grad_norm": 0.14743736386299133, + "learning_rate": 8.40907628351159e-05, + "loss": 0.2235, + "step": 45030 + }, + { + "epoch": 1.7390632842966909, + "grad_norm": 1.7028512954711914, + "learning_rate": 8.40650218155141e-05, + "loss": 0.262, + "step": 45040 + }, + { + "epoch": 1.7394493995907179, + "grad_norm": 0.27506566047668457, + "learning_rate": 8.403928079591233e-05, + "loss": 0.1546, + "step": 45050 + }, + { + "epoch": 1.7398355148847446, + "grad_norm": 0.11555200815200806, + "learning_rate": 8.401353977631055e-05, + "loss": 0.2591, + "step": 45060 + }, + { + "epoch": 1.7402216301787714, + "grad_norm": 2.141800880432129, + "learning_rate": 8.398779875670876e-05, + "loss": 0.1974, + "step": 45070 + }, + { + "epoch": 1.7406077454727982, + "grad_norm": 0.8879682421684265, + "learning_rate": 8.396205773710697e-05, + "loss": 0.2192, + "step": 45080 + }, + { + "epoch": 1.740993860766825, + "grad_norm": 0.5697862505912781, + "learning_rate": 8.393631671750519e-05, + "loss": 0.1556, + "step": 45090 + }, + { + "epoch": 1.741379976060852, + "grad_norm": 1.5055205821990967, + "learning_rate": 8.39105756979034e-05, + "loss": 0.2199, + "step": 45100 + }, + { + "epoch": 1.7417660913548785, + "grad_norm": 1.4144301414489746, + 
"learning_rate": 8.388483467830161e-05, + "loss": 0.1873, + "step": 45110 + }, + { + "epoch": 1.7421522066489055, + "grad_norm": 2.419147253036499, + "learning_rate": 8.385909365869983e-05, + "loss": 0.3444, + "step": 45120 + }, + { + "epoch": 1.742538321942932, + "grad_norm": 1.1189093589782715, + "learning_rate": 8.383335263909804e-05, + "loss": 0.2641, + "step": 45130 + }, + { + "epoch": 1.742924437236959, + "grad_norm": 0.44919779896736145, + "learning_rate": 8.380761161949625e-05, + "loss": 0.1945, + "step": 45140 + }, + { + "epoch": 1.7433105525309858, + "grad_norm": 3.4231624603271484, + "learning_rate": 8.378187059989447e-05, + "loss": 0.372, + "step": 45150 + }, + { + "epoch": 1.7436966678250125, + "grad_norm": 1.382497787475586, + "learning_rate": 8.375612958029268e-05, + "loss": 0.1532, + "step": 45160 + }, + { + "epoch": 1.7440827831190393, + "grad_norm": 1.9219565391540527, + "learning_rate": 8.373038856069089e-05, + "loss": 0.3255, + "step": 45170 + }, + { + "epoch": 1.744468898413066, + "grad_norm": 1.2347924709320068, + "learning_rate": 8.37046475410891e-05, + "loss": 0.2786, + "step": 45180 + }, + { + "epoch": 1.744855013707093, + "grad_norm": 0.1424872726202011, + "learning_rate": 8.367890652148732e-05, + "loss": 0.2561, + "step": 45190 + }, + { + "epoch": 1.7452411290011196, + "grad_norm": 0.1864137351512909, + "learning_rate": 8.365316550188553e-05, + "loss": 0.4099, + "step": 45200 + }, + { + "epoch": 1.7456272442951466, + "grad_norm": 0.13210314512252808, + "learning_rate": 8.362742448228375e-05, + "loss": 0.2684, + "step": 45210 + }, + { + "epoch": 1.7460133595891734, + "grad_norm": 0.14317531883716583, + "learning_rate": 8.360168346268196e-05, + "loss": 0.1777, + "step": 45220 + }, + { + "epoch": 1.7463994748832001, + "grad_norm": 0.9194528460502625, + "learning_rate": 8.357594244308019e-05, + "loss": 0.2047, + "step": 45230 + }, + { + "epoch": 1.746785590177227, + "grad_norm": 0.13146016001701355, + "learning_rate": 8.355020142347839e-05, + "loss": 0.1659, + "step": 45240 + }, + { + "epoch": 1.7471717054712537, + "grad_norm": 0.804329514503479, + "learning_rate": 8.35244604038766e-05, + "loss": 0.1685, + "step": 45250 + }, + { + "epoch": 1.7475578207652807, + "grad_norm": 2.3637967109680176, + "learning_rate": 8.349871938427481e-05, + "loss": 0.234, + "step": 45260 + }, + { + "epoch": 1.7479439360593072, + "grad_norm": 0.550857663154602, + "learning_rate": 8.347297836467303e-05, + "loss": 0.2283, + "step": 45270 + }, + { + "epoch": 1.7483300513533342, + "grad_norm": 0.5919561982154846, + "learning_rate": 8.344723734507124e-05, + "loss": 0.229, + "step": 45280 + }, + { + "epoch": 1.7487161666473607, + "grad_norm": 2.1318750381469727, + "learning_rate": 8.342149632546945e-05, + "loss": 0.2161, + "step": 45290 + }, + { + "epoch": 1.7491022819413877, + "grad_norm": 2.0451972484588623, + "learning_rate": 8.339575530586768e-05, + "loss": 0.1845, + "step": 45300 + }, + { + "epoch": 1.7494883972354145, + "grad_norm": 0.1041000485420227, + "learning_rate": 8.337001428626588e-05, + "loss": 0.1473, + "step": 45310 + }, + { + "epoch": 1.7498745125294413, + "grad_norm": 1.8641316890716553, + "learning_rate": 8.334427326666409e-05, + "loss": 0.2287, + "step": 45320 + }, + { + "epoch": 1.7502606278234683, + "grad_norm": 0.5701905488967896, + "learning_rate": 8.33185322470623e-05, + "loss": 0.1356, + "step": 45330 + }, + { + "epoch": 1.7506467431174948, + "grad_norm": 0.78929603099823, + "learning_rate": 8.329279122746052e-05, + "loss": 0.1407, + "step": 45340 + }, + { + "epoch": 
1.7510328584115218, + "grad_norm": 1.4745780229568481, + "learning_rate": 8.326705020785873e-05, + "loss": 0.2731, + "step": 45350 + }, + { + "epoch": 1.7514189737055483, + "grad_norm": 0.5546283721923828, + "learning_rate": 8.324130918825696e-05, + "loss": 0.252, + "step": 45360 + }, + { + "epoch": 1.7518050889995753, + "grad_norm": 0.6111850142478943, + "learning_rate": 8.321556816865517e-05, + "loss": 0.3657, + "step": 45370 + }, + { + "epoch": 1.752191204293602, + "grad_norm": 0.9489399790763855, + "learning_rate": 8.318982714905339e-05, + "loss": 0.2214, + "step": 45380 + }, + { + "epoch": 1.7525773195876289, + "grad_norm": 0.2951168119907379, + "learning_rate": 8.316408612945159e-05, + "loss": 0.2425, + "step": 45390 + }, + { + "epoch": 1.7529634348816556, + "grad_norm": 0.13124701380729675, + "learning_rate": 8.31383451098498e-05, + "loss": 0.1943, + "step": 45400 + }, + { + "epoch": 1.7533495501756824, + "grad_norm": 0.787786066532135, + "learning_rate": 8.311260409024801e-05, + "loss": 0.2009, + "step": 45410 + }, + { + "epoch": 1.7537356654697094, + "grad_norm": 0.30316632986068726, + "learning_rate": 8.308686307064624e-05, + "loss": 0.2426, + "step": 45420 + }, + { + "epoch": 1.754121780763736, + "grad_norm": 1.0703738927841187, + "learning_rate": 8.306112205104445e-05, + "loss": 0.2247, + "step": 45430 + }, + { + "epoch": 1.754507896057763, + "grad_norm": 0.2669302821159363, + "learning_rate": 8.303538103144267e-05, + "loss": 0.2076, + "step": 45440 + }, + { + "epoch": 1.7548940113517897, + "grad_norm": 2.0138015747070312, + "learning_rate": 8.300964001184088e-05, + "loss": 0.3064, + "step": 45450 + }, + { + "epoch": 1.7552801266458165, + "grad_norm": 4.026096820831299, + "learning_rate": 8.298389899223908e-05, + "loss": 0.2664, + "step": 45460 + }, + { + "epoch": 1.7556662419398432, + "grad_norm": 0.7756350636482239, + "learning_rate": 8.295815797263729e-05, + "loss": 0.224, + "step": 45470 + }, + { + "epoch": 1.75605235723387, + "grad_norm": 0.8312183618545532, + "learning_rate": 8.293241695303552e-05, + "loss": 0.393, + "step": 45480 + }, + { + "epoch": 1.756438472527897, + "grad_norm": 1.6933070421218872, + "learning_rate": 8.290667593343373e-05, + "loss": 0.2006, + "step": 45490 + }, + { + "epoch": 1.7568245878219235, + "grad_norm": 0.7471343278884888, + "learning_rate": 8.288093491383195e-05, + "loss": 0.1356, + "step": 45500 + }, + { + "epoch": 1.7572107031159505, + "grad_norm": 2.5663211345672607, + "learning_rate": 8.285519389423016e-05, + "loss": 0.2545, + "step": 45510 + }, + { + "epoch": 1.757596818409977, + "grad_norm": 2.9056997299194336, + "learning_rate": 8.282945287462837e-05, + "loss": 0.2321, + "step": 45520 + }, + { + "epoch": 1.757982933704004, + "grad_norm": 0.6775566935539246, + "learning_rate": 8.280371185502657e-05, + "loss": 0.1703, + "step": 45530 + }, + { + "epoch": 1.7583690489980308, + "grad_norm": 1.0130302906036377, + "learning_rate": 8.27779708354248e-05, + "loss": 0.3204, + "step": 45540 + }, + { + "epoch": 1.7587551642920576, + "grad_norm": 0.8566673398017883, + "learning_rate": 8.275222981582301e-05, + "loss": 0.2069, + "step": 45550 + }, + { + "epoch": 1.7591412795860843, + "grad_norm": 1.0929473638534546, + "learning_rate": 8.272648879622122e-05, + "loss": 0.2631, + "step": 45560 + }, + { + "epoch": 1.7595273948801111, + "grad_norm": 1.7695512771606445, + "learning_rate": 8.270074777661944e-05, + "loss": 0.2028, + "step": 45570 + }, + { + "epoch": 1.759913510174138, + "grad_norm": 1.5217735767364502, + "learning_rate": 
8.267500675701765e-05, + "loss": 0.1925, + "step": 45580 + }, + { + "epoch": 1.7602996254681647, + "grad_norm": 0.639868438243866, + "learning_rate": 8.264926573741586e-05, + "loss": 0.3473, + "step": 45590 + }, + { + "epoch": 1.7606857407621916, + "grad_norm": 0.3286214768886566, + "learning_rate": 8.262352471781408e-05, + "loss": 0.1136, + "step": 45600 + }, + { + "epoch": 1.7610718560562184, + "grad_norm": 1.0162252187728882, + "learning_rate": 8.259778369821229e-05, + "loss": 0.1108, + "step": 45610 + }, + { + "epoch": 1.7614579713502452, + "grad_norm": 0.8821909427642822, + "learning_rate": 8.25720426786105e-05, + "loss": 0.2852, + "step": 45620 + }, + { + "epoch": 1.761844086644272, + "grad_norm": 0.7297468781471252, + "learning_rate": 8.254630165900872e-05, + "loss": 0.187, + "step": 45630 + }, + { + "epoch": 1.7622302019382987, + "grad_norm": 2.8545424938201904, + "learning_rate": 8.252056063940693e-05, + "loss": 0.2441, + "step": 45640 + }, + { + "epoch": 1.7626163172323257, + "grad_norm": 0.5221932530403137, + "learning_rate": 8.249481961980514e-05, + "loss": 0.1934, + "step": 45650 + }, + { + "epoch": 1.7630024325263522, + "grad_norm": 1.1896415948867798, + "learning_rate": 8.246907860020336e-05, + "loss": 0.2149, + "step": 45660 + }, + { + "epoch": 1.7633885478203792, + "grad_norm": 0.28229987621307373, + "learning_rate": 8.244333758060157e-05, + "loss": 0.2514, + "step": 45670 + }, + { + "epoch": 1.763774663114406, + "grad_norm": 1.2415190935134888, + "learning_rate": 8.241759656099978e-05, + "loss": 0.4076, + "step": 45680 + }, + { + "epoch": 1.7641607784084328, + "grad_norm": 2.535752296447754, + "learning_rate": 8.2391855541398e-05, + "loss": 0.1628, + "step": 45690 + }, + { + "epoch": 1.7645468937024595, + "grad_norm": 1.8700125217437744, + "learning_rate": 8.236611452179621e-05, + "loss": 0.2549, + "step": 45700 + }, + { + "epoch": 1.7649330089964863, + "grad_norm": 0.5365914702415466, + "learning_rate": 8.234037350219442e-05, + "loss": 0.0801, + "step": 45710 + }, + { + "epoch": 1.7653191242905133, + "grad_norm": 3.5979111194610596, + "learning_rate": 8.231463248259264e-05, + "loss": 0.3129, + "step": 45720 + }, + { + "epoch": 1.7657052395845398, + "grad_norm": 2.5577943325042725, + "learning_rate": 8.228889146299086e-05, + "loss": 0.2171, + "step": 45730 + }, + { + "epoch": 1.7660913548785668, + "grad_norm": 2.128424882888794, + "learning_rate": 8.226315044338906e-05, + "loss": 0.1687, + "step": 45740 + }, + { + "epoch": 1.7664774701725934, + "grad_norm": 0.41651803255081177, + "learning_rate": 8.223740942378728e-05, + "loss": 0.2342, + "step": 45750 + }, + { + "epoch": 1.7668635854666204, + "grad_norm": 0.8309571146965027, + "learning_rate": 8.221166840418549e-05, + "loss": 0.2406, + "step": 45760 + }, + { + "epoch": 1.7672497007606471, + "grad_norm": 1.7219020128250122, + "learning_rate": 8.21859273845837e-05, + "loss": 0.2764, + "step": 45770 + }, + { + "epoch": 1.767635816054674, + "grad_norm": 1.5574841499328613, + "learning_rate": 8.216018636498192e-05, + "loss": 0.3413, + "step": 45780 + }, + { + "epoch": 1.7680219313487007, + "grad_norm": 0.48963573575019836, + "learning_rate": 8.213444534538014e-05, + "loss": 0.2381, + "step": 45790 + }, + { + "epoch": 1.7684080466427274, + "grad_norm": 2.597996711730957, + "learning_rate": 8.210870432577836e-05, + "loss": 0.2625, + "step": 45800 + }, + { + "epoch": 1.7687941619367544, + "grad_norm": 1.0208812952041626, + "learning_rate": 8.208296330617656e-05, + "loss": 0.3519, + "step": 45810 + }, + { + "epoch": 
1.769180277230781, + "grad_norm": 0.6976078748703003, + "learning_rate": 8.205722228657477e-05, + "loss": 0.2075, + "step": 45820 + }, + { + "epoch": 1.769566392524808, + "grad_norm": 1.534698247909546, + "learning_rate": 8.203148126697298e-05, + "loss": 0.1831, + "step": 45830 + }, + { + "epoch": 1.7699525078188347, + "grad_norm": 1.5414319038391113, + "learning_rate": 8.20057402473712e-05, + "loss": 0.2466, + "step": 45840 + }, + { + "epoch": 1.7703386231128615, + "grad_norm": 0.6082043051719666, + "learning_rate": 8.197999922776941e-05, + "loss": 0.2598, + "step": 45850 + }, + { + "epoch": 1.7707247384068883, + "grad_norm": 1.3977060317993164, + "learning_rate": 8.195425820816764e-05, + "loss": 0.2733, + "step": 45860 + }, + { + "epoch": 1.771110853700915, + "grad_norm": 1.2991368770599365, + "learning_rate": 8.192851718856585e-05, + "loss": 0.3078, + "step": 45870 + }, + { + "epoch": 1.771496968994942, + "grad_norm": 0.2851855754852295, + "learning_rate": 8.190277616896405e-05, + "loss": 0.3315, + "step": 45880 + }, + { + "epoch": 1.7718830842889686, + "grad_norm": 0.7140517234802246, + "learning_rate": 8.187703514936226e-05, + "loss": 0.2454, + "step": 45890 + }, + { + "epoch": 1.7722691995829956, + "grad_norm": 2.3793671131134033, + "learning_rate": 8.185129412976048e-05, + "loss": 0.3749, + "step": 45900 + }, + { + "epoch": 1.7726553148770223, + "grad_norm": 0.6969908475875854, + "learning_rate": 8.182555311015869e-05, + "loss": 0.2849, + "step": 45910 + }, + { + "epoch": 1.773041430171049, + "grad_norm": 0.7409896850585938, + "learning_rate": 8.179981209055692e-05, + "loss": 0.3614, + "step": 45920 + }, + { + "epoch": 1.7734275454650759, + "grad_norm": 2.1562981605529785, + "learning_rate": 8.177407107095513e-05, + "loss": 0.3234, + "step": 45930 + }, + { + "epoch": 1.7738136607591026, + "grad_norm": 0.8214607834815979, + "learning_rate": 8.174833005135334e-05, + "loss": 0.3747, + "step": 45940 + }, + { + "epoch": 1.7741997760531296, + "grad_norm": 1.1292701959609985, + "learning_rate": 8.172258903175154e-05, + "loss": 0.2048, + "step": 45950 + }, + { + "epoch": 1.7745858913471562, + "grad_norm": 0.968303918838501, + "learning_rate": 8.169684801214976e-05, + "loss": 0.1863, + "step": 45960 + }, + { + "epoch": 1.7749720066411832, + "grad_norm": 0.1449265480041504, + "learning_rate": 8.167110699254797e-05, + "loss": 0.2583, + "step": 45970 + }, + { + "epoch": 1.7753581219352097, + "grad_norm": 0.9772675633430481, + "learning_rate": 8.16453659729462e-05, + "loss": 0.372, + "step": 45980 + }, + { + "epoch": 1.7757442372292367, + "grad_norm": 1.8147861957550049, + "learning_rate": 8.161962495334441e-05, + "loss": 0.2404, + "step": 45990 + }, + { + "epoch": 1.7761303525232635, + "grad_norm": 1.828444004058838, + "learning_rate": 8.159388393374262e-05, + "loss": 0.4309, + "step": 46000 + }, + { + "epoch": 1.7765164678172902, + "grad_norm": 0.4886840879917145, + "learning_rate": 8.156814291414084e-05, + "loss": 0.1927, + "step": 46010 + }, + { + "epoch": 1.776902583111317, + "grad_norm": 0.7208076119422913, + "learning_rate": 8.154240189453905e-05, + "loss": 0.2186, + "step": 46020 + }, + { + "epoch": 1.7772886984053438, + "grad_norm": 0.6111328601837158, + "learning_rate": 8.151666087493725e-05, + "loss": 0.2065, + "step": 46030 + }, + { + "epoch": 1.7776748136993707, + "grad_norm": 2.601994514465332, + "learning_rate": 8.149091985533548e-05, + "loss": 0.1138, + "step": 46040 + }, + { + "epoch": 1.7780609289933973, + "grad_norm": 2.0283939838409424, + "learning_rate": 
8.146517883573369e-05, + "loss": 0.3461, + "step": 46050 + }, + { + "epoch": 1.7784470442874243, + "grad_norm": 1.329325795173645, + "learning_rate": 8.14394378161319e-05, + "loss": 0.1981, + "step": 46060 + }, + { + "epoch": 1.778833159581451, + "grad_norm": 0.9197964072227478, + "learning_rate": 8.141369679653012e-05, + "loss": 0.1853, + "step": 46070 + }, + { + "epoch": 1.7792192748754778, + "grad_norm": 0.5444622039794922, + "learning_rate": 8.138795577692833e-05, + "loss": 0.194, + "step": 46080 + }, + { + "epoch": 1.7796053901695046, + "grad_norm": 0.655691385269165, + "learning_rate": 8.136221475732654e-05, + "loss": 0.3279, + "step": 46090 + }, + { + "epoch": 1.7799915054635314, + "grad_norm": 1.0497363805770874, + "learning_rate": 8.133647373772474e-05, + "loss": 0.1208, + "step": 46100 + }, + { + "epoch": 1.7803776207575583, + "grad_norm": 1.5492208003997803, + "learning_rate": 8.131073271812297e-05, + "loss": 0.1155, + "step": 46110 + }, + { + "epoch": 1.780763736051585, + "grad_norm": 0.7815436720848083, + "learning_rate": 8.128499169852118e-05, + "loss": 0.3621, + "step": 46120 + }, + { + "epoch": 1.7811498513456119, + "grad_norm": 0.2011369913816452, + "learning_rate": 8.12592506789194e-05, + "loss": 0.1412, + "step": 46130 + }, + { + "epoch": 1.7815359666396386, + "grad_norm": 0.5177563428878784, + "learning_rate": 8.123350965931761e-05, + "loss": 0.2485, + "step": 46140 + }, + { + "epoch": 1.7819220819336654, + "grad_norm": 1.1141633987426758, + "learning_rate": 8.120776863971582e-05, + "loss": 0.2376, + "step": 46150 + }, + { + "epoch": 1.7823081972276922, + "grad_norm": 0.3764217495918274, + "learning_rate": 8.118202762011404e-05, + "loss": 0.2021, + "step": 46160 + }, + { + "epoch": 1.782694312521719, + "grad_norm": 0.7420102953910828, + "learning_rate": 8.115628660051225e-05, + "loss": 0.374, + "step": 46170 + }, + { + "epoch": 1.783080427815746, + "grad_norm": 1.4147247076034546, + "learning_rate": 8.113054558091046e-05, + "loss": 0.1202, + "step": 46180 + }, + { + "epoch": 1.7834665431097725, + "grad_norm": 0.9888404011726379, + "learning_rate": 8.110480456130868e-05, + "loss": 0.2564, + "step": 46190 + }, + { + "epoch": 1.7838526584037995, + "grad_norm": 1.2556307315826416, + "learning_rate": 8.107906354170689e-05, + "loss": 0.2403, + "step": 46200 + }, + { + "epoch": 1.784238773697826, + "grad_norm": 1.5373485088348389, + "learning_rate": 8.10533225221051e-05, + "loss": 0.23, + "step": 46210 + }, + { + "epoch": 1.784624888991853, + "grad_norm": 1.1403309106826782, + "learning_rate": 8.102758150250332e-05, + "loss": 0.2383, + "step": 46220 + }, + { + "epoch": 1.7850110042858798, + "grad_norm": 1.4236074686050415, + "learning_rate": 8.100184048290153e-05, + "loss": 0.2121, + "step": 46230 + }, + { + "epoch": 1.7853971195799065, + "grad_norm": 1.9537183046340942, + "learning_rate": 8.097609946329974e-05, + "loss": 0.2512, + "step": 46240 + }, + { + "epoch": 1.7857832348739333, + "grad_norm": 0.10802419483661652, + "learning_rate": 8.095035844369796e-05, + "loss": 0.2871, + "step": 46250 + }, + { + "epoch": 1.78616935016796, + "grad_norm": 0.9327038526535034, + "learning_rate": 8.092461742409617e-05, + "loss": 0.0487, + "step": 46260 + }, + { + "epoch": 1.786555465461987, + "grad_norm": 2.114015817642212, + "learning_rate": 8.089887640449438e-05, + "loss": 0.3128, + "step": 46270 + }, + { + "epoch": 1.7869415807560136, + "grad_norm": 1.2681559324264526, + "learning_rate": 8.08731353848926e-05, + "loss": 0.2883, + "step": 46280 + }, + { + "epoch": 1.7873276960500406, + 
"grad_norm": 1.0767977237701416, + "learning_rate": 8.084739436529082e-05, + "loss": 0.1632, + "step": 46290 + }, + { + "epoch": 1.7877138113440674, + "grad_norm": 0.9782537221908569, + "learning_rate": 8.082165334568902e-05, + "loss": 0.2489, + "step": 46300 + }, + { + "epoch": 1.7880999266380941, + "grad_norm": 0.3115275204181671, + "learning_rate": 8.079591232608724e-05, + "loss": 0.3691, + "step": 46310 + }, + { + "epoch": 1.788486041932121, + "grad_norm": 1.7753130197525024, + "learning_rate": 8.077017130648545e-05, + "loss": 0.2362, + "step": 46320 + }, + { + "epoch": 1.7888721572261477, + "grad_norm": 1.5351624488830566, + "learning_rate": 8.074443028688366e-05, + "loss": 0.4439, + "step": 46330 + }, + { + "epoch": 1.7892582725201747, + "grad_norm": 0.15024085342884064, + "learning_rate": 8.071868926728188e-05, + "loss": 0.2309, + "step": 46340 + }, + { + "epoch": 1.7896443878142012, + "grad_norm": 0.3030475974082947, + "learning_rate": 8.069294824768009e-05, + "loss": 0.1032, + "step": 46350 + }, + { + "epoch": 1.7900305031082282, + "grad_norm": 2.4676828384399414, + "learning_rate": 8.066720722807832e-05, + "loss": 0.4874, + "step": 46360 + }, + { + "epoch": 1.790416618402255, + "grad_norm": 1.4394946098327637, + "learning_rate": 8.064146620847653e-05, + "loss": 0.1635, + "step": 46370 + }, + { + "epoch": 1.7908027336962817, + "grad_norm": 0.05110672488808632, + "learning_rate": 8.061572518887473e-05, + "loss": 0.1001, + "step": 46380 + }, + { + "epoch": 1.7911888489903085, + "grad_norm": 0.9311584830284119, + "learning_rate": 8.058998416927294e-05, + "loss": 0.288, + "step": 46390 + }, + { + "epoch": 1.7915749642843353, + "grad_norm": 0.869853675365448, + "learning_rate": 8.056424314967116e-05, + "loss": 0.3104, + "step": 46400 + }, + { + "epoch": 1.7919610795783623, + "grad_norm": 1.8579514026641846, + "learning_rate": 8.053850213006937e-05, + "loss": 0.1846, + "step": 46410 + }, + { + "epoch": 1.7923471948723888, + "grad_norm": 1.0974096059799194, + "learning_rate": 8.05127611104676e-05, + "loss": 0.2144, + "step": 46420 + }, + { + "epoch": 1.7927333101664158, + "grad_norm": 1.3006702661514282, + "learning_rate": 8.048702009086581e-05, + "loss": 0.4802, + "step": 46430 + }, + { + "epoch": 1.7931194254604423, + "grad_norm": 0.41811448335647583, + "learning_rate": 8.046127907126402e-05, + "loss": 0.3352, + "step": 46440 + }, + { + "epoch": 1.7935055407544693, + "grad_norm": 1.1778554916381836, + "learning_rate": 8.043553805166222e-05, + "loss": 0.2485, + "step": 46450 + }, + { + "epoch": 1.793891656048496, + "grad_norm": 0.10794985294342041, + "learning_rate": 8.040979703206044e-05, + "loss": 0.3235, + "step": 46460 + }, + { + "epoch": 1.7942777713425229, + "grad_norm": 0.6999881267547607, + "learning_rate": 8.038405601245865e-05, + "loss": 0.2148, + "step": 46470 + }, + { + "epoch": 1.7946638866365496, + "grad_norm": 1.2335025072097778, + "learning_rate": 8.035831499285688e-05, + "loss": 0.2128, + "step": 46480 + }, + { + "epoch": 1.7950500019305764, + "grad_norm": 0.23150336742401123, + "learning_rate": 8.033257397325509e-05, + "loss": 0.3316, + "step": 46490 + }, + { + "epoch": 1.7954361172246034, + "grad_norm": 2.0523316860198975, + "learning_rate": 8.03068329536533e-05, + "loss": 0.1892, + "step": 46500 + }, + { + "epoch": 1.79582223251863, + "grad_norm": 1.127738356590271, + "learning_rate": 8.028109193405152e-05, + "loss": 0.2521, + "step": 46510 + }, + { + "epoch": 1.796208347812657, + "grad_norm": 1.5948694944381714, + "learning_rate": 8.025535091444972e-05, + "loss": 
0.3407, + "step": 46520 + }, + { + "epoch": 1.7965944631066837, + "grad_norm": 0.6383172273635864, + "learning_rate": 8.022960989484793e-05, + "loss": 0.271, + "step": 46530 + }, + { + "epoch": 1.7969805784007105, + "grad_norm": 1.016988754272461, + "learning_rate": 8.020386887524616e-05, + "loss": 0.1699, + "step": 46540 + }, + { + "epoch": 1.7973666936947372, + "grad_norm": 0.7680085897445679, + "learning_rate": 8.017812785564437e-05, + "loss": 0.193, + "step": 46550 + }, + { + "epoch": 1.797752808988764, + "grad_norm": 0.36858290433883667, + "learning_rate": 8.015238683604258e-05, + "loss": 0.2332, + "step": 46560 + }, + { + "epoch": 1.798138924282791, + "grad_norm": 6.89350700378418, + "learning_rate": 8.01266458164408e-05, + "loss": 0.6444, + "step": 46570 + }, + { + "epoch": 1.7985250395768175, + "grad_norm": 0.3459351360797882, + "learning_rate": 8.010090479683901e-05, + "loss": 0.2327, + "step": 46580 + }, + { + "epoch": 1.7989111548708445, + "grad_norm": 1.195894718170166, + "learning_rate": 8.007516377723722e-05, + "loss": 0.2167, + "step": 46590 + }, + { + "epoch": 1.799297270164871, + "grad_norm": 3.738119602203369, + "learning_rate": 8.004942275763542e-05, + "loss": 0.1946, + "step": 46600 + }, + { + "epoch": 1.799683385458898, + "grad_norm": 1.207139015197754, + "learning_rate": 8.002368173803365e-05, + "loss": 0.2888, + "step": 46610 + }, + { + "epoch": 1.8000695007529248, + "grad_norm": 0.2352428138256073, + "learning_rate": 7.999794071843186e-05, + "loss": 0.301, + "step": 46620 + }, + { + "epoch": 1.8004556160469516, + "grad_norm": 0.4833974242210388, + "learning_rate": 7.997219969883008e-05, + "loss": 0.1984, + "step": 46630 + }, + { + "epoch": 1.8008417313409786, + "grad_norm": 0.27819526195526123, + "learning_rate": 7.994645867922829e-05, + "loss": 0.229, + "step": 46640 + }, + { + "epoch": 1.8012278466350051, + "grad_norm": 1.2809014320373535, + "learning_rate": 7.99207176596265e-05, + "loss": 0.328, + "step": 46650 + }, + { + "epoch": 1.8016139619290321, + "grad_norm": 3.3877575397491455, + "learning_rate": 7.989497664002472e-05, + "loss": 0.236, + "step": 46660 + }, + { + "epoch": 1.8020000772230587, + "grad_norm": 0.5104680061340332, + "learning_rate": 7.986923562042293e-05, + "loss": 0.1853, + "step": 46670 + }, + { + "epoch": 1.8023861925170857, + "grad_norm": 0.6869563460350037, + "learning_rate": 7.984349460082114e-05, + "loss": 0.1743, + "step": 46680 + }, + { + "epoch": 1.8027723078111124, + "grad_norm": 1.9329041242599487, + "learning_rate": 7.981775358121936e-05, + "loss": 0.3057, + "step": 46690 + }, + { + "epoch": 1.8031584231051392, + "grad_norm": 0.9997574090957642, + "learning_rate": 7.979201256161757e-05, + "loss": 0.2145, + "step": 46700 + }, + { + "epoch": 1.803544538399166, + "grad_norm": 1.173745036125183, + "learning_rate": 7.976627154201578e-05, + "loss": 0.2315, + "step": 46710 + }, + { + "epoch": 1.8039306536931927, + "grad_norm": 0.9252222180366516, + "learning_rate": 7.9740530522414e-05, + "loss": 0.3173, + "step": 46720 + }, + { + "epoch": 1.8043167689872197, + "grad_norm": 1.316101312637329, + "learning_rate": 7.971478950281221e-05, + "loss": 0.1249, + "step": 46730 + }, + { + "epoch": 1.8047028842812463, + "grad_norm": 1.1243810653686523, + "learning_rate": 7.968904848321042e-05, + "loss": 0.3688, + "step": 46740 + }, + { + "epoch": 1.8050889995752732, + "grad_norm": 2.3322315216064453, + "learning_rate": 7.966330746360864e-05, + "loss": 0.2379, + "step": 46750 + }, + { + "epoch": 1.8054751148693, + "grad_norm": 1.534040927886963, + 
"learning_rate": 7.963756644400685e-05, + "loss": 0.365, + "step": 46760 + }, + { + "epoch": 1.8058612301633268, + "grad_norm": 1.326835036277771, + "learning_rate": 7.961182542440506e-05, + "loss": 0.1909, + "step": 46770 + }, + { + "epoch": 1.8062473454573535, + "grad_norm": 1.8357387781143188, + "learning_rate": 7.958608440480328e-05, + "loss": 0.3187, + "step": 46780 + }, + { + "epoch": 1.8066334607513803, + "grad_norm": 0.9270907044410706, + "learning_rate": 7.95603433852015e-05, + "loss": 0.2022, + "step": 46790 + }, + { + "epoch": 1.8070195760454073, + "grad_norm": 0.5300419330596924, + "learning_rate": 7.95346023655997e-05, + "loss": 0.2015, + "step": 46800 + }, + { + "epoch": 1.8074056913394339, + "grad_norm": 0.35859522223472595, + "learning_rate": 7.950886134599792e-05, + "loss": 0.1764, + "step": 46810 + }, + { + "epoch": 1.8077918066334608, + "grad_norm": 0.5568848252296448, + "learning_rate": 7.948312032639613e-05, + "loss": 0.1862, + "step": 46820 + }, + { + "epoch": 1.8081779219274874, + "grad_norm": 0.22550754249095917, + "learning_rate": 7.945737930679434e-05, + "loss": 0.2237, + "step": 46830 + }, + { + "epoch": 1.8085640372215144, + "grad_norm": 1.2444871664047241, + "learning_rate": 7.943163828719256e-05, + "loss": 0.2077, + "step": 46840 + }, + { + "epoch": 1.8089501525155411, + "grad_norm": 0.9609934091567993, + "learning_rate": 7.940589726759077e-05, + "loss": 0.2717, + "step": 46850 + }, + { + "epoch": 1.809336267809568, + "grad_norm": 0.2863350808620453, + "learning_rate": 7.9380156247989e-05, + "loss": 0.3284, + "step": 46860 + }, + { + "epoch": 1.8097223831035947, + "grad_norm": 1.6350208520889282, + "learning_rate": 7.93544152283872e-05, + "loss": 0.0919, + "step": 46870 + }, + { + "epoch": 1.8101084983976214, + "grad_norm": 0.5808874368667603, + "learning_rate": 7.932867420878541e-05, + "loss": 0.165, + "step": 46880 + }, + { + "epoch": 1.8104946136916484, + "grad_norm": 0.5033291578292847, + "learning_rate": 7.930293318918362e-05, + "loss": 0.192, + "step": 46890 + }, + { + "epoch": 1.810880728985675, + "grad_norm": 3.8204874992370605, + "learning_rate": 7.927719216958184e-05, + "loss": 0.3885, + "step": 46900 + }, + { + "epoch": 1.811266844279702, + "grad_norm": 0.9689306020736694, + "learning_rate": 7.925145114998005e-05, + "loss": 0.2609, + "step": 46910 + }, + { + "epoch": 1.8116529595737287, + "grad_norm": 2.7360000610351562, + "learning_rate": 7.922571013037828e-05, + "loss": 0.1955, + "step": 46920 + }, + { + "epoch": 1.8120390748677555, + "grad_norm": 1.2181187868118286, + "learning_rate": 7.919996911077649e-05, + "loss": 0.251, + "step": 46930 + }, + { + "epoch": 1.8124251901617823, + "grad_norm": 0.2421361356973648, + "learning_rate": 7.91742280911747e-05, + "loss": 0.1401, + "step": 46940 + }, + { + "epoch": 1.812811305455809, + "grad_norm": 1.1567500829696655, + "learning_rate": 7.91484870715729e-05, + "loss": 0.2303, + "step": 46950 + }, + { + "epoch": 1.813197420749836, + "grad_norm": 2.173985481262207, + "learning_rate": 7.912274605197112e-05, + "loss": 0.3141, + "step": 46960 + }, + { + "epoch": 1.8135835360438626, + "grad_norm": 0.21923333406448364, + "learning_rate": 7.909700503236933e-05, + "loss": 0.3109, + "step": 46970 + }, + { + "epoch": 1.8139696513378896, + "grad_norm": 2.7012853622436523, + "learning_rate": 7.907126401276756e-05, + "loss": 0.2912, + "step": 46980 + }, + { + "epoch": 1.8143557666319163, + "grad_norm": 1.6185250282287598, + "learning_rate": 7.904552299316577e-05, + "loss": 0.1989, + "step": 46990 + }, + { + "epoch": 
1.814741881925943, + "grad_norm": 1.2078602313995361, + "learning_rate": 7.901978197356398e-05, + "loss": 0.2906, + "step": 47000 + }, + { + "epoch": 1.8151279972199699, + "grad_norm": 0.8695535063743591, + "learning_rate": 7.89940409539622e-05, + "loss": 0.2486, + "step": 47010 + }, + { + "epoch": 1.8155141125139966, + "grad_norm": 1.7880644798278809, + "learning_rate": 7.89682999343604e-05, + "loss": 0.2963, + "step": 47020 + }, + { + "epoch": 1.8159002278080236, + "grad_norm": 1.9220221042633057, + "learning_rate": 7.894255891475861e-05, + "loss": 0.2822, + "step": 47030 + }, + { + "epoch": 1.8162863431020502, + "grad_norm": 0.2507815361022949, + "learning_rate": 7.891681789515684e-05, + "loss": 0.165, + "step": 47040 + }, + { + "epoch": 1.8166724583960772, + "grad_norm": 0.8847838640213013, + "learning_rate": 7.889107687555505e-05, + "loss": 0.1536, + "step": 47050 + }, + { + "epoch": 1.8170585736901037, + "grad_norm": 0.7972745895385742, + "learning_rate": 7.886533585595326e-05, + "loss": 0.4152, + "step": 47060 + }, + { + "epoch": 1.8174446889841307, + "grad_norm": 3.005274534225464, + "learning_rate": 7.883959483635148e-05, + "loss": 0.2491, + "step": 47070 + }, + { + "epoch": 1.8178308042781575, + "grad_norm": 0.19801591336727142, + "learning_rate": 7.881385381674969e-05, + "loss": 0.2671, + "step": 47080 + }, + { + "epoch": 1.8182169195721842, + "grad_norm": 0.43803074955940247, + "learning_rate": 7.878811279714789e-05, + "loss": 0.1101, + "step": 47090 + }, + { + "epoch": 1.818603034866211, + "grad_norm": 2.517371416091919, + "learning_rate": 7.876237177754612e-05, + "loss": 0.2759, + "step": 47100 + }, + { + "epoch": 1.8189891501602378, + "grad_norm": 1.0714353322982788, + "learning_rate": 7.873663075794433e-05, + "loss": 0.2247, + "step": 47110 + }, + { + "epoch": 1.8193752654542648, + "grad_norm": 1.3440790176391602, + "learning_rate": 7.871088973834254e-05, + "loss": 0.1309, + "step": 47120 + }, + { + "epoch": 1.8197613807482913, + "grad_norm": 0.10665372759103775, + "learning_rate": 7.868514871874076e-05, + "loss": 0.3176, + "step": 47130 + }, + { + "epoch": 1.8201474960423183, + "grad_norm": 0.6812877655029297, + "learning_rate": 7.865940769913897e-05, + "loss": 0.1709, + "step": 47140 + }, + { + "epoch": 1.820533611336345, + "grad_norm": 0.3776918649673462, + "learning_rate": 7.863366667953718e-05, + "loss": 0.1949, + "step": 47150 + }, + { + "epoch": 1.8209197266303718, + "grad_norm": 0.7111637592315674, + "learning_rate": 7.860792565993538e-05, + "loss": 0.2463, + "step": 47160 + }, + { + "epoch": 1.8213058419243986, + "grad_norm": 0.5845387578010559, + "learning_rate": 7.858218464033361e-05, + "loss": 0.285, + "step": 47170 + }, + { + "epoch": 1.8216919572184254, + "grad_norm": 0.394326776266098, + "learning_rate": 7.855644362073182e-05, + "loss": 0.1547, + "step": 47180 + }, + { + "epoch": 1.8220780725124524, + "grad_norm": 1.6686782836914062, + "learning_rate": 7.853070260113004e-05, + "loss": 0.2764, + "step": 47190 + }, + { + "epoch": 1.822464187806479, + "grad_norm": 0.4293366074562073, + "learning_rate": 7.850496158152825e-05, + "loss": 0.2277, + "step": 47200 + }, + { + "epoch": 1.8228503031005059, + "grad_norm": 0.5316643118858337, + "learning_rate": 7.847922056192646e-05, + "loss": 0.2015, + "step": 47210 + }, + { + "epoch": 1.8232364183945327, + "grad_norm": 1.1477640867233276, + "learning_rate": 7.845347954232468e-05, + "loss": 0.4081, + "step": 47220 + }, + { + "epoch": 1.8236225336885594, + "grad_norm": 1.0033808946609497, + "learning_rate": 
7.842773852272289e-05, + "loss": 0.1356, + "step": 47230 + }, + { + "epoch": 1.8240086489825862, + "grad_norm": 1.0021567344665527, + "learning_rate": 7.84019975031211e-05, + "loss": 0.161, + "step": 47240 + }, + { + "epoch": 1.824394764276613, + "grad_norm": 0.733974277973175, + "learning_rate": 7.837625648351932e-05, + "loss": 0.1246, + "step": 47250 + }, + { + "epoch": 1.82478087957064, + "grad_norm": 1.1681737899780273, + "learning_rate": 7.835051546391753e-05, + "loss": 0.3287, + "step": 47260 + }, + { + "epoch": 1.8251669948646665, + "grad_norm": 2.490657329559326, + "learning_rate": 7.832477444431574e-05, + "loss": 0.2203, + "step": 47270 + }, + { + "epoch": 1.8255531101586935, + "grad_norm": 1.1667157411575317, + "learning_rate": 7.829903342471396e-05, + "loss": 0.2827, + "step": 47280 + }, + { + "epoch": 1.82593922545272, + "grad_norm": 0.31772786378860474, + "learning_rate": 7.827329240511217e-05, + "loss": 0.1664, + "step": 47290 + }, + { + "epoch": 1.826325340746747, + "grad_norm": 1.1539255380630493, + "learning_rate": 7.824755138551038e-05, + "loss": 0.207, + "step": 47300 + }, + { + "epoch": 1.8267114560407738, + "grad_norm": 0.9592777490615845, + "learning_rate": 7.82218103659086e-05, + "loss": 0.193, + "step": 47310 + }, + { + "epoch": 1.8270975713348006, + "grad_norm": 0.6902135014533997, + "learning_rate": 7.819606934630681e-05, + "loss": 0.2119, + "step": 47320 + }, + { + "epoch": 1.8274836866288273, + "grad_norm": 1.4007855653762817, + "learning_rate": 7.817032832670502e-05, + "loss": 0.2046, + "step": 47330 + }, + { + "epoch": 1.827869801922854, + "grad_norm": 1.520080804824829, + "learning_rate": 7.814458730710324e-05, + "loss": 0.2861, + "step": 47340 + }, + { + "epoch": 1.828255917216881, + "grad_norm": 2.576406478881836, + "learning_rate": 7.811884628750146e-05, + "loss": 0.2615, + "step": 47350 + }, + { + "epoch": 1.8286420325109076, + "grad_norm": 1.499143362045288, + "learning_rate": 7.809310526789968e-05, + "loss": 0.4094, + "step": 47360 + }, + { + "epoch": 1.8290281478049346, + "grad_norm": 0.9561934471130371, + "learning_rate": 7.806736424829787e-05, + "loss": 0.2121, + "step": 47370 + }, + { + "epoch": 1.8294142630989614, + "grad_norm": 1.838971734046936, + "learning_rate": 7.804162322869609e-05, + "loss": 0.325, + "step": 47380 + }, + { + "epoch": 1.8298003783929881, + "grad_norm": 1.1759018898010254, + "learning_rate": 7.80158822090943e-05, + "loss": 0.1095, + "step": 47390 + }, + { + "epoch": 1.830186493687015, + "grad_norm": 0.4624423086643219, + "learning_rate": 7.799014118949251e-05, + "loss": 0.264, + "step": 47400 + }, + { + "epoch": 1.8305726089810417, + "grad_norm": 2.0572640895843506, + "learning_rate": 7.796440016989073e-05, + "loss": 0.3171, + "step": 47410 + }, + { + "epoch": 1.8309587242750687, + "grad_norm": 0.959831714630127, + "learning_rate": 7.793865915028896e-05, + "loss": 0.1588, + "step": 47420 + }, + { + "epoch": 1.8313448395690952, + "grad_norm": 0.7953433394432068, + "learning_rate": 7.791291813068717e-05, + "loss": 0.3992, + "step": 47430 + }, + { + "epoch": 1.8317309548631222, + "grad_norm": 0.8194203972816467, + "learning_rate": 7.788717711108537e-05, + "loss": 0.2718, + "step": 47440 + }, + { + "epoch": 1.832117070157149, + "grad_norm": 1.7283341884613037, + "learning_rate": 7.786143609148358e-05, + "loss": 0.3977, + "step": 47450 + }, + { + "epoch": 1.8325031854511757, + "grad_norm": 0.15646076202392578, + "learning_rate": 7.78356950718818e-05, + "loss": 0.2449, + "step": 47460 + }, + { + "epoch": 1.8328893007452025, + 
"grad_norm": 1.0211836099624634, + "learning_rate": 7.780995405228001e-05, + "loss": 0.2022, + "step": 47470 + }, + { + "epoch": 1.8332754160392293, + "grad_norm": 0.8658961653709412, + "learning_rate": 7.778421303267823e-05, + "loss": 0.2429, + "step": 47480 + }, + { + "epoch": 1.8336615313332563, + "grad_norm": 1.2359483242034912, + "learning_rate": 7.775847201307645e-05, + "loss": 0.3284, + "step": 47490 + }, + { + "epoch": 1.8340476466272828, + "grad_norm": 1.2980804443359375, + "learning_rate": 7.773273099347466e-05, + "loss": 0.1205, + "step": 47500 + }, + { + "epoch": 1.8344337619213098, + "grad_norm": 0.06653542071580887, + "learning_rate": 7.770698997387286e-05, + "loss": 0.1376, + "step": 47510 + }, + { + "epoch": 1.8348198772153363, + "grad_norm": 1.0570144653320312, + "learning_rate": 7.768124895427107e-05, + "loss": 0.1288, + "step": 47520 + }, + { + "epoch": 1.8352059925093633, + "grad_norm": 1.0040183067321777, + "learning_rate": 7.765550793466929e-05, + "loss": 0.1825, + "step": 47530 + }, + { + "epoch": 1.83559210780339, + "grad_norm": 2.5596699714660645, + "learning_rate": 7.762976691506751e-05, + "loss": 0.1764, + "step": 47540 + }, + { + "epoch": 1.8359782230974169, + "grad_norm": 4.138411521911621, + "learning_rate": 7.760402589546573e-05, + "loss": 0.2783, + "step": 47550 + }, + { + "epoch": 1.8363643383914436, + "grad_norm": 0.8152881860733032, + "learning_rate": 7.757828487586394e-05, + "loss": 0.1424, + "step": 47560 + }, + { + "epoch": 1.8367504536854704, + "grad_norm": 2.608886480331421, + "learning_rate": 7.755254385626215e-05, + "loss": 0.161, + "step": 47570 + }, + { + "epoch": 1.8371365689794974, + "grad_norm": 2.870406150817871, + "learning_rate": 7.752680283666037e-05, + "loss": 0.6419, + "step": 47580 + }, + { + "epoch": 1.837522684273524, + "grad_norm": 1.2127447128295898, + "learning_rate": 7.750106181705857e-05, + "loss": 0.1987, + "step": 47590 + }, + { + "epoch": 1.837908799567551, + "grad_norm": 0.44798439741134644, + "learning_rate": 7.74753207974568e-05, + "loss": 0.2313, + "step": 47600 + }, + { + "epoch": 1.8382949148615777, + "grad_norm": 0.434055894613266, + "learning_rate": 7.744957977785501e-05, + "loss": 0.1227, + "step": 47610 + }, + { + "epoch": 1.8386810301556045, + "grad_norm": 1.0160471200942993, + "learning_rate": 7.742383875825322e-05, + "loss": 0.2699, + "step": 47620 + }, + { + "epoch": 1.8390671454496312, + "grad_norm": 2.0095553398132324, + "learning_rate": 7.739809773865143e-05, + "loss": 0.2666, + "step": 47630 + }, + { + "epoch": 1.839453260743658, + "grad_norm": 2.849498748779297, + "learning_rate": 7.737235671904965e-05, + "loss": 0.2798, + "step": 47640 + }, + { + "epoch": 1.839839376037685, + "grad_norm": 2.336435556411743, + "learning_rate": 7.734661569944786e-05, + "loss": 0.3048, + "step": 47650 + }, + { + "epoch": 1.8402254913317115, + "grad_norm": 0.8438564538955688, + "learning_rate": 7.732087467984606e-05, + "loss": 0.2876, + "step": 47660 + }, + { + "epoch": 1.8406116066257385, + "grad_norm": 0.514867901802063, + "learning_rate": 7.729513366024429e-05, + "loss": 0.2249, + "step": 47670 + }, + { + "epoch": 1.8409977219197653, + "grad_norm": 4.391468048095703, + "learning_rate": 7.72693926406425e-05, + "loss": 0.3918, + "step": 47680 + }, + { + "epoch": 1.841383837213792, + "grad_norm": 2.4497931003570557, + "learning_rate": 7.724365162104071e-05, + "loss": 0.3368, + "step": 47690 + }, + { + "epoch": 1.8417699525078188, + "grad_norm": 2.884559392929077, + "learning_rate": 7.721791060143893e-05, + "loss": 0.2406, + 
"step": 47700 + }, + { + "epoch": 1.8421560678018456, + "grad_norm": 1.099225640296936, + "learning_rate": 7.719216958183714e-05, + "loss": 0.1759, + "step": 47710 + }, + { + "epoch": 1.8425421830958726, + "grad_norm": 0.881747305393219, + "learning_rate": 7.716642856223535e-05, + "loss": 0.2377, + "step": 47720 + }, + { + "epoch": 1.8429282983898991, + "grad_norm": 1.7341536283493042, + "learning_rate": 7.714068754263357e-05, + "loss": 0.1593, + "step": 47730 + }, + { + "epoch": 1.8433144136839261, + "grad_norm": 0.8997848629951477, + "learning_rate": 7.711494652303178e-05, + "loss": 0.2198, + "step": 47740 + }, + { + "epoch": 1.8437005289779527, + "grad_norm": 0.38423866033554077, + "learning_rate": 7.708920550343e-05, + "loss": 0.2539, + "step": 47750 + }, + { + "epoch": 1.8440866442719797, + "grad_norm": 1.0857203006744385, + "learning_rate": 7.706346448382821e-05, + "loss": 0.1595, + "step": 47760 + }, + { + "epoch": 1.8444727595660064, + "grad_norm": 1.5009055137634277, + "learning_rate": 7.703772346422642e-05, + "loss": 0.2478, + "step": 47770 + }, + { + "epoch": 1.8448588748600332, + "grad_norm": 0.13288016617298126, + "learning_rate": 7.701198244462463e-05, + "loss": 0.2184, + "step": 47780 + }, + { + "epoch": 1.84524499015406, + "grad_norm": 1.167777180671692, + "learning_rate": 7.698624142502285e-05, + "loss": 0.3157, + "step": 47790 + }, + { + "epoch": 1.8456311054480867, + "grad_norm": 0.5168697834014893, + "learning_rate": 7.696050040542106e-05, + "loss": 0.1543, + "step": 47800 + }, + { + "epoch": 1.8460172207421137, + "grad_norm": 0.7516908645629883, + "learning_rate": 7.693475938581927e-05, + "loss": 0.1869, + "step": 47810 + }, + { + "epoch": 1.8464033360361403, + "grad_norm": 3.0962419509887695, + "learning_rate": 7.690901836621749e-05, + "loss": 0.3891, + "step": 47820 + }, + { + "epoch": 1.8467894513301673, + "grad_norm": 0.05181325227022171, + "learning_rate": 7.68832773466157e-05, + "loss": 0.3427, + "step": 47830 + }, + { + "epoch": 1.847175566624194, + "grad_norm": 2.856113910675049, + "learning_rate": 7.685753632701391e-05, + "loss": 0.2148, + "step": 47840 + }, + { + "epoch": 1.8475616819182208, + "grad_norm": 0.4950585663318634, + "learning_rate": 7.683179530741214e-05, + "loss": 0.2689, + "step": 47850 + }, + { + "epoch": 1.8479477972122476, + "grad_norm": 2.390775680541992, + "learning_rate": 7.680605428781034e-05, + "loss": 0.3389, + "step": 47860 + }, + { + "epoch": 1.8483339125062743, + "grad_norm": 0.1536688208580017, + "learning_rate": 7.678031326820855e-05, + "loss": 0.1811, + "step": 47870 + }, + { + "epoch": 1.8487200278003013, + "grad_norm": 0.26756611466407776, + "learning_rate": 7.675457224860677e-05, + "loss": 0.1735, + "step": 47880 + }, + { + "epoch": 1.8491061430943279, + "grad_norm": 1.3034265041351318, + "learning_rate": 7.672883122900498e-05, + "loss": 0.1554, + "step": 47890 + }, + { + "epoch": 1.8494922583883548, + "grad_norm": 1.1781542301177979, + "learning_rate": 7.67030902094032e-05, + "loss": 0.4027, + "step": 47900 + }, + { + "epoch": 1.8498783736823814, + "grad_norm": 1.2090016603469849, + "learning_rate": 7.667734918980141e-05, + "loss": 0.1873, + "step": 47910 + }, + { + "epoch": 1.8502644889764084, + "grad_norm": 1.5354760885238647, + "learning_rate": 7.665160817019963e-05, + "loss": 0.2218, + "step": 47920 + }, + { + "epoch": 1.8506506042704352, + "grad_norm": 2.093831777572632, + "learning_rate": 7.662586715059785e-05, + "loss": 0.2591, + "step": 47930 + }, + { + "epoch": 1.851036719564462, + "grad_norm": 0.9484484195709229, + 
"learning_rate": 7.660012613099605e-05, + "loss": 0.252, + "step": 47940 + }, + { + "epoch": 1.851422834858489, + "grad_norm": 0.15661562979221344, + "learning_rate": 7.657438511139426e-05, + "loss": 0.2673, + "step": 47950 + }, + { + "epoch": 1.8518089501525155, + "grad_norm": 0.5501863360404968, + "learning_rate": 7.654864409179247e-05, + "loss": 0.2636, + "step": 47960 + }, + { + "epoch": 1.8521950654465424, + "grad_norm": 1.455328345298767, + "learning_rate": 7.652290307219069e-05, + "loss": 0.2085, + "step": 47970 + }, + { + "epoch": 1.852581180740569, + "grad_norm": 0.310106098651886, + "learning_rate": 7.649716205258891e-05, + "loss": 0.1839, + "step": 47980 + }, + { + "epoch": 1.852967296034596, + "grad_norm": 0.444260835647583, + "learning_rate": 7.647142103298713e-05, + "loss": 0.2434, + "step": 47990 + }, + { + "epoch": 1.8533534113286227, + "grad_norm": 0.5278909206390381, + "learning_rate": 7.644568001338534e-05, + "loss": 0.2472, + "step": 48000 + }, + { + "epoch": 1.8537395266226495, + "grad_norm": 0.4239410161972046, + "learning_rate": 7.641993899378354e-05, + "loss": 0.2854, + "step": 48010 + }, + { + "epoch": 1.8541256419166763, + "grad_norm": 0.9763671159744263, + "learning_rate": 7.639419797418175e-05, + "loss": 0.2737, + "step": 48020 + }, + { + "epoch": 1.854511757210703, + "grad_norm": 2.139054775238037, + "learning_rate": 7.636845695457997e-05, + "loss": 0.2553, + "step": 48030 + }, + { + "epoch": 1.85489787250473, + "grad_norm": 1.8417706489562988, + "learning_rate": 7.63427159349782e-05, + "loss": 0.3787, + "step": 48040 + }, + { + "epoch": 1.8552839877987566, + "grad_norm": 1.2950854301452637, + "learning_rate": 7.631697491537641e-05, + "loss": 0.1859, + "step": 48050 + }, + { + "epoch": 1.8556701030927836, + "grad_norm": 0.9440277218818665, + "learning_rate": 7.629123389577462e-05, + "loss": 0.2362, + "step": 48060 + }, + { + "epoch": 1.8560562183868103, + "grad_norm": 2.829890012741089, + "learning_rate": 7.626549287617283e-05, + "loss": 0.264, + "step": 48070 + }, + { + "epoch": 1.856442333680837, + "grad_norm": 1.5721958875656128, + "learning_rate": 7.623975185657103e-05, + "loss": 0.2834, + "step": 48080 + }, + { + "epoch": 1.8568284489748639, + "grad_norm": 0.7574679851531982, + "learning_rate": 7.621401083696925e-05, + "loss": 0.1546, + "step": 48090 + }, + { + "epoch": 1.8572145642688906, + "grad_norm": 0.4562332332134247, + "learning_rate": 7.618826981736747e-05, + "loss": 0.2419, + "step": 48100 + }, + { + "epoch": 1.8576006795629176, + "grad_norm": 1.8063342571258545, + "learning_rate": 7.616252879776569e-05, + "loss": 0.5576, + "step": 48110 + }, + { + "epoch": 1.8579867948569442, + "grad_norm": 1.7112247943878174, + "learning_rate": 7.61367877781639e-05, + "loss": 0.2379, + "step": 48120 + }, + { + "epoch": 1.8583729101509712, + "grad_norm": 0.696594774723053, + "learning_rate": 7.611104675856211e-05, + "loss": 0.2704, + "step": 48130 + }, + { + "epoch": 1.8587590254449977, + "grad_norm": 1.1555263996124268, + "learning_rate": 7.608530573896033e-05, + "loss": 0.2835, + "step": 48140 + }, + { + "epoch": 1.8591451407390247, + "grad_norm": 0.8256335854530334, + "learning_rate": 7.605956471935854e-05, + "loss": 0.3059, + "step": 48150 + }, + { + "epoch": 1.8595312560330515, + "grad_norm": 1.5511350631713867, + "learning_rate": 7.603382369975674e-05, + "loss": 0.2882, + "step": 48160 + }, + { + "epoch": 1.8599173713270782, + "grad_norm": 0.5888099074363708, + "learning_rate": 7.600808268015497e-05, + "loss": 0.2939, + "step": 48170 + }, + { + "epoch": 
1.860303486621105, + "grad_norm": 1.3985711336135864, + "learning_rate": 7.598234166055318e-05, + "loss": 0.203, + "step": 48180 + }, + { + "epoch": 1.8606896019151318, + "grad_norm": 2.2253661155700684, + "learning_rate": 7.59566006409514e-05, + "loss": 0.2865, + "step": 48190 + }, + { + "epoch": 1.8610757172091588, + "grad_norm": 1.062969446182251, + "learning_rate": 7.593085962134961e-05, + "loss": 0.3292, + "step": 48200 + }, + { + "epoch": 1.8614618325031853, + "grad_norm": 1.0917813777923584, + "learning_rate": 7.590511860174782e-05, + "loss": 0.2274, + "step": 48210 + }, + { + "epoch": 1.8618479477972123, + "grad_norm": 1.3360031843185425, + "learning_rate": 7.587937758214603e-05, + "loss": 0.2381, + "step": 48220 + }, + { + "epoch": 1.862234063091239, + "grad_norm": 0.12084411829710007, + "learning_rate": 7.585363656254425e-05, + "loss": 0.1836, + "step": 48230 + }, + { + "epoch": 1.8626201783852658, + "grad_norm": 3.0013840198516846, + "learning_rate": 7.582789554294246e-05, + "loss": 0.2514, + "step": 48240 + }, + { + "epoch": 1.8630062936792926, + "grad_norm": 0.08427372574806213, + "learning_rate": 7.580215452334067e-05, + "loss": 0.4512, + "step": 48250 + }, + { + "epoch": 1.8633924089733194, + "grad_norm": 2.2700986862182617, + "learning_rate": 7.577641350373889e-05, + "loss": 0.4929, + "step": 48260 + }, + { + "epoch": 1.8637785242673464, + "grad_norm": 1.512097716331482, + "learning_rate": 7.57506724841371e-05, + "loss": 0.2595, + "step": 48270 + }, + { + "epoch": 1.864164639561373, + "grad_norm": 0.03086630441248417, + "learning_rate": 7.572493146453531e-05, + "loss": 0.2801, + "step": 48280 + }, + { + "epoch": 1.8645507548554, + "grad_norm": 1.8089312314987183, + "learning_rate": 7.569919044493353e-05, + "loss": 0.3094, + "step": 48290 + }, + { + "epoch": 1.8649368701494267, + "grad_norm": 1.2290606498718262, + "learning_rate": 7.567344942533174e-05, + "loss": 0.2952, + "step": 48300 + }, + { + "epoch": 1.8653229854434534, + "grad_norm": 2.2351982593536377, + "learning_rate": 7.564770840572995e-05, + "loss": 0.1904, + "step": 48310 + }, + { + "epoch": 1.8657091007374802, + "grad_norm": 1.007934808731079, + "learning_rate": 7.562196738612817e-05, + "loss": 0.174, + "step": 48320 + }, + { + "epoch": 1.866095216031507, + "grad_norm": 1.8931010961532593, + "learning_rate": 7.559622636652638e-05, + "loss": 0.2265, + "step": 48330 + }, + { + "epoch": 1.866481331325534, + "grad_norm": 7.616462230682373, + "learning_rate": 7.55704853469246e-05, + "loss": 0.194, + "step": 48340 + }, + { + "epoch": 1.8668674466195605, + "grad_norm": 0.8300217390060425, + "learning_rate": 7.554474432732282e-05, + "loss": 0.172, + "step": 48350 + }, + { + "epoch": 1.8672535619135875, + "grad_norm": 0.9842997789382935, + "learning_rate": 7.551900330772102e-05, + "loss": 0.2279, + "step": 48360 + }, + { + "epoch": 1.867639677207614, + "grad_norm": 1.950230360031128, + "learning_rate": 7.549326228811923e-05, + "loss": 0.2703, + "step": 48370 + }, + { + "epoch": 1.868025792501641, + "grad_norm": 1.9885706901550293, + "learning_rate": 7.546752126851745e-05, + "loss": 0.2584, + "step": 48380 + }, + { + "epoch": 1.8684119077956678, + "grad_norm": 0.6000315546989441, + "learning_rate": 7.544178024891566e-05, + "loss": 0.2729, + "step": 48390 + }, + { + "epoch": 1.8687980230896946, + "grad_norm": 0.922893226146698, + "learning_rate": 7.541603922931387e-05, + "loss": 0.1564, + "step": 48400 + }, + { + "epoch": 1.8691841383837213, + "grad_norm": 0.4918765723705292, + "learning_rate": 7.53902982097121e-05, + 
"loss": 0.1723, + "step": 48410 + }, + { + "epoch": 1.869570253677748, + "grad_norm": 0.9271582365036011, + "learning_rate": 7.536455719011031e-05, + "loss": 0.2593, + "step": 48420 + }, + { + "epoch": 1.869956368971775, + "grad_norm": 0.5880617499351501, + "learning_rate": 7.533881617050851e-05, + "loss": 0.1738, + "step": 48430 + }, + { + "epoch": 1.8703424842658016, + "grad_norm": 2.229809522628784, + "learning_rate": 7.531307515090673e-05, + "loss": 0.2722, + "step": 48440 + }, + { + "epoch": 1.8707285995598286, + "grad_norm": 0.47200268507003784, + "learning_rate": 7.528733413130494e-05, + "loss": 0.1359, + "step": 48450 + }, + { + "epoch": 1.8711147148538554, + "grad_norm": 1.7799588441848755, + "learning_rate": 7.526159311170315e-05, + "loss": 0.1806, + "step": 48460 + }, + { + "epoch": 1.8715008301478822, + "grad_norm": 0.7878087162971497, + "learning_rate": 7.523585209210137e-05, + "loss": 0.128, + "step": 48470 + }, + { + "epoch": 1.871886945441909, + "grad_norm": 0.666887104511261, + "learning_rate": 7.521011107249959e-05, + "loss": 0.3447, + "step": 48480 + }, + { + "epoch": 1.8722730607359357, + "grad_norm": 0.13966748118400574, + "learning_rate": 7.51843700528978e-05, + "loss": 0.2694, + "step": 48490 + }, + { + "epoch": 1.8726591760299627, + "grad_norm": 0.6305252909660339, + "learning_rate": 7.5158629033296e-05, + "loss": 0.1235, + "step": 48500 + }, + { + "epoch": 1.8730452913239892, + "grad_norm": 1.4558709859848022, + "learning_rate": 7.513288801369422e-05, + "loss": 0.1251, + "step": 48510 + }, + { + "epoch": 1.8734314066180162, + "grad_norm": 1.4596049785614014, + "learning_rate": 7.510714699409243e-05, + "loss": 0.3316, + "step": 48520 + }, + { + "epoch": 1.873817521912043, + "grad_norm": 0.6161240935325623, + "learning_rate": 7.508140597449065e-05, + "loss": 0.1035, + "step": 48530 + }, + { + "epoch": 1.8742036372060698, + "grad_norm": 2.402022123336792, + "learning_rate": 7.505566495488887e-05, + "loss": 0.2569, + "step": 48540 + }, + { + "epoch": 1.8745897525000965, + "grad_norm": 1.2332879304885864, + "learning_rate": 7.502992393528709e-05, + "loss": 0.0864, + "step": 48550 + }, + { + "epoch": 1.8749758677941233, + "grad_norm": 1.636063575744629, + "learning_rate": 7.50041829156853e-05, + "loss": 0.2673, + "step": 48560 + }, + { + "epoch": 1.8753619830881503, + "grad_norm": 1.0160930156707764, + "learning_rate": 7.497844189608351e-05, + "loss": 0.2759, + "step": 48570 + }, + { + "epoch": 1.8757480983821768, + "grad_norm": 2.007415771484375, + "learning_rate": 7.495270087648171e-05, + "loss": 0.328, + "step": 48580 + }, + { + "epoch": 1.8761342136762038, + "grad_norm": 2.852415084838867, + "learning_rate": 7.492695985687993e-05, + "loss": 0.2762, + "step": 48590 + }, + { + "epoch": 1.8765203289702304, + "grad_norm": 1.5660792589187622, + "learning_rate": 7.490121883727815e-05, + "loss": 0.3858, + "step": 48600 + }, + { + "epoch": 1.8769064442642573, + "grad_norm": 1.022005319595337, + "learning_rate": 7.487547781767637e-05, + "loss": 0.2508, + "step": 48610 + }, + { + "epoch": 1.8772925595582841, + "grad_norm": 0.9244334101676941, + "learning_rate": 7.484973679807458e-05, + "loss": 0.3577, + "step": 48620 + }, + { + "epoch": 1.8776786748523109, + "grad_norm": 0.6603676080703735, + "learning_rate": 7.482399577847279e-05, + "loss": 0.1577, + "step": 48630 + }, + { + "epoch": 1.8780647901463376, + "grad_norm": 1.8789231777191162, + "learning_rate": 7.4798254758871e-05, + "loss": 0.1705, + "step": 48640 + }, + { + "epoch": 1.8784509054403644, + "grad_norm": 
2.0536692142486572, + "learning_rate": 7.47725137392692e-05, + "loss": 0.3087, + "step": 48650 + }, + { + "epoch": 1.8788370207343914, + "grad_norm": 1.1918115615844727, + "learning_rate": 7.474677271966743e-05, + "loss": 0.2027, + "step": 48660 + }, + { + "epoch": 1.879223136028418, + "grad_norm": 0.374523788690567, + "learning_rate": 7.472103170006565e-05, + "loss": 0.193, + "step": 48670 + }, + { + "epoch": 1.879609251322445, + "grad_norm": 1.481998324394226, + "learning_rate": 7.469529068046386e-05, + "loss": 0.246, + "step": 48680 + }, + { + "epoch": 1.8799953666164717, + "grad_norm": 0.9611921310424805, + "learning_rate": 7.466954966086207e-05, + "loss": 0.3557, + "step": 48690 + }, + { + "epoch": 1.8803814819104985, + "grad_norm": 2.2604222297668457, + "learning_rate": 7.464380864126029e-05, + "loss": 0.168, + "step": 48700 + }, + { + "epoch": 1.8807675972045252, + "grad_norm": 0.11883547157049179, + "learning_rate": 7.46180676216585e-05, + "loss": 0.1078, + "step": 48710 + }, + { + "epoch": 1.881153712498552, + "grad_norm": 1.0063214302062988, + "learning_rate": 7.45923266020567e-05, + "loss": 0.2871, + "step": 48720 + }, + { + "epoch": 1.881539827792579, + "grad_norm": 0.32539430260658264, + "learning_rate": 7.456658558245493e-05, + "loss": 0.2641, + "step": 48730 + }, + { + "epoch": 1.8819259430866055, + "grad_norm": 0.05262208729982376, + "learning_rate": 7.454084456285314e-05, + "loss": 0.2313, + "step": 48740 + }, + { + "epoch": 1.8823120583806325, + "grad_norm": 1.5337389707565308, + "learning_rate": 7.451510354325135e-05, + "loss": 0.2202, + "step": 48750 + }, + { + "epoch": 1.8826981736746593, + "grad_norm": 2.8400349617004395, + "learning_rate": 7.448936252364957e-05, + "loss": 0.2719, + "step": 48760 + }, + { + "epoch": 1.883084288968686, + "grad_norm": 1.0065114498138428, + "learning_rate": 7.446362150404778e-05, + "loss": 0.1904, + "step": 48770 + }, + { + "epoch": 1.8834704042627128, + "grad_norm": 3.514146089553833, + "learning_rate": 7.443788048444599e-05, + "loss": 0.4121, + "step": 48780 + }, + { + "epoch": 1.8838565195567396, + "grad_norm": 1.5249392986297607, + "learning_rate": 7.44121394648442e-05, + "loss": 0.3192, + "step": 48790 + }, + { + "epoch": 1.8842426348507666, + "grad_norm": 2.1075022220611572, + "learning_rate": 7.438639844524242e-05, + "loss": 0.257, + "step": 48800 + }, + { + "epoch": 1.8846287501447931, + "grad_norm": 1.368531584739685, + "learning_rate": 7.436065742564063e-05, + "loss": 0.246, + "step": 48810 + }, + { + "epoch": 1.8850148654388201, + "grad_norm": 0.7900007367134094, + "learning_rate": 7.433491640603885e-05, + "loss": 0.2635, + "step": 48820 + }, + { + "epoch": 1.8854009807328467, + "grad_norm": 0.8597519397735596, + "learning_rate": 7.430917538643706e-05, + "loss": 0.3996, + "step": 48830 + }, + { + "epoch": 1.8857870960268737, + "grad_norm": 1.4207600355148315, + "learning_rate": 7.428343436683527e-05, + "loss": 0.0985, + "step": 48840 + }, + { + "epoch": 1.8861732113209004, + "grad_norm": 0.43486616015434265, + "learning_rate": 7.425769334723349e-05, + "loss": 0.0675, + "step": 48850 + }, + { + "epoch": 1.8865593266149272, + "grad_norm": 0.5899690389633179, + "learning_rate": 7.42319523276317e-05, + "loss": 0.3184, + "step": 48860 + }, + { + "epoch": 1.886945441908954, + "grad_norm": 0.3843490481376648, + "learning_rate": 7.420621130802991e-05, + "loss": 0.3173, + "step": 48870 + }, + { + "epoch": 1.8873315572029807, + "grad_norm": 1.5994783639907837, + "learning_rate": 7.418047028842813e-05, + "loss": 0.1545, + "step": 48880 + 
}, + { + "epoch": 1.8877176724970077, + "grad_norm": 0.4738117456436157, + "learning_rate": 7.415472926882634e-05, + "loss": 0.2209, + "step": 48890 + }, + { + "epoch": 1.8881037877910343, + "grad_norm": 0.8965383768081665, + "learning_rate": 7.412898824922455e-05, + "loss": 0.0784, + "step": 48900 + }, + { + "epoch": 1.8884899030850613, + "grad_norm": 0.5122581124305725, + "learning_rate": 7.410324722962278e-05, + "loss": 0.2346, + "step": 48910 + }, + { + "epoch": 1.888876018379088, + "grad_norm": 0.8720236420631409, + "learning_rate": 7.407750621002099e-05, + "loss": 0.1616, + "step": 48920 + }, + { + "epoch": 1.8892621336731148, + "grad_norm": 0.8678966760635376, + "learning_rate": 7.405176519041919e-05, + "loss": 0.2346, + "step": 48930 + }, + { + "epoch": 1.8896482489671416, + "grad_norm": 1.5904022455215454, + "learning_rate": 7.40260241708174e-05, + "loss": 0.2383, + "step": 48940 + }, + { + "epoch": 1.8900343642611683, + "grad_norm": 0.8917766213417053, + "learning_rate": 7.400028315121562e-05, + "loss": 0.0761, + "step": 48950 + }, + { + "epoch": 1.8904204795551953, + "grad_norm": 2.0660765171051025, + "learning_rate": 7.397454213161383e-05, + "loss": 0.1417, + "step": 48960 + }, + { + "epoch": 1.8908065948492219, + "grad_norm": 1.085541009902954, + "learning_rate": 7.394880111201205e-05, + "loss": 0.116, + "step": 48970 + }, + { + "epoch": 1.8911927101432489, + "grad_norm": 0.5287320613861084, + "learning_rate": 7.392306009241027e-05, + "loss": 0.2035, + "step": 48980 + }, + { + "epoch": 1.8915788254372756, + "grad_norm": 0.12603731453418732, + "learning_rate": 7.389731907280849e-05, + "loss": 0.2288, + "step": 48990 + }, + { + "epoch": 1.8919649407313024, + "grad_norm": 1.6294454336166382, + "learning_rate": 7.387157805320669e-05, + "loss": 0.3106, + "step": 49000 + }, + { + "epoch": 1.8923510560253292, + "grad_norm": 0.12466654926538467, + "learning_rate": 7.38458370336049e-05, + "loss": 0.2728, + "step": 49010 + }, + { + "epoch": 1.892737171319356, + "grad_norm": 0.44524601101875305, + "learning_rate": 7.382009601400311e-05, + "loss": 0.1293, + "step": 49020 + }, + { + "epoch": 1.893123286613383, + "grad_norm": 0.7164571285247803, + "learning_rate": 7.379435499440133e-05, + "loss": 0.2331, + "step": 49030 + }, + { + "epoch": 1.8935094019074095, + "grad_norm": 0.6479294300079346, + "learning_rate": 7.376861397479955e-05, + "loss": 0.2716, + "step": 49040 + }, + { + "epoch": 1.8938955172014365, + "grad_norm": 0.5055733323097229, + "learning_rate": 7.374287295519777e-05, + "loss": 0.253, + "step": 49050 + }, + { + "epoch": 1.894281632495463, + "grad_norm": 0.14443042874336243, + "learning_rate": 7.371713193559598e-05, + "loss": 0.211, + "step": 49060 + }, + { + "epoch": 1.89466774778949, + "grad_norm": 0.21934077143669128, + "learning_rate": 7.369139091599418e-05, + "loss": 0.4524, + "step": 49070 + }, + { + "epoch": 1.8950538630835168, + "grad_norm": 0.7723036408424377, + "learning_rate": 7.366564989639239e-05, + "loss": 0.3333, + "step": 49080 + }, + { + "epoch": 1.8954399783775435, + "grad_norm": 0.561475932598114, + "learning_rate": 7.36399088767906e-05, + "loss": 0.2446, + "step": 49090 + }, + { + "epoch": 1.8958260936715703, + "grad_norm": 0.36831262707710266, + "learning_rate": 7.361416785718883e-05, + "loss": 0.3203, + "step": 49100 + }, + { + "epoch": 1.896212208965597, + "grad_norm": 1.3542941808700562, + "learning_rate": 7.358842683758705e-05, + "loss": 0.1927, + "step": 49110 + }, + { + "epoch": 1.896598324259624, + "grad_norm": 0.778232991695404, + "learning_rate": 
7.356268581798526e-05, + "loss": 0.1714, + "step": 49120 + }, + { + "epoch": 1.8969844395536506, + "grad_norm": 0.1421511471271515, + "learning_rate": 7.353694479838347e-05, + "loss": 0.2391, + "step": 49130 + }, + { + "epoch": 1.8973705548476776, + "grad_norm": 0.5509871244430542, + "learning_rate": 7.351120377878169e-05, + "loss": 0.251, + "step": 49140 + }, + { + "epoch": 1.8977566701417043, + "grad_norm": 1.068138599395752, + "learning_rate": 7.348546275917989e-05, + "loss": 0.3913, + "step": 49150 + }, + { + "epoch": 1.8981427854357311, + "grad_norm": 0.8679132461547852, + "learning_rate": 7.345972173957811e-05, + "loss": 0.2243, + "step": 49160 + }, + { + "epoch": 1.8985289007297579, + "grad_norm": 1.1966150999069214, + "learning_rate": 7.343398071997633e-05, + "loss": 0.1426, + "step": 49170 + }, + { + "epoch": 1.8989150160237847, + "grad_norm": 2.415524482727051, + "learning_rate": 7.340823970037454e-05, + "loss": 0.2193, + "step": 49180 + }, + { + "epoch": 1.8993011313178116, + "grad_norm": 0.3287011384963989, + "learning_rate": 7.338249868077275e-05, + "loss": 0.1409, + "step": 49190 + }, + { + "epoch": 1.8996872466118382, + "grad_norm": 0.47298726439476013, + "learning_rate": 7.335675766117097e-05, + "loss": 0.3534, + "step": 49200 + }, + { + "epoch": 1.9000733619058652, + "grad_norm": 0.25453588366508484, + "learning_rate": 7.333101664156918e-05, + "loss": 0.2457, + "step": 49210 + }, + { + "epoch": 1.9004594771998917, + "grad_norm": 1.78682279586792, + "learning_rate": 7.330527562196738e-05, + "loss": 0.4379, + "step": 49220 + }, + { + "epoch": 1.9008455924939187, + "grad_norm": 0.9073999524116516, + "learning_rate": 7.32795346023656e-05, + "loss": 0.27, + "step": 49230 + }, + { + "epoch": 1.9012317077879455, + "grad_norm": 0.7788071036338806, + "learning_rate": 7.325379358276382e-05, + "loss": 0.1992, + "step": 49240 + }, + { + "epoch": 1.9016178230819722, + "grad_norm": 0.151946023106575, + "learning_rate": 7.322805256316203e-05, + "loss": 0.1526, + "step": 49250 + }, + { + "epoch": 1.9020039383759992, + "grad_norm": 1.655206561088562, + "learning_rate": 7.320231154356025e-05, + "loss": 0.2899, + "step": 49260 + }, + { + "epoch": 1.9023900536700258, + "grad_norm": 0.6136038303375244, + "learning_rate": 7.317657052395846e-05, + "loss": 0.2118, + "step": 49270 + }, + { + "epoch": 1.9027761689640528, + "grad_norm": 2.720750093460083, + "learning_rate": 7.315082950435667e-05, + "loss": 0.316, + "step": 49280 + }, + { + "epoch": 1.9031622842580793, + "grad_norm": 0.7502691149711609, + "learning_rate": 7.312508848475488e-05, + "loss": 0.2368, + "step": 49290 + }, + { + "epoch": 1.9035483995521063, + "grad_norm": 1.0178804397583008, + "learning_rate": 7.30993474651531e-05, + "loss": 0.1193, + "step": 49300 + }, + { + "epoch": 1.903934514846133, + "grad_norm": 1.0318552255630493, + "learning_rate": 7.307360644555131e-05, + "loss": 0.1236, + "step": 49310 + }, + { + "epoch": 1.9043206301401598, + "grad_norm": 0.4833224415779114, + "learning_rate": 7.304786542594952e-05, + "loss": 0.1505, + "step": 49320 + }, + { + "epoch": 1.9047067454341866, + "grad_norm": 0.8481758832931519, + "learning_rate": 7.302212440634774e-05, + "loss": 0.318, + "step": 49330 + }, + { + "epoch": 1.9050928607282134, + "grad_norm": 0.8141576051712036, + "learning_rate": 7.299638338674595e-05, + "loss": 0.2936, + "step": 49340 + }, + { + "epoch": 1.9054789760222404, + "grad_norm": 0.28531432151794434, + "learning_rate": 7.297064236714416e-05, + "loss": 0.0794, + "step": 49350 + }, + { + "epoch": 
1.905865091316267, + "grad_norm": 1.2908906936645508, + "learning_rate": 7.294490134754238e-05, + "loss": 0.2753, + "step": 49360 + }, + { + "epoch": 1.906251206610294, + "grad_norm": 0.27395737171173096, + "learning_rate": 7.291916032794059e-05, + "loss": 0.0869, + "step": 49370 + }, + { + "epoch": 1.9066373219043207, + "grad_norm": 0.5168110728263855, + "learning_rate": 7.28934193083388e-05, + "loss": 0.2033, + "step": 49380 + }, + { + "epoch": 1.9070234371983474, + "grad_norm": 0.384120911359787, + "learning_rate": 7.286767828873702e-05, + "loss": 0.2012, + "step": 49390 + }, + { + "epoch": 1.9074095524923742, + "grad_norm": 0.86110919713974, + "learning_rate": 7.284193726913523e-05, + "loss": 0.254, + "step": 49400 + }, + { + "epoch": 1.907795667786401, + "grad_norm": 0.04631857946515083, + "learning_rate": 7.281619624953346e-05, + "loss": 0.1196, + "step": 49410 + }, + { + "epoch": 1.908181783080428, + "grad_norm": 0.1448020040988922, + "learning_rate": 7.279045522993166e-05, + "loss": 0.2028, + "step": 49420 + }, + { + "epoch": 1.9085678983744545, + "grad_norm": 0.11122003197669983, + "learning_rate": 7.276471421032987e-05, + "loss": 0.2106, + "step": 49430 + }, + { + "epoch": 1.9089540136684815, + "grad_norm": 1.0059682130813599, + "learning_rate": 7.273897319072808e-05, + "loss": 0.2273, + "step": 49440 + }, + { + "epoch": 1.909340128962508, + "grad_norm": 0.5203434824943542, + "learning_rate": 7.27132321711263e-05, + "loss": 0.1357, + "step": 49450 + }, + { + "epoch": 1.909726244256535, + "grad_norm": 0.5997903347015381, + "learning_rate": 7.268749115152451e-05, + "loss": 0.204, + "step": 49460 + }, + { + "epoch": 1.9101123595505618, + "grad_norm": 0.1968044638633728, + "learning_rate": 7.266175013192272e-05, + "loss": 0.0851, + "step": 49470 + }, + { + "epoch": 1.9104984748445886, + "grad_norm": 2.3479251861572266, + "learning_rate": 7.263600911232095e-05, + "loss": 0.2232, + "step": 49480 + }, + { + "epoch": 1.9108845901386153, + "grad_norm": 1.1804332733154297, + "learning_rate": 7.261026809271916e-05, + "loss": 0.1622, + "step": 49490 + }, + { + "epoch": 1.911270705432642, + "grad_norm": 1.4964795112609863, + "learning_rate": 7.258452707311736e-05, + "loss": 0.2701, + "step": 49500 + }, + { + "epoch": 1.911656820726669, + "grad_norm": 2.1407668590545654, + "learning_rate": 7.255878605351558e-05, + "loss": 0.2944, + "step": 49510 + }, + { + "epoch": 1.9120429360206956, + "grad_norm": 0.5795183181762695, + "learning_rate": 7.253304503391379e-05, + "loss": 0.2761, + "step": 49520 + }, + { + "epoch": 1.9124290513147226, + "grad_norm": 0.8205333352088928, + "learning_rate": 7.2507304014312e-05, + "loss": 0.3897, + "step": 49530 + }, + { + "epoch": 1.9128151666087494, + "grad_norm": 4.536723613739014, + "learning_rate": 7.248156299471023e-05, + "loss": 0.3429, + "step": 49540 + }, + { + "epoch": 1.9132012819027762, + "grad_norm": 0.7611442804336548, + "learning_rate": 7.245582197510844e-05, + "loss": 0.3098, + "step": 49550 + }, + { + "epoch": 1.913587397196803, + "grad_norm": 0.7911695241928101, + "learning_rate": 7.243008095550666e-05, + "loss": 0.28, + "step": 49560 + }, + { + "epoch": 1.9139735124908297, + "grad_norm": 0.11662279069423676, + "learning_rate": 7.240433993590486e-05, + "loss": 0.1577, + "step": 49570 + }, + { + "epoch": 1.9143596277848567, + "grad_norm": 2.140101194381714, + "learning_rate": 7.237859891630307e-05, + "loss": 0.3705, + "step": 49580 + }, + { + "epoch": 1.9147457430788832, + "grad_norm": 1.8803783655166626, + "learning_rate": 7.235285789670128e-05, + 
"loss": 0.3168, + "step": 49590 + }, + { + "epoch": 1.9151318583729102, + "grad_norm": 0.25436753034591675, + "learning_rate": 7.232711687709951e-05, + "loss": 0.3037, + "step": 49600 + }, + { + "epoch": 1.915517973666937, + "grad_norm": 1.1993011236190796, + "learning_rate": 7.230137585749772e-05, + "loss": 0.2218, + "step": 49610 + }, + { + "epoch": 1.9159040889609638, + "grad_norm": 0.4995238780975342, + "learning_rate": 7.227563483789594e-05, + "loss": 0.1821, + "step": 49620 + }, + { + "epoch": 1.9162902042549905, + "grad_norm": 1.8584142923355103, + "learning_rate": 7.224989381829415e-05, + "loss": 0.2694, + "step": 49630 + }, + { + "epoch": 1.9166763195490173, + "grad_norm": 0.5736681818962097, + "learning_rate": 7.222415279869235e-05, + "loss": 0.32, + "step": 49640 + }, + { + "epoch": 1.9170624348430443, + "grad_norm": 0.2963573634624481, + "learning_rate": 7.219841177909056e-05, + "loss": 0.1986, + "step": 49650 + }, + { + "epoch": 1.9174485501370708, + "grad_norm": 0.6248067617416382, + "learning_rate": 7.217267075948879e-05, + "loss": 0.2461, + "step": 49660 + }, + { + "epoch": 1.9178346654310978, + "grad_norm": 1.037135362625122, + "learning_rate": 7.2146929739887e-05, + "loss": 0.176, + "step": 49670 + }, + { + "epoch": 1.9182207807251244, + "grad_norm": 7.093240261077881, + "learning_rate": 7.212118872028522e-05, + "loss": 0.263, + "step": 49680 + }, + { + "epoch": 1.9186068960191514, + "grad_norm": 0.9378503561019897, + "learning_rate": 7.209544770068343e-05, + "loss": 0.4105, + "step": 49690 + }, + { + "epoch": 1.9189930113131781, + "grad_norm": 2.801452875137329, + "learning_rate": 7.206970668108164e-05, + "loss": 0.1693, + "step": 49700 + }, + { + "epoch": 1.9193791266072049, + "grad_norm": 0.6257158517837524, + "learning_rate": 7.204396566147984e-05, + "loss": 0.2095, + "step": 49710 + }, + { + "epoch": 1.9197652419012317, + "grad_norm": 1.0623574256896973, + "learning_rate": 7.201822464187807e-05, + "loss": 0.2335, + "step": 49720 + }, + { + "epoch": 1.9201513571952584, + "grad_norm": 1.958388328552246, + "learning_rate": 7.199248362227628e-05, + "loss": 0.2545, + "step": 49730 + }, + { + "epoch": 1.9205374724892854, + "grad_norm": 1.790643334388733, + "learning_rate": 7.19667426026745e-05, + "loss": 0.2342, + "step": 49740 + }, + { + "epoch": 1.920923587783312, + "grad_norm": 0.19096235930919647, + "learning_rate": 7.194100158307271e-05, + "loss": 0.1812, + "step": 49750 + }, + { + "epoch": 1.921309703077339, + "grad_norm": 1.8985120058059692, + "learning_rate": 7.191526056347092e-05, + "loss": 0.294, + "step": 49760 + }, + { + "epoch": 1.9216958183713657, + "grad_norm": 2.9629859924316406, + "learning_rate": 7.188951954386914e-05, + "loss": 0.3423, + "step": 49770 + }, + { + "epoch": 1.9220819336653925, + "grad_norm": 1.475262999534607, + "learning_rate": 7.186377852426735e-05, + "loss": 0.1478, + "step": 49780 + }, + { + "epoch": 1.9224680489594193, + "grad_norm": 1.5917531251907349, + "learning_rate": 7.183803750466556e-05, + "loss": 0.1964, + "step": 49790 + }, + { + "epoch": 1.922854164253446, + "grad_norm": 0.20593854784965515, + "learning_rate": 7.181229648506378e-05, + "loss": 0.1775, + "step": 49800 + }, + { + "epoch": 1.923240279547473, + "grad_norm": 0.5509443879127502, + "learning_rate": 7.178655546546199e-05, + "loss": 0.2411, + "step": 49810 + }, + { + "epoch": 1.9236263948414996, + "grad_norm": 0.8016191720962524, + "learning_rate": 7.17608144458602e-05, + "loss": 0.0837, + "step": 49820 + }, + { + "epoch": 1.9240125101355265, + "grad_norm": 
0.8763396143913269, + "learning_rate": 7.173507342625842e-05, + "loss": 0.1932, + "step": 49830 + }, + { + "epoch": 1.9243986254295533, + "grad_norm": 0.25457191467285156, + "learning_rate": 7.170933240665663e-05, + "loss": 0.0869, + "step": 49840 + }, + { + "epoch": 1.92478474072358, + "grad_norm": 2.61993145942688, + "learning_rate": 7.168359138705484e-05, + "loss": 0.1839, + "step": 49850 + }, + { + "epoch": 1.9251708560176068, + "grad_norm": 2.2821877002716064, + "learning_rate": 7.165785036745306e-05, + "loss": 0.3863, + "step": 49860 + }, + { + "epoch": 1.9255569713116336, + "grad_norm": 1.4072798490524292, + "learning_rate": 7.163210934785127e-05, + "loss": 0.2477, + "step": 49870 + }, + { + "epoch": 1.9259430866056606, + "grad_norm": 0.8962070941925049, + "learning_rate": 7.160636832824948e-05, + "loss": 0.2936, + "step": 49880 + }, + { + "epoch": 1.9263292018996871, + "grad_norm": 3.0801923274993896, + "learning_rate": 7.15806273086477e-05, + "loss": 0.2368, + "step": 49890 + }, + { + "epoch": 1.9267153171937141, + "grad_norm": 0.5756659507751465, + "learning_rate": 7.155488628904591e-05, + "loss": 0.2774, + "step": 49900 + }, + { + "epoch": 1.9271014324877407, + "grad_norm": 2.669837236404419, + "learning_rate": 7.152914526944414e-05, + "loss": 0.2742, + "step": 49910 + }, + { + "epoch": 1.9274875477817677, + "grad_norm": 1.3064197301864624, + "learning_rate": 7.150340424984234e-05, + "loss": 0.3908, + "step": 49920 + }, + { + "epoch": 1.9278736630757944, + "grad_norm": 1.54086434841156, + "learning_rate": 7.147766323024055e-05, + "loss": 0.2302, + "step": 49930 + }, + { + "epoch": 1.9282597783698212, + "grad_norm": 2.6577224731445312, + "learning_rate": 7.145192221063876e-05, + "loss": 0.2667, + "step": 49940 + }, + { + "epoch": 1.928645893663848, + "grad_norm": 1.0387258529663086, + "learning_rate": 7.142618119103698e-05, + "loss": 0.2418, + "step": 49950 + }, + { + "epoch": 1.9290320089578747, + "grad_norm": 0.592282772064209, + "learning_rate": 7.140044017143519e-05, + "loss": 0.2499, + "step": 49960 + }, + { + "epoch": 1.9294181242519017, + "grad_norm": 0.364241361618042, + "learning_rate": 7.137469915183342e-05, + "loss": 0.2056, + "step": 49970 + }, + { + "epoch": 1.9298042395459283, + "grad_norm": 2.9593188762664795, + "learning_rate": 7.134895813223163e-05, + "loss": 0.2514, + "step": 49980 + }, + { + "epoch": 1.9301903548399553, + "grad_norm": 1.9135371446609497, + "learning_rate": 7.132321711262983e-05, + "loss": 0.124, + "step": 49990 + }, + { + "epoch": 1.930576470133982, + "grad_norm": 0.5927162170410156, + "learning_rate": 7.129747609302804e-05, + "loss": 0.2257, + "step": 50000 + }, + { + "epoch": 1.9309625854280088, + "grad_norm": 1.7300679683685303, + "learning_rate": 7.127173507342626e-05, + "loss": 0.2302, + "step": 50010 + }, + { + "epoch": 1.9313487007220356, + "grad_norm": 1.48344087600708, + "learning_rate": 7.124599405382447e-05, + "loss": 0.3121, + "step": 50020 + }, + { + "epoch": 1.9317348160160623, + "grad_norm": 1.275780439376831, + "learning_rate": 7.122025303422268e-05, + "loss": 0.2202, + "step": 50030 + }, + { + "epoch": 1.9321209313100893, + "grad_norm": 1.2625102996826172, + "learning_rate": 7.119451201462091e-05, + "loss": 0.1819, + "step": 50040 + }, + { + "epoch": 1.9325070466041159, + "grad_norm": 2.6306488513946533, + "learning_rate": 7.116877099501912e-05, + "loss": 0.3846, + "step": 50050 + }, + { + "epoch": 1.9328931618981429, + "grad_norm": 2.507249355316162, + "learning_rate": 7.114302997541732e-05, + "loss": 0.3167, + "step": 50060 + 
}, + { + "epoch": 1.9332792771921696, + "grad_norm": 0.6122744679450989, + "learning_rate": 7.111728895581554e-05, + "loss": 0.2486, + "step": 50070 + }, + { + "epoch": 1.9336653924861964, + "grad_norm": 0.9299182295799255, + "learning_rate": 7.109154793621375e-05, + "loss": 0.2156, + "step": 50080 + }, + { + "epoch": 1.9340515077802232, + "grad_norm": 2.0560238361358643, + "learning_rate": 7.106580691661196e-05, + "loss": 0.1319, + "step": 50090 + }, + { + "epoch": 1.93443762307425, + "grad_norm": 0.9602612257003784, + "learning_rate": 7.104006589701019e-05, + "loss": 0.1433, + "step": 50100 + }, + { + "epoch": 1.934823738368277, + "grad_norm": 1.2760334014892578, + "learning_rate": 7.10143248774084e-05, + "loss": 0.2019, + "step": 50110 + }, + { + "epoch": 1.9352098536623035, + "grad_norm": 0.27404239773750305, + "learning_rate": 7.098858385780662e-05, + "loss": 0.5303, + "step": 50120 + }, + { + "epoch": 1.9355959689563305, + "grad_norm": 0.6597281694412231, + "learning_rate": 7.096284283820483e-05, + "loss": 0.2328, + "step": 50130 + }, + { + "epoch": 1.935982084250357, + "grad_norm": 3.6417131423950195, + "learning_rate": 7.093710181860303e-05, + "loss": 0.343, + "step": 50140 + }, + { + "epoch": 1.936368199544384, + "grad_norm": 0.9950355887413025, + "learning_rate": 7.091136079900124e-05, + "loss": 0.1877, + "step": 50150 + }, + { + "epoch": 1.9367543148384108, + "grad_norm": 0.3848172426223755, + "learning_rate": 7.088561977939947e-05, + "loss": 0.1541, + "step": 50160 + }, + { + "epoch": 1.9371404301324375, + "grad_norm": 0.6884573698043823, + "learning_rate": 7.085987875979768e-05, + "loss": 0.1744, + "step": 50170 + }, + { + "epoch": 1.9375265454264643, + "grad_norm": 1.5233834981918335, + "learning_rate": 7.08341377401959e-05, + "loss": 0.3595, + "step": 50180 + }, + { + "epoch": 1.937912660720491, + "grad_norm": 0.11241710186004639, + "learning_rate": 7.080839672059411e-05, + "loss": 0.1313, + "step": 50190 + }, + { + "epoch": 1.938298776014518, + "grad_norm": 1.3029096126556396, + "learning_rate": 7.078265570099232e-05, + "loss": 0.3459, + "step": 50200 + }, + { + "epoch": 1.9386848913085446, + "grad_norm": 1.1150782108306885, + "learning_rate": 7.075691468139052e-05, + "loss": 0.2662, + "step": 50210 + }, + { + "epoch": 1.9390710066025716, + "grad_norm": 0.09661692380905151, + "learning_rate": 7.073117366178875e-05, + "loss": 0.2377, + "step": 50220 + }, + { + "epoch": 1.9394571218965984, + "grad_norm": 2.488790512084961, + "learning_rate": 7.070543264218696e-05, + "loss": 0.2677, + "step": 50230 + }, + { + "epoch": 1.9398432371906251, + "grad_norm": 0.407704621553421, + "learning_rate": 7.067969162258518e-05, + "loss": 0.1208, + "step": 50240 + }, + { + "epoch": 1.940229352484652, + "grad_norm": 0.4769364297389984, + "learning_rate": 7.065395060298339e-05, + "loss": 0.1748, + "step": 50250 + }, + { + "epoch": 1.9406154677786787, + "grad_norm": 2.900118112564087, + "learning_rate": 7.06282095833816e-05, + "loss": 0.1948, + "step": 50260 + }, + { + "epoch": 1.9410015830727056, + "grad_norm": 1.1749001741409302, + "learning_rate": 7.060246856377982e-05, + "loss": 0.1688, + "step": 50270 + }, + { + "epoch": 1.9413876983667322, + "grad_norm": 0.5052315592765808, + "learning_rate": 7.057672754417802e-05, + "loss": 0.4284, + "step": 50280 + }, + { + "epoch": 1.9417738136607592, + "grad_norm": 0.614936113357544, + "learning_rate": 7.055098652457624e-05, + "loss": 0.2109, + "step": 50290 + }, + { + "epoch": 1.942159928954786, + "grad_norm": 1.9683163166046143, + "learning_rate": 
7.052524550497446e-05, + "loss": 0.1518, + "step": 50300 + }, + { + "epoch": 1.9425460442488127, + "grad_norm": 0.8502413630485535, + "learning_rate": 7.049950448537267e-05, + "loss": 0.281, + "step": 50310 + }, + { + "epoch": 1.9429321595428395, + "grad_norm": 2.8081016540527344, + "learning_rate": 7.047376346577088e-05, + "loss": 0.1802, + "step": 50320 + }, + { + "epoch": 1.9433182748368663, + "grad_norm": 1.9322141408920288, + "learning_rate": 7.04480224461691e-05, + "loss": 0.2218, + "step": 50330 + }, + { + "epoch": 1.9437043901308932, + "grad_norm": 0.8338032960891724, + "learning_rate": 7.042228142656731e-05, + "loss": 0.1836, + "step": 50340 + }, + { + "epoch": 1.9440905054249198, + "grad_norm": 1.232925295829773, + "learning_rate": 7.039654040696552e-05, + "loss": 0.2025, + "step": 50350 + }, + { + "epoch": 1.9444766207189468, + "grad_norm": 0.5655641555786133, + "learning_rate": 7.037079938736374e-05, + "loss": 0.2061, + "step": 50360 + }, + { + "epoch": 1.9448627360129733, + "grad_norm": 1.398917317390442, + "learning_rate": 7.034505836776195e-05, + "loss": 0.3471, + "step": 50370 + }, + { + "epoch": 1.9452488513070003, + "grad_norm": 0.8988509178161621, + "learning_rate": 7.031931734816016e-05, + "loss": 0.3504, + "step": 50380 + }, + { + "epoch": 1.945634966601027, + "grad_norm": 0.10333681106567383, + "learning_rate": 7.029357632855838e-05, + "loss": 0.352, + "step": 50390 + }, + { + "epoch": 1.9460210818950539, + "grad_norm": 1.3678967952728271, + "learning_rate": 7.026783530895659e-05, + "loss": 0.1763, + "step": 50400 + }, + { + "epoch": 1.9464071971890806, + "grad_norm": 3.1605618000030518, + "learning_rate": 7.02420942893548e-05, + "loss": 0.4636, + "step": 50410 + }, + { + "epoch": 1.9467933124831074, + "grad_norm": 0.02575235441327095, + "learning_rate": 7.021635326975302e-05, + "loss": 0.2352, + "step": 50420 + }, + { + "epoch": 1.9471794277771344, + "grad_norm": 1.789573311805725, + "learning_rate": 7.019061225015123e-05, + "loss": 0.3358, + "step": 50430 + }, + { + "epoch": 1.947565543071161, + "grad_norm": 1.0206273794174194, + "learning_rate": 7.016487123054944e-05, + "loss": 0.1509, + "step": 50440 + }, + { + "epoch": 1.947951658365188, + "grad_norm": 3.0096218585968018, + "learning_rate": 7.013913021094766e-05, + "loss": 0.3393, + "step": 50450 + }, + { + "epoch": 1.9483377736592147, + "grad_norm": 0.7118330001831055, + "learning_rate": 7.011338919134587e-05, + "loss": 0.2159, + "step": 50460 + }, + { + "epoch": 1.9487238889532414, + "grad_norm": 0.2753995954990387, + "learning_rate": 7.00876481717441e-05, + "loss": 0.2659, + "step": 50470 + }, + { + "epoch": 1.9491100042472682, + "grad_norm": 2.5077409744262695, + "learning_rate": 7.006190715214231e-05, + "loss": 0.3032, + "step": 50480 + }, + { + "epoch": 1.949496119541295, + "grad_norm": 0.3444388806819916, + "learning_rate": 7.003616613254051e-05, + "loss": 0.2625, + "step": 50490 + }, + { + "epoch": 1.949882234835322, + "grad_norm": 1.1488401889801025, + "learning_rate": 7.001042511293872e-05, + "loss": 0.2443, + "step": 50500 + }, + { + "epoch": 1.9502683501293485, + "grad_norm": 0.6464126706123352, + "learning_rate": 6.998468409333694e-05, + "loss": 0.1268, + "step": 50510 + }, + { + "epoch": 1.9506544654233755, + "grad_norm": 0.6716893911361694, + "learning_rate": 6.995894307373515e-05, + "loss": 0.1346, + "step": 50520 + }, + { + "epoch": 1.951040580717402, + "grad_norm": 1.7599986791610718, + "learning_rate": 6.993320205413336e-05, + "loss": 0.2365, + "step": 50530 + }, + { + "epoch": 
1.951426696011429, + "grad_norm": 0.7483705282211304, + "learning_rate": 6.990746103453159e-05, + "loss": 0.326, + "step": 50540 + }, + { + "epoch": 1.9518128113054558, + "grad_norm": 1.9978541135787964, + "learning_rate": 6.98817200149298e-05, + "loss": 0.2166, + "step": 50550 + }, + { + "epoch": 1.9521989265994826, + "grad_norm": 0.50310218334198, + "learning_rate": 6.9855978995328e-05, + "loss": 0.0868, + "step": 50560 + }, + { + "epoch": 1.9525850418935096, + "grad_norm": 0.4358873963356018, + "learning_rate": 6.983023797572622e-05, + "loss": 0.2483, + "step": 50570 + }, + { + "epoch": 1.952971157187536, + "grad_norm": 1.1612942218780518, + "learning_rate": 6.980449695612443e-05, + "loss": 0.1993, + "step": 50580 + }, + { + "epoch": 1.953357272481563, + "grad_norm": 2.2015364170074463, + "learning_rate": 6.977875593652264e-05, + "loss": 0.3102, + "step": 50590 + }, + { + "epoch": 1.9537433877755896, + "grad_norm": 0.2711980938911438, + "learning_rate": 6.975301491692087e-05, + "loss": 0.1243, + "step": 50600 + }, + { + "epoch": 1.9541295030696166, + "grad_norm": 1.2215690612792969, + "learning_rate": 6.972727389731908e-05, + "loss": 0.1812, + "step": 50610 + }, + { + "epoch": 1.9545156183636434, + "grad_norm": 1.5580382347106934, + "learning_rate": 6.97015328777173e-05, + "loss": 0.2582, + "step": 50620 + }, + { + "epoch": 1.9549017336576702, + "grad_norm": 4.351020336151123, + "learning_rate": 6.96757918581155e-05, + "loss": 0.2451, + "step": 50630 + }, + { + "epoch": 1.955287848951697, + "grad_norm": 0.8245100975036621, + "learning_rate": 6.965005083851371e-05, + "loss": 0.0861, + "step": 50640 + }, + { + "epoch": 1.9556739642457237, + "grad_norm": 3.2745001316070557, + "learning_rate": 6.962430981891192e-05, + "loss": 0.3108, + "step": 50650 + }, + { + "epoch": 1.9560600795397507, + "grad_norm": 1.2234485149383545, + "learning_rate": 6.959856879931015e-05, + "loss": 0.2457, + "step": 50660 + }, + { + "epoch": 1.9564461948337772, + "grad_norm": 0.8801009058952332, + "learning_rate": 6.957282777970836e-05, + "loss": 0.1758, + "step": 50670 + }, + { + "epoch": 1.9568323101278042, + "grad_norm": 1.0892245769500732, + "learning_rate": 6.954708676010658e-05, + "loss": 0.217, + "step": 50680 + }, + { + "epoch": 1.957218425421831, + "grad_norm": 0.47810041904449463, + "learning_rate": 6.952134574050479e-05, + "loss": 0.1797, + "step": 50690 + }, + { + "epoch": 1.9576045407158578, + "grad_norm": 2.988180160522461, + "learning_rate": 6.9495604720903e-05, + "loss": 0.2899, + "step": 50700 + }, + { + "epoch": 1.9579906560098845, + "grad_norm": 1.7291783094406128, + "learning_rate": 6.94698637013012e-05, + "loss": 0.214, + "step": 50710 + }, + { + "epoch": 1.9583767713039113, + "grad_norm": 1.1876074075698853, + "learning_rate": 6.944412268169943e-05, + "loss": 0.1801, + "step": 50720 + }, + { + "epoch": 1.9587628865979383, + "grad_norm": 1.5710748434066772, + "learning_rate": 6.941838166209764e-05, + "loss": 0.393, + "step": 50730 + }, + { + "epoch": 1.9591490018919648, + "grad_norm": 1.422935128211975, + "learning_rate": 6.939264064249586e-05, + "loss": 0.2637, + "step": 50740 + }, + { + "epoch": 1.9595351171859918, + "grad_norm": 0.26971349120140076, + "learning_rate": 6.936689962289407e-05, + "loss": 0.2616, + "step": 50750 + }, + { + "epoch": 1.9599212324800184, + "grad_norm": 0.02176385000348091, + "learning_rate": 6.934115860329228e-05, + "loss": 0.2458, + "step": 50760 + }, + { + "epoch": 1.9603073477740454, + "grad_norm": 1.587498664855957, + "learning_rate": 6.93154175836905e-05, + 
"loss": 0.2505, + "step": 50770 + }, + { + "epoch": 1.9606934630680721, + "grad_norm": 0.7178042531013489, + "learning_rate": 6.92896765640887e-05, + "loss": 0.1028, + "step": 50780 + }, + { + "epoch": 1.961079578362099, + "grad_norm": 0.4361552894115448, + "learning_rate": 6.926393554448692e-05, + "loss": 0.1327, + "step": 50790 + }, + { + "epoch": 1.9614656936561257, + "grad_norm": 0.1252552568912506, + "learning_rate": 6.923819452488514e-05, + "loss": 0.2598, + "step": 50800 + }, + { + "epoch": 1.9618518089501524, + "grad_norm": 1.0288604497909546, + "learning_rate": 6.921245350528335e-05, + "loss": 0.1415, + "step": 50810 + }, + { + "epoch": 1.9622379242441794, + "grad_norm": 1.633277416229248, + "learning_rate": 6.918671248568156e-05, + "loss": 0.3346, + "step": 50820 + }, + { + "epoch": 1.962624039538206, + "grad_norm": 1.034558653831482, + "learning_rate": 6.916097146607978e-05, + "loss": 0.3273, + "step": 50830 + }, + { + "epoch": 1.963010154832233, + "grad_norm": 0.4945419132709503, + "learning_rate": 6.913523044647799e-05, + "loss": 0.2027, + "step": 50840 + }, + { + "epoch": 1.9633962701262597, + "grad_norm": 0.49989691376686096, + "learning_rate": 6.91094894268762e-05, + "loss": 0.3082, + "step": 50850 + }, + { + "epoch": 1.9637823854202865, + "grad_norm": 1.8456840515136719, + "learning_rate": 6.908374840727442e-05, + "loss": 0.102, + "step": 50860 + }, + { + "epoch": 1.9641685007143133, + "grad_norm": 1.256460428237915, + "learning_rate": 6.905800738767263e-05, + "loss": 0.1375, + "step": 50870 + }, + { + "epoch": 1.96455461600834, + "grad_norm": 2.0318634510040283, + "learning_rate": 6.903226636807084e-05, + "loss": 0.2662, + "step": 50880 + }, + { + "epoch": 1.964940731302367, + "grad_norm": 0.6381733417510986, + "learning_rate": 6.900652534846906e-05, + "loss": 0.176, + "step": 50890 + }, + { + "epoch": 1.9653268465963936, + "grad_norm": 0.2860821485519409, + "learning_rate": 6.898078432886727e-05, + "loss": 0.1307, + "step": 50900 + }, + { + "epoch": 1.9657129618904206, + "grad_norm": 1.2890506982803345, + "learning_rate": 6.895504330926548e-05, + "loss": 0.1305, + "step": 50910 + }, + { + "epoch": 1.9660990771844473, + "grad_norm": 0.13067105412483215, + "learning_rate": 6.89293022896637e-05, + "loss": 0.1777, + "step": 50920 + }, + { + "epoch": 1.966485192478474, + "grad_norm": 0.7632800340652466, + "learning_rate": 6.890356127006191e-05, + "loss": 0.1248, + "step": 50930 + }, + { + "epoch": 1.9668713077725009, + "grad_norm": 0.10640933364629745, + "learning_rate": 6.887782025046012e-05, + "loss": 0.2368, + "step": 50940 + }, + { + "epoch": 1.9672574230665276, + "grad_norm": 0.8060460686683655, + "learning_rate": 6.885207923085834e-05, + "loss": 0.1205, + "step": 50950 + }, + { + "epoch": 1.9676435383605546, + "grad_norm": 0.028001902624964714, + "learning_rate": 6.882633821125655e-05, + "loss": 0.1134, + "step": 50960 + }, + { + "epoch": 1.9680296536545812, + "grad_norm": 1.236852765083313, + "learning_rate": 6.880059719165478e-05, + "loss": 0.3189, + "step": 50970 + }, + { + "epoch": 1.9684157689486081, + "grad_norm": 2.090635299682617, + "learning_rate": 6.877485617205298e-05, + "loss": 0.265, + "step": 50980 + }, + { + "epoch": 1.9688018842426347, + "grad_norm": 1.046667218208313, + "learning_rate": 6.874911515245119e-05, + "loss": 0.2501, + "step": 50990 + }, + { + "epoch": 1.9691879995366617, + "grad_norm": 2.0770325660705566, + "learning_rate": 6.87233741328494e-05, + "loss": 0.2136, + "step": 51000 + }, + { + "epoch": 1.9695741148306884, + "grad_norm": 
0.7211881279945374, + "learning_rate": 6.869763311324762e-05, + "loss": 0.2434, + "step": 51010 + }, + { + "epoch": 1.9699602301247152, + "grad_norm": 0.8447550535202026, + "learning_rate": 6.867189209364583e-05, + "loss": 0.3904, + "step": 51020 + }, + { + "epoch": 1.970346345418742, + "grad_norm": 0.2205502986907959, + "learning_rate": 6.864615107404406e-05, + "loss": 0.1804, + "step": 51030 + }, + { + "epoch": 1.9707324607127688, + "grad_norm": 0.5322203636169434, + "learning_rate": 6.862041005444227e-05, + "loss": 0.2106, + "step": 51040 + }, + { + "epoch": 1.9711185760067957, + "grad_norm": 3.1091675758361816, + "learning_rate": 6.859466903484048e-05, + "loss": 0.3203, + "step": 51050 + }, + { + "epoch": 1.9715046913008223, + "grad_norm": 2.670405864715576, + "learning_rate": 6.856892801523868e-05, + "loss": 0.2011, + "step": 51060 + }, + { + "epoch": 1.9718908065948493, + "grad_norm": 1.0524908304214478, + "learning_rate": 6.85431869956369e-05, + "loss": 0.1144, + "step": 51070 + }, + { + "epoch": 1.972276921888876, + "grad_norm": 3.9005608558654785, + "learning_rate": 6.851744597603511e-05, + "loss": 0.3813, + "step": 51080 + }, + { + "epoch": 1.9726630371829028, + "grad_norm": 1.1112456321716309, + "learning_rate": 6.849170495643332e-05, + "loss": 0.1755, + "step": 51090 + }, + { + "epoch": 1.9730491524769296, + "grad_norm": 0.5004397630691528, + "learning_rate": 6.846596393683155e-05, + "loss": 0.17, + "step": 51100 + }, + { + "epoch": 1.9734352677709563, + "grad_norm": 0.7849172353744507, + "learning_rate": 6.844022291722976e-05, + "loss": 0.1669, + "step": 51110 + }, + { + "epoch": 1.9738213830649833, + "grad_norm": 0.7871361970901489, + "learning_rate": 6.841448189762798e-05, + "loss": 0.1466, + "step": 51120 + }, + { + "epoch": 1.9742074983590099, + "grad_norm": 1.1090983152389526, + "learning_rate": 6.838874087802617e-05, + "loss": 0.117, + "step": 51130 + }, + { + "epoch": 1.9745936136530369, + "grad_norm": 0.8283473253250122, + "learning_rate": 6.836299985842439e-05, + "loss": 0.1792, + "step": 51140 + }, + { + "epoch": 1.9749797289470636, + "grad_norm": 1.0861999988555908, + "learning_rate": 6.83372588388226e-05, + "loss": 0.1357, + "step": 51150 + }, + { + "epoch": 1.9753658442410904, + "grad_norm": 1.0975921154022217, + "learning_rate": 6.831151781922083e-05, + "loss": 0.2618, + "step": 51160 + }, + { + "epoch": 1.9757519595351172, + "grad_norm": 1.579583764076233, + "learning_rate": 6.828577679961904e-05, + "loss": 0.3458, + "step": 51170 + }, + { + "epoch": 1.976138074829144, + "grad_norm": 0.48704493045806885, + "learning_rate": 6.826003578001726e-05, + "loss": 0.1892, + "step": 51180 + }, + { + "epoch": 1.976524190123171, + "grad_norm": 0.4196261465549469, + "learning_rate": 6.823429476041547e-05, + "loss": 0.2072, + "step": 51190 + }, + { + "epoch": 1.9769103054171975, + "grad_norm": 0.5368069410324097, + "learning_rate": 6.820855374081367e-05, + "loss": 0.1758, + "step": 51200 + }, + { + "epoch": 1.9772964207112245, + "grad_norm": 3.1401877403259277, + "learning_rate": 6.818281272121188e-05, + "loss": 0.2463, + "step": 51210 + }, + { + "epoch": 1.977682536005251, + "grad_norm": 2.333087682723999, + "learning_rate": 6.815707170161011e-05, + "loss": 0.1826, + "step": 51220 + }, + { + "epoch": 1.978068651299278, + "grad_norm": 0.4700605869293213, + "learning_rate": 6.813133068200832e-05, + "loss": 0.1761, + "step": 51230 + }, + { + "epoch": 1.9784547665933048, + "grad_norm": 0.23108141124248505, + "learning_rate": 6.810558966240653e-05, + "loss": 0.3299, + "step": 
51240 + }, + { + "epoch": 1.9788408818873315, + "grad_norm": 1.1515973806381226, + "learning_rate": 6.807984864280475e-05, + "loss": 0.2352, + "step": 51250 + }, + { + "epoch": 1.9792269971813583, + "grad_norm": 3.1671624183654785, + "learning_rate": 6.805410762320296e-05, + "loss": 0.18, + "step": 51260 + }, + { + "epoch": 1.979613112475385, + "grad_norm": 1.9045623540878296, + "learning_rate": 6.802836660360116e-05, + "loss": 0.1498, + "step": 51270 + }, + { + "epoch": 1.979999227769412, + "grad_norm": 3.6761367321014404, + "learning_rate": 6.800262558399939e-05, + "loss": 0.3242, + "step": 51280 + }, + { + "epoch": 1.9803853430634386, + "grad_norm": 0.8505986332893372, + "learning_rate": 6.79768845643976e-05, + "loss": 0.1563, + "step": 51290 + }, + { + "epoch": 1.9807714583574656, + "grad_norm": 0.5191363096237183, + "learning_rate": 6.795114354479581e-05, + "loss": 0.1699, + "step": 51300 + }, + { + "epoch": 1.9811575736514924, + "grad_norm": 1.0205559730529785, + "learning_rate": 6.792540252519403e-05, + "loss": 0.1785, + "step": 51310 + }, + { + "epoch": 1.9815436889455191, + "grad_norm": 0.929551899433136, + "learning_rate": 6.789966150559224e-05, + "loss": 0.2618, + "step": 51320 + }, + { + "epoch": 1.981929804239546, + "grad_norm": 0.3799718916416168, + "learning_rate": 6.787392048599045e-05, + "loss": 0.2574, + "step": 51330 + }, + { + "epoch": 1.9823159195335727, + "grad_norm": 0.8543326258659363, + "learning_rate": 6.784817946638867e-05, + "loss": 0.2277, + "step": 51340 + }, + { + "epoch": 1.9827020348275997, + "grad_norm": 1.2967779636383057, + "learning_rate": 6.782243844678688e-05, + "loss": 0.1693, + "step": 51350 + }, + { + "epoch": 1.9830881501216262, + "grad_norm": 0.9887800216674805, + "learning_rate": 6.77966974271851e-05, + "loss": 0.151, + "step": 51360 + }, + { + "epoch": 1.9834742654156532, + "grad_norm": 1.197924256324768, + "learning_rate": 6.777095640758331e-05, + "loss": 0.2184, + "step": 51370 + }, + { + "epoch": 1.98386038070968, + "grad_norm": 1.3980039358139038, + "learning_rate": 6.774521538798152e-05, + "loss": 0.3999, + "step": 51380 + }, + { + "epoch": 1.9842464960037067, + "grad_norm": 2.041222095489502, + "learning_rate": 6.771947436837973e-05, + "loss": 0.203, + "step": 51390 + }, + { + "epoch": 1.9846326112977335, + "grad_norm": 1.3461644649505615, + "learning_rate": 6.769373334877795e-05, + "loss": 0.2735, + "step": 51400 + }, + { + "epoch": 1.9850187265917603, + "grad_norm": 1.046863079071045, + "learning_rate": 6.766799232917616e-05, + "loss": 0.3281, + "step": 51410 + }, + { + "epoch": 1.9854048418857873, + "grad_norm": 1.3338502645492554, + "learning_rate": 6.764225130957437e-05, + "loss": 0.2538, + "step": 51420 + }, + { + "epoch": 1.9857909571798138, + "grad_norm": 1.3392490148544312, + "learning_rate": 6.761651028997259e-05, + "loss": 0.2737, + "step": 51430 + }, + { + "epoch": 1.9861770724738408, + "grad_norm": 1.5787503719329834, + "learning_rate": 6.75907692703708e-05, + "loss": 0.185, + "step": 51440 + }, + { + "epoch": 1.9865631877678673, + "grad_norm": 0.5758817791938782, + "learning_rate": 6.756502825076901e-05, + "loss": 0.1879, + "step": 51450 + }, + { + "epoch": 1.9869493030618943, + "grad_norm": 1.6144100427627563, + "learning_rate": 6.753928723116723e-05, + "loss": 0.2082, + "step": 51460 + }, + { + "epoch": 1.987335418355921, + "grad_norm": 0.7562010884284973, + "learning_rate": 6.751354621156545e-05, + "loss": 0.1859, + "step": 51470 + }, + { + "epoch": 1.9877215336499479, + "grad_norm": 1.3656364679336548, + "learning_rate": 
6.748780519196365e-05, + "loss": 0.1591, + "step": 51480 + }, + { + "epoch": 1.9881076489439746, + "grad_norm": 2.7086987495422363, + "learning_rate": 6.746206417236187e-05, + "loss": 0.2364, + "step": 51490 + }, + { + "epoch": 1.9884937642380014, + "grad_norm": 2.2941534519195557, + "learning_rate": 6.743632315276008e-05, + "loss": 0.2207, + "step": 51500 + }, + { + "epoch": 1.9888798795320284, + "grad_norm": 1.2723060846328735, + "learning_rate": 6.74105821331583e-05, + "loss": 0.2617, + "step": 51510 + }, + { + "epoch": 1.989265994826055, + "grad_norm": 2.308548927307129, + "learning_rate": 6.738484111355651e-05, + "loss": 0.1709, + "step": 51520 + }, + { + "epoch": 1.989652110120082, + "grad_norm": 0.7583028078079224, + "learning_rate": 6.735910009395473e-05, + "loss": 0.2998, + "step": 51530 + }, + { + "epoch": 1.9900382254141087, + "grad_norm": 2.3491339683532715, + "learning_rate": 6.733335907435295e-05, + "loss": 0.209, + "step": 51540 + }, + { + "epoch": 1.9904243407081355, + "grad_norm": 3.154418468475342, + "learning_rate": 6.730761805475115e-05, + "loss": 0.3855, + "step": 51550 + }, + { + "epoch": 1.9908104560021622, + "grad_norm": 0.6829432845115662, + "learning_rate": 6.728187703514936e-05, + "loss": 0.1427, + "step": 51560 + }, + { + "epoch": 1.991196571296189, + "grad_norm": 1.331217646598816, + "learning_rate": 6.725613601554757e-05, + "loss": 0.1843, + "step": 51570 + }, + { + "epoch": 1.991582686590216, + "grad_norm": 2.35197114944458, + "learning_rate": 6.723039499594579e-05, + "loss": 0.2557, + "step": 51580 + }, + { + "epoch": 1.9919688018842425, + "grad_norm": 0.5971415638923645, + "learning_rate": 6.7204653976344e-05, + "loss": 0.1782, + "step": 51590 + }, + { + "epoch": 1.9923549171782695, + "grad_norm": 1.0941762924194336, + "learning_rate": 6.717891295674223e-05, + "loss": 0.1377, + "step": 51600 + }, + { + "epoch": 1.9927410324722963, + "grad_norm": 0.43831324577331543, + "learning_rate": 6.715317193714044e-05, + "loss": 0.181, + "step": 51610 + }, + { + "epoch": 1.993127147766323, + "grad_norm": 1.4920772314071655, + "learning_rate": 6.712743091753864e-05, + "loss": 0.2724, + "step": 51620 + }, + { + "epoch": 1.9935132630603498, + "grad_norm": 0.6723024845123291, + "learning_rate": 6.710168989793685e-05, + "loss": 0.1732, + "step": 51630 + }, + { + "epoch": 1.9938993783543766, + "grad_norm": 0.925322413444519, + "learning_rate": 6.707594887833507e-05, + "loss": 0.2794, + "step": 51640 + }, + { + "epoch": 1.9942854936484036, + "grad_norm": 0.27710771560668945, + "learning_rate": 6.705020785873328e-05, + "loss": 0.2379, + "step": 51650 + }, + { + "epoch": 1.9946716089424301, + "grad_norm": 0.24498054385185242, + "learning_rate": 6.702446683913151e-05, + "loss": 0.2685, + "step": 51660 + }, + { + "epoch": 1.995057724236457, + "grad_norm": 0.43449532985687256, + "learning_rate": 6.699872581952972e-05, + "loss": 0.2799, + "step": 51670 + }, + { + "epoch": 1.9954438395304837, + "grad_norm": 1.3820387125015259, + "learning_rate": 6.697298479992793e-05, + "loss": 0.3061, + "step": 51680 + }, + { + "epoch": 1.9958299548245106, + "grad_norm": 1.3658883571624756, + "learning_rate": 6.694724378032615e-05, + "loss": 0.2033, + "step": 51690 + }, + { + "epoch": 1.9962160701185374, + "grad_norm": 0.905115008354187, + "learning_rate": 6.692150276072435e-05, + "loss": 0.1817, + "step": 51700 + }, + { + "epoch": 1.9966021854125642, + "grad_norm": 0.6158314347267151, + "learning_rate": 6.689576174112256e-05, + "loss": 0.1853, + "step": 51710 + }, + { + "epoch": 1.996988300706591, 
+ "grad_norm": 0.145170658826828, + "learning_rate": 6.687002072152079e-05, + "loss": 0.1351, + "step": 51720 + }, + { + "epoch": 1.9973744160006177, + "grad_norm": 0.6830449104309082, + "learning_rate": 6.6844279701919e-05, + "loss": 0.1196, + "step": 51730 + }, + { + "epoch": 1.9977605312946447, + "grad_norm": 0.5634799599647522, + "learning_rate": 6.681853868231721e-05, + "loss": 0.1919, + "step": 51740 + }, + { + "epoch": 1.9981466465886712, + "grad_norm": 1.7590057849884033, + "learning_rate": 6.679279766271543e-05, + "loss": 0.5667, + "step": 51750 + }, + { + "epoch": 1.9985327618826982, + "grad_norm": 0.638635516166687, + "learning_rate": 6.676705664311364e-05, + "loss": 0.2653, + "step": 51760 + }, + { + "epoch": 1.998918877176725, + "grad_norm": 2.008990526199341, + "learning_rate": 6.674131562351184e-05, + "loss": 0.3168, + "step": 51770 + }, + { + "epoch": 1.9993049924707518, + "grad_norm": 0.6151747107505798, + "learning_rate": 6.671557460391007e-05, + "loss": 0.2078, + "step": 51780 + }, + { + "epoch": 1.9996911077647785, + "grad_norm": 1.702333688735962, + "learning_rate": 6.668983358430828e-05, + "loss": 0.3154, + "step": 51790 + }, + { + "epoch": 2.0000772230588053, + "grad_norm": 1.1011065244674683, + "learning_rate": 6.66640925647065e-05, + "loss": 0.2318, + "step": 51800 + }, + { + "epoch": 2.0004633383528323, + "grad_norm": 0.7088577747344971, + "learning_rate": 6.663835154510471e-05, + "loss": 0.2106, + "step": 51810 + }, + { + "epoch": 2.000849453646859, + "grad_norm": 0.9032210111618042, + "learning_rate": 6.661261052550292e-05, + "loss": 0.2235, + "step": 51820 + }, + { + "epoch": 2.001235568940886, + "grad_norm": 1.8217551708221436, + "learning_rate": 6.658686950590113e-05, + "loss": 0.2385, + "step": 51830 + }, + { + "epoch": 2.0016216842349124, + "grad_norm": 0.3937009871006012, + "learning_rate": 6.656112848629933e-05, + "loss": 0.0739, + "step": 51840 + }, + { + "epoch": 2.0020077995289394, + "grad_norm": 0.1690339297056198, + "learning_rate": 6.653538746669756e-05, + "loss": 0.2361, + "step": 51850 + }, + { + "epoch": 2.0023939148229664, + "grad_norm": 0.4467180073261261, + "learning_rate": 6.650964644709577e-05, + "loss": 0.1675, + "step": 51860 + }, + { + "epoch": 2.002780030116993, + "grad_norm": 0.6363991498947144, + "learning_rate": 6.648390542749399e-05, + "loss": 0.1565, + "step": 51870 + }, + { + "epoch": 2.00316614541102, + "grad_norm": 0.6243847012519836, + "learning_rate": 6.64581644078922e-05, + "loss": 0.3038, + "step": 51880 + }, + { + "epoch": 2.0035522607050464, + "grad_norm": 1.705432415008545, + "learning_rate": 6.643242338829041e-05, + "loss": 0.1347, + "step": 51890 + }, + { + "epoch": 2.0039383759990734, + "grad_norm": 0.8589022159576416, + "learning_rate": 6.640668236868863e-05, + "loss": 0.1867, + "step": 51900 + }, + { + "epoch": 2.0043244912931, + "grad_norm": 1.468563437461853, + "learning_rate": 6.638094134908684e-05, + "loss": 0.3603, + "step": 51910 + }, + { + "epoch": 2.004710606587127, + "grad_norm": 2.0505447387695312, + "learning_rate": 6.635520032948505e-05, + "loss": 0.3336, + "step": 51920 + }, + { + "epoch": 2.0050967218811535, + "grad_norm": 0.06167216598987579, + "learning_rate": 6.632945930988327e-05, + "loss": 0.0434, + "step": 51930 + }, + { + "epoch": 2.0054828371751805, + "grad_norm": 0.43672385811805725, + "learning_rate": 6.630371829028148e-05, + "loss": 0.2008, + "step": 51940 + }, + { + "epoch": 2.0058689524692075, + "grad_norm": 0.20339979231357574, + "learning_rate": 6.62779772706797e-05, + "loss": 0.1828, + 
"step": 51950 + }, + { + "epoch": 2.006255067763234, + "grad_norm": 0.9884247779846191, + "learning_rate": 6.625223625107791e-05, + "loss": 0.1913, + "step": 51960 + }, + { + "epoch": 2.006641183057261, + "grad_norm": 1.9355684518814087, + "learning_rate": 6.622649523147612e-05, + "loss": 0.2164, + "step": 51970 + }, + { + "epoch": 2.0070272983512876, + "grad_norm": 0.6647536754608154, + "learning_rate": 6.620075421187433e-05, + "loss": 0.1993, + "step": 51980 + }, + { + "epoch": 2.0074134136453146, + "grad_norm": 0.5795693397521973, + "learning_rate": 6.617501319227255e-05, + "loss": 0.1887, + "step": 51990 + }, + { + "epoch": 2.007799528939341, + "grad_norm": 1.605940341949463, + "learning_rate": 6.614927217267076e-05, + "loss": 0.247, + "step": 52000 + }, + { + "epoch": 2.008185644233368, + "grad_norm": 1.3359249830245972, + "learning_rate": 6.612353115306897e-05, + "loss": 0.2031, + "step": 52010 + }, + { + "epoch": 2.008571759527395, + "grad_norm": 3.889131784439087, + "learning_rate": 6.609779013346719e-05, + "loss": 0.1584, + "step": 52020 + }, + { + "epoch": 2.0089578748214216, + "grad_norm": 1.9775649309158325, + "learning_rate": 6.607204911386541e-05, + "loss": 0.1041, + "step": 52030 + }, + { + "epoch": 2.0093439901154486, + "grad_norm": 0.13655538856983185, + "learning_rate": 6.604630809426363e-05, + "loss": 0.1621, + "step": 52040 + }, + { + "epoch": 2.009730105409475, + "grad_norm": 0.659899115562439, + "learning_rate": 6.602056707466183e-05, + "loss": 0.1603, + "step": 52050 + }, + { + "epoch": 2.010116220703502, + "grad_norm": 0.5897572040557861, + "learning_rate": 6.599482605506004e-05, + "loss": 0.1807, + "step": 52060 + }, + { + "epoch": 2.0105023359975287, + "grad_norm": 1.9831010103225708, + "learning_rate": 6.596908503545825e-05, + "loss": 0.1517, + "step": 52070 + }, + { + "epoch": 2.0108884512915557, + "grad_norm": 0.16144464910030365, + "learning_rate": 6.594334401585647e-05, + "loss": 0.0999, + "step": 52080 + }, + { + "epoch": 2.0112745665855822, + "grad_norm": 1.6258141994476318, + "learning_rate": 6.591760299625468e-05, + "loss": 0.2152, + "step": 52090 + }, + { + "epoch": 2.0116606818796092, + "grad_norm": 1.0857526063919067, + "learning_rate": 6.589186197665291e-05, + "loss": 0.3286, + "step": 52100 + }, + { + "epoch": 2.012046797173636, + "grad_norm": 0.2918669581413269, + "learning_rate": 6.586612095705112e-05, + "loss": 0.0839, + "step": 52110 + }, + { + "epoch": 2.0124329124676628, + "grad_norm": 0.7994667887687683, + "learning_rate": 6.584037993744932e-05, + "loss": 0.0723, + "step": 52120 + }, + { + "epoch": 2.0128190277616897, + "grad_norm": 1.1056885719299316, + "learning_rate": 6.581463891784753e-05, + "loss": 0.2936, + "step": 52130 + }, + { + "epoch": 2.0132051430557163, + "grad_norm": 1.6628743410110474, + "learning_rate": 6.578889789824575e-05, + "loss": 0.1095, + "step": 52140 + }, + { + "epoch": 2.0135912583497433, + "grad_norm": 0.7514179348945618, + "learning_rate": 6.576315687864396e-05, + "loss": 0.2834, + "step": 52150 + }, + { + "epoch": 2.01397737364377, + "grad_norm": 0.17141447961330414, + "learning_rate": 6.573741585904219e-05, + "loss": 0.0834, + "step": 52160 + }, + { + "epoch": 2.014363488937797, + "grad_norm": 1.6377034187316895, + "learning_rate": 6.57116748394404e-05, + "loss": 0.1515, + "step": 52170 + }, + { + "epoch": 2.014749604231824, + "grad_norm": 2.604389190673828, + "learning_rate": 6.568593381983861e-05, + "loss": 0.0964, + "step": 52180 + }, + { + "epoch": 2.0151357195258504, + "grad_norm": 0.737349808216095, + 
"learning_rate": 6.566019280023681e-05, + "loss": 0.1861, + "step": 52190 + }, + { + "epoch": 2.0155218348198773, + "grad_norm": 0.499905526638031, + "learning_rate": 6.563445178063503e-05, + "loss": 0.2146, + "step": 52200 + }, + { + "epoch": 2.015907950113904, + "grad_norm": 1.0351229906082153, + "learning_rate": 6.560871076103324e-05, + "loss": 0.2461, + "step": 52210 + }, + { + "epoch": 2.016294065407931, + "grad_norm": 1.6009700298309326, + "learning_rate": 6.558296974143147e-05, + "loss": 0.2325, + "step": 52220 + }, + { + "epoch": 2.0166801807019574, + "grad_norm": 1.5281599760055542, + "learning_rate": 6.555722872182968e-05, + "loss": 0.2172, + "step": 52230 + }, + { + "epoch": 2.0170662959959844, + "grad_norm": 0.7039555907249451, + "learning_rate": 6.553148770222789e-05, + "loss": 0.3143, + "step": 52240 + }, + { + "epoch": 2.0174524112900114, + "grad_norm": 1.310943365097046, + "learning_rate": 6.55057466826261e-05, + "loss": 0.1785, + "step": 52250 + }, + { + "epoch": 2.017838526584038, + "grad_norm": 0.6066591143608093, + "learning_rate": 6.548000566302432e-05, + "loss": 0.1295, + "step": 52260 + }, + { + "epoch": 2.018224641878065, + "grad_norm": 2.3664653301239014, + "learning_rate": 6.545426464342252e-05, + "loss": 0.229, + "step": 52270 + }, + { + "epoch": 2.0186107571720915, + "grad_norm": 1.0997484922409058, + "learning_rate": 6.542852362382075e-05, + "loss": 0.1494, + "step": 52280 + }, + { + "epoch": 2.0189968724661185, + "grad_norm": 3.237204074859619, + "learning_rate": 6.540278260421896e-05, + "loss": 0.1952, + "step": 52290 + }, + { + "epoch": 2.019382987760145, + "grad_norm": 1.9585202932357788, + "learning_rate": 6.537704158461717e-05, + "loss": 0.1986, + "step": 52300 + }, + { + "epoch": 2.019769103054172, + "grad_norm": 0.18385589122772217, + "learning_rate": 6.535130056501539e-05, + "loss": 0.1373, + "step": 52310 + }, + { + "epoch": 2.0201552183481986, + "grad_norm": 1.4465802907943726, + "learning_rate": 6.53255595454136e-05, + "loss": 0.2492, + "step": 52320 + }, + { + "epoch": 2.0205413336422255, + "grad_norm": 0.3525356352329254, + "learning_rate": 6.529981852581181e-05, + "loss": 0.1046, + "step": 52330 + }, + { + "epoch": 2.0209274489362525, + "grad_norm": 0.10468830168247223, + "learning_rate": 6.527407750621003e-05, + "loss": 0.0702, + "step": 52340 + }, + { + "epoch": 2.021313564230279, + "grad_norm": 0.2023550570011139, + "learning_rate": 6.524833648660824e-05, + "loss": 0.2134, + "step": 52350 + }, + { + "epoch": 2.021699679524306, + "grad_norm": 1.8294217586517334, + "learning_rate": 6.522259546700645e-05, + "loss": 0.2443, + "step": 52360 + }, + { + "epoch": 2.0220857948183326, + "grad_norm": 0.3425254225730896, + "learning_rate": 6.519685444740467e-05, + "loss": 0.151, + "step": 52370 + }, + { + "epoch": 2.0224719101123596, + "grad_norm": 0.3099939227104187, + "learning_rate": 6.517111342780288e-05, + "loss": 0.1133, + "step": 52380 + }, + { + "epoch": 2.022858025406386, + "grad_norm": 2.0373382568359375, + "learning_rate": 6.514537240820109e-05, + "loss": 0.1773, + "step": 52390 + }, + { + "epoch": 2.023244140700413, + "grad_norm": 0.5817141532897949, + "learning_rate": 6.51196313885993e-05, + "loss": 0.1276, + "step": 52400 + }, + { + "epoch": 2.02363025599444, + "grad_norm": 2.0622966289520264, + "learning_rate": 6.509389036899752e-05, + "loss": 0.1889, + "step": 52410 + }, + { + "epoch": 2.0240163712884667, + "grad_norm": 1.1906920671463013, + "learning_rate": 6.506814934939573e-05, + "loss": 0.096, + "step": 52420 + }, + { + "epoch": 
2.0244024865824937, + "grad_norm": 0.1317962408065796, + "learning_rate": 6.504240832979395e-05, + "loss": 0.2232, + "step": 52430 + }, + { + "epoch": 2.02478860187652, + "grad_norm": 0.13029718399047852, + "learning_rate": 6.501666731019216e-05, + "loss": 0.1511, + "step": 52440 + }, + { + "epoch": 2.025174717170547, + "grad_norm": 0.793836772441864, + "learning_rate": 6.499092629059037e-05, + "loss": 0.1654, + "step": 52450 + }, + { + "epoch": 2.0255608324645737, + "grad_norm": 0.5743208527565002, + "learning_rate": 6.496518527098859e-05, + "loss": 0.2694, + "step": 52460 + }, + { + "epoch": 2.0259469477586007, + "grad_norm": 0.9897276759147644, + "learning_rate": 6.49394442513868e-05, + "loss": 0.2149, + "step": 52470 + }, + { + "epoch": 2.0263330630526277, + "grad_norm": 2.601984977722168, + "learning_rate": 6.491370323178501e-05, + "loss": 0.1285, + "step": 52480 + }, + { + "epoch": 2.0267191783466543, + "grad_norm": 1.3703612089157104, + "learning_rate": 6.488796221218323e-05, + "loss": 0.1226, + "step": 52490 + }, + { + "epoch": 2.0271052936406813, + "grad_norm": 0.8976957201957703, + "learning_rate": 6.486222119258144e-05, + "loss": 0.3069, + "step": 52500 + }, + { + "epoch": 2.027491408934708, + "grad_norm": 1.4867346286773682, + "learning_rate": 6.483648017297965e-05, + "loss": 0.1804, + "step": 52510 + }, + { + "epoch": 2.027877524228735, + "grad_norm": 0.9173004031181335, + "learning_rate": 6.481073915337787e-05, + "loss": 0.193, + "step": 52520 + }, + { + "epoch": 2.0282636395227613, + "grad_norm": 1.4100719690322876, + "learning_rate": 6.478499813377609e-05, + "loss": 0.2443, + "step": 52530 + }, + { + "epoch": 2.0286497548167883, + "grad_norm": 0.08620387315750122, + "learning_rate": 6.475925711417429e-05, + "loss": 0.2858, + "step": 52540 + }, + { + "epoch": 2.029035870110815, + "grad_norm": 0.6817231178283691, + "learning_rate": 6.47335160945725e-05, + "loss": 0.1421, + "step": 52550 + }, + { + "epoch": 2.029421985404842, + "grad_norm": 0.7937541604042053, + "learning_rate": 6.470777507497072e-05, + "loss": 0.1856, + "step": 52560 + }, + { + "epoch": 2.029808100698869, + "grad_norm": 0.3391193151473999, + "learning_rate": 6.468203405536893e-05, + "loss": 0.2714, + "step": 52570 + }, + { + "epoch": 2.0301942159928954, + "grad_norm": 0.45617592334747314, + "learning_rate": 6.465629303576715e-05, + "loss": 0.1528, + "step": 52580 + }, + { + "epoch": 2.0305803312869224, + "grad_norm": 0.6412602663040161, + "learning_rate": 6.463055201616537e-05, + "loss": 0.0908, + "step": 52590 + }, + { + "epoch": 2.030966446580949, + "grad_norm": 0.5588594079017639, + "learning_rate": 6.460481099656359e-05, + "loss": 0.1448, + "step": 52600 + }, + { + "epoch": 2.031352561874976, + "grad_norm": 0.5726466774940491, + "learning_rate": 6.457906997696179e-05, + "loss": 0.2771, + "step": 52610 + }, + { + "epoch": 2.0317386771690025, + "grad_norm": 1.625110387802124, + "learning_rate": 6.455332895736e-05, + "loss": 0.1147, + "step": 52620 + }, + { + "epoch": 2.0321247924630295, + "grad_norm": 0.9171527624130249, + "learning_rate": 6.452758793775821e-05, + "loss": 0.0847, + "step": 52630 + }, + { + "epoch": 2.0325109077570565, + "grad_norm": 0.1905253678560257, + "learning_rate": 6.450184691815643e-05, + "loss": 0.2086, + "step": 52640 + }, + { + "epoch": 2.032897023051083, + "grad_norm": 0.2381320744752884, + "learning_rate": 6.447610589855464e-05, + "loss": 0.1767, + "step": 52650 + }, + { + "epoch": 2.03328313834511, + "grad_norm": 2.2482104301452637, + "learning_rate": 6.445036487895287e-05, + 
"loss": 0.2626, + "step": 52660 + }, + { + "epoch": 2.0336692536391365, + "grad_norm": 2.5901641845703125, + "learning_rate": 6.442462385935108e-05, + "loss": 0.21, + "step": 52670 + }, + { + "epoch": 2.0340553689331635, + "grad_norm": 3.242913007736206, + "learning_rate": 6.439888283974929e-05, + "loss": 0.3508, + "step": 52680 + }, + { + "epoch": 2.03444148422719, + "grad_norm": 0.12099716812372208, + "learning_rate": 6.437314182014749e-05, + "loss": 0.1109, + "step": 52690 + }, + { + "epoch": 2.034827599521217, + "grad_norm": 1.384753704071045, + "learning_rate": 6.43474008005457e-05, + "loss": 0.2023, + "step": 52700 + }, + { + "epoch": 2.035213714815244, + "grad_norm": 1.1507978439331055, + "learning_rate": 6.432165978094392e-05, + "loss": 0.2656, + "step": 52710 + }, + { + "epoch": 2.0355998301092706, + "grad_norm": 0.43186309933662415, + "learning_rate": 6.429591876134215e-05, + "loss": 0.2265, + "step": 52720 + }, + { + "epoch": 2.0359859454032976, + "grad_norm": 1.2791942358016968, + "learning_rate": 6.427017774174036e-05, + "loss": 0.189, + "step": 52730 + }, + { + "epoch": 2.036372060697324, + "grad_norm": 2.613863229751587, + "learning_rate": 6.424443672213857e-05, + "loss": 0.1636, + "step": 52740 + }, + { + "epoch": 2.036758175991351, + "grad_norm": 0.9245595932006836, + "learning_rate": 6.421869570253679e-05, + "loss": 0.2844, + "step": 52750 + }, + { + "epoch": 2.0371442912853777, + "grad_norm": 0.5449417233467102, + "learning_rate": 6.419295468293499e-05, + "loss": 0.24, + "step": 52760 + }, + { + "epoch": 2.0375304065794047, + "grad_norm": 0.24012671411037445, + "learning_rate": 6.41672136633332e-05, + "loss": 0.226, + "step": 52770 + }, + { + "epoch": 2.037916521873431, + "grad_norm": 0.07777285575866699, + "learning_rate": 6.414147264373143e-05, + "loss": 0.1351, + "step": 52780 + }, + { + "epoch": 2.038302637167458, + "grad_norm": 1.4423730373382568, + "learning_rate": 6.411573162412964e-05, + "loss": 0.1358, + "step": 52790 + }, + { + "epoch": 2.038688752461485, + "grad_norm": 0.9842507243156433, + "learning_rate": 6.408999060452785e-05, + "loss": 0.1947, + "step": 52800 + }, + { + "epoch": 2.0390748677555117, + "grad_norm": 0.23728783428668976, + "learning_rate": 6.406424958492607e-05, + "loss": 0.1655, + "step": 52810 + }, + { + "epoch": 2.0394609830495387, + "grad_norm": 1.0350521802902222, + "learning_rate": 6.403850856532428e-05, + "loss": 0.2269, + "step": 52820 + }, + { + "epoch": 2.0398470983435653, + "grad_norm": 0.7008020877838135, + "learning_rate": 6.401276754572248e-05, + "loss": 0.0694, + "step": 52830 + }, + { + "epoch": 2.0402332136375922, + "grad_norm": 0.2844768464565277, + "learning_rate": 6.39870265261207e-05, + "loss": 0.2871, + "step": 52840 + }, + { + "epoch": 2.040619328931619, + "grad_norm": 0.10209562629461288, + "learning_rate": 6.396128550651892e-05, + "loss": 0.1556, + "step": 52850 + }, + { + "epoch": 2.041005444225646, + "grad_norm": 0.3833054006099701, + "learning_rate": 6.393554448691713e-05, + "loss": 0.2414, + "step": 52860 + }, + { + "epoch": 2.0413915595196728, + "grad_norm": 1.023119330406189, + "learning_rate": 6.390980346731535e-05, + "loss": 0.3514, + "step": 52870 + }, + { + "epoch": 2.0417776748136993, + "grad_norm": 0.2640077769756317, + "learning_rate": 6.388406244771356e-05, + "loss": 0.1651, + "step": 52880 + }, + { + "epoch": 2.0421637901077263, + "grad_norm": 0.5331079363822937, + "learning_rate": 6.385832142811177e-05, + "loss": 0.225, + "step": 52890 + }, + { + "epoch": 2.042549905401753, + "grad_norm": 
1.8062942028045654, + "learning_rate": 6.383258040850999e-05, + "loss": 0.145, + "step": 52900 + }, + { + "epoch": 2.04293602069578, + "grad_norm": 0.5096505284309387, + "learning_rate": 6.38068393889082e-05, + "loss": 0.1353, + "step": 52910 + }, + { + "epoch": 2.0433221359898064, + "grad_norm": 0.6749324798583984, + "learning_rate": 6.378109836930641e-05, + "loss": 0.2563, + "step": 52920 + }, + { + "epoch": 2.0437082512838334, + "grad_norm": 2.487567186355591, + "learning_rate": 6.375535734970463e-05, + "loss": 0.1616, + "step": 52930 + }, + { + "epoch": 2.0440943665778604, + "grad_norm": 0.7904801964759827, + "learning_rate": 6.372961633010284e-05, + "loss": 0.1579, + "step": 52940 + }, + { + "epoch": 2.044480481871887, + "grad_norm": 2.078003406524658, + "learning_rate": 6.370387531050105e-05, + "loss": 0.2368, + "step": 52950 + }, + { + "epoch": 2.044866597165914, + "grad_norm": 0.778519332408905, + "learning_rate": 6.367813429089927e-05, + "loss": 0.1509, + "step": 52960 + }, + { + "epoch": 2.0452527124599404, + "grad_norm": 0.919970691204071, + "learning_rate": 6.365239327129748e-05, + "loss": 0.1493, + "step": 52970 + }, + { + "epoch": 2.0456388277539674, + "grad_norm": 0.06400478631258011, + "learning_rate": 6.362665225169569e-05, + "loss": 0.2599, + "step": 52980 + }, + { + "epoch": 2.046024943047994, + "grad_norm": 3.479253053665161, + "learning_rate": 6.36009112320939e-05, + "loss": 0.2582, + "step": 52990 + }, + { + "epoch": 2.046411058342021, + "grad_norm": 0.5447397232055664, + "learning_rate": 6.357517021249212e-05, + "loss": 0.0906, + "step": 53000 + }, + { + "epoch": 2.0467971736360475, + "grad_norm": 1.1518877744674683, + "learning_rate": 6.354942919289033e-05, + "loss": 0.1255, + "step": 53010 + }, + { + "epoch": 2.0471832889300745, + "grad_norm": 1.782228708267212, + "learning_rate": 6.352368817328855e-05, + "loss": 0.4931, + "step": 53020 + }, + { + "epoch": 2.0475694042241015, + "grad_norm": 1.1501574516296387, + "learning_rate": 6.349794715368677e-05, + "loss": 0.1242, + "step": 53030 + }, + { + "epoch": 2.047955519518128, + "grad_norm": 2.69543719291687, + "learning_rate": 6.347220613408497e-05, + "loss": 0.2079, + "step": 53040 + }, + { + "epoch": 2.048341634812155, + "grad_norm": 0.6990146040916443, + "learning_rate": 6.344646511448318e-05, + "loss": 0.1033, + "step": 53050 + }, + { + "epoch": 2.0487277501061816, + "grad_norm": 2.3171396255493164, + "learning_rate": 6.34207240948814e-05, + "loss": 0.2182, + "step": 53060 + }, + { + "epoch": 2.0491138654002086, + "grad_norm": 0.11202628165483475, + "learning_rate": 6.339498307527961e-05, + "loss": 0.1862, + "step": 53070 + }, + { + "epoch": 2.049499980694235, + "grad_norm": 0.0545661523938179, + "learning_rate": 6.336924205567782e-05, + "loss": 0.1641, + "step": 53080 + }, + { + "epoch": 2.049886095988262, + "grad_norm": 0.9861188530921936, + "learning_rate": 6.334350103607605e-05, + "loss": 0.1862, + "step": 53090 + }, + { + "epoch": 2.050272211282289, + "grad_norm": 0.6137722730636597, + "learning_rate": 6.331776001647427e-05, + "loss": 0.1213, + "step": 53100 + }, + { + "epoch": 2.0506583265763156, + "grad_norm": 0.3138205409049988, + "learning_rate": 6.329201899687246e-05, + "loss": 0.2589, + "step": 53110 + }, + { + "epoch": 2.0510444418703426, + "grad_norm": 1.4361293315887451, + "learning_rate": 6.326627797727068e-05, + "loss": 0.0854, + "step": 53120 + }, + { + "epoch": 2.051430557164369, + "grad_norm": 0.18099132180213928, + "learning_rate": 6.324053695766889e-05, + "loss": 0.173, + "step": 53130 + }, + 
{ + "epoch": 2.051816672458396, + "grad_norm": 1.630255103111267, + "learning_rate": 6.32147959380671e-05, + "loss": 0.2573, + "step": 53140 + }, + { + "epoch": 2.0522027877524227, + "grad_norm": 1.0745834112167358, + "learning_rate": 6.318905491846532e-05, + "loss": 0.1249, + "step": 53150 + }, + { + "epoch": 2.0525889030464497, + "grad_norm": 0.15289072692394257, + "learning_rate": 6.316331389886354e-05, + "loss": 0.204, + "step": 53160 + }, + { + "epoch": 2.0529750183404767, + "grad_norm": 0.7459616661071777, + "learning_rate": 6.313757287926176e-05, + "loss": 0.1632, + "step": 53170 + }, + { + "epoch": 2.0533611336345032, + "grad_norm": 2.7093374729156494, + "learning_rate": 6.311183185965996e-05, + "loss": 0.1477, + "step": 53180 + }, + { + "epoch": 2.0537472489285302, + "grad_norm": 0.1735665500164032, + "learning_rate": 6.308609084005817e-05, + "loss": 0.1736, + "step": 53190 + }, + { + "epoch": 2.0541333642225568, + "grad_norm": 0.3297293186187744, + "learning_rate": 6.306034982045638e-05, + "loss": 0.0974, + "step": 53200 + }, + { + "epoch": 2.0545194795165838, + "grad_norm": 0.023612016811966896, + "learning_rate": 6.30346088008546e-05, + "loss": 0.1451, + "step": 53210 + }, + { + "epoch": 2.0549055948106103, + "grad_norm": 1.3713831901550293, + "learning_rate": 6.300886778125282e-05, + "loss": 0.1103, + "step": 53220 + }, + { + "epoch": 2.0552917101046373, + "grad_norm": 0.5888402462005615, + "learning_rate": 6.298312676165104e-05, + "loss": 0.1708, + "step": 53230 + }, + { + "epoch": 2.055677825398664, + "grad_norm": 1.8231903314590454, + "learning_rate": 6.295738574204925e-05, + "loss": 0.2337, + "step": 53240 + }, + { + "epoch": 2.056063940692691, + "grad_norm": 0.5738593935966492, + "learning_rate": 6.293164472244746e-05, + "loss": 0.1951, + "step": 53250 + }, + { + "epoch": 2.056450055986718, + "grad_norm": 0.13595734536647797, + "learning_rate": 6.290590370284566e-05, + "loss": 0.1381, + "step": 53260 + }, + { + "epoch": 2.0568361712807444, + "grad_norm": 0.9126518368721008, + "learning_rate": 6.288016268324388e-05, + "loss": 0.1247, + "step": 53270 + }, + { + "epoch": 2.0572222865747714, + "grad_norm": 0.5245015621185303, + "learning_rate": 6.28544216636421e-05, + "loss": 0.2851, + "step": 53280 + }, + { + "epoch": 2.057608401868798, + "grad_norm": 0.08704059571027756, + "learning_rate": 6.282868064404032e-05, + "loss": 0.1425, + "step": 53290 + }, + { + "epoch": 2.057994517162825, + "grad_norm": 0.3915903866291046, + "learning_rate": 6.280293962443853e-05, + "loss": 0.1683, + "step": 53300 + }, + { + "epoch": 2.0583806324568514, + "grad_norm": 1.5422857999801636, + "learning_rate": 6.277719860483674e-05, + "loss": 0.178, + "step": 53310 + }, + { + "epoch": 2.0587667477508784, + "grad_norm": 0.6210331320762634, + "learning_rate": 6.275145758523496e-05, + "loss": 0.1279, + "step": 53320 + }, + { + "epoch": 2.0591528630449054, + "grad_norm": 1.8133548498153687, + "learning_rate": 6.272571656563316e-05, + "loss": 0.1276, + "step": 53330 + }, + { + "epoch": 2.059538978338932, + "grad_norm": 1.8391001224517822, + "learning_rate": 6.269997554603138e-05, + "loss": 0.1533, + "step": 53340 + }, + { + "epoch": 2.059925093632959, + "grad_norm": 0.48838570713996887, + "learning_rate": 6.26742345264296e-05, + "loss": 0.1334, + "step": 53350 + }, + { + "epoch": 2.0603112089269855, + "grad_norm": 1.6755399703979492, + "learning_rate": 6.264849350682781e-05, + "loss": 0.1874, + "step": 53360 + }, + { + "epoch": 2.0606973242210125, + "grad_norm": 0.6416677236557007, + "learning_rate": 
6.262275248722602e-05, + "loss": 0.0775, + "step": 53370 + }, + { + "epoch": 2.061083439515039, + "grad_norm": 1.258190393447876, + "learning_rate": 6.259701146762424e-05, + "loss": 0.2183, + "step": 53380 + }, + { + "epoch": 2.061469554809066, + "grad_norm": 1.543530821800232, + "learning_rate": 6.257127044802245e-05, + "loss": 0.2124, + "step": 53390 + }, + { + "epoch": 2.0618556701030926, + "grad_norm": 1.7885076999664307, + "learning_rate": 6.254552942842065e-05, + "loss": 0.1746, + "step": 53400 + }, + { + "epoch": 2.0622417853971196, + "grad_norm": 0.7699170708656311, + "learning_rate": 6.251978840881888e-05, + "loss": 0.0489, + "step": 53410 + }, + { + "epoch": 2.0626279006911465, + "grad_norm": 0.6853735446929932, + "learning_rate": 6.249404738921709e-05, + "loss": 0.0828, + "step": 53420 + }, + { + "epoch": 2.063014015985173, + "grad_norm": 1.250551462173462, + "learning_rate": 6.24683063696153e-05, + "loss": 0.1361, + "step": 53430 + }, + { + "epoch": 2.0634001312792, + "grad_norm": 0.20269331336021423, + "learning_rate": 6.244256535001352e-05, + "loss": 0.1916, + "step": 53440 + }, + { + "epoch": 2.0637862465732266, + "grad_norm": 0.868029773235321, + "learning_rate": 6.241682433041173e-05, + "loss": 0.1665, + "step": 53450 + }, + { + "epoch": 2.0641723618672536, + "grad_norm": 0.5697484016418457, + "learning_rate": 6.239108331080994e-05, + "loss": 0.1873, + "step": 53460 + }, + { + "epoch": 2.06455847716128, + "grad_norm": 2.3145039081573486, + "learning_rate": 6.236534229120816e-05, + "loss": 0.3618, + "step": 53470 + }, + { + "epoch": 2.064944592455307, + "grad_norm": 0.3517427444458008, + "learning_rate": 6.233960127160637e-05, + "loss": 0.1844, + "step": 53480 + }, + { + "epoch": 2.065330707749334, + "grad_norm": 0.6968696117401123, + "learning_rate": 6.231386025200458e-05, + "loss": 0.1229, + "step": 53490 + }, + { + "epoch": 2.0657168230433607, + "grad_norm": 1.4328505992889404, + "learning_rate": 6.22881192324028e-05, + "loss": 0.1921, + "step": 53500 + }, + { + "epoch": 2.0661029383373877, + "grad_norm": 0.0647716224193573, + "learning_rate": 6.226237821280101e-05, + "loss": 0.1157, + "step": 53510 + }, + { + "epoch": 2.066489053631414, + "grad_norm": 0.1405775398015976, + "learning_rate": 6.223663719319922e-05, + "loss": 0.1593, + "step": 53520 + }, + { + "epoch": 2.066875168925441, + "grad_norm": 0.8191878795623779, + "learning_rate": 6.221089617359744e-05, + "loss": 0.3237, + "step": 53530 + }, + { + "epoch": 2.0672612842194678, + "grad_norm": 0.16709165275096893, + "learning_rate": 6.218515515399565e-05, + "loss": 0.1098, + "step": 53540 + }, + { + "epoch": 2.0676473995134947, + "grad_norm": 1.2044661045074463, + "learning_rate": 6.215941413439386e-05, + "loss": 0.2014, + "step": 53550 + }, + { + "epoch": 2.0680335148075217, + "grad_norm": 0.7808303236961365, + "learning_rate": 6.213367311479208e-05, + "loss": 0.1122, + "step": 53560 + }, + { + "epoch": 2.0684196301015483, + "grad_norm": 0.8508942723274231, + "learning_rate": 6.210793209519029e-05, + "loss": 0.2162, + "step": 53570 + }, + { + "epoch": 2.0688057453955753, + "grad_norm": 0.1512947827577591, + "learning_rate": 6.20821910755885e-05, + "loss": 0.1382, + "step": 53580 + }, + { + "epoch": 2.069191860689602, + "grad_norm": 0.10546062141656876, + "learning_rate": 6.205645005598673e-05, + "loss": 0.0962, + "step": 53590 + }, + { + "epoch": 2.069577975983629, + "grad_norm": 0.7787978053092957, + "learning_rate": 6.203070903638494e-05, + "loss": 0.2142, + "step": 53600 + }, + { + "epoch": 2.0699640912776553, + 
"grad_norm": 0.1247776597738266, + "learning_rate": 6.200496801678314e-05, + "loss": 0.1788, + "step": 53610 + }, + { + "epoch": 2.0703502065716823, + "grad_norm": 1.5636411905288696, + "learning_rate": 6.197922699718136e-05, + "loss": 0.2244, + "step": 53620 + }, + { + "epoch": 2.0707363218657093, + "grad_norm": 1.5239825248718262, + "learning_rate": 6.195348597757957e-05, + "loss": 0.152, + "step": 53630 + }, + { + "epoch": 2.071122437159736, + "grad_norm": 0.6676948070526123, + "learning_rate": 6.192774495797778e-05, + "loss": 0.1007, + "step": 53640 + }, + { + "epoch": 2.071508552453763, + "grad_norm": 2.0146231651306152, + "learning_rate": 6.190200393837601e-05, + "loss": 0.1881, + "step": 53650 + }, + { + "epoch": 2.0718946677477894, + "grad_norm": 1.3102779388427734, + "learning_rate": 6.187626291877422e-05, + "loss": 0.2036, + "step": 53660 + }, + { + "epoch": 2.0722807830418164, + "grad_norm": 1.230490803718567, + "learning_rate": 6.185052189917244e-05, + "loss": 0.235, + "step": 53670 + }, + { + "epoch": 2.072666898335843, + "grad_norm": 1.2379542589187622, + "learning_rate": 6.182478087957064e-05, + "loss": 0.2005, + "step": 53680 + }, + { + "epoch": 2.07305301362987, + "grad_norm": 0.9768775701522827, + "learning_rate": 6.179903985996885e-05, + "loss": 0.1885, + "step": 53690 + }, + { + "epoch": 2.0734391289238965, + "grad_norm": 1.2973617315292358, + "learning_rate": 6.177329884036706e-05, + "loss": 0.1218, + "step": 53700 + }, + { + "epoch": 2.0738252442179235, + "grad_norm": 0.6314378380775452, + "learning_rate": 6.174755782076528e-05, + "loss": 0.2194, + "step": 53710 + }, + { + "epoch": 2.0742113595119505, + "grad_norm": 1.7972043752670288, + "learning_rate": 6.17218168011635e-05, + "loss": 0.2668, + "step": 53720 + }, + { + "epoch": 2.074597474805977, + "grad_norm": 1.1307156085968018, + "learning_rate": 6.169607578156172e-05, + "loss": 0.2094, + "step": 53730 + }, + { + "epoch": 2.074983590100004, + "grad_norm": 0.07750687003135681, + "learning_rate": 6.167033476195993e-05, + "loss": 0.1556, + "step": 53740 + }, + { + "epoch": 2.0753697053940305, + "grad_norm": 0.33502569794654846, + "learning_rate": 6.164459374235813e-05, + "loss": 0.1849, + "step": 53750 + }, + { + "epoch": 2.0757558206880575, + "grad_norm": 0.462332546710968, + "learning_rate": 6.161885272275634e-05, + "loss": 0.1704, + "step": 53760 + }, + { + "epoch": 2.076141935982084, + "grad_norm": 0.7019705772399902, + "learning_rate": 6.159311170315456e-05, + "loss": 0.155, + "step": 53770 + }, + { + "epoch": 2.076528051276111, + "grad_norm": 1.2807412147521973, + "learning_rate": 6.156737068355278e-05, + "loss": 0.2956, + "step": 53780 + }, + { + "epoch": 2.076914166570138, + "grad_norm": 0.9197677373886108, + "learning_rate": 6.1541629663951e-05, + "loss": 0.0723, + "step": 53790 + }, + { + "epoch": 2.0773002818641646, + "grad_norm": 0.15677478909492493, + "learning_rate": 6.151588864434921e-05, + "loss": 0.2835, + "step": 53800 + }, + { + "epoch": 2.0776863971581916, + "grad_norm": 1.7673814296722412, + "learning_rate": 6.149014762474742e-05, + "loss": 0.1127, + "step": 53810 + }, + { + "epoch": 2.078072512452218, + "grad_norm": 1.9662418365478516, + "learning_rate": 6.146440660514562e-05, + "loss": 0.1314, + "step": 53820 + }, + { + "epoch": 2.078458627746245, + "grad_norm": 0.11253755539655685, + "learning_rate": 6.143866558554384e-05, + "loss": 0.2855, + "step": 53830 + }, + { + "epoch": 2.0788447430402717, + "grad_norm": 2.4275155067443848, + "learning_rate": 6.141292456594206e-05, + "loss": 0.2794, + 
"step": 53840 + }, + { + "epoch": 2.0792308583342987, + "grad_norm": 0.4845966100692749, + "learning_rate": 6.138718354634028e-05, + "loss": 0.1129, + "step": 53850 + }, + { + "epoch": 2.079616973628325, + "grad_norm": 0.2127775400876999, + "learning_rate": 6.136144252673849e-05, + "loss": 0.1089, + "step": 53860 + }, + { + "epoch": 2.080003088922352, + "grad_norm": 1.554506778717041, + "learning_rate": 6.13357015071367e-05, + "loss": 0.1196, + "step": 53870 + }, + { + "epoch": 2.080389204216379, + "grad_norm": 0.08301983028650284, + "learning_rate": 6.130996048753492e-05, + "loss": 0.3858, + "step": 53880 + }, + { + "epoch": 2.0807753195104057, + "grad_norm": 1.0547988414764404, + "learning_rate": 6.128421946793313e-05, + "loss": 0.1348, + "step": 53890 + }, + { + "epoch": 2.0811614348044327, + "grad_norm": 0.3954383432865143, + "learning_rate": 6.125847844833134e-05, + "loss": 0.1347, + "step": 53900 + }, + { + "epoch": 2.0815475500984593, + "grad_norm": 0.25809749960899353, + "learning_rate": 6.123273742872956e-05, + "loss": 0.4701, + "step": 53910 + }, + { + "epoch": 2.0819336653924863, + "grad_norm": 0.9337195754051208, + "learning_rate": 6.120699640912777e-05, + "loss": 0.1335, + "step": 53920 + }, + { + "epoch": 2.082319780686513, + "grad_norm": 0.9139271378517151, + "learning_rate": 6.118125538952598e-05, + "loss": 0.1095, + "step": 53930 + }, + { + "epoch": 2.08270589598054, + "grad_norm": 0.31789037585258484, + "learning_rate": 6.11555143699242e-05, + "loss": 0.1116, + "step": 53940 + }, + { + "epoch": 2.083092011274567, + "grad_norm": 1.4585286378860474, + "learning_rate": 6.112977335032241e-05, + "loss": 0.2283, + "step": 53950 + }, + { + "epoch": 2.0834781265685933, + "grad_norm": 1.2569290399551392, + "learning_rate": 6.110403233072062e-05, + "loss": 0.238, + "step": 53960 + }, + { + "epoch": 2.0838642418626203, + "grad_norm": 0.08168485015630722, + "learning_rate": 6.107829131111884e-05, + "loss": 0.0965, + "step": 53970 + }, + { + "epoch": 2.084250357156647, + "grad_norm": 0.42119330167770386, + "learning_rate": 6.105255029151705e-05, + "loss": 0.288, + "step": 53980 + }, + { + "epoch": 2.084636472450674, + "grad_norm": 0.04520781710743904, + "learning_rate": 6.102680927191526e-05, + "loss": 0.1616, + "step": 53990 + }, + { + "epoch": 2.0850225877447004, + "grad_norm": 0.9019898176193237, + "learning_rate": 6.100106825231348e-05, + "loss": 0.1313, + "step": 54000 + }, + { + "epoch": 2.0854087030387274, + "grad_norm": 2.3572826385498047, + "learning_rate": 6.097532723271169e-05, + "loss": 0.219, + "step": 54010 + }, + { + "epoch": 2.0857948183327544, + "grad_norm": 0.6147291660308838, + "learning_rate": 6.0949586213109896e-05, + "loss": 0.1005, + "step": 54020 + }, + { + "epoch": 2.086180933626781, + "grad_norm": 0.3989221751689911, + "learning_rate": 6.092384519350812e-05, + "loss": 0.1923, + "step": 54030 + }, + { + "epoch": 2.086567048920808, + "grad_norm": 0.04854296147823334, + "learning_rate": 6.089810417390634e-05, + "loss": 0.3081, + "step": 54040 + }, + { + "epoch": 2.0869531642148345, + "grad_norm": 1.1020113229751587, + "learning_rate": 6.087236315430454e-05, + "loss": 0.1325, + "step": 54050 + }, + { + "epoch": 2.0873392795088614, + "grad_norm": 1.2404685020446777, + "learning_rate": 6.0846622134702756e-05, + "loss": 0.2657, + "step": 54060 + }, + { + "epoch": 2.087725394802888, + "grad_norm": 0.5539906620979309, + "learning_rate": 6.082088111510097e-05, + "loss": 0.1379, + "step": 54070 + }, + { + "epoch": 2.088111510096915, + "grad_norm": 0.550548791885376, + 
"learning_rate": 6.079514009549918e-05, + "loss": 0.2004, + "step": 54080 + }, + { + "epoch": 2.0884976253909415, + "grad_norm": 0.5012397766113281, + "learning_rate": 6.07693990758974e-05, + "loss": 0.1582, + "step": 54090 + }, + { + "epoch": 2.0888837406849685, + "grad_norm": 0.7319992780685425, + "learning_rate": 6.0743658056295617e-05, + "loss": 0.1923, + "step": 54100 + }, + { + "epoch": 2.0892698559789955, + "grad_norm": 0.05006573721766472, + "learning_rate": 6.071791703669383e-05, + "loss": 0.2457, + "step": 54110 + }, + { + "epoch": 2.089655971273022, + "grad_norm": 0.06044507771730423, + "learning_rate": 6.0692176017092036e-05, + "loss": 0.1328, + "step": 54120 + }, + { + "epoch": 2.090042086567049, + "grad_norm": 1.4530203342437744, + "learning_rate": 6.066643499749025e-05, + "loss": 0.1344, + "step": 54130 + }, + { + "epoch": 2.0904282018610756, + "grad_norm": 1.0805295705795288, + "learning_rate": 6.064069397788846e-05, + "loss": 0.137, + "step": 54140 + }, + { + "epoch": 2.0908143171551026, + "grad_norm": 0.7313231825828552, + "learning_rate": 6.061495295828668e-05, + "loss": 0.1859, + "step": 54150 + }, + { + "epoch": 2.091200432449129, + "grad_norm": 1.4634814262390137, + "learning_rate": 6.0589211938684896e-05, + "loss": 0.1396, + "step": 54160 + }, + { + "epoch": 2.091586547743156, + "grad_norm": 1.1281847953796387, + "learning_rate": 6.056347091908311e-05, + "loss": 0.1935, + "step": 54170 + }, + { + "epoch": 2.091972663037183, + "grad_norm": 0.3002813756465912, + "learning_rate": 6.053772989948132e-05, + "loss": 0.2053, + "step": 54180 + }, + { + "epoch": 2.0923587783312096, + "grad_norm": 1.521639347076416, + "learning_rate": 6.0511988879879536e-05, + "loss": 0.2228, + "step": 54190 + }, + { + "epoch": 2.0927448936252366, + "grad_norm": 1.6338810920715332, + "learning_rate": 6.048624786027774e-05, + "loss": 0.0525, + "step": 54200 + }, + { + "epoch": 2.093131008919263, + "grad_norm": 1.9877723455429077, + "learning_rate": 6.0460506840675956e-05, + "loss": 0.3764, + "step": 54210 + }, + { + "epoch": 2.09351712421329, + "grad_norm": 1.1026666164398193, + "learning_rate": 6.0434765821074176e-05, + "loss": 0.0845, + "step": 54220 + }, + { + "epoch": 2.0939032395073167, + "grad_norm": 0.17072628438472748, + "learning_rate": 6.040902480147239e-05, + "loss": 0.2619, + "step": 54230 + }, + { + "epoch": 2.0942893548013437, + "grad_norm": 1.0765973329544067, + "learning_rate": 6.03832837818706e-05, + "loss": 0.2227, + "step": 54240 + }, + { + "epoch": 2.0946754700953703, + "grad_norm": 0.10422563552856445, + "learning_rate": 6.0357542762268816e-05, + "loss": 0.1371, + "step": 54250 + }, + { + "epoch": 2.0950615853893972, + "grad_norm": 0.7437000870704651, + "learning_rate": 6.033180174266703e-05, + "loss": 0.0981, + "step": 54260 + }, + { + "epoch": 2.0954477006834242, + "grad_norm": 0.12045181542634964, + "learning_rate": 6.0306060723065236e-05, + "loss": 0.1583, + "step": 54270 + }, + { + "epoch": 2.0958338159774508, + "grad_norm": 0.5264570713043213, + "learning_rate": 6.028031970346346e-05, + "loss": 0.2561, + "step": 54280 + }, + { + "epoch": 2.0962199312714778, + "grad_norm": 0.8104095458984375, + "learning_rate": 6.0254578683861676e-05, + "loss": 0.1338, + "step": 54290 + }, + { + "epoch": 2.0966060465655043, + "grad_norm": 1.8734989166259766, + "learning_rate": 6.022883766425988e-05, + "loss": 0.1911, + "step": 54300 + }, + { + "epoch": 2.0969921618595313, + "grad_norm": 0.09730927646160126, + "learning_rate": 6.0203096644658096e-05, + "loss": 0.2272, + "step": 54310 + 
}, + { + "epoch": 2.097378277153558, + "grad_norm": 2.5745980739593506, + "learning_rate": 6.017735562505631e-05, + "loss": 0.2252, + "step": 54320 + }, + { + "epoch": 2.097764392447585, + "grad_norm": 0.4371737539768219, + "learning_rate": 6.015161460545452e-05, + "loss": 0.1397, + "step": 54330 + }, + { + "epoch": 2.098150507741612, + "grad_norm": 0.267517626285553, + "learning_rate": 6.012587358585274e-05, + "loss": 0.0745, + "step": 54340 + }, + { + "epoch": 2.0985366230356384, + "grad_norm": 0.8187986016273499, + "learning_rate": 6.0100132566250956e-05, + "loss": 0.2474, + "step": 54350 + }, + { + "epoch": 2.0989227383296654, + "grad_norm": 1.1416966915130615, + "learning_rate": 6.007439154664917e-05, + "loss": 0.1445, + "step": 54360 + }, + { + "epoch": 2.099308853623692, + "grad_norm": 0.4858175814151764, + "learning_rate": 6.0048650527047376e-05, + "loss": 0.1115, + "step": 54370 + }, + { + "epoch": 2.099694968917719, + "grad_norm": 0.17463591694831848, + "learning_rate": 6.002290950744559e-05, + "loss": 0.128, + "step": 54380 + }, + { + "epoch": 2.1000810842117454, + "grad_norm": 1.2394402027130127, + "learning_rate": 5.99971684878438e-05, + "loss": 0.2724, + "step": 54390 + }, + { + "epoch": 2.1004671995057724, + "grad_norm": 0.2638779282569885, + "learning_rate": 5.997142746824202e-05, + "loss": 0.0761, + "step": 54400 + }, + { + "epoch": 2.1008533147997994, + "grad_norm": 0.34836921095848083, + "learning_rate": 5.9945686448640236e-05, + "loss": 0.1303, + "step": 54410 + }, + { + "epoch": 2.101239430093826, + "grad_norm": 1.0886906385421753, + "learning_rate": 5.991994542903845e-05, + "loss": 0.1448, + "step": 54420 + }, + { + "epoch": 2.101625545387853, + "grad_norm": 1.4876662492752075, + "learning_rate": 5.989420440943666e-05, + "loss": 0.1355, + "step": 54430 + }, + { + "epoch": 2.1020116606818795, + "grad_norm": 0.5606863498687744, + "learning_rate": 5.986846338983487e-05, + "loss": 0.2719, + "step": 54440 + }, + { + "epoch": 2.1023977759759065, + "grad_norm": 2.005791664123535, + "learning_rate": 5.984272237023308e-05, + "loss": 0.2387, + "step": 54450 + }, + { + "epoch": 2.102783891269933, + "grad_norm": 1.1652408838272095, + "learning_rate": 5.9816981350631296e-05, + "loss": 0.2491, + "step": 54460 + }, + { + "epoch": 2.10317000656396, + "grad_norm": 1.9928478002548218, + "learning_rate": 5.9791240331029516e-05, + "loss": 0.2429, + "step": 54470 + }, + { + "epoch": 2.103556121857987, + "grad_norm": 1.4620413780212402, + "learning_rate": 5.976549931142773e-05, + "loss": 0.2706, + "step": 54480 + }, + { + "epoch": 2.1039422371520136, + "grad_norm": 0.40234237909317017, + "learning_rate": 5.973975829182594e-05, + "loss": 0.1181, + "step": 54490 + }, + { + "epoch": 2.1043283524460406, + "grad_norm": 0.2635735273361206, + "learning_rate": 5.9714017272224156e-05, + "loss": 0.1217, + "step": 54500 + }, + { + "epoch": 2.104714467740067, + "grad_norm": 0.5525489449501038, + "learning_rate": 5.968827625262237e-05, + "loss": 0.2949, + "step": 54510 + }, + { + "epoch": 2.105100583034094, + "grad_norm": 1.1089653968811035, + "learning_rate": 5.9662535233020576e-05, + "loss": 0.1418, + "step": 54520 + }, + { + "epoch": 2.1054866983281206, + "grad_norm": 0.21478118002414703, + "learning_rate": 5.96367942134188e-05, + "loss": 0.0913, + "step": 54530 + }, + { + "epoch": 2.1058728136221476, + "grad_norm": 1.0484806299209595, + "learning_rate": 5.9611053193817016e-05, + "loss": 0.1215, + "step": 54540 + }, + { + "epoch": 2.106258928916174, + "grad_norm": 2.487285852432251, + 
"learning_rate": 5.958531217421522e-05, + "loss": 0.2268, + "step": 54550 + }, + { + "epoch": 2.106645044210201, + "grad_norm": 5.35322904586792, + "learning_rate": 5.9559571154613436e-05, + "loss": 0.0894, + "step": 54560 + }, + { + "epoch": 2.107031159504228, + "grad_norm": 3.2365424633026123, + "learning_rate": 5.953383013501165e-05, + "loss": 0.3387, + "step": 54570 + }, + { + "epoch": 2.1074172747982547, + "grad_norm": 0.9013198614120483, + "learning_rate": 5.950808911540986e-05, + "loss": 0.103, + "step": 54580 + }, + { + "epoch": 2.1078033900922817, + "grad_norm": 0.7987234592437744, + "learning_rate": 5.948234809580808e-05, + "loss": 0.1964, + "step": 54590 + }, + { + "epoch": 2.1081895053863082, + "grad_norm": 0.6315350532531738, + "learning_rate": 5.9456607076206296e-05, + "loss": 0.1523, + "step": 54600 + }, + { + "epoch": 2.108575620680335, + "grad_norm": 2.4431264400482178, + "learning_rate": 5.943086605660451e-05, + "loss": 0.255, + "step": 54610 + }, + { + "epoch": 2.1089617359743618, + "grad_norm": 1.1002070903778076, + "learning_rate": 5.9405125037002715e-05, + "loss": 0.3092, + "step": 54620 + }, + { + "epoch": 2.1093478512683888, + "grad_norm": 1.1607320308685303, + "learning_rate": 5.937938401740093e-05, + "loss": 0.1338, + "step": 54630 + }, + { + "epoch": 2.1097339665624157, + "grad_norm": 1.9413435459136963, + "learning_rate": 5.935364299779914e-05, + "loss": 0.2128, + "step": 54640 + }, + { + "epoch": 2.1101200818564423, + "grad_norm": 1.6216448545455933, + "learning_rate": 5.932790197819736e-05, + "loss": 0.2688, + "step": 54650 + }, + { + "epoch": 2.1105061971504693, + "grad_norm": 0.9825085997581482, + "learning_rate": 5.9302160958595576e-05, + "loss": 0.146, + "step": 54660 + }, + { + "epoch": 2.110892312444496, + "grad_norm": 2.0620877742767334, + "learning_rate": 5.927641993899379e-05, + "loss": 0.1987, + "step": 54670 + }, + { + "epoch": 2.111278427738523, + "grad_norm": 0.6155973076820374, + "learning_rate": 5.9250678919392e-05, + "loss": 0.0886, + "step": 54680 + }, + { + "epoch": 2.1116645430325494, + "grad_norm": 0.08519631624221802, + "learning_rate": 5.922493789979021e-05, + "loss": 0.1087, + "step": 54690 + }, + { + "epoch": 2.1120506583265763, + "grad_norm": 1.9819930791854858, + "learning_rate": 5.919919688018842e-05, + "loss": 0.2588, + "step": 54700 + }, + { + "epoch": 2.112436773620603, + "grad_norm": 0.32515060901641846, + "learning_rate": 5.9173455860586635e-05, + "loss": 0.2297, + "step": 54710 + }, + { + "epoch": 2.11282288891463, + "grad_norm": 2.5351369380950928, + "learning_rate": 5.9147714840984855e-05, + "loss": 0.1391, + "step": 54720 + }, + { + "epoch": 2.113209004208657, + "grad_norm": 0.3489625155925751, + "learning_rate": 5.912197382138307e-05, + "loss": 0.1012, + "step": 54730 + }, + { + "epoch": 2.1135951195026834, + "grad_norm": 0.4030207693576813, + "learning_rate": 5.909623280178128e-05, + "loss": 0.2788, + "step": 54740 + }, + { + "epoch": 2.1139812347967104, + "grad_norm": 1.3358521461486816, + "learning_rate": 5.9070491782179495e-05, + "loss": 0.4126, + "step": 54750 + }, + { + "epoch": 2.114367350090737, + "grad_norm": 1.6924939155578613, + "learning_rate": 5.90447507625777e-05, + "loss": 0.1319, + "step": 54760 + }, + { + "epoch": 2.114753465384764, + "grad_norm": 0.08663685619831085, + "learning_rate": 5.9019009742975915e-05, + "loss": 0.3066, + "step": 54770 + }, + { + "epoch": 2.1151395806787905, + "grad_norm": 0.2018699198961258, + "learning_rate": 5.899326872337414e-05, + "loss": 0.1316, + "step": 54780 + }, + { + 
"epoch": 2.1155256959728175, + "grad_norm": 1.8767002820968628, + "learning_rate": 5.896752770377235e-05, + "loss": 0.2638, + "step": 54790 + }, + { + "epoch": 2.1159118112668445, + "grad_norm": 2.719196081161499, + "learning_rate": 5.894178668417056e-05, + "loss": 0.166, + "step": 54800 + }, + { + "epoch": 2.116297926560871, + "grad_norm": 1.5541603565216064, + "learning_rate": 5.8916045664568775e-05, + "loss": 0.1502, + "step": 54810 + }, + { + "epoch": 2.116684041854898, + "grad_norm": 2.0174572467803955, + "learning_rate": 5.889030464496699e-05, + "loss": 0.2778, + "step": 54820 + }, + { + "epoch": 2.1170701571489245, + "grad_norm": 1.4575814008712769, + "learning_rate": 5.88645636253652e-05, + "loss": 0.2026, + "step": 54830 + }, + { + "epoch": 2.1174562724429515, + "grad_norm": 2.583587646484375, + "learning_rate": 5.883882260576342e-05, + "loss": 0.1695, + "step": 54840 + }, + { + "epoch": 2.117842387736978, + "grad_norm": 1.3352335691452026, + "learning_rate": 5.8813081586161635e-05, + "loss": 0.2557, + "step": 54850 + }, + { + "epoch": 2.118228503031005, + "grad_norm": 1.0539675951004028, + "learning_rate": 5.878734056655985e-05, + "loss": 0.2358, + "step": 54860 + }, + { + "epoch": 2.118614618325032, + "grad_norm": 1.6472233533859253, + "learning_rate": 5.8761599546958055e-05, + "loss": 0.1166, + "step": 54870 + }, + { + "epoch": 2.1190007336190586, + "grad_norm": 1.6128703355789185, + "learning_rate": 5.873585852735627e-05, + "loss": 0.1584, + "step": 54880 + }, + { + "epoch": 2.1193868489130856, + "grad_norm": 2.4982826709747314, + "learning_rate": 5.871011750775448e-05, + "loss": 0.2192, + "step": 54890 + }, + { + "epoch": 2.119772964207112, + "grad_norm": 1.7372159957885742, + "learning_rate": 5.86843764881527e-05, + "loss": 0.1824, + "step": 54900 + }, + { + "epoch": 2.120159079501139, + "grad_norm": 1.9102532863616943, + "learning_rate": 5.8658635468550915e-05, + "loss": 0.1234, + "step": 54910 + }, + { + "epoch": 2.1205451947951657, + "grad_norm": 0.9978908896446228, + "learning_rate": 5.863289444894913e-05, + "loss": 0.2699, + "step": 54920 + }, + { + "epoch": 2.1209313100891927, + "grad_norm": 0.8557146787643433, + "learning_rate": 5.860715342934734e-05, + "loss": 0.0984, + "step": 54930 + }, + { + "epoch": 2.1213174253832197, + "grad_norm": 2.5358450412750244, + "learning_rate": 5.858141240974555e-05, + "loss": 0.2159, + "step": 54940 + }, + { + "epoch": 2.121703540677246, + "grad_norm": 2.588324785232544, + "learning_rate": 5.855567139014376e-05, + "loss": 0.1311, + "step": 54950 + }, + { + "epoch": 2.122089655971273, + "grad_norm": 3.9097461700439453, + "learning_rate": 5.852993037054199e-05, + "loss": 0.2061, + "step": 54960 + }, + { + "epoch": 2.1224757712652997, + "grad_norm": 0.992247998714447, + "learning_rate": 5.8504189350940195e-05, + "loss": 0.0774, + "step": 54970 + }, + { + "epoch": 2.1228618865593267, + "grad_norm": 0.9253148436546326, + "learning_rate": 5.847844833133841e-05, + "loss": 0.1274, + "step": 54980 + }, + { + "epoch": 2.1232480018533533, + "grad_norm": 0.8236201405525208, + "learning_rate": 5.845270731173662e-05, + "loss": 0.0951, + "step": 54990 + }, + { + "epoch": 2.1236341171473803, + "grad_norm": 0.9370753169059753, + "learning_rate": 5.8426966292134835e-05, + "loss": 0.1832, + "step": 55000 + }, + { + "epoch": 2.124020232441407, + "grad_norm": 2.7415149211883545, + "learning_rate": 5.840122527253304e-05, + "loss": 0.1798, + "step": 55010 + }, + { + "epoch": 2.124406347735434, + "grad_norm": 1.4576952457427979, + "learning_rate": 
5.8375484252931255e-05, + "loss": 0.2384, + "step": 55020 + }, + { + "epoch": 2.124792463029461, + "grad_norm": 2.303542137145996, + "learning_rate": 5.834974323332948e-05, + "loss": 0.2166, + "step": 55030 + }, + { + "epoch": 2.1251785783234873, + "grad_norm": 1.1065007448196411, + "learning_rate": 5.832400221372769e-05, + "loss": 0.1723, + "step": 55040 + }, + { + "epoch": 2.1255646936175143, + "grad_norm": 4.247042655944824, + "learning_rate": 5.82982611941259e-05, + "loss": 0.3005, + "step": 55050 + }, + { + "epoch": 2.125950808911541, + "grad_norm": 1.3860160112380981, + "learning_rate": 5.8272520174524115e-05, + "loss": 0.2504, + "step": 55060 + }, + { + "epoch": 2.126336924205568, + "grad_norm": 0.2923658788204193, + "learning_rate": 5.824677915492233e-05, + "loss": 0.1933, + "step": 55070 + }, + { + "epoch": 2.1267230394995944, + "grad_norm": 2.0818326473236084, + "learning_rate": 5.822103813532054e-05, + "loss": 0.2383, + "step": 55080 + }, + { + "epoch": 2.1271091547936214, + "grad_norm": 2.5381951332092285, + "learning_rate": 5.819529711571876e-05, + "loss": 0.1791, + "step": 55090 + }, + { + "epoch": 2.127495270087648, + "grad_norm": 0.7398497462272644, + "learning_rate": 5.8169556096116975e-05, + "loss": 0.1255, + "step": 55100 + }, + { + "epoch": 2.127881385381675, + "grad_norm": 1.7903372049331665, + "learning_rate": 5.814381507651518e-05, + "loss": 0.2066, + "step": 55110 + }, + { + "epoch": 2.128267500675702, + "grad_norm": 0.8950181007385254, + "learning_rate": 5.8118074056913395e-05, + "loss": 0.2158, + "step": 55120 + }, + { + "epoch": 2.1286536159697285, + "grad_norm": 0.39259612560272217, + "learning_rate": 5.809233303731161e-05, + "loss": 0.0752, + "step": 55130 + }, + { + "epoch": 2.1290397312637555, + "grad_norm": 1.3042824268341064, + "learning_rate": 5.806659201770982e-05, + "loss": 0.2185, + "step": 55140 + }, + { + "epoch": 2.129425846557782, + "grad_norm": 2.0668983459472656, + "learning_rate": 5.804085099810804e-05, + "loss": 0.1751, + "step": 55150 + }, + { + "epoch": 2.129811961851809, + "grad_norm": 0.634894609451294, + "learning_rate": 5.8015109978506255e-05, + "loss": 0.1275, + "step": 55160 + }, + { + "epoch": 2.1301980771458355, + "grad_norm": 0.9348855018615723, + "learning_rate": 5.798936895890447e-05, + "loss": 0.2011, + "step": 55170 + }, + { + "epoch": 2.1305841924398625, + "grad_norm": 0.686715841293335, + "learning_rate": 5.796362793930268e-05, + "loss": 0.1728, + "step": 55180 + }, + { + "epoch": 2.1309703077338895, + "grad_norm": 2.238306999206543, + "learning_rate": 5.793788691970089e-05, + "loss": 0.1944, + "step": 55190 + }, + { + "epoch": 2.131356423027916, + "grad_norm": 0.7057651281356812, + "learning_rate": 5.79121459000991e-05, + "loss": 0.1892, + "step": 55200 + }, + { + "epoch": 2.131742538321943, + "grad_norm": 2.050670862197876, + "learning_rate": 5.788640488049733e-05, + "loss": 0.1539, + "step": 55210 + }, + { + "epoch": 2.1321286536159696, + "grad_norm": 1.924126386642456, + "learning_rate": 5.7860663860895534e-05, + "loss": 0.3623, + "step": 55220 + }, + { + "epoch": 2.1325147689099966, + "grad_norm": 0.32087692618370056, + "learning_rate": 5.783492284129375e-05, + "loss": 0.1179, + "step": 55230 + }, + { + "epoch": 2.132900884204023, + "grad_norm": 2.266429901123047, + "learning_rate": 5.780918182169196e-05, + "loss": 0.1164, + "step": 55240 + }, + { + "epoch": 2.13328699949805, + "grad_norm": 0.5455263257026672, + "learning_rate": 5.7783440802090174e-05, + "loss": 0.1361, + "step": 55250 + }, + { + "epoch": 2.133673114792077, 
+ "grad_norm": 0.6196660399436951, + "learning_rate": 5.775769978248838e-05, + "loss": 0.1986, + "step": 55260 + }, + { + "epoch": 2.1340592300861037, + "grad_norm": 0.4529377222061157, + "learning_rate": 5.7731958762886594e-05, + "loss": 0.3009, + "step": 55270 + }, + { + "epoch": 2.1344453453801306, + "grad_norm": 0.44304555654525757, + "learning_rate": 5.770621774328482e-05, + "loss": 0.2522, + "step": 55280 + }, + { + "epoch": 2.134831460674157, + "grad_norm": 2.3878743648529053, + "learning_rate": 5.768047672368303e-05, + "loss": 0.1762, + "step": 55290 + }, + { + "epoch": 2.135217575968184, + "grad_norm": 1.3650730848312378, + "learning_rate": 5.765473570408124e-05, + "loss": 0.1596, + "step": 55300 + }, + { + "epoch": 2.1356036912622107, + "grad_norm": 1.0818227529525757, + "learning_rate": 5.7628994684479454e-05, + "loss": 0.0982, + "step": 55310 + }, + { + "epoch": 2.1359898065562377, + "grad_norm": 0.639480710029602, + "learning_rate": 5.760325366487767e-05, + "loss": 0.1165, + "step": 55320 + }, + { + "epoch": 2.1363759218502647, + "grad_norm": 0.9253720641136169, + "learning_rate": 5.7577512645275874e-05, + "loss": 0.3411, + "step": 55330 + }, + { + "epoch": 2.1367620371442912, + "grad_norm": 1.2035890817642212, + "learning_rate": 5.75517716256741e-05, + "loss": 0.2002, + "step": 55340 + }, + { + "epoch": 2.1371481524383182, + "grad_norm": 1.8806023597717285, + "learning_rate": 5.7526030606072314e-05, + "loss": 0.1834, + "step": 55350 + }, + { + "epoch": 2.137534267732345, + "grad_norm": 1.9944851398468018, + "learning_rate": 5.750028958647052e-05, + "loss": 0.2019, + "step": 55360 + }, + { + "epoch": 2.1379203830263718, + "grad_norm": 0.5148534774780273, + "learning_rate": 5.7474548566868734e-05, + "loss": 0.263, + "step": 55370 + }, + { + "epoch": 2.1383064983203983, + "grad_norm": 1.1325627565383911, + "learning_rate": 5.744880754726695e-05, + "loss": 0.1479, + "step": 55380 + }, + { + "epoch": 2.1386926136144253, + "grad_norm": 0.9628505706787109, + "learning_rate": 5.742306652766516e-05, + "loss": 0.1216, + "step": 55390 + }, + { + "epoch": 2.1390787289084523, + "grad_norm": 0.1893000602722168, + "learning_rate": 5.739732550806338e-05, + "loss": 0.161, + "step": 55400 + }, + { + "epoch": 2.139464844202479, + "grad_norm": 0.9227676391601562, + "learning_rate": 5.7371584488461594e-05, + "loss": 0.1336, + "step": 55410 + }, + { + "epoch": 2.139850959496506, + "grad_norm": 0.3534090220928192, + "learning_rate": 5.734584346885981e-05, + "loss": 0.1184, + "step": 55420 + }, + { + "epoch": 2.1402370747905324, + "grad_norm": 0.9937344193458557, + "learning_rate": 5.7320102449258014e-05, + "loss": 0.1053, + "step": 55430 + }, + { + "epoch": 2.1406231900845594, + "grad_norm": 1.7365370988845825, + "learning_rate": 5.729436142965623e-05, + "loss": 0.1863, + "step": 55440 + }, + { + "epoch": 2.141009305378586, + "grad_norm": 1.2345154285430908, + "learning_rate": 5.726862041005444e-05, + "loss": 0.3243, + "step": 55450 + }, + { + "epoch": 2.141395420672613, + "grad_norm": 0.36491262912750244, + "learning_rate": 5.724287939045266e-05, + "loss": 0.3046, + "step": 55460 + }, + { + "epoch": 2.1417815359666394, + "grad_norm": 0.6416808366775513, + "learning_rate": 5.7217138370850874e-05, + "loss": 0.1608, + "step": 55470 + }, + { + "epoch": 2.1421676512606664, + "grad_norm": 1.3553400039672852, + "learning_rate": 5.719139735124909e-05, + "loss": 0.2223, + "step": 55480 + }, + { + "epoch": 2.1425537665546934, + "grad_norm": 1.049273133277893, + "learning_rate": 5.71656563316473e-05, + 
"loss": 0.1196, + "step": 55490 + }, + { + "epoch": 2.14293988184872, + "grad_norm": 0.8493034243583679, + "learning_rate": 5.7139915312045514e-05, + "loss": 0.2165, + "step": 55500 + }, + { + "epoch": 2.143325997142747, + "grad_norm": 0.6411147117614746, + "learning_rate": 5.711417429244372e-05, + "loss": 0.1442, + "step": 55510 + }, + { + "epoch": 2.1437121124367735, + "grad_norm": 0.7366828322410583, + "learning_rate": 5.7088433272841934e-05, + "loss": 0.3625, + "step": 55520 + }, + { + "epoch": 2.1440982277308005, + "grad_norm": 0.7757991552352905, + "learning_rate": 5.706269225324016e-05, + "loss": 0.1393, + "step": 55530 + }, + { + "epoch": 2.144484343024827, + "grad_norm": 1.496213436126709, + "learning_rate": 5.703695123363837e-05, + "loss": 0.2351, + "step": 55540 + }, + { + "epoch": 2.144870458318854, + "grad_norm": 1.1395788192749023, + "learning_rate": 5.701121021403658e-05, + "loss": 0.1933, + "step": 55550 + }, + { + "epoch": 2.1452565736128806, + "grad_norm": 3.914621353149414, + "learning_rate": 5.6985469194434794e-05, + "loss": 0.2532, + "step": 55560 + }, + { + "epoch": 2.1456426889069076, + "grad_norm": 2.1900668144226074, + "learning_rate": 5.695972817483301e-05, + "loss": 0.2192, + "step": 55570 + }, + { + "epoch": 2.1460288042009346, + "grad_norm": 2.3883585929870605, + "learning_rate": 5.6933987155231214e-05, + "loss": 0.2819, + "step": 55580 + }, + { + "epoch": 2.146414919494961, + "grad_norm": 0.28431379795074463, + "learning_rate": 5.690824613562944e-05, + "loss": 0.2381, + "step": 55590 + }, + { + "epoch": 2.146801034788988, + "grad_norm": 0.39490944147109985, + "learning_rate": 5.6882505116027654e-05, + "loss": 0.1224, + "step": 55600 + }, + { + "epoch": 2.1471871500830146, + "grad_norm": 0.5814546346664429, + "learning_rate": 5.685676409642586e-05, + "loss": 0.0878, + "step": 55610 + }, + { + "epoch": 2.1475732653770416, + "grad_norm": 3.863250255584717, + "learning_rate": 5.6831023076824074e-05, + "loss": 0.2972, + "step": 55620 + }, + { + "epoch": 2.147959380671068, + "grad_norm": 1.5579304695129395, + "learning_rate": 5.680528205722229e-05, + "loss": 0.2035, + "step": 55630 + }, + { + "epoch": 2.148345495965095, + "grad_norm": 1.3953123092651367, + "learning_rate": 5.67795410376205e-05, + "loss": 0.2039, + "step": 55640 + }, + { + "epoch": 2.148731611259122, + "grad_norm": 0.30549386143684387, + "learning_rate": 5.675380001801872e-05, + "loss": 0.1311, + "step": 55650 + }, + { + "epoch": 2.1491177265531487, + "grad_norm": 1.704403281211853, + "learning_rate": 5.6728058998416934e-05, + "loss": 0.1683, + "step": 55660 + }, + { + "epoch": 2.1495038418471757, + "grad_norm": 0.47322070598602295, + "learning_rate": 5.670231797881515e-05, + "loss": 0.0995, + "step": 55670 + }, + { + "epoch": 2.1498899571412022, + "grad_norm": 0.8710082769393921, + "learning_rate": 5.6676576959213354e-05, + "loss": 0.1737, + "step": 55680 + }, + { + "epoch": 2.1502760724352292, + "grad_norm": 3.141096353530884, + "learning_rate": 5.665083593961157e-05, + "loss": 0.1693, + "step": 55690 + }, + { + "epoch": 2.1506621877292558, + "grad_norm": 1.8428922891616821, + "learning_rate": 5.662509492000978e-05, + "loss": 0.0868, + "step": 55700 + }, + { + "epoch": 2.1510483030232828, + "grad_norm": 0.3056959807872772, + "learning_rate": 5.6599353900408e-05, + "loss": 0.2256, + "step": 55710 + }, + { + "epoch": 2.1514344183173097, + "grad_norm": 0.14365683495998383, + "learning_rate": 5.6573612880806214e-05, + "loss": 0.2421, + "step": 55720 + }, + { + "epoch": 2.1518205336113363, + 
"grad_norm": 0.34138041734695435, + "learning_rate": 5.654787186120443e-05, + "loss": 0.2089, + "step": 55730 + }, + { + "epoch": 2.1522066489053633, + "grad_norm": 1.2336843013763428, + "learning_rate": 5.652213084160264e-05, + "loss": 0.2282, + "step": 55740 + }, + { + "epoch": 2.15259276419939, + "grad_norm": 0.121715247631073, + "learning_rate": 5.6496389822000854e-05, + "loss": 0.341, + "step": 55750 + }, + { + "epoch": 2.152978879493417, + "grad_norm": 1.271396279335022, + "learning_rate": 5.647064880239906e-05, + "loss": 0.346, + "step": 55760 + }, + { + "epoch": 2.1533649947874434, + "grad_norm": 0.39105209708213806, + "learning_rate": 5.6444907782797273e-05, + "loss": 0.2369, + "step": 55770 + }, + { + "epoch": 2.1537511100814704, + "grad_norm": 2.396703004837036, + "learning_rate": 5.6419166763195493e-05, + "loss": 0.1595, + "step": 55780 + }, + { + "epoch": 2.1541372253754973, + "grad_norm": 1.4576066732406616, + "learning_rate": 5.639342574359371e-05, + "loss": 0.1757, + "step": 55790 + }, + { + "epoch": 2.154523340669524, + "grad_norm": 0.5846558213233948, + "learning_rate": 5.636768472399192e-05, + "loss": 0.0961, + "step": 55800 + }, + { + "epoch": 2.154909455963551, + "grad_norm": 0.2891974449157715, + "learning_rate": 5.6341943704390133e-05, + "loss": 0.1381, + "step": 55810 + }, + { + "epoch": 2.1552955712575774, + "grad_norm": 1.4091805219650269, + "learning_rate": 5.631620268478835e-05, + "loss": 0.1934, + "step": 55820 + }, + { + "epoch": 2.1556816865516044, + "grad_norm": 0.5410944223403931, + "learning_rate": 5.629046166518655e-05, + "loss": 0.2414, + "step": 55830 + }, + { + "epoch": 2.156067801845631, + "grad_norm": 0.7418326735496521, + "learning_rate": 5.626472064558478e-05, + "loss": 0.0405, + "step": 55840 + }, + { + "epoch": 2.156453917139658, + "grad_norm": 0.03413806110620499, + "learning_rate": 5.6238979625982993e-05, + "loss": 0.1131, + "step": 55850 + }, + { + "epoch": 2.1568400324336845, + "grad_norm": 0.11657452583312988, + "learning_rate": 5.62132386063812e-05, + "loss": 0.2259, + "step": 55860 + }, + { + "epoch": 2.1572261477277115, + "grad_norm": 0.9248818755149841, + "learning_rate": 5.618749758677941e-05, + "loss": 0.1091, + "step": 55870 + }, + { + "epoch": 2.1576122630217385, + "grad_norm": 0.19540861248970032, + "learning_rate": 5.6161756567177627e-05, + "loss": 0.1831, + "step": 55880 + }, + { + "epoch": 2.157998378315765, + "grad_norm": 1.0880403518676758, + "learning_rate": 5.613601554757584e-05, + "loss": 0.2051, + "step": 55890 + }, + { + "epoch": 2.158384493609792, + "grad_norm": 0.766243577003479, + "learning_rate": 5.611027452797406e-05, + "loss": 0.123, + "step": 55900 + }, + { + "epoch": 2.1587706089038186, + "grad_norm": 0.7406583428382874, + "learning_rate": 5.608453350837227e-05, + "loss": 0.1137, + "step": 55910 + }, + { + "epoch": 2.1591567241978455, + "grad_norm": 0.5550261735916138, + "learning_rate": 5.605879248877049e-05, + "loss": 0.1978, + "step": 55920 + }, + { + "epoch": 2.159542839491872, + "grad_norm": 1.202231526374817, + "learning_rate": 5.603305146916869e-05, + "loss": 0.2157, + "step": 55930 + }, + { + "epoch": 2.159928954785899, + "grad_norm": 0.7214229702949524, + "learning_rate": 5.6007310449566906e-05, + "loss": 0.3131, + "step": 55940 + }, + { + "epoch": 2.160315070079926, + "grad_norm": 0.6656380295753479, + "learning_rate": 5.598156942996512e-05, + "loss": 0.1293, + "step": 55950 + }, + { + "epoch": 2.1607011853739526, + "grad_norm": 0.15865078568458557, + "learning_rate": 5.595582841036334e-05, + "loss": 
0.045, + "step": 55960 + }, + { + "epoch": 2.1610873006679796, + "grad_norm": 0.6749983429908752, + "learning_rate": 5.593008739076155e-05, + "loss": 0.1891, + "step": 55970 + }, + { + "epoch": 2.161473415962006, + "grad_norm": 1.1434985399246216, + "learning_rate": 5.5904346371159766e-05, + "loss": 0.0775, + "step": 55980 + }, + { + "epoch": 2.161859531256033, + "grad_norm": 1.1395485401153564, + "learning_rate": 5.587860535155798e-05, + "loss": 0.1444, + "step": 55990 + }, + { + "epoch": 2.1622456465500597, + "grad_norm": 0.44319289922714233, + "learning_rate": 5.5852864331956186e-05, + "loss": 0.2506, + "step": 56000 + }, + { + "epoch": 2.1626317618440867, + "grad_norm": 0.9017069935798645, + "learning_rate": 5.58271233123544e-05, + "loss": 0.2104, + "step": 56010 + }, + { + "epoch": 2.163017877138113, + "grad_norm": 2.420107126235962, + "learning_rate": 5.580138229275261e-05, + "loss": 0.2541, + "step": 56020 + }, + { + "epoch": 2.16340399243214, + "grad_norm": 0.5543047785758972, + "learning_rate": 5.577564127315083e-05, + "loss": 0.1408, + "step": 56030 + }, + { + "epoch": 2.163790107726167, + "grad_norm": 0.8099603652954102, + "learning_rate": 5.5749900253549046e-05, + "loss": 0.0781, + "step": 56040 + }, + { + "epoch": 2.1641762230201937, + "grad_norm": 0.972820520401001, + "learning_rate": 5.572415923394726e-05, + "loss": 0.0957, + "step": 56050 + }, + { + "epoch": 2.1645623383142207, + "grad_norm": 0.5426781177520752, + "learning_rate": 5.569841821434547e-05, + "loss": 0.1632, + "step": 56060 + }, + { + "epoch": 2.1649484536082473, + "grad_norm": 2.088747501373291, + "learning_rate": 5.5672677194743686e-05, + "loss": 0.143, + "step": 56070 + }, + { + "epoch": 2.1653345689022743, + "grad_norm": 0.3575989007949829, + "learning_rate": 5.564693617514189e-05, + "loss": 0.2365, + "step": 56080 + }, + { + "epoch": 2.165720684196301, + "grad_norm": 1.2159044742584229, + "learning_rate": 5.562119515554012e-05, + "loss": 0.2034, + "step": 56090 + }, + { + "epoch": 2.166106799490328, + "grad_norm": 1.2994232177734375, + "learning_rate": 5.559545413593833e-05, + "loss": 0.074, + "step": 56100 + }, + { + "epoch": 2.166492914784355, + "grad_norm": 0.2585364878177643, + "learning_rate": 5.556971311633654e-05, + "loss": 0.1147, + "step": 56110 + }, + { + "epoch": 2.1668790300783813, + "grad_norm": 0.18736127018928528, + "learning_rate": 5.554397209673475e-05, + "loss": 0.0895, + "step": 56120 + }, + { + "epoch": 2.1672651453724083, + "grad_norm": 0.43447959423065186, + "learning_rate": 5.5518231077132966e-05, + "loss": 0.1981, + "step": 56130 + }, + { + "epoch": 2.167651260666435, + "grad_norm": 1.2077672481536865, + "learning_rate": 5.549249005753118e-05, + "loss": 0.1432, + "step": 56140 + }, + { + "epoch": 2.168037375960462, + "grad_norm": 1.672919511795044, + "learning_rate": 5.54667490379294e-05, + "loss": 0.1381, + "step": 56150 + }, + { + "epoch": 2.1684234912544884, + "grad_norm": 0.6553566455841064, + "learning_rate": 5.544100801832761e-05, + "loss": 0.1089, + "step": 56160 + }, + { + "epoch": 2.1688096065485154, + "grad_norm": 0.4206780791282654, + "learning_rate": 5.5415266998725826e-05, + "loss": 0.1392, + "step": 56170 + }, + { + "epoch": 2.1691957218425424, + "grad_norm": 1.482874870300293, + "learning_rate": 5.538952597912403e-05, + "loss": 0.2027, + "step": 56180 + }, + { + "epoch": 2.169581837136569, + "grad_norm": 1.802695631980896, + "learning_rate": 5.5363784959522246e-05, + "loss": 0.2954, + "step": 56190 + }, + { + "epoch": 2.169967952430596, + "grad_norm": 
0.7268577218055725, + "learning_rate": 5.533804393992046e-05, + "loss": 0.182, + "step": 56200 + }, + { + "epoch": 2.1703540677246225, + "grad_norm": 0.604767918586731, + "learning_rate": 5.531230292031868e-05, + "loss": 0.1247, + "step": 56210 + }, + { + "epoch": 2.1707401830186495, + "grad_norm": 2.211203098297119, + "learning_rate": 5.528656190071689e-05, + "loss": 0.2143, + "step": 56220 + }, + { + "epoch": 2.171126298312676, + "grad_norm": 0.03701888397336006, + "learning_rate": 5.5260820881115106e-05, + "loss": 0.2246, + "step": 56230 + }, + { + "epoch": 2.171512413606703, + "grad_norm": 3.4111924171447754, + "learning_rate": 5.523507986151332e-05, + "loss": 0.3147, + "step": 56240 + }, + { + "epoch": 2.17189852890073, + "grad_norm": 0.509873628616333, + "learning_rate": 5.5209338841911526e-05, + "loss": 0.1482, + "step": 56250 + }, + { + "epoch": 2.1722846441947565, + "grad_norm": 1.0144810676574707, + "learning_rate": 5.518359782230974e-05, + "loss": 0.161, + "step": 56260 + }, + { + "epoch": 2.1726707594887835, + "grad_norm": 1.7236958742141724, + "learning_rate": 5.5157856802707966e-05, + "loss": 0.2863, + "step": 56270 + }, + { + "epoch": 2.17305687478281, + "grad_norm": 2.028493881225586, + "learning_rate": 5.513211578310617e-05, + "loss": 0.1263, + "step": 56280 + }, + { + "epoch": 2.173442990076837, + "grad_norm": 0.18114915490150452, + "learning_rate": 5.5106374763504386e-05, + "loss": 0.1941, + "step": 56290 + }, + { + "epoch": 2.1738291053708636, + "grad_norm": 2.091604471206665, + "learning_rate": 5.50806337439026e-05, + "loss": 0.1418, + "step": 56300 + }, + { + "epoch": 2.1742152206648906, + "grad_norm": 3.5671277046203613, + "learning_rate": 5.505489272430081e-05, + "loss": 0.1645, + "step": 56310 + }, + { + "epoch": 2.174601335958917, + "grad_norm": 2.093780040740967, + "learning_rate": 5.502915170469902e-05, + "loss": 0.2723, + "step": 56320 + }, + { + "epoch": 2.174987451252944, + "grad_norm": 1.0060350894927979, + "learning_rate": 5.500341068509723e-05, + "loss": 0.1432, + "step": 56330 + }, + { + "epoch": 2.175373566546971, + "grad_norm": 1.280118465423584, + "learning_rate": 5.497766966549546e-05, + "loss": 0.2152, + "step": 56340 + }, + { + "epoch": 2.1757596818409977, + "grad_norm": 0.14088940620422363, + "learning_rate": 5.4951928645893666e-05, + "loss": 0.2147, + "step": 56350 + }, + { + "epoch": 2.1761457971350247, + "grad_norm": 1.0671783685684204, + "learning_rate": 5.492618762629188e-05, + "loss": 0.1269, + "step": 56360 + }, + { + "epoch": 2.176531912429051, + "grad_norm": 0.20585323870182037, + "learning_rate": 5.490044660669009e-05, + "loss": 0.1455, + "step": 56370 + }, + { + "epoch": 2.176918027723078, + "grad_norm": 1.8759623765945435, + "learning_rate": 5.4874705587088306e-05, + "loss": 0.3711, + "step": 56380 + }, + { + "epoch": 2.1773041430171047, + "grad_norm": 1.1874949932098389, + "learning_rate": 5.484896456748652e-05, + "loss": 0.1062, + "step": 56390 + }, + { + "epoch": 2.1776902583111317, + "grad_norm": 1.0083370208740234, + "learning_rate": 5.482322354788474e-05, + "loss": 0.1121, + "step": 56400 + }, + { + "epoch": 2.1780763736051583, + "grad_norm": 0.7510607838630676, + "learning_rate": 5.479748252828295e-05, + "loss": 0.2032, + "step": 56410 + }, + { + "epoch": 2.1784624888991853, + "grad_norm": 0.307444304227829, + "learning_rate": 5.4771741508681166e-05, + "loss": 0.1786, + "step": 56420 + }, + { + "epoch": 2.1788486041932122, + "grad_norm": 0.1072758212685585, + "learning_rate": 5.474600048907937e-05, + "loss": 0.3205, + "step": 
56430 + }, + { + "epoch": 2.179234719487239, + "grad_norm": 0.78147292137146, + "learning_rate": 5.4720259469477586e-05, + "loss": 0.1552, + "step": 56440 + }, + { + "epoch": 2.179620834781266, + "grad_norm": 0.4287649393081665, + "learning_rate": 5.46945184498758e-05, + "loss": 0.0457, + "step": 56450 + }, + { + "epoch": 2.1800069500752923, + "grad_norm": 2.8946595191955566, + "learning_rate": 5.466877743027402e-05, + "loss": 0.1814, + "step": 56460 + }, + { + "epoch": 2.1803930653693193, + "grad_norm": 0.446044385433197, + "learning_rate": 5.464303641067223e-05, + "loss": 0.1898, + "step": 56470 + }, + { + "epoch": 2.180779180663346, + "grad_norm": 2.351010799407959, + "learning_rate": 5.4617295391070446e-05, + "loss": 0.1929, + "step": 56480 + }, + { + "epoch": 2.181165295957373, + "grad_norm": 1.1475882530212402, + "learning_rate": 5.459155437146866e-05, + "loss": 0.0972, + "step": 56490 + }, + { + "epoch": 2.1815514112514, + "grad_norm": 1.1613543033599854, + "learning_rate": 5.4565813351866865e-05, + "loss": 0.1397, + "step": 56500 + }, + { + "epoch": 2.1819375265454264, + "grad_norm": 1.2021968364715576, + "learning_rate": 5.454007233226508e-05, + "loss": 0.2538, + "step": 56510 + }, + { + "epoch": 2.1823236418394534, + "grad_norm": 1.1156634092330933, + "learning_rate": 5.4514331312663306e-05, + "loss": 0.1325, + "step": 56520 + }, + { + "epoch": 2.18270975713348, + "grad_norm": 3.0149824619293213, + "learning_rate": 5.448859029306151e-05, + "loss": 0.1596, + "step": 56530 + }, + { + "epoch": 2.183095872427507, + "grad_norm": 2.628236770629883, + "learning_rate": 5.4462849273459725e-05, + "loss": 0.3042, + "step": 56540 + }, + { + "epoch": 2.1834819877215335, + "grad_norm": 3.887352705001831, + "learning_rate": 5.443710825385794e-05, + "loss": 0.2112, + "step": 56550 + }, + { + "epoch": 2.1838681030155604, + "grad_norm": 1.9219342470169067, + "learning_rate": 5.441136723425615e-05, + "loss": 0.1709, + "step": 56560 + }, + { + "epoch": 2.1842542183095874, + "grad_norm": 1.5730615854263306, + "learning_rate": 5.438562621465436e-05, + "loss": 0.184, + "step": 56570 + }, + { + "epoch": 2.184640333603614, + "grad_norm": 1.306178331375122, + "learning_rate": 5.435988519505257e-05, + "loss": 0.2147, + "step": 56580 + }, + { + "epoch": 2.185026448897641, + "grad_norm": 0.3093883693218231, + "learning_rate": 5.43341441754508e-05, + "loss": 0.189, + "step": 56590 + }, + { + "epoch": 2.1854125641916675, + "grad_norm": 1.672884225845337, + "learning_rate": 5.4308403155849005e-05, + "loss": 0.1502, + "step": 56600 + }, + { + "epoch": 2.1857986794856945, + "grad_norm": 0.44754695892333984, + "learning_rate": 5.428266213624722e-05, + "loss": 0.1941, + "step": 56610 + }, + { + "epoch": 2.186184794779721, + "grad_norm": 0.3943333625793457, + "learning_rate": 5.425692111664543e-05, + "loss": 0.1494, + "step": 56620 + }, + { + "epoch": 2.186570910073748, + "grad_norm": 1.8399711847305298, + "learning_rate": 5.4231180097043645e-05, + "loss": 0.2462, + "step": 56630 + }, + { + "epoch": 2.186957025367775, + "grad_norm": 0.7934846878051758, + "learning_rate": 5.420543907744186e-05, + "loss": 0.1305, + "step": 56640 + }, + { + "epoch": 2.1873431406618016, + "grad_norm": 3.170630693435669, + "learning_rate": 5.417969805784008e-05, + "loss": 0.1825, + "step": 56650 + }, + { + "epoch": 2.1877292559558286, + "grad_norm": 1.4730361700057983, + "learning_rate": 5.415395703823829e-05, + "loss": 0.1929, + "step": 56660 + }, + { + "epoch": 2.188115371249855, + "grad_norm": 0.38193902373313904, + "learning_rate": 
5.41282160186365e-05, + "loss": 0.2001, + "step": 56670 + }, + { + "epoch": 2.188501486543882, + "grad_norm": 1.1314163208007812, + "learning_rate": 5.410247499903471e-05, + "loss": 0.1913, + "step": 56680 + }, + { + "epoch": 2.1888876018379086, + "grad_norm": 0.40177929401397705, + "learning_rate": 5.4076733979432925e-05, + "loss": 0.1646, + "step": 56690 + }, + { + "epoch": 2.1892737171319356, + "grad_norm": 4.792402744293213, + "learning_rate": 5.405099295983114e-05, + "loss": 0.2481, + "step": 56700 + }, + { + "epoch": 2.1896598324259626, + "grad_norm": 2.82281756401062, + "learning_rate": 5.402525194022936e-05, + "loss": 0.142, + "step": 56710 + }, + { + "epoch": 2.190045947719989, + "grad_norm": 2.4064247608184814, + "learning_rate": 5.399951092062757e-05, + "loss": 0.2333, + "step": 56720 + }, + { + "epoch": 2.190432063014016, + "grad_norm": 1.3720029592514038, + "learning_rate": 5.3973769901025785e-05, + "loss": 0.1812, + "step": 56730 + }, + { + "epoch": 2.1908181783080427, + "grad_norm": 0.5120772123336792, + "learning_rate": 5.3948028881424e-05, + "loss": 0.2093, + "step": 56740 + }, + { + "epoch": 2.1912042936020697, + "grad_norm": 1.755660057067871, + "learning_rate": 5.3922287861822205e-05, + "loss": 0.1866, + "step": 56750 + }, + { + "epoch": 2.1915904088960962, + "grad_norm": 0.6418548226356506, + "learning_rate": 5.389654684222042e-05, + "loss": 0.1122, + "step": 56760 + }, + { + "epoch": 2.1919765241901232, + "grad_norm": 0.29100701212882996, + "learning_rate": 5.3870805822618645e-05, + "loss": 0.1508, + "step": 56770 + }, + { + "epoch": 2.1923626394841498, + "grad_norm": 1.2336047887802124, + "learning_rate": 5.384506480301685e-05, + "loss": 0.2027, + "step": 56780 + }, + { + "epoch": 2.1927487547781768, + "grad_norm": 1.7961387634277344, + "learning_rate": 5.3819323783415065e-05, + "loss": 0.0698, + "step": 56790 + }, + { + "epoch": 2.1931348700722038, + "grad_norm": 0.45203906297683716, + "learning_rate": 5.379358276381328e-05, + "loss": 0.1201, + "step": 56800 + }, + { + "epoch": 2.1935209853662303, + "grad_norm": 2.4944546222686768, + "learning_rate": 5.376784174421149e-05, + "loss": 0.252, + "step": 56810 + }, + { + "epoch": 2.1939071006602573, + "grad_norm": 0.6468565464019775, + "learning_rate": 5.37421007246097e-05, + "loss": 0.3302, + "step": 56820 + }, + { + "epoch": 2.194293215954284, + "grad_norm": 0.6524060368537903, + "learning_rate": 5.371635970500791e-05, + "loss": 0.1588, + "step": 56830 + }, + { + "epoch": 2.194679331248311, + "grad_norm": 1.2810111045837402, + "learning_rate": 5.369061868540614e-05, + "loss": 0.2929, + "step": 56840 + }, + { + "epoch": 2.1950654465423374, + "grad_norm": 1.5758986473083496, + "learning_rate": 5.3664877665804345e-05, + "loss": 0.2013, + "step": 56850 + }, + { + "epoch": 2.1954515618363644, + "grad_norm": 0.8895549774169922, + "learning_rate": 5.363913664620256e-05, + "loss": 0.1539, + "step": 56860 + }, + { + "epoch": 2.195837677130391, + "grad_norm": 0.0427737757563591, + "learning_rate": 5.361339562660077e-05, + "loss": 0.0949, + "step": 56870 + }, + { + "epoch": 2.196223792424418, + "grad_norm": 0.9843714237213135, + "learning_rate": 5.3587654606998985e-05, + "loss": 0.1784, + "step": 56880 + }, + { + "epoch": 2.196609907718445, + "grad_norm": 0.9936504364013672, + "learning_rate": 5.356191358739719e-05, + "loss": 0.1075, + "step": 56890 + }, + { + "epoch": 2.1969960230124714, + "grad_norm": 0.3362007737159729, + "learning_rate": 5.353617256779542e-05, + "loss": 0.1496, + "step": 56900 + }, + { + "epoch": 
2.1973821383064984, + "grad_norm": 0.34201017022132874, + "learning_rate": 5.351043154819363e-05, + "loss": 0.1036, + "step": 56910 + }, + { + "epoch": 2.197768253600525, + "grad_norm": 1.8447175025939941, + "learning_rate": 5.348469052859184e-05, + "loss": 0.1161, + "step": 56920 + }, + { + "epoch": 2.198154368894552, + "grad_norm": 1.2634321451187134, + "learning_rate": 5.345894950899005e-05, + "loss": 0.1302, + "step": 56930 + }, + { + "epoch": 2.1985404841885785, + "grad_norm": 1.3948713541030884, + "learning_rate": 5.3433208489388265e-05, + "loss": 0.2252, + "step": 56940 + }, + { + "epoch": 2.1989265994826055, + "grad_norm": 0.7020501494407654, + "learning_rate": 5.340746746978648e-05, + "loss": 0.1938, + "step": 56950 + }, + { + "epoch": 2.1993127147766325, + "grad_norm": 4.047187805175781, + "learning_rate": 5.33817264501847e-05, + "loss": 0.2477, + "step": 56960 + }, + { + "epoch": 2.199698830070659, + "grad_norm": 0.5025122761726379, + "learning_rate": 5.335598543058291e-05, + "loss": 0.1044, + "step": 56970 + }, + { + "epoch": 2.200084945364686, + "grad_norm": 1.8918673992156982, + "learning_rate": 5.3330244410981125e-05, + "loss": 0.3172, + "step": 56980 + }, + { + "epoch": 2.2004710606587126, + "grad_norm": 3.515730857849121, + "learning_rate": 5.330450339137933e-05, + "loss": 0.0818, + "step": 56990 + }, + { + "epoch": 2.2008571759527396, + "grad_norm": 2.531858444213867, + "learning_rate": 5.3278762371777545e-05, + "loss": 0.2849, + "step": 57000 + }, + { + "epoch": 2.201243291246766, + "grad_norm": 3.145490884780884, + "learning_rate": 5.325302135217576e-05, + "loss": 0.2319, + "step": 57010 + }, + { + "epoch": 2.201629406540793, + "grad_norm": 1.8957561254501343, + "learning_rate": 5.322728033257398e-05, + "loss": 0.2513, + "step": 57020 + }, + { + "epoch": 2.20201552183482, + "grad_norm": 1.326156497001648, + "learning_rate": 5.320153931297219e-05, + "loss": 0.295, + "step": 57030 + }, + { + "epoch": 2.2024016371288466, + "grad_norm": 0.8725142478942871, + "learning_rate": 5.3175798293370405e-05, + "loss": 0.1235, + "step": 57040 + }, + { + "epoch": 2.2027877524228736, + "grad_norm": 0.8360647559165955, + "learning_rate": 5.315005727376862e-05, + "loss": 0.224, + "step": 57050 + }, + { + "epoch": 2.2031738677169, + "grad_norm": 1.381373643875122, + "learning_rate": 5.312431625416683e-05, + "loss": 0.1003, + "step": 57060 + }, + { + "epoch": 2.203559983010927, + "grad_norm": 2.6999964714050293, + "learning_rate": 5.309857523456504e-05, + "loss": 0.2118, + "step": 57070 + }, + { + "epoch": 2.2039460983049537, + "grad_norm": 2.1584982872009277, + "learning_rate": 5.307283421496325e-05, + "loss": 0.2965, + "step": 57080 + }, + { + "epoch": 2.2043322135989807, + "grad_norm": 1.3775367736816406, + "learning_rate": 5.304709319536148e-05, + "loss": 0.2494, + "step": 57090 + }, + { + "epoch": 2.2047183288930077, + "grad_norm": 2.1069607734680176, + "learning_rate": 5.3021352175759684e-05, + "loss": 0.3412, + "step": 57100 + }, + { + "epoch": 2.205104444187034, + "grad_norm": 1.3556911945343018, + "learning_rate": 5.29956111561579e-05, + "loss": 0.2076, + "step": 57110 + }, + { + "epoch": 2.205490559481061, + "grad_norm": 0.4328407049179077, + "learning_rate": 5.296987013655611e-05, + "loss": 0.0691, + "step": 57120 + }, + { + "epoch": 2.2058766747750878, + "grad_norm": 0.24479885399341583, + "learning_rate": 5.2944129116954324e-05, + "loss": 0.3407, + "step": 57130 + }, + { + "epoch": 2.2062627900691147, + "grad_norm": 0.4531087279319763, + "learning_rate": 5.291838809735253e-05, 
+ "loss": 0.1471, + "step": 57140 + }, + { + "epoch": 2.2066489053631413, + "grad_norm": 1.258487582206726, + "learning_rate": 5.289264707775076e-05, + "loss": 0.2213, + "step": 57150 + }, + { + "epoch": 2.2070350206571683, + "grad_norm": 1.8605122566223145, + "learning_rate": 5.286690605814897e-05, + "loss": 0.1839, + "step": 57160 + }, + { + "epoch": 2.207421135951195, + "grad_norm": 0.20423386991024017, + "learning_rate": 5.284116503854718e-05, + "loss": 0.1898, + "step": 57170 + }, + { + "epoch": 2.207807251245222, + "grad_norm": 1.366576910018921, + "learning_rate": 5.281542401894539e-05, + "loss": 0.1533, + "step": 57180 + }, + { + "epoch": 2.208193366539249, + "grad_norm": 2.0091841220855713, + "learning_rate": 5.2789682999343604e-05, + "loss": 0.2834, + "step": 57190 + }, + { + "epoch": 2.2085794818332753, + "grad_norm": 1.271532654762268, + "learning_rate": 5.276394197974182e-05, + "loss": 0.2215, + "step": 57200 + }, + { + "epoch": 2.2089655971273023, + "grad_norm": 1.3751137256622314, + "learning_rate": 5.273820096014004e-05, + "loss": 0.1753, + "step": 57210 + }, + { + "epoch": 2.209351712421329, + "grad_norm": 1.6233354806900024, + "learning_rate": 5.271245994053825e-05, + "loss": 0.425, + "step": 57220 + }, + { + "epoch": 2.209737827715356, + "grad_norm": 0.05391040816903114, + "learning_rate": 5.2686718920936464e-05, + "loss": 0.1759, + "step": 57230 + }, + { + "epoch": 2.2101239430093824, + "grad_norm": 0.0809585228562355, + "learning_rate": 5.266097790133467e-05, + "loss": 0.1902, + "step": 57240 + }, + { + "epoch": 2.2105100583034094, + "grad_norm": 0.05576219782233238, + "learning_rate": 5.2635236881732884e-05, + "loss": 0.1104, + "step": 57250 + }, + { + "epoch": 2.2108961735974364, + "grad_norm": 0.2295994609594345, + "learning_rate": 5.26094958621311e-05, + "loss": 0.1119, + "step": 57260 + }, + { + "epoch": 2.211282288891463, + "grad_norm": 2.042689085006714, + "learning_rate": 5.258375484252932e-05, + "loss": 0.1703, + "step": 57270 + }, + { + "epoch": 2.21166840418549, + "grad_norm": 0.13086611032485962, + "learning_rate": 5.255801382292753e-05, + "loss": 0.2181, + "step": 57280 + }, + { + "epoch": 2.2120545194795165, + "grad_norm": 0.7772855758666992, + "learning_rate": 5.2532272803325744e-05, + "loss": 0.2975, + "step": 57290 + }, + { + "epoch": 2.2124406347735435, + "grad_norm": 1.100147008895874, + "learning_rate": 5.250653178372396e-05, + "loss": 0.1446, + "step": 57300 + }, + { + "epoch": 2.21282675006757, + "grad_norm": 2.2804689407348633, + "learning_rate": 5.248079076412217e-05, + "loss": 0.1779, + "step": 57310 + }, + { + "epoch": 2.213212865361597, + "grad_norm": 0.909376323223114, + "learning_rate": 5.245504974452038e-05, + "loss": 0.1557, + "step": 57320 + }, + { + "epoch": 2.2135989806556235, + "grad_norm": 0.808570921421051, + "learning_rate": 5.242930872491859e-05, + "loss": 0.2107, + "step": 57330 + }, + { + "epoch": 2.2139850959496505, + "grad_norm": 1.6458532810211182, + "learning_rate": 5.240356770531681e-05, + "loss": 0.1729, + "step": 57340 + }, + { + "epoch": 2.2143712112436775, + "grad_norm": 1.928828477859497, + "learning_rate": 5.2377826685715024e-05, + "loss": 0.1234, + "step": 57350 + }, + { + "epoch": 2.214757326537704, + "grad_norm": 1.076491117477417, + "learning_rate": 5.235208566611324e-05, + "loss": 0.2457, + "step": 57360 + }, + { + "epoch": 2.215143441831731, + "grad_norm": 3.043621778488159, + "learning_rate": 5.232634464651145e-05, + "loss": 0.2592, + "step": 57370 + }, + { + "epoch": 2.2155295571257576, + "grad_norm": 
0.1770264357328415, + "learning_rate": 5.2300603626909664e-05, + "loss": 0.1214, + "step": 57380 + }, + { + "epoch": 2.2159156724197846, + "grad_norm": 0.12781330943107605, + "learning_rate": 5.227486260730787e-05, + "loss": 0.2213, + "step": 57390 + }, + { + "epoch": 2.216301787713811, + "grad_norm": 1.3419160842895508, + "learning_rate": 5.22491215877061e-05, + "loss": 0.1806, + "step": 57400 + }, + { + "epoch": 2.216687903007838, + "grad_norm": 0.9998745322227478, + "learning_rate": 5.222338056810431e-05, + "loss": 0.1452, + "step": 57410 + }, + { + "epoch": 2.217074018301865, + "grad_norm": 0.5411838889122009, + "learning_rate": 5.219763954850252e-05, + "loss": 0.0978, + "step": 57420 + }, + { + "epoch": 2.2174601335958917, + "grad_norm": 0.505660891532898, + "learning_rate": 5.217189852890073e-05, + "loss": 0.0553, + "step": 57430 + }, + { + "epoch": 2.2178462488899187, + "grad_norm": 2.207895278930664, + "learning_rate": 5.2146157509298944e-05, + "loss": 0.2327, + "step": 57440 + }, + { + "epoch": 2.218232364183945, + "grad_norm": 2.272740364074707, + "learning_rate": 5.212041648969716e-05, + "loss": 0.1773, + "step": 57450 + }, + { + "epoch": 2.218618479477972, + "grad_norm": 1.0580307245254517, + "learning_rate": 5.209467547009538e-05, + "loss": 0.1024, + "step": 57460 + }, + { + "epoch": 2.2190045947719987, + "grad_norm": 0.3714104890823364, + "learning_rate": 5.206893445049359e-05, + "loss": 0.1082, + "step": 57470 + }, + { + "epoch": 2.2193907100660257, + "grad_norm": 0.35416433215141296, + "learning_rate": 5.2043193430891804e-05, + "loss": 0.1082, + "step": 57480 + }, + { + "epoch": 2.2197768253600527, + "grad_norm": 1.1985892057418823, + "learning_rate": 5.201745241129001e-05, + "loss": 0.1227, + "step": 57490 + }, + { + "epoch": 2.2201629406540793, + "grad_norm": 0.7527439594268799, + "learning_rate": 5.1991711391688224e-05, + "loss": 0.2785, + "step": 57500 + }, + { + "epoch": 2.2205490559481063, + "grad_norm": 2.3120546340942383, + "learning_rate": 5.196597037208644e-05, + "loss": 0.1503, + "step": 57510 + }, + { + "epoch": 2.220935171242133, + "grad_norm": 1.3414112329483032, + "learning_rate": 5.194022935248466e-05, + "loss": 0.1436, + "step": 57520 + }, + { + "epoch": 2.22132128653616, + "grad_norm": 0.9937017560005188, + "learning_rate": 5.191448833288287e-05, + "loss": 0.1424, + "step": 57530 + }, + { + "epoch": 2.2217074018301863, + "grad_norm": 0.952620267868042, + "learning_rate": 5.1888747313281084e-05, + "loss": 0.1701, + "step": 57540 + }, + { + "epoch": 2.2220935171242133, + "grad_norm": 0.421085000038147, + "learning_rate": 5.18630062936793e-05, + "loss": 0.1187, + "step": 57550 + }, + { + "epoch": 2.2224796324182403, + "grad_norm": 1.5724862813949585, + "learning_rate": 5.1837265274077504e-05, + "loss": 0.1509, + "step": 57560 + }, + { + "epoch": 2.222865747712267, + "grad_norm": 1.705536127090454, + "learning_rate": 5.181152425447572e-05, + "loss": 0.1546, + "step": 57570 + }, + { + "epoch": 2.223251863006294, + "grad_norm": 0.6752486824989319, + "learning_rate": 5.1785783234873944e-05, + "loss": 0.1785, + "step": 57580 + }, + { + "epoch": 2.2236379783003204, + "grad_norm": 0.1227736845612526, + "learning_rate": 5.176004221527215e-05, + "loss": 0.1198, + "step": 57590 + }, + { + "epoch": 2.2240240935943474, + "grad_norm": 1.0653119087219238, + "learning_rate": 5.1734301195670364e-05, + "loss": 0.2688, + "step": 57600 + }, + { + "epoch": 2.224410208888374, + "grad_norm": 2.1538949012756348, + "learning_rate": 5.170856017606858e-05, + "loss": 0.1402, + "step": 
57610 + }, + { + "epoch": 2.224796324182401, + "grad_norm": 2.1059763431549072, + "learning_rate": 5.168281915646679e-05, + "loss": 0.1319, + "step": 57620 + }, + { + "epoch": 2.2251824394764275, + "grad_norm": 1.8453232049942017, + "learning_rate": 5.1657078136865004e-05, + "loss": 0.154, + "step": 57630 + }, + { + "epoch": 2.2255685547704545, + "grad_norm": 1.7324992418289185, + "learning_rate": 5.163133711726321e-05, + "loss": 0.2807, + "step": 57640 + }, + { + "epoch": 2.2259546700644814, + "grad_norm": 0.4680674374103546, + "learning_rate": 5.160559609766144e-05, + "loss": 0.1648, + "step": 57650 + }, + { + "epoch": 2.226340785358508, + "grad_norm": 0.2356865406036377, + "learning_rate": 5.1579855078059643e-05, + "loss": 0.2068, + "step": 57660 + }, + { + "epoch": 2.226726900652535, + "grad_norm": 1.2691845893859863, + "learning_rate": 5.155411405845786e-05, + "loss": 0.425, + "step": 57670 + }, + { + "epoch": 2.2271130159465615, + "grad_norm": 0.09415816515684128, + "learning_rate": 5.152837303885607e-05, + "loss": 0.1015, + "step": 57680 + }, + { + "epoch": 2.2274991312405885, + "grad_norm": 1.1072195768356323, + "learning_rate": 5.1502632019254283e-05, + "loss": 0.2324, + "step": 57690 + }, + { + "epoch": 2.227885246534615, + "grad_norm": 1.774086594581604, + "learning_rate": 5.14768909996525e-05, + "loss": 0.1382, + "step": 57700 + }, + { + "epoch": 2.228271361828642, + "grad_norm": 1.3065643310546875, + "learning_rate": 5.145114998005072e-05, + "loss": 0.1843, + "step": 57710 + }, + { + "epoch": 2.2286574771226686, + "grad_norm": 1.4786202907562256, + "learning_rate": 5.142540896044893e-05, + "loss": 0.2328, + "step": 57720 + }, + { + "epoch": 2.2290435924166956, + "grad_norm": 1.2334314584732056, + "learning_rate": 5.1399667940847143e-05, + "loss": 0.2064, + "step": 57730 + }, + { + "epoch": 2.2294297077107226, + "grad_norm": 0.6082472801208496, + "learning_rate": 5.137392692124535e-05, + "loss": 0.1278, + "step": 57740 + }, + { + "epoch": 2.229815823004749, + "grad_norm": 1.2659168243408203, + "learning_rate": 5.134818590164356e-05, + "loss": 0.1877, + "step": 57750 + }, + { + "epoch": 2.230201938298776, + "grad_norm": 1.652754783630371, + "learning_rate": 5.1322444882041777e-05, + "loss": 0.2277, + "step": 57760 + }, + { + "epoch": 2.2305880535928027, + "grad_norm": 1.6349531412124634, + "learning_rate": 5.129670386244e-05, + "loss": 0.2374, + "step": 57770 + }, + { + "epoch": 2.2309741688868296, + "grad_norm": 1.2513495683670044, + "learning_rate": 5.127096284283821e-05, + "loss": 0.2055, + "step": 57780 + }, + { + "epoch": 2.231360284180856, + "grad_norm": 2.0995755195617676, + "learning_rate": 5.124522182323642e-05, + "loss": 0.1968, + "step": 57790 + }, + { + "epoch": 2.231746399474883, + "grad_norm": 2.2957067489624023, + "learning_rate": 5.1219480803634637e-05, + "loss": 0.233, + "step": 57800 + }, + { + "epoch": 2.23213251476891, + "grad_norm": 2.303072452545166, + "learning_rate": 5.119373978403284e-05, + "loss": 0.0924, + "step": 57810 + }, + { + "epoch": 2.2325186300629367, + "grad_norm": 0.6360287070274353, + "learning_rate": 5.1167998764431056e-05, + "loss": 0.1557, + "step": 57820 + }, + { + "epoch": 2.2329047453569637, + "grad_norm": 0.720551073551178, + "learning_rate": 5.114225774482928e-05, + "loss": 0.1619, + "step": 57830 + }, + { + "epoch": 2.2332908606509903, + "grad_norm": 0.332627534866333, + "learning_rate": 5.111651672522749e-05, + "loss": 0.1529, + "step": 57840 + }, + { + "epoch": 2.2336769759450172, + "grad_norm": 2.1180593967437744, + 
"learning_rate": 5.10907757056257e-05, + "loss": 0.2647, + "step": 57850 + }, + { + "epoch": 2.234063091239044, + "grad_norm": 0.023406701162457466, + "learning_rate": 5.1065034686023916e-05, + "loss": 0.2461, + "step": 57860 + }, + { + "epoch": 2.2344492065330708, + "grad_norm": 0.887008011341095, + "learning_rate": 5.103929366642213e-05, + "loss": 0.191, + "step": 57870 + }, + { + "epoch": 2.2348353218270978, + "grad_norm": 0.9116653203964233, + "learning_rate": 5.1013552646820336e-05, + "loss": 0.2199, + "step": 57880 + }, + { + "epoch": 2.2352214371211243, + "grad_norm": 2.72094464302063, + "learning_rate": 5.098781162721855e-05, + "loss": 0.1219, + "step": 57890 + }, + { + "epoch": 2.2356075524151513, + "grad_norm": 1.7832390069961548, + "learning_rate": 5.0962070607616776e-05, + "loss": 0.1516, + "step": 57900 + }, + { + "epoch": 2.235993667709178, + "grad_norm": 1.892029881477356, + "learning_rate": 5.093632958801498e-05, + "loss": 0.1943, + "step": 57910 + }, + { + "epoch": 2.236379783003205, + "grad_norm": 1.284244418144226, + "learning_rate": 5.0910588568413196e-05, + "loss": 0.0762, + "step": 57920 + }, + { + "epoch": 2.2367658982972314, + "grad_norm": 2.9343578815460205, + "learning_rate": 5.088484754881141e-05, + "loss": 0.1623, + "step": 57930 + }, + { + "epoch": 2.2371520135912584, + "grad_norm": 2.6697938442230225, + "learning_rate": 5.085910652920962e-05, + "loss": 0.1781, + "step": 57940 + }, + { + "epoch": 2.2375381288852854, + "grad_norm": 0.14455921947956085, + "learning_rate": 5.0833365509607836e-05, + "loss": 0.3026, + "step": 57950 + }, + { + "epoch": 2.237924244179312, + "grad_norm": 0.3427145183086395, + "learning_rate": 5.0807624490006056e-05, + "loss": 0.047, + "step": 57960 + }, + { + "epoch": 2.238310359473339, + "grad_norm": 0.4725586473941803, + "learning_rate": 5.078188347040427e-05, + "loss": 0.1794, + "step": 57970 + }, + { + "epoch": 2.2386964747673654, + "grad_norm": 2.0606446266174316, + "learning_rate": 5.075614245080248e-05, + "loss": 0.0948, + "step": 57980 + }, + { + "epoch": 2.2390825900613924, + "grad_norm": 0.08911284059286118, + "learning_rate": 5.073040143120069e-05, + "loss": 0.1171, + "step": 57990 + }, + { + "epoch": 2.239468705355419, + "grad_norm": 0.05132399871945381, + "learning_rate": 5.07046604115989e-05, + "loss": 0.1201, + "step": 58000 + }, + { + "epoch": 2.239854820649446, + "grad_norm": 0.5799759030342102, + "learning_rate": 5.0678919391997116e-05, + "loss": 0.1702, + "step": 58010 + }, + { + "epoch": 2.240240935943473, + "grad_norm": 1.5331569910049438, + "learning_rate": 5.0653178372395336e-05, + "loss": 0.2916, + "step": 58020 + }, + { + "epoch": 2.2406270512374995, + "grad_norm": 0.31285667419433594, + "learning_rate": 5.062743735279355e-05, + "loss": 0.1659, + "step": 58030 + }, + { + "epoch": 2.2410131665315265, + "grad_norm": 1.9137883186340332, + "learning_rate": 5.060169633319176e-05, + "loss": 0.0994, + "step": 58040 + }, + { + "epoch": 2.241399281825553, + "grad_norm": 0.0040522972121834755, + "learning_rate": 5.0575955313589976e-05, + "loss": 0.1047, + "step": 58050 + }, + { + "epoch": 2.24178539711958, + "grad_norm": 1.4532781839370728, + "learning_rate": 5.055021429398818e-05, + "loss": 0.1351, + "step": 58060 + }, + { + "epoch": 2.2421715124136066, + "grad_norm": 1.1458393335342407, + "learning_rate": 5.0524473274386396e-05, + "loss": 0.0966, + "step": 58070 + }, + { + "epoch": 2.2425576277076336, + "grad_norm": 0.4871302545070648, + "learning_rate": 5.049873225478462e-05, + "loss": 0.2297, + "step": 58080 + }, + { 
+ "epoch": 2.24294374300166, + "grad_norm": 0.8895847201347351, + "learning_rate": 5.047299123518283e-05, + "loss": 0.1101, + "step": 58090 + }, + { + "epoch": 2.243329858295687, + "grad_norm": 1.5819259881973267, + "learning_rate": 5.044725021558104e-05, + "loss": 0.165, + "step": 58100 + }, + { + "epoch": 2.243715973589714, + "grad_norm": 0.4520101249217987, + "learning_rate": 5.0421509195979256e-05, + "loss": 0.2857, + "step": 58110 + }, + { + "epoch": 2.2441020888837406, + "grad_norm": 0.6979352235794067, + "learning_rate": 5.039576817637747e-05, + "loss": 0.1135, + "step": 58120 + }, + { + "epoch": 2.2444882041777676, + "grad_norm": 0.10534228384494781, + "learning_rate": 5.0370027156775676e-05, + "loss": 0.1788, + "step": 58130 + }, + { + "epoch": 2.244874319471794, + "grad_norm": 1.593078851699829, + "learning_rate": 5.034428613717389e-05, + "loss": 0.0948, + "step": 58140 + }, + { + "epoch": 2.245260434765821, + "grad_norm": 0.7897083163261414, + "learning_rate": 5.0318545117572116e-05, + "loss": 0.1155, + "step": 58150 + }, + { + "epoch": 2.2456465500598477, + "grad_norm": 0.17938394844532013, + "learning_rate": 5.029280409797032e-05, + "loss": 0.246, + "step": 58160 + }, + { + "epoch": 2.2460326653538747, + "grad_norm": 0.9242120385169983, + "learning_rate": 5.0267063078368536e-05, + "loss": 0.2686, + "step": 58170 + }, + { + "epoch": 2.2464187806479012, + "grad_norm": 0.46744218468666077, + "learning_rate": 5.024132205876675e-05, + "loss": 0.105, + "step": 58180 + }, + { + "epoch": 2.2468048959419282, + "grad_norm": 1.0429635047912598, + "learning_rate": 5.021558103916496e-05, + "loss": 0.0922, + "step": 58190 + }, + { + "epoch": 2.247191011235955, + "grad_norm": 2.889759063720703, + "learning_rate": 5.018984001956317e-05, + "loss": 0.2586, + "step": 58200 + }, + { + "epoch": 2.2475771265299818, + "grad_norm": 1.0298150777816772, + "learning_rate": 5.0164098999961396e-05, + "loss": 0.1373, + "step": 58210 + }, + { + "epoch": 2.2479632418240088, + "grad_norm": 0.14992554485797882, + "learning_rate": 5.013835798035961e-05, + "loss": 0.2644, + "step": 58220 + }, + { + "epoch": 2.2483493571180353, + "grad_norm": 0.8929703831672668, + "learning_rate": 5.0112616960757816e-05, + "loss": 0.2109, + "step": 58230 + }, + { + "epoch": 2.2487354724120623, + "grad_norm": 0.8829396367073059, + "learning_rate": 5.008687594115603e-05, + "loss": 0.2353, + "step": 58240 + }, + { + "epoch": 2.249121587706089, + "grad_norm": 0.3709293305873871, + "learning_rate": 5.006113492155424e-05, + "loss": 0.099, + "step": 58250 + }, + { + "epoch": 2.249507703000116, + "grad_norm": 0.17572759091854095, + "learning_rate": 5.0035393901952456e-05, + "loss": 0.1161, + "step": 58260 + }, + { + "epoch": 2.249893818294143, + "grad_norm": 0.3241714537143707, + "learning_rate": 5.0009652882350676e-05, + "loss": 0.2081, + "step": 58270 + }, + { + "epoch": 2.2502799335881694, + "grad_norm": 0.5595920085906982, + "learning_rate": 4.998391186274888e-05, + "loss": 0.2376, + "step": 58280 + }, + { + "epoch": 2.2506660488821963, + "grad_norm": 0.8801298141479492, + "learning_rate": 4.99581708431471e-05, + "loss": 0.1423, + "step": 58290 + }, + { + "epoch": 2.251052164176223, + "grad_norm": 1.4857895374298096, + "learning_rate": 4.9932429823545316e-05, + "loss": 0.169, + "step": 58300 + }, + { + "epoch": 2.25143827947025, + "grad_norm": 1.0327515602111816, + "learning_rate": 4.990668880394352e-05, + "loss": 0.1127, + "step": 58310 + }, + { + "epoch": 2.2518243947642764, + "grad_norm": 0.19778093695640564, + "learning_rate": 
4.988094778434174e-05, + "loss": 0.2655, + "step": 58320 + }, + { + "epoch": 2.2522105100583034, + "grad_norm": 1.3672188520431519, + "learning_rate": 4.9855206764739956e-05, + "loss": 0.1252, + "step": 58330 + }, + { + "epoch": 2.2525966253523304, + "grad_norm": 3.6712214946746826, + "learning_rate": 4.982946574513816e-05, + "loss": 0.2721, + "step": 58340 + }, + { + "epoch": 2.252982740646357, + "grad_norm": 0.19810612499713898, + "learning_rate": 4.980372472553638e-05, + "loss": 0.1008, + "step": 58350 + }, + { + "epoch": 2.253368855940384, + "grad_norm": 0.5414086580276489, + "learning_rate": 4.9777983705934596e-05, + "loss": 0.1898, + "step": 58360 + }, + { + "epoch": 2.2537549712344105, + "grad_norm": 1.883710503578186, + "learning_rate": 4.975224268633281e-05, + "loss": 0.1841, + "step": 58370 + }, + { + "epoch": 2.2541410865284375, + "grad_norm": 0.3979630172252655, + "learning_rate": 4.972650166673102e-05, + "loss": 0.1712, + "step": 58380 + }, + { + "epoch": 2.254527201822464, + "grad_norm": 1.2606881856918335, + "learning_rate": 4.9700760647129236e-05, + "loss": 0.1772, + "step": 58390 + }, + { + "epoch": 2.254913317116491, + "grad_norm": 0.6021280288696289, + "learning_rate": 4.967501962752745e-05, + "loss": 0.1662, + "step": 58400 + }, + { + "epoch": 2.255299432410518, + "grad_norm": 0.4324108362197876, + "learning_rate": 4.964927860792566e-05, + "loss": 0.138, + "step": 58410 + }, + { + "epoch": 2.2556855477045445, + "grad_norm": 1.147596001625061, + "learning_rate": 4.9623537588323875e-05, + "loss": 0.1956, + "step": 58420 + }, + { + "epoch": 2.2560716629985715, + "grad_norm": 2.516636371612549, + "learning_rate": 4.959779656872209e-05, + "loss": 0.2031, + "step": 58430 + }, + { + "epoch": 2.256457778292598, + "grad_norm": 1.1109521389007568, + "learning_rate": 4.95720555491203e-05, + "loss": 0.2845, + "step": 58440 + }, + { + "epoch": 2.256843893586625, + "grad_norm": 0.3227555453777313, + "learning_rate": 4.9546314529518515e-05, + "loss": 0.0596, + "step": 58450 + }, + { + "epoch": 2.2572300088806516, + "grad_norm": 2.5064280033111572, + "learning_rate": 4.952057350991673e-05, + "loss": 0.289, + "step": 58460 + }, + { + "epoch": 2.2576161241746786, + "grad_norm": 1.0245225429534912, + "learning_rate": 4.949483249031495e-05, + "loss": 0.1458, + "step": 58470 + }, + { + "epoch": 2.2580022394687056, + "grad_norm": 0.058567408472299576, + "learning_rate": 4.9469091470713155e-05, + "loss": 0.2719, + "step": 58480 + }, + { + "epoch": 2.258388354762732, + "grad_norm": 2.1362061500549316, + "learning_rate": 4.944335045111137e-05, + "loss": 0.3814, + "step": 58490 + }, + { + "epoch": 2.258774470056759, + "grad_norm": 0.363843709230423, + "learning_rate": 4.941760943150959e-05, + "loss": 0.0669, + "step": 58500 + }, + { + "epoch": 2.2591605853507857, + "grad_norm": 0.1753295511007309, + "learning_rate": 4.93918684119078e-05, + "loss": 0.1246, + "step": 58510 + }, + { + "epoch": 2.2595467006448127, + "grad_norm": 1.6673377752304077, + "learning_rate": 4.936612739230601e-05, + "loss": 0.2781, + "step": 58520 + }, + { + "epoch": 2.259932815938839, + "grad_norm": 3.1135804653167725, + "learning_rate": 4.934038637270422e-05, + "loss": 0.1951, + "step": 58530 + }, + { + "epoch": 2.260318931232866, + "grad_norm": 0.8234933614730835, + "learning_rate": 4.931464535310244e-05, + "loss": 0.11, + "step": 58540 + }, + { + "epoch": 2.2607050465268927, + "grad_norm": 1.0099560022354126, + "learning_rate": 4.928890433350065e-05, + "loss": 0.2741, + "step": 58550 + }, + { + "epoch": 
2.2610911618209197, + "grad_norm": 2.2589969635009766, + "learning_rate": 4.926316331389886e-05, + "loss": 0.1243, + "step": 58560 + }, + { + "epoch": 2.2614772771149463, + "grad_norm": 0.5491199493408203, + "learning_rate": 4.923742229429708e-05, + "loss": 0.1302, + "step": 58570 + }, + { + "epoch": 2.2618633924089733, + "grad_norm": 0.9286119937896729, + "learning_rate": 4.9211681274695295e-05, + "loss": 0.1047, + "step": 58580 + }, + { + "epoch": 2.2622495077030003, + "grad_norm": 0.6178199052810669, + "learning_rate": 4.91859402550935e-05, + "loss": 0.1143, + "step": 58590 + }, + { + "epoch": 2.262635622997027, + "grad_norm": 1.6250818967819214, + "learning_rate": 4.916019923549172e-05, + "loss": 0.2461, + "step": 58600 + }, + { + "epoch": 2.263021738291054, + "grad_norm": 1.1366840600967407, + "learning_rate": 4.9134458215889935e-05, + "loss": 0.2128, + "step": 58610 + }, + { + "epoch": 2.2634078535850803, + "grad_norm": 0.38859716057777405, + "learning_rate": 4.910871719628815e-05, + "loss": 0.1476, + "step": 58620 + }, + { + "epoch": 2.2637939688791073, + "grad_norm": 0.02930479310452938, + "learning_rate": 4.908297617668636e-05, + "loss": 0.1901, + "step": 58630 + }, + { + "epoch": 2.264180084173134, + "grad_norm": 1.4426459074020386, + "learning_rate": 4.9057235157084575e-05, + "loss": 0.1736, + "step": 58640 + }, + { + "epoch": 2.264566199467161, + "grad_norm": 1.103959321975708, + "learning_rate": 4.903149413748279e-05, + "loss": 0.2207, + "step": 58650 + }, + { + "epoch": 2.264952314761188, + "grad_norm": 3.1351921558380127, + "learning_rate": 4.9005753117881e-05, + "loss": 0.394, + "step": 58660 + }, + { + "epoch": 2.2653384300552144, + "grad_norm": 0.33870574831962585, + "learning_rate": 4.8980012098279215e-05, + "loss": 0.1958, + "step": 58670 + }, + { + "epoch": 2.2657245453492414, + "grad_norm": 0.08599444478750229, + "learning_rate": 4.895427107867743e-05, + "loss": 0.0842, + "step": 58680 + }, + { + "epoch": 2.266110660643268, + "grad_norm": 0.7150046229362488, + "learning_rate": 4.892853005907564e-05, + "loss": 0.1917, + "step": 58690 + }, + { + "epoch": 2.266496775937295, + "grad_norm": 1.561062216758728, + "learning_rate": 4.8902789039473855e-05, + "loss": 0.2327, + "step": 58700 + }, + { + "epoch": 2.2668828912313215, + "grad_norm": 1.3899431228637695, + "learning_rate": 4.887704801987207e-05, + "loss": 0.1456, + "step": 58710 + }, + { + "epoch": 2.2672690065253485, + "grad_norm": 0.5647567510604858, + "learning_rate": 4.885130700027029e-05, + "loss": 0.2036, + "step": 58720 + }, + { + "epoch": 2.2676551218193755, + "grad_norm": 0.2155967652797699, + "learning_rate": 4.8825565980668495e-05, + "loss": 0.1159, + "step": 58730 + }, + { + "epoch": 2.268041237113402, + "grad_norm": 1.0128939151763916, + "learning_rate": 4.879982496106671e-05, + "loss": 0.1342, + "step": 58740 + }, + { + "epoch": 2.268427352407429, + "grad_norm": 1.079142689704895, + "learning_rate": 4.877408394146493e-05, + "loss": 0.1689, + "step": 58750 + }, + { + "epoch": 2.2688134677014555, + "grad_norm": 1.2162476778030396, + "learning_rate": 4.874834292186314e-05, + "loss": 0.2256, + "step": 58760 + }, + { + "epoch": 2.2691995829954825, + "grad_norm": 1.6972836256027222, + "learning_rate": 4.872260190226135e-05, + "loss": 0.1517, + "step": 58770 + }, + { + "epoch": 2.269585698289509, + "grad_norm": 1.4847822189331055, + "learning_rate": 4.869686088265956e-05, + "loss": 0.2296, + "step": 58780 + }, + { + "epoch": 2.269971813583536, + "grad_norm": 1.7321871519088745, + "learning_rate": 
4.867111986305778e-05, + "loss": 0.2396, + "step": 58790 + }, + { + "epoch": 2.270357928877563, + "grad_norm": 1.468248724937439, + "learning_rate": 4.864537884345599e-05, + "loss": 0.1501, + "step": 58800 + }, + { + "epoch": 2.2707440441715896, + "grad_norm": 1.125684380531311, + "learning_rate": 4.86196378238542e-05, + "loss": 0.2444, + "step": 58810 + }, + { + "epoch": 2.2711301594656166, + "grad_norm": 2.3958170413970947, + "learning_rate": 4.859389680425242e-05, + "loss": 0.3088, + "step": 58820 + }, + { + "epoch": 2.271516274759643, + "grad_norm": 0.8419416546821594, + "learning_rate": 4.8568155784650635e-05, + "loss": 0.1215, + "step": 58830 + }, + { + "epoch": 2.27190239005367, + "grad_norm": 0.3124147057533264, + "learning_rate": 4.854241476504884e-05, + "loss": 0.2069, + "step": 58840 + }, + { + "epoch": 2.2722885053476967, + "grad_norm": 0.6150888204574585, + "learning_rate": 4.851667374544706e-05, + "loss": 0.0483, + "step": 58850 + }, + { + "epoch": 2.2726746206417237, + "grad_norm": 0.7708920836448669, + "learning_rate": 4.8490932725845275e-05, + "loss": 0.2787, + "step": 58860 + }, + { + "epoch": 2.2730607359357506, + "grad_norm": 1.123910665512085, + "learning_rate": 4.846519170624348e-05, + "loss": 0.1875, + "step": 58870 + }, + { + "epoch": 2.273446851229777, + "grad_norm": 1.7842246294021606, + "learning_rate": 4.84394506866417e-05, + "loss": 0.2003, + "step": 58880 + }, + { + "epoch": 2.273832966523804, + "grad_norm": 0.09984418004751205, + "learning_rate": 4.8413709667039915e-05, + "loss": 0.0445, + "step": 58890 + }, + { + "epoch": 2.2742190818178307, + "grad_norm": 0.9539859890937805, + "learning_rate": 4.838796864743813e-05, + "loss": 0.1308, + "step": 58900 + }, + { + "epoch": 2.2746051971118577, + "grad_norm": 2.2655584812164307, + "learning_rate": 4.836222762783634e-05, + "loss": 0.1728, + "step": 58910 + }, + { + "epoch": 2.2749913124058843, + "grad_norm": 0.8873695731163025, + "learning_rate": 4.8336486608234555e-05, + "loss": 0.2559, + "step": 58920 + }, + { + "epoch": 2.2753774276999112, + "grad_norm": 0.6130178570747375, + "learning_rate": 4.831074558863277e-05, + "loss": 0.0859, + "step": 58930 + }, + { + "epoch": 2.2757635429939382, + "grad_norm": 2.61225962638855, + "learning_rate": 4.828500456903098e-05, + "loss": 0.1612, + "step": 58940 + }, + { + "epoch": 2.276149658287965, + "grad_norm": 1.187378168106079, + "learning_rate": 4.8259263549429195e-05, + "loss": 0.1222, + "step": 58950 + }, + { + "epoch": 2.2765357735819918, + "grad_norm": 0.3115352392196655, + "learning_rate": 4.823352252982741e-05, + "loss": 0.2081, + "step": 58960 + }, + { + "epoch": 2.2769218888760183, + "grad_norm": 1.1406041383743286, + "learning_rate": 4.820778151022563e-05, + "loss": 0.1137, + "step": 58970 + }, + { + "epoch": 2.2773080041700453, + "grad_norm": 2.415290355682373, + "learning_rate": 4.8182040490623834e-05, + "loss": 0.2498, + "step": 58980 + }, + { + "epoch": 2.277694119464072, + "grad_norm": 0.7312545776367188, + "learning_rate": 4.815629947102205e-05, + "loss": 0.2019, + "step": 58990 + }, + { + "epoch": 2.278080234758099, + "grad_norm": 0.3095935583114624, + "learning_rate": 4.813055845142027e-05, + "loss": 0.0833, + "step": 59000 + }, + { + "epoch": 2.2784663500521254, + "grad_norm": 2.551358938217163, + "learning_rate": 4.8104817431818474e-05, + "loss": 0.1599, + "step": 59010 + }, + { + "epoch": 2.2788524653461524, + "grad_norm": 0.8552582859992981, + "learning_rate": 4.807907641221669e-05, + "loss": 0.1445, + "step": 59020 + }, + { + "epoch": 
2.279238580640179, + "grad_norm": 0.5667589902877808, + "learning_rate": 4.80533353926149e-05, + "loss": 0.1746, + "step": 59030 + }, + { + "epoch": 2.279624695934206, + "grad_norm": 1.4083415269851685, + "learning_rate": 4.802759437301312e-05, + "loss": 0.1645, + "step": 59040 + }, + { + "epoch": 2.280010811228233, + "grad_norm": 0.04097180813550949, + "learning_rate": 4.800185335341133e-05, + "loss": 0.1898, + "step": 59050 + }, + { + "epoch": 2.2803969265222594, + "grad_norm": 0.019375400617718697, + "learning_rate": 4.797611233380954e-05, + "loss": 0.1723, + "step": 59060 + }, + { + "epoch": 2.2807830418162864, + "grad_norm": 1.4372104406356812, + "learning_rate": 4.795037131420776e-05, + "loss": 0.1243, + "step": 59070 + }, + { + "epoch": 2.281169157110313, + "grad_norm": 0.9807009696960449, + "learning_rate": 4.7924630294605974e-05, + "loss": 0.2997, + "step": 59080 + }, + { + "epoch": 2.28155527240434, + "grad_norm": 0.48975950479507446, + "learning_rate": 4.789888927500418e-05, + "loss": 0.1613, + "step": 59090 + }, + { + "epoch": 2.2819413876983665, + "grad_norm": 2.779517412185669, + "learning_rate": 4.78731482554024e-05, + "loss": 0.2774, + "step": 59100 + }, + { + "epoch": 2.2823275029923935, + "grad_norm": 1.5717260837554932, + "learning_rate": 4.7847407235800614e-05, + "loss": 0.1473, + "step": 59110 + }, + { + "epoch": 2.2827136182864205, + "grad_norm": 0.19590778648853302, + "learning_rate": 4.782166621619882e-05, + "loss": 0.2223, + "step": 59120 + }, + { + "epoch": 2.283099733580447, + "grad_norm": 1.9272565841674805, + "learning_rate": 4.779592519659704e-05, + "loss": 0.1101, + "step": 59130 + }, + { + "epoch": 2.283485848874474, + "grad_norm": 1.9476497173309326, + "learning_rate": 4.7770184176995254e-05, + "loss": 0.1627, + "step": 59140 + }, + { + "epoch": 2.2838719641685006, + "grad_norm": 4.072344779968262, + "learning_rate": 4.774444315739347e-05, + "loss": 0.2169, + "step": 59150 + }, + { + "epoch": 2.2842580794625276, + "grad_norm": 2.180009603500366, + "learning_rate": 4.771870213779168e-05, + "loss": 0.1348, + "step": 59160 + }, + { + "epoch": 2.284644194756554, + "grad_norm": 1.6288330554962158, + "learning_rate": 4.7692961118189894e-05, + "loss": 0.157, + "step": 59170 + }, + { + "epoch": 2.285030310050581, + "grad_norm": 0.8936790227890015, + "learning_rate": 4.766722009858811e-05, + "loss": 0.2966, + "step": 59180 + }, + { + "epoch": 2.285416425344608, + "grad_norm": 0.34459662437438965, + "learning_rate": 4.764147907898632e-05, + "loss": 0.2497, + "step": 59190 + }, + { + "epoch": 2.2858025406386346, + "grad_norm": 1.2364985942840576, + "learning_rate": 4.7615738059384534e-05, + "loss": 0.1951, + "step": 59200 + }, + { + "epoch": 2.2861886559326616, + "grad_norm": 0.43685224652290344, + "learning_rate": 4.758999703978275e-05, + "loss": 0.1707, + "step": 59210 + }, + { + "epoch": 2.286574771226688, + "grad_norm": 1.5797430276870728, + "learning_rate": 4.756425602018096e-05, + "loss": 0.1328, + "step": 59220 + }, + { + "epoch": 2.286960886520715, + "grad_norm": 1.0080262422561646, + "learning_rate": 4.7538515000579174e-05, + "loss": 0.2743, + "step": 59230 + }, + { + "epoch": 2.2873470018147417, + "grad_norm": 2.561823844909668, + "learning_rate": 4.751277398097739e-05, + "loss": 0.3414, + "step": 59240 + }, + { + "epoch": 2.2877331171087687, + "grad_norm": 1.4492895603179932, + "learning_rate": 4.748703296137561e-05, + "loss": 0.1942, + "step": 59250 + }, + { + "epoch": 2.2881192324027957, + "grad_norm": 2.6348021030426025, + "learning_rate": 
4.7461291941773814e-05, + "loss": 0.2293, + "step": 59260 + }, + { + "epoch": 2.2885053476968222, + "grad_norm": 0.5275348424911499, + "learning_rate": 4.743555092217203e-05, + "loss": 0.1436, + "step": 59270 + }, + { + "epoch": 2.2888914629908492, + "grad_norm": 0.3320735991001129, + "learning_rate": 4.740980990257025e-05, + "loss": 0.3213, + "step": 59280 + }, + { + "epoch": 2.2892775782848758, + "grad_norm": 0.33580052852630615, + "learning_rate": 4.738406888296846e-05, + "loss": 0.0988, + "step": 59290 + }, + { + "epoch": 2.2896636935789028, + "grad_norm": 0.6209644079208374, + "learning_rate": 4.735832786336667e-05, + "loss": 0.0969, + "step": 59300 + }, + { + "epoch": 2.2900498088729293, + "grad_norm": 0.8504312038421631, + "learning_rate": 4.733258684376488e-05, + "loss": 0.1328, + "step": 59310 + }, + { + "epoch": 2.2904359241669563, + "grad_norm": 1.8116223812103271, + "learning_rate": 4.73068458241631e-05, + "loss": 0.2032, + "step": 59320 + }, + { + "epoch": 2.2908220394609833, + "grad_norm": 3.810727596282959, + "learning_rate": 4.728110480456131e-05, + "loss": 0.2162, + "step": 59330 + }, + { + "epoch": 2.29120815475501, + "grad_norm": 0.39549925923347473, + "learning_rate": 4.725536378495952e-05, + "loss": 0.1421, + "step": 59340 + }, + { + "epoch": 2.291594270049037, + "grad_norm": 0.6922689080238342, + "learning_rate": 4.722962276535774e-05, + "loss": 0.23, + "step": 59350 + }, + { + "epoch": 2.2919803853430634, + "grad_norm": 2.8557536602020264, + "learning_rate": 4.7203881745755954e-05, + "loss": 0.2824, + "step": 59360 + }, + { + "epoch": 2.2923665006370904, + "grad_norm": 1.2936065196990967, + "learning_rate": 4.717814072615416e-05, + "loss": 0.2515, + "step": 59370 + }, + { + "epoch": 2.292752615931117, + "grad_norm": 0.17199493944644928, + "learning_rate": 4.715239970655238e-05, + "loss": 0.1029, + "step": 59380 + }, + { + "epoch": 2.293138731225144, + "grad_norm": 0.06957222521305084, + "learning_rate": 4.7126658686950594e-05, + "loss": 0.1535, + "step": 59390 + }, + { + "epoch": 2.2935248465191704, + "grad_norm": 0.24227716028690338, + "learning_rate": 4.710091766734881e-05, + "loss": 0.2497, + "step": 59400 + }, + { + "epoch": 2.2939109618131974, + "grad_norm": 0.49250227212905884, + "learning_rate": 4.707517664774702e-05, + "loss": 0.1416, + "step": 59410 + }, + { + "epoch": 2.294297077107224, + "grad_norm": 0.2832399010658264, + "learning_rate": 4.7049435628145234e-05, + "loss": 0.1156, + "step": 59420 + }, + { + "epoch": 2.294683192401251, + "grad_norm": 0.6916882395744324, + "learning_rate": 4.702369460854345e-05, + "loss": 0.315, + "step": 59430 + }, + { + "epoch": 2.295069307695278, + "grad_norm": 2.099567174911499, + "learning_rate": 4.699795358894166e-05, + "loss": 0.2208, + "step": 59440 + }, + { + "epoch": 2.2954554229893045, + "grad_norm": 0.9960312843322754, + "learning_rate": 4.6972212569339874e-05, + "loss": 0.2152, + "step": 59450 + }, + { + "epoch": 2.2958415382833315, + "grad_norm": 0.8338409662246704, + "learning_rate": 4.694647154973809e-05, + "loss": 0.1238, + "step": 59460 + }, + { + "epoch": 2.296227653577358, + "grad_norm": 1.1573169231414795, + "learning_rate": 4.69207305301363e-05, + "loss": 0.1141, + "step": 59470 + }, + { + "epoch": 2.296613768871385, + "grad_norm": 0.639020562171936, + "learning_rate": 4.6894989510534514e-05, + "loss": 0.1143, + "step": 59480 + }, + { + "epoch": 2.2969998841654116, + "grad_norm": 1.2339776754379272, + "learning_rate": 4.686924849093273e-05, + "loss": 0.0832, + "step": 59490 + }, + { + "epoch": 
2.2973859994594386, + "grad_norm": 0.5958113074302673, + "learning_rate": 4.684350747133095e-05, + "loss": 0.1435, + "step": 59500 + }, + { + "epoch": 2.2977721147534655, + "grad_norm": 0.36563804745674133, + "learning_rate": 4.6817766451729154e-05, + "loss": 0.21, + "step": 59510 + }, + { + "epoch": 2.298158230047492, + "grad_norm": 0.8008506894111633, + "learning_rate": 4.679202543212737e-05, + "loss": 0.2525, + "step": 59520 + }, + { + "epoch": 2.298544345341519, + "grad_norm": 1.644982099533081, + "learning_rate": 4.676628441252559e-05, + "loss": 0.1489, + "step": 59530 + }, + { + "epoch": 2.2989304606355456, + "grad_norm": 0.8418545126914978, + "learning_rate": 4.6740543392923793e-05, + "loss": 0.1275, + "step": 59540 + }, + { + "epoch": 2.2993165759295726, + "grad_norm": 1.7769790887832642, + "learning_rate": 4.671480237332201e-05, + "loss": 0.2149, + "step": 59550 + }, + { + "epoch": 2.299702691223599, + "grad_norm": 1.3088769912719727, + "learning_rate": 4.668906135372022e-05, + "loss": 0.3798, + "step": 59560 + }, + { + "epoch": 2.300088806517626, + "grad_norm": 0.0827949270606041, + "learning_rate": 4.666332033411844e-05, + "loss": 0.2128, + "step": 59570 + }, + { + "epoch": 2.300474921811653, + "grad_norm": 0.7772637605667114, + "learning_rate": 4.663757931451665e-05, + "loss": 0.1731, + "step": 59580 + }, + { + "epoch": 2.3008610371056797, + "grad_norm": 3.8065452575683594, + "learning_rate": 4.661183829491486e-05, + "loss": 0.3464, + "step": 59590 + }, + { + "epoch": 2.3012471523997067, + "grad_norm": 0.10512294620275497, + "learning_rate": 4.658609727531308e-05, + "loss": 0.1584, + "step": 59600 + }, + { + "epoch": 2.301633267693733, + "grad_norm": 1.227293848991394, + "learning_rate": 4.6560356255711293e-05, + "loss": 0.2381, + "step": 59610 + }, + { + "epoch": 2.30201938298776, + "grad_norm": 0.2790459394454956, + "learning_rate": 4.65346152361095e-05, + "loss": 0.162, + "step": 59620 + }, + { + "epoch": 2.3024054982817868, + "grad_norm": 0.571444034576416, + "learning_rate": 4.650887421650772e-05, + "loss": 0.1354, + "step": 59630 + }, + { + "epoch": 2.3027916135758137, + "grad_norm": 0.6388635635375977, + "learning_rate": 4.648313319690593e-05, + "loss": 0.0969, + "step": 59640 + }, + { + "epoch": 2.3031777288698407, + "grad_norm": 3.020249843597412, + "learning_rate": 4.645739217730414e-05, + "loss": 0.15, + "step": 59650 + }, + { + "epoch": 2.3035638441638673, + "grad_norm": 1.6712716817855835, + "learning_rate": 4.643165115770236e-05, + "loss": 0.1445, + "step": 59660 + }, + { + "epoch": 2.3039499594578943, + "grad_norm": 0.4937114119529724, + "learning_rate": 4.640591013810057e-05, + "loss": 0.0823, + "step": 59670 + }, + { + "epoch": 2.304336074751921, + "grad_norm": 1.7938934564590454, + "learning_rate": 4.6380169118498787e-05, + "loss": 0.164, + "step": 59680 + }, + { + "epoch": 2.304722190045948, + "grad_norm": 1.9819937944412231, + "learning_rate": 4.6354428098897e-05, + "loss": 0.1753, + "step": 59690 + }, + { + "epoch": 2.3051083053399744, + "grad_norm": 1.2775839567184448, + "learning_rate": 4.632868707929521e-05, + "loss": 0.1875, + "step": 59700 + }, + { + "epoch": 2.3054944206340013, + "grad_norm": 0.775601863861084, + "learning_rate": 4.6302946059693427e-05, + "loss": 0.0737, + "step": 59710 + }, + { + "epoch": 2.3058805359280283, + "grad_norm": 0.9071961045265198, + "learning_rate": 4.627720504009164e-05, + "loss": 0.1632, + "step": 59720 + }, + { + "epoch": 2.306266651222055, + "grad_norm": 1.4232882261276245, + "learning_rate": 4.625146402048985e-05, + 
"loss": 0.2169, + "step": 59730 + }, + { + "epoch": 2.306652766516082, + "grad_norm": 0.5946634411811829, + "learning_rate": 4.6225723000888066e-05, + "loss": 0.1919, + "step": 59740 + }, + { + "epoch": 2.3070388818101084, + "grad_norm": 0.14276131987571716, + "learning_rate": 4.6199981981286287e-05, + "loss": 0.1415, + "step": 59750 + }, + { + "epoch": 2.3074249971041354, + "grad_norm": 1.5616459846496582, + "learning_rate": 4.617424096168449e-05, + "loss": 0.1668, + "step": 59760 + }, + { + "epoch": 2.307811112398162, + "grad_norm": 0.34254691004753113, + "learning_rate": 4.6148499942082706e-05, + "loss": 0.2122, + "step": 59770 + }, + { + "epoch": 2.308197227692189, + "grad_norm": 0.34764161705970764, + "learning_rate": 4.6122758922480926e-05, + "loss": 0.1604, + "step": 59780 + }, + { + "epoch": 2.308583342986216, + "grad_norm": 3.1887755393981934, + "learning_rate": 4.609701790287913e-05, + "loss": 0.124, + "step": 59790 + }, + { + "epoch": 2.3089694582802425, + "grad_norm": 0.8155665397644043, + "learning_rate": 4.6071276883277346e-05, + "loss": 0.1095, + "step": 59800 + }, + { + "epoch": 2.3093555735742695, + "grad_norm": 0.2734861373901367, + "learning_rate": 4.6045535863675566e-05, + "loss": 0.2389, + "step": 59810 + }, + { + "epoch": 2.309741688868296, + "grad_norm": 0.5696996450424194, + "learning_rate": 4.601979484407378e-05, + "loss": 0.0787, + "step": 59820 + }, + { + "epoch": 2.310127804162323, + "grad_norm": 0.49777019023895264, + "learning_rate": 4.5994053824471986e-05, + "loss": 0.1815, + "step": 59830 + }, + { + "epoch": 2.3105139194563495, + "grad_norm": 0.001843929523602128, + "learning_rate": 4.59683128048702e-05, + "loss": 0.0834, + "step": 59840 + }, + { + "epoch": 2.3109000347503765, + "grad_norm": 0.1088326945900917, + "learning_rate": 4.594257178526842e-05, + "loss": 0.1821, + "step": 59850 + }, + { + "epoch": 2.311286150044403, + "grad_norm": 0.4867718815803528, + "learning_rate": 4.591683076566663e-05, + "loss": 0.2175, + "step": 59860 + }, + { + "epoch": 2.31167226533843, + "grad_norm": 0.759501576423645, + "learning_rate": 4.589108974606484e-05, + "loss": 0.1041, + "step": 59870 + }, + { + "epoch": 2.3120583806324566, + "grad_norm": 1.3260136842727661, + "learning_rate": 4.586534872646306e-05, + "loss": 0.2017, + "step": 59880 + }, + { + "epoch": 2.3124444959264836, + "grad_norm": 3.664853572845459, + "learning_rate": 4.583960770686127e-05, + "loss": 0.2322, + "step": 59890 + }, + { + "epoch": 2.3128306112205106, + "grad_norm": 0.9447748064994812, + "learning_rate": 4.581386668725948e-05, + "loss": 0.1124, + "step": 59900 + }, + { + "epoch": 2.313216726514537, + "grad_norm": 2.8684070110321045, + "learning_rate": 4.57881256676577e-05, + "loss": 0.1271, + "step": 59910 + }, + { + "epoch": 2.313602841808564, + "grad_norm": 2.4595460891723633, + "learning_rate": 4.576238464805591e-05, + "loss": 0.1695, + "step": 59920 + }, + { + "epoch": 2.3139889571025907, + "grad_norm": 0.8317721486091614, + "learning_rate": 4.5736643628454126e-05, + "loss": 0.1341, + "step": 59930 + }, + { + "epoch": 2.3143750723966177, + "grad_norm": 1.8924741744995117, + "learning_rate": 4.571090260885234e-05, + "loss": 0.3389, + "step": 59940 + }, + { + "epoch": 2.314761187690644, + "grad_norm": 0.22777517139911652, + "learning_rate": 4.568516158925055e-05, + "loss": 0.1406, + "step": 59950 + }, + { + "epoch": 2.315147302984671, + "grad_norm": 1.9150850772857666, + "learning_rate": 4.5659420569648766e-05, + "loss": 0.1615, + "step": 59960 + }, + { + "epoch": 2.315533418278698, + 
"grad_norm": 1.361603856086731, + "learning_rate": 4.563367955004698e-05, + "loss": 0.1616, + "step": 59970 + }, + { + "epoch": 2.3159195335727247, + "grad_norm": 1.1094087362289429, + "learning_rate": 4.560793853044519e-05, + "loss": 0.1923, + "step": 59980 + }, + { + "epoch": 2.3163056488667517, + "grad_norm": 0.5429170727729797, + "learning_rate": 4.5582197510843406e-05, + "loss": 0.2126, + "step": 59990 + }, + { + "epoch": 2.3166917641607783, + "grad_norm": 0.8391672968864441, + "learning_rate": 4.555645649124162e-05, + "loss": 0.246, + "step": 60000 + }, + { + "epoch": 2.3170778794548053, + "grad_norm": 0.6325327157974243, + "learning_rate": 4.553071547163983e-05, + "loss": 0.1385, + "step": 60010 + }, + { + "epoch": 2.317463994748832, + "grad_norm": 0.4999966025352478, + "learning_rate": 4.5504974452038046e-05, + "loss": 0.2083, + "step": 60020 + }, + { + "epoch": 2.317850110042859, + "grad_norm": 2.6003923416137695, + "learning_rate": 4.5479233432436266e-05, + "loss": 0.1553, + "step": 60030 + }, + { + "epoch": 2.318236225336886, + "grad_norm": 2.107546091079712, + "learning_rate": 4.545349241283447e-05, + "loss": 0.2252, + "step": 60040 + }, + { + "epoch": 2.3186223406309123, + "grad_norm": 0.4469972550868988, + "learning_rate": 4.5427751393232686e-05, + "loss": 0.1913, + "step": 60050 + }, + { + "epoch": 2.3190084559249393, + "grad_norm": 1.0861988067626953, + "learning_rate": 4.5402010373630906e-05, + "loss": 0.16, + "step": 60060 + }, + { + "epoch": 2.319394571218966, + "grad_norm": 1.6397405862808228, + "learning_rate": 4.537626935402912e-05, + "loss": 0.1527, + "step": 60070 + }, + { + "epoch": 2.319780686512993, + "grad_norm": 2.6963932514190674, + "learning_rate": 4.5350528334427326e-05, + "loss": 0.2445, + "step": 60080 + }, + { + "epoch": 2.3201668018070194, + "grad_norm": 1.8489919900894165, + "learning_rate": 4.532478731482554e-05, + "loss": 0.1972, + "step": 60090 + }, + { + "epoch": 2.3205529171010464, + "grad_norm": 1.108892560005188, + "learning_rate": 4.529904629522376e-05, + "loss": 0.1301, + "step": 60100 + }, + { + "epoch": 2.3209390323950734, + "grad_norm": 1.8046364784240723, + "learning_rate": 4.5273305275621966e-05, + "loss": 0.184, + "step": 60110 + }, + { + "epoch": 2.3213251476891, + "grad_norm": 0.346176415681839, + "learning_rate": 4.524756425602018e-05, + "loss": 0.0723, + "step": 60120 + }, + { + "epoch": 2.321711262983127, + "grad_norm": 0.8505159020423889, + "learning_rate": 4.52218232364184e-05, + "loss": 0.1994, + "step": 60130 + }, + { + "epoch": 2.3220973782771535, + "grad_norm": 1.0866034030914307, + "learning_rate": 4.519608221681661e-05, + "loss": 0.097, + "step": 60140 + }, + { + "epoch": 2.3224834935711804, + "grad_norm": 2.6288974285125732, + "learning_rate": 4.517034119721482e-05, + "loss": 0.1419, + "step": 60150 + }, + { + "epoch": 2.322869608865207, + "grad_norm": 0.4779375195503235, + "learning_rate": 4.514460017761304e-05, + "loss": 0.0999, + "step": 60160 + }, + { + "epoch": 2.323255724159234, + "grad_norm": 1.1789641380310059, + "learning_rate": 4.511885915801125e-05, + "loss": 0.1184, + "step": 60170 + }, + { + "epoch": 2.323641839453261, + "grad_norm": 0.409001886844635, + "learning_rate": 4.5093118138409466e-05, + "loss": 0.1913, + "step": 60180 + }, + { + "epoch": 2.3240279547472875, + "grad_norm": 1.5853062868118286, + "learning_rate": 4.506737711880768e-05, + "loss": 0.1941, + "step": 60190 + }, + { + "epoch": 2.3244140700413145, + "grad_norm": 3.547631025314331, + "learning_rate": 4.504163609920589e-05, + "loss": 0.2272, + 
"step": 60200 + }, + { + "epoch": 2.324800185335341, + "grad_norm": 0.3084076941013336, + "learning_rate": 4.5015895079604106e-05, + "loss": 0.1543, + "step": 60210 + }, + { + "epoch": 2.325186300629368, + "grad_norm": 0.055422622710466385, + "learning_rate": 4.499015406000232e-05, + "loss": 0.1878, + "step": 60220 + }, + { + "epoch": 2.3255724159233946, + "grad_norm": 0.7743698954582214, + "learning_rate": 4.496441304040053e-05, + "loss": 0.2342, + "step": 60230 + }, + { + "epoch": 2.3259585312174216, + "grad_norm": 0.940558671951294, + "learning_rate": 4.4938672020798746e-05, + "loss": 0.3158, + "step": 60240 + }, + { + "epoch": 2.3263446465114486, + "grad_norm": 0.35941219329833984, + "learning_rate": 4.491293100119696e-05, + "loss": 0.15, + "step": 60250 + }, + { + "epoch": 2.326730761805475, + "grad_norm": 0.5877255201339722, + "learning_rate": 4.488718998159517e-05, + "loss": 0.2713, + "step": 60260 + }, + { + "epoch": 2.327116877099502, + "grad_norm": 1.0649851560592651, + "learning_rate": 4.4861448961993386e-05, + "loss": 0.1789, + "step": 60270 + }, + { + "epoch": 2.3275029923935286, + "grad_norm": 0.9025186896324158, + "learning_rate": 4.4835707942391606e-05, + "loss": 0.0883, + "step": 60280 + }, + { + "epoch": 2.3278891076875556, + "grad_norm": 0.08007670938968658, + "learning_rate": 4.480996692278981e-05, + "loss": 0.2019, + "step": 60290 + }, + { + "epoch": 2.328275222981582, + "grad_norm": 0.2300902009010315, + "learning_rate": 4.4784225903188025e-05, + "loss": 0.1448, + "step": 60300 + }, + { + "epoch": 2.328661338275609, + "grad_norm": 0.3912908732891083, + "learning_rate": 4.4758484883586246e-05, + "loss": 0.1869, + "step": 60310 + }, + { + "epoch": 2.3290474535696357, + "grad_norm": 0.5869823098182678, + "learning_rate": 4.473274386398445e-05, + "loss": 0.0829, + "step": 60320 + }, + { + "epoch": 2.3294335688636627, + "grad_norm": 0.5597032308578491, + "learning_rate": 4.4707002844382665e-05, + "loss": 0.1639, + "step": 60330 + }, + { + "epoch": 2.3298196841576893, + "grad_norm": 0.3138620853424072, + "learning_rate": 4.468126182478088e-05, + "loss": 0.1028, + "step": 60340 + }, + { + "epoch": 2.3302057994517162, + "grad_norm": 0.10838694870471954, + "learning_rate": 4.46555208051791e-05, + "loss": 0.1406, + "step": 60350 + }, + { + "epoch": 2.3305919147457432, + "grad_norm": 1.1807544231414795, + "learning_rate": 4.4629779785577305e-05, + "loss": 0.1183, + "step": 60360 + }, + { + "epoch": 2.3309780300397698, + "grad_norm": 0.2713087499141693, + "learning_rate": 4.460403876597552e-05, + "loss": 0.108, + "step": 60370 + }, + { + "epoch": 2.3313641453337968, + "grad_norm": 0.8787125945091248, + "learning_rate": 4.457829774637374e-05, + "loss": 0.2446, + "step": 60380 + }, + { + "epoch": 2.3317502606278233, + "grad_norm": 1.3905388116836548, + "learning_rate": 4.455255672677195e-05, + "loss": 0.1485, + "step": 60390 + }, + { + "epoch": 2.3321363759218503, + "grad_norm": 1.5408064126968384, + "learning_rate": 4.452681570717016e-05, + "loss": 0.2076, + "step": 60400 + }, + { + "epoch": 2.332522491215877, + "grad_norm": 0.034131214022636414, + "learning_rate": 4.450107468756838e-05, + "loss": 0.0905, + "step": 60410 + }, + { + "epoch": 2.332908606509904, + "grad_norm": 1.067578673362732, + "learning_rate": 4.447533366796659e-05, + "loss": 0.1221, + "step": 60420 + }, + { + "epoch": 2.333294721803931, + "grad_norm": 1.5126134157180786, + "learning_rate": 4.44495926483648e-05, + "loss": 0.5139, + "step": 60430 + }, + { + "epoch": 2.3336808370979574, + "grad_norm": 
3.148021697998047, + "learning_rate": 4.442385162876302e-05, + "loss": 0.2504, + "step": 60440 + }, + { + "epoch": 2.3340669523919844, + "grad_norm": 0.7882575392723083, + "learning_rate": 4.439811060916123e-05, + "loss": 0.2593, + "step": 60450 + }, + { + "epoch": 2.334453067686011, + "grad_norm": 1.124514102935791, + "learning_rate": 4.4372369589559445e-05, + "loss": 0.303, + "step": 60460 + }, + { + "epoch": 2.334839182980038, + "grad_norm": 0.064745232462883, + "learning_rate": 4.434662856995766e-05, + "loss": 0.1734, + "step": 60470 + }, + { + "epoch": 2.3352252982740644, + "grad_norm": 0.08392655104398727, + "learning_rate": 4.432088755035587e-05, + "loss": 0.1492, + "step": 60480 + }, + { + "epoch": 2.3356114135680914, + "grad_norm": 0.9123765230178833, + "learning_rate": 4.4295146530754085e-05, + "loss": 0.1689, + "step": 60490 + }, + { + "epoch": 2.3359975288621184, + "grad_norm": 1.0740617513656616, + "learning_rate": 4.42694055111523e-05, + "loss": 0.1954, + "step": 60500 + }, + { + "epoch": 2.336383644156145, + "grad_norm": 0.409637987613678, + "learning_rate": 4.424366449155051e-05, + "loss": 0.2164, + "step": 60510 + }, + { + "epoch": 2.336769759450172, + "grad_norm": 2.4724161624908447, + "learning_rate": 4.4217923471948725e-05, + "loss": 0.2736, + "step": 60520 + }, + { + "epoch": 2.3371558747441985, + "grad_norm": 1.966937780380249, + "learning_rate": 4.4192182452346945e-05, + "loss": 0.2463, + "step": 60530 + }, + { + "epoch": 2.3375419900382255, + "grad_norm": 1.0093505382537842, + "learning_rate": 4.416644143274515e-05, + "loss": 0.1524, + "step": 60540 + }, + { + "epoch": 2.337928105332252, + "grad_norm": 0.9283536076545715, + "learning_rate": 4.4140700413143365e-05, + "loss": 0.1174, + "step": 60550 + }, + { + "epoch": 2.338314220626279, + "grad_norm": 1.4851809740066528, + "learning_rate": 4.4114959393541585e-05, + "loss": 0.1088, + "step": 60560 + }, + { + "epoch": 2.338700335920306, + "grad_norm": 2.666454315185547, + "learning_rate": 4.408921837393979e-05, + "loss": 0.186, + "step": 60570 + }, + { + "epoch": 2.3390864512143326, + "grad_norm": 0.34516963362693787, + "learning_rate": 4.4063477354338005e-05, + "loss": 0.1364, + "step": 60580 + }, + { + "epoch": 2.3394725665083596, + "grad_norm": 1.697920560836792, + "learning_rate": 4.4037736334736225e-05, + "loss": 0.1708, + "step": 60590 + }, + { + "epoch": 2.339858681802386, + "grad_norm": 1.542663812637329, + "learning_rate": 4.401199531513444e-05, + "loss": 0.2926, + "step": 60600 + }, + { + "epoch": 2.340244797096413, + "grad_norm": 1.080894112586975, + "learning_rate": 4.3986254295532645e-05, + "loss": 0.1114, + "step": 60610 + }, + { + "epoch": 2.3406309123904396, + "grad_norm": 0.7464519739151001, + "learning_rate": 4.396051327593086e-05, + "loss": 0.2846, + "step": 60620 + }, + { + "epoch": 2.3410170276844666, + "grad_norm": 1.4161779880523682, + "learning_rate": 4.393477225632908e-05, + "loss": 0.1181, + "step": 60630 + }, + { + "epoch": 2.3414031429784936, + "grad_norm": 0.5969855785369873, + "learning_rate": 4.390903123672729e-05, + "loss": 0.2574, + "step": 60640 + }, + { + "epoch": 2.34178925827252, + "grad_norm": 2.8204824924468994, + "learning_rate": 4.38832902171255e-05, + "loss": 0.1496, + "step": 60650 + }, + { + "epoch": 2.342175373566547, + "grad_norm": 0.5998751521110535, + "learning_rate": 4.385754919752372e-05, + "loss": 0.1849, + "step": 60660 + }, + { + "epoch": 2.3425614888605737, + "grad_norm": 0.5183271169662476, + "learning_rate": 4.383180817792193e-05, + "loss": 0.1577, + "step": 60670 
+ }, + { + "epoch": 2.3429476041546007, + "grad_norm": 3.008211374282837, + "learning_rate": 4.380606715832014e-05, + "loss": 0.2106, + "step": 60680 + }, + { + "epoch": 2.3433337194486272, + "grad_norm": 0.026534082368016243, + "learning_rate": 4.378032613871836e-05, + "loss": 0.0859, + "step": 60690 + }, + { + "epoch": 2.343719834742654, + "grad_norm": 0.7964476943016052, + "learning_rate": 4.375458511911657e-05, + "loss": 0.2276, + "step": 60700 + }, + { + "epoch": 2.3441059500366808, + "grad_norm": 0.9255203008651733, + "learning_rate": 4.3728844099514785e-05, + "loss": 0.1278, + "step": 60710 + }, + { + "epoch": 2.3444920653307078, + "grad_norm": 0.06721694767475128, + "learning_rate": 4.3703103079913e-05, + "loss": 0.188, + "step": 60720 + }, + { + "epoch": 2.3448781806247343, + "grad_norm": 1.5369101762771606, + "learning_rate": 4.367736206031121e-05, + "loss": 0.2286, + "step": 60730 + }, + { + "epoch": 2.3452642959187613, + "grad_norm": 2.3550243377685547, + "learning_rate": 4.3651621040709425e-05, + "loss": 0.1924, + "step": 60740 + }, + { + "epoch": 2.3456504112127883, + "grad_norm": 1.436240553855896, + "learning_rate": 4.362588002110764e-05, + "loss": 0.179, + "step": 60750 + }, + { + "epoch": 2.346036526506815, + "grad_norm": 4.520357131958008, + "learning_rate": 4.360013900150585e-05, + "loss": 0.3386, + "step": 60760 + }, + { + "epoch": 2.346422641800842, + "grad_norm": 2.983982563018799, + "learning_rate": 4.3574397981904065e-05, + "loss": 0.178, + "step": 60770 + }, + { + "epoch": 2.3468087570948684, + "grad_norm": 0.429884672164917, + "learning_rate": 4.354865696230228e-05, + "loss": 0.0962, + "step": 60780 + }, + { + "epoch": 2.3471948723888953, + "grad_norm": 0.5854440927505493, + "learning_rate": 4.352291594270049e-05, + "loss": 0.1934, + "step": 60790 + }, + { + "epoch": 2.347580987682922, + "grad_norm": 1.087031602859497, + "learning_rate": 4.3497174923098705e-05, + "loss": 0.1191, + "step": 60800 + }, + { + "epoch": 2.347967102976949, + "grad_norm": 1.5751805305480957, + "learning_rate": 4.3471433903496925e-05, + "loss": 0.147, + "step": 60810 + }, + { + "epoch": 2.348353218270976, + "grad_norm": 0.03129373490810394, + "learning_rate": 4.344569288389513e-05, + "loss": 0.1388, + "step": 60820 + }, + { + "epoch": 2.3487393335650024, + "grad_norm": 1.380611777305603, + "learning_rate": 4.3419951864293344e-05, + "loss": 0.2517, + "step": 60830 + }, + { + "epoch": 2.3491254488590294, + "grad_norm": 0.5196431279182434, + "learning_rate": 4.3394210844691565e-05, + "loss": 0.1825, + "step": 60840 + }, + { + "epoch": 2.349511564153056, + "grad_norm": 0.729242205619812, + "learning_rate": 4.336846982508978e-05, + "loss": 0.1716, + "step": 60850 + }, + { + "epoch": 2.349897679447083, + "grad_norm": 0.4489123225212097, + "learning_rate": 4.3342728805487984e-05, + "loss": 0.1318, + "step": 60860 + }, + { + "epoch": 2.3502837947411095, + "grad_norm": 0.41065114736557007, + "learning_rate": 4.33169877858862e-05, + "loss": 0.1579, + "step": 60870 + }, + { + "epoch": 2.3506699100351365, + "grad_norm": 1.2845816612243652, + "learning_rate": 4.329124676628442e-05, + "loss": 0.1613, + "step": 60880 + }, + { + "epoch": 2.3510560253291635, + "grad_norm": 1.17366623878479, + "learning_rate": 4.3265505746682624e-05, + "loss": 0.147, + "step": 60890 + }, + { + "epoch": 2.35144214062319, + "grad_norm": 1.3020472526550293, + "learning_rate": 4.323976472708084e-05, + "loss": 0.284, + "step": 60900 + }, + { + "epoch": 2.351828255917217, + "grad_norm": 0.9030712842941284, + "learning_rate": 
4.321402370747906e-05, + "loss": 0.2745, + "step": 60910 + }, + { + "epoch": 2.3522143712112435, + "grad_norm": 0.5040395259857178, + "learning_rate": 4.318828268787727e-05, + "loss": 0.1001, + "step": 60920 + }, + { + "epoch": 2.3526004865052705, + "grad_norm": 0.2728300094604492, + "learning_rate": 4.316254166827548e-05, + "loss": 0.1126, + "step": 60930 + }, + { + "epoch": 2.352986601799297, + "grad_norm": 0.5255390405654907, + "learning_rate": 4.31368006486737e-05, + "loss": 0.1743, + "step": 60940 + }, + { + "epoch": 2.353372717093324, + "grad_norm": 0.05678205192089081, + "learning_rate": 4.311105962907191e-05, + "loss": 0.2214, + "step": 60950 + }, + { + "epoch": 2.353758832387351, + "grad_norm": 0.9271873235702515, + "learning_rate": 4.3085318609470124e-05, + "loss": 0.2263, + "step": 60960 + }, + { + "epoch": 2.3541449476813776, + "grad_norm": 1.0438083410263062, + "learning_rate": 4.305957758986834e-05, + "loss": 0.3506, + "step": 60970 + }, + { + "epoch": 2.3545310629754046, + "grad_norm": 1.231704831123352, + "learning_rate": 4.303383657026655e-05, + "loss": 0.2058, + "step": 60980 + }, + { + "epoch": 2.354917178269431, + "grad_norm": 1.4421464204788208, + "learning_rate": 4.3008095550664764e-05, + "loss": 0.182, + "step": 60990 + }, + { + "epoch": 2.355303293563458, + "grad_norm": 0.883007287979126, + "learning_rate": 4.298235453106298e-05, + "loss": 0.1714, + "step": 61000 + }, + { + "epoch": 2.3556894088574847, + "grad_norm": 0.49014022946357727, + "learning_rate": 4.295661351146119e-05, + "loss": 0.0846, + "step": 61010 + }, + { + "epoch": 2.3560755241515117, + "grad_norm": 1.3218421936035156, + "learning_rate": 4.2930872491859404e-05, + "loss": 0.0946, + "step": 61020 + }, + { + "epoch": 2.3564616394455387, + "grad_norm": 0.08953634649515152, + "learning_rate": 4.290513147225762e-05, + "loss": 0.2867, + "step": 61030 + }, + { + "epoch": 2.356847754739565, + "grad_norm": 1.4117354154586792, + "learning_rate": 4.287939045265583e-05, + "loss": 0.2493, + "step": 61040 + }, + { + "epoch": 2.357233870033592, + "grad_norm": 0.7844822406768799, + "learning_rate": 4.2853649433054044e-05, + "loss": 0.2385, + "step": 61050 + }, + { + "epoch": 2.3576199853276187, + "grad_norm": 0.0865604355931282, + "learning_rate": 4.2827908413452264e-05, + "loss": 0.0795, + "step": 61060 + }, + { + "epoch": 2.3580061006216457, + "grad_norm": 0.49625343084335327, + "learning_rate": 4.280216739385047e-05, + "loss": 0.0894, + "step": 61070 + }, + { + "epoch": 2.3583922159156723, + "grad_norm": 0.039102040231227875, + "learning_rate": 4.2776426374248684e-05, + "loss": 0.1134, + "step": 61080 + }, + { + "epoch": 2.3587783312096993, + "grad_norm": 0.6378281116485596, + "learning_rate": 4.2750685354646904e-05, + "loss": 0.2716, + "step": 61090 + }, + { + "epoch": 2.3591644465037263, + "grad_norm": 1.9550119638442993, + "learning_rate": 4.272494433504511e-05, + "loss": 0.2008, + "step": 61100 + }, + { + "epoch": 2.359550561797753, + "grad_norm": 0.18372145295143127, + "learning_rate": 4.2699203315443324e-05, + "loss": 0.1762, + "step": 61110 + }, + { + "epoch": 2.35993667709178, + "grad_norm": 0.24232423305511475, + "learning_rate": 4.2673462295841544e-05, + "loss": 0.3316, + "step": 61120 + }, + { + "epoch": 2.3603227923858063, + "grad_norm": 1.474071741104126, + "learning_rate": 4.264772127623976e-05, + "loss": 0.1938, + "step": 61130 + }, + { + "epoch": 2.3607089076798333, + "grad_norm": 0.43742164969444275, + "learning_rate": 4.2621980256637964e-05, + "loss": 0.1667, + "step": 61140 + }, + { + "epoch": 
2.36109502297386, + "grad_norm": 0.505805253982544, + "learning_rate": 4.259623923703618e-05, + "loss": 0.2649, + "step": 61150 + }, + { + "epoch": 2.361481138267887, + "grad_norm": 0.8793296813964844, + "learning_rate": 4.25704982174344e-05, + "loss": 0.1231, + "step": 61160 + }, + { + "epoch": 2.3618672535619134, + "grad_norm": 0.19883646070957184, + "learning_rate": 4.254475719783261e-05, + "loss": 0.163, + "step": 61170 + }, + { + "epoch": 2.3622533688559404, + "grad_norm": 0.4133305847644806, + "learning_rate": 4.251901617823082e-05, + "loss": 0.1632, + "step": 61180 + }, + { + "epoch": 2.362639484149967, + "grad_norm": 0.8530174493789673, + "learning_rate": 4.249327515862904e-05, + "loss": 0.0851, + "step": 61190 + }, + { + "epoch": 2.363025599443994, + "grad_norm": 1.6462198495864868, + "learning_rate": 4.246753413902725e-05, + "loss": 0.2051, + "step": 61200 + }, + { + "epoch": 2.363411714738021, + "grad_norm": 1.284153699874878, + "learning_rate": 4.244179311942546e-05, + "loss": 0.227, + "step": 61210 + }, + { + "epoch": 2.3637978300320475, + "grad_norm": 0.5583304166793823, + "learning_rate": 4.241605209982368e-05, + "loss": 0.0665, + "step": 61220 + }, + { + "epoch": 2.3641839453260745, + "grad_norm": 1.0726197957992554, + "learning_rate": 4.239031108022189e-05, + "loss": 0.1465, + "step": 61230 + }, + { + "epoch": 2.364570060620101, + "grad_norm": 0.15589381754398346, + "learning_rate": 4.2364570060620104e-05, + "loss": 0.1599, + "step": 61240 + }, + { + "epoch": 2.364956175914128, + "grad_norm": 1.9759862422943115, + "learning_rate": 4.233882904101832e-05, + "loss": 0.2904, + "step": 61250 + }, + { + "epoch": 2.3653422912081545, + "grad_norm": 0.20566493272781372, + "learning_rate": 4.231308802141653e-05, + "loss": 0.2447, + "step": 61260 + }, + { + "epoch": 2.3657284065021815, + "grad_norm": 0.33343741297721863, + "learning_rate": 4.2287347001814744e-05, + "loss": 0.2157, + "step": 61270 + }, + { + "epoch": 2.3661145217962085, + "grad_norm": 0.6890573501586914, + "learning_rate": 4.226160598221296e-05, + "loss": 0.1866, + "step": 61280 + }, + { + "epoch": 2.366500637090235, + "grad_norm": 0.1372109055519104, + "learning_rate": 4.223586496261117e-05, + "loss": 0.1851, + "step": 61290 + }, + { + "epoch": 2.366886752384262, + "grad_norm": 0.8812543749809265, + "learning_rate": 4.2210123943009384e-05, + "loss": 0.1186, + "step": 61300 + }, + { + "epoch": 2.3672728676782886, + "grad_norm": 0.7651077508926392, + "learning_rate": 4.2184382923407604e-05, + "loss": 0.1168, + "step": 61310 + }, + { + "epoch": 2.3676589829723156, + "grad_norm": 0.886715292930603, + "learning_rate": 4.215864190380581e-05, + "loss": 0.1403, + "step": 61320 + }, + { + "epoch": 2.368045098266342, + "grad_norm": 1.4525467157363892, + "learning_rate": 4.2132900884204024e-05, + "loss": 0.0951, + "step": 61330 + }, + { + "epoch": 2.368431213560369, + "grad_norm": 1.490551233291626, + "learning_rate": 4.2107159864602244e-05, + "loss": 0.1127, + "step": 61340 + }, + { + "epoch": 2.368817328854396, + "grad_norm": 1.7452077865600586, + "learning_rate": 4.208141884500045e-05, + "loss": 0.0958, + "step": 61350 + }, + { + "epoch": 2.3692034441484227, + "grad_norm": 1.6857271194458008, + "learning_rate": 4.2055677825398664e-05, + "loss": 0.1731, + "step": 61360 + }, + { + "epoch": 2.3695895594424496, + "grad_norm": 0.5354145765304565, + "learning_rate": 4.2029936805796884e-05, + "loss": 0.1051, + "step": 61370 + }, + { + "epoch": 2.369975674736476, + "grad_norm": 0.18171580135822296, + "learning_rate": 
4.20041957861951e-05, + "loss": 0.1761, + "step": 61380 + }, + { + "epoch": 2.370361790030503, + "grad_norm": 1.021549940109253, + "learning_rate": 4.1978454766593303e-05, + "loss": 0.1949, + "step": 61390 + }, + { + "epoch": 2.3707479053245297, + "grad_norm": 1.4387668371200562, + "learning_rate": 4.195271374699152e-05, + "loss": 0.1864, + "step": 61400 + }, + { + "epoch": 2.3711340206185567, + "grad_norm": 0.24176666140556335, + "learning_rate": 4.192697272738974e-05, + "loss": 0.2683, + "step": 61410 + }, + { + "epoch": 2.3715201359125837, + "grad_norm": 1.2240315675735474, + "learning_rate": 4.190123170778795e-05, + "loss": 0.195, + "step": 61420 + }, + { + "epoch": 2.3719062512066102, + "grad_norm": 2.242389440536499, + "learning_rate": 4.187549068818616e-05, + "loss": 0.1074, + "step": 61430 + }, + { + "epoch": 2.3722923665006372, + "grad_norm": 0.7379412055015564, + "learning_rate": 4.184974966858438e-05, + "loss": 0.1394, + "step": 61440 + }, + { + "epoch": 2.372678481794664, + "grad_norm": 1.3384835720062256, + "learning_rate": 4.182400864898259e-05, + "loss": 0.248, + "step": 61450 + }, + { + "epoch": 2.3730645970886908, + "grad_norm": 0.23063971102237701, + "learning_rate": 4.17982676293808e-05, + "loss": 0.1458, + "step": 61460 + }, + { + "epoch": 2.3734507123827173, + "grad_norm": 0.6873703598976135, + "learning_rate": 4.177252660977902e-05, + "loss": 0.1315, + "step": 61470 + }, + { + "epoch": 2.3738368276767443, + "grad_norm": 1.462497591972351, + "learning_rate": 4.174678559017723e-05, + "loss": 0.1031, + "step": 61480 + }, + { + "epoch": 2.3742229429707713, + "grad_norm": 1.403594732284546, + "learning_rate": 4.172104457057544e-05, + "loss": 0.1962, + "step": 61490 + }, + { + "epoch": 2.374609058264798, + "grad_norm": 3.3132827281951904, + "learning_rate": 4.169530355097366e-05, + "loss": 0.243, + "step": 61500 + }, + { + "epoch": 2.374995173558825, + "grad_norm": 0.5474012494087219, + "learning_rate": 4.166956253137187e-05, + "loss": 0.1087, + "step": 61510 + }, + { + "epoch": 2.3753812888528514, + "grad_norm": 1.2518501281738281, + "learning_rate": 4.164382151177008e-05, + "loss": 0.1119, + "step": 61520 + }, + { + "epoch": 2.3757674041468784, + "grad_norm": 0.10591934621334076, + "learning_rate": 4.16180804921683e-05, + "loss": 0.238, + "step": 61530 + }, + { + "epoch": 2.376153519440905, + "grad_norm": 0.7095358967781067, + "learning_rate": 4.159233947256651e-05, + "loss": 0.1082, + "step": 61540 + }, + { + "epoch": 2.376539634734932, + "grad_norm": 0.09203200787305832, + "learning_rate": 4.156659845296472e-05, + "loss": 0.1178, + "step": 61550 + }, + { + "epoch": 2.376925750028959, + "grad_norm": 1.4663885831832886, + "learning_rate": 4.1540857433362937e-05, + "loss": 0.2273, + "step": 61560 + }, + { + "epoch": 2.3773118653229854, + "grad_norm": 0.9895615577697754, + "learning_rate": 4.151511641376115e-05, + "loss": 0.1222, + "step": 61570 + }, + { + "epoch": 2.3776979806170124, + "grad_norm": 0.7987017631530762, + "learning_rate": 4.148937539415936e-05, + "loss": 0.2083, + "step": 61580 + }, + { + "epoch": 2.378084095911039, + "grad_norm": 0.662470817565918, + "learning_rate": 4.146363437455758e-05, + "loss": 0.1489, + "step": 61590 + }, + { + "epoch": 2.378470211205066, + "grad_norm": 0.703076183795929, + "learning_rate": 4.143789335495579e-05, + "loss": 0.2176, + "step": 61600 + }, + { + "epoch": 2.3788563264990925, + "grad_norm": 0.6900975704193115, + "learning_rate": 4.1412152335354e-05, + "loss": 0.0967, + "step": 61610 + }, + { + "epoch": 2.3792424417931195, + 
"grad_norm": 0.4006218910217285, + "learning_rate": 4.138641131575222e-05, + "loss": 0.1943, + "step": 61620 + }, + { + "epoch": 2.379628557087146, + "grad_norm": 0.8131549954414368, + "learning_rate": 4.1360670296150437e-05, + "loss": 0.1458, + "step": 61630 + }, + { + "epoch": 2.380014672381173, + "grad_norm": 2.572120189666748, + "learning_rate": 4.133492927654864e-05, + "loss": 0.1892, + "step": 61640 + }, + { + "epoch": 2.3804007876751996, + "grad_norm": 1.40338134765625, + "learning_rate": 4.1309188256946856e-05, + "loss": 0.1081, + "step": 61650 + }, + { + "epoch": 2.3807869029692266, + "grad_norm": 2.1713101863861084, + "learning_rate": 4.1283447237345076e-05, + "loss": 0.1839, + "step": 61660 + }, + { + "epoch": 2.3811730182632536, + "grad_norm": 1.1773313283920288, + "learning_rate": 4.125770621774328e-05, + "loss": 0.1065, + "step": 61670 + }, + { + "epoch": 2.38155913355728, + "grad_norm": 2.314040184020996, + "learning_rate": 4.1231965198141496e-05, + "loss": 0.2298, + "step": 61680 + }, + { + "epoch": 2.381945248851307, + "grad_norm": 1.7708461284637451, + "learning_rate": 4.1206224178539716e-05, + "loss": 0.1068, + "step": 61690 + }, + { + "epoch": 2.3823313641453336, + "grad_norm": 0.168818861246109, + "learning_rate": 4.118048315893793e-05, + "loss": 0.0979, + "step": 61700 + }, + { + "epoch": 2.3827174794393606, + "grad_norm": 2.80302357673645, + "learning_rate": 4.1154742139336136e-05, + "loss": 0.3275, + "step": 61710 + }, + { + "epoch": 2.383103594733387, + "grad_norm": 0.5709852576255798, + "learning_rate": 4.1129001119734356e-05, + "loss": 0.1523, + "step": 61720 + }, + { + "epoch": 2.383489710027414, + "grad_norm": 0.4733193516731262, + "learning_rate": 4.110326010013257e-05, + "loss": 0.0958, + "step": 61730 + }, + { + "epoch": 2.383875825321441, + "grad_norm": 0.3468289375305176, + "learning_rate": 4.107751908053078e-05, + "loss": 0.1437, + "step": 61740 + }, + { + "epoch": 2.3842619406154677, + "grad_norm": 0.7683085203170776, + "learning_rate": 4.1051778060928996e-05, + "loss": 0.1915, + "step": 61750 + }, + { + "epoch": 2.3846480559094947, + "grad_norm": 0.9539376497268677, + "learning_rate": 4.102603704132721e-05, + "loss": 0.13, + "step": 61760 + }, + { + "epoch": 2.3850341712035212, + "grad_norm": 1.247579574584961, + "learning_rate": 4.100029602172542e-05, + "loss": 0.1212, + "step": 61770 + }, + { + "epoch": 2.3854202864975482, + "grad_norm": 0.5379541516304016, + "learning_rate": 4.0974555002123636e-05, + "loss": 0.4552, + "step": 61780 + }, + { + "epoch": 2.3858064017915748, + "grad_norm": 1.4555822610855103, + "learning_rate": 4.094881398252185e-05, + "loss": 0.1677, + "step": 61790 + }, + { + "epoch": 2.3861925170856018, + "grad_norm": 2.210245132446289, + "learning_rate": 4.092307296292006e-05, + "loss": 0.1099, + "step": 61800 + }, + { + "epoch": 2.3865786323796288, + "grad_norm": 0.2830033004283905, + "learning_rate": 4.0897331943318276e-05, + "loss": 0.0844, + "step": 61810 + }, + { + "epoch": 2.3869647476736553, + "grad_norm": 0.5923789739608765, + "learning_rate": 4.087159092371649e-05, + "loss": 0.0668, + "step": 61820 + }, + { + "epoch": 2.3873508629676823, + "grad_norm": 2.1576321125030518, + "learning_rate": 4.08458499041147e-05, + "loss": 0.2333, + "step": 61830 + }, + { + "epoch": 2.387736978261709, + "grad_norm": 0.09188230335712433, + "learning_rate": 4.082010888451292e-05, + "loss": 0.1849, + "step": 61840 + }, + { + "epoch": 2.388123093555736, + "grad_norm": 0.5188024640083313, + "learning_rate": 4.079436786491113e-05, + "loss": 0.0781, 
+ "step": 61850 + }, + { + "epoch": 2.3885092088497624, + "grad_norm": 0.33355507254600525, + "learning_rate": 4.076862684530934e-05, + "loss": 0.1886, + "step": 61860 + }, + { + "epoch": 2.3888953241437894, + "grad_norm": 0.25753054022789, + "learning_rate": 4.074288582570756e-05, + "loss": 0.1492, + "step": 61870 + }, + { + "epoch": 2.3892814394378163, + "grad_norm": 0.47389110922813416, + "learning_rate": 4.071714480610577e-05, + "loss": 0.1576, + "step": 61880 + }, + { + "epoch": 2.389667554731843, + "grad_norm": 0.6938667297363281, + "learning_rate": 4.069140378650398e-05, + "loss": 0.1554, + "step": 61890 + }, + { + "epoch": 2.39005367002587, + "grad_norm": 0.5270907282829285, + "learning_rate": 4.06656627669022e-05, + "loss": 0.0905, + "step": 61900 + }, + { + "epoch": 2.3904397853198964, + "grad_norm": 0.972940981388092, + "learning_rate": 4.0639921747300416e-05, + "loss": 0.1008, + "step": 61910 + }, + { + "epoch": 2.3908259006139234, + "grad_norm": 0.3339834213256836, + "learning_rate": 4.061418072769862e-05, + "loss": 0.124, + "step": 61920 + }, + { + "epoch": 2.39121201590795, + "grad_norm": 0.40384066104888916, + "learning_rate": 4.0588439708096836e-05, + "loss": 0.1318, + "step": 61930 + }, + { + "epoch": 2.391598131201977, + "grad_norm": 1.3399138450622559, + "learning_rate": 4.0562698688495056e-05, + "loss": 0.2285, + "step": 61940 + }, + { + "epoch": 2.391984246496004, + "grad_norm": 1.2441486120224, + "learning_rate": 4.053695766889327e-05, + "loss": 0.1842, + "step": 61950 + }, + { + "epoch": 2.3923703617900305, + "grad_norm": 0.18500332534313202, + "learning_rate": 4.0511216649291476e-05, + "loss": 0.124, + "step": 61960 + }, + { + "epoch": 2.3927564770840575, + "grad_norm": 4.335320949554443, + "learning_rate": 4.0485475629689696e-05, + "loss": 0.2101, + "step": 61970 + }, + { + "epoch": 2.393142592378084, + "grad_norm": 1.7215917110443115, + "learning_rate": 4.045973461008791e-05, + "loss": 0.1828, + "step": 61980 + }, + { + "epoch": 2.393528707672111, + "grad_norm": 1.3829667568206787, + "learning_rate": 4.0433993590486116e-05, + "loss": 0.1872, + "step": 61990 + }, + { + "epoch": 2.3939148229661376, + "grad_norm": 0.8047557473182678, + "learning_rate": 4.0408252570884336e-05, + "loss": 0.195, + "step": 62000 + }, + { + "epoch": 2.3943009382601645, + "grad_norm": 0.04885184019804001, + "learning_rate": 4.038251155128255e-05, + "loss": 0.1502, + "step": 62010 + }, + { + "epoch": 2.394687053554191, + "grad_norm": 2.9263839721679688, + "learning_rate": 4.035677053168076e-05, + "loss": 0.2479, + "step": 62020 + }, + { + "epoch": 2.395073168848218, + "grad_norm": 1.2394524812698364, + "learning_rate": 4.0331029512078976e-05, + "loss": 0.1221, + "step": 62030 + }, + { + "epoch": 2.3954592841422446, + "grad_norm": 1.1224110126495361, + "learning_rate": 4.030528849247719e-05, + "loss": 0.1172, + "step": 62040 + }, + { + "epoch": 2.3958453994362716, + "grad_norm": 1.0132677555084229, + "learning_rate": 4.02795474728754e-05, + "loss": 0.2585, + "step": 62050 + }, + { + "epoch": 2.3962315147302986, + "grad_norm": 1.7612736225128174, + "learning_rate": 4.0253806453273616e-05, + "loss": 0.1449, + "step": 62060 + }, + { + "epoch": 2.396617630024325, + "grad_norm": 2.687474012374878, + "learning_rate": 4.022806543367183e-05, + "loss": 0.2116, + "step": 62070 + }, + { + "epoch": 2.397003745318352, + "grad_norm": 0.9632325768470764, + "learning_rate": 4.020232441407004e-05, + "loss": 0.233, + "step": 62080 + }, + { + "epoch": 2.3973898606123787, + "grad_norm": 0.385966956615448, + 
"learning_rate": 4.017658339446826e-05, + "loss": 0.2557, + "step": 62090 + }, + { + "epoch": 2.3977759759064057, + "grad_norm": 0.18261398375034332, + "learning_rate": 4.015084237486647e-05, + "loss": 0.0792, + "step": 62100 + }, + { + "epoch": 2.3981620912004322, + "grad_norm": 0.07081570476293564, + "learning_rate": 4.012510135526468e-05, + "loss": 0.121, + "step": 62110 + }, + { + "epoch": 2.398548206494459, + "grad_norm": 1.5726689100265503, + "learning_rate": 4.00993603356629e-05, + "loss": 0.235, + "step": 62120 + }, + { + "epoch": 2.398934321788486, + "grad_norm": 1.1276930570602417, + "learning_rate": 4.007361931606111e-05, + "loss": 0.2191, + "step": 62130 + }, + { + "epoch": 2.3993204370825127, + "grad_norm": 1.8213441371917725, + "learning_rate": 4.004787829645932e-05, + "loss": 0.2588, + "step": 62140 + }, + { + "epoch": 2.3997065523765397, + "grad_norm": 1.2340245246887207, + "learning_rate": 4.002213727685754e-05, + "loss": 0.2816, + "step": 62150 + }, + { + "epoch": 2.4000926676705663, + "grad_norm": 1.6360499858856201, + "learning_rate": 3.9996396257255756e-05, + "loss": 0.1142, + "step": 62160 + }, + { + "epoch": 2.4004787829645933, + "grad_norm": 0.48215198516845703, + "learning_rate": 3.997065523765396e-05, + "loss": 0.1013, + "step": 62170 + }, + { + "epoch": 2.40086489825862, + "grad_norm": 0.04493289813399315, + "learning_rate": 3.9944914218052175e-05, + "loss": 0.2127, + "step": 62180 + }, + { + "epoch": 2.401251013552647, + "grad_norm": 0.3863857686519623, + "learning_rate": 3.9919173198450396e-05, + "loss": 0.1712, + "step": 62190 + }, + { + "epoch": 2.401637128846674, + "grad_norm": 2.209010362625122, + "learning_rate": 3.98934321788486e-05, + "loss": 0.1541, + "step": 62200 + }, + { + "epoch": 2.4020232441407003, + "grad_norm": 0.5304957032203674, + "learning_rate": 3.9867691159246815e-05, + "loss": 0.2147, + "step": 62210 + }, + { + "epoch": 2.4024093594347273, + "grad_norm": 0.660261332988739, + "learning_rate": 3.9841950139645035e-05, + "loss": 0.054, + "step": 62220 + }, + { + "epoch": 2.402795474728754, + "grad_norm": 0.3104497194290161, + "learning_rate": 3.981620912004325e-05, + "loss": 0.1164, + "step": 62230 + }, + { + "epoch": 2.403181590022781, + "grad_norm": 0.6475027799606323, + "learning_rate": 3.9790468100441455e-05, + "loss": 0.1258, + "step": 62240 + }, + { + "epoch": 2.4035677053168074, + "grad_norm": 0.16110478341579437, + "learning_rate": 3.9764727080839675e-05, + "loss": 0.1574, + "step": 62250 + }, + { + "epoch": 2.4039538206108344, + "grad_norm": 2.295118808746338, + "learning_rate": 3.973898606123789e-05, + "loss": 0.1732, + "step": 62260 + }, + { + "epoch": 2.4043399359048614, + "grad_norm": 1.4980134963989258, + "learning_rate": 3.97132450416361e-05, + "loss": 0.2441, + "step": 62270 + }, + { + "epoch": 2.404726051198888, + "grad_norm": 1.1637049913406372, + "learning_rate": 3.9687504022034315e-05, + "loss": 0.2177, + "step": 62280 + }, + { + "epoch": 2.405112166492915, + "grad_norm": 0.2586102783679962, + "learning_rate": 3.966176300243253e-05, + "loss": 0.1486, + "step": 62290 + }, + { + "epoch": 2.4054982817869415, + "grad_norm": 1.9430426359176636, + "learning_rate": 3.963602198283074e-05, + "loss": 0.1609, + "step": 62300 + }, + { + "epoch": 2.4058843970809685, + "grad_norm": 1.1216020584106445, + "learning_rate": 3.9610280963228955e-05, + "loss": 0.1192, + "step": 62310 + }, + { + "epoch": 2.406270512374995, + "grad_norm": 0.34984323382377625, + "learning_rate": 3.958453994362717e-05, + "loss": 0.104, + "step": 62320 + }, + { + 
"epoch": 2.406656627669022, + "grad_norm": 3.057056427001953, + "learning_rate": 3.955879892402538e-05, + "loss": 0.259, + "step": 62330 + }, + { + "epoch": 2.407042742963049, + "grad_norm": 1.8370370864868164, + "learning_rate": 3.9533057904423595e-05, + "loss": 0.2345, + "step": 62340 + }, + { + "epoch": 2.4074288582570755, + "grad_norm": 0.6045883297920227, + "learning_rate": 3.950731688482181e-05, + "loss": 0.188, + "step": 62350 + }, + { + "epoch": 2.4078149735511025, + "grad_norm": 0.20036596059799194, + "learning_rate": 3.948157586522002e-05, + "loss": 0.1039, + "step": 62360 + }, + { + "epoch": 2.408201088845129, + "grad_norm": 0.1816219538450241, + "learning_rate": 3.945583484561824e-05, + "loss": 0.1453, + "step": 62370 + }, + { + "epoch": 2.408587204139156, + "grad_norm": 2.150385856628418, + "learning_rate": 3.943009382601645e-05, + "loss": 0.1803, + "step": 62380 + }, + { + "epoch": 2.4089733194331826, + "grad_norm": 1.5039875507354736, + "learning_rate": 3.940435280641466e-05, + "loss": 0.1951, + "step": 62390 + }, + { + "epoch": 2.4093594347272096, + "grad_norm": 1.9124608039855957, + "learning_rate": 3.937861178681288e-05, + "loss": 0.1488, + "step": 62400 + }, + { + "epoch": 2.4097455500212366, + "grad_norm": 0.5086666345596313, + "learning_rate": 3.9352870767211095e-05, + "loss": 0.2198, + "step": 62410 + }, + { + "epoch": 2.410131665315263, + "grad_norm": 0.7198240160942078, + "learning_rate": 3.93271297476093e-05, + "loss": 0.104, + "step": 62420 + }, + { + "epoch": 2.41051778060929, + "grad_norm": 0.22373056411743164, + "learning_rate": 3.930138872800752e-05, + "loss": 0.1572, + "step": 62430 + }, + { + "epoch": 2.4109038959033167, + "grad_norm": 0.58324134349823, + "learning_rate": 3.9275647708405735e-05, + "loss": 0.1558, + "step": 62440 + }, + { + "epoch": 2.4112900111973437, + "grad_norm": 0.5554331541061401, + "learning_rate": 3.924990668880394e-05, + "loss": 0.1776, + "step": 62450 + }, + { + "epoch": 2.41167612649137, + "grad_norm": 0.4414098262786865, + "learning_rate": 3.9224165669202155e-05, + "loss": 0.1029, + "step": 62460 + }, + { + "epoch": 2.412062241785397, + "grad_norm": 1.496374487876892, + "learning_rate": 3.9198424649600375e-05, + "loss": 0.1948, + "step": 62470 + }, + { + "epoch": 2.4124483570794237, + "grad_norm": 1.6268385648727417, + "learning_rate": 3.917268362999859e-05, + "loss": 0.1853, + "step": 62480 + }, + { + "epoch": 2.4128344723734507, + "grad_norm": 2.2693099975585938, + "learning_rate": 3.9146942610396795e-05, + "loss": 0.2076, + "step": 62490 + }, + { + "epoch": 2.4132205876674773, + "grad_norm": 1.1219795942306519, + "learning_rate": 3.9121201590795015e-05, + "loss": 0.1186, + "step": 62500 + }, + { + "epoch": 2.4136067029615043, + "grad_norm": 0.7887373566627502, + "learning_rate": 3.909546057119323e-05, + "loss": 0.1996, + "step": 62510 + }, + { + "epoch": 2.4139928182555312, + "grad_norm": 0.256099671125412, + "learning_rate": 3.906971955159144e-05, + "loss": 0.0926, + "step": 62520 + }, + { + "epoch": 2.414378933549558, + "grad_norm": 0.6383737921714783, + "learning_rate": 3.9043978531989655e-05, + "loss": 0.1383, + "step": 62530 + }, + { + "epoch": 2.414765048843585, + "grad_norm": 1.5678856372833252, + "learning_rate": 3.901823751238787e-05, + "loss": 0.1486, + "step": 62540 + }, + { + "epoch": 2.4151511641376113, + "grad_norm": 0.11567826569080353, + "learning_rate": 3.899249649278608e-05, + "loss": 0.1031, + "step": 62550 + }, + { + "epoch": 2.4155372794316383, + "grad_norm": 1.3567986488342285, + "learning_rate": 
3.8966755473184295e-05, + "loss": 0.1964, + "step": 62560 + }, + { + "epoch": 2.415923394725665, + "grad_norm": 0.1885988563299179, + "learning_rate": 3.894101445358251e-05, + "loss": 0.0998, + "step": 62570 + }, + { + "epoch": 2.416309510019692, + "grad_norm": 0.7068611979484558, + "learning_rate": 3.891527343398072e-05, + "loss": 0.161, + "step": 62580 + }, + { + "epoch": 2.416695625313719, + "grad_norm": 0.34557539224624634, + "learning_rate": 3.8889532414378935e-05, + "loss": 0.143, + "step": 62590 + }, + { + "epoch": 2.4170817406077454, + "grad_norm": 1.6846903562545776, + "learning_rate": 3.886379139477715e-05, + "loss": 0.189, + "step": 62600 + }, + { + "epoch": 2.4174678559017724, + "grad_norm": 0.9236536026000977, + "learning_rate": 3.883805037517536e-05, + "loss": 0.2385, + "step": 62610 + }, + { + "epoch": 2.417853971195799, + "grad_norm": 2.0287728309631348, + "learning_rate": 3.881230935557358e-05, + "loss": 0.3649, + "step": 62620 + }, + { + "epoch": 2.418240086489826, + "grad_norm": 1.3650734424591064, + "learning_rate": 3.878656833597179e-05, + "loss": 0.1766, + "step": 62630 + }, + { + "epoch": 2.4186262017838525, + "grad_norm": 0.44368478655815125, + "learning_rate": 3.876082731637e-05, + "loss": 0.1456, + "step": 62640 + }, + { + "epoch": 2.4190123170778794, + "grad_norm": 1.93278169631958, + "learning_rate": 3.873508629676822e-05, + "loss": 0.1568, + "step": 62650 + }, + { + "epoch": 2.4193984323719064, + "grad_norm": 0.22828684747219086, + "learning_rate": 3.870934527716643e-05, + "loss": 0.1003, + "step": 62660 + }, + { + "epoch": 2.419784547665933, + "grad_norm": 0.796909749507904, + "learning_rate": 3.868360425756464e-05, + "loss": 0.0777, + "step": 62670 + }, + { + "epoch": 2.42017066295996, + "grad_norm": 0.8624141812324524, + "learning_rate": 3.865786323796286e-05, + "loss": 0.2598, + "step": 62680 + }, + { + "epoch": 2.4205567782539865, + "grad_norm": 1.9208048582077026, + "learning_rate": 3.8632122218361075e-05, + "loss": 0.3543, + "step": 62690 + }, + { + "epoch": 2.4209428935480135, + "grad_norm": 0.8410032987594604, + "learning_rate": 3.860638119875928e-05, + "loss": 0.2511, + "step": 62700 + }, + { + "epoch": 2.42132900884204, + "grad_norm": 0.2925935387611389, + "learning_rate": 3.8580640179157494e-05, + "loss": 0.1085, + "step": 62710 + }, + { + "epoch": 2.421715124136067, + "grad_norm": 2.3135852813720703, + "learning_rate": 3.8554899159555715e-05, + "loss": 0.235, + "step": 62720 + }, + { + "epoch": 2.422101239430094, + "grad_norm": 1.9191985130310059, + "learning_rate": 3.852915813995393e-05, + "loss": 0.1893, + "step": 62730 + }, + { + "epoch": 2.4224873547241206, + "grad_norm": 1.2339379787445068, + "learning_rate": 3.8503417120352134e-05, + "loss": 0.1778, + "step": 62740 + }, + { + "epoch": 2.4228734700181476, + "grad_norm": 0.09992465376853943, + "learning_rate": 3.8477676100750355e-05, + "loss": 0.1301, + "step": 62750 + }, + { + "epoch": 2.423259585312174, + "grad_norm": 0.13811521232128143, + "learning_rate": 3.845193508114857e-05, + "loss": 0.1062, + "step": 62760 + }, + { + "epoch": 2.423645700606201, + "grad_norm": 2.9451167583465576, + "learning_rate": 3.8426194061546774e-05, + "loss": 0.086, + "step": 62770 + }, + { + "epoch": 2.4240318159002276, + "grad_norm": 0.23093783855438232, + "learning_rate": 3.8400453041944994e-05, + "loss": 0.0962, + "step": 62780 + }, + { + "epoch": 2.4244179311942546, + "grad_norm": 0.10238637775182724, + "learning_rate": 3.837471202234321e-05, + "loss": 0.1338, + "step": 62790 + }, + { + "epoch": 
2.4248040464882816, + "grad_norm": 0.4013136029243469, + "learning_rate": 3.834897100274142e-05, + "loss": 0.3002, + "step": 62800 + }, + { + "epoch": 2.425190161782308, + "grad_norm": 0.3969825804233551, + "learning_rate": 3.8323229983139634e-05, + "loss": 0.2349, + "step": 62810 + }, + { + "epoch": 2.425576277076335, + "grad_norm": 0.9879517555236816, + "learning_rate": 3.829748896353785e-05, + "loss": 0.2564, + "step": 62820 + }, + { + "epoch": 2.4259623923703617, + "grad_norm": 1.5865511894226074, + "learning_rate": 3.827174794393606e-05, + "loss": 0.19, + "step": 62830 + }, + { + "epoch": 2.4263485076643887, + "grad_norm": 2.8750438690185547, + "learning_rate": 3.8246006924334274e-05, + "loss": 0.1515, + "step": 62840 + }, + { + "epoch": 2.4267346229584152, + "grad_norm": 0.9555363059043884, + "learning_rate": 3.822026590473249e-05, + "loss": 0.137, + "step": 62850 + }, + { + "epoch": 2.4271207382524422, + "grad_norm": 0.10716754198074341, + "learning_rate": 3.81945248851307e-05, + "loss": 0.2019, + "step": 62860 + }, + { + "epoch": 2.4275068535464692, + "grad_norm": 2.1091630458831787, + "learning_rate": 3.8168783865528914e-05, + "loss": 0.2126, + "step": 62870 + }, + { + "epoch": 2.4278929688404958, + "grad_norm": 2.2296948432922363, + "learning_rate": 3.814304284592713e-05, + "loss": 0.2098, + "step": 62880 + }, + { + "epoch": 2.4282790841345228, + "grad_norm": 1.1145920753479004, + "learning_rate": 3.811730182632534e-05, + "loss": 0.0998, + "step": 62890 + }, + { + "epoch": 2.4286651994285493, + "grad_norm": 0.6394558548927307, + "learning_rate": 3.809156080672356e-05, + "loss": 0.1766, + "step": 62900 + }, + { + "epoch": 2.4290513147225763, + "grad_norm": 0.16177436709403992, + "learning_rate": 3.806581978712177e-05, + "loss": 0.1776, + "step": 62910 + }, + { + "epoch": 2.429437430016603, + "grad_norm": 1.643153190612793, + "learning_rate": 3.804007876751998e-05, + "loss": 0.1908, + "step": 62920 + }, + { + "epoch": 2.42982354531063, + "grad_norm": 3.011587619781494, + "learning_rate": 3.80143377479182e-05, + "loss": 0.1595, + "step": 62930 + }, + { + "epoch": 2.4302096606046564, + "grad_norm": 1.1857898235321045, + "learning_rate": 3.7988596728316414e-05, + "loss": 0.1117, + "step": 62940 + }, + { + "epoch": 2.4305957758986834, + "grad_norm": 0.984836995601654, + "learning_rate": 3.796285570871462e-05, + "loss": 0.0888, + "step": 62950 + }, + { + "epoch": 2.43098189119271, + "grad_norm": 0.2273918092250824, + "learning_rate": 3.7937114689112834e-05, + "loss": 0.0998, + "step": 62960 + }, + { + "epoch": 2.431368006486737, + "grad_norm": 0.6913338303565979, + "learning_rate": 3.7911373669511054e-05, + "loss": 0.1549, + "step": 62970 + }, + { + "epoch": 2.431754121780764, + "grad_norm": 0.2670879364013672, + "learning_rate": 3.788563264990926e-05, + "loss": 0.1931, + "step": 62980 + }, + { + "epoch": 2.4321402370747904, + "grad_norm": 0.3435567021369934, + "learning_rate": 3.7859891630307474e-05, + "loss": 0.1872, + "step": 62990 + }, + { + "epoch": 2.4325263523688174, + "grad_norm": 2.289534091949463, + "learning_rate": 3.7834150610705694e-05, + "loss": 0.1371, + "step": 63000 + }, + { + "epoch": 2.432912467662844, + "grad_norm": 0.5831142067909241, + "learning_rate": 3.780840959110391e-05, + "loss": 0.1779, + "step": 63010 + }, + { + "epoch": 2.433298582956871, + "grad_norm": 0.4865301847457886, + "learning_rate": 3.7782668571502114e-05, + "loss": 0.2069, + "step": 63020 + }, + { + "epoch": 2.4336846982508975, + "grad_norm": 0.9294113516807556, + "learning_rate": 
3.7756927551900334e-05, + "loss": 0.2059, + "step": 63030 + }, + { + "epoch": 2.4340708135449245, + "grad_norm": 1.044704794883728, + "learning_rate": 3.773118653229855e-05, + "loss": 0.2075, + "step": 63040 + }, + { + "epoch": 2.4344569288389515, + "grad_norm": 0.07156316190958023, + "learning_rate": 3.770544551269676e-05, + "loss": 0.0827, + "step": 63050 + }, + { + "epoch": 2.434843044132978, + "grad_norm": 1.0131940841674805, + "learning_rate": 3.7679704493094974e-05, + "loss": 0.0786, + "step": 63060 + }, + { + "epoch": 2.435229159427005, + "grad_norm": 0.8649851679801941, + "learning_rate": 3.765396347349319e-05, + "loss": 0.1476, + "step": 63070 + }, + { + "epoch": 2.4356152747210316, + "grad_norm": 4.939096927642822, + "learning_rate": 3.76282224538914e-05, + "loss": 0.2188, + "step": 63080 + }, + { + "epoch": 2.4360013900150586, + "grad_norm": 0.4139706790447235, + "learning_rate": 3.7602481434289614e-05, + "loss": 0.281, + "step": 63090 + }, + { + "epoch": 2.436387505309085, + "grad_norm": 0.0399312861263752, + "learning_rate": 3.757674041468783e-05, + "loss": 0.3068, + "step": 63100 + }, + { + "epoch": 2.436773620603112, + "grad_norm": 0.7237934470176697, + "learning_rate": 3.755099939508604e-05, + "loss": 0.2222, + "step": 63110 + }, + { + "epoch": 2.437159735897139, + "grad_norm": 0.16770142316818237, + "learning_rate": 3.7525258375484254e-05, + "loss": 0.1302, + "step": 63120 + }, + { + "epoch": 2.4375458511911656, + "grad_norm": 1.9887669086456299, + "learning_rate": 3.749951735588247e-05, + "loss": 0.0794, + "step": 63130 + }, + { + "epoch": 2.4379319664851926, + "grad_norm": 2.856318712234497, + "learning_rate": 3.747377633628068e-05, + "loss": 0.0996, + "step": 63140 + }, + { + "epoch": 2.438318081779219, + "grad_norm": 0.9471076130867004, + "learning_rate": 3.74480353166789e-05, + "loss": 0.1915, + "step": 63150 + }, + { + "epoch": 2.438704197073246, + "grad_norm": 1.4671097993850708, + "learning_rate": 3.742229429707711e-05, + "loss": 0.1851, + "step": 63160 + }, + { + "epoch": 2.4390903123672727, + "grad_norm": 0.3654942810535431, + "learning_rate": 3.739655327747532e-05, + "loss": 0.1091, + "step": 63170 + }, + { + "epoch": 2.4394764276612997, + "grad_norm": 0.8947110176086426, + "learning_rate": 3.737081225787354e-05, + "loss": 0.1741, + "step": 63180 + }, + { + "epoch": 2.4398625429553267, + "grad_norm": 1.3367135524749756, + "learning_rate": 3.7345071238271754e-05, + "loss": 0.2007, + "step": 63190 + }, + { + "epoch": 2.440248658249353, + "grad_norm": 2.484145164489746, + "learning_rate": 3.731933021866996e-05, + "loss": 0.1794, + "step": 63200 + }, + { + "epoch": 2.44063477354338, + "grad_norm": 1.0260628461837769, + "learning_rate": 3.729358919906818e-05, + "loss": 0.3053, + "step": 63210 + }, + { + "epoch": 2.4410208888374068, + "grad_norm": 1.0322519540786743, + "learning_rate": 3.7267848179466394e-05, + "loss": 0.1826, + "step": 63220 + }, + { + "epoch": 2.4414070041314337, + "grad_norm": 0.6321549415588379, + "learning_rate": 3.72421071598646e-05, + "loss": 0.1561, + "step": 63230 + }, + { + "epoch": 2.4417931194254603, + "grad_norm": 1.2646952867507935, + "learning_rate": 3.7216366140262814e-05, + "loss": 0.0833, + "step": 63240 + }, + { + "epoch": 2.4421792347194873, + "grad_norm": 1.1415789127349854, + "learning_rate": 3.7190625120661034e-05, + "loss": 0.3039, + "step": 63250 + }, + { + "epoch": 2.4425653500135143, + "grad_norm": 1.143133521080017, + "learning_rate": 3.716488410105925e-05, + "loss": 0.1585, + "step": 63260 + }, + { + "epoch": 
2.442951465307541, + "grad_norm": 0.32217004895210266, + "learning_rate": 3.7139143081457453e-05, + "loss": 0.1776, + "step": 63270 + }, + { + "epoch": 2.443337580601568, + "grad_norm": 0.04204453527927399, + "learning_rate": 3.7113402061855674e-05, + "loss": 0.1966, + "step": 63280 + }, + { + "epoch": 2.4437236958955943, + "grad_norm": 3.0139830112457275, + "learning_rate": 3.708766104225389e-05, + "loss": 0.2323, + "step": 63290 + }, + { + "epoch": 2.4441098111896213, + "grad_norm": 0.6170455813407898, + "learning_rate": 3.70619200226521e-05, + "loss": 0.18, + "step": 63300 + }, + { + "epoch": 2.444495926483648, + "grad_norm": 0.08699564635753632, + "learning_rate": 3.7036179003050313e-05, + "loss": 0.2548, + "step": 63310 + }, + { + "epoch": 2.444882041777675, + "grad_norm": 0.18672427535057068, + "learning_rate": 3.701043798344853e-05, + "loss": 0.2081, + "step": 63320 + }, + { + "epoch": 2.4452681570717014, + "grad_norm": 0.32661938667297363, + "learning_rate": 3.698469696384674e-05, + "loss": 0.1391, + "step": 63330 + }, + { + "epoch": 2.4456542723657284, + "grad_norm": 0.521452009677887, + "learning_rate": 3.6958955944244953e-05, + "loss": 0.1688, + "step": 63340 + }, + { + "epoch": 2.446040387659755, + "grad_norm": 1.4832172393798828, + "learning_rate": 3.693321492464317e-05, + "loss": 0.1051, + "step": 63350 + }, + { + "epoch": 2.446426502953782, + "grad_norm": 0.4200538694858551, + "learning_rate": 3.690747390504138e-05, + "loss": 0.1822, + "step": 63360 + }, + { + "epoch": 2.446812618247809, + "grad_norm": 0.48908549547195435, + "learning_rate": 3.688173288543959e-05, + "loss": 0.0922, + "step": 63370 + }, + { + "epoch": 2.4471987335418355, + "grad_norm": 1.988203525543213, + "learning_rate": 3.685599186583781e-05, + "loss": 0.1412, + "step": 63380 + }, + { + "epoch": 2.4475848488358625, + "grad_norm": 0.8950991630554199, + "learning_rate": 3.683025084623602e-05, + "loss": 0.2411, + "step": 63390 + }, + { + "epoch": 2.447970964129889, + "grad_norm": 1.0535110235214233, + "learning_rate": 3.680450982663424e-05, + "loss": 0.1539, + "step": 63400 + }, + { + "epoch": 2.448357079423916, + "grad_norm": 1.7036796808242798, + "learning_rate": 3.6778768807032447e-05, + "loss": 0.2954, + "step": 63410 + }, + { + "epoch": 2.4487431947179426, + "grad_norm": 0.32762983441352844, + "learning_rate": 3.675302778743066e-05, + "loss": 0.1721, + "step": 63420 + }, + { + "epoch": 2.4491293100119695, + "grad_norm": 3.3198564052581787, + "learning_rate": 3.672728676782888e-05, + "loss": 0.3087, + "step": 63430 + }, + { + "epoch": 2.4495154253059965, + "grad_norm": 2.290881872177124, + "learning_rate": 3.6701545748227087e-05, + "loss": 0.0957, + "step": 63440 + }, + { + "epoch": 2.449901540600023, + "grad_norm": 1.985274076461792, + "learning_rate": 3.66758047286253e-05, + "loss": 0.146, + "step": 63450 + }, + { + "epoch": 2.45028765589405, + "grad_norm": 1.5870091915130615, + "learning_rate": 3.665006370902352e-05, + "loss": 0.179, + "step": 63460 + }, + { + "epoch": 2.4506737711880766, + "grad_norm": 0.29243603348731995, + "learning_rate": 3.662432268942173e-05, + "loss": 0.2421, + "step": 63470 + }, + { + "epoch": 2.4510598864821036, + "grad_norm": 1.050746202468872, + "learning_rate": 3.659858166981994e-05, + "loss": 0.1841, + "step": 63480 + }, + { + "epoch": 2.45144600177613, + "grad_norm": 0.9252954721450806, + "learning_rate": 3.657284065021815e-05, + "loss": 0.294, + "step": 63490 + }, + { + "epoch": 2.451832117070157, + "grad_norm": 1.6930452585220337, + "learning_rate": 
3.654709963061637e-05, + "loss": 0.083, + "step": 63500 + }, + { + "epoch": 2.452218232364184, + "grad_norm": 1.2885856628417969, + "learning_rate": 3.6521358611014586e-05, + "loss": 0.1526, + "step": 63510 + }, + { + "epoch": 2.4526043476582107, + "grad_norm": 0.19372563064098358, + "learning_rate": 3.649561759141279e-05, + "loss": 0.1417, + "step": 63520 + }, + { + "epoch": 2.4529904629522377, + "grad_norm": 0.9957149624824524, + "learning_rate": 3.646987657181101e-05, + "loss": 0.2321, + "step": 63530 + }, + { + "epoch": 2.453376578246264, + "grad_norm": 0.8055297136306763, + "learning_rate": 3.6444135552209226e-05, + "loss": 0.178, + "step": 63540 + }, + { + "epoch": 2.453762693540291, + "grad_norm": 0.37042102217674255, + "learning_rate": 3.641839453260743e-05, + "loss": 0.1533, + "step": 63550 + }, + { + "epoch": 2.4541488088343177, + "grad_norm": 0.09681963920593262, + "learning_rate": 3.639265351300565e-05, + "loss": 0.0807, + "step": 63560 + }, + { + "epoch": 2.4545349241283447, + "grad_norm": 0.17449086904525757, + "learning_rate": 3.6366912493403866e-05, + "loss": 0.0819, + "step": 63570 + }, + { + "epoch": 2.4549210394223717, + "grad_norm": 0.9616051316261292, + "learning_rate": 3.634117147380208e-05, + "loss": 0.0952, + "step": 63580 + }, + { + "epoch": 2.4553071547163983, + "grad_norm": 3.450044870376587, + "learning_rate": 3.631543045420029e-05, + "loss": 0.247, + "step": 63590 + }, + { + "epoch": 2.4556932700104253, + "grad_norm": 0.5883270502090454, + "learning_rate": 3.6289689434598506e-05, + "loss": 0.105, + "step": 63600 + }, + { + "epoch": 2.456079385304452, + "grad_norm": 1.2351908683776855, + "learning_rate": 3.626394841499672e-05, + "loss": 0.1978, + "step": 63610 + }, + { + "epoch": 2.456465500598479, + "grad_norm": 0.08615940809249878, + "learning_rate": 3.623820739539493e-05, + "loss": 0.0366, + "step": 63620 + }, + { + "epoch": 2.4568516158925053, + "grad_norm": 0.758092999458313, + "learning_rate": 3.6212466375793146e-05, + "loss": 0.0538, + "step": 63630 + }, + { + "epoch": 2.4572377311865323, + "grad_norm": 0.2542964220046997, + "learning_rate": 3.618672535619136e-05, + "loss": 0.217, + "step": 63640 + }, + { + "epoch": 2.4576238464805593, + "grad_norm": 1.8404854536056519, + "learning_rate": 3.616098433658957e-05, + "loss": 0.1246, + "step": 63650 + }, + { + "epoch": 2.458009961774586, + "grad_norm": 0.06438548862934113, + "learning_rate": 3.6135243316987786e-05, + "loss": 0.1816, + "step": 63660 + }, + { + "epoch": 2.458396077068613, + "grad_norm": 1.4888163805007935, + "learning_rate": 3.6109502297386e-05, + "loss": 0.1496, + "step": 63670 + }, + { + "epoch": 2.4587821923626394, + "grad_norm": 0.06935808062553406, + "learning_rate": 3.608376127778422e-05, + "loss": 0.1095, + "step": 63680 + }, + { + "epoch": 2.4591683076566664, + "grad_norm": 0.9536172151565552, + "learning_rate": 3.6058020258182426e-05, + "loss": 0.1572, + "step": 63690 + }, + { + "epoch": 2.459554422950693, + "grad_norm": 3.4437315464019775, + "learning_rate": 3.603227923858064e-05, + "loss": 0.2611, + "step": 63700 + }, + { + "epoch": 2.45994053824472, + "grad_norm": 2.3220934867858887, + "learning_rate": 3.600653821897886e-05, + "loss": 0.2999, + "step": 63710 + }, + { + "epoch": 2.460326653538747, + "grad_norm": 1.1044209003448486, + "learning_rate": 3.598079719937707e-05, + "loss": 0.0936, + "step": 63720 + }, + { + "epoch": 2.4607127688327735, + "grad_norm": 2.439162015914917, + "learning_rate": 3.595505617977528e-05, + "loss": 0.2898, + "step": 63730 + }, + { + "epoch": 
2.4610988841268004, + "grad_norm": 0.9153732061386108, + "learning_rate": 3.59293151601735e-05, + "loss": 0.0976, + "step": 63740 + }, + { + "epoch": 2.461484999420827, + "grad_norm": 0.019965412095189095, + "learning_rate": 3.590357414057171e-05, + "loss": 0.1132, + "step": 63750 + }, + { + "epoch": 2.461871114714854, + "grad_norm": 1.2829655408859253, + "learning_rate": 3.587783312096992e-05, + "loss": 0.109, + "step": 63760 + }, + { + "epoch": 2.4622572300088805, + "grad_norm": 1.2905359268188477, + "learning_rate": 3.585209210136813e-05, + "loss": 0.288, + "step": 63770 + }, + { + "epoch": 2.4626433453029075, + "grad_norm": 0.6175247430801392, + "learning_rate": 3.582635108176635e-05, + "loss": 0.1877, + "step": 63780 + }, + { + "epoch": 2.463029460596934, + "grad_norm": 0.15092957019805908, + "learning_rate": 3.5800610062164566e-05, + "loss": 0.1871, + "step": 63790 + }, + { + "epoch": 2.463415575890961, + "grad_norm": 0.4543861746788025, + "learning_rate": 3.577486904256277e-05, + "loss": 0.1566, + "step": 63800 + }, + { + "epoch": 2.4638016911849876, + "grad_norm": 2.4032862186431885, + "learning_rate": 3.574912802296099e-05, + "loss": 0.338, + "step": 63810 + }, + { + "epoch": 2.4641878064790146, + "grad_norm": 0.4644568860530853, + "learning_rate": 3.5723387003359206e-05, + "loss": 0.4347, + "step": 63820 + }, + { + "epoch": 2.4645739217730416, + "grad_norm": 2.190516233444214, + "learning_rate": 3.569764598375742e-05, + "loss": 0.1055, + "step": 63830 + }, + { + "epoch": 2.464960037067068, + "grad_norm": 0.18661876022815704, + "learning_rate": 3.567190496415563e-05, + "loss": 0.4476, + "step": 63840 + }, + { + "epoch": 2.465346152361095, + "grad_norm": 0.7265095114707947, + "learning_rate": 3.5646163944553846e-05, + "loss": 0.1121, + "step": 63850 + }, + { + "epoch": 2.4657322676551217, + "grad_norm": 0.2888505160808563, + "learning_rate": 3.562042292495206e-05, + "loss": 0.1208, + "step": 63860 + }, + { + "epoch": 2.4661183829491486, + "grad_norm": 0.9269798398017883, + "learning_rate": 3.559468190535027e-05, + "loss": 0.1376, + "step": 63870 + }, + { + "epoch": 2.466504498243175, + "grad_norm": 0.048346057534217834, + "learning_rate": 3.5568940885748486e-05, + "loss": 0.3057, + "step": 63880 + }, + { + "epoch": 2.466890613537202, + "grad_norm": 1.5524492263793945, + "learning_rate": 3.55431998661467e-05, + "loss": 0.077, + "step": 63890 + }, + { + "epoch": 2.467276728831229, + "grad_norm": 0.7234669327735901, + "learning_rate": 3.551745884654491e-05, + "loss": 0.1701, + "step": 63900 + }, + { + "epoch": 2.4676628441252557, + "grad_norm": 0.9384757280349731, + "learning_rate": 3.5491717826943126e-05, + "loss": 0.1085, + "step": 63910 + }, + { + "epoch": 2.4680489594192827, + "grad_norm": 1.6186715364456177, + "learning_rate": 3.546597680734134e-05, + "loss": 0.1556, + "step": 63920 + }, + { + "epoch": 2.4684350747133093, + "grad_norm": 1.2746638059616089, + "learning_rate": 3.544023578773956e-05, + "loss": 0.2306, + "step": 63930 + }, + { + "epoch": 2.4688211900073362, + "grad_norm": 2.7122621536254883, + "learning_rate": 3.5414494768137766e-05, + "loss": 0.1482, + "step": 63940 + }, + { + "epoch": 2.469207305301363, + "grad_norm": 1.9252265691757202, + "learning_rate": 3.538875374853598e-05, + "loss": 0.2278, + "step": 63950 + }, + { + "epoch": 2.4695934205953898, + "grad_norm": 1.1436023712158203, + "learning_rate": 3.53630127289342e-05, + "loss": 0.1518, + "step": 63960 + }, + { + "epoch": 2.4699795358894168, + "grad_norm": 0.40255314111709595, + "learning_rate": 
3.533727170933241e-05, + "loss": 0.067, + "step": 63970 + }, + { + "epoch": 2.4703656511834433, + "grad_norm": 0.5762872695922852, + "learning_rate": 3.531153068973062e-05, + "loss": 0.2584, + "step": 63980 + }, + { + "epoch": 2.4707517664774703, + "grad_norm": 0.977530837059021, + "learning_rate": 3.528578967012884e-05, + "loss": 0.113, + "step": 63990 + }, + { + "epoch": 2.471137881771497, + "grad_norm": 0.5977995991706848, + "learning_rate": 3.526004865052705e-05, + "loss": 0.2179, + "step": 64000 + }, + { + "epoch": 2.471523997065524, + "grad_norm": 0.33924487233161926, + "learning_rate": 3.523430763092526e-05, + "loss": 0.1102, + "step": 64010 + }, + { + "epoch": 2.4719101123595504, + "grad_norm": 1.6377263069152832, + "learning_rate": 3.520856661132347e-05, + "loss": 0.1848, + "step": 64020 + }, + { + "epoch": 2.4722962276535774, + "grad_norm": 0.3962012827396393, + "learning_rate": 3.518282559172169e-05, + "loss": 0.2144, + "step": 64030 + }, + { + "epoch": 2.4726823429476044, + "grad_norm": 2.4445207118988037, + "learning_rate": 3.5157084572119906e-05, + "loss": 0.0798, + "step": 64040 + }, + { + "epoch": 2.473068458241631, + "grad_norm": 1.1374174356460571, + "learning_rate": 3.513134355251811e-05, + "loss": 0.0999, + "step": 64050 + }, + { + "epoch": 2.473454573535658, + "grad_norm": 1.128503680229187, + "learning_rate": 3.510560253291633e-05, + "loss": 0.0811, + "step": 64060 + }, + { + "epoch": 2.4738406888296844, + "grad_norm": 0.7874391078948975, + "learning_rate": 3.5079861513314545e-05, + "loss": 0.1042, + "step": 64070 + }, + { + "epoch": 2.4742268041237114, + "grad_norm": 0.36891883611679077, + "learning_rate": 3.505412049371276e-05, + "loss": 0.2693, + "step": 64080 + }, + { + "epoch": 2.474612919417738, + "grad_norm": 1.4663618803024292, + "learning_rate": 3.502837947411097e-05, + "loss": 0.1469, + "step": 64090 + }, + { + "epoch": 2.474999034711765, + "grad_norm": 1.6910697221755981, + "learning_rate": 3.5002638454509185e-05, + "loss": 0.1506, + "step": 64100 + }, + { + "epoch": 2.475385150005792, + "grad_norm": 2.6914491653442383, + "learning_rate": 3.49768974349074e-05, + "loss": 0.1182, + "step": 64110 + }, + { + "epoch": 2.4757712652998185, + "grad_norm": 0.7562038898468018, + "learning_rate": 3.495115641530561e-05, + "loss": 0.1935, + "step": 64120 + }, + { + "epoch": 2.4761573805938455, + "grad_norm": 0.9019849300384521, + "learning_rate": 3.4925415395703825e-05, + "loss": 0.1456, + "step": 64130 + }, + { + "epoch": 2.476543495887872, + "grad_norm": 1.4131752252578735, + "learning_rate": 3.489967437610204e-05, + "loss": 0.1502, + "step": 64140 + }, + { + "epoch": 2.476929611181899, + "grad_norm": 1.1619501113891602, + "learning_rate": 3.487393335650025e-05, + "loss": 0.1559, + "step": 64150 + }, + { + "epoch": 2.4773157264759256, + "grad_norm": 0.38123640418052673, + "learning_rate": 3.4848192336898465e-05, + "loss": 0.1273, + "step": 64160 + }, + { + "epoch": 2.4777018417699526, + "grad_norm": 2.8837268352508545, + "learning_rate": 3.482245131729668e-05, + "loss": 0.1264, + "step": 64170 + }, + { + "epoch": 2.4780879570639796, + "grad_norm": 4.011549949645996, + "learning_rate": 3.47967102976949e-05, + "loss": 0.3243, + "step": 64180 + }, + { + "epoch": 2.478474072358006, + "grad_norm": 1.187883973121643, + "learning_rate": 3.4770969278093105e-05, + "loss": 0.1948, + "step": 64190 + }, + { + "epoch": 2.478860187652033, + "grad_norm": 1.1129032373428345, + "learning_rate": 3.474522825849132e-05, + "loss": 0.1226, + "step": 64200 + }, + { + "epoch": 
2.4792463029460596, + "grad_norm": 0.3412770926952362, + "learning_rate": 3.471948723888954e-05, + "loss": 0.1282, + "step": 64210 + }, + { + "epoch": 2.4796324182400866, + "grad_norm": 1.1567891836166382, + "learning_rate": 3.4693746219287745e-05, + "loss": 0.2034, + "step": 64220 + }, + { + "epoch": 2.480018533534113, + "grad_norm": 0.05797566846013069, + "learning_rate": 3.466800519968596e-05, + "loss": 0.1438, + "step": 64230 + }, + { + "epoch": 2.48040464882814, + "grad_norm": 0.8097573518753052, + "learning_rate": 3.464226418008418e-05, + "loss": 0.2128, + "step": 64240 + }, + { + "epoch": 2.4807907641221667, + "grad_norm": 1.383527398109436, + "learning_rate": 3.461652316048239e-05, + "loss": 0.2379, + "step": 64250 + }, + { + "epoch": 2.4811768794161937, + "grad_norm": 1.1194959878921509, + "learning_rate": 3.45907821408806e-05, + "loss": 0.3002, + "step": 64260 + }, + { + "epoch": 2.4815629947102202, + "grad_norm": 2.7457661628723145, + "learning_rate": 3.456504112127881e-05, + "loss": 0.2035, + "step": 64270 + }, + { + "epoch": 2.4819491100042472, + "grad_norm": 1.852704405784607, + "learning_rate": 3.453930010167703e-05, + "loss": 0.1338, + "step": 64280 + }, + { + "epoch": 2.482335225298274, + "grad_norm": 1.2378255128860474, + "learning_rate": 3.4513559082075245e-05, + "loss": 0.1083, + "step": 64290 + }, + { + "epoch": 2.4827213405923008, + "grad_norm": 1.2217592000961304, + "learning_rate": 3.448781806247345e-05, + "loss": 0.2626, + "step": 64300 + }, + { + "epoch": 2.4831074558863278, + "grad_norm": 0.8216773271560669, + "learning_rate": 3.446207704287167e-05, + "loss": 0.1455, + "step": 64310 + }, + { + "epoch": 2.4834935711803543, + "grad_norm": 1.122636318206787, + "learning_rate": 3.4436336023269885e-05, + "loss": 0.2029, + "step": 64320 + }, + { + "epoch": 2.4838796864743813, + "grad_norm": 0.4117072820663452, + "learning_rate": 3.441059500366809e-05, + "loss": 0.2123, + "step": 64330 + }, + { + "epoch": 2.484265801768408, + "grad_norm": 0.04222499951720238, + "learning_rate": 3.438485398406631e-05, + "loss": 0.2791, + "step": 64340 + }, + { + "epoch": 2.484651917062435, + "grad_norm": 1.2753795385360718, + "learning_rate": 3.4359112964464525e-05, + "loss": 0.213, + "step": 64350 + }, + { + "epoch": 2.485038032356462, + "grad_norm": 0.8051297068595886, + "learning_rate": 3.433337194486274e-05, + "loss": 0.2523, + "step": 64360 + }, + { + "epoch": 2.4854241476504884, + "grad_norm": 2.218684196472168, + "learning_rate": 3.430763092526095e-05, + "loss": 0.2185, + "step": 64370 + }, + { + "epoch": 2.4858102629445153, + "grad_norm": 2.2917842864990234, + "learning_rate": 3.4281889905659165e-05, + "loss": 0.1521, + "step": 64380 + }, + { + "epoch": 2.486196378238542, + "grad_norm": 1.0776972770690918, + "learning_rate": 3.425614888605738e-05, + "loss": 0.2217, + "step": 64390 + }, + { + "epoch": 2.486582493532569, + "grad_norm": 1.0344847440719604, + "learning_rate": 3.423040786645559e-05, + "loss": 0.1871, + "step": 64400 + }, + { + "epoch": 2.4869686088265954, + "grad_norm": 0.6094161868095398, + "learning_rate": 3.4204666846853805e-05, + "loss": 0.1073, + "step": 64410 + }, + { + "epoch": 2.4873547241206224, + "grad_norm": 0.8258315920829773, + "learning_rate": 3.417892582725202e-05, + "loss": 0.1897, + "step": 64420 + }, + { + "epoch": 2.4877408394146494, + "grad_norm": 0.6779630184173584, + "learning_rate": 3.415318480765023e-05, + "loss": 0.0847, + "step": 64430 + }, + { + "epoch": 2.488126954708676, + "grad_norm": 1.7514374256134033, + "learning_rate": 
3.4127443788048445e-05, + "loss": 0.2243, + "step": 64440 + }, + { + "epoch": 2.488513070002703, + "grad_norm": 0.5367060899734497, + "learning_rate": 3.410170276844666e-05, + "loss": 0.3064, + "step": 64450 + }, + { + "epoch": 2.4888991852967295, + "grad_norm": 1.0501765012741089, + "learning_rate": 3.407596174884488e-05, + "loss": 0.1908, + "step": 64460 + }, + { + "epoch": 2.4892853005907565, + "grad_norm": 0.8501892685890198, + "learning_rate": 3.4050220729243085e-05, + "loss": 0.1238, + "step": 64470 + }, + { + "epoch": 2.489671415884783, + "grad_norm": 0.2177915871143341, + "learning_rate": 3.40244797096413e-05, + "loss": 0.114, + "step": 64480 + }, + { + "epoch": 2.49005753117881, + "grad_norm": 0.04332759603857994, + "learning_rate": 3.399873869003952e-05, + "loss": 0.2304, + "step": 64490 + }, + { + "epoch": 2.490443646472837, + "grad_norm": 0.6862795352935791, + "learning_rate": 3.397299767043773e-05, + "loss": 0.0624, + "step": 64500 + }, + { + "epoch": 2.4908297617668635, + "grad_norm": 0.7046298980712891, + "learning_rate": 3.394725665083594e-05, + "loss": 0.1957, + "step": 64510 + }, + { + "epoch": 2.4912158770608905, + "grad_norm": 0.5339323878288269, + "learning_rate": 3.392151563123416e-05, + "loss": 0.2889, + "step": 64520 + }, + { + "epoch": 2.491601992354917, + "grad_norm": 0.28813856840133667, + "learning_rate": 3.389577461163237e-05, + "loss": 0.0679, + "step": 64530 + }, + { + "epoch": 2.491988107648944, + "grad_norm": 0.6499680876731873, + "learning_rate": 3.387003359203058e-05, + "loss": 0.1949, + "step": 64540 + }, + { + "epoch": 2.4923742229429706, + "grad_norm": 0.26736152172088623, + "learning_rate": 3.384429257242879e-05, + "loss": 0.1647, + "step": 64550 + }, + { + "epoch": 2.4927603382369976, + "grad_norm": 2.9154460430145264, + "learning_rate": 3.381855155282701e-05, + "loss": 0.2578, + "step": 64560 + }, + { + "epoch": 2.4931464535310246, + "grad_norm": 0.23644079267978668, + "learning_rate": 3.3792810533225225e-05, + "loss": 0.1555, + "step": 64570 + }, + { + "epoch": 2.493532568825051, + "grad_norm": 1.459173560142517, + "learning_rate": 3.376706951362343e-05, + "loss": 0.1637, + "step": 64580 + }, + { + "epoch": 2.493918684119078, + "grad_norm": 0.5812413692474365, + "learning_rate": 3.374132849402165e-05, + "loss": 0.1067, + "step": 64590 + }, + { + "epoch": 2.4943047994131047, + "grad_norm": 1.358742594718933, + "learning_rate": 3.3715587474419865e-05, + "loss": 0.2211, + "step": 64600 + }, + { + "epoch": 2.4946909147071317, + "grad_norm": 1.94925856590271, + "learning_rate": 3.368984645481808e-05, + "loss": 0.4372, + "step": 64610 + }, + { + "epoch": 2.495077030001158, + "grad_norm": 0.4507911801338196, + "learning_rate": 3.366410543521629e-05, + "loss": 0.1736, + "step": 64620 + }, + { + "epoch": 2.495463145295185, + "grad_norm": 0.7500709891319275, + "learning_rate": 3.3638364415614504e-05, + "loss": 0.2025, + "step": 64630 + }, + { + "epoch": 2.4958492605892117, + "grad_norm": 2.6973438262939453, + "learning_rate": 3.361262339601272e-05, + "loss": 0.1173, + "step": 64640 + }, + { + "epoch": 2.4962353758832387, + "grad_norm": 2.0454306602478027, + "learning_rate": 3.358688237641093e-05, + "loss": 0.1304, + "step": 64650 + }, + { + "epoch": 2.4966214911772653, + "grad_norm": 2.469876527786255, + "learning_rate": 3.3561141356809144e-05, + "loss": 0.1758, + "step": 64660 + }, + { + "epoch": 2.4970076064712923, + "grad_norm": 0.8067110180854797, + "learning_rate": 3.353540033720736e-05, + "loss": 0.0747, + "step": 64670 + }, + { + "epoch": 
2.4973937217653193, + "grad_norm": 1.2773382663726807, + "learning_rate": 3.350965931760557e-05, + "loss": 0.1151, + "step": 64680 + }, + { + "epoch": 2.497779837059346, + "grad_norm": 0.1975592076778412, + "learning_rate": 3.3483918298003784e-05, + "loss": 0.2104, + "step": 64690 + }, + { + "epoch": 2.498165952353373, + "grad_norm": 0.6423165202140808, + "learning_rate": 3.3458177278402e-05, + "loss": 0.1885, + "step": 64700 + }, + { + "epoch": 2.4985520676473993, + "grad_norm": 1.818458914756775, + "learning_rate": 3.343243625880022e-05, + "loss": 0.1506, + "step": 64710 + }, + { + "epoch": 2.4989381829414263, + "grad_norm": 0.30381113290786743, + "learning_rate": 3.3406695239198424e-05, + "loss": 0.1791, + "step": 64720 + }, + { + "epoch": 2.499324298235453, + "grad_norm": 2.867393732070923, + "learning_rate": 3.338095421959664e-05, + "loss": 0.1854, + "step": 64730 + }, + { + "epoch": 2.49971041352948, + "grad_norm": 0.22897863388061523, + "learning_rate": 3.335521319999486e-05, + "loss": 0.1313, + "step": 64740 + }, + { + "epoch": 2.500096528823507, + "grad_norm": 2.5336554050445557, + "learning_rate": 3.332947218039307e-05, + "loss": 0.1814, + "step": 64750 + }, + { + "epoch": 2.5004826441175334, + "grad_norm": 0.10779833793640137, + "learning_rate": 3.330373116079128e-05, + "loss": 0.1563, + "step": 64760 + }, + { + "epoch": 2.5008687594115604, + "grad_norm": 1.1376827955245972, + "learning_rate": 3.32779901411895e-05, + "loss": 0.1495, + "step": 64770 + }, + { + "epoch": 2.501254874705587, + "grad_norm": 0.7156823873519897, + "learning_rate": 3.325224912158771e-05, + "loss": 0.2531, + "step": 64780 + }, + { + "epoch": 2.501640989999614, + "grad_norm": 0.7690051198005676, + "learning_rate": 3.322650810198592e-05, + "loss": 0.1204, + "step": 64790 + }, + { + "epoch": 2.5020271052936405, + "grad_norm": 0.0718945860862732, + "learning_rate": 3.320076708238413e-05, + "loss": 0.1044, + "step": 64800 + }, + { + "epoch": 2.5024132205876675, + "grad_norm": 0.12632794678211212, + "learning_rate": 3.317502606278235e-05, + "loss": 0.1007, + "step": 64810 + }, + { + "epoch": 2.5027993358816945, + "grad_norm": 0.9840032458305359, + "learning_rate": 3.3149285043180564e-05, + "loss": 0.2399, + "step": 64820 + }, + { + "epoch": 2.503185451175721, + "grad_norm": 0.3271815776824951, + "learning_rate": 3.312354402357877e-05, + "loss": 0.117, + "step": 64830 + }, + { + "epoch": 2.503571566469748, + "grad_norm": 2.1266753673553467, + "learning_rate": 3.309780300397699e-05, + "loss": 0.1819, + "step": 64840 + }, + { + "epoch": 2.5039576817637745, + "grad_norm": 0.5041390061378479, + "learning_rate": 3.3072061984375204e-05, + "loss": 0.0579, + "step": 64850 + }, + { + "epoch": 2.5043437970578015, + "grad_norm": 0.2855200469493866, + "learning_rate": 3.304632096477341e-05, + "loss": 0.1814, + "step": 64860 + }, + { + "epoch": 2.504729912351828, + "grad_norm": 1.9890060424804688, + "learning_rate": 3.302057994517163e-05, + "loss": 0.3038, + "step": 64870 + }, + { + "epoch": 2.505116027645855, + "grad_norm": 0.5688171982765198, + "learning_rate": 3.2994838925569844e-05, + "loss": 0.0947, + "step": 64880 + }, + { + "epoch": 2.505502142939882, + "grad_norm": 3.265097141265869, + "learning_rate": 3.296909790596806e-05, + "loss": 0.1722, + "step": 64890 + }, + { + "epoch": 2.5058882582339086, + "grad_norm": 1.0011316537857056, + "learning_rate": 3.294335688636627e-05, + "loss": 0.217, + "step": 64900 + }, + { + "epoch": 2.5062743735279356, + "grad_norm": 2.054866075515747, + "learning_rate": 
3.2917615866764484e-05, + "loss": 0.1862, + "step": 64910 + }, + { + "epoch": 2.506660488821962, + "grad_norm": 1.2730998992919922, + "learning_rate": 3.28918748471627e-05, + "loss": 0.1023, + "step": 64920 + }, + { + "epoch": 2.507046604115989, + "grad_norm": 1.5231776237487793, + "learning_rate": 3.286613382756091e-05, + "loss": 0.1922, + "step": 64930 + }, + { + "epoch": 2.5074327194100157, + "grad_norm": 0.7604933977127075, + "learning_rate": 3.2840392807959124e-05, + "loss": 0.1329, + "step": 64940 + }, + { + "epoch": 2.5078188347040427, + "grad_norm": 1.243506669998169, + "learning_rate": 3.281465178835734e-05, + "loss": 0.2142, + "step": 64950 + }, + { + "epoch": 2.5082049499980696, + "grad_norm": 3.0195231437683105, + "learning_rate": 3.278891076875556e-05, + "loss": 0.3474, + "step": 64960 + }, + { + "epoch": 2.508591065292096, + "grad_norm": 2.2186105251312256, + "learning_rate": 3.2763169749153764e-05, + "loss": 0.2125, + "step": 64970 + }, + { + "epoch": 2.508977180586123, + "grad_norm": 1.1483558416366577, + "learning_rate": 3.273742872955198e-05, + "loss": 0.1427, + "step": 64980 + }, + { + "epoch": 2.5093632958801497, + "grad_norm": 1.3046457767486572, + "learning_rate": 3.27116877099502e-05, + "loss": 0.0859, + "step": 64990 + }, + { + "epoch": 2.5097494111741767, + "grad_norm": 0.04392600804567337, + "learning_rate": 3.2685946690348404e-05, + "loss": 0.146, + "step": 65000 + }, + { + "epoch": 2.5101355264682033, + "grad_norm": 0.8138188123703003, + "learning_rate": 3.266020567074662e-05, + "loss": 0.1809, + "step": 65010 + }, + { + "epoch": 2.5105216417622302, + "grad_norm": 2.0875182151794434, + "learning_rate": 3.263446465114484e-05, + "loss": 0.1925, + "step": 65020 + }, + { + "epoch": 2.5109077570562572, + "grad_norm": 0.4890693426132202, + "learning_rate": 3.260872363154305e-05, + "loss": 0.1189, + "step": 65030 + }, + { + "epoch": 2.511293872350284, + "grad_norm": 0.21475251019001007, + "learning_rate": 3.258298261194126e-05, + "loss": 0.2583, + "step": 65040 + }, + { + "epoch": 2.5116799876443103, + "grad_norm": 1.272985577583313, + "learning_rate": 3.255724159233947e-05, + "loss": 0.3891, + "step": 65050 + }, + { + "epoch": 2.5120661029383373, + "grad_norm": 0.8311867713928223, + "learning_rate": 3.253150057273769e-05, + "loss": 0.1536, + "step": 65060 + }, + { + "epoch": 2.5124522182323643, + "grad_norm": 2.999817371368408, + "learning_rate": 3.2505759553135904e-05, + "loss": 0.2331, + "step": 65070 + }, + { + "epoch": 2.512838333526391, + "grad_norm": 0.4586697518825531, + "learning_rate": 3.248001853353411e-05, + "loss": 0.1496, + "step": 65080 + }, + { + "epoch": 2.513224448820418, + "grad_norm": 0.38037627935409546, + "learning_rate": 3.245427751393233e-05, + "loss": 0.1769, + "step": 65090 + }, + { + "epoch": 2.513610564114445, + "grad_norm": 0.9884114265441895, + "learning_rate": 3.2428536494330544e-05, + "loss": 0.157, + "step": 65100 + }, + { + "epoch": 2.5139966794084714, + "grad_norm": 2.8576581478118896, + "learning_rate": 3.240279547472875e-05, + "loss": 0.1484, + "step": 65110 + }, + { + "epoch": 2.514382794702498, + "grad_norm": 1.7361352443695068, + "learning_rate": 3.237705445512697e-05, + "loss": 0.1315, + "step": 65120 + }, + { + "epoch": 2.514768909996525, + "grad_norm": 1.4289588928222656, + "learning_rate": 3.2351313435525184e-05, + "loss": 0.1235, + "step": 65130 + }, + { + "epoch": 2.515155025290552, + "grad_norm": 2.256578207015991, + "learning_rate": 3.23255724159234e-05, + "loss": 0.1026, + "step": 65140 + }, + { + "epoch": 
2.5155411405845785, + "grad_norm": 2.3927180767059326, + "learning_rate": 3.229983139632161e-05, + "loss": 0.1264, + "step": 65150 + }, + { + "epoch": 2.5159272558786054, + "grad_norm": 0.7600728869438171, + "learning_rate": 3.2274090376719824e-05, + "loss": 0.1091, + "step": 65160 + }, + { + "epoch": 2.516313371172632, + "grad_norm": 1.196343183517456, + "learning_rate": 3.224834935711804e-05, + "loss": 0.1419, + "step": 65170 + }, + { + "epoch": 2.516699486466659, + "grad_norm": 1.8273401260375977, + "learning_rate": 3.222260833751625e-05, + "loss": 0.1519, + "step": 65180 + }, + { + "epoch": 2.5170856017606855, + "grad_norm": 0.586053192615509, + "learning_rate": 3.2196867317914463e-05, + "loss": 0.1348, + "step": 65190 + }, + { + "epoch": 2.5174717170547125, + "grad_norm": 0.34410399198532104, + "learning_rate": 3.217112629831268e-05, + "loss": 0.2406, + "step": 65200 + }, + { + "epoch": 2.5178578323487395, + "grad_norm": 1.9117828607559204, + "learning_rate": 3.214538527871089e-05, + "loss": 0.1669, + "step": 65210 + }, + { + "epoch": 2.518243947642766, + "grad_norm": 0.3416088819503784, + "learning_rate": 3.2119644259109103e-05, + "loss": 0.0738, + "step": 65220 + }, + { + "epoch": 2.518630062936793, + "grad_norm": 2.7562408447265625, + "learning_rate": 3.209390323950732e-05, + "loss": 0.2793, + "step": 65230 + }, + { + "epoch": 2.5190161782308196, + "grad_norm": 0.18978220224380493, + "learning_rate": 3.206816221990554e-05, + "loss": 0.2145, + "step": 65240 + }, + { + "epoch": 2.5194022935248466, + "grad_norm": 1.9624252319335938, + "learning_rate": 3.204242120030374e-05, + "loss": 0.2203, + "step": 65250 + }, + { + "epoch": 2.519788408818873, + "grad_norm": 1.9377204179763794, + "learning_rate": 3.201668018070196e-05, + "loss": 0.1587, + "step": 65260 + }, + { + "epoch": 2.5201745241129, + "grad_norm": 0.5279117226600647, + "learning_rate": 3.199093916110018e-05, + "loss": 0.1874, + "step": 65270 + }, + { + "epoch": 2.520560639406927, + "grad_norm": 1.3398821353912354, + "learning_rate": 3.196519814149839e-05, + "loss": 0.1258, + "step": 65280 + }, + { + "epoch": 2.5209467547009536, + "grad_norm": 3.5301899909973145, + "learning_rate": 3.1939457121896597e-05, + "loss": 0.2119, + "step": 65290 + }, + { + "epoch": 2.5213328699949806, + "grad_norm": 1.9934186935424805, + "learning_rate": 3.191371610229482e-05, + "loss": 0.1813, + "step": 65300 + }, + { + "epoch": 2.521718985289007, + "grad_norm": 0.049756068736314774, + "learning_rate": 3.188797508269303e-05, + "loss": 0.1224, + "step": 65310 + }, + { + "epoch": 2.522105100583034, + "grad_norm": 0.14521420001983643, + "learning_rate": 3.1862234063091237e-05, + "loss": 0.2149, + "step": 65320 + }, + { + "epoch": 2.5224912158770607, + "grad_norm": 1.602318525314331, + "learning_rate": 3.183649304348945e-05, + "loss": 0.2355, + "step": 65330 + }, + { + "epoch": 2.5228773311710877, + "grad_norm": 0.8942916989326477, + "learning_rate": 3.181075202388767e-05, + "loss": 0.0873, + "step": 65340 + }, + { + "epoch": 2.5232634464651147, + "grad_norm": 1.184981107711792, + "learning_rate": 3.178501100428588e-05, + "loss": 0.2573, + "step": 65350 + }, + { + "epoch": 2.5236495617591412, + "grad_norm": 1.4945077896118164, + "learning_rate": 3.175926998468409e-05, + "loss": 0.131, + "step": 65360 + }, + { + "epoch": 2.5240356770531682, + "grad_norm": 0.2049487978219986, + "learning_rate": 3.173352896508231e-05, + "loss": 0.0353, + "step": 65370 + }, + { + "epoch": 2.5244217923471948, + "grad_norm": 0.9006851315498352, + "learning_rate": 
3.170778794548052e-05, + "loss": 0.1341, + "step": 65380 + }, + { + "epoch": 2.5248079076412218, + "grad_norm": 0.08327820897102356, + "learning_rate": 3.1682046925878736e-05, + "loss": 0.1907, + "step": 65390 + }, + { + "epoch": 2.5251940229352483, + "grad_norm": 0.19701172411441803, + "learning_rate": 3.165630590627695e-05, + "loss": 0.131, + "step": 65400 + }, + { + "epoch": 2.5255801382292753, + "grad_norm": 2.7013745307922363, + "learning_rate": 3.163056488667516e-05, + "loss": 0.3517, + "step": 65410 + }, + { + "epoch": 2.5259662535233023, + "grad_norm": 0.20807845890522003, + "learning_rate": 3.1604823867073376e-05, + "loss": 0.0998, + "step": 65420 + }, + { + "epoch": 2.526352368817329, + "grad_norm": 0.6886903643608093, + "learning_rate": 3.157908284747159e-05, + "loss": 0.1197, + "step": 65430 + }, + { + "epoch": 2.5267384841113554, + "grad_norm": 0.42514756321907043, + "learning_rate": 3.15533418278698e-05, + "loss": 0.1692, + "step": 65440 + }, + { + "epoch": 2.5271245994053824, + "grad_norm": 0.26355358958244324, + "learning_rate": 3.1527600808268016e-05, + "loss": 0.121, + "step": 65450 + }, + { + "epoch": 2.5275107146994094, + "grad_norm": 0.14379052817821503, + "learning_rate": 3.150185978866623e-05, + "loss": 0.2221, + "step": 65460 + }, + { + "epoch": 2.527896829993436, + "grad_norm": 0.0977015420794487, + "learning_rate": 3.147611876906444e-05, + "loss": 0.158, + "step": 65470 + }, + { + "epoch": 2.528282945287463, + "grad_norm": 0.6228841543197632, + "learning_rate": 3.1450377749462656e-05, + "loss": 0.2043, + "step": 65480 + }, + { + "epoch": 2.52866906058149, + "grad_norm": 0.10137589275836945, + "learning_rate": 3.1424636729860876e-05, + "loss": 0.2369, + "step": 65490 + }, + { + "epoch": 2.5290551758755164, + "grad_norm": 0.20083734393119812, + "learning_rate": 3.139889571025908e-05, + "loss": 0.1989, + "step": 65500 + }, + { + "epoch": 2.529441291169543, + "grad_norm": 0.35362812876701355, + "learning_rate": 3.1373154690657296e-05, + "loss": 0.2544, + "step": 65510 + }, + { + "epoch": 2.52982740646357, + "grad_norm": 0.8700111508369446, + "learning_rate": 3.1347413671055516e-05, + "loss": 0.112, + "step": 65520 + }, + { + "epoch": 2.530213521757597, + "grad_norm": 0.0802641287446022, + "learning_rate": 3.132167265145372e-05, + "loss": 0.0596, + "step": 65530 + }, + { + "epoch": 2.5305996370516235, + "grad_norm": 1.380710244178772, + "learning_rate": 3.1295931631851936e-05, + "loss": 0.1295, + "step": 65540 + }, + { + "epoch": 2.5309857523456505, + "grad_norm": 0.9784635901451111, + "learning_rate": 3.1270190612250156e-05, + "loss": 0.2425, + "step": 65550 + }, + { + "epoch": 2.5313718676396775, + "grad_norm": 0.09004099667072296, + "learning_rate": 3.124444959264837e-05, + "loss": 0.0659, + "step": 65560 + }, + { + "epoch": 2.531757982933704, + "grad_norm": 0.33668985962867737, + "learning_rate": 3.1218708573046576e-05, + "loss": 0.1979, + "step": 65570 + }, + { + "epoch": 2.5321440982277306, + "grad_norm": 0.39336487650871277, + "learning_rate": 3.119296755344479e-05, + "loss": 0.17, + "step": 65580 + }, + { + "epoch": 2.5325302135217576, + "grad_norm": 0.6237707734107971, + "learning_rate": 3.116722653384301e-05, + "loss": 0.0943, + "step": 65590 + }, + { + "epoch": 2.5329163288157845, + "grad_norm": 0.1977011263370514, + "learning_rate": 3.114148551424122e-05, + "loss": 0.1265, + "step": 65600 + }, + { + "epoch": 2.533302444109811, + "grad_norm": 0.7314585447311401, + "learning_rate": 3.111574449463943e-05, + "loss": 0.1159, + "step": 65610 + }, + { + "epoch": 
2.533688559403838, + "grad_norm": 1.6389861106872559, + "learning_rate": 3.109000347503765e-05, + "loss": 0.1515, + "step": 65620 + }, + { + "epoch": 2.5340746746978646, + "grad_norm": 0.25401124358177185, + "learning_rate": 3.106426245543586e-05, + "loss": 0.2054, + "step": 65630 + }, + { + "epoch": 2.5344607899918916, + "grad_norm": 0.7747787833213806, + "learning_rate": 3.103852143583407e-05, + "loss": 0.0944, + "step": 65640 + }, + { + "epoch": 2.534846905285918, + "grad_norm": 2.0066640377044678, + "learning_rate": 3.101278041623229e-05, + "loss": 0.1223, + "step": 65650 + }, + { + "epoch": 2.535233020579945, + "grad_norm": 0.3246127665042877, + "learning_rate": 3.09870393966305e-05, + "loss": 0.2998, + "step": 65660 + }, + { + "epoch": 2.535619135873972, + "grad_norm": 0.6192534565925598, + "learning_rate": 3.0961298377028716e-05, + "loss": 0.0864, + "step": 65670 + }, + { + "epoch": 2.5360052511679987, + "grad_norm": 1.500116229057312, + "learning_rate": 3.093555735742693e-05, + "loss": 0.1384, + "step": 65680 + }, + { + "epoch": 2.5363913664620257, + "grad_norm": 1.661163091659546, + "learning_rate": 3.090981633782514e-05, + "loss": 0.1907, + "step": 65690 + }, + { + "epoch": 2.536777481756052, + "grad_norm": 0.46657463908195496, + "learning_rate": 3.0884075318223356e-05, + "loss": 0.2575, + "step": 65700 + }, + { + "epoch": 2.537163597050079, + "grad_norm": 1.0207927227020264, + "learning_rate": 3.085833429862157e-05, + "loss": 0.1799, + "step": 65710 + }, + { + "epoch": 2.5375497123441058, + "grad_norm": 0.5044147372245789, + "learning_rate": 3.083259327901978e-05, + "loss": 0.2646, + "step": 65720 + }, + { + "epoch": 2.5379358276381327, + "grad_norm": 0.6330855488777161, + "learning_rate": 3.0806852259417996e-05, + "loss": 0.2243, + "step": 65730 + }, + { + "epoch": 2.5383219429321597, + "grad_norm": 0.12088367342948914, + "learning_rate": 3.0781111239816216e-05, + "loss": 0.1685, + "step": 65740 + }, + { + "epoch": 2.5387080582261863, + "grad_norm": 1.5174821615219116, + "learning_rate": 3.075537022021442e-05, + "loss": 0.081, + "step": 65750 + }, + { + "epoch": 2.5390941735202133, + "grad_norm": 0.5644610524177551, + "learning_rate": 3.0729629200612636e-05, + "loss": 0.1046, + "step": 65760 + }, + { + "epoch": 2.53948028881424, + "grad_norm": 1.9365503787994385, + "learning_rate": 3.0703888181010856e-05, + "loss": 0.1162, + "step": 65770 + }, + { + "epoch": 2.539866404108267, + "grad_norm": 0.46223533153533936, + "learning_rate": 3.067814716140906e-05, + "loss": 0.1218, + "step": 65780 + }, + { + "epoch": 2.5402525194022934, + "grad_norm": 2.0017831325531006, + "learning_rate": 3.0652406141807276e-05, + "loss": 0.2885, + "step": 65790 + }, + { + "epoch": 2.5406386346963203, + "grad_norm": 1.7874171733856201, + "learning_rate": 3.0626665122205496e-05, + "loss": 0.1561, + "step": 65800 + }, + { + "epoch": 2.5410247499903473, + "grad_norm": 0.6481704115867615, + "learning_rate": 3.060092410260371e-05, + "loss": 0.188, + "step": 65810 + }, + { + "epoch": 2.541410865284374, + "grad_norm": 1.0805027484893799, + "learning_rate": 3.0575183083001916e-05, + "loss": 0.1335, + "step": 65820 + }, + { + "epoch": 2.541796980578401, + "grad_norm": 2.5876686573028564, + "learning_rate": 3.0549442063400136e-05, + "loss": 0.0836, + "step": 65830 + }, + { + "epoch": 2.5421830958724274, + "grad_norm": 0.9550105929374695, + "learning_rate": 3.052370104379835e-05, + "loss": 0.2, + "step": 65840 + }, + { + "epoch": 2.5425692111664544, + "grad_norm": 0.48512884974479675, + "learning_rate": 
3.049796002419656e-05, + "loss": 0.089, + "step": 65850 + }, + { + "epoch": 2.542955326460481, + "grad_norm": 0.8065679669380188, + "learning_rate": 3.0472219004594772e-05, + "loss": 0.1408, + "step": 65860 + }, + { + "epoch": 2.543341441754508, + "grad_norm": 0.32501792907714844, + "learning_rate": 3.044647798499299e-05, + "loss": 0.3102, + "step": 65870 + }, + { + "epoch": 2.543727557048535, + "grad_norm": 0.521720826625824, + "learning_rate": 3.04207369653912e-05, + "loss": 0.1617, + "step": 65880 + }, + { + "epoch": 2.5441136723425615, + "grad_norm": 0.4469434320926666, + "learning_rate": 3.0394995945789412e-05, + "loss": 0.0619, + "step": 65890 + }, + { + "epoch": 2.544499787636588, + "grad_norm": 1.0342704057693481, + "learning_rate": 3.036925492618763e-05, + "loss": 0.1, + "step": 65900 + }, + { + "epoch": 2.544885902930615, + "grad_norm": 0.5880559086799622, + "learning_rate": 3.0343513906585842e-05, + "loss": 0.1031, + "step": 65910 + }, + { + "epoch": 2.545272018224642, + "grad_norm": 0.065493643283844, + "learning_rate": 3.0317772886984052e-05, + "loss": 0.1029, + "step": 65920 + }, + { + "epoch": 2.5456581335186685, + "grad_norm": 0.6159729361534119, + "learning_rate": 3.029203186738227e-05, + "loss": 0.0852, + "step": 65930 + }, + { + "epoch": 2.5460442488126955, + "grad_norm": 1.618788242340088, + "learning_rate": 3.0266290847780482e-05, + "loss": 0.2189, + "step": 65940 + }, + { + "epoch": 2.5464303641067225, + "grad_norm": 4.030269145965576, + "learning_rate": 3.0240549828178692e-05, + "loss": 0.3216, + "step": 65950 + }, + { + "epoch": 2.546816479400749, + "grad_norm": 1.813103437423706, + "learning_rate": 3.0214808808576912e-05, + "loss": 0.2134, + "step": 65960 + }, + { + "epoch": 2.5472025946947756, + "grad_norm": 0.5134888887405396, + "learning_rate": 3.0189067788975122e-05, + "loss": 0.1133, + "step": 65970 + }, + { + "epoch": 2.5475887099888026, + "grad_norm": 0.027485152706503868, + "learning_rate": 3.0163326769373335e-05, + "loss": 0.0954, + "step": 65980 + }, + { + "epoch": 2.5479748252828296, + "grad_norm": 1.0274910926818848, + "learning_rate": 3.0137585749771552e-05, + "loss": 0.3188, + "step": 65990 + }, + { + "epoch": 2.548360940576856, + "grad_norm": 0.556398332118988, + "learning_rate": 3.0111844730169762e-05, + "loss": 0.0638, + "step": 66000 + }, + { + "epoch": 2.548747055870883, + "grad_norm": 0.16730284690856934, + "learning_rate": 3.0086103710567975e-05, + "loss": 0.1002, + "step": 66010 + }, + { + "epoch": 2.54913317116491, + "grad_norm": 0.401558518409729, + "learning_rate": 3.0060362690966192e-05, + "loss": 0.1537, + "step": 66020 + }, + { + "epoch": 2.5495192864589367, + "grad_norm": 0.8240194916725159, + "learning_rate": 3.0034621671364405e-05, + "loss": 0.2183, + "step": 66030 + }, + { + "epoch": 2.549905401752963, + "grad_norm": 0.34985408186912537, + "learning_rate": 3.0008880651762615e-05, + "loss": 0.2507, + "step": 66040 + }, + { + "epoch": 2.55029151704699, + "grad_norm": 1.4406944513320923, + "learning_rate": 2.9983139632160835e-05, + "loss": 0.2086, + "step": 66050 + }, + { + "epoch": 2.550677632341017, + "grad_norm": 1.0392922163009644, + "learning_rate": 2.9957398612559045e-05, + "loss": 0.2622, + "step": 66060 + }, + { + "epoch": 2.5510637476350437, + "grad_norm": 0.07720334827899933, + "learning_rate": 2.993165759295726e-05, + "loss": 0.2646, + "step": 66070 + }, + { + "epoch": 2.5514498629290707, + "grad_norm": 1.9083623886108398, + "learning_rate": 2.9905916573355475e-05, + "loss": 0.1832, + "step": 66080 + }, + { + "epoch": 
2.5518359782230973, + "grad_norm": 0.7062809467315674, + "learning_rate": 2.9880175553753685e-05, + "loss": 0.2515, + "step": 66090 + }, + { + "epoch": 2.5522220935171243, + "grad_norm": 2.6205830574035645, + "learning_rate": 2.98544345341519e-05, + "loss": 0.1872, + "step": 66100 + }, + { + "epoch": 2.552608208811151, + "grad_norm": 0.6591269373893738, + "learning_rate": 2.982869351455011e-05, + "loss": 0.2101, + "step": 66110 + }, + { + "epoch": 2.552994324105178, + "grad_norm": 0.11720894277095795, + "learning_rate": 2.980295249494833e-05, + "loss": 0.0773, + "step": 66120 + }, + { + "epoch": 2.553380439399205, + "grad_norm": 0.4956444799900055, + "learning_rate": 2.977721147534654e-05, + "loss": 0.0854, + "step": 66130 + }, + { + "epoch": 2.5537665546932313, + "grad_norm": 0.11073987185955048, + "learning_rate": 2.9751470455744752e-05, + "loss": 0.0968, + "step": 66140 + }, + { + "epoch": 2.5541526699872583, + "grad_norm": 0.1464674174785614, + "learning_rate": 2.972572943614297e-05, + "loss": 0.1609, + "step": 66150 + }, + { + "epoch": 2.554538785281285, + "grad_norm": 1.9735543727874756, + "learning_rate": 2.9699988416541182e-05, + "loss": 0.2884, + "step": 66160 + }, + { + "epoch": 2.554924900575312, + "grad_norm": 0.5370622873306274, + "learning_rate": 2.9674247396939392e-05, + "loss": 0.1345, + "step": 66170 + }, + { + "epoch": 2.5553110158693384, + "grad_norm": 1.845221996307373, + "learning_rate": 2.964850637733761e-05, + "loss": 0.1761, + "step": 66180 + }, + { + "epoch": 2.5556971311633654, + "grad_norm": 0.051780425012111664, + "learning_rate": 2.9622765357735822e-05, + "loss": 0.1886, + "step": 66190 + }, + { + "epoch": 2.5560832464573924, + "grad_norm": 2.163306713104248, + "learning_rate": 2.959702433813403e-05, + "loss": 0.2559, + "step": 66200 + }, + { + "epoch": 2.556469361751419, + "grad_norm": 1.9034422636032104, + "learning_rate": 2.9571283318532252e-05, + "loss": 0.1655, + "step": 66210 + }, + { + "epoch": 2.556855477045446, + "grad_norm": 0.1645793616771698, + "learning_rate": 2.954554229893046e-05, + "loss": 0.0944, + "step": 66220 + }, + { + "epoch": 2.5572415923394725, + "grad_norm": 0.0783633440732956, + "learning_rate": 2.9519801279328675e-05, + "loss": 0.1487, + "step": 66230 + }, + { + "epoch": 2.5576277076334994, + "grad_norm": 1.8718386888504028, + "learning_rate": 2.949406025972689e-05, + "loss": 0.1607, + "step": 66240 + }, + { + "epoch": 2.558013822927526, + "grad_norm": 0.23518329858779907, + "learning_rate": 2.94683192401251e-05, + "loss": 0.0665, + "step": 66250 + }, + { + "epoch": 2.558399938221553, + "grad_norm": 2.0301125049591064, + "learning_rate": 2.9442578220523315e-05, + "loss": 0.2067, + "step": 66260 + }, + { + "epoch": 2.55878605351558, + "grad_norm": 0.8637131452560425, + "learning_rate": 2.941683720092153e-05, + "loss": 0.1247, + "step": 66270 + }, + { + "epoch": 2.5591721688096065, + "grad_norm": 3.191856861114502, + "learning_rate": 2.9391096181319745e-05, + "loss": 0.1793, + "step": 66280 + }, + { + "epoch": 2.5595582841036335, + "grad_norm": 2.5240116119384766, + "learning_rate": 2.9365355161717955e-05, + "loss": 0.2405, + "step": 66290 + }, + { + "epoch": 2.55994439939766, + "grad_norm": 0.651969313621521, + "learning_rate": 2.933961414211617e-05, + "loss": 0.236, + "step": 66300 + }, + { + "epoch": 2.560330514691687, + "grad_norm": 0.6139543056488037, + "learning_rate": 2.9313873122514385e-05, + "loss": 0.0929, + "step": 66310 + }, + { + "epoch": 2.5607166299857136, + "grad_norm": 2.219248056411743, + "learning_rate": 
2.9288132102912598e-05, + "loss": 0.1579, + "step": 66320 + }, + { + "epoch": 2.5611027452797406, + "grad_norm": 1.0484322309494019, + "learning_rate": 2.9262391083310815e-05, + "loss": 0.0948, + "step": 66330 + }, + { + "epoch": 2.5614888605737676, + "grad_norm": 4.444892406463623, + "learning_rate": 2.9236650063709025e-05, + "loss": 0.2459, + "step": 66340 + }, + { + "epoch": 2.561874975867794, + "grad_norm": 0.5715140700340271, + "learning_rate": 2.9210909044107238e-05, + "loss": 0.1527, + "step": 66350 + }, + { + "epoch": 2.5622610911618207, + "grad_norm": 1.5631526708602905, + "learning_rate": 2.9185168024505448e-05, + "loss": 0.2262, + "step": 66360 + }, + { + "epoch": 2.5626472064558476, + "grad_norm": 0.5048274397850037, + "learning_rate": 2.9159427004903668e-05, + "loss": 0.1175, + "step": 66370 + }, + { + "epoch": 2.5630333217498746, + "grad_norm": 1.8589451313018799, + "learning_rate": 2.9133685985301878e-05, + "loss": 0.1788, + "step": 66380 + }, + { + "epoch": 2.563419437043901, + "grad_norm": 0.7230979800224304, + "learning_rate": 2.910794496570009e-05, + "loss": 0.1978, + "step": 66390 + }, + { + "epoch": 2.563805552337928, + "grad_norm": 0.34469518065452576, + "learning_rate": 2.9082203946098308e-05, + "loss": 0.2338, + "step": 66400 + }, + { + "epoch": 2.564191667631955, + "grad_norm": 0.396876722574234, + "learning_rate": 2.9056462926496518e-05, + "loss": 0.1059, + "step": 66410 + }, + { + "epoch": 2.5645777829259817, + "grad_norm": 0.10169263184070587, + "learning_rate": 2.903072190689473e-05, + "loss": 0.1637, + "step": 66420 + }, + { + "epoch": 2.5649638982200083, + "grad_norm": 0.9067368507385254, + "learning_rate": 2.9004980887292948e-05, + "loss": 0.1774, + "step": 66430 + }, + { + "epoch": 2.5653500135140352, + "grad_norm": 0.03508066385984421, + "learning_rate": 2.897923986769116e-05, + "loss": 0.1696, + "step": 66440 + }, + { + "epoch": 2.5657361288080622, + "grad_norm": 0.8411409258842468, + "learning_rate": 2.895349884808937e-05, + "loss": 0.1355, + "step": 66450 + }, + { + "epoch": 2.566122244102089, + "grad_norm": 9.54201602935791, + "learning_rate": 2.8927757828487588e-05, + "loss": 0.2578, + "step": 66460 + }, + { + "epoch": 2.5665083593961158, + "grad_norm": 0.5490165948867798, + "learning_rate": 2.89020168088858e-05, + "loss": 0.2095, + "step": 66470 + }, + { + "epoch": 2.5668944746901423, + "grad_norm": 0.1423688530921936, + "learning_rate": 2.8876275789284015e-05, + "loss": 0.1465, + "step": 66480 + }, + { + "epoch": 2.5672805899841693, + "grad_norm": 1.456730604171753, + "learning_rate": 2.885053476968223e-05, + "loss": 0.1963, + "step": 66490 + }, + { + "epoch": 2.567666705278196, + "grad_norm": 1.5556591749191284, + "learning_rate": 2.882479375008044e-05, + "loss": 0.2851, + "step": 66500 + }, + { + "epoch": 2.568052820572223, + "grad_norm": 1.825986385345459, + "learning_rate": 2.8799052730478654e-05, + "loss": 0.2956, + "step": 66510 + }, + { + "epoch": 2.56843893586625, + "grad_norm": 1.5602234601974487, + "learning_rate": 2.877331171087687e-05, + "loss": 0.1656, + "step": 66520 + }, + { + "epoch": 2.5688250511602764, + "grad_norm": 0.17269374430179596, + "learning_rate": 2.8747570691275084e-05, + "loss": 0.2931, + "step": 66530 + }, + { + "epoch": 2.5692111664543034, + "grad_norm": 2.0602357387542725, + "learning_rate": 2.8721829671673294e-05, + "loss": 0.1741, + "step": 66540 + }, + { + "epoch": 2.56959728174833, + "grad_norm": 0.8411749005317688, + "learning_rate": 2.869608865207151e-05, + "loss": 0.2062, + "step": 66550 + }, + { + "epoch": 
2.569983397042357, + "grad_norm": 1.9933018684387207, + "learning_rate": 2.8670347632469724e-05, + "loss": 0.0564, + "step": 66560 + }, + { + "epoch": 2.5703695123363834, + "grad_norm": 0.6383324861526489, + "learning_rate": 2.8644606612867934e-05, + "loss": 0.1954, + "step": 66570 + }, + { + "epoch": 2.5707556276304104, + "grad_norm": 0.6832618713378906, + "learning_rate": 2.8618865593266154e-05, + "loss": 0.1364, + "step": 66580 + }, + { + "epoch": 2.5711417429244374, + "grad_norm": 0.13984490931034088, + "learning_rate": 2.8593124573664364e-05, + "loss": 0.2051, + "step": 66590 + }, + { + "epoch": 2.571527858218464, + "grad_norm": 0.9289367198944092, + "learning_rate": 2.8567383554062578e-05, + "loss": 0.167, + "step": 66600 + }, + { + "epoch": 2.571913973512491, + "grad_norm": 2.27067232131958, + "learning_rate": 2.8541642534460794e-05, + "loss": 0.2125, + "step": 66610 + }, + { + "epoch": 2.5723000888065175, + "grad_norm": 2.3731513023376465, + "learning_rate": 2.8515901514859004e-05, + "loss": 0.1417, + "step": 66620 + }, + { + "epoch": 2.5726862041005445, + "grad_norm": 2.030726194381714, + "learning_rate": 2.8490160495257218e-05, + "loss": 0.1292, + "step": 66630 + }, + { + "epoch": 2.573072319394571, + "grad_norm": 2.3475704193115234, + "learning_rate": 2.846441947565543e-05, + "loss": 0.2046, + "step": 66640 + }, + { + "epoch": 2.573458434688598, + "grad_norm": 1.8660598993301392, + "learning_rate": 2.8438678456053648e-05, + "loss": 0.1539, + "step": 66650 + }, + { + "epoch": 2.573844549982625, + "grad_norm": 2.3513095378875732, + "learning_rate": 2.8412937436451858e-05, + "loss": 0.3371, + "step": 66660 + }, + { + "epoch": 2.5742306652766516, + "grad_norm": 0.40551525354385376, + "learning_rate": 2.838719641685007e-05, + "loss": 0.1595, + "step": 66670 + }, + { + "epoch": 2.5746167805706786, + "grad_norm": 0.5435059070587158, + "learning_rate": 2.8361455397248288e-05, + "loss": 0.1139, + "step": 66680 + }, + { + "epoch": 2.575002895864705, + "grad_norm": 0.13456226885318756, + "learning_rate": 2.83357143776465e-05, + "loss": 0.1576, + "step": 66690 + }, + { + "epoch": 2.575389011158732, + "grad_norm": 0.8292468786239624, + "learning_rate": 2.830997335804471e-05, + "loss": 0.3232, + "step": 66700 + }, + { + "epoch": 2.5757751264527586, + "grad_norm": 2.613285541534424, + "learning_rate": 2.8284232338442927e-05, + "loss": 0.3047, + "step": 66710 + }, + { + "epoch": 2.5761612417467856, + "grad_norm": 0.11251250654459, + "learning_rate": 2.825849131884114e-05, + "loss": 0.2049, + "step": 66720 + }, + { + "epoch": 2.5765473570408126, + "grad_norm": 0.4605161249637604, + "learning_rate": 2.823275029923935e-05, + "loss": 0.0768, + "step": 66730 + }, + { + "epoch": 2.576933472334839, + "grad_norm": 1.2005031108856201, + "learning_rate": 2.820700927963757e-05, + "loss": 0.2257, + "step": 66740 + }, + { + "epoch": 2.5773195876288657, + "grad_norm": 1.1498386859893799, + "learning_rate": 2.818126826003578e-05, + "loss": 0.0817, + "step": 66750 + }, + { + "epoch": 2.5777057029228927, + "grad_norm": 0.5442838072776794, + "learning_rate": 2.8155527240433994e-05, + "loss": 0.1027, + "step": 66760 + }, + { + "epoch": 2.5780918182169197, + "grad_norm": 0.026386337354779243, + "learning_rate": 2.812978622083221e-05, + "loss": 0.2398, + "step": 66770 + }, + { + "epoch": 2.5784779335109462, + "grad_norm": 1.8325613737106323, + "learning_rate": 2.810404520123042e-05, + "loss": 0.1616, + "step": 66780 + }, + { + "epoch": 2.578864048804973, + "grad_norm": 4.571000576019287, + "learning_rate": 
2.8078304181628634e-05, + "loss": 0.327, + "step": 66790 + }, + { + "epoch": 2.579250164099, + "grad_norm": 0.88929682970047, + "learning_rate": 2.805256316202685e-05, + "loss": 0.128, + "step": 66800 + }, + { + "epoch": 2.5796362793930268, + "grad_norm": 0.5320045351982117, + "learning_rate": 2.8026822142425064e-05, + "loss": 0.043, + "step": 66810 + }, + { + "epoch": 2.5800223946870533, + "grad_norm": 2.667931079864502, + "learning_rate": 2.8001081122823274e-05, + "loss": 0.1023, + "step": 66820 + }, + { + "epoch": 2.5804085099810803, + "grad_norm": 0.5052358508110046, + "learning_rate": 2.7975340103221494e-05, + "loss": 0.1913, + "step": 66830 + }, + { + "epoch": 2.5807946252751073, + "grad_norm": 0.03477906435728073, + "learning_rate": 2.7949599083619704e-05, + "loss": 0.0771, + "step": 66840 + }, + { + "epoch": 2.581180740569134, + "grad_norm": 1.756790280342102, + "learning_rate": 2.7923858064017917e-05, + "loss": 0.1997, + "step": 66850 + }, + { + "epoch": 2.581566855863161, + "grad_norm": 0.2687755227088928, + "learning_rate": 2.7898117044416134e-05, + "loss": 0.1979, + "step": 66860 + }, + { + "epoch": 2.581952971157188, + "grad_norm": 0.3334960341453552, + "learning_rate": 2.7872376024814344e-05, + "loss": 0.1232, + "step": 66870 + }, + { + "epoch": 2.5823390864512143, + "grad_norm": 0.06918884068727493, + "learning_rate": 2.7846635005212557e-05, + "loss": 0.1476, + "step": 66880 + }, + { + "epoch": 2.582725201745241, + "grad_norm": 2.113374710083008, + "learning_rate": 2.7820893985610767e-05, + "loss": 0.2019, + "step": 66890 + }, + { + "epoch": 2.583111317039268, + "grad_norm": 0.7421366572380066, + "learning_rate": 2.7795152966008987e-05, + "loss": 0.1316, + "step": 66900 + }, + { + "epoch": 2.583497432333295, + "grad_norm": 0.5632963180541992, + "learning_rate": 2.7769411946407197e-05, + "loss": 0.2162, + "step": 66910 + }, + { + "epoch": 2.5838835476273214, + "grad_norm": 1.951395869255066, + "learning_rate": 2.774367092680541e-05, + "loss": 0.2977, + "step": 66920 + }, + { + "epoch": 2.5842696629213484, + "grad_norm": 1.139452338218689, + "learning_rate": 2.7717929907203627e-05, + "loss": 0.1505, + "step": 66930 + }, + { + "epoch": 2.584655778215375, + "grad_norm": 1.4778863191604614, + "learning_rate": 2.769218888760184e-05, + "loss": 0.167, + "step": 66940 + }, + { + "epoch": 2.585041893509402, + "grad_norm": 0.3687165379524231, + "learning_rate": 2.766644786800005e-05, + "loss": 0.0876, + "step": 66950 + }, + { + "epoch": 2.5854280088034285, + "grad_norm": 0.3774222433567047, + "learning_rate": 2.7640706848398267e-05, + "loss": 0.0574, + "step": 66960 + }, + { + "epoch": 2.5858141240974555, + "grad_norm": 1.060309886932373, + "learning_rate": 2.761496582879648e-05, + "loss": 0.1978, + "step": 66970 + }, + { + "epoch": 2.5862002393914825, + "grad_norm": 0.041595011949539185, + "learning_rate": 2.758922480919469e-05, + "loss": 0.1617, + "step": 66980 + }, + { + "epoch": 2.586586354685509, + "grad_norm": 1.0350271463394165, + "learning_rate": 2.756348378959291e-05, + "loss": 0.1443, + "step": 66990 + }, + { + "epoch": 2.586972469979536, + "grad_norm": 3.072813034057617, + "learning_rate": 2.753774276999112e-05, + "loss": 0.1084, + "step": 67000 + }, + { + "epoch": 2.5873585852735626, + "grad_norm": 1.4295400381088257, + "learning_rate": 2.7512001750389334e-05, + "loss": 0.2752, + "step": 67010 + }, + { + "epoch": 2.5877447005675895, + "grad_norm": 0.904931366443634, + "learning_rate": 2.748626073078755e-05, + "loss": 0.1908, + "step": 67020 + }, + { + "epoch": 
2.588130815861616, + "grad_norm": 0.12692487239837646, + "learning_rate": 2.746051971118576e-05, + "loss": 0.1956, + "step": 67030 + }, + { + "epoch": 2.588516931155643, + "grad_norm": 3.306731700897217, + "learning_rate": 2.7434778691583974e-05, + "loss": 0.1552, + "step": 67040 + }, + { + "epoch": 2.58890304644967, + "grad_norm": 0.17633609473705292, + "learning_rate": 2.740903767198219e-05, + "loss": 0.2643, + "step": 67050 + }, + { + "epoch": 2.5892891617436966, + "grad_norm": 1.683794617652893, + "learning_rate": 2.7383296652380404e-05, + "loss": 0.1609, + "step": 67060 + }, + { + "epoch": 2.5896752770377236, + "grad_norm": 0.799902617931366, + "learning_rate": 2.7357555632778613e-05, + "loss": 0.1787, + "step": 67070 + }, + { + "epoch": 2.59006139233175, + "grad_norm": 0.41263818740844727, + "learning_rate": 2.733181461317683e-05, + "loss": 0.144, + "step": 67080 + }, + { + "epoch": 2.590447507625777, + "grad_norm": 0.018304159864783287, + "learning_rate": 2.7306073593575043e-05, + "loss": 0.3317, + "step": 67090 + }, + { + "epoch": 2.5908336229198037, + "grad_norm": 0.5893455743789673, + "learning_rate": 2.7280332573973257e-05, + "loss": 0.1116, + "step": 67100 + }, + { + "epoch": 2.5912197382138307, + "grad_norm": 0.04571494832634926, + "learning_rate": 2.7254591554371473e-05, + "loss": 0.0919, + "step": 67110 + }, + { + "epoch": 2.5916058535078577, + "grad_norm": 1.2573976516723633, + "learning_rate": 2.7228850534769683e-05, + "loss": 0.0927, + "step": 67120 + }, + { + "epoch": 2.591991968801884, + "grad_norm": 2.4016544818878174, + "learning_rate": 2.7203109515167897e-05, + "loss": 0.2308, + "step": 67130 + }, + { + "epoch": 2.592378084095911, + "grad_norm": 0.6153950691223145, + "learning_rate": 2.7177368495566113e-05, + "loss": 0.3398, + "step": 67140 + }, + { + "epoch": 2.5927641993899377, + "grad_norm": 0.638940155506134, + "learning_rate": 2.7151627475964327e-05, + "loss": 0.0721, + "step": 67150 + }, + { + "epoch": 2.5931503146839647, + "grad_norm": 2.8470890522003174, + "learning_rate": 2.7125886456362537e-05, + "loss": 0.3437, + "step": 67160 + }, + { + "epoch": 2.5935364299779913, + "grad_norm": 0.21816271543502808, + "learning_rate": 2.710014543676075e-05, + "loss": 0.2749, + "step": 67170 + }, + { + "epoch": 2.5939225452720183, + "grad_norm": 1.3057670593261719, + "learning_rate": 2.7074404417158967e-05, + "loss": 0.2564, + "step": 67180 + }, + { + "epoch": 2.5943086605660453, + "grad_norm": 3.9401612281799316, + "learning_rate": 2.7048663397557177e-05, + "loss": 0.1827, + "step": 67190 + }, + { + "epoch": 2.594694775860072, + "grad_norm": 0.09398512542247772, + "learning_rate": 2.702292237795539e-05, + "loss": 0.2714, + "step": 67200 + }, + { + "epoch": 2.5950808911540983, + "grad_norm": 1.3454128503799438, + "learning_rate": 2.6997181358353607e-05, + "loss": 0.1409, + "step": 67210 + }, + { + "epoch": 2.5954670064481253, + "grad_norm": 3.312215566635132, + "learning_rate": 2.697144033875182e-05, + "loss": 0.1655, + "step": 67220 + }, + { + "epoch": 2.5958531217421523, + "grad_norm": 0.40052536129951477, + "learning_rate": 2.694569931915003e-05, + "loss": 0.095, + "step": 67230 + }, + { + "epoch": 2.596239237036179, + "grad_norm": 2.1148762702941895, + "learning_rate": 2.6919958299548247e-05, + "loss": 0.1167, + "step": 67240 + }, + { + "epoch": 2.596625352330206, + "grad_norm": 0.539953887462616, + "learning_rate": 2.689421727994646e-05, + "loss": 0.2198, + "step": 67250 + }, + { + "epoch": 2.597011467624233, + "grad_norm": 0.6172623634338379, + "learning_rate": 
2.6868476260344673e-05, + "loss": 0.0879, + "step": 67260 + }, + { + "epoch": 2.5973975829182594, + "grad_norm": 0.9759122729301453, + "learning_rate": 2.684273524074289e-05, + "loss": 0.1162, + "step": 67270 + }, + { + "epoch": 2.597783698212286, + "grad_norm": 1.2510347366333008, + "learning_rate": 2.68169942211411e-05, + "loss": 0.1106, + "step": 67280 + }, + { + "epoch": 2.598169813506313, + "grad_norm": 1.3545809984207153, + "learning_rate": 2.6791253201539313e-05, + "loss": 0.1538, + "step": 67290 + }, + { + "epoch": 2.59855592880034, + "grad_norm": 0.4334702789783478, + "learning_rate": 2.676551218193753e-05, + "loss": 0.1178, + "step": 67300 + }, + { + "epoch": 2.5989420440943665, + "grad_norm": 0.7618227601051331, + "learning_rate": 2.6739771162335743e-05, + "loss": 0.158, + "step": 67310 + }, + { + "epoch": 2.5993281593883935, + "grad_norm": 2.0803728103637695, + "learning_rate": 2.6714030142733953e-05, + "loss": 0.2248, + "step": 67320 + }, + { + "epoch": 2.5997142746824204, + "grad_norm": 0.7592846751213074, + "learning_rate": 2.668828912313217e-05, + "loss": 0.139, + "step": 67330 + }, + { + "epoch": 2.600100389976447, + "grad_norm": 2.20271372795105, + "learning_rate": 2.6662548103530383e-05, + "loss": 0.2735, + "step": 67340 + }, + { + "epoch": 2.6004865052704735, + "grad_norm": 2.25789213180542, + "learning_rate": 2.6636807083928593e-05, + "loss": 0.1816, + "step": 67350 + }, + { + "epoch": 2.6008726205645005, + "grad_norm": 1.8669871091842651, + "learning_rate": 2.6611066064326813e-05, + "loss": 0.2003, + "step": 67360 + }, + { + "epoch": 2.6012587358585275, + "grad_norm": 2.2639665603637695, + "learning_rate": 2.6585325044725023e-05, + "loss": 0.2869, + "step": 67370 + }, + { + "epoch": 2.601644851152554, + "grad_norm": 0.5188022255897522, + "learning_rate": 2.6559584025123236e-05, + "loss": 0.1267, + "step": 67380 + }, + { + "epoch": 2.602030966446581, + "grad_norm": 0.7478063106536865, + "learning_rate": 2.6533843005521453e-05, + "loss": 0.1506, + "step": 67390 + }, + { + "epoch": 2.6024170817406076, + "grad_norm": 0.05027804523706436, + "learning_rate": 2.6508101985919663e-05, + "loss": 0.1469, + "step": 67400 + }, + { + "epoch": 2.6028031970346346, + "grad_norm": 1.4990183115005493, + "learning_rate": 2.6482360966317876e-05, + "loss": 0.0823, + "step": 67410 + }, + { + "epoch": 2.603189312328661, + "grad_norm": 0.23909913003444672, + "learning_rate": 2.645661994671609e-05, + "loss": 0.1971, + "step": 67420 + }, + { + "epoch": 2.603575427622688, + "grad_norm": 0.03295808658003807, + "learning_rate": 2.6430878927114306e-05, + "loss": 0.0741, + "step": 67430 + }, + { + "epoch": 2.603961542916715, + "grad_norm": 1.407315731048584, + "learning_rate": 2.6405137907512516e-05, + "loss": 0.115, + "step": 67440 + }, + { + "epoch": 2.6043476582107417, + "grad_norm": 2.2319045066833496, + "learning_rate": 2.637939688791073e-05, + "loss": 0.2297, + "step": 67450 + }, + { + "epoch": 2.6047337735047686, + "grad_norm": 0.40752896666526794, + "learning_rate": 2.6353655868308946e-05, + "loss": 0.132, + "step": 67460 + }, + { + "epoch": 2.605119888798795, + "grad_norm": 1.6817177534103394, + "learning_rate": 2.632791484870716e-05, + "loss": 0.1508, + "step": 67470 + }, + { + "epoch": 2.605506004092822, + "grad_norm": 1.18791663646698, + "learning_rate": 2.630217382910537e-05, + "loss": 0.1663, + "step": 67480 + }, + { + "epoch": 2.6058921193868487, + "grad_norm": 2.44256329536438, + "learning_rate": 2.6276432809503586e-05, + "loss": 0.1656, + "step": 67490 + }, + { + "epoch": 
2.6062782346808757, + "grad_norm": 0.49040651321411133, + "learning_rate": 2.62506917899018e-05, + "loss": 0.227, + "step": 67500 + }, + { + "epoch": 2.6066643499749027, + "grad_norm": 0.6817883849143982, + "learning_rate": 2.622495077030001e-05, + "loss": 0.0833, + "step": 67510 + }, + { + "epoch": 2.6070504652689293, + "grad_norm": 1.4250826835632324, + "learning_rate": 2.619920975069823e-05, + "loss": 0.0818, + "step": 67520 + }, + { + "epoch": 2.6074365805629562, + "grad_norm": 5.752524375915527, + "learning_rate": 2.617346873109644e-05, + "loss": 0.1824, + "step": 67530 + }, + { + "epoch": 2.607822695856983, + "grad_norm": 1.8009400367736816, + "learning_rate": 2.6147727711494653e-05, + "loss": 0.1534, + "step": 67540 + }, + { + "epoch": 2.6082088111510098, + "grad_norm": 0.7888918519020081, + "learning_rate": 2.612198669189287e-05, + "loss": 0.2632, + "step": 67550 + }, + { + "epoch": 2.6085949264450363, + "grad_norm": 1.1485899686813354, + "learning_rate": 2.609624567229108e-05, + "loss": 0.2014, + "step": 67560 + }, + { + "epoch": 2.6089810417390633, + "grad_norm": 0.017272522673010826, + "learning_rate": 2.6070504652689293e-05, + "loss": 0.1698, + "step": 67570 + }, + { + "epoch": 2.6093671570330903, + "grad_norm": 0.23312939703464508, + "learning_rate": 2.604476363308751e-05, + "loss": 0.11, + "step": 67580 + }, + { + "epoch": 2.609753272327117, + "grad_norm": 1.5409551858901978, + "learning_rate": 2.6019022613485723e-05, + "loss": 0.2031, + "step": 67590 + }, + { + "epoch": 2.610139387621144, + "grad_norm": 2.757416248321533, + "learning_rate": 2.5993281593883933e-05, + "loss": 0.2326, + "step": 67600 + }, + { + "epoch": 2.6105255029151704, + "grad_norm": 0.03714454174041748, + "learning_rate": 2.5967540574282153e-05, + "loss": 0.305, + "step": 67610 + }, + { + "epoch": 2.6109116182091974, + "grad_norm": 1.5993083715438843, + "learning_rate": 2.5941799554680363e-05, + "loss": 0.0492, + "step": 67620 + }, + { + "epoch": 2.611297733503224, + "grad_norm": 3.3023600578308105, + "learning_rate": 2.5916058535078576e-05, + "loss": 0.1256, + "step": 67630 + }, + { + "epoch": 2.611683848797251, + "grad_norm": 2.0448055267333984, + "learning_rate": 2.5890317515476793e-05, + "loss": 0.1496, + "step": 67640 + }, + { + "epoch": 2.612069964091278, + "grad_norm": 2.056248188018799, + "learning_rate": 2.5864576495875002e-05, + "loss": 0.1396, + "step": 67650 + }, + { + "epoch": 2.6124560793853044, + "grad_norm": 0.7189445495605469, + "learning_rate": 2.5838835476273216e-05, + "loss": 0.1375, + "step": 67660 + }, + { + "epoch": 2.612842194679331, + "grad_norm": 0.3849039077758789, + "learning_rate": 2.5813094456671426e-05, + "loss": 0.0882, + "step": 67670 + }, + { + "epoch": 2.613228309973358, + "grad_norm": 1.2116458415985107, + "learning_rate": 2.5787353437069646e-05, + "loss": 0.0892, + "step": 67680 + }, + { + "epoch": 2.613614425267385, + "grad_norm": 0.5601721405982971, + "learning_rate": 2.5761612417467856e-05, + "loss": 0.148, + "step": 67690 + }, + { + "epoch": 2.6140005405614115, + "grad_norm": 1.1883691549301147, + "learning_rate": 2.573587139786607e-05, + "loss": 0.1673, + "step": 67700 + }, + { + "epoch": 2.6143866558554385, + "grad_norm": 0.8299083113670349, + "learning_rate": 2.5710130378264286e-05, + "loss": 0.1871, + "step": 67710 + }, + { + "epoch": 2.6147727711494655, + "grad_norm": 0.6316946744918823, + "learning_rate": 2.5684389358662496e-05, + "loss": 0.0707, + "step": 67720 + }, + { + "epoch": 2.615158886443492, + "grad_norm": 0.5716143846511841, + "learning_rate": 
2.565864833906071e-05, + "loss": 0.0993, + "step": 67730 + }, + { + "epoch": 2.6155450017375186, + "grad_norm": 0.6665957570075989, + "learning_rate": 2.5632907319458926e-05, + "loss": 0.1402, + "step": 67740 + }, + { + "epoch": 2.6159311170315456, + "grad_norm": 1.1837033033370972, + "learning_rate": 2.560716629985714e-05, + "loss": 0.0963, + "step": 67750 + }, + { + "epoch": 2.6163172323255726, + "grad_norm": 0.7204211354255676, + "learning_rate": 2.558142528025535e-05, + "loss": 0.1109, + "step": 67760 + }, + { + "epoch": 2.616703347619599, + "grad_norm": 0.04177774861454964, + "learning_rate": 2.555568426065357e-05, + "loss": 0.206, + "step": 67770 + }, + { + "epoch": 2.617089462913626, + "grad_norm": 0.8528016805648804, + "learning_rate": 2.552994324105178e-05, + "loss": 0.1477, + "step": 67780 + }, + { + "epoch": 2.6174755782076526, + "grad_norm": 1.8284223079681396, + "learning_rate": 2.5504202221449992e-05, + "loss": 0.2925, + "step": 67790 + }, + { + "epoch": 2.6178616935016796, + "grad_norm": 2.03399658203125, + "learning_rate": 2.547846120184821e-05, + "loss": 0.1716, + "step": 67800 + }, + { + "epoch": 2.618247808795706, + "grad_norm": 1.1871380805969238, + "learning_rate": 2.545272018224642e-05, + "loss": 0.1387, + "step": 67810 + }, + { + "epoch": 2.618633924089733, + "grad_norm": 0.3045734465122223, + "learning_rate": 2.5426979162644632e-05, + "loss": 0.1624, + "step": 67820 + }, + { + "epoch": 2.61902003938376, + "grad_norm": 1.4469716548919678, + "learning_rate": 2.540123814304285e-05, + "loss": 0.1534, + "step": 67830 + }, + { + "epoch": 2.6194061546777867, + "grad_norm": 0.7630550861358643, + "learning_rate": 2.5375497123441062e-05, + "loss": 0.1266, + "step": 67840 + }, + { + "epoch": 2.6197922699718137, + "grad_norm": 1.3296400308609009, + "learning_rate": 2.5349756103839272e-05, + "loss": 0.3268, + "step": 67850 + }, + { + "epoch": 2.6201783852658402, + "grad_norm": 0.7620146870613098, + "learning_rate": 2.532401508423749e-05, + "loss": 0.1747, + "step": 67860 + }, + { + "epoch": 2.6205645005598672, + "grad_norm": 1.4850629568099976, + "learning_rate": 2.5298274064635702e-05, + "loss": 0.1165, + "step": 67870 + }, + { + "epoch": 2.6209506158538938, + "grad_norm": 0.30754703283309937, + "learning_rate": 2.5272533045033915e-05, + "loss": 0.1603, + "step": 67880 + }, + { + "epoch": 2.6213367311479208, + "grad_norm": 0.47044405341148376, + "learning_rate": 2.5246792025432132e-05, + "loss": 0.1471, + "step": 67890 + }, + { + "epoch": 2.6217228464419478, + "grad_norm": 1.428301453590393, + "learning_rate": 2.5221051005830342e-05, + "loss": 0.1448, + "step": 67900 + }, + { + "epoch": 2.6221089617359743, + "grad_norm": 0.3132546544075012, + "learning_rate": 2.5195309986228555e-05, + "loss": 0.1899, + "step": 67910 + }, + { + "epoch": 2.6224950770300013, + "grad_norm": 0.36817577481269836, + "learning_rate": 2.5169568966626772e-05, + "loss": 0.2864, + "step": 67920 + }, + { + "epoch": 2.622881192324028, + "grad_norm": 0.8652348518371582, + "learning_rate": 2.5143827947024985e-05, + "loss": 0.0987, + "step": 67930 + }, + { + "epoch": 2.623267307618055, + "grad_norm": 0.5235974788665771, + "learning_rate": 2.5118086927423195e-05, + "loss": 0.3079, + "step": 67940 + }, + { + "epoch": 2.6236534229120814, + "grad_norm": 0.2852037847042084, + "learning_rate": 2.509234590782141e-05, + "loss": 0.1217, + "step": 67950 + }, + { + "epoch": 2.6240395382061084, + "grad_norm": 0.6256119608879089, + "learning_rate": 2.5066604888219625e-05, + "loss": 0.1645, + "step": 67960 + }, + { + 
"epoch": 2.6244256535001353, + "grad_norm": 1.0917539596557617, + "learning_rate": 2.5040863868617835e-05, + "loss": 0.1589, + "step": 67970 + }, + { + "epoch": 2.624811768794162, + "grad_norm": 1.1311125755310059, + "learning_rate": 2.501512284901605e-05, + "loss": 0.2388, + "step": 67980 + }, + { + "epoch": 2.625197884088189, + "grad_norm": 0.806238055229187, + "learning_rate": 2.4989381829414265e-05, + "loss": 0.2127, + "step": 67990 + }, + { + "epoch": 2.6255839993822154, + "grad_norm": 1.4400973320007324, + "learning_rate": 2.496364080981248e-05, + "loss": 0.0687, + "step": 68000 + }, + { + "epoch": 2.6259701146762424, + "grad_norm": 0.050153911113739014, + "learning_rate": 2.4937899790210692e-05, + "loss": 0.0564, + "step": 68010 + }, + { + "epoch": 2.626356229970269, + "grad_norm": 1.139260172843933, + "learning_rate": 2.4912158770608905e-05, + "loss": 0.0868, + "step": 68020 + }, + { + "epoch": 2.626742345264296, + "grad_norm": 1.2839637994766235, + "learning_rate": 2.488641775100712e-05, + "loss": 0.1589, + "step": 68030 + }, + { + "epoch": 2.627128460558323, + "grad_norm": 0.19808660447597504, + "learning_rate": 2.4860676731405332e-05, + "loss": 0.2015, + "step": 68040 + }, + { + "epoch": 2.6275145758523495, + "grad_norm": 0.38413748145103455, + "learning_rate": 2.4834935711803545e-05, + "loss": 0.1181, + "step": 68050 + }, + { + "epoch": 2.627900691146376, + "grad_norm": 0.5252083539962769, + "learning_rate": 2.480919469220176e-05, + "loss": 0.2141, + "step": 68060 + }, + { + "epoch": 2.628286806440403, + "grad_norm": 0.3906213641166687, + "learning_rate": 2.4783453672599975e-05, + "loss": 0.3083, + "step": 68070 + }, + { + "epoch": 2.62867292173443, + "grad_norm": 2.1290669441223145, + "learning_rate": 2.4757712652998185e-05, + "loss": 0.2155, + "step": 68080 + }, + { + "epoch": 2.6290590370284566, + "grad_norm": 1.0176451206207275, + "learning_rate": 2.4731971633396402e-05, + "loss": 0.1852, + "step": 68090 + }, + { + "epoch": 2.6294451523224835, + "grad_norm": 2.1066977977752686, + "learning_rate": 2.4706230613794615e-05, + "loss": 0.2177, + "step": 68100 + }, + { + "epoch": 2.6298312676165105, + "grad_norm": 2.267906427383423, + "learning_rate": 2.4680489594192825e-05, + "loss": 0.1359, + "step": 68110 + }, + { + "epoch": 2.630217382910537, + "grad_norm": 1.4150601625442505, + "learning_rate": 2.465474857459104e-05, + "loss": 0.1805, + "step": 68120 + }, + { + "epoch": 2.6306034982045636, + "grad_norm": 0.41347402334213257, + "learning_rate": 2.462900755498925e-05, + "loss": 0.095, + "step": 68130 + }, + { + "epoch": 2.6309896134985906, + "grad_norm": 1.3140255212783813, + "learning_rate": 2.4603266535387468e-05, + "loss": 0.0693, + "step": 68140 + }, + { + "epoch": 2.6313757287926176, + "grad_norm": 1.2731821537017822, + "learning_rate": 2.457752551578568e-05, + "loss": 0.2528, + "step": 68150 + }, + { + "epoch": 2.631761844086644, + "grad_norm": 2.4566001892089844, + "learning_rate": 2.4551784496183895e-05, + "loss": 0.1169, + "step": 68160 + }, + { + "epoch": 2.632147959380671, + "grad_norm": 2.265305280685425, + "learning_rate": 2.4526043476582108e-05, + "loss": 0.154, + "step": 68170 + }, + { + "epoch": 2.632534074674698, + "grad_norm": 0.5196200609207153, + "learning_rate": 2.450030245698032e-05, + "loss": 0.1029, + "step": 68180 + }, + { + "epoch": 2.6329201899687247, + "grad_norm": 2.5574257373809814, + "learning_rate": 2.4474561437378535e-05, + "loss": 0.2491, + "step": 68190 + }, + { + "epoch": 2.6333063052627512, + "grad_norm": 0.40821412205696106, + 
"learning_rate": 2.4448820417776748e-05, + "loss": 0.1842, + "step": 68200 + }, + { + "epoch": 2.633692420556778, + "grad_norm": 0.9594093561172485, + "learning_rate": 2.4423079398174965e-05, + "loss": 0.2135, + "step": 68210 + }, + { + "epoch": 2.634078535850805, + "grad_norm": 0.3707121014595032, + "learning_rate": 2.4397338378573175e-05, + "loss": 0.1715, + "step": 68220 + }, + { + "epoch": 2.6344646511448317, + "grad_norm": 0.9233579039573669, + "learning_rate": 2.437159735897139e-05, + "loss": 0.3378, + "step": 68230 + }, + { + "epoch": 2.6348507664388587, + "grad_norm": 1.8800396919250488, + "learning_rate": 2.4345856339369605e-05, + "loss": 0.0902, + "step": 68240 + }, + { + "epoch": 2.6352368817328853, + "grad_norm": 1.0025197267532349, + "learning_rate": 2.4320115319767818e-05, + "loss": 0.2189, + "step": 68250 + }, + { + "epoch": 2.6356229970269123, + "grad_norm": 0.03650035336613655, + "learning_rate": 2.429437430016603e-05, + "loss": 0.2025, + "step": 68260 + }, + { + "epoch": 2.636009112320939, + "grad_norm": 0.20000745356082916, + "learning_rate": 2.4268633280564245e-05, + "loss": 0.1395, + "step": 68270 + }, + { + "epoch": 2.636395227614966, + "grad_norm": 0.7981158494949341, + "learning_rate": 2.4242892260962458e-05, + "loss": 0.0875, + "step": 68280 + }, + { + "epoch": 2.636781342908993, + "grad_norm": 1.6767163276672363, + "learning_rate": 2.421715124136067e-05, + "loss": 0.2558, + "step": 68290 + }, + { + "epoch": 2.6371674582030193, + "grad_norm": 2.023684024810791, + "learning_rate": 2.4191410221758885e-05, + "loss": 0.2657, + "step": 68300 + }, + { + "epoch": 2.6375535734970463, + "grad_norm": 1.0396549701690674, + "learning_rate": 2.4165669202157098e-05, + "loss": 0.1199, + "step": 68310 + }, + { + "epoch": 2.637939688791073, + "grad_norm": 0.8373544216156006, + "learning_rate": 2.4139928182555315e-05, + "loss": 0.1273, + "step": 68320 + }, + { + "epoch": 2.6383258040851, + "grad_norm": 1.0113970041275024, + "learning_rate": 2.4114187162953525e-05, + "loss": 0.1111, + "step": 68330 + }, + { + "epoch": 2.6387119193791264, + "grad_norm": 0.08151128888130188, + "learning_rate": 2.4088446143351738e-05, + "loss": 0.1949, + "step": 68340 + }, + { + "epoch": 2.6390980346731534, + "grad_norm": 1.629394292831421, + "learning_rate": 2.4062705123749955e-05, + "loss": 0.1135, + "step": 68350 + }, + { + "epoch": 2.6394841499671804, + "grad_norm": 0.11504169553518295, + "learning_rate": 2.4036964104148165e-05, + "loss": 0.1591, + "step": 68360 + }, + { + "epoch": 2.639870265261207, + "grad_norm": 1.0481438636779785, + "learning_rate": 2.401122308454638e-05, + "loss": 0.1151, + "step": 68370 + }, + { + "epoch": 2.640256380555234, + "grad_norm": 1.7637771368026733, + "learning_rate": 2.3985482064944595e-05, + "loss": 0.2499, + "step": 68380 + }, + { + "epoch": 2.6406424958492605, + "grad_norm": 0.43667173385620117, + "learning_rate": 2.3959741045342808e-05, + "loss": 0.0512, + "step": 68390 + }, + { + "epoch": 2.6410286111432875, + "grad_norm": 0.9862222075462341, + "learning_rate": 2.393400002574102e-05, + "loss": 0.1, + "step": 68400 + }, + { + "epoch": 2.641414726437314, + "grad_norm": 0.996530294418335, + "learning_rate": 2.3908259006139234e-05, + "loss": 0.1679, + "step": 68410 + }, + { + "epoch": 2.641800841731341, + "grad_norm": 1.451374888420105, + "learning_rate": 2.3882517986537448e-05, + "loss": 0.2103, + "step": 68420 + }, + { + "epoch": 2.642186957025368, + "grad_norm": 0.2730307877063751, + "learning_rate": 2.385677696693566e-05, + "loss": 0.1522, + "step": 68430 + }, 
+ { + "epoch": 2.6425730723193945, + "grad_norm": 0.06040269508957863, + "learning_rate": 2.3831035947333874e-05, + "loss": 0.1278, + "step": 68440 + }, + { + "epoch": 2.6429591876134215, + "grad_norm": 1.716790795326233, + "learning_rate": 2.3805294927732088e-05, + "loss": 0.2005, + "step": 68450 + }, + { + "epoch": 2.643345302907448, + "grad_norm": 1.4266630411148071, + "learning_rate": 2.3779553908130304e-05, + "loss": 0.084, + "step": 68460 + }, + { + "epoch": 2.643731418201475, + "grad_norm": 0.24637824296951294, + "learning_rate": 2.3753812888528514e-05, + "loss": 0.1616, + "step": 68470 + }, + { + "epoch": 2.6441175334955016, + "grad_norm": 0.1911696344614029, + "learning_rate": 2.372807186892673e-05, + "loss": 0.2609, + "step": 68480 + }, + { + "epoch": 2.6445036487895286, + "grad_norm": 0.5890191793441772, + "learning_rate": 2.3702330849324944e-05, + "loss": 0.0889, + "step": 68490 + }, + { + "epoch": 2.6448897640835556, + "grad_norm": 1.8803762197494507, + "learning_rate": 2.3676589829723154e-05, + "loss": 0.1945, + "step": 68500 + }, + { + "epoch": 2.645275879377582, + "grad_norm": 0.13470906019210815, + "learning_rate": 2.365084881012137e-05, + "loss": 0.2507, + "step": 68510 + }, + { + "epoch": 2.6456619946716087, + "grad_norm": 0.5220600962638855, + "learning_rate": 2.362510779051958e-05, + "loss": 0.1619, + "step": 68520 + }, + { + "epoch": 2.6460481099656357, + "grad_norm": 0.04346944019198418, + "learning_rate": 2.3599366770917798e-05, + "loss": 0.162, + "step": 68530 + }, + { + "epoch": 2.6464342252596627, + "grad_norm": 1.4460773468017578, + "learning_rate": 2.357362575131601e-05, + "loss": 0.2857, + "step": 68540 + }, + { + "epoch": 2.646820340553689, + "grad_norm": 1.9554592370986938, + "learning_rate": 2.3547884731714224e-05, + "loss": 0.1108, + "step": 68550 + }, + { + "epoch": 2.647206455847716, + "grad_norm": 0.596594512462616, + "learning_rate": 2.3522143712112438e-05, + "loss": 0.1234, + "step": 68560 + }, + { + "epoch": 2.647592571141743, + "grad_norm": 0.4433450996875763, + "learning_rate": 2.349640269251065e-05, + "loss": 0.2316, + "step": 68570 + }, + { + "epoch": 2.6479786864357697, + "grad_norm": 0.5461844801902771, + "learning_rate": 2.3470661672908864e-05, + "loss": 0.3235, + "step": 68580 + }, + { + "epoch": 2.6483648017297963, + "grad_norm": 0.3693888187408447, + "learning_rate": 2.3444920653307077e-05, + "loss": 0.2776, + "step": 68590 + }, + { + "epoch": 2.6487509170238233, + "grad_norm": 0.19041050970554352, + "learning_rate": 2.3419179633705294e-05, + "loss": 0.14, + "step": 68600 + }, + { + "epoch": 2.6491370323178502, + "grad_norm": 0.6536568999290466, + "learning_rate": 2.3393438614103504e-05, + "loss": 0.1418, + "step": 68610 + }, + { + "epoch": 2.649523147611877, + "grad_norm": 0.32842710614204407, + "learning_rate": 2.336769759450172e-05, + "loss": 0.1917, + "step": 68620 + }, + { + "epoch": 2.649909262905904, + "grad_norm": 1.8785744905471802, + "learning_rate": 2.3341956574899934e-05, + "loss": 0.0986, + "step": 68630 + }, + { + "epoch": 2.6502953781999308, + "grad_norm": 1.0904650688171387, + "learning_rate": 2.3316215555298147e-05, + "loss": 0.1419, + "step": 68640 + }, + { + "epoch": 2.6506814934939573, + "grad_norm": 0.49260103702545166, + "learning_rate": 2.329047453569636e-05, + "loss": 0.1906, + "step": 68650 + }, + { + "epoch": 2.651067608787984, + "grad_norm": 0.6473127007484436, + "learning_rate": 2.326473351609457e-05, + "loss": 0.1585, + "step": 68660 + }, + { + "epoch": 2.651453724082011, + "grad_norm": 0.6146073937416077, + 
"learning_rate": 2.3238992496492787e-05, + "loss": 0.1425, + "step": 68670 + }, + { + "epoch": 2.651839839376038, + "grad_norm": 1.9327075481414795, + "learning_rate": 2.3213251476891e-05, + "loss": 0.1849, + "step": 68680 + }, + { + "epoch": 2.6522259546700644, + "grad_norm": 2.2953224182128906, + "learning_rate": 2.3187510457289214e-05, + "loss": 0.2707, + "step": 68690 + }, + { + "epoch": 2.6526120699640914, + "grad_norm": 0.1470266729593277, + "learning_rate": 2.3161769437687427e-05, + "loss": 0.1072, + "step": 68700 + }, + { + "epoch": 2.652998185258118, + "grad_norm": 0.8499718308448792, + "learning_rate": 2.3136028418085644e-05, + "loss": 0.1817, + "step": 68710 + }, + { + "epoch": 2.653384300552145, + "grad_norm": 0.09872210770845413, + "learning_rate": 2.3110287398483854e-05, + "loss": 0.0622, + "step": 68720 + }, + { + "epoch": 2.6537704158461715, + "grad_norm": 0.6662464737892151, + "learning_rate": 2.3084546378882067e-05, + "loss": 0.1477, + "step": 68730 + }, + { + "epoch": 2.6541565311401984, + "grad_norm": 0.008908030577003956, + "learning_rate": 2.3058805359280284e-05, + "loss": 0.1977, + "step": 68740 + }, + { + "epoch": 2.6545426464342254, + "grad_norm": 0.6658633947372437, + "learning_rate": 2.3033064339678494e-05, + "loss": 0.1416, + "step": 68750 + }, + { + "epoch": 2.654928761728252, + "grad_norm": 2.4253549575805664, + "learning_rate": 2.300732332007671e-05, + "loss": 0.223, + "step": 68760 + }, + { + "epoch": 2.655314877022279, + "grad_norm": 0.08092183619737625, + "learning_rate": 2.2981582300474924e-05, + "loss": 0.1287, + "step": 68770 + }, + { + "epoch": 2.6557009923163055, + "grad_norm": 1.8364213705062866, + "learning_rate": 2.2955841280873137e-05, + "loss": 0.1968, + "step": 68780 + }, + { + "epoch": 2.6560871076103325, + "grad_norm": 0.2436826527118683, + "learning_rate": 2.293010026127135e-05, + "loss": 0.1102, + "step": 68790 + }, + { + "epoch": 2.656473222904359, + "grad_norm": 1.2653074264526367, + "learning_rate": 2.2904359241669564e-05, + "loss": 0.2286, + "step": 68800 + }, + { + "epoch": 2.656859338198386, + "grad_norm": 0.14631232619285583, + "learning_rate": 2.2878618222067777e-05, + "loss": 0.1099, + "step": 68810 + }, + { + "epoch": 2.657245453492413, + "grad_norm": 0.02367425337433815, + "learning_rate": 2.285287720246599e-05, + "loss": 0.1272, + "step": 68820 + }, + { + "epoch": 2.6576315687864396, + "grad_norm": 0.3960202932357788, + "learning_rate": 2.2827136182864204e-05, + "loss": 0.1486, + "step": 68830 + }, + { + "epoch": 2.6580176840804666, + "grad_norm": 0.7019187808036804, + "learning_rate": 2.2801395163262417e-05, + "loss": 0.137, + "step": 68840 + }, + { + "epoch": 2.658403799374493, + "grad_norm": 1.1136587858200073, + "learning_rate": 2.2775654143660634e-05, + "loss": 0.1244, + "step": 68850 + }, + { + "epoch": 2.65878991466852, + "grad_norm": 0.8158296942710876, + "learning_rate": 2.2749913124058844e-05, + "loss": 0.0528, + "step": 68860 + }, + { + "epoch": 2.6591760299625467, + "grad_norm": 2.001655101776123, + "learning_rate": 2.272417210445706e-05, + "loss": 0.0705, + "step": 68870 + }, + { + "epoch": 2.6595621452565736, + "grad_norm": 0.2512793242931366, + "learning_rate": 2.2698431084855274e-05, + "loss": 0.0891, + "step": 68880 + }, + { + "epoch": 2.6599482605506006, + "grad_norm": 2.8973586559295654, + "learning_rate": 2.2672690065253484e-05, + "loss": 0.0921, + "step": 68890 + }, + { + "epoch": 2.660334375844627, + "grad_norm": 2.393480062484741, + "learning_rate": 2.26469490456517e-05, + "loss": 0.2726, + "step": 68900 + 
}, + { + "epoch": 2.660720491138654, + "grad_norm": 3.534479856491089, + "learning_rate": 2.2621208026049914e-05, + "loss": 0.2297, + "step": 68910 + }, + { + "epoch": 2.6611066064326807, + "grad_norm": 1.4901084899902344, + "learning_rate": 2.2595467006448127e-05, + "loss": 0.1104, + "step": 68920 + }, + { + "epoch": 2.6614927217267077, + "grad_norm": 1.3615870475769043, + "learning_rate": 2.256972598684634e-05, + "loss": 0.2267, + "step": 68930 + }, + { + "epoch": 2.6618788370207342, + "grad_norm": 0.26768797636032104, + "learning_rate": 2.2543984967244554e-05, + "loss": 0.0842, + "step": 68940 + }, + { + "epoch": 2.6622649523147612, + "grad_norm": 0.5720809102058411, + "learning_rate": 2.2518243947642767e-05, + "loss": 0.0955, + "step": 68950 + }, + { + "epoch": 2.6626510676087882, + "grad_norm": 0.8448322415351868, + "learning_rate": 2.249250292804098e-05, + "loss": 0.1251, + "step": 68960 + }, + { + "epoch": 2.6630371829028148, + "grad_norm": 2.9490509033203125, + "learning_rate": 2.2466761908439193e-05, + "loss": 0.1593, + "step": 68970 + }, + { + "epoch": 2.6634232981968413, + "grad_norm": 1.1557024717330933, + "learning_rate": 2.2441020888837407e-05, + "loss": 0.1336, + "step": 68980 + }, + { + "epoch": 2.6638094134908683, + "grad_norm": 2.981727361679077, + "learning_rate": 2.2415279869235623e-05, + "loss": 0.1487, + "step": 68990 + }, + { + "epoch": 2.6641955287848953, + "grad_norm": 0.5381894707679749, + "learning_rate": 2.2389538849633833e-05, + "loss": 0.1814, + "step": 69000 + }, + { + "epoch": 2.664581644078922, + "grad_norm": 0.833191990852356, + "learning_rate": 2.236379783003205e-05, + "loss": 0.1515, + "step": 69010 + }, + { + "epoch": 2.664967759372949, + "grad_norm": 0.1587102711200714, + "learning_rate": 2.2338056810430263e-05, + "loss": 0.0568, + "step": 69020 + }, + { + "epoch": 2.665353874666976, + "grad_norm": 1.5014970302581787, + "learning_rate": 2.2312315790828477e-05, + "loss": 0.1992, + "step": 69030 + }, + { + "epoch": 2.6657399899610024, + "grad_norm": 0.060449715703725815, + "learning_rate": 2.228657477122669e-05, + "loss": 0.1523, + "step": 69040 + }, + { + "epoch": 2.666126105255029, + "grad_norm": 1.7199037075042725, + "learning_rate": 2.22608337516249e-05, + "loss": 0.1056, + "step": 69050 + }, + { + "epoch": 2.666512220549056, + "grad_norm": 0.430899441242218, + "learning_rate": 2.2235092732023117e-05, + "loss": 0.1376, + "step": 69060 + }, + { + "epoch": 2.666898335843083, + "grad_norm": 0.16108714044094086, + "learning_rate": 2.220935171242133e-05, + "loss": 0.1109, + "step": 69070 + }, + { + "epoch": 2.6672844511371094, + "grad_norm": 3.1773228645324707, + "learning_rate": 2.2183610692819543e-05, + "loss": 0.3158, + "step": 69080 + }, + { + "epoch": 2.6676705664311364, + "grad_norm": 1.568304419517517, + "learning_rate": 2.2157869673217757e-05, + "loss": 0.2408, + "step": 69090 + }, + { + "epoch": 2.668056681725163, + "grad_norm": 1.8924100399017334, + "learning_rate": 2.2132128653615973e-05, + "loss": 0.2268, + "step": 69100 + }, + { + "epoch": 2.66844279701919, + "grad_norm": 2.1422247886657715, + "learning_rate": 2.2106387634014183e-05, + "loss": 0.1685, + "step": 69110 + }, + { + "epoch": 2.6688289123132165, + "grad_norm": 0.727570652961731, + "learning_rate": 2.2080646614412396e-05, + "loss": 0.3019, + "step": 69120 + }, + { + "epoch": 2.6692150276072435, + "grad_norm": 1.1987897157669067, + "learning_rate": 2.2054905594810613e-05, + "loss": 0.1511, + "step": 69130 + }, + { + "epoch": 2.6696011429012705, + "grad_norm": 2.25412654876709, + 
"learning_rate": 2.2029164575208823e-05, + "loss": 0.1538, + "step": 69140 + }, + { + "epoch": 2.669987258195297, + "grad_norm": 0.4829877018928528, + "learning_rate": 2.200342355560704e-05, + "loss": 0.2204, + "step": 69150 + }, + { + "epoch": 2.670373373489324, + "grad_norm": 0.8249949812889099, + "learning_rate": 2.1977682536005253e-05, + "loss": 0.1574, + "step": 69160 + }, + { + "epoch": 2.6707594887833506, + "grad_norm": 0.47408896684646606, + "learning_rate": 2.1951941516403466e-05, + "loss": 0.131, + "step": 69170 + }, + { + "epoch": 2.6711456040773776, + "grad_norm": 3.192263126373291, + "learning_rate": 2.192620049680168e-05, + "loss": 0.1284, + "step": 69180 + }, + { + "epoch": 2.671531719371404, + "grad_norm": 1.6318609714508057, + "learning_rate": 2.1900459477199893e-05, + "loss": 0.182, + "step": 69190 + }, + { + "epoch": 2.671917834665431, + "grad_norm": 2.031730890274048, + "learning_rate": 2.1874718457598106e-05, + "loss": 0.1774, + "step": 69200 + }, + { + "epoch": 2.672303949959458, + "grad_norm": 0.053225722163915634, + "learning_rate": 2.184897743799632e-05, + "loss": 0.2002, + "step": 69210 + }, + { + "epoch": 2.6726900652534846, + "grad_norm": 1.1087912321090698, + "learning_rate": 2.1823236418394533e-05, + "loss": 0.3255, + "step": 69220 + }, + { + "epoch": 2.6730761805475116, + "grad_norm": 1.7376277446746826, + "learning_rate": 2.1797495398792746e-05, + "loss": 0.1705, + "step": 69230 + }, + { + "epoch": 2.673462295841538, + "grad_norm": 0.7733955383300781, + "learning_rate": 2.1771754379190963e-05, + "loss": 0.2284, + "step": 69240 + }, + { + "epoch": 2.673848411135565, + "grad_norm": 2.198826313018799, + "learning_rate": 2.1746013359589173e-05, + "loss": 0.2463, + "step": 69250 + }, + { + "epoch": 2.6742345264295917, + "grad_norm": 0.9791239500045776, + "learning_rate": 2.172027233998739e-05, + "loss": 0.365, + "step": 69260 + }, + { + "epoch": 2.6746206417236187, + "grad_norm": 1.0145782232284546, + "learning_rate": 2.1694531320385603e-05, + "loss": 0.1168, + "step": 69270 + }, + { + "epoch": 2.6750067570176457, + "grad_norm": 1.506508708000183, + "learning_rate": 2.1668790300783813e-05, + "loss": 0.1254, + "step": 69280 + }, + { + "epoch": 2.675392872311672, + "grad_norm": 0.9859924912452698, + "learning_rate": 2.164304928118203e-05, + "loss": 0.2124, + "step": 69290 + }, + { + "epoch": 2.675778987605699, + "grad_norm": 1.4127247333526611, + "learning_rate": 2.1617308261580243e-05, + "loss": 0.1644, + "step": 69300 + }, + { + "epoch": 2.6761651028997258, + "grad_norm": 0.8753447532653809, + "learning_rate": 2.1591567241978456e-05, + "loss": 0.1948, + "step": 69310 + }, + { + "epoch": 2.6765512181937527, + "grad_norm": 0.18299230933189392, + "learning_rate": 2.156582622237667e-05, + "loss": 0.1042, + "step": 69320 + }, + { + "epoch": 2.6769373334877793, + "grad_norm": 0.9009674191474915, + "learning_rate": 2.1540085202774883e-05, + "loss": 0.0899, + "step": 69330 + }, + { + "epoch": 2.6773234487818063, + "grad_norm": 0.8263937830924988, + "learning_rate": 2.1514344183173096e-05, + "loss": 0.0843, + "step": 69340 + }, + { + "epoch": 2.6777095640758333, + "grad_norm": 0.40176376700401306, + "learning_rate": 2.148860316357131e-05, + "loss": 0.1645, + "step": 69350 + }, + { + "epoch": 2.67809567936986, + "grad_norm": 1.2371177673339844, + "learning_rate": 2.1462862143969523e-05, + "loss": 0.1491, + "step": 69360 + }, + { + "epoch": 2.6784817946638864, + "grad_norm": 0.6874446272850037, + "learning_rate": 2.1437121124367736e-05, + "loss": 0.1887, + "step": 69370 + 
}, + { + "epoch": 2.6788679099579134, + "grad_norm": 0.08807168155908585, + "learning_rate": 2.1411380104765953e-05, + "loss": 0.1049, + "step": 69380 + }, + { + "epoch": 2.6792540252519403, + "grad_norm": 2.0971579551696777, + "learning_rate": 2.1385639085164163e-05, + "loss": 0.135, + "step": 69390 + }, + { + "epoch": 2.679640140545967, + "grad_norm": 0.9297891855239868, + "learning_rate": 2.135989806556238e-05, + "loss": 0.2356, + "step": 69400 + }, + { + "epoch": 2.680026255839994, + "grad_norm": 2.100465774536133, + "learning_rate": 2.1334157045960593e-05, + "loss": 0.225, + "step": 69410 + }, + { + "epoch": 2.680412371134021, + "grad_norm": 0.14785470068454742, + "learning_rate": 2.1308416026358806e-05, + "loss": 0.1806, + "step": 69420 + }, + { + "epoch": 2.6807984864280474, + "grad_norm": 0.03883717209100723, + "learning_rate": 2.128267500675702e-05, + "loss": 0.0575, + "step": 69430 + }, + { + "epoch": 2.681184601722074, + "grad_norm": 0.515643835067749, + "learning_rate": 2.125693398715523e-05, + "loss": 0.1095, + "step": 69440 + }, + { + "epoch": 2.681570717016101, + "grad_norm": 0.21258410811424255, + "learning_rate": 2.1231192967553446e-05, + "loss": 0.1496, + "step": 69450 + }, + { + "epoch": 2.681956832310128, + "grad_norm": 1.14195716381073, + "learning_rate": 2.120545194795166e-05, + "loss": 0.182, + "step": 69460 + }, + { + "epoch": 2.6823429476041545, + "grad_norm": 0.43386051058769226, + "learning_rate": 2.1179710928349873e-05, + "loss": 0.2241, + "step": 69470 + }, + { + "epoch": 2.6827290628981815, + "grad_norm": 0.4654422700405121, + "learning_rate": 2.1153969908748086e-05, + "loss": 0.2264, + "step": 69480 + }, + { + "epoch": 2.6831151781922085, + "grad_norm": 0.8086020350456238, + "learning_rate": 2.1128228889146303e-05, + "loss": 0.1634, + "step": 69490 + }, + { + "epoch": 2.683501293486235, + "grad_norm": 0.03701169416308403, + "learning_rate": 2.1102487869544512e-05, + "loss": 0.1117, + "step": 69500 + }, + { + "epoch": 2.6838874087802616, + "grad_norm": 0.9567661285400391, + "learning_rate": 2.1076746849942726e-05, + "loss": 0.113, + "step": 69510 + }, + { + "epoch": 2.6842735240742885, + "grad_norm": 1.7322033643722534, + "learning_rate": 2.1051005830340943e-05, + "loss": 0.1443, + "step": 69520 + }, + { + "epoch": 2.6846596393683155, + "grad_norm": 1.8574343919754028, + "learning_rate": 2.1025264810739152e-05, + "loss": 0.0919, + "step": 69530 + }, + { + "epoch": 2.685045754662342, + "grad_norm": 0.0813397541642189, + "learning_rate": 2.099952379113737e-05, + "loss": 0.061, + "step": 69540 + }, + { + "epoch": 2.685431869956369, + "grad_norm": 0.09124821424484253, + "learning_rate": 2.0973782771535582e-05, + "loss": 0.0703, + "step": 69550 + }, + { + "epoch": 2.6858179852503956, + "grad_norm": 1.2731401920318604, + "learning_rate": 2.0948041751933796e-05, + "loss": 0.1788, + "step": 69560 + }, + { + "epoch": 2.6862041005444226, + "grad_norm": 0.38222697377204895, + "learning_rate": 2.092230073233201e-05, + "loss": 0.22, + "step": 69570 + }, + { + "epoch": 2.686590215838449, + "grad_norm": 0.7840344905853271, + "learning_rate": 2.0896559712730222e-05, + "loss": 0.0511, + "step": 69580 + }, + { + "epoch": 2.686976331132476, + "grad_norm": 0.5814514756202698, + "learning_rate": 2.0870818693128436e-05, + "loss": 0.1282, + "step": 69590 + }, + { + "epoch": 2.687362446426503, + "grad_norm": 2.049823045730591, + "learning_rate": 2.084507767352665e-05, + "loss": 0.1928, + "step": 69600 + }, + { + "epoch": 2.6877485617205297, + "grad_norm": 1.726441502571106, + 
"learning_rate": 2.0819336653924862e-05, + "loss": 0.2271, + "step": 69610 + }, + { + "epoch": 2.6881346770145567, + "grad_norm": 0.6086135506629944, + "learning_rate": 2.0793595634323076e-05, + "loss": 0.0826, + "step": 69620 + }, + { + "epoch": 2.688520792308583, + "grad_norm": 0.025057394057512283, + "learning_rate": 2.0767854614721292e-05, + "loss": 0.0945, + "step": 69630 + }, + { + "epoch": 2.68890690760261, + "grad_norm": 0.5811958312988281, + "learning_rate": 2.0742113595119502e-05, + "loss": 0.115, + "step": 69640 + }, + { + "epoch": 2.6892930228966367, + "grad_norm": 1.4226329326629639, + "learning_rate": 2.071637257551772e-05, + "loss": 0.1394, + "step": 69650 + }, + { + "epoch": 2.6896791381906637, + "grad_norm": 0.06111827492713928, + "learning_rate": 2.0690631555915932e-05, + "loss": 0.0785, + "step": 69660 + }, + { + "epoch": 2.6900652534846907, + "grad_norm": 1.7739264965057373, + "learning_rate": 2.0664890536314142e-05, + "loss": 0.0966, + "step": 69670 + }, + { + "epoch": 2.6904513687787173, + "grad_norm": 0.12530933320522308, + "learning_rate": 2.063914951671236e-05, + "loss": 0.1142, + "step": 69680 + }, + { + "epoch": 2.6908374840727443, + "grad_norm": 0.44947731494903564, + "learning_rate": 2.0613408497110572e-05, + "loss": 0.1069, + "step": 69690 + }, + { + "epoch": 2.691223599366771, + "grad_norm": 1.1874277591705322, + "learning_rate": 2.0587667477508786e-05, + "loss": 0.2498, + "step": 69700 + }, + { + "epoch": 2.691609714660798, + "grad_norm": 0.2770039141178131, + "learning_rate": 2.0561926457907e-05, + "loss": 0.0918, + "step": 69710 + }, + { + "epoch": 2.6919958299548243, + "grad_norm": 0.6367407441139221, + "learning_rate": 2.0536185438305212e-05, + "loss": 0.1444, + "step": 69720 + }, + { + "epoch": 2.6923819452488513, + "grad_norm": 4.309720993041992, + "learning_rate": 2.0510444418703425e-05, + "loss": 0.1751, + "step": 69730 + }, + { + "epoch": 2.6927680605428783, + "grad_norm": 0.19197171926498413, + "learning_rate": 2.048470339910164e-05, + "loss": 0.3638, + "step": 69740 + }, + { + "epoch": 2.693154175836905, + "grad_norm": 1.1299902200698853, + "learning_rate": 2.0458962379499852e-05, + "loss": 0.2047, + "step": 69750 + }, + { + "epoch": 2.693540291130932, + "grad_norm": 2.2639973163604736, + "learning_rate": 2.0433221359898065e-05, + "loss": 0.1683, + "step": 69760 + }, + { + "epoch": 2.6939264064249584, + "grad_norm": 0.7595259547233582, + "learning_rate": 2.0407480340296282e-05, + "loss": 0.1393, + "step": 69770 + }, + { + "epoch": 2.6943125217189854, + "grad_norm": 0.1113772988319397, + "learning_rate": 2.0381739320694492e-05, + "loss": 0.1775, + "step": 69780 + }, + { + "epoch": 2.694698637013012, + "grad_norm": 0.8597696423530579, + "learning_rate": 2.035599830109271e-05, + "loss": 0.092, + "step": 69790 + }, + { + "epoch": 2.695084752307039, + "grad_norm": 0.9622846841812134, + "learning_rate": 2.0330257281490922e-05, + "loss": 0.1407, + "step": 69800 + }, + { + "epoch": 2.695470867601066, + "grad_norm": 1.840627908706665, + "learning_rate": 2.0304516261889135e-05, + "loss": 0.1769, + "step": 69810 + }, + { + "epoch": 2.6958569828950925, + "grad_norm": 0.8552238941192627, + "learning_rate": 2.027877524228735e-05, + "loss": 0.263, + "step": 69820 + }, + { + "epoch": 2.696243098189119, + "grad_norm": 0.16129668056964874, + "learning_rate": 2.025303422268556e-05, + "loss": 0.188, + "step": 69830 + }, + { + "epoch": 2.696629213483146, + "grad_norm": 0.46955424547195435, + "learning_rate": 2.0227293203083775e-05, + "loss": 0.0623, + "step": 69840 + 
}, + { + "epoch": 2.697015328777173, + "grad_norm": 1.0460457801818848, + "learning_rate": 2.020155218348199e-05, + "loss": 0.1932, + "step": 69850 + }, + { + "epoch": 2.6974014440711995, + "grad_norm": 4.531238555908203, + "learning_rate": 2.0175811163880202e-05, + "loss": 0.323, + "step": 69860 + }, + { + "epoch": 2.6977875593652265, + "grad_norm": 1.188908338546753, + "learning_rate": 2.0150070144278415e-05, + "loss": 0.1389, + "step": 69870 + }, + { + "epoch": 2.6981736746592535, + "grad_norm": 0.3120017349720001, + "learning_rate": 2.0124329124676632e-05, + "loss": 0.1732, + "step": 69880 + }, + { + "epoch": 2.69855978995328, + "grad_norm": 1.2681177854537964, + "learning_rate": 2.0098588105074842e-05, + "loss": 0.1452, + "step": 69890 + }, + { + "epoch": 2.6989459052473066, + "grad_norm": 0.07245191186666489, + "learning_rate": 2.0072847085473055e-05, + "loss": 0.0982, + "step": 69900 + }, + { + "epoch": 2.6993320205413336, + "grad_norm": 1.7727508544921875, + "learning_rate": 2.0047106065871272e-05, + "loss": 0.0806, + "step": 69910 + }, + { + "epoch": 2.6997181358353606, + "grad_norm": 0.6012092232704163, + "learning_rate": 2.0021365046269482e-05, + "loss": 0.1377, + "step": 69920 + }, + { + "epoch": 2.700104251129387, + "grad_norm": 0.6156259775161743, + "learning_rate": 1.99956240266677e-05, + "loss": 0.1788, + "step": 69930 + }, + { + "epoch": 2.700490366423414, + "grad_norm": 1.6917505264282227, + "learning_rate": 1.9969883007065912e-05, + "loss": 0.1588, + "step": 69940 + }, + { + "epoch": 2.700876481717441, + "grad_norm": 2.0406925678253174, + "learning_rate": 1.9944141987464125e-05, + "loss": 0.1541, + "step": 69950 + }, + { + "epoch": 2.7012625970114676, + "grad_norm": 3.067919969558716, + "learning_rate": 1.991840096786234e-05, + "loss": 0.1052, + "step": 69960 + }, + { + "epoch": 2.701648712305494, + "grad_norm": 0.7679221034049988, + "learning_rate": 1.989265994826055e-05, + "loss": 0.2131, + "step": 69970 + }, + { + "epoch": 2.702034827599521, + "grad_norm": 0.9475175738334656, + "learning_rate": 1.9866918928658765e-05, + "loss": 0.1016, + "step": 69980 + }, + { + "epoch": 2.702420942893548, + "grad_norm": 1.2485641241073608, + "learning_rate": 1.984117790905698e-05, + "loss": 0.0922, + "step": 69990 + }, + { + "epoch": 2.7028070581875747, + "grad_norm": 0.9329742789268494, + "learning_rate": 1.981543688945519e-05, + "loss": 0.3189, + "step": 70000 + }, + { + "epoch": 2.7031931734816017, + "grad_norm": 0.6140137314796448, + "learning_rate": 1.9789695869853405e-05, + "loss": 0.1054, + "step": 70010 + }, + { + "epoch": 2.7035792887756283, + "grad_norm": 0.938637375831604, + "learning_rate": 1.976395485025162e-05, + "loss": 0.1276, + "step": 70020 + }, + { + "epoch": 2.7039654040696552, + "grad_norm": 1.2453144788742065, + "learning_rate": 1.973821383064983e-05, + "loss": 0.2376, + "step": 70030 + }, + { + "epoch": 2.704351519363682, + "grad_norm": 0.7409077286720276, + "learning_rate": 1.9712472811048048e-05, + "loss": 0.1269, + "step": 70040 + }, + { + "epoch": 2.704737634657709, + "grad_norm": 1.8768031597137451, + "learning_rate": 1.968673179144626e-05, + "loss": 0.235, + "step": 70050 + }, + { + "epoch": 2.7051237499517358, + "grad_norm": 0.23621875047683716, + "learning_rate": 1.966099077184447e-05, + "loss": 0.1042, + "step": 70060 + }, + { + "epoch": 2.7055098652457623, + "grad_norm": 0.2492808699607849, + "learning_rate": 1.9635249752242688e-05, + "loss": 0.2042, + "step": 70070 + }, + { + "epoch": 2.7058959805397893, + "grad_norm": 1.5175273418426514, + 
"learning_rate": 1.96095087326409e-05, + "loss": 0.2068, + "step": 70080 + }, + { + "epoch": 2.706282095833816, + "grad_norm": 0.9314035773277283, + "learning_rate": 1.9583767713039115e-05, + "loss": 0.1587, + "step": 70090 + }, + { + "epoch": 2.706668211127843, + "grad_norm": 1.7147942781448364, + "learning_rate": 1.9558026693437328e-05, + "loss": 0.1391, + "step": 70100 + }, + { + "epoch": 2.7070543264218694, + "grad_norm": 0.36276572942733765, + "learning_rate": 1.953228567383554e-05, + "loss": 0.1275, + "step": 70110 + }, + { + "epoch": 2.7074404417158964, + "grad_norm": 0.8741244077682495, + "learning_rate": 1.9506544654233755e-05, + "loss": 0.3746, + "step": 70120 + }, + { + "epoch": 2.7078265570099234, + "grad_norm": 1.3060029745101929, + "learning_rate": 1.9480803634631968e-05, + "loss": 0.1076, + "step": 70130 + }, + { + "epoch": 2.70821267230395, + "grad_norm": 0.10746710747480392, + "learning_rate": 1.945506261503018e-05, + "loss": 0.1494, + "step": 70140 + }, + { + "epoch": 2.708598787597977, + "grad_norm": 1.486136555671692, + "learning_rate": 1.9429321595428395e-05, + "loss": 0.1223, + "step": 70150 + }, + { + "epoch": 2.7089849028920034, + "grad_norm": 1.184791088104248, + "learning_rate": 1.940358057582661e-05, + "loss": 0.1055, + "step": 70160 + }, + { + "epoch": 2.7093710181860304, + "grad_norm": 0.10033337771892548, + "learning_rate": 1.937783955622482e-05, + "loss": 0.083, + "step": 70170 + }, + { + "epoch": 2.709757133480057, + "grad_norm": 1.3200103044509888, + "learning_rate": 1.9352098536623038e-05, + "loss": 0.2181, + "step": 70180 + }, + { + "epoch": 2.710143248774084, + "grad_norm": 0.013393727131187916, + "learning_rate": 1.932635751702125e-05, + "loss": 0.2415, + "step": 70190 + }, + { + "epoch": 2.710529364068111, + "grad_norm": 0.5986078381538391, + "learning_rate": 1.9300616497419465e-05, + "loss": 0.3203, + "step": 70200 + }, + { + "epoch": 2.7109154793621375, + "grad_norm": 0.18333138525485992, + "learning_rate": 1.9274875477817678e-05, + "loss": 0.1043, + "step": 70210 + }, + { + "epoch": 2.7113015946561645, + "grad_norm": 1.0568320751190186, + "learning_rate": 1.924913445821589e-05, + "loss": 0.1491, + "step": 70220 + }, + { + "epoch": 2.711687709950191, + "grad_norm": 4.519015312194824, + "learning_rate": 1.9223393438614105e-05, + "loss": 0.2732, + "step": 70230 + }, + { + "epoch": 2.712073825244218, + "grad_norm": 0.753288209438324, + "learning_rate": 1.9197652419012318e-05, + "loss": 0.1793, + "step": 70240 + }, + { + "epoch": 2.7124599405382446, + "grad_norm": 0.8923632502555847, + "learning_rate": 1.917191139941053e-05, + "loss": 0.3293, + "step": 70250 + }, + { + "epoch": 2.7128460558322716, + "grad_norm": 1.5100219249725342, + "learning_rate": 1.9146170379808744e-05, + "loss": 0.2252, + "step": 70260 + }, + { + "epoch": 2.7132321711262986, + "grad_norm": 3.496548891067505, + "learning_rate": 1.912042936020696e-05, + "loss": 0.3877, + "step": 70270 + }, + { + "epoch": 2.713618286420325, + "grad_norm": 1.0033410787582397, + "learning_rate": 1.909468834060517e-05, + "loss": 0.1854, + "step": 70280 + }, + { + "epoch": 2.7140044017143516, + "grad_norm": 2.1113343238830566, + "learning_rate": 1.9068947321003384e-05, + "loss": 0.2561, + "step": 70290 + }, + { + "epoch": 2.7143905170083786, + "grad_norm": 3.4603283405303955, + "learning_rate": 1.90432063014016e-05, + "loss": 0.2673, + "step": 70300 + }, + { + "epoch": 2.7147766323024056, + "grad_norm": 0.4921800196170807, + "learning_rate": 1.901746528179981e-05, + "loss": 0.1819, + "step": 70310 + }, + { 
+ "epoch": 2.715162747596432, + "grad_norm": 0.12639844417572021, + "learning_rate": 1.8991724262198028e-05, + "loss": 0.1302, + "step": 70320 + }, + { + "epoch": 2.715548862890459, + "grad_norm": 0.988345205783844, + "learning_rate": 1.896598324259624e-05, + "loss": 0.2827, + "step": 70330 + }, + { + "epoch": 2.715934978184486, + "grad_norm": 1.432824730873108, + "learning_rate": 1.8940242222994454e-05, + "loss": 0.1046, + "step": 70340 + }, + { + "epoch": 2.7163210934785127, + "grad_norm": 0.4591884911060333, + "learning_rate": 1.8914501203392668e-05, + "loss": 0.2663, + "step": 70350 + }, + { + "epoch": 2.7167072087725392, + "grad_norm": 0.23119209706783295, + "learning_rate": 1.888876018379088e-05, + "loss": 0.3555, + "step": 70360 + }, + { + "epoch": 2.7170933240665662, + "grad_norm": 1.9221980571746826, + "learning_rate": 1.8863019164189094e-05, + "loss": 0.1279, + "step": 70370 + }, + { + "epoch": 2.717479439360593, + "grad_norm": 0.09880539029836655, + "learning_rate": 1.8837278144587308e-05, + "loss": 0.0905, + "step": 70380 + }, + { + "epoch": 2.7178655546546198, + "grad_norm": 0.16270965337753296, + "learning_rate": 1.881153712498552e-05, + "loss": 0.1619, + "step": 70390 + }, + { + "epoch": 2.7182516699486468, + "grad_norm": 2.006127119064331, + "learning_rate": 1.8785796105383734e-05, + "loss": 0.107, + "step": 70400 + }, + { + "epoch": 2.7186377852426733, + "grad_norm": 3.8377106189727783, + "learning_rate": 1.876005508578195e-05, + "loss": 0.1109, + "step": 70410 + }, + { + "epoch": 2.7190239005367003, + "grad_norm": 0.5417147278785706, + "learning_rate": 1.873431406618016e-05, + "loss": 0.0836, + "step": 70420 + }, + { + "epoch": 2.719410015830727, + "grad_norm": 0.2515392601490021, + "learning_rate": 1.8708573046578378e-05, + "loss": 0.1107, + "step": 70430 + }, + { + "epoch": 2.719796131124754, + "grad_norm": 1.813265323638916, + "learning_rate": 1.868283202697659e-05, + "loss": 0.125, + "step": 70440 + }, + { + "epoch": 2.720182246418781, + "grad_norm": 1.873964548110962, + "learning_rate": 1.86570910073748e-05, + "loss": 0.1649, + "step": 70450 + }, + { + "epoch": 2.7205683617128074, + "grad_norm": 0.23251821100711823, + "learning_rate": 1.8631349987773017e-05, + "loss": 0.1394, + "step": 70460 + }, + { + "epoch": 2.7209544770068343, + "grad_norm": 3.282196521759033, + "learning_rate": 1.860560896817123e-05, + "loss": 0.3037, + "step": 70470 + }, + { + "epoch": 2.721340592300861, + "grad_norm": 1.481994867324829, + "learning_rate": 1.8579867948569444e-05, + "loss": 0.194, + "step": 70480 + }, + { + "epoch": 2.721726707594888, + "grad_norm": 2.3261420726776123, + "learning_rate": 1.8554126928967657e-05, + "loss": 0.2485, + "step": 70490 + }, + { + "epoch": 2.7221128228889144, + "grad_norm": 0.7159029841423035, + "learning_rate": 1.852838590936587e-05, + "loss": 0.0882, + "step": 70500 + }, + { + "epoch": 2.7224989381829414, + "grad_norm": 1.13942289352417, + "learning_rate": 1.8502644889764084e-05, + "loss": 0.2973, + "step": 70510 + }, + { + "epoch": 2.7228850534769684, + "grad_norm": 0.5620355606079102, + "learning_rate": 1.8476903870162297e-05, + "loss": 0.1388, + "step": 70520 + }, + { + "epoch": 2.723271168770995, + "grad_norm": 0.3864080607891083, + "learning_rate": 1.845116285056051e-05, + "loss": 0.3104, + "step": 70530 + }, + { + "epoch": 2.723657284065022, + "grad_norm": 0.19849848747253418, + "learning_rate": 1.8425421830958724e-05, + "loss": 0.1077, + "step": 70540 + }, + { + "epoch": 2.7240433993590485, + "grad_norm": 0.5163066983222961, + "learning_rate": 
1.839968081135694e-05, + "loss": 0.1412, + "step": 70550 + }, + { + "epoch": 2.7244295146530755, + "grad_norm": 2.6426632404327393, + "learning_rate": 1.837393979175515e-05, + "loss": 0.1635, + "step": 70560 + }, + { + "epoch": 2.724815629947102, + "grad_norm": 0.09032654017210007, + "learning_rate": 1.8348198772153367e-05, + "loss": 0.1136, + "step": 70570 + }, + { + "epoch": 2.725201745241129, + "grad_norm": 0.20428933203220367, + "learning_rate": 1.832245775255158e-05, + "loss": 0.0685, + "step": 70580 + }, + { + "epoch": 2.725587860535156, + "grad_norm": 0.16030457615852356, + "learning_rate": 1.8296716732949794e-05, + "loss": 0.236, + "step": 70590 + }, + { + "epoch": 2.7259739758291825, + "grad_norm": 0.4269642233848572, + "learning_rate": 1.8270975713348007e-05, + "loss": 0.1099, + "step": 70600 + }, + { + "epoch": 2.7263600911232095, + "grad_norm": 0.821434736251831, + "learning_rate": 1.824523469374622e-05, + "loss": 0.0975, + "step": 70610 + }, + { + "epoch": 2.726746206417236, + "grad_norm": 0.6720656156539917, + "learning_rate": 1.8219493674144434e-05, + "loss": 0.286, + "step": 70620 + }, + { + "epoch": 2.727132321711263, + "grad_norm": 1.6225451231002808, + "learning_rate": 1.8193752654542647e-05, + "loss": 0.1615, + "step": 70630 + }, + { + "epoch": 2.7275184370052896, + "grad_norm": 2.2176315784454346, + "learning_rate": 1.816801163494086e-05, + "loss": 0.2519, + "step": 70640 + }, + { + "epoch": 2.7279045522993166, + "grad_norm": 0.7203749418258667, + "learning_rate": 1.8142270615339074e-05, + "loss": 0.0635, + "step": 70650 + }, + { + "epoch": 2.7282906675933436, + "grad_norm": 1.0711476802825928, + "learning_rate": 1.8116529595737287e-05, + "loss": 0.1388, + "step": 70660 + }, + { + "epoch": 2.72867678288737, + "grad_norm": 0.3595193922519684, + "learning_rate": 1.80907885761355e-05, + "loss": 0.3019, + "step": 70670 + }, + { + "epoch": 2.7290628981813967, + "grad_norm": 2.369717597961426, + "learning_rate": 1.8065047556533714e-05, + "loss": 0.202, + "step": 70680 + }, + { + "epoch": 2.7294490134754237, + "grad_norm": 0.9249016046524048, + "learning_rate": 1.803930653693193e-05, + "loss": 0.1571, + "step": 70690 + }, + { + "epoch": 2.7298351287694507, + "grad_norm": 1.1907705068588257, + "learning_rate": 1.801356551733014e-05, + "loss": 0.1, + "step": 70700 + }, + { + "epoch": 2.730221244063477, + "grad_norm": 1.934824824333191, + "learning_rate": 1.7987824497728357e-05, + "loss": 0.121, + "step": 70710 + }, + { + "epoch": 2.730607359357504, + "grad_norm": 0.8659215569496155, + "learning_rate": 1.796208347812657e-05, + "loss": 0.1184, + "step": 70720 + }, + { + "epoch": 2.730993474651531, + "grad_norm": 4.406744956970215, + "learning_rate": 1.7936342458524784e-05, + "loss": 0.1405, + "step": 70730 + }, + { + "epoch": 2.7313795899455577, + "grad_norm": 1.3139662742614746, + "learning_rate": 1.7910601438922997e-05, + "loss": 0.1341, + "step": 70740 + }, + { + "epoch": 2.7317657052395843, + "grad_norm": 1.3049808740615845, + "learning_rate": 1.788486041932121e-05, + "loss": 0.096, + "step": 70750 + }, + { + "epoch": 2.7321518205336113, + "grad_norm": 0.05369478836655617, + "learning_rate": 1.7859119399719424e-05, + "loss": 0.1148, + "step": 70760 + }, + { + "epoch": 2.7325379358276383, + "grad_norm": 0.028254307806491852, + "learning_rate": 1.7833378380117637e-05, + "loss": 0.1491, + "step": 70770 + }, + { + "epoch": 2.732924051121665, + "grad_norm": 0.44366562366485596, + "learning_rate": 1.780763736051585e-05, + "loss": 0.1294, + "step": 70780 + }, + { + "epoch": 
2.733310166415692, + "grad_norm": 1.80902099609375, + "learning_rate": 1.7781896340914064e-05, + "loss": 0.4409, + "step": 70790 + }, + { + "epoch": 2.733696281709719, + "grad_norm": 0.48230284452438354, + "learning_rate": 1.775615532131228e-05, + "loss": 0.1516, + "step": 70800 + }, + { + "epoch": 2.7340823970037453, + "grad_norm": 1.295810341835022, + "learning_rate": 1.773041430171049e-05, + "loss": 0.1375, + "step": 70810 + }, + { + "epoch": 2.734468512297772, + "grad_norm": 0.05213301628828049, + "learning_rate": 1.7704673282108707e-05, + "loss": 0.1678, + "step": 70820 + }, + { + "epoch": 2.734854627591799, + "grad_norm": 1.5852004289627075, + "learning_rate": 1.767893226250692e-05, + "loss": 0.2333, + "step": 70830 + }, + { + "epoch": 2.735240742885826, + "grad_norm": 0.05190286785364151, + "learning_rate": 1.765319124290513e-05, + "loss": 0.1131, + "step": 70840 + }, + { + "epoch": 2.7356268581798524, + "grad_norm": 0.9715459942817688, + "learning_rate": 1.7627450223303347e-05, + "loss": 0.2, + "step": 70850 + }, + { + "epoch": 2.7360129734738794, + "grad_norm": 1.0015023946762085, + "learning_rate": 1.760170920370156e-05, + "loss": 0.1492, + "step": 70860 + }, + { + "epoch": 2.736399088767906, + "grad_norm": 0.4785858392715454, + "learning_rate": 1.7575968184099773e-05, + "loss": 0.1157, + "step": 70870 + }, + { + "epoch": 2.736785204061933, + "grad_norm": 1.2634512186050415, + "learning_rate": 1.7550227164497987e-05, + "loss": 0.196, + "step": 70880 + }, + { + "epoch": 2.7371713193559595, + "grad_norm": 0.08982031047344208, + "learning_rate": 1.75244861448962e-05, + "loss": 0.1772, + "step": 70890 + }, + { + "epoch": 2.7375574346499865, + "grad_norm": 0.2539536952972412, + "learning_rate": 1.7498745125294413e-05, + "loss": 0.2373, + "step": 70900 + }, + { + "epoch": 2.7379435499440135, + "grad_norm": 1.94331955909729, + "learning_rate": 1.7473004105692627e-05, + "loss": 0.1113, + "step": 70910 + }, + { + "epoch": 2.73832966523804, + "grad_norm": 1.7793807983398438, + "learning_rate": 1.744726308609084e-05, + "loss": 0.1222, + "step": 70920 + }, + { + "epoch": 2.738715780532067, + "grad_norm": 0.9183433055877686, + "learning_rate": 1.7421522066489053e-05, + "loss": 0.0982, + "step": 70930 + }, + { + "epoch": 2.7391018958260935, + "grad_norm": 0.7785767316818237, + "learning_rate": 1.739578104688727e-05, + "loss": 0.2077, + "step": 70940 + }, + { + "epoch": 2.7394880111201205, + "grad_norm": 0.363359272480011, + "learning_rate": 1.737004002728548e-05, + "loss": 0.2365, + "step": 70950 + }, + { + "epoch": 2.739874126414147, + "grad_norm": 0.026698095723986626, + "learning_rate": 1.7344299007683697e-05, + "loss": 0.185, + "step": 70960 + }, + { + "epoch": 2.740260241708174, + "grad_norm": 0.3695981502532959, + "learning_rate": 1.731855798808191e-05, + "loss": 0.1889, + "step": 70970 + }, + { + "epoch": 2.740646357002201, + "grad_norm": 0.43547266721725464, + "learning_rate": 1.7292816968480123e-05, + "loss": 0.1945, + "step": 70980 + }, + { + "epoch": 2.7410324722962276, + "grad_norm": 0.815490186214447, + "learning_rate": 1.7267075948878337e-05, + "loss": 0.1461, + "step": 70990 + }, + { + "epoch": 2.7414185875902546, + "grad_norm": 0.9073535799980164, + "learning_rate": 1.724133492927655e-05, + "loss": 0.3003, + "step": 71000 + }, + { + "epoch": 2.741804702884281, + "grad_norm": 1.8418182134628296, + "learning_rate": 1.7215593909674763e-05, + "loss": 0.1591, + "step": 71010 + }, + { + "epoch": 2.742190818178308, + "grad_norm": 0.6584638953208923, + "learning_rate": 
1.7189852890072976e-05, + "loss": 0.0845, + "step": 71020 + }, + { + "epoch": 2.7425769334723347, + "grad_norm": 0.15774297714233398, + "learning_rate": 1.716411187047119e-05, + "loss": 0.185, + "step": 71030 + }, + { + "epoch": 2.7429630487663617, + "grad_norm": 1.1900436878204346, + "learning_rate": 1.7138370850869403e-05, + "loss": 0.2387, + "step": 71040 + }, + { + "epoch": 2.7433491640603886, + "grad_norm": 1.486275553703308, + "learning_rate": 1.7112629831267616e-05, + "loss": 0.1802, + "step": 71050 + }, + { + "epoch": 2.743735279354415, + "grad_norm": 1.8878792524337769, + "learning_rate": 1.708688881166583e-05, + "loss": 0.1771, + "step": 71060 + }, + { + "epoch": 2.744121394648442, + "grad_norm": 0.4045495390892029, + "learning_rate": 1.7061147792064043e-05, + "loss": 0.0617, + "step": 71070 + }, + { + "epoch": 2.7445075099424687, + "grad_norm": 1.6910227537155151, + "learning_rate": 1.703540677246226e-05, + "loss": 0.2295, + "step": 71080 + }, + { + "epoch": 2.7448936252364957, + "grad_norm": 1.3064563274383545, + "learning_rate": 1.700966575286047e-05, + "loss": 0.189, + "step": 71090 + }, + { + "epoch": 2.7452797405305223, + "grad_norm": 0.26879480481147766, + "learning_rate": 1.6983924733258686e-05, + "loss": 0.2041, + "step": 71100 + }, + { + "epoch": 2.7456658558245493, + "grad_norm": 0.09380711615085602, + "learning_rate": 1.69581837136569e-05, + "loss": 0.2032, + "step": 71110 + }, + { + "epoch": 2.7460519711185762, + "grad_norm": 0.4047906696796417, + "learning_rate": 1.6932442694055113e-05, + "loss": 0.1808, + "step": 71120 + }, + { + "epoch": 2.746438086412603, + "grad_norm": 0.14023207128047943, + "learning_rate": 1.6906701674453326e-05, + "loss": 0.0477, + "step": 71130 + }, + { + "epoch": 2.7468242017066293, + "grad_norm": 0.09656676650047302, + "learning_rate": 1.688096065485154e-05, + "loss": 0.1482, + "step": 71140 + }, + { + "epoch": 2.7472103170006563, + "grad_norm": 0.62394118309021, + "learning_rate": 1.6855219635249753e-05, + "loss": 0.0637, + "step": 71150 + }, + { + "epoch": 2.7475964322946833, + "grad_norm": 0.028015749529004097, + "learning_rate": 1.6829478615647966e-05, + "loss": 0.1396, + "step": 71160 + }, + { + "epoch": 2.74798254758871, + "grad_norm": 0.6252595782279968, + "learning_rate": 1.680373759604618e-05, + "loss": 0.1989, + "step": 71170 + }, + { + "epoch": 2.748368662882737, + "grad_norm": 1.6278966665267944, + "learning_rate": 1.6777996576444393e-05, + "loss": 0.2828, + "step": 71180 + }, + { + "epoch": 2.748754778176764, + "grad_norm": 0.7790352702140808, + "learning_rate": 1.675225555684261e-05, + "loss": 0.1542, + "step": 71190 + }, + { + "epoch": 2.7491408934707904, + "grad_norm": 0.41583356261253357, + "learning_rate": 1.672651453724082e-05, + "loss": 0.1853, + "step": 71200 + }, + { + "epoch": 2.749527008764817, + "grad_norm": 0.22601169347763062, + "learning_rate": 1.6700773517639036e-05, + "loss": 0.1429, + "step": 71210 + }, + { + "epoch": 2.749913124058844, + "grad_norm": 0.7268449068069458, + "learning_rate": 1.667503249803725e-05, + "loss": 0.156, + "step": 71220 + }, + { + "epoch": 2.750299239352871, + "grad_norm": 0.8059683442115784, + "learning_rate": 1.664929147843546e-05, + "loss": 0.1616, + "step": 71230 + }, + { + "epoch": 2.7506853546468975, + "grad_norm": 1.2625138759613037, + "learning_rate": 1.6623550458833676e-05, + "loss": 0.115, + "step": 71240 + }, + { + "epoch": 2.7510714699409244, + "grad_norm": 0.4710187315940857, + "learning_rate": 1.659780943923189e-05, + "loss": 0.1266, + "step": 71250 + }, + { + "epoch": 
2.7514575852349514, + "grad_norm": 1.3373891115188599, + "learning_rate": 1.6572068419630103e-05, + "loss": 0.1423, + "step": 71260 + }, + { + "epoch": 2.751843700528978, + "grad_norm": 0.5606533288955688, + "learning_rate": 1.6546327400028316e-05, + "loss": 0.0912, + "step": 71270 + }, + { + "epoch": 2.7522298158230045, + "grad_norm": 1.2201918363571167, + "learning_rate": 1.652058638042653e-05, + "loss": 0.2536, + "step": 71280 + }, + { + "epoch": 2.7526159311170315, + "grad_norm": 1.017829418182373, + "learning_rate": 1.6494845360824743e-05, + "loss": 0.0917, + "step": 71290 + }, + { + "epoch": 2.7530020464110585, + "grad_norm": 0.6897231340408325, + "learning_rate": 1.6469104341222956e-05, + "loss": 0.1222, + "step": 71300 + }, + { + "epoch": 2.753388161705085, + "grad_norm": 5.349620342254639, + "learning_rate": 1.644336332162117e-05, + "loss": 0.1121, + "step": 71310 + }, + { + "epoch": 2.753774276999112, + "grad_norm": 1.6144895553588867, + "learning_rate": 1.6417622302019383e-05, + "loss": 0.146, + "step": 71320 + }, + { + "epoch": 2.7541603922931386, + "grad_norm": 0.5989459753036499, + "learning_rate": 1.63918812824176e-05, + "loss": 0.0598, + "step": 71330 + }, + { + "epoch": 2.7545465075871656, + "grad_norm": 0.7278910279273987, + "learning_rate": 1.636614026281581e-05, + "loss": 0.1559, + "step": 71340 + }, + { + "epoch": 2.754932622881192, + "grad_norm": 1.0088047981262207, + "learning_rate": 1.6340399243214026e-05, + "loss": 0.0795, + "step": 71350 + }, + { + "epoch": 2.755318738175219, + "grad_norm": 1.3781206607818604, + "learning_rate": 1.631465822361224e-05, + "loss": 0.0628, + "step": 71360 + }, + { + "epoch": 2.755704853469246, + "grad_norm": 1.498246431350708, + "learning_rate": 1.6288917204010453e-05, + "loss": 0.1908, + "step": 71370 + }, + { + "epoch": 2.7560909687632726, + "grad_norm": 0.2840694785118103, + "learning_rate": 1.6263176184408666e-05, + "loss": 0.0357, + "step": 71380 + }, + { + "epoch": 2.7564770840572996, + "grad_norm": 0.6458057761192322, + "learning_rate": 1.623743516480688e-05, + "loss": 0.215, + "step": 71390 + }, + { + "epoch": 2.756863199351326, + "grad_norm": 0.42117947340011597, + "learning_rate": 1.6211694145205092e-05, + "loss": 0.1362, + "step": 71400 + }, + { + "epoch": 2.757249314645353, + "grad_norm": 3.0162267684936523, + "learning_rate": 1.6185953125603306e-05, + "loss": 0.1965, + "step": 71410 + }, + { + "epoch": 2.7576354299393797, + "grad_norm": 0.930474579334259, + "learning_rate": 1.616021210600152e-05, + "loss": 0.2395, + "step": 71420 + }, + { + "epoch": 2.7580215452334067, + "grad_norm": 0.7894459366798401, + "learning_rate": 1.6134471086399732e-05, + "loss": 0.2285, + "step": 71430 + }, + { + "epoch": 2.7584076605274337, + "grad_norm": 3.2045016288757324, + "learning_rate": 1.6108730066797946e-05, + "loss": 0.1819, + "step": 71440 + }, + { + "epoch": 2.7587937758214602, + "grad_norm": 0.7269306778907776, + "learning_rate": 1.608298904719616e-05, + "loss": 0.1594, + "step": 71450 + }, + { + "epoch": 2.7591798911154872, + "grad_norm": 1.333078145980835, + "learning_rate": 1.6057248027594372e-05, + "loss": 0.2131, + "step": 71460 + }, + { + "epoch": 2.7595660064095138, + "grad_norm": 0.6923009753227234, + "learning_rate": 1.603150700799259e-05, + "loss": 0.1174, + "step": 71470 + }, + { + "epoch": 2.7599521217035408, + "grad_norm": 3.510756254196167, + "learning_rate": 1.60057659883908e-05, + "loss": 0.1368, + "step": 71480 + }, + { + "epoch": 2.7603382369975673, + "grad_norm": 0.035045500844717026, + "learning_rate": 
1.5980024968789016e-05, + "loss": 0.2163, + "step": 71490 + }, + { + "epoch": 2.7607243522915943, + "grad_norm": 0.9699954390525818, + "learning_rate": 1.595428394918723e-05, + "loss": 0.1279, + "step": 71500 + }, + { + "epoch": 2.7611104675856213, + "grad_norm": 1.3624379634857178, + "learning_rate": 1.5928542929585442e-05, + "loss": 0.1458, + "step": 71510 + }, + { + "epoch": 2.761496582879648, + "grad_norm": 2.585167646408081, + "learning_rate": 1.5902801909983656e-05, + "loss": 0.2321, + "step": 71520 + }, + { + "epoch": 2.761882698173675, + "grad_norm": 2.018916130065918, + "learning_rate": 1.587706089038187e-05, + "loss": 0.1301, + "step": 71530 + }, + { + "epoch": 2.7622688134677014, + "grad_norm": 0.1349097490310669, + "learning_rate": 1.5851319870780082e-05, + "loss": 0.1449, + "step": 71540 + }, + { + "epoch": 2.7626549287617284, + "grad_norm": 0.651360273361206, + "learning_rate": 1.5825578851178296e-05, + "loss": 0.0588, + "step": 71550 + }, + { + "epoch": 2.763041044055755, + "grad_norm": 1.081132411956787, + "learning_rate": 1.579983783157651e-05, + "loss": 0.2273, + "step": 71560 + }, + { + "epoch": 2.763427159349782, + "grad_norm": 0.9000619649887085, + "learning_rate": 1.5774096811974722e-05, + "loss": 0.126, + "step": 71570 + }, + { + "epoch": 2.763813274643809, + "grad_norm": 1.7903470993041992, + "learning_rate": 1.574835579237294e-05, + "loss": 0.1583, + "step": 71580 + }, + { + "epoch": 2.7641993899378354, + "grad_norm": 0.5552549362182617, + "learning_rate": 1.572261477277115e-05, + "loss": 0.1613, + "step": 71590 + }, + { + "epoch": 2.764585505231862, + "grad_norm": 0.24077007174491882, + "learning_rate": 1.5696873753169365e-05, + "loss": 0.1225, + "step": 71600 + }, + { + "epoch": 2.764971620525889, + "grad_norm": 2.2357699871063232, + "learning_rate": 1.567113273356758e-05, + "loss": 0.1241, + "step": 71610 + }, + { + "epoch": 2.765357735819916, + "grad_norm": 0.49114760756492615, + "learning_rate": 1.564539171396579e-05, + "loss": 0.3685, + "step": 71620 + }, + { + "epoch": 2.7657438511139425, + "grad_norm": 0.5270382761955261, + "learning_rate": 1.5619650694364005e-05, + "loss": 0.231, + "step": 71630 + }, + { + "epoch": 2.7661299664079695, + "grad_norm": 1.494850993156433, + "learning_rate": 1.559390967476222e-05, + "loss": 0.1456, + "step": 71640 + }, + { + "epoch": 2.7665160817019965, + "grad_norm": 0.32450973987579346, + "learning_rate": 1.5568168655160432e-05, + "loss": 0.2541, + "step": 71650 + }, + { + "epoch": 2.766902196996023, + "grad_norm": 0.1868717074394226, + "learning_rate": 1.5542427635558645e-05, + "loss": 0.2315, + "step": 71660 + }, + { + "epoch": 2.7672883122900496, + "grad_norm": 1.8473451137542725, + "learning_rate": 1.551668661595686e-05, + "loss": 0.2918, + "step": 71670 + }, + { + "epoch": 2.7676744275840766, + "grad_norm": 1.959158182144165, + "learning_rate": 1.5490945596355072e-05, + "loss": 0.1368, + "step": 71680 + }, + { + "epoch": 2.7680605428781035, + "grad_norm": 1.8263370990753174, + "learning_rate": 1.5465204576753285e-05, + "loss": 0.2409, + "step": 71690 + }, + { + "epoch": 2.76844665817213, + "grad_norm": 0.5502326488494873, + "learning_rate": 1.54394635571515e-05, + "loss": 0.1233, + "step": 71700 + }, + { + "epoch": 2.768832773466157, + "grad_norm": 1.971820592880249, + "learning_rate": 1.5413722537549712e-05, + "loss": 0.2207, + "step": 71710 + }, + { + "epoch": 2.7692188887601836, + "grad_norm": 1.1263493299484253, + "learning_rate": 1.538798151794793e-05, + "loss": 0.1296, + "step": 71720 + }, + { + "epoch": 
2.7696050040542106, + "grad_norm": 0.28595641255378723, + "learning_rate": 1.536224049834614e-05, + "loss": 0.1539, + "step": 71730 + }, + { + "epoch": 2.769991119348237, + "grad_norm": 1.1686561107635498, + "learning_rate": 1.5336499478744355e-05, + "loss": 0.2165, + "step": 71740 + }, + { + "epoch": 2.770377234642264, + "grad_norm": 0.6559491157531738, + "learning_rate": 1.531075845914257e-05, + "loss": 0.1464, + "step": 71750 + }, + { + "epoch": 2.770763349936291, + "grad_norm": 0.5865970253944397, + "learning_rate": 1.5285017439540782e-05, + "loss": 0.1076, + "step": 71760 + }, + { + "epoch": 2.7711494652303177, + "grad_norm": 1.1443376541137695, + "learning_rate": 1.5259276419938995e-05, + "loss": 0.1497, + "step": 71770 + }, + { + "epoch": 2.7715355805243447, + "grad_norm": 1.307176947593689, + "learning_rate": 1.523353540033721e-05, + "loss": 0.1435, + "step": 71780 + }, + { + "epoch": 2.7719216958183712, + "grad_norm": 0.014405300840735435, + "learning_rate": 1.5207794380735422e-05, + "loss": 0.1249, + "step": 71790 + }, + { + "epoch": 2.772307811112398, + "grad_norm": 1.4182642698287964, + "learning_rate": 1.5182053361133635e-05, + "loss": 0.1405, + "step": 71800 + }, + { + "epoch": 2.7726939264064248, + "grad_norm": 0.27843913435935974, + "learning_rate": 1.5156312341531847e-05, + "loss": 0.2303, + "step": 71810 + }, + { + "epoch": 2.7730800417004517, + "grad_norm": 2.4468517303466797, + "learning_rate": 1.5130571321930062e-05, + "loss": 0.1322, + "step": 71820 + }, + { + "epoch": 2.7734661569944787, + "grad_norm": 1.2800359725952148, + "learning_rate": 1.5104830302328277e-05, + "loss": 0.1375, + "step": 71830 + }, + { + "epoch": 2.7738522722885053, + "grad_norm": 0.3998767137527466, + "learning_rate": 1.5079089282726488e-05, + "loss": 0.1066, + "step": 71840 + }, + { + "epoch": 2.7742383875825323, + "grad_norm": 0.5334371328353882, + "learning_rate": 1.5053348263124703e-05, + "loss": 0.1777, + "step": 71850 + }, + { + "epoch": 2.774624502876559, + "grad_norm": 0.4894556403160095, + "learning_rate": 1.5027607243522918e-05, + "loss": 0.1134, + "step": 71860 + }, + { + "epoch": 2.775010618170586, + "grad_norm": 0.6681411266326904, + "learning_rate": 1.500186622392113e-05, + "loss": 0.1274, + "step": 71870 + }, + { + "epoch": 2.7753967334646124, + "grad_norm": 0.8148763179779053, + "learning_rate": 1.4976125204319343e-05, + "loss": 0.184, + "step": 71880 + }, + { + "epoch": 2.7757828487586393, + "grad_norm": 2.98481822013855, + "learning_rate": 1.4950384184717558e-05, + "loss": 0.1414, + "step": 71890 + }, + { + "epoch": 2.7761689640526663, + "grad_norm": 0.24513117969036102, + "learning_rate": 1.492464316511577e-05, + "loss": 0.1361, + "step": 71900 + }, + { + "epoch": 2.776555079346693, + "grad_norm": 1.320607304573059, + "learning_rate": 1.4898902145513985e-05, + "loss": 0.159, + "step": 71910 + }, + { + "epoch": 2.77694119464072, + "grad_norm": 0.09256679564714432, + "learning_rate": 1.48731611259122e-05, + "loss": 0.1017, + "step": 71920 + }, + { + "epoch": 2.7773273099347464, + "grad_norm": 0.888762891292572, + "learning_rate": 1.4847420106310412e-05, + "loss": 0.1709, + "step": 71930 + }, + { + "epoch": 2.7777134252287734, + "grad_norm": 1.0178054571151733, + "learning_rate": 1.4821679086708627e-05, + "loss": 0.1705, + "step": 71940 + }, + { + "epoch": 2.7780995405228, + "grad_norm": 1.133257508277893, + "learning_rate": 1.4795938067106838e-05, + "loss": 0.1384, + "step": 71950 + }, + { + "epoch": 2.778485655816827, + "grad_norm": 0.41183799505233765, + "learning_rate": 
1.4770197047505051e-05, + "loss": 0.1516, + "step": 71960 + }, + { + "epoch": 2.778871771110854, + "grad_norm": 2.172168731689453, + "learning_rate": 1.4744456027903266e-05, + "loss": 0.1806, + "step": 71970 + }, + { + "epoch": 2.7792578864048805, + "grad_norm": 0.4734342098236084, + "learning_rate": 1.4718715008301478e-05, + "loss": 0.093, + "step": 71980 + }, + { + "epoch": 2.779644001698907, + "grad_norm": 0.38913142681121826, + "learning_rate": 1.4692973988699693e-05, + "loss": 0.1117, + "step": 71990 + }, + { + "epoch": 2.780030116992934, + "grad_norm": 1.2493480443954468, + "learning_rate": 1.4667232969097908e-05, + "loss": 0.2148, + "step": 72000 + }, + { + "epoch": 2.780416232286961, + "grad_norm": 0.6025747060775757, + "learning_rate": 1.464149194949612e-05, + "loss": 0.1705, + "step": 72010 + }, + { + "epoch": 2.7808023475809875, + "grad_norm": 1.4697037935256958, + "learning_rate": 1.4615750929894335e-05, + "loss": 0.1062, + "step": 72020 + }, + { + "epoch": 2.7811884628750145, + "grad_norm": 0.40200480818748474, + "learning_rate": 1.4590009910292548e-05, + "loss": 0.1152, + "step": 72030 + }, + { + "epoch": 2.7815745781690415, + "grad_norm": 0.5441505908966064, + "learning_rate": 1.456426889069076e-05, + "loss": 0.1488, + "step": 72040 + }, + { + "epoch": 2.781960693463068, + "grad_norm": 0.45176851749420166, + "learning_rate": 1.4538527871088975e-05, + "loss": 0.0965, + "step": 72050 + }, + { + "epoch": 2.7823468087570946, + "grad_norm": 0.7421501278877258, + "learning_rate": 1.4512786851487186e-05, + "loss": 0.1045, + "step": 72060 + }, + { + "epoch": 2.7827329240511216, + "grad_norm": 0.9444339871406555, + "learning_rate": 1.4487045831885401e-05, + "loss": 0.164, + "step": 72070 + }, + { + "epoch": 2.7831190393451486, + "grad_norm": 1.1744027137756348, + "learning_rate": 1.4461304812283616e-05, + "loss": 0.1624, + "step": 72080 + }, + { + "epoch": 2.783505154639175, + "grad_norm": 1.2394273281097412, + "learning_rate": 1.4435563792681828e-05, + "loss": 0.2018, + "step": 72090 + }, + { + "epoch": 2.783891269933202, + "grad_norm": 1.3852754831314087, + "learning_rate": 1.4409822773080043e-05, + "loss": 0.3339, + "step": 72100 + }, + { + "epoch": 2.784277385227229, + "grad_norm": 0.030897267162799835, + "learning_rate": 1.4384081753478256e-05, + "loss": 0.1089, + "step": 72110 + }, + { + "epoch": 2.7846635005212557, + "grad_norm": 0.41380831599235535, + "learning_rate": 1.4358340733876468e-05, + "loss": 0.0955, + "step": 72120 + }, + { + "epoch": 2.785049615815282, + "grad_norm": 0.40943947434425354, + "learning_rate": 1.4332599714274683e-05, + "loss": 0.1309, + "step": 72130 + }, + { + "epoch": 2.785435731109309, + "grad_norm": 5.614469528198242, + "learning_rate": 1.4306858694672898e-05, + "loss": 0.2242, + "step": 72140 + }, + { + "epoch": 2.785821846403336, + "grad_norm": 1.1512348651885986, + "learning_rate": 1.428111767507111e-05, + "loss": 0.1386, + "step": 72150 + }, + { + "epoch": 2.7862079616973627, + "grad_norm": 1.467037558555603, + "learning_rate": 1.4255376655469324e-05, + "loss": 0.1531, + "step": 72160 + }, + { + "epoch": 2.7865940769913897, + "grad_norm": 0.7877001762390137, + "learning_rate": 1.422963563586754e-05, + "loss": 0.1488, + "step": 72170 + }, + { + "epoch": 2.7869801922854163, + "grad_norm": 0.7077778577804565, + "learning_rate": 1.4203894616265751e-05, + "loss": 0.1213, + "step": 72180 + }, + { + "epoch": 2.7873663075794433, + "grad_norm": 0.6909589171409607, + "learning_rate": 1.4178153596663964e-05, + "loss": 0.2211, + "step": 72190 + }, + { + 
"epoch": 2.78775242287347, + "grad_norm": 0.23613958060741425, + "learning_rate": 1.4152412577062176e-05, + "loss": 0.1896, + "step": 72200 + }, + { + "epoch": 2.788138538167497, + "grad_norm": 1.218856930732727, + "learning_rate": 1.4126671557460391e-05, + "loss": 0.0989, + "step": 72210 + }, + { + "epoch": 2.788524653461524, + "grad_norm": 1.9225094318389893, + "learning_rate": 1.4100930537858606e-05, + "loss": 0.1355, + "step": 72220 + }, + { + "epoch": 2.7889107687555503, + "grad_norm": 1.3572889566421509, + "learning_rate": 1.4075189518256818e-05, + "loss": 0.2708, + "step": 72230 + }, + { + "epoch": 2.7892968840495773, + "grad_norm": 1.4339534044265747, + "learning_rate": 1.4049448498655033e-05, + "loss": 0.173, + "step": 72240 + }, + { + "epoch": 2.789682999343604, + "grad_norm": 1.6303379535675049, + "learning_rate": 1.4023707479053248e-05, + "loss": 0.1791, + "step": 72250 + }, + { + "epoch": 2.790069114637631, + "grad_norm": 0.25583216547966003, + "learning_rate": 1.399796645945146e-05, + "loss": 0.0694, + "step": 72260 + }, + { + "epoch": 2.7904552299316574, + "grad_norm": 0.29900923371315, + "learning_rate": 1.3972225439849673e-05, + "loss": 0.0942, + "step": 72270 + }, + { + "epoch": 2.7908413452256844, + "grad_norm": 0.4300585687160492, + "learning_rate": 1.3946484420247888e-05, + "loss": 0.1378, + "step": 72280 + }, + { + "epoch": 2.7912274605197114, + "grad_norm": 2.40873384475708, + "learning_rate": 1.39207434006461e-05, + "loss": 0.2227, + "step": 72290 + }, + { + "epoch": 2.791613575813738, + "grad_norm": 1.8558450937271118, + "learning_rate": 1.3895002381044314e-05, + "loss": 0.1489, + "step": 72300 + }, + { + "epoch": 2.791999691107765, + "grad_norm": 0.7937175631523132, + "learning_rate": 1.386926136144253e-05, + "loss": 0.2045, + "step": 72310 + }, + { + "epoch": 2.7923858064017915, + "grad_norm": 1.4006129503250122, + "learning_rate": 1.3843520341840741e-05, + "loss": 0.205, + "step": 72320 + }, + { + "epoch": 2.7927719216958184, + "grad_norm": 0.765356183052063, + "learning_rate": 1.3817779322238956e-05, + "loss": 0.0647, + "step": 72330 + }, + { + "epoch": 2.793158036989845, + "grad_norm": 0.9293017983436584, + "learning_rate": 1.3792038302637167e-05, + "loss": 0.149, + "step": 72340 + }, + { + "epoch": 2.793544152283872, + "grad_norm": 0.1647326946258545, + "learning_rate": 1.376629728303538e-05, + "loss": 0.0957, + "step": 72350 + }, + { + "epoch": 2.793930267577899, + "grad_norm": 1.7377187013626099, + "learning_rate": 1.3740556263433596e-05, + "loss": 0.1945, + "step": 72360 + }, + { + "epoch": 2.7943163828719255, + "grad_norm": 1.4326138496398926, + "learning_rate": 1.3714815243831807e-05, + "loss": 0.1362, + "step": 72370 + }, + { + "epoch": 2.7947024981659525, + "grad_norm": 0.07872216403484344, + "learning_rate": 1.3689074224230022e-05, + "loss": 0.1186, + "step": 72380 + }, + { + "epoch": 2.795088613459979, + "grad_norm": 2.5933990478515625, + "learning_rate": 1.3663333204628237e-05, + "loss": 0.3499, + "step": 72390 + }, + { + "epoch": 2.795474728754006, + "grad_norm": 0.8128255009651184, + "learning_rate": 1.3637592185026449e-05, + "loss": 0.1798, + "step": 72400 + }, + { + "epoch": 2.7958608440480326, + "grad_norm": 1.3631271123886108, + "learning_rate": 1.3611851165424664e-05, + "loss": 0.1602, + "step": 72410 + }, + { + "epoch": 2.7962469593420596, + "grad_norm": 0.2131202220916748, + "learning_rate": 1.3586110145822877e-05, + "loss": 0.2031, + "step": 72420 + }, + { + "epoch": 2.7966330746360866, + "grad_norm": 2.3542559146881104, + "learning_rate": 
1.3560369126221089e-05, + "loss": 0.2783, + "step": 72430 + }, + { + "epoch": 2.797019189930113, + "grad_norm": 1.075270175933838, + "learning_rate": 1.3534628106619304e-05, + "loss": 0.2014, + "step": 72440 + }, + { + "epoch": 2.7974053052241397, + "grad_norm": 1.6369863748550415, + "learning_rate": 1.3508887087017516e-05, + "loss": 0.1512, + "step": 72450 + }, + { + "epoch": 2.7977914205181666, + "grad_norm": 2.450411081314087, + "learning_rate": 1.348314606741573e-05, + "loss": 0.1899, + "step": 72460 + }, + { + "epoch": 2.7981775358121936, + "grad_norm": 0.5244776010513306, + "learning_rate": 1.3457405047813946e-05, + "loss": 0.1652, + "step": 72470 + }, + { + "epoch": 2.79856365110622, + "grad_norm": 1.206304907798767, + "learning_rate": 1.3431664028212157e-05, + "loss": 0.1695, + "step": 72480 + }, + { + "epoch": 2.798949766400247, + "grad_norm": 1.7752883434295654, + "learning_rate": 1.3405923008610372e-05, + "loss": 0.2092, + "step": 72490 + }, + { + "epoch": 2.799335881694274, + "grad_norm": 2.458888530731201, + "learning_rate": 1.3380181989008586e-05, + "loss": 0.1963, + "step": 72500 + }, + { + "epoch": 2.7997219969883007, + "grad_norm": 0.24970018863677979, + "learning_rate": 1.3354440969406797e-05, + "loss": 0.1438, + "step": 72510 + }, + { + "epoch": 2.8001081122823273, + "grad_norm": 1.6026058197021484, + "learning_rate": 1.3328699949805012e-05, + "loss": 0.1702, + "step": 72520 + }, + { + "epoch": 2.8004942275763542, + "grad_norm": 0.6461538076400757, + "learning_rate": 1.3302958930203227e-05, + "loss": 0.1725, + "step": 72530 + }, + { + "epoch": 2.8008803428703812, + "grad_norm": 0.30602967739105225, + "learning_rate": 1.3277217910601439e-05, + "loss": 0.1018, + "step": 72540 + }, + { + "epoch": 2.801266458164408, + "grad_norm": 0.5502751469612122, + "learning_rate": 1.3251476890999654e-05, + "loss": 0.1793, + "step": 72550 + }, + { + "epoch": 2.8016525734584348, + "grad_norm": 0.2644082307815552, + "learning_rate": 1.3225735871397869e-05, + "loss": 0.0423, + "step": 72560 + }, + { + "epoch": 2.8020386887524618, + "grad_norm": 2.0505154132843018, + "learning_rate": 1.319999485179608e-05, + "loss": 0.2418, + "step": 72570 + }, + { + "epoch": 2.8024248040464883, + "grad_norm": 3.0608208179473877, + "learning_rate": 1.3174253832194294e-05, + "loss": 0.1613, + "step": 72580 + }, + { + "epoch": 2.802810919340515, + "grad_norm": 0.05718081071972847, + "learning_rate": 1.3148512812592505e-05, + "loss": 0.132, + "step": 72590 + }, + { + "epoch": 2.803197034634542, + "grad_norm": 2.007830858230591, + "learning_rate": 1.312277179299072e-05, + "loss": 0.138, + "step": 72600 + }, + { + "epoch": 2.803583149928569, + "grad_norm": 0.4181762933731079, + "learning_rate": 1.3097030773388935e-05, + "loss": 0.0742, + "step": 72610 + }, + { + "epoch": 2.8039692652225954, + "grad_norm": 1.4194786548614502, + "learning_rate": 1.3071289753787147e-05, + "loss": 0.1517, + "step": 72620 + }, + { + "epoch": 2.8043553805166224, + "grad_norm": 0.03250798210501671, + "learning_rate": 1.3045548734185362e-05, + "loss": 0.1, + "step": 72630 + }, + { + "epoch": 2.804741495810649, + "grad_norm": 0.983828067779541, + "learning_rate": 1.3019807714583577e-05, + "loss": 0.1765, + "step": 72640 + }, + { + "epoch": 2.805127611104676, + "grad_norm": 0.4670206904411316, + "learning_rate": 1.2994066694981789e-05, + "loss": 0.0954, + "step": 72650 + }, + { + "epoch": 2.8055137263987024, + "grad_norm": 0.09260097146034241, + "learning_rate": 1.2968325675380002e-05, + "loss": 0.119, + "step": 72660 + }, + { + "epoch": 
2.8058998416927294, + "grad_norm": 2.12126088142395, + "learning_rate": 1.2942584655778217e-05, + "loss": 0.1673, + "step": 72670 + }, + { + "epoch": 2.8062859569867564, + "grad_norm": 0.11968256533145905, + "learning_rate": 1.2916843636176429e-05, + "loss": 0.0941, + "step": 72680 + }, + { + "epoch": 2.806672072280783, + "grad_norm": 1.229608416557312, + "learning_rate": 1.2891102616574644e-05, + "loss": 0.2319, + "step": 72690 + }, + { + "epoch": 2.80705818757481, + "grad_norm": 0.7404507994651794, + "learning_rate": 1.2865361596972859e-05, + "loss": 0.1878, + "step": 72700 + }, + { + "epoch": 2.8074443028688365, + "grad_norm": 0.15742874145507812, + "learning_rate": 1.283962057737107e-05, + "loss": 0.2041, + "step": 72710 + }, + { + "epoch": 2.8078304181628635, + "grad_norm": 1.860520601272583, + "learning_rate": 1.2813879557769285e-05, + "loss": 0.1575, + "step": 72720 + }, + { + "epoch": 2.80821653345689, + "grad_norm": 2.309978723526001, + "learning_rate": 1.2788138538167497e-05, + "loss": 0.104, + "step": 72730 + }, + { + "epoch": 2.808602648750917, + "grad_norm": 0.4069162905216217, + "learning_rate": 1.276239751856571e-05, + "loss": 0.1167, + "step": 72740 + }, + { + "epoch": 2.808988764044944, + "grad_norm": 0.9853174090385437, + "learning_rate": 1.2736656498963925e-05, + "loss": 0.1826, + "step": 72750 + }, + { + "epoch": 2.8093748793389706, + "grad_norm": 0.29024359583854675, + "learning_rate": 1.2710915479362137e-05, + "loss": 0.1662, + "step": 72760 + }, + { + "epoch": 2.8097609946329976, + "grad_norm": 1.698085069656372, + "learning_rate": 1.2685174459760352e-05, + "loss": 0.2987, + "step": 72770 + }, + { + "epoch": 2.810147109927024, + "grad_norm": 0.6112163066864014, + "learning_rate": 1.2659433440158567e-05, + "loss": 0.1074, + "step": 72780 + }, + { + "epoch": 2.810533225221051, + "grad_norm": 0.6659224033355713, + "learning_rate": 1.2633692420556778e-05, + "loss": 0.1514, + "step": 72790 + }, + { + "epoch": 2.8109193405150776, + "grad_norm": 0.40598243474960327, + "learning_rate": 1.2607951400954993e-05, + "loss": 0.1022, + "step": 72800 + }, + { + "epoch": 2.8113054558091046, + "grad_norm": 0.3016485571861267, + "learning_rate": 1.2582210381353207e-05, + "loss": 0.1226, + "step": 72810 + }, + { + "epoch": 2.8116915711031316, + "grad_norm": 0.8959118127822876, + "learning_rate": 1.2556469361751418e-05, + "loss": 0.1283, + "step": 72820 + }, + { + "epoch": 2.812077686397158, + "grad_norm": 0.1264275461435318, + "learning_rate": 1.2530728342149633e-05, + "loss": 0.1191, + "step": 72830 + }, + { + "epoch": 2.812463801691185, + "grad_norm": 0.06748595088720322, + "learning_rate": 1.2504987322547848e-05, + "loss": 0.1728, + "step": 72840 + }, + { + "epoch": 2.8128499169852117, + "grad_norm": 1.8376634120941162, + "learning_rate": 1.247924630294606e-05, + "loss": 0.1773, + "step": 72850 + }, + { + "epoch": 2.8132360322792387, + "grad_norm": 1.0151314735412598, + "learning_rate": 1.2453505283344275e-05, + "loss": 0.0979, + "step": 72860 + }, + { + "epoch": 2.8136221475732652, + "grad_norm": 0.274681955575943, + "learning_rate": 1.2427764263742488e-05, + "loss": 0.1806, + "step": 72870 + }, + { + "epoch": 2.814008262867292, + "grad_norm": 3.2869608402252197, + "learning_rate": 1.2402023244140702e-05, + "loss": 0.1469, + "step": 72880 + }, + { + "epoch": 2.814394378161319, + "grad_norm": 0.7580883502960205, + "learning_rate": 1.2376282224538915e-05, + "loss": 0.175, + "step": 72890 + }, + { + "epoch": 2.8147804934553458, + "grad_norm": 0.26281633973121643, + "learning_rate": 
1.2350541204937128e-05, + "loss": 0.1801, + "step": 72900 + }, + { + "epoch": 2.8151666087493723, + "grad_norm": 0.5014416575431824, + "learning_rate": 1.2324800185335341e-05, + "loss": 0.1159, + "step": 72910 + }, + { + "epoch": 2.8155527240433993, + "grad_norm": 0.5112303495407104, + "learning_rate": 1.2299059165733555e-05, + "loss": 0.0988, + "step": 72920 + }, + { + "epoch": 2.8159388393374263, + "grad_norm": 1.928898811340332, + "learning_rate": 1.227331814613177e-05, + "loss": 0.1261, + "step": 72930 + }, + { + "epoch": 2.816324954631453, + "grad_norm": 2.8064088821411133, + "learning_rate": 1.2247577126529983e-05, + "loss": 0.1811, + "step": 72940 + }, + { + "epoch": 2.81671106992548, + "grad_norm": 1.5859709978103638, + "learning_rate": 1.2221836106928196e-05, + "loss": 0.2235, + "step": 72950 + }, + { + "epoch": 2.817097185219507, + "grad_norm": 0.6035090684890747, + "learning_rate": 1.219609508732641e-05, + "loss": 0.0931, + "step": 72960 + }, + { + "epoch": 2.8174833005135334, + "grad_norm": 0.19035843014717102, + "learning_rate": 1.2170354067724623e-05, + "loss": 0.182, + "step": 72970 + }, + { + "epoch": 2.81786941580756, + "grad_norm": 0.2380947321653366, + "learning_rate": 1.2144613048122836e-05, + "loss": 0.0744, + "step": 72980 + }, + { + "epoch": 2.818255531101587, + "grad_norm": 0.5143213868141174, + "learning_rate": 1.211887202852105e-05, + "loss": 0.1252, + "step": 72990 + }, + { + "epoch": 2.818641646395614, + "grad_norm": 1.6957656145095825, + "learning_rate": 1.2093131008919265e-05, + "loss": 0.1701, + "step": 73000 + }, + { + "epoch": 2.8190277616896404, + "grad_norm": 0.31234198808670044, + "learning_rate": 1.2067389989317478e-05, + "loss": 0.0638, + "step": 73010 + }, + { + "epoch": 2.8194138769836674, + "grad_norm": 1.2418551445007324, + "learning_rate": 1.2041648969715691e-05, + "loss": 0.1972, + "step": 73020 + }, + { + "epoch": 2.819799992277694, + "grad_norm": 0.08601387590169907, + "learning_rate": 1.2015907950113905e-05, + "loss": 0.1808, + "step": 73030 + }, + { + "epoch": 2.820186107571721, + "grad_norm": 0.7416751384735107, + "learning_rate": 1.1990166930512118e-05, + "loss": 0.0543, + "step": 73040 + }, + { + "epoch": 2.8205722228657475, + "grad_norm": 0.957653284072876, + "learning_rate": 1.1964425910910331e-05, + "loss": 0.125, + "step": 73050 + }, + { + "epoch": 2.8209583381597745, + "grad_norm": 0.34786149859428406, + "learning_rate": 1.1938684891308545e-05, + "loss": 0.2388, + "step": 73060 + }, + { + "epoch": 2.8213444534538015, + "grad_norm": 1.0569926500320435, + "learning_rate": 1.1912943871706758e-05, + "loss": 0.1199, + "step": 73070 + }, + { + "epoch": 2.821730568747828, + "grad_norm": 1.948347568511963, + "learning_rate": 1.1887202852104973e-05, + "loss": 0.195, + "step": 73080 + }, + { + "epoch": 2.822116684041855, + "grad_norm": 0.18845289945602417, + "learning_rate": 1.1861461832503186e-05, + "loss": 0.295, + "step": 73090 + }, + { + "epoch": 2.8225027993358816, + "grad_norm": 1.7377262115478516, + "learning_rate": 1.18357208129014e-05, + "loss": 0.3067, + "step": 73100 + }, + { + "epoch": 2.8228889146299085, + "grad_norm": 1.232576847076416, + "learning_rate": 1.1809979793299614e-05, + "loss": 0.1849, + "step": 73110 + }, + { + "epoch": 2.823275029923935, + "grad_norm": 1.6318284273147583, + "learning_rate": 1.1784238773697826e-05, + "loss": 0.0626, + "step": 73120 + }, + { + "epoch": 2.823661145217962, + "grad_norm": 0.5187623500823975, + "learning_rate": 1.175849775409604e-05, + "loss": 0.1255, + "step": 73130 + }, + { + "epoch": 
2.824047260511989, + "grad_norm": 2.4732282161712646, + "learning_rate": 1.1732756734494253e-05, + "loss": 0.1433, + "step": 73140 + }, + { + "epoch": 2.8244333758060156, + "grad_norm": 2.080509901046753, + "learning_rate": 1.1707015714892468e-05, + "loss": 0.1533, + "step": 73150 + }, + { + "epoch": 2.8248194911000426, + "grad_norm": 0.61173415184021, + "learning_rate": 1.1681274695290681e-05, + "loss": 0.0807, + "step": 73160 + }, + { + "epoch": 2.825205606394069, + "grad_norm": 0.1210317462682724, + "learning_rate": 1.1655533675688894e-05, + "loss": 0.1433, + "step": 73170 + }, + { + "epoch": 2.825591721688096, + "grad_norm": 0.699754536151886, + "learning_rate": 1.162979265608711e-05, + "loss": 0.091, + "step": 73180 + }, + { + "epoch": 2.8259778369821227, + "grad_norm": 1.1935967206954956, + "learning_rate": 1.1604051636485323e-05, + "loss": 0.1567, + "step": 73190 + }, + { + "epoch": 2.8263639522761497, + "grad_norm": 0.14032036066055298, + "learning_rate": 1.1578310616883534e-05, + "loss": 0.1283, + "step": 73200 + }, + { + "epoch": 2.8267500675701767, + "grad_norm": 0.1003938615322113, + "learning_rate": 1.1552569597281748e-05, + "loss": 0.241, + "step": 73210 + }, + { + "epoch": 2.827136182864203, + "grad_norm": 0.08814160525798798, + "learning_rate": 1.1526828577679963e-05, + "loss": 0.1191, + "step": 73220 + }, + { + "epoch": 2.82752229815823, + "grad_norm": 2.224803924560547, + "learning_rate": 1.1501087558078176e-05, + "loss": 0.1395, + "step": 73230 + }, + { + "epoch": 2.8279084134522567, + "grad_norm": 0.7424294948577881, + "learning_rate": 1.147534653847639e-05, + "loss": 0.2086, + "step": 73240 + }, + { + "epoch": 2.8282945287462837, + "grad_norm": 2.4858903884887695, + "learning_rate": 1.1449605518874604e-05, + "loss": 0.3264, + "step": 73250 + }, + { + "epoch": 2.8286806440403103, + "grad_norm": 0.09597936272621155, + "learning_rate": 1.1423864499272818e-05, + "loss": 0.1849, + "step": 73260 + }, + { + "epoch": 2.8290667593343373, + "grad_norm": 0.14344246685504913, + "learning_rate": 1.1398123479671031e-05, + "loss": 0.0898, + "step": 73270 + }, + { + "epoch": 2.8294528746283643, + "grad_norm": 1.6673258543014526, + "learning_rate": 1.1372382460069242e-05, + "loss": 0.1218, + "step": 73280 + }, + { + "epoch": 2.829838989922391, + "grad_norm": 0.7718226313591003, + "learning_rate": 1.1346641440467457e-05, + "loss": 0.246, + "step": 73290 + }, + { + "epoch": 2.8302251052164173, + "grad_norm": 1.073569416999817, + "learning_rate": 1.132090042086567e-05, + "loss": 0.1469, + "step": 73300 + }, + { + "epoch": 2.8306112205104443, + "grad_norm": 0.6750389933586121, + "learning_rate": 1.1295159401263884e-05, + "loss": 0.0572, + "step": 73310 + }, + { + "epoch": 2.8309973358044713, + "grad_norm": 0.3106151819229126, + "learning_rate": 1.1269418381662099e-05, + "loss": 0.1877, + "step": 73320 + }, + { + "epoch": 2.831383451098498, + "grad_norm": 2.519272804260254, + "learning_rate": 1.1243677362060312e-05, + "loss": 0.3173, + "step": 73330 + }, + { + "epoch": 2.831769566392525, + "grad_norm": 0.1696769744157791, + "learning_rate": 1.1217936342458526e-05, + "loss": 0.0998, + "step": 73340 + }, + { + "epoch": 2.832155681686552, + "grad_norm": 1.1009591817855835, + "learning_rate": 1.1192195322856739e-05, + "loss": 0.1534, + "step": 73350 + }, + { + "epoch": 2.8325417969805784, + "grad_norm": 0.5018852949142456, + "learning_rate": 1.1166454303254952e-05, + "loss": 0.1132, + "step": 73360 + }, + { + "epoch": 2.832927912274605, + "grad_norm": 0.75879967212677, + "learning_rate": 
1.1140713283653166e-05, + "loss": 0.2494, + "step": 73370 + }, + { + "epoch": 2.833314027568632, + "grad_norm": 0.6770151853561401, + "learning_rate": 1.1114972264051379e-05, + "loss": 0.127, + "step": 73380 + }, + { + "epoch": 2.833700142862659, + "grad_norm": 0.7802016139030457, + "learning_rate": 1.1089231244449594e-05, + "loss": 0.0463, + "step": 73390 + }, + { + "epoch": 2.8340862581566855, + "grad_norm": 0.6405506134033203, + "learning_rate": 1.1063490224847807e-05, + "loss": 0.1039, + "step": 73400 + }, + { + "epoch": 2.8344723734507125, + "grad_norm": 1.5618057250976562, + "learning_rate": 1.103774920524602e-05, + "loss": 0.2303, + "step": 73410 + }, + { + "epoch": 2.8348584887447394, + "grad_norm": 0.07669465243816376, + "learning_rate": 1.1012008185644234e-05, + "loss": 0.0619, + "step": 73420 + }, + { + "epoch": 2.835244604038766, + "grad_norm": 1.495162010192871, + "learning_rate": 1.0986267166042447e-05, + "loss": 0.3976, + "step": 73430 + }, + { + "epoch": 2.8356307193327925, + "grad_norm": 1.0150856971740723, + "learning_rate": 1.096052614644066e-05, + "loss": 0.1209, + "step": 73440 + }, + { + "epoch": 2.8360168346268195, + "grad_norm": 0.0810672789812088, + "learning_rate": 1.0934785126838874e-05, + "loss": 0.1443, + "step": 73450 + }, + { + "epoch": 2.8364029499208465, + "grad_norm": 1.9854507446289062, + "learning_rate": 1.0909044107237089e-05, + "loss": 0.0724, + "step": 73460 + }, + { + "epoch": 2.836789065214873, + "grad_norm": 1.1847221851348877, + "learning_rate": 1.0883303087635302e-05, + "loss": 0.1215, + "step": 73470 + }, + { + "epoch": 2.8371751805089, + "grad_norm": 0.1890803575515747, + "learning_rate": 1.0857562068033515e-05, + "loss": 0.0693, + "step": 73480 + }, + { + "epoch": 2.8375612958029266, + "grad_norm": 1.4246773719787598, + "learning_rate": 1.0831821048431729e-05, + "loss": 0.1458, + "step": 73490 + }, + { + "epoch": 2.8379474110969536, + "grad_norm": 0.9759969115257263, + "learning_rate": 1.0806080028829944e-05, + "loss": 0.0894, + "step": 73500 + }, + { + "epoch": 2.83833352639098, + "grad_norm": 0.06872682273387909, + "learning_rate": 1.0780339009228155e-05, + "loss": 0.0911, + "step": 73510 + }, + { + "epoch": 2.838719641685007, + "grad_norm": 0.9503278732299805, + "learning_rate": 1.0754597989626369e-05, + "loss": 0.0367, + "step": 73520 + }, + { + "epoch": 2.839105756979034, + "grad_norm": 0.08986163884401321, + "learning_rate": 1.0728856970024582e-05, + "loss": 0.113, + "step": 73530 + }, + { + "epoch": 2.8394918722730607, + "grad_norm": 0.4085181951522827, + "learning_rate": 1.0703115950422797e-05, + "loss": 0.0858, + "step": 73540 + }, + { + "epoch": 2.8398779875670876, + "grad_norm": 2.045522689819336, + "learning_rate": 1.067737493082101e-05, + "loss": 0.0585, + "step": 73550 + }, + { + "epoch": 2.840264102861114, + "grad_norm": 1.701407551765442, + "learning_rate": 1.0651633911219224e-05, + "loss": 0.2093, + "step": 73560 + }, + { + "epoch": 2.840650218155141, + "grad_norm": 0.5299584865570068, + "learning_rate": 1.0625892891617439e-05, + "loss": 0.1673, + "step": 73570 + }, + { + "epoch": 2.8410363334491677, + "grad_norm": 1.289556622505188, + "learning_rate": 1.0600151872015652e-05, + "loss": 0.0876, + "step": 73580 + }, + { + "epoch": 2.8414224487431947, + "grad_norm": 1.0992724895477295, + "learning_rate": 1.0574410852413864e-05, + "loss": 0.1526, + "step": 73590 + }, + { + "epoch": 2.8418085640372217, + "grad_norm": 1.4800548553466797, + "learning_rate": 1.0548669832812077e-05, + "loss": 0.2792, + "step": 73600 + }, + { + "epoch": 
2.8421946793312483, + "grad_norm": 0.7440020442008972, + "learning_rate": 1.0522928813210292e-05, + "loss": 0.0838, + "step": 73610 + }, + { + "epoch": 2.8425807946252752, + "grad_norm": 0.5010614395141602, + "learning_rate": 1.0497187793608505e-05, + "loss": 0.2615, + "step": 73620 + }, + { + "epoch": 2.842966909919302, + "grad_norm": 0.6573401093482971, + "learning_rate": 1.0471446774006719e-05, + "loss": 0.1924, + "step": 73630 + }, + { + "epoch": 2.8433530252133288, + "grad_norm": 1.2293144464492798, + "learning_rate": 1.0445705754404934e-05, + "loss": 0.1723, + "step": 73640 + }, + { + "epoch": 2.8437391405073553, + "grad_norm": 1.2916191816329956, + "learning_rate": 1.0419964734803147e-05, + "loss": 0.2163, + "step": 73650 + }, + { + "epoch": 2.8441252558013823, + "grad_norm": 0.48834675550460815, + "learning_rate": 1.039422371520136e-05, + "loss": 0.3001, + "step": 73660 + }, + { + "epoch": 2.8445113710954093, + "grad_norm": 0.4397851228713989, + "learning_rate": 1.0368482695599572e-05, + "loss": 0.1354, + "step": 73670 + }, + { + "epoch": 2.844897486389436, + "grad_norm": 1.6258771419525146, + "learning_rate": 1.0342741675997787e-05, + "loss": 0.1677, + "step": 73680 + }, + { + "epoch": 2.845283601683463, + "grad_norm": 0.5988297462463379, + "learning_rate": 1.0317000656396e-05, + "loss": 0.0911, + "step": 73690 + }, + { + "epoch": 2.8456697169774894, + "grad_norm": 2.7825276851654053, + "learning_rate": 1.0291259636794213e-05, + "loss": 0.2635, + "step": 73700 + }, + { + "epoch": 2.8460558322715164, + "grad_norm": 0.0774473026394844, + "learning_rate": 1.0265518617192428e-05, + "loss": 0.1239, + "step": 73710 + }, + { + "epoch": 2.846441947565543, + "grad_norm": 0.22317875921726227, + "learning_rate": 1.0239777597590642e-05, + "loss": 0.2164, + "step": 73720 + }, + { + "epoch": 2.84682806285957, + "grad_norm": 0.15649321675300598, + "learning_rate": 1.0214036577988855e-05, + "loss": 0.0442, + "step": 73730 + }, + { + "epoch": 2.847214178153597, + "grad_norm": 1.7008354663848877, + "learning_rate": 1.0188295558387068e-05, + "loss": 0.0829, + "step": 73740 + }, + { + "epoch": 2.8476002934476234, + "grad_norm": 0.9527981281280518, + "learning_rate": 1.0162554538785282e-05, + "loss": 0.1655, + "step": 73750 + }, + { + "epoch": 2.84798640874165, + "grad_norm": 0.27073028683662415, + "learning_rate": 1.0136813519183495e-05, + "loss": 0.0909, + "step": 73760 + }, + { + "epoch": 2.848372524035677, + "grad_norm": 0.6584774851799011, + "learning_rate": 1.0111072499581708e-05, + "loss": 0.1365, + "step": 73770 + }, + { + "epoch": 2.848758639329704, + "grad_norm": 0.06433244794607162, + "learning_rate": 1.0085331479979923e-05, + "loss": 0.0936, + "step": 73780 + }, + { + "epoch": 2.8491447546237305, + "grad_norm": 0.31640946865081787, + "learning_rate": 1.0059590460378137e-05, + "loss": 0.053, + "step": 73790 + }, + { + "epoch": 2.8495308699177575, + "grad_norm": 1.378275752067566, + "learning_rate": 1.003384944077635e-05, + "loss": 0.2072, + "step": 73800 + }, + { + "epoch": 2.8499169852117845, + "grad_norm": 0.40181395411491394, + "learning_rate": 1.0008108421174563e-05, + "loss": 0.0458, + "step": 73810 + }, + { + "epoch": 2.850303100505811, + "grad_norm": 0.6035460233688354, + "learning_rate": 9.982367401572777e-06, + "loss": 0.2126, + "step": 73820 + }, + { + "epoch": 2.8506892157998376, + "grad_norm": 1.581748366355896, + "learning_rate": 9.95662638197099e-06, + "loss": 0.2245, + "step": 73830 + }, + { + "epoch": 2.8510753310938646, + "grad_norm": 0.15285348892211914, + "learning_rate": 
9.930885362369203e-06, + "loss": 0.2124, + "step": 73840 + }, + { + "epoch": 2.8514614463878916, + "grad_norm": 0.5655555725097656, + "learning_rate": 9.905144342767418e-06, + "loss": 0.215, + "step": 73850 + }, + { + "epoch": 2.851847561681918, + "grad_norm": 0.91652512550354, + "learning_rate": 9.879403323165631e-06, + "loss": 0.0512, + "step": 73860 + }, + { + "epoch": 2.852233676975945, + "grad_norm": 0.4889742136001587, + "learning_rate": 9.853662303563845e-06, + "loss": 0.0565, + "step": 73870 + }, + { + "epoch": 2.852619792269972, + "grad_norm": 0.5656816959381104, + "learning_rate": 9.827921283962058e-06, + "loss": 0.1392, + "step": 73880 + }, + { + "epoch": 2.8530059075639986, + "grad_norm": 0.5749841332435608, + "learning_rate": 9.802180264360273e-06, + "loss": 0.1548, + "step": 73890 + }, + { + "epoch": 2.853392022858025, + "grad_norm": 0.40446341037750244, + "learning_rate": 9.776439244758485e-06, + "loss": 0.1078, + "step": 73900 + }, + { + "epoch": 2.853778138152052, + "grad_norm": 0.7668073773384094, + "learning_rate": 9.750698225156698e-06, + "loss": 0.1282, + "step": 73910 + }, + { + "epoch": 2.854164253446079, + "grad_norm": 1.973522424697876, + "learning_rate": 9.724957205554913e-06, + "loss": 0.1002, + "step": 73920 + }, + { + "epoch": 2.8545503687401057, + "grad_norm": 3.882335662841797, + "learning_rate": 9.699216185953126e-06, + "loss": 0.3315, + "step": 73930 + }, + { + "epoch": 2.8549364840341327, + "grad_norm": 3.6236727237701416, + "learning_rate": 9.67347516635134e-06, + "loss": 0.0749, + "step": 73940 + }, + { + "epoch": 2.8553225993281592, + "grad_norm": 3.9697139263153076, + "learning_rate": 9.647734146749553e-06, + "loss": 0.165, + "step": 73950 + }, + { + "epoch": 2.8557087146221862, + "grad_norm": 0.2589983344078064, + "learning_rate": 9.621993127147768e-06, + "loss": 0.0949, + "step": 73960 + }, + { + "epoch": 2.8560948299162128, + "grad_norm": 1.9424326419830322, + "learning_rate": 9.596252107545981e-06, + "loss": 0.194, + "step": 73970 + }, + { + "epoch": 2.8564809452102398, + "grad_norm": 1.8792887926101685, + "learning_rate": 9.570511087944193e-06, + "loss": 0.1259, + "step": 73980 + }, + { + "epoch": 2.8568670605042668, + "grad_norm": 0.29986900091171265, + "learning_rate": 9.544770068342406e-06, + "loss": 0.0672, + "step": 73990 + }, + { + "epoch": 2.8572531757982933, + "grad_norm": 0.5949634909629822, + "learning_rate": 9.519029048740621e-06, + "loss": 0.1991, + "step": 74000 + }, + { + "epoch": 2.8576392910923203, + "grad_norm": 0.7105257511138916, + "learning_rate": 9.493288029138835e-06, + "loss": 0.1142, + "step": 74010 + }, + { + "epoch": 2.858025406386347, + "grad_norm": 0.1612206995487213, + "learning_rate": 9.467547009537048e-06, + "loss": 0.0743, + "step": 74020 + }, + { + "epoch": 2.858411521680374, + "grad_norm": 0.96357262134552, + "learning_rate": 9.441805989935263e-06, + "loss": 0.1514, + "step": 74030 + }, + { + "epoch": 2.8587976369744004, + "grad_norm": 0.10844029486179352, + "learning_rate": 9.416064970333476e-06, + "loss": 0.0921, + "step": 74040 + }, + { + "epoch": 2.8591837522684274, + "grad_norm": 1.9250043630599976, + "learning_rate": 9.39032395073169e-06, + "loss": 0.2404, + "step": 74050 + }, + { + "epoch": 2.8595698675624543, + "grad_norm": 0.9182831048965454, + "learning_rate": 9.364582931129901e-06, + "loss": 0.1312, + "step": 74060 + }, + { + "epoch": 2.859955982856481, + "grad_norm": 0.45169830322265625, + "learning_rate": 9.338841911528116e-06, + "loss": 0.1484, + "step": 74070 + }, + { + "epoch": 2.860342098150508, + 
"grad_norm": 1.3599480390548706, + "learning_rate": 9.31310089192633e-06, + "loss": 0.1648, + "step": 74080 + }, + { + "epoch": 2.8607282134445344, + "grad_norm": 0.7859013676643372, + "learning_rate": 9.287359872324543e-06, + "loss": 0.1339, + "step": 74090 + }, + { + "epoch": 2.8611143287385614, + "grad_norm": 0.2625623047351837, + "learning_rate": 9.261618852722758e-06, + "loss": 0.1635, + "step": 74100 + }, + { + "epoch": 2.861500444032588, + "grad_norm": 3.6348588466644287, + "learning_rate": 9.235877833120971e-06, + "loss": 0.1958, + "step": 74110 + }, + { + "epoch": 2.861886559326615, + "grad_norm": 0.2913823127746582, + "learning_rate": 9.210136813519184e-06, + "loss": 0.1228, + "step": 74120 + }, + { + "epoch": 2.862272674620642, + "grad_norm": 0.8785780072212219, + "learning_rate": 9.184395793917398e-06, + "loss": 0.1284, + "step": 74130 + }, + { + "epoch": 2.8626587899146685, + "grad_norm": 0.20822127163410187, + "learning_rate": 9.158654774315611e-06, + "loss": 0.2853, + "step": 74140 + }, + { + "epoch": 2.8630449052086955, + "grad_norm": 0.12501594424247742, + "learning_rate": 9.132913754713824e-06, + "loss": 0.2773, + "step": 74150 + }, + { + "epoch": 2.863431020502722, + "grad_norm": 0.7225301861763, + "learning_rate": 9.107172735112038e-06, + "loss": 0.158, + "step": 74160 + }, + { + "epoch": 2.863817135796749, + "grad_norm": 0.15984690189361572, + "learning_rate": 9.081431715510253e-06, + "loss": 0.2105, + "step": 74170 + }, + { + "epoch": 2.8642032510907756, + "grad_norm": 0.9793509840965271, + "learning_rate": 9.055690695908466e-06, + "loss": 0.231, + "step": 74180 + }, + { + "epoch": 2.8645893663848025, + "grad_norm": 1.3881072998046875, + "learning_rate": 9.02994967630668e-06, + "loss": 0.1013, + "step": 74190 + }, + { + "epoch": 2.8649754816788295, + "grad_norm": 2.9956090450286865, + "learning_rate": 9.004208656704893e-06, + "loss": 0.1328, + "step": 74200 + }, + { + "epoch": 2.865361596972856, + "grad_norm": 1.1672799587249756, + "learning_rate": 8.978467637103106e-06, + "loss": 0.1454, + "step": 74210 + }, + { + "epoch": 2.8657477122668826, + "grad_norm": 0.5341854095458984, + "learning_rate": 8.952726617501319e-06, + "loss": 0.0753, + "step": 74220 + }, + { + "epoch": 2.8661338275609096, + "grad_norm": 0.19304673373699188, + "learning_rate": 8.926985597899532e-06, + "loss": 0.1962, + "step": 74230 + }, + { + "epoch": 2.8665199428549366, + "grad_norm": 0.33846932649612427, + "learning_rate": 8.901244578297747e-06, + "loss": 0.1089, + "step": 74240 + }, + { + "epoch": 2.866906058148963, + "grad_norm": 1.007706642150879, + "learning_rate": 8.87550355869596e-06, + "loss": 0.186, + "step": 74250 + }, + { + "epoch": 2.86729217344299, + "grad_norm": 1.7284183502197266, + "learning_rate": 8.849762539094174e-06, + "loss": 0.2445, + "step": 74260 + }, + { + "epoch": 2.867678288737017, + "grad_norm": 3.7386040687561035, + "learning_rate": 8.824021519492387e-06, + "loss": 0.1044, + "step": 74270 + }, + { + "epoch": 2.8680644040310437, + "grad_norm": 0.5097699761390686, + "learning_rate": 8.7982804998906e-06, + "loss": 0.1357, + "step": 74280 + }, + { + "epoch": 2.8684505193250702, + "grad_norm": 1.620544195175171, + "learning_rate": 8.772539480288814e-06, + "loss": 0.1567, + "step": 74290 + }, + { + "epoch": 2.868836634619097, + "grad_norm": 0.6678496599197388, + "learning_rate": 8.746798460687027e-06, + "loss": 0.1495, + "step": 74300 + }, + { + "epoch": 2.869222749913124, + "grad_norm": 0.5393665432929993, + "learning_rate": 8.721057441085242e-06, + "loss": 0.1611, + 
"step": 74310 + }, + { + "epoch": 2.8696088652071507, + "grad_norm": 2.98551344871521, + "learning_rate": 8.695316421483456e-06, + "loss": 0.2125, + "step": 74320 + }, + { + "epoch": 2.8699949805011777, + "grad_norm": 0.36715471744537354, + "learning_rate": 8.669575401881669e-06, + "loss": 0.0974, + "step": 74330 + }, + { + "epoch": 2.8703810957952043, + "grad_norm": 2.0747640132904053, + "learning_rate": 8.643834382279882e-06, + "loss": 0.1571, + "step": 74340 + }, + { + "epoch": 2.8707672110892313, + "grad_norm": 3.940426826477051, + "learning_rate": 8.618093362678097e-06, + "loss": 0.1706, + "step": 74350 + }, + { + "epoch": 2.871153326383258, + "grad_norm": 0.07064225524663925, + "learning_rate": 8.59235234307631e-06, + "loss": 0.1104, + "step": 74360 + }, + { + "epoch": 2.871539441677285, + "grad_norm": 2.7614259719848633, + "learning_rate": 8.566611323474522e-06, + "loss": 0.2461, + "step": 74370 + }, + { + "epoch": 2.871925556971312, + "grad_norm": 0.1670389324426651, + "learning_rate": 8.540870303872736e-06, + "loss": 0.0632, + "step": 74380 + }, + { + "epoch": 2.8723116722653383, + "grad_norm": 0.16744600236415863, + "learning_rate": 8.51512928427095e-06, + "loss": 0.0861, + "step": 74390 + }, + { + "epoch": 2.8726977875593653, + "grad_norm": 1.4196829795837402, + "learning_rate": 8.489388264669164e-06, + "loss": 0.1127, + "step": 74400 + }, + { + "epoch": 2.873083902853392, + "grad_norm": 0.9786797165870667, + "learning_rate": 8.463647245067377e-06, + "loss": 0.1925, + "step": 74410 + }, + { + "epoch": 2.873470018147419, + "grad_norm": 1.3412476778030396, + "learning_rate": 8.437906225465592e-06, + "loss": 0.2199, + "step": 74420 + }, + { + "epoch": 2.8738561334414454, + "grad_norm": 0.7068845629692078, + "learning_rate": 8.412165205863805e-06, + "loss": 0.1286, + "step": 74430 + }, + { + "epoch": 2.8742422487354724, + "grad_norm": 0.9745468497276306, + "learning_rate": 8.386424186262019e-06, + "loss": 0.2113, + "step": 74440 + }, + { + "epoch": 2.8746283640294994, + "grad_norm": 1.4095795154571533, + "learning_rate": 8.36068316666023e-06, + "loss": 0.1474, + "step": 74450 + }, + { + "epoch": 2.875014479323526, + "grad_norm": 0.5530760884284973, + "learning_rate": 8.334942147058445e-06, + "loss": 0.1351, + "step": 74460 + }, + { + "epoch": 2.875400594617553, + "grad_norm": 1.8106993436813354, + "learning_rate": 8.309201127456659e-06, + "loss": 0.1643, + "step": 74470 + }, + { + "epoch": 2.8757867099115795, + "grad_norm": 0.3154134154319763, + "learning_rate": 8.283460107854872e-06, + "loss": 0.052, + "step": 74480 + }, + { + "epoch": 2.8761728252056065, + "grad_norm": 0.2410293072462082, + "learning_rate": 8.257719088253087e-06, + "loss": 0.0817, + "step": 74490 + }, + { + "epoch": 2.876558940499633, + "grad_norm": 0.7377256155014038, + "learning_rate": 8.2319780686513e-06, + "loss": 0.1561, + "step": 74500 + }, + { + "epoch": 2.87694505579366, + "grad_norm": 3.0138823986053467, + "learning_rate": 8.206237049049514e-06, + "loss": 0.1963, + "step": 74510 + }, + { + "epoch": 2.877331171087687, + "grad_norm": 2.0430660247802734, + "learning_rate": 8.180496029447727e-06, + "loss": 0.0937, + "step": 74520 + }, + { + "epoch": 2.8777172863817135, + "grad_norm": 0.4840744435787201, + "learning_rate": 8.15475500984594e-06, + "loss": 0.1026, + "step": 74530 + }, + { + "epoch": 2.8781034016757405, + "grad_norm": 1.995553731918335, + "learning_rate": 8.129013990244154e-06, + "loss": 0.1112, + "step": 74540 + }, + { + "epoch": 2.878489516969767, + "grad_norm": 2.139451742172241, + 
"learning_rate": 8.103272970642367e-06, + "loss": 0.2066, + "step": 74550 + }, + { + "epoch": 2.878875632263794, + "grad_norm": 1.6176917552947998, + "learning_rate": 8.077531951040582e-06, + "loss": 0.1588, + "step": 74560 + }, + { + "epoch": 2.8792617475578206, + "grad_norm": 0.965374231338501, + "learning_rate": 8.051790931438795e-06, + "loss": 0.1274, + "step": 74570 + }, + { + "epoch": 2.8796478628518476, + "grad_norm": 2.842817544937134, + "learning_rate": 8.026049911837009e-06, + "loss": 0.1503, + "step": 74580 + }, + { + "epoch": 2.8800339781458746, + "grad_norm": 0.3894018232822418, + "learning_rate": 8.000308892235222e-06, + "loss": 0.1052, + "step": 74590 + }, + { + "epoch": 2.880420093439901, + "grad_norm": 1.3701835870742798, + "learning_rate": 7.974567872633435e-06, + "loss": 0.2016, + "step": 74600 + }, + { + "epoch": 2.8808062087339277, + "grad_norm": 1.6000498533248901, + "learning_rate": 7.948826853031648e-06, + "loss": 0.1766, + "step": 74610 + }, + { + "epoch": 2.8811923240279547, + "grad_norm": 0.3492911159992218, + "learning_rate": 7.923085833429862e-06, + "loss": 0.2132, + "step": 74620 + }, + { + "epoch": 2.8815784393219817, + "grad_norm": 2.516202926635742, + "learning_rate": 7.897344813828077e-06, + "loss": 0.1888, + "step": 74630 + }, + { + "epoch": 2.881964554616008, + "grad_norm": 1.8804141283035278, + "learning_rate": 7.87160379422629e-06, + "loss": 0.2125, + "step": 74640 + }, + { + "epoch": 2.882350669910035, + "grad_norm": 1.4559141397476196, + "learning_rate": 7.845862774624503e-06, + "loss": 0.2118, + "step": 74650 + }, + { + "epoch": 2.882736785204062, + "grad_norm": 1.0181102752685547, + "learning_rate": 7.820121755022717e-06, + "loss": 0.2107, + "step": 74660 + }, + { + "epoch": 2.8831229004980887, + "grad_norm": 0.8501084446907043, + "learning_rate": 7.79438073542093e-06, + "loss": 0.1405, + "step": 74670 + }, + { + "epoch": 2.8835090157921153, + "grad_norm": 0.6537768244743347, + "learning_rate": 7.768639715819143e-06, + "loss": 0.0951, + "step": 74680 + }, + { + "epoch": 2.8838951310861423, + "grad_norm": 1.6745322942733765, + "learning_rate": 7.742898696217357e-06, + "loss": 0.1451, + "step": 74690 + }, + { + "epoch": 2.8842812463801693, + "grad_norm": 0.046329010277986526, + "learning_rate": 7.717157676615572e-06, + "loss": 0.0637, + "step": 74700 + }, + { + "epoch": 2.884667361674196, + "grad_norm": 1.1959903240203857, + "learning_rate": 7.691416657013785e-06, + "loss": 0.1841, + "step": 74710 + }, + { + "epoch": 2.885053476968223, + "grad_norm": 2.5898563861846924, + "learning_rate": 7.665675637411998e-06, + "loss": 0.1309, + "step": 74720 + }, + { + "epoch": 2.8854395922622498, + "grad_norm": 0.05841336399316788, + "learning_rate": 7.639934617810212e-06, + "loss": 0.0825, + "step": 74730 + }, + { + "epoch": 2.8858257075562763, + "grad_norm": 1.5657339096069336, + "learning_rate": 7.614193598208426e-06, + "loss": 0.0816, + "step": 74740 + }, + { + "epoch": 2.886211822850303, + "grad_norm": 1.2975622415542603, + "learning_rate": 7.588452578606639e-06, + "loss": 0.2084, + "step": 74750 + }, + { + "epoch": 2.88659793814433, + "grad_norm": 0.8901941180229187, + "learning_rate": 7.562711559004852e-06, + "loss": 0.0946, + "step": 74760 + }, + { + "epoch": 2.886984053438357, + "grad_norm": 0.13277596235275269, + "learning_rate": 7.5369705394030665e-06, + "loss": 0.1313, + "step": 74770 + }, + { + "epoch": 2.8873701687323834, + "grad_norm": 0.6199597120285034, + "learning_rate": 7.51122951980128e-06, + "loss": 0.1139, + "step": 74780 + }, + { + "epoch": 
2.8877562840264104, + "grad_norm": 0.041490960866212845, + "learning_rate": 7.485488500199493e-06, + "loss": 0.0945, + "step": 74790 + }, + { + "epoch": 2.888142399320437, + "grad_norm": 0.06974820047616959, + "learning_rate": 7.4597474805977065e-06, + "loss": 0.1619, + "step": 74800 + }, + { + "epoch": 2.888528514614464, + "grad_norm": 0.5965213775634766, + "learning_rate": 7.434006460995921e-06, + "loss": 0.1294, + "step": 74810 + }, + { + "epoch": 2.8889146299084905, + "grad_norm": 1.50202214717865, + "learning_rate": 7.408265441394134e-06, + "loss": 0.2038, + "step": 74820 + }, + { + "epoch": 2.8893007452025175, + "grad_norm": 0.2680859863758087, + "learning_rate": 7.382524421792347e-06, + "loss": 0.1566, + "step": 74830 + }, + { + "epoch": 2.8896868604965444, + "grad_norm": 0.8714569211006165, + "learning_rate": 7.3567834021905606e-06, + "loss": 0.1103, + "step": 74840 + }, + { + "epoch": 2.890072975790571, + "grad_norm": 1.0597981214523315, + "learning_rate": 7.331042382588775e-06, + "loss": 0.1141, + "step": 74850 + }, + { + "epoch": 2.890459091084598, + "grad_norm": 1.1880320310592651, + "learning_rate": 7.305301362986988e-06, + "loss": 0.1203, + "step": 74860 + }, + { + "epoch": 2.8908452063786245, + "grad_norm": 0.8900028467178345, + "learning_rate": 7.279560343385201e-06, + "loss": 0.2258, + "step": 74870 + }, + { + "epoch": 2.8912313216726515, + "grad_norm": 0.6879392266273499, + "learning_rate": 7.253819323783416e-06, + "loss": 0.1561, + "step": 74880 + }, + { + "epoch": 2.891617436966678, + "grad_norm": 0.3370697498321533, + "learning_rate": 7.228078304181629e-06, + "loss": 0.2218, + "step": 74890 + }, + { + "epoch": 2.892003552260705, + "grad_norm": 2.0778462886810303, + "learning_rate": 7.202337284579842e-06, + "loss": 0.2195, + "step": 74900 + }, + { + "epoch": 2.892389667554732, + "grad_norm": 0.09895554929971695, + "learning_rate": 7.1765962649780554e-06, + "loss": 0.148, + "step": 74910 + }, + { + "epoch": 2.8927757828487586, + "grad_norm": 3.897048234939575, + "learning_rate": 7.1508552453762704e-06, + "loss": 0.1535, + "step": 74920 + }, + { + "epoch": 2.8931618981427856, + "grad_norm": 0.4604206383228302, + "learning_rate": 7.125114225774483e-06, + "loss": 0.1037, + "step": 74930 + }, + { + "epoch": 2.893548013436812, + "grad_norm": 1.1230722665786743, + "learning_rate": 7.099373206172696e-06, + "loss": 0.2625, + "step": 74940 + }, + { + "epoch": 2.893934128730839, + "grad_norm": 0.0566195473074913, + "learning_rate": 7.073632186570911e-06, + "loss": 0.1807, + "step": 74950 + }, + { + "epoch": 2.8943202440248657, + "grad_norm": 0.08504597842693329, + "learning_rate": 7.0478911669691245e-06, + "loss": 0.1639, + "step": 74960 + }, + { + "epoch": 2.8947063593188926, + "grad_norm": 1.4992380142211914, + "learning_rate": 7.022150147367337e-06, + "loss": 0.219, + "step": 74970 + }, + { + "epoch": 2.8950924746129196, + "grad_norm": 1.0938425064086914, + "learning_rate": 6.99640912776555e-06, + "loss": 0.1467, + "step": 74980 + }, + { + "epoch": 2.895478589906946, + "grad_norm": 2.4818408489227295, + "learning_rate": 6.970668108163765e-06, + "loss": 0.107, + "step": 74990 + }, + { + "epoch": 2.895864705200973, + "grad_norm": 0.18132860958576202, + "learning_rate": 6.944927088561979e-06, + "loss": 0.0966, + "step": 75000 + }, + { + "epoch": 2.8962508204949997, + "grad_norm": 1.1011055707931519, + "learning_rate": 6.919186068960191e-06, + "loss": 0.177, + "step": 75010 + }, + { + "epoch": 2.8966369357890267, + "grad_norm": 0.2061525285243988, + "learning_rate": 
6.893445049358406e-06, + "loss": 0.1528, + "step": 75020 + }, + { + "epoch": 2.8970230510830532, + "grad_norm": 0.651801347732544, + "learning_rate": 6.867704029756619e-06, + "loss": 0.2042, + "step": 75030 + }, + { + "epoch": 2.8974091663770802, + "grad_norm": 0.5397346019744873, + "learning_rate": 6.841963010154833e-06, + "loss": 0.2099, + "step": 75040 + }, + { + "epoch": 2.8977952816711072, + "grad_norm": 2.301893949508667, + "learning_rate": 6.816221990553045e-06, + "loss": 0.107, + "step": 75050 + }, + { + "epoch": 2.8981813969651338, + "grad_norm": 0.7261497378349304, + "learning_rate": 6.79048097095126e-06, + "loss": 0.222, + "step": 75060 + }, + { + "epoch": 2.8985675122591603, + "grad_norm": 2.5633339881896973, + "learning_rate": 6.7647399513494735e-06, + "loss": 0.2573, + "step": 75070 + }, + { + "epoch": 2.8989536275531873, + "grad_norm": 1.5711263418197632, + "learning_rate": 6.738998931747687e-06, + "loss": 0.22, + "step": 75080 + }, + { + "epoch": 2.8993397428472143, + "grad_norm": 0.3720754086971283, + "learning_rate": 6.713257912145901e-06, + "loss": 0.2248, + "step": 75090 + }, + { + "epoch": 2.899725858141241, + "grad_norm": 6.319779396057129, + "learning_rate": 6.687516892544114e-06, + "loss": 0.1005, + "step": 75100 + }, + { + "epoch": 2.900111973435268, + "grad_norm": 0.8868811130523682, + "learning_rate": 6.661775872942328e-06, + "loss": 0.2097, + "step": 75110 + }, + { + "epoch": 2.900498088729295, + "grad_norm": 0.3579706847667694, + "learning_rate": 6.636034853340541e-06, + "loss": 0.1209, + "step": 75120 + }, + { + "epoch": 2.9008842040233214, + "grad_norm": 2.9907472133636475, + "learning_rate": 6.610293833738755e-06, + "loss": 0.1789, + "step": 75130 + }, + { + "epoch": 2.901270319317348, + "grad_norm": 0.06285133957862854, + "learning_rate": 6.584552814136968e-06, + "loss": 0.1437, + "step": 75140 + }, + { + "epoch": 2.901656434611375, + "grad_norm": 1.304377555847168, + "learning_rate": 6.558811794535182e-06, + "loss": 0.1315, + "step": 75150 + }, + { + "epoch": 2.902042549905402, + "grad_norm": 0.7678247690200806, + "learning_rate": 6.533070774933396e-06, + "loss": 0.1163, + "step": 75160 + }, + { + "epoch": 2.9024286651994284, + "grad_norm": 0.30619505047798157, + "learning_rate": 6.507329755331609e-06, + "loss": 0.0777, + "step": 75170 + }, + { + "epoch": 2.9028147804934554, + "grad_norm": 2.499606132507324, + "learning_rate": 6.4815887357298225e-06, + "loss": 0.2395, + "step": 75180 + }, + { + "epoch": 2.9032008957874824, + "grad_norm": 0.9318193793296814, + "learning_rate": 6.455847716128036e-06, + "loss": 0.152, + "step": 75190 + }, + { + "epoch": 2.903587011081509, + "grad_norm": 1.8113256692886353, + "learning_rate": 6.43010669652625e-06, + "loss": 0.1505, + "step": 75200 + }, + { + "epoch": 2.9039731263755355, + "grad_norm": 1.9092323780059814, + "learning_rate": 6.404365676924463e-06, + "loss": 0.2304, + "step": 75210 + }, + { + "epoch": 2.9043592416695625, + "grad_norm": 0.5695223212242126, + "learning_rate": 6.3786246573226765e-06, + "loss": 0.1125, + "step": 75220 + }, + { + "epoch": 2.9047453569635895, + "grad_norm": 2.3485186100006104, + "learning_rate": 6.35288363772089e-06, + "loss": 0.2701, + "step": 75230 + }, + { + "epoch": 2.905131472257616, + "grad_norm": 1.5487502813339233, + "learning_rate": 6.327142618119104e-06, + "loss": 0.1471, + "step": 75240 + }, + { + "epoch": 2.905517587551643, + "grad_norm": 2.2842421531677246, + "learning_rate": 6.301401598517317e-06, + "loss": 0.1211, + "step": 75250 + }, + { + "epoch": 2.9059037028456696, + 
"grad_norm": 0.5499130487442017, + "learning_rate": 6.275660578915531e-06, + "loss": 0.1515, + "step": 75260 + }, + { + "epoch": 2.9062898181396966, + "grad_norm": 0.7656468749046326, + "learning_rate": 6.249919559313745e-06, + "loss": 0.0743, + "step": 75270 + }, + { + "epoch": 2.906675933433723, + "grad_norm": 1.4212474822998047, + "learning_rate": 6.224178539711958e-06, + "loss": 0.1706, + "step": 75280 + }, + { + "epoch": 2.90706204872775, + "grad_norm": 0.5614858269691467, + "learning_rate": 6.1984375201101714e-06, + "loss": 0.1551, + "step": 75290 + }, + { + "epoch": 2.907448164021777, + "grad_norm": 0.2481578141450882, + "learning_rate": 6.172696500508386e-06, + "loss": 0.1594, + "step": 75300 + }, + { + "epoch": 2.9078342793158036, + "grad_norm": 0.774730920791626, + "learning_rate": 6.1469554809066e-06, + "loss": 0.2387, + "step": 75310 + }, + { + "epoch": 2.9082203946098306, + "grad_norm": 0.12540464103221893, + "learning_rate": 6.121214461304812e-06, + "loss": 0.1436, + "step": 75320 + }, + { + "epoch": 2.908606509903857, + "grad_norm": 0.36908024549484253, + "learning_rate": 6.095473441703026e-06, + "loss": 0.1046, + "step": 75330 + }, + { + "epoch": 2.908992625197884, + "grad_norm": 0.5687906742095947, + "learning_rate": 6.06973242210124e-06, + "loss": 0.1454, + "step": 75340 + }, + { + "epoch": 2.9093787404919107, + "grad_norm": 1.373570442199707, + "learning_rate": 6.043991402499454e-06, + "loss": 0.203, + "step": 75350 + }, + { + "epoch": 2.9097648557859377, + "grad_norm": 0.40615278482437134, + "learning_rate": 6.018250382897666e-06, + "loss": 0.0836, + "step": 75360 + }, + { + "epoch": 2.9101509710799647, + "grad_norm": 1.5747426748275757, + "learning_rate": 5.9925093632958805e-06, + "loss": 0.1206, + "step": 75370 + }, + { + "epoch": 2.9105370863739912, + "grad_norm": 0.9298601746559143, + "learning_rate": 5.966768343694094e-06, + "loss": 0.1219, + "step": 75380 + }, + { + "epoch": 2.910923201668018, + "grad_norm": 0.7346408367156982, + "learning_rate": 5.941027324092308e-06, + "loss": 0.1776, + "step": 75390 + }, + { + "epoch": 2.9113093169620448, + "grad_norm": 0.6600155234336853, + "learning_rate": 5.915286304490521e-06, + "loss": 0.1511, + "step": 75400 + }, + { + "epoch": 2.9116954322560717, + "grad_norm": 0.3379638195037842, + "learning_rate": 5.8895452848887345e-06, + "loss": 0.1602, + "step": 75410 + }, + { + "epoch": 2.9120815475500983, + "grad_norm": 0.7085258960723877, + "learning_rate": 5.863804265286949e-06, + "loss": 0.0688, + "step": 75420 + }, + { + "epoch": 2.9124676628441253, + "grad_norm": 0.379912793636322, + "learning_rate": 5.838063245685162e-06, + "loss": 0.2218, + "step": 75430 + }, + { + "epoch": 2.9128537781381523, + "grad_norm": 1.7112829685211182, + "learning_rate": 5.812322226083375e-06, + "loss": 0.0864, + "step": 75440 + }, + { + "epoch": 2.913239893432179, + "grad_norm": 0.9781870245933533, + "learning_rate": 5.786581206481589e-06, + "loss": 0.1837, + "step": 75450 + }, + { + "epoch": 2.913626008726206, + "grad_norm": 0.6391132473945618, + "learning_rate": 5.760840186879803e-06, + "loss": 0.0902, + "step": 75460 + }, + { + "epoch": 2.9140121240202324, + "grad_norm": 1.400060772895813, + "learning_rate": 5.735099167278016e-06, + "loss": 0.1623, + "step": 75470 + }, + { + "epoch": 2.9143982393142593, + "grad_norm": 1.2771530151367188, + "learning_rate": 5.709358147676229e-06, + "loss": 0.1589, + "step": 75480 + }, + { + "epoch": 2.914784354608286, + "grad_norm": 0.37462353706359863, + "learning_rate": 5.683617128074444e-06, + "loss": 0.1241, 
+ "step": 75490 + }, + { + "epoch": 2.915170469902313, + "grad_norm": 1.5162663459777832, + "learning_rate": 5.657876108472657e-06, + "loss": 0.0891, + "step": 75500 + }, + { + "epoch": 2.91555658519634, + "grad_norm": 2.389887809753418, + "learning_rate": 5.63213508887087e-06, + "loss": 0.1397, + "step": 75510 + }, + { + "epoch": 2.9159427004903664, + "grad_norm": 0.4746516942977905, + "learning_rate": 5.6063940692690835e-06, + "loss": 0.1362, + "step": 75520 + }, + { + "epoch": 2.916328815784393, + "grad_norm": 1.1355524063110352, + "learning_rate": 5.580653049667298e-06, + "loss": 0.1772, + "step": 75530 + }, + { + "epoch": 2.91671493107842, + "grad_norm": 0.18736037611961365, + "learning_rate": 5.554912030065512e-06, + "loss": 0.1449, + "step": 75540 + }, + { + "epoch": 2.917101046372447, + "grad_norm": 2.575685501098633, + "learning_rate": 5.529171010463724e-06, + "loss": 0.1871, + "step": 75550 + }, + { + "epoch": 2.9174871616664735, + "grad_norm": 1.01112699508667, + "learning_rate": 5.5034299908619385e-06, + "loss": 0.1588, + "step": 75560 + }, + { + "epoch": 2.9178732769605005, + "grad_norm": 1.8929286003112793, + "learning_rate": 5.477688971260152e-06, + "loss": 0.1564, + "step": 75570 + }, + { + "epoch": 2.9182593922545275, + "grad_norm": 0.21724069118499756, + "learning_rate": 5.451947951658366e-06, + "loss": 0.2152, + "step": 75580 + }, + { + "epoch": 2.918645507548554, + "grad_norm": 1.2024441957473755, + "learning_rate": 5.426206932056578e-06, + "loss": 0.3032, + "step": 75590 + }, + { + "epoch": 2.9190316228425806, + "grad_norm": 0.14324747025966644, + "learning_rate": 5.4004659124547925e-06, + "loss": 0.0674, + "step": 75600 + }, + { + "epoch": 2.9194177381366075, + "grad_norm": 1.427650809288025, + "learning_rate": 5.374724892853006e-06, + "loss": 0.1086, + "step": 75610 + }, + { + "epoch": 2.9198038534306345, + "grad_norm": 1.2401851415634155, + "learning_rate": 5.34898387325122e-06, + "loss": 0.0944, + "step": 75620 + }, + { + "epoch": 2.920189968724661, + "grad_norm": 1.4218640327453613, + "learning_rate": 5.323242853649433e-06, + "loss": 0.1386, + "step": 75630 + }, + { + "epoch": 2.920576084018688, + "grad_norm": 1.0168864727020264, + "learning_rate": 5.297501834047647e-06, + "loss": 0.1291, + "step": 75640 + }, + { + "epoch": 2.9209621993127146, + "grad_norm": 0.2599659860134125, + "learning_rate": 5.271760814445861e-06, + "loss": 0.1055, + "step": 75650 + }, + { + "epoch": 2.9213483146067416, + "grad_norm": 2.1232173442840576, + "learning_rate": 5.246019794844074e-06, + "loss": 0.1521, + "step": 75660 + }, + { + "epoch": 2.921734429900768, + "grad_norm": 0.6988056302070618, + "learning_rate": 5.220278775242287e-06, + "loss": 0.1584, + "step": 75670 + }, + { + "epoch": 2.922120545194795, + "grad_norm": 1.766686201095581, + "learning_rate": 5.194537755640501e-06, + "loss": 0.1888, + "step": 75680 + }, + { + "epoch": 2.922506660488822, + "grad_norm": 1.17173433303833, + "learning_rate": 5.168796736038715e-06, + "loss": 0.1362, + "step": 75690 + }, + { + "epoch": 2.9228927757828487, + "grad_norm": 0.10190659016370773, + "learning_rate": 5.143055716436929e-06, + "loss": 0.1048, + "step": 75700 + }, + { + "epoch": 2.9232788910768757, + "grad_norm": 0.24241623282432556, + "learning_rate": 5.1173146968351415e-06, + "loss": 0.0925, + "step": 75710 + }, + { + "epoch": 2.923665006370902, + "grad_norm": 0.027136487886309624, + "learning_rate": 5.091573677233356e-06, + "loss": 0.1881, + "step": 75720 + }, + { + "epoch": 2.924051121664929, + "grad_norm": 0.7799992561340332, + 
"learning_rate": 5.065832657631569e-06, + "loss": 0.1733, + "step": 75730 + }, + { + "epoch": 2.9244372369589557, + "grad_norm": 0.11681391298770905, + "learning_rate": 5.040091638029783e-06, + "loss": 0.0768, + "step": 75740 + }, + { + "epoch": 2.9248233522529827, + "grad_norm": 3.007784366607666, + "learning_rate": 5.014350618427996e-06, + "loss": 0.2014, + "step": 75750 + }, + { + "epoch": 2.9252094675470097, + "grad_norm": 0.8907320499420166, + "learning_rate": 4.98860959882621e-06, + "loss": 0.3129, + "step": 75760 + }, + { + "epoch": 2.9255955828410363, + "grad_norm": 0.960918128490448, + "learning_rate": 4.962868579224423e-06, + "loss": 0.0904, + "step": 75770 + }, + { + "epoch": 2.9259816981350633, + "grad_norm": 1.1451547145843506, + "learning_rate": 4.937127559622637e-06, + "loss": 0.1966, + "step": 75780 + }, + { + "epoch": 2.92636781342909, + "grad_norm": 0.0598277747631073, + "learning_rate": 4.9113865400208505e-06, + "loss": 0.178, + "step": 75790 + }, + { + "epoch": 2.926753928723117, + "grad_norm": 2.012179374694824, + "learning_rate": 4.885645520419064e-06, + "loss": 0.1208, + "step": 75800 + }, + { + "epoch": 2.9271400440171433, + "grad_norm": 0.29047757387161255, + "learning_rate": 4.859904500817278e-06, + "loss": 0.1803, + "step": 75810 + }, + { + "epoch": 2.9275261593111703, + "grad_norm": 0.23592634499073029, + "learning_rate": 4.834163481215491e-06, + "loss": 0.1307, + "step": 75820 + }, + { + "epoch": 2.9279122746051973, + "grad_norm": 0.6524437069892883, + "learning_rate": 4.808422461613705e-06, + "loss": 0.1102, + "step": 75830 + }, + { + "epoch": 2.928298389899224, + "grad_norm": 4.513589382171631, + "learning_rate": 4.782681442011918e-06, + "loss": 0.3142, + "step": 75840 + }, + { + "epoch": 2.928684505193251, + "grad_norm": 0.2516826093196869, + "learning_rate": 4.756940422410132e-06, + "loss": 0.1691, + "step": 75850 + }, + { + "epoch": 2.9290706204872774, + "grad_norm": 0.05350786820054054, + "learning_rate": 4.731199402808345e-06, + "loss": 0.1919, + "step": 75860 + }, + { + "epoch": 2.9294567357813044, + "grad_norm": 0.23492521047592163, + "learning_rate": 4.705458383206559e-06, + "loss": 0.1481, + "step": 75870 + }, + { + "epoch": 2.929842851075331, + "grad_norm": 1.1959890127182007, + "learning_rate": 4.679717363604773e-06, + "loss": 0.1008, + "step": 75880 + }, + { + "epoch": 2.930228966369358, + "grad_norm": 1.4260644912719727, + "learning_rate": 4.653976344002986e-06, + "loss": 0.0918, + "step": 75890 + }, + { + "epoch": 2.930615081663385, + "grad_norm": 1.236479640007019, + "learning_rate": 4.6282353244011995e-06, + "loss": 0.0863, + "step": 75900 + }, + { + "epoch": 2.9310011969574115, + "grad_norm": 1.4055296182632446, + "learning_rate": 4.602494304799413e-06, + "loss": 0.176, + "step": 75910 + }, + { + "epoch": 2.931387312251438, + "grad_norm": 1.5062698125839233, + "learning_rate": 4.576753285197627e-06, + "loss": 0.1485, + "step": 75920 + }, + { + "epoch": 2.931773427545465, + "grad_norm": 0.586919367313385, + "learning_rate": 4.551012265595841e-06, + "loss": 0.1995, + "step": 75930 + }, + { + "epoch": 2.932159542839492, + "grad_norm": 0.755504310131073, + "learning_rate": 4.525271245994054e-06, + "loss": 0.1654, + "step": 75940 + }, + { + "epoch": 2.9325456581335185, + "grad_norm": 0.12576620280742645, + "learning_rate": 4.499530226392268e-06, + "loss": 0.0948, + "step": 75950 + }, + { + "epoch": 2.9329317734275455, + "grad_norm": 0.9442972540855408, + "learning_rate": 4.473789206790481e-06, + "loss": 0.2244, + "step": 75960 + }, + { + "epoch": 
2.9333178887215725, + "grad_norm": 0.026888804510235786, + "learning_rate": 4.448048187188695e-06, + "loss": 0.0444, + "step": 75970 + }, + { + "epoch": 2.933704004015599, + "grad_norm": 0.789533257484436, + "learning_rate": 4.422307167586908e-06, + "loss": 0.1891, + "step": 75980 + }, + { + "epoch": 2.9340901193096256, + "grad_norm": 1.3214176893234253, + "learning_rate": 4.396566147985122e-06, + "loss": 0.1072, + "step": 75990 + }, + { + "epoch": 2.9344762346036526, + "grad_norm": 1.0256754159927368, + "learning_rate": 4.370825128383335e-06, + "loss": 0.198, + "step": 76000 + }, + { + "epoch": 2.9348623498976796, + "grad_norm": 0.25208428502082825, + "learning_rate": 4.345084108781549e-06, + "loss": 0.1881, + "step": 76010 + }, + { + "epoch": 2.935248465191706, + "grad_norm": 1.5642906427383423, + "learning_rate": 4.319343089179763e-06, + "loss": 0.1642, + "step": 76020 + }, + { + "epoch": 2.935634580485733, + "grad_norm": 0.7479145526885986, + "learning_rate": 4.293602069577976e-06, + "loss": 0.1752, + "step": 76030 + }, + { + "epoch": 2.93602069577976, + "grad_norm": 0.16477616131305695, + "learning_rate": 4.26786104997619e-06, + "loss": 0.1692, + "step": 76040 + }, + { + "epoch": 2.9364068110737866, + "grad_norm": 1.2980380058288574, + "learning_rate": 4.242120030374403e-06, + "loss": 0.0983, + "step": 76050 + }, + { + "epoch": 2.936792926367813, + "grad_norm": 1.4535443782806396, + "learning_rate": 4.216379010772617e-06, + "loss": 0.1804, + "step": 76060 + }, + { + "epoch": 2.93717904166184, + "grad_norm": 0.6782435774803162, + "learning_rate": 4.19063799117083e-06, + "loss": 0.0837, + "step": 76070 + }, + { + "epoch": 2.937565156955867, + "grad_norm": 0.9832270741462708, + "learning_rate": 4.164896971569044e-06, + "loss": 0.2864, + "step": 76080 + }, + { + "epoch": 2.9379512722498937, + "grad_norm": 1.398342251777649, + "learning_rate": 4.1391559519672575e-06, + "loss": 0.1506, + "step": 76090 + }, + { + "epoch": 2.9383373875439207, + "grad_norm": 1.2533401250839233, + "learning_rate": 4.113414932365471e-06, + "loss": 0.1198, + "step": 76100 + }, + { + "epoch": 2.9387235028379473, + "grad_norm": 3.0472609996795654, + "learning_rate": 4.087673912763685e-06, + "loss": 0.1057, + "step": 76110 + }, + { + "epoch": 2.9391096181319742, + "grad_norm": 0.6309196352958679, + "learning_rate": 4.061932893161898e-06, + "loss": 0.043, + "step": 76120 + }, + { + "epoch": 2.939495733426001, + "grad_norm": 0.6146018505096436, + "learning_rate": 4.036191873560112e-06, + "loss": 0.2575, + "step": 76130 + }, + { + "epoch": 2.939881848720028, + "grad_norm": 0.9969425797462463, + "learning_rate": 4.010450853958325e-06, + "loss": 0.0714, + "step": 76140 + }, + { + "epoch": 2.9402679640140548, + "grad_norm": 1.1078910827636719, + "learning_rate": 3.984709834356539e-06, + "loss": 0.058, + "step": 76150 + }, + { + "epoch": 2.9406540793080813, + "grad_norm": 0.45345064997673035, + "learning_rate": 3.958968814754753e-06, + "loss": 0.2731, + "step": 76160 + }, + { + "epoch": 2.9410401946021083, + "grad_norm": 0.6446991562843323, + "learning_rate": 3.9332277951529665e-06, + "loss": 0.0851, + "step": 76170 + }, + { + "epoch": 2.941426309896135, + "grad_norm": 0.5553757548332214, + "learning_rate": 3.90748677555118e-06, + "loss": 0.1686, + "step": 76180 + }, + { + "epoch": 2.941812425190162, + "grad_norm": 0.46511319279670715, + "learning_rate": 3.881745755949393e-06, + "loss": 0.3051, + "step": 76190 + }, + { + "epoch": 2.9421985404841884, + "grad_norm": 0.7858741283416748, + "learning_rate": 
3.856004736347607e-06, + "loss": 0.0879, + "step": 76200 + }, + { + "epoch": 2.9425846557782154, + "grad_norm": 2.4360992908477783, + "learning_rate": 3.830263716745821e-06, + "loss": 0.351, + "step": 76210 + }, + { + "epoch": 2.9429707710722424, + "grad_norm": 0.8845987915992737, + "learning_rate": 3.804522697144034e-06, + "loss": 0.1261, + "step": 76220 + }, + { + "epoch": 2.943356886366269, + "grad_norm": 0.07230502367019653, + "learning_rate": 3.7787816775422473e-06, + "loss": 0.1706, + "step": 76230 + }, + { + "epoch": 2.943743001660296, + "grad_norm": 1.0335034132003784, + "learning_rate": 3.753040657940461e-06, + "loss": 0.1003, + "step": 76240 + }, + { + "epoch": 2.9441291169543224, + "grad_norm": 0.17918971180915833, + "learning_rate": 3.727299638338675e-06, + "loss": 0.1177, + "step": 76250 + }, + { + "epoch": 2.9445152322483494, + "grad_norm": 0.28648892045021057, + "learning_rate": 3.701558618736888e-06, + "loss": 0.1744, + "step": 76260 + }, + { + "epoch": 2.944901347542376, + "grad_norm": 2.5599138736724854, + "learning_rate": 3.675817599135102e-06, + "loss": 0.15, + "step": 76270 + }, + { + "epoch": 2.945287462836403, + "grad_norm": 0.5117394924163818, + "learning_rate": 3.650076579533315e-06, + "loss": 0.1638, + "step": 76280 + }, + { + "epoch": 2.94567357813043, + "grad_norm": 1.0782241821289062, + "learning_rate": 3.6243355599315292e-06, + "loss": 0.326, + "step": 76290 + }, + { + "epoch": 2.9460596934244565, + "grad_norm": 0.049037184566259384, + "learning_rate": 3.598594540329742e-06, + "loss": 0.098, + "step": 76300 + }, + { + "epoch": 2.9464458087184835, + "grad_norm": 0.6411057710647583, + "learning_rate": 3.5728535207279563e-06, + "loss": 0.1082, + "step": 76310 + }, + { + "epoch": 2.94683192401251, + "grad_norm": 0.4438591003417969, + "learning_rate": 3.54711250112617e-06, + "loss": 0.1103, + "step": 76320 + }, + { + "epoch": 2.947218039306537, + "grad_norm": 0.2660674452781677, + "learning_rate": 3.5213714815243833e-06, + "loss": 0.1961, + "step": 76330 + }, + { + "epoch": 2.9476041546005636, + "grad_norm": 2.3970565795898438, + "learning_rate": 3.495630461922597e-06, + "loss": 0.2066, + "step": 76340 + }, + { + "epoch": 2.9479902698945906, + "grad_norm": 0.5123302936553955, + "learning_rate": 3.4698894423208104e-06, + "loss": 0.2231, + "step": 76350 + }, + { + "epoch": 2.9483763851886176, + "grad_norm": 0.026459665969014168, + "learning_rate": 3.444148422719024e-06, + "loss": 0.1865, + "step": 76360 + }, + { + "epoch": 2.948762500482644, + "grad_norm": 1.8527942895889282, + "learning_rate": 3.4184074031172374e-06, + "loss": 0.3425, + "step": 76370 + }, + { + "epoch": 2.9491486157766706, + "grad_norm": 0.3126128315925598, + "learning_rate": 3.392666383515451e-06, + "loss": 0.1047, + "step": 76380 + }, + { + "epoch": 2.9495347310706976, + "grad_norm": 0.14081552624702454, + "learning_rate": 3.3669253639136653e-06, + "loss": 0.1987, + "step": 76390 + }, + { + "epoch": 2.9499208463647246, + "grad_norm": 0.7128289341926575, + "learning_rate": 3.3411843443118782e-06, + "loss": 0.1398, + "step": 76400 + }, + { + "epoch": 2.950306961658751, + "grad_norm": 1.8297266960144043, + "learning_rate": 3.3154433247100924e-06, + "loss": 0.1874, + "step": 76410 + }, + { + "epoch": 2.950693076952778, + "grad_norm": 2.2352466583251953, + "learning_rate": 3.2897023051083053e-06, + "loss": 0.1561, + "step": 76420 + }, + { + "epoch": 2.951079192246805, + "grad_norm": 0.9707785844802856, + "learning_rate": 3.2639612855065194e-06, + "loss": 0.2287, + "step": 76430 + }, + { + "epoch": 
2.9514653075408317, + "grad_norm": 1.408307433128357, + "learning_rate": 3.2382202659047323e-06, + "loss": 0.2413, + "step": 76440 + }, + { + "epoch": 2.9518514228348582, + "grad_norm": 1.706040382385254, + "learning_rate": 3.2124792463029465e-06, + "loss": 0.1153, + "step": 76450 + }, + { + "epoch": 2.9522375381288852, + "grad_norm": 1.6742459535598755, + "learning_rate": 3.1867382267011594e-06, + "loss": 0.1259, + "step": 76460 + }, + { + "epoch": 2.952623653422912, + "grad_norm": 2.4881958961486816, + "learning_rate": 3.1609972070993735e-06, + "loss": 0.2058, + "step": 76470 + }, + { + "epoch": 2.9530097687169388, + "grad_norm": 0.09501784294843674, + "learning_rate": 3.1352561874975872e-06, + "loss": 0.1849, + "step": 76480 + }, + { + "epoch": 2.9533958840109658, + "grad_norm": 0.6468241214752197, + "learning_rate": 3.1095151678958006e-06, + "loss": 0.3005, + "step": 76490 + }, + { + "epoch": 2.9537819993049927, + "grad_norm": 0.3400740921497345, + "learning_rate": 3.083774148294014e-06, + "loss": 0.1335, + "step": 76500 + }, + { + "epoch": 2.9541681145990193, + "grad_norm": 0.841686487197876, + "learning_rate": 3.0580331286922276e-06, + "loss": 0.1571, + "step": 76510 + }, + { + "epoch": 2.954554229893046, + "grad_norm": 0.3212125301361084, + "learning_rate": 3.0322921090904413e-06, + "loss": 0.1652, + "step": 76520 + }, + { + "epoch": 2.954940345187073, + "grad_norm": 1.0158278942108154, + "learning_rate": 3.0065510894886546e-06, + "loss": 0.255, + "step": 76530 + }, + { + "epoch": 2.9553264604811, + "grad_norm": 2.152977466583252, + "learning_rate": 2.9808100698868684e-06, + "loss": 0.2047, + "step": 76540 + }, + { + "epoch": 2.9557125757751264, + "grad_norm": 1.1242973804473877, + "learning_rate": 2.9550690502850817e-06, + "loss": 0.2116, + "step": 76550 + }, + { + "epoch": 2.9560986910691534, + "grad_norm": 2.271522045135498, + "learning_rate": 2.9293280306832954e-06, + "loss": 0.2411, + "step": 76560 + }, + { + "epoch": 2.95648480636318, + "grad_norm": 2.5018372535705566, + "learning_rate": 2.9035870110815087e-06, + "loss": 0.2311, + "step": 76570 + }, + { + "epoch": 2.956870921657207, + "grad_norm": 2.2592756748199463, + "learning_rate": 2.877845991479723e-06, + "loss": 0.1271, + "step": 76580 + }, + { + "epoch": 2.9572570369512334, + "grad_norm": 1.9872472286224365, + "learning_rate": 2.8521049718779362e-06, + "loss": 0.2476, + "step": 76590 + }, + { + "epoch": 2.9576431522452604, + "grad_norm": 0.44302454590797424, + "learning_rate": 2.82636395227615e-06, + "loss": 0.1617, + "step": 76600 + }, + { + "epoch": 2.9580292675392874, + "grad_norm": 0.10741741210222244, + "learning_rate": 2.8006229326743633e-06, + "loss": 0.1844, + "step": 76610 + }, + { + "epoch": 2.958415382833314, + "grad_norm": 0.7484311461448669, + "learning_rate": 2.774881913072577e-06, + "loss": 0.1211, + "step": 76620 + }, + { + "epoch": 2.958801498127341, + "grad_norm": 1.170832633972168, + "learning_rate": 2.7491408934707903e-06, + "loss": 0.2335, + "step": 76630 + }, + { + "epoch": 2.9591876134213675, + "grad_norm": 1.429801344871521, + "learning_rate": 2.723399873869004e-06, + "loss": 0.1425, + "step": 76640 + }, + { + "epoch": 2.9595737287153945, + "grad_norm": 1.5668152570724487, + "learning_rate": 2.6976588542672174e-06, + "loss": 0.0826, + "step": 76650 + }, + { + "epoch": 2.959959844009421, + "grad_norm": 0.4563259482383728, + "learning_rate": 2.6719178346654315e-06, + "loss": 0.1126, + "step": 76660 + }, + { + "epoch": 2.960345959303448, + "grad_norm": 0.0956023558974266, + "learning_rate": 
2.646176815063645e-06, + "loss": 0.0703, + "step": 76670 + }, + { + "epoch": 2.960732074597475, + "grad_norm": 0.34736737608909607, + "learning_rate": 2.6204357954618586e-06, + "loss": 0.2169, + "step": 76680 + }, + { + "epoch": 2.9611181898915016, + "grad_norm": 0.6178987622261047, + "learning_rate": 2.594694775860072e-06, + "loss": 0.1367, + "step": 76690 + }, + { + "epoch": 2.9615043051855285, + "grad_norm": 0.2700929045677185, + "learning_rate": 2.5689537562582856e-06, + "loss": 0.1022, + "step": 76700 + }, + { + "epoch": 2.961890420479555, + "grad_norm": 0.9162507653236389, + "learning_rate": 2.543212736656499e-06, + "loss": 0.1318, + "step": 76710 + }, + { + "epoch": 2.962276535773582, + "grad_norm": 1.0987880229949951, + "learning_rate": 2.5174717170547126e-06, + "loss": 0.1194, + "step": 76720 + }, + { + "epoch": 2.9626626510676086, + "grad_norm": 1.9178944826126099, + "learning_rate": 2.491730697452926e-06, + "loss": 0.1422, + "step": 76730 + }, + { + "epoch": 2.9630487663616356, + "grad_norm": 0.029073640704154968, + "learning_rate": 2.46598967785114e-06, + "loss": 0.1077, + "step": 76740 + }, + { + "epoch": 2.9634348816556626, + "grad_norm": 0.3460249900817871, + "learning_rate": 2.4402486582493534e-06, + "loss": 0.193, + "step": 76750 + }, + { + "epoch": 2.963820996949689, + "grad_norm": 0.08659185469150543, + "learning_rate": 2.414507638647567e-06, + "loss": 0.1638, + "step": 76760 + }, + { + "epoch": 2.964207112243716, + "grad_norm": 0.3209403455257416, + "learning_rate": 2.3887666190457805e-06, + "loss": 0.1265, + "step": 76770 + }, + { + "epoch": 2.9645932275377427, + "grad_norm": 2.6390767097473145, + "learning_rate": 2.363025599443994e-06, + "loss": 0.2551, + "step": 76780 + }, + { + "epoch": 2.9649793428317697, + "grad_norm": 1.400255799293518, + "learning_rate": 2.3372845798422075e-06, + "loss": 0.1893, + "step": 76790 + }, + { + "epoch": 2.965365458125796, + "grad_norm": 1.8555853366851807, + "learning_rate": 2.3115435602404213e-06, + "loss": 0.155, + "step": 76800 + }, + { + "epoch": 2.965751573419823, + "grad_norm": 1.8044438362121582, + "learning_rate": 2.2858025406386346e-06, + "loss": 0.3129, + "step": 76810 + }, + { + "epoch": 2.96613768871385, + "grad_norm": 2.4024667739868164, + "learning_rate": 2.2600615210368483e-06, + "loss": 0.1965, + "step": 76820 + }, + { + "epoch": 2.9665238040078767, + "grad_norm": 2.0369980335235596, + "learning_rate": 2.234320501435062e-06, + "loss": 0.1335, + "step": 76830 + }, + { + "epoch": 2.9669099193019033, + "grad_norm": 1.8668415546417236, + "learning_rate": 2.2085794818332758e-06, + "loss": 0.0668, + "step": 76840 + }, + { + "epoch": 2.9672960345959303, + "grad_norm": 0.18365631997585297, + "learning_rate": 2.182838462231489e-06, + "loss": 0.0758, + "step": 76850 + }, + { + "epoch": 2.9676821498899573, + "grad_norm": 2.2368826866149902, + "learning_rate": 2.157097442629703e-06, + "loss": 0.1002, + "step": 76860 + }, + { + "epoch": 2.968068265183984, + "grad_norm": 1.877583622932434, + "learning_rate": 2.131356423027916e-06, + "loss": 0.1702, + "step": 76870 + }, + { + "epoch": 2.968454380478011, + "grad_norm": 0.635735273361206, + "learning_rate": 2.10561540342613e-06, + "loss": 0.1851, + "step": 76880 + }, + { + "epoch": 2.968840495772038, + "grad_norm": 2.5163917541503906, + "learning_rate": 2.0798743838243436e-06, + "loss": 0.0917, + "step": 76890 + }, + { + "epoch": 2.9692266110660643, + "grad_norm": 0.0662187710404396, + "learning_rate": 2.054133364222557e-06, + "loss": 0.0544, + "step": 76900 + }, + { + "epoch": 
2.969612726360091, + "grad_norm": 0.8235715627670288, + "learning_rate": 2.0283923446207706e-06, + "loss": 0.1892, + "step": 76910 + }, + { + "epoch": 2.969998841654118, + "grad_norm": 0.11744660139083862, + "learning_rate": 2.002651325018984e-06, + "loss": 0.0543, + "step": 76920 + }, + { + "epoch": 2.970384956948145, + "grad_norm": 2.3874759674072266, + "learning_rate": 1.9769103054171977e-06, + "loss": 0.1903, + "step": 76930 + }, + { + "epoch": 2.9707710722421714, + "grad_norm": 1.0060522556304932, + "learning_rate": 1.951169285815411e-06, + "loss": 0.1119, + "step": 76940 + }, + { + "epoch": 2.9711571875361984, + "grad_norm": 1.8163440227508545, + "learning_rate": 1.9254282662136247e-06, + "loss": 0.1515, + "step": 76950 + }, + { + "epoch": 2.971543302830225, + "grad_norm": 0.9836590886116028, + "learning_rate": 1.8996872466118383e-06, + "loss": 0.1354, + "step": 76960 + }, + { + "epoch": 2.971929418124252, + "grad_norm": 0.4806680679321289, + "learning_rate": 1.8739462270100522e-06, + "loss": 0.1058, + "step": 76970 + }, + { + "epoch": 2.9723155334182785, + "grad_norm": 0.043541885912418365, + "learning_rate": 1.8482052074082657e-06, + "loss": 0.1007, + "step": 76980 + }, + { + "epoch": 2.9727016487123055, + "grad_norm": 0.2944481074810028, + "learning_rate": 1.8224641878064793e-06, + "loss": 0.1376, + "step": 76990 + }, + { + "epoch": 2.9730877640063325, + "grad_norm": 1.73750901222229, + "learning_rate": 1.7967231682046928e-06, + "loss": 0.1875, + "step": 77000 + } + ], + "logging_steps": 10, + "max_steps": 77697, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 4.688975757312e+16, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/training_args.bin b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..51ec8c074aec62c737bdc86f3c7f1d33bd5739dc Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77000/training_args.bin differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/added_tokens.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/added_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..ea36ca9a30d42cfe00f964ed2b450595386671dc --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/added_tokens.json @@ -0,0 +1,98 @@ +{ + "": 36095, + "": 36085, + "": 36084, + "": 36083, + "": 36082, + "": 36081, + "": 36080, + "": 36079, + "": 36078, + "": 36077, + "": 36076, + "": 36094, + "": 36075, + "": 36074, + "": 36073, + "": 36072, + "": 36071, + "": 36070, + "": 36069, + "": 36068, + "": 36067, + "": 36066, + "": 36093, + "": 36065, + "": 36064, + "": 36063, + "": 36062, + "": 36061, + "": 36060, + "": 36059, + "": 36058, + "": 36057, + "": 36056, + "": 36092, + "": 36055, + "": 36054, + "": 36053, + "": 36052, + "": 36051, + "": 36050, + "": 36049, + "": 36048, + "": 36047, + "": 36046, + "": 36091, + "": 36045, + "": 36044, + "": 36043, + "": 36042, + "": 36041, + "": 36040, + "": 36039, + "": 36038, + "": 36037, + "": 36036, + "": 36090, + "": 36035, + "": 36034, + "": 36033, + "": 36032, + "": 36031, + "": 36030, + "": 36029, + "": 36028, + "": 36027, + "": 36026, + "": 36089, + "": 36025, + 
"": 36024, + "": 36023, + "": 36022, + "": 36021, + "": 36020, + "": 36019, + "": 36018, + "": 36017, + "": 36016, + "": 36088, + "": 36015, + "": 36014, + "": 36013, + "": 36012, + "": 36011, + "": 36010, + "": 36009, + "": 36008, + "": 36007, + "": 36006, + "": 36087, + "": 36005, + "": 36004, + "": 36003, + "": 36002, + "": 36001, + "": 36000, + "": 36086 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/config.json new file mode 100644 index 0000000000000000000000000000000000000000..402574816dbd44d7c00b58882614b0cba909b7da --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/config.json @@ -0,0 +1,31 @@ +{ + "architectures": [ + "T5ForConditionalGeneration" + ], + "classifier_dropout": 0.0, + "d_ff": 3072, + "d_kv": 64, + "d_model": 768, + "decoder_start_token_id": 0, + "dense_act_fn": "relu", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "relu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": false, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "n_positions": 512, + "num_decoder_layers": 12, + "num_heads": 12, + "num_layers": 12, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "torch_dtype": "float32", + "transformers_version": "4.53.2", + "use_cache": true, + "vocab_size": 36096 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/generation_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..88a411aa38dffcace543088cff3153003de6e01a --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/generation_config.json @@ -0,0 +1,7 @@ +{ + "_from_model_config": true, + "decoder_start_token_id": 0, + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.53.2" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/model.safetensors b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..84b49805011bb72a48280aaf5ab51abb4785e7dc --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:707c4d3761c9b48f671307ba2646dd5b3c46d3a5ac8f4a0de1647ea7a34ffb93 +size 903834408 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/optimizer.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..07eb462bc57354096405479e1d4c168c56671bfc --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:881fb8a1e5c6d8de16cb6688cfa1a0b68fd2014b603b87166746545b827cf5f2 +size 1807824651 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/rng_state.pth b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..d3ef3738343d56d6d44faaf0134d1702788b2ce0 Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/rng_state.pth differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/scheduler.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..7e85e48cd655a7146ee6fa9314810593606e0489 Binary files /dev/null and 
b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/scheduler.pt differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/special_tokens_map.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..843b3344f47b1783c48b5ac91bb6015ae9d3c4be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/special_tokens_map.json @@ -0,0 +1,121 @@ +{ + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/spiece.model b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/spiece.model new file mode 100644 index 0000000000000000000000000000000000000000..f8bddaf892bdf23d2148f3a3b358f16c5c45c7be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/spiece.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59986b62f9f0b90edafb9b073ea7b93d21114a5841219a1ea2399ade73f729c6 +size 820370 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/tokenizer_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7da9aea82b39809d9fbe6214e6f4fa2340b695f8 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/tokenizer_config.json @@ -0,0 +1,905 @@ +{ + "add_prefix_space": true, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "36000": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36001": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36002": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36003": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36004": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36005": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36006": { + 
"content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36007": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36008": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36009": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36010": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36011": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36012": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36013": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36014": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36015": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36016": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36017": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36018": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36019": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36020": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36021": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36022": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36023": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36024": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36025": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36026": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36027": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36028": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36029": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36030": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36031": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36032": { + "content": "", + 
"lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36033": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36034": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36035": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36036": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36037": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36038": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36039": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36040": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36041": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36042": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36043": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36044": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36045": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36046": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36047": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36048": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36049": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36050": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36051": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36052": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36053": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36054": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36055": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36056": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36057": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36058": { + "content": "", + "lstrip": true, + 
"normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36059": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36060": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36061": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36062": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36063": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36064": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36065": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36066": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36067": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36068": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36069": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36070": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36071": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36072": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36073": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36074": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36075": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36076": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36077": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36078": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36079": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36080": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36081": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36082": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36083": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36084": { + "content": "", + "lstrip": true, + "normalized": 
false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36085": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36086": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36087": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36088": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36089": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36090": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36091": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36092": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36093": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36094": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36095": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_ids": 96, + "extra_special_tokens": {}, + "legacy": true, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "tokenizer_class": "T5Tokenizer", + "unk_token": "" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/trainer_state.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..aa84882c8d432e33320b255aca371d072e9814ba --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/trainer_state.json @@ -0,0 +1,54284 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.992393528707672, + "eval_steps": 500, + "global_step": 77500, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0003861152940267964, + "grad_norm": 8.003422737121582, + "learning_rate": 0.0001999768330823584, + "loss": 5.0107, + "step": 10 + }, + { + "epoch": 0.0007722305880535929, + "grad_norm": 11.965606689453125, + "learning_rate": 0.0001999510920627566, + "loss": 0.9013, + "step": 20 + }, + { + "epoch": 0.0011583458820803893, + "grad_norm": 9.783374786376953, + "learning_rate": 0.00019992535104315483, + "loss": 0.5506, + "step": 30 + 
}, + { + "epoch": 0.0015444611761071857, + "grad_norm": 5.720436096191406, + "learning_rate": 0.00019989961002355304, + "loss": 0.501, + "step": 40 + }, + { + "epoch": 0.001930576470133982, + "grad_norm": 11.18126392364502, + "learning_rate": 0.00019987386900395125, + "loss": 0.8176, + "step": 50 + }, + { + "epoch": 0.0023166917641607786, + "grad_norm": 8.88875675201416, + "learning_rate": 0.00019984812798434947, + "loss": 0.5404, + "step": 60 + }, + { + "epoch": 0.0027028070581875748, + "grad_norm": 2.9886066913604736, + "learning_rate": 0.00019982238696474768, + "loss": 0.5295, + "step": 70 + }, + { + "epoch": 0.0030889223522143714, + "grad_norm": 8.936307907104492, + "learning_rate": 0.0001997966459451459, + "loss": 0.7398, + "step": 80 + }, + { + "epoch": 0.0034750376462411676, + "grad_norm": 11.393534660339355, + "learning_rate": 0.0001997709049255441, + "loss": 0.6333, + "step": 90 + }, + { + "epoch": 0.003861152940267964, + "grad_norm": 8.725994110107422, + "learning_rate": 0.00019974516390594235, + "loss": 0.4689, + "step": 100 + }, + { + "epoch": 0.00424726823429476, + "grad_norm": 20.316652297973633, + "learning_rate": 0.00019971942288634053, + "loss": 0.8522, + "step": 110 + }, + { + "epoch": 0.004633383528321557, + "grad_norm": 4.232663631439209, + "learning_rate": 0.00019969368186673875, + "loss": 0.5041, + "step": 120 + }, + { + "epoch": 0.005019498822348353, + "grad_norm": 2.609255313873291, + "learning_rate": 0.00019966794084713696, + "loss": 0.7439, + "step": 130 + }, + { + "epoch": 0.0054056141163751495, + "grad_norm": 10.063919067382812, + "learning_rate": 0.00019964219982753517, + "loss": 0.6702, + "step": 140 + }, + { + "epoch": 0.005791729410401946, + "grad_norm": 5.799802303314209, + "learning_rate": 0.00019961645880793339, + "loss": 0.7356, + "step": 150 + }, + { + "epoch": 0.006177844704428743, + "grad_norm": 12.664258003234863, + "learning_rate": 0.0001995907177883316, + "loss": 0.4067, + "step": 160 + }, + { + "epoch": 0.006563959998455539, + "grad_norm": 9.42366886138916, + "learning_rate": 0.00019956497676872984, + "loss": 0.4767, + "step": 170 + }, + { + "epoch": 0.006950075292482335, + "grad_norm": 5.382272243499756, + "learning_rate": 0.00019953923574912803, + "loss": 0.536, + "step": 180 + }, + { + "epoch": 0.0073361905865091314, + "grad_norm": 9.797371864318848, + "learning_rate": 0.00019951349472952624, + "loss": 0.4735, + "step": 190 + }, + { + "epoch": 0.007722305880535928, + "grad_norm": 7.965329647064209, + "learning_rate": 0.00019948775370992445, + "loss": 0.3881, + "step": 200 + }, + { + "epoch": 0.008108421174562725, + "grad_norm": 4.075791835784912, + "learning_rate": 0.00019946201269032267, + "loss": 0.5564, + "step": 210 + }, + { + "epoch": 0.00849453646858952, + "grad_norm": 24.367305755615234, + "learning_rate": 0.0001994362716707209, + "loss": 0.9795, + "step": 220 + }, + { + "epoch": 0.008880651762616317, + "grad_norm": 9.627866744995117, + "learning_rate": 0.0001994105306511191, + "loss": 0.4528, + "step": 230 + }, + { + "epoch": 0.009266767056643114, + "grad_norm": 7.469555854797363, + "learning_rate": 0.00019938478963151733, + "loss": 0.447, + "step": 240 + }, + { + "epoch": 0.00965288235066991, + "grad_norm": 7.426730155944824, + "learning_rate": 0.00019935904861191552, + "loss": 0.6026, + "step": 250 + }, + { + "epoch": 0.010038997644696707, + "grad_norm": 6.999317169189453, + "learning_rate": 0.00019933330759231373, + "loss": 0.4962, + "step": 260 + }, + { + "epoch": 0.010425112938723502, + "grad_norm": 10.492286682128906, + 
"learning_rate": 0.00019930756657271194, + "loss": 0.7987, + "step": 270 + }, + { + "epoch": 0.010811228232750299, + "grad_norm": 7.079407215118408, + "learning_rate": 0.00019928182555311016, + "loss": 0.4395, + "step": 280 + }, + { + "epoch": 0.011197343526777096, + "grad_norm": 9.610014915466309, + "learning_rate": 0.0001992560845335084, + "loss": 0.748, + "step": 290 + }, + { + "epoch": 0.011583458820803891, + "grad_norm": 5.993048667907715, + "learning_rate": 0.00019923034351390658, + "loss": 0.4328, + "step": 300 + }, + { + "epoch": 0.011969574114830689, + "grad_norm": 7.336791515350342, + "learning_rate": 0.00019920460249430483, + "loss": 0.4104, + "step": 310 + }, + { + "epoch": 0.012355689408857486, + "grad_norm": 7.967221736907959, + "learning_rate": 0.000199178861474703, + "loss": 0.4662, + "step": 320 + }, + { + "epoch": 0.012741804702884281, + "grad_norm": 4.464987754821777, + "learning_rate": 0.00019915312045510125, + "loss": 0.725, + "step": 330 + }, + { + "epoch": 0.013127919996911078, + "grad_norm": 8.669449806213379, + "learning_rate": 0.00019912737943549944, + "loss": 0.4256, + "step": 340 + }, + { + "epoch": 0.013514035290937873, + "grad_norm": 4.114014148712158, + "learning_rate": 0.00019910163841589765, + "loss": 0.4477, + "step": 350 + }, + { + "epoch": 0.01390015058496467, + "grad_norm": 9.254106521606445, + "learning_rate": 0.0001990758973962959, + "loss": 0.514, + "step": 360 + }, + { + "epoch": 0.014286265878991468, + "grad_norm": 0.8039970993995667, + "learning_rate": 0.00019905015637669408, + "loss": 0.5802, + "step": 370 + }, + { + "epoch": 0.014672381173018263, + "grad_norm": 3.9931838512420654, + "learning_rate": 0.00019902441535709232, + "loss": 0.8973, + "step": 380 + }, + { + "epoch": 0.01505849646704506, + "grad_norm": 1.7645355463027954, + "learning_rate": 0.0001989986743374905, + "loss": 0.7108, + "step": 390 + }, + { + "epoch": 0.015444611761071855, + "grad_norm": 6.8542866706848145, + "learning_rate": 0.00019897293331788875, + "loss": 0.5796, + "step": 400 + }, + { + "epoch": 0.015830727055098654, + "grad_norm": 5.278103828430176, + "learning_rate": 0.00019894719229828696, + "loss": 0.3841, + "step": 410 + }, + { + "epoch": 0.01621684234912545, + "grad_norm": 9.00206184387207, + "learning_rate": 0.00019892145127868514, + "loss": 0.5891, + "step": 420 + }, + { + "epoch": 0.016602957643152245, + "grad_norm": 7.684702396392822, + "learning_rate": 0.00019889571025908339, + "loss": 0.4868, + "step": 430 + }, + { + "epoch": 0.01698907293717904, + "grad_norm": 4.198502540588379, + "learning_rate": 0.00019886996923948157, + "loss": 0.571, + "step": 440 + }, + { + "epoch": 0.01737518823120584, + "grad_norm": 7.454501628875732, + "learning_rate": 0.0001988442282198798, + "loss": 0.5133, + "step": 450 + }, + { + "epoch": 0.017761303525232634, + "grad_norm": 13.236722946166992, + "learning_rate": 0.000198818487200278, + "loss": 0.4139, + "step": 460 + }, + { + "epoch": 0.01814741881925943, + "grad_norm": 6.4592390060424805, + "learning_rate": 0.00019879274618067624, + "loss": 0.6078, + "step": 470 + }, + { + "epoch": 0.01853353411328623, + "grad_norm": 11.73417854309082, + "learning_rate": 0.00019876700516107445, + "loss": 0.5472, + "step": 480 + }, + { + "epoch": 0.018919649407313024, + "grad_norm": 2.5162808895111084, + "learning_rate": 0.00019874126414147264, + "loss": 0.6611, + "step": 490 + }, + { + "epoch": 0.01930576470133982, + "grad_norm": 4.9637837409973145, + "learning_rate": 0.00019871552312187088, + "loss": 0.6472, + "step": 500 + }, + { + "epoch": 
0.019691879995366618, + "grad_norm": 11.545489311218262, + "learning_rate": 0.00019868978210226906, + "loss": 0.5304, + "step": 510 + }, + { + "epoch": 0.020077995289393413, + "grad_norm": 5.197858810424805, + "learning_rate": 0.0001986640410826673, + "loss": 0.605, + "step": 520 + }, + { + "epoch": 0.02046411058342021, + "grad_norm": 4.935055255889893, + "learning_rate": 0.0001986383000630655, + "loss": 0.6524, + "step": 530 + }, + { + "epoch": 0.020850225877447004, + "grad_norm": 5.838052749633789, + "learning_rate": 0.00019861255904346373, + "loss": 0.4957, + "step": 540 + }, + { + "epoch": 0.021236341171473803, + "grad_norm": 4.682408809661865, + "learning_rate": 0.00019858681802386194, + "loss": 0.8523, + "step": 550 + }, + { + "epoch": 0.021622456465500598, + "grad_norm": 10.720857620239258, + "learning_rate": 0.00019856107700426013, + "loss": 0.516, + "step": 560 + }, + { + "epoch": 0.022008571759527393, + "grad_norm": 6.515562534332275, + "learning_rate": 0.00019853533598465837, + "loss": 0.6095, + "step": 570 + }, + { + "epoch": 0.022394687053554192, + "grad_norm": 3.204960584640503, + "learning_rate": 0.00019850959496505656, + "loss": 0.6624, + "step": 580 + }, + { + "epoch": 0.022780802347580988, + "grad_norm": 2.305497884750366, + "learning_rate": 0.0001984838539454548, + "loss": 0.5986, + "step": 590 + }, + { + "epoch": 0.023166917641607783, + "grad_norm": 13.07105541229248, + "learning_rate": 0.000198458112925853, + "loss": 0.337, + "step": 600 + }, + { + "epoch": 0.023553032935634582, + "grad_norm": 3.1491329669952393, + "learning_rate": 0.00019843237190625122, + "loss": 0.5466, + "step": 610 + }, + { + "epoch": 0.023939148229661377, + "grad_norm": 1.92014479637146, + "learning_rate": 0.00019840663088664944, + "loss": 0.5525, + "step": 620 + }, + { + "epoch": 0.024325263523688172, + "grad_norm": 2.206550121307373, + "learning_rate": 0.00019838088986704762, + "loss": 0.5069, + "step": 630 + }, + { + "epoch": 0.02471137881771497, + "grad_norm": 2.386288642883301, + "learning_rate": 0.00019835514884744586, + "loss": 0.3749, + "step": 640 + }, + { + "epoch": 0.025097494111741767, + "grad_norm": 7.393959045410156, + "learning_rate": 0.00019832940782784405, + "loss": 0.7144, + "step": 650 + }, + { + "epoch": 0.025483609405768562, + "grad_norm": 5.7293171882629395, + "learning_rate": 0.0001983036668082423, + "loss": 0.5052, + "step": 660 + }, + { + "epoch": 0.025869724699795357, + "grad_norm": 6.440220832824707, + "learning_rate": 0.0001982779257886405, + "loss": 0.4343, + "step": 670 + }, + { + "epoch": 0.026255839993822156, + "grad_norm": 0.8553487658500671, + "learning_rate": 0.00019825218476903872, + "loss": 0.7562, + "step": 680 + }, + { + "epoch": 0.02664195528784895, + "grad_norm": 3.762784719467163, + "learning_rate": 0.00019822644374943693, + "loss": 0.5593, + "step": 690 + }, + { + "epoch": 0.027028070581875747, + "grad_norm": 11.674392700195312, + "learning_rate": 0.00019820070272983512, + "loss": 0.6069, + "step": 700 + }, + { + "epoch": 0.027414185875902546, + "grad_norm": 8.631232261657715, + "learning_rate": 0.00019817496171023336, + "loss": 0.3584, + "step": 710 + }, + { + "epoch": 0.02780030116992934, + "grad_norm": 5.7163920402526855, + "learning_rate": 0.00019814922069063157, + "loss": 0.5563, + "step": 720 + }, + { + "epoch": 0.028186416463956136, + "grad_norm": 8.186172485351562, + "learning_rate": 0.00019812347967102978, + "loss": 0.5289, + "step": 730 + }, + { + "epoch": 0.028572531757982935, + "grad_norm": 7.287814140319824, + "learning_rate": 
0.000198097738651428, + "loss": 0.4543, + "step": 740 + }, + { + "epoch": 0.02895864705200973, + "grad_norm": 6.621245384216309, + "learning_rate": 0.0001980719976318262, + "loss": 0.3244, + "step": 750 + }, + { + "epoch": 0.029344762346036526, + "grad_norm": 3.5209403038024902, + "learning_rate": 0.00019804625661222442, + "loss": 0.5385, + "step": 760 + }, + { + "epoch": 0.029730877640063325, + "grad_norm": 2.562343120574951, + "learning_rate": 0.00019802051559262264, + "loss": 0.4868, + "step": 770 + }, + { + "epoch": 0.03011699293409012, + "grad_norm": 7.782780647277832, + "learning_rate": 0.00019799477457302085, + "loss": 0.5682, + "step": 780 + }, + { + "epoch": 0.030503108228116915, + "grad_norm": 8.173531532287598, + "learning_rate": 0.00019796903355341906, + "loss": 0.3557, + "step": 790 + }, + { + "epoch": 0.03088922352214371, + "grad_norm": 4.502675533294678, + "learning_rate": 0.00019794329253381728, + "loss": 0.517, + "step": 800 + }, + { + "epoch": 0.031275338816170506, + "grad_norm": 6.314894199371338, + "learning_rate": 0.0001979175515142155, + "loss": 0.6392, + "step": 810 + }, + { + "epoch": 0.03166145411019731, + "grad_norm": 7.054763317108154, + "learning_rate": 0.0001978918104946137, + "loss": 0.5823, + "step": 820 + }, + { + "epoch": 0.032047569404224104, + "grad_norm": 1.7847551107406616, + "learning_rate": 0.00019786606947501192, + "loss": 0.4495, + "step": 830 + }, + { + "epoch": 0.0324336846982509, + "grad_norm": 5.268612861633301, + "learning_rate": 0.00019784032845541013, + "loss": 0.4379, + "step": 840 + }, + { + "epoch": 0.032819799992277694, + "grad_norm": 4.834717273712158, + "learning_rate": 0.00019781458743580834, + "loss": 0.5379, + "step": 850 + }, + { + "epoch": 0.03320591528630449, + "grad_norm": 6.077203273773193, + "learning_rate": 0.00019778884641620656, + "loss": 0.5666, + "step": 860 + }, + { + "epoch": 0.033592030580331285, + "grad_norm": 0.9583851099014282, + "learning_rate": 0.00019776310539660477, + "loss": 0.8146, + "step": 870 + }, + { + "epoch": 0.03397814587435808, + "grad_norm": 0.8457469940185547, + "learning_rate": 0.00019773736437700298, + "loss": 0.3497, + "step": 880 + }, + { + "epoch": 0.03436426116838488, + "grad_norm": 5.251153945922852, + "learning_rate": 0.0001977116233574012, + "loss": 0.6299, + "step": 890 + }, + { + "epoch": 0.03475037646241168, + "grad_norm": 4.057605266571045, + "learning_rate": 0.0001976858823377994, + "loss": 0.5829, + "step": 900 + }, + { + "epoch": 0.03513649175643847, + "grad_norm": 7.625199794769287, + "learning_rate": 0.00019766014131819762, + "loss": 0.452, + "step": 910 + }, + { + "epoch": 0.03552260705046527, + "grad_norm": 4.618866443634033, + "learning_rate": 0.00019763440029859584, + "loss": 0.5693, + "step": 920 + }, + { + "epoch": 0.035908722344492064, + "grad_norm": 8.480955123901367, + "learning_rate": 0.00019760865927899405, + "loss": 0.4894, + "step": 930 + }, + { + "epoch": 0.03629483763851886, + "grad_norm": 6.537581920623779, + "learning_rate": 0.00019758291825939226, + "loss": 0.7396, + "step": 940 + }, + { + "epoch": 0.03668095293254566, + "grad_norm": 8.093205451965332, + "learning_rate": 0.00019755717723979048, + "loss": 0.3702, + "step": 950 + }, + { + "epoch": 0.03706706822657246, + "grad_norm": 2.523141860961914, + "learning_rate": 0.0001975314362201887, + "loss": 0.3746, + "step": 960 + }, + { + "epoch": 0.03745318352059925, + "grad_norm": 1.2707194089889526, + "learning_rate": 0.0001975056952005869, + "loss": 0.3211, + "step": 970 + }, + { + "epoch": 0.03783929881462605, + 
"grad_norm": 2.818798780441284, + "learning_rate": 0.00019747995418098512, + "loss": 0.6594, + "step": 980 + }, + { + "epoch": 0.03822541410865284, + "grad_norm": 7.373154640197754, + "learning_rate": 0.00019745421316138333, + "loss": 0.7825, + "step": 990 + }, + { + "epoch": 0.03861152940267964, + "grad_norm": 2.8925669193267822, + "learning_rate": 0.00019742847214178154, + "loss": 0.4847, + "step": 1000 + }, + { + "epoch": 0.038997644696706434, + "grad_norm": 10.87833023071289, + "learning_rate": 0.00019740273112217976, + "loss": 0.5098, + "step": 1010 + }, + { + "epoch": 0.039383759990733236, + "grad_norm": 3.8262317180633545, + "learning_rate": 0.00019737699010257797, + "loss": 0.5168, + "step": 1020 + }, + { + "epoch": 0.03976987528476003, + "grad_norm": 2.5567004680633545, + "learning_rate": 0.00019735124908297618, + "loss": 0.5597, + "step": 1030 + }, + { + "epoch": 0.04015599057878683, + "grad_norm": 4.385695457458496, + "learning_rate": 0.0001973255080633744, + "loss": 0.3187, + "step": 1040 + }, + { + "epoch": 0.04054210587281362, + "grad_norm": 1.8186907768249512, + "learning_rate": 0.0001972997670437726, + "loss": 0.6274, + "step": 1050 + }, + { + "epoch": 0.04092822116684042, + "grad_norm": 7.446481704711914, + "learning_rate": 0.00019727402602417082, + "loss": 0.4365, + "step": 1060 + }, + { + "epoch": 0.04131433646086721, + "grad_norm": 8.973576545715332, + "learning_rate": 0.00019724828500456904, + "loss": 0.6518, + "step": 1070 + }, + { + "epoch": 0.04170045175489401, + "grad_norm": 4.984101295471191, + "learning_rate": 0.00019722254398496725, + "loss": 0.4694, + "step": 1080 + }, + { + "epoch": 0.04208656704892081, + "grad_norm": 8.2625093460083, + "learning_rate": 0.00019719680296536546, + "loss": 0.5532, + "step": 1090 + }, + { + "epoch": 0.042472682342947606, + "grad_norm": 2.2425265312194824, + "learning_rate": 0.0001971710619457637, + "loss": 0.4011, + "step": 1100 + }, + { + "epoch": 0.0428587976369744, + "grad_norm": 1.4552969932556152, + "learning_rate": 0.0001971453209261619, + "loss": 0.396, + "step": 1110 + }, + { + "epoch": 0.043244912931001196, + "grad_norm": 2.0675857067108154, + "learning_rate": 0.0001971195799065601, + "loss": 0.3727, + "step": 1120 + }, + { + "epoch": 0.04363102822502799, + "grad_norm": 5.24460506439209, + "learning_rate": 0.00019709383888695832, + "loss": 0.5016, + "step": 1130 + }, + { + "epoch": 0.04401714351905479, + "grad_norm": 5.524955749511719, + "learning_rate": 0.00019706809786735653, + "loss": 0.5866, + "step": 1140 + }, + { + "epoch": 0.04440325881308159, + "grad_norm": 4.2749199867248535, + "learning_rate": 0.00019704235684775474, + "loss": 0.4591, + "step": 1150 + }, + { + "epoch": 0.044789374107108385, + "grad_norm": 3.460395336151123, + "learning_rate": 0.00019701661582815296, + "loss": 0.5275, + "step": 1160 + }, + { + "epoch": 0.04517548940113518, + "grad_norm": 1.3979772329330444, + "learning_rate": 0.0001969908748085512, + "loss": 0.3375, + "step": 1170 + }, + { + "epoch": 0.045561604695161975, + "grad_norm": 2.29923939704895, + "learning_rate": 0.00019696513378894938, + "loss": 0.5683, + "step": 1180 + }, + { + "epoch": 0.04594771998918877, + "grad_norm": 3.211496353149414, + "learning_rate": 0.0001969393927693476, + "loss": 0.7122, + "step": 1190 + }, + { + "epoch": 0.046333835283215566, + "grad_norm": 4.18447208404541, + "learning_rate": 0.0001969136517497458, + "loss": 0.5149, + "step": 1200 + }, + { + "epoch": 0.04671995057724236, + "grad_norm": 14.650918960571289, + "learning_rate": 0.00019688791073014402, + 
"loss": 0.6384, + "step": 1210 + }, + { + "epoch": 0.047106065871269164, + "grad_norm": 4.956239700317383, + "learning_rate": 0.00019686216971054226, + "loss": 0.3602, + "step": 1220 + }, + { + "epoch": 0.04749218116529596, + "grad_norm": 4.0486860275268555, + "learning_rate": 0.00019683642869094045, + "loss": 0.7719, + "step": 1230 + }, + { + "epoch": 0.047878296459322754, + "grad_norm": 4.587133407592773, + "learning_rate": 0.0001968106876713387, + "loss": 0.4079, + "step": 1240 + }, + { + "epoch": 0.04826441175334955, + "grad_norm": 0.7830008268356323, + "learning_rate": 0.00019678494665173688, + "loss": 0.5841, + "step": 1250 + }, + { + "epoch": 0.048650527047376345, + "grad_norm": 5.378068447113037, + "learning_rate": 0.0001967592056321351, + "loss": 0.3226, + "step": 1260 + }, + { + "epoch": 0.04903664234140314, + "grad_norm": 4.002605438232422, + "learning_rate": 0.0001967334646125333, + "loss": 0.4411, + "step": 1270 + }, + { + "epoch": 0.04942275763542994, + "grad_norm": 4.695134162902832, + "learning_rate": 0.00019670772359293152, + "loss": 0.415, + "step": 1280 + }, + { + "epoch": 0.04980887292945674, + "grad_norm": 6.046143054962158, + "learning_rate": 0.00019668198257332976, + "loss": 0.463, + "step": 1290 + }, + { + "epoch": 0.05019498822348353, + "grad_norm": 1.8300361633300781, + "learning_rate": 0.00019665624155372794, + "loss": 0.408, + "step": 1300 + }, + { + "epoch": 0.05058110351751033, + "grad_norm": 5.80141544342041, + "learning_rate": 0.00019663050053412618, + "loss": 0.4481, + "step": 1310 + }, + { + "epoch": 0.050967218811537124, + "grad_norm": 4.103593349456787, + "learning_rate": 0.00019660475951452437, + "loss": 0.3054, + "step": 1320 + }, + { + "epoch": 0.05135333410556392, + "grad_norm": 9.129929542541504, + "learning_rate": 0.00019657901849492258, + "loss": 0.5554, + "step": 1330 + }, + { + "epoch": 0.051739449399590715, + "grad_norm": 6.979663372039795, + "learning_rate": 0.0001965532774753208, + "loss": 0.388, + "step": 1340 + }, + { + "epoch": 0.05212556469361752, + "grad_norm": 6.329915523529053, + "learning_rate": 0.000196527536455719, + "loss": 0.8894, + "step": 1350 + }, + { + "epoch": 0.05251167998764431, + "grad_norm": 0.2686227262020111, + "learning_rate": 0.00019650179543611725, + "loss": 0.5032, + "step": 1360 + }, + { + "epoch": 0.05289779528167111, + "grad_norm": 4.818896770477295, + "learning_rate": 0.00019647605441651544, + "loss": 0.3417, + "step": 1370 + }, + { + "epoch": 0.0532839105756979, + "grad_norm": 6.252008438110352, + "learning_rate": 0.00019645031339691368, + "loss": 0.398, + "step": 1380 + }, + { + "epoch": 0.0536700258697247, + "grad_norm": 1.2734620571136475, + "learning_rate": 0.00019642457237731186, + "loss": 0.5369, + "step": 1390 + }, + { + "epoch": 0.054056141163751494, + "grad_norm": 6.511690616607666, + "learning_rate": 0.00019639883135771008, + "loss": 0.4646, + "step": 1400 + }, + { + "epoch": 0.054442256457778296, + "grad_norm": 3.2352371215820312, + "learning_rate": 0.00019637309033810832, + "loss": 0.3805, + "step": 1410 + }, + { + "epoch": 0.05482837175180509, + "grad_norm": 1.0574132204055786, + "learning_rate": 0.0001963473493185065, + "loss": 0.4572, + "step": 1420 + }, + { + "epoch": 0.055214487045831886, + "grad_norm": 2.8525452613830566, + "learning_rate": 0.00019632160829890474, + "loss": 0.4417, + "step": 1430 + }, + { + "epoch": 0.05560060233985868, + "grad_norm": 3.588179111480713, + "learning_rate": 0.00019629586727930293, + "loss": 0.6214, + "step": 1440 + }, + { + "epoch": 0.05598671763388548, + 
"grad_norm": 3.969320058822632, + "learning_rate": 0.00019627012625970117, + "loss": 0.6114, + "step": 1450 + }, + { + "epoch": 0.05637283292791227, + "grad_norm": 3.465053081512451, + "learning_rate": 0.00019624438524009936, + "loss": 0.6066, + "step": 1460 + }, + { + "epoch": 0.05675894822193907, + "grad_norm": 3.5419201850891113, + "learning_rate": 0.00019621864422049757, + "loss": 0.3906, + "step": 1470 + }, + { + "epoch": 0.05714506351596587, + "grad_norm": 3.4580233097076416, + "learning_rate": 0.0001961929032008958, + "loss": 0.9283, + "step": 1480 + }, + { + "epoch": 0.057531178809992665, + "grad_norm": 4.222144603729248, + "learning_rate": 0.000196167162181294, + "loss": 0.4225, + "step": 1490 + }, + { + "epoch": 0.05791729410401946, + "grad_norm": 0.8072681427001953, + "learning_rate": 0.00019614142116169224, + "loss": 0.5012, + "step": 1500 + }, + { + "epoch": 0.058303409398046256, + "grad_norm": 2.827258348464966, + "learning_rate": 0.00019611568014209042, + "loss": 0.4333, + "step": 1510 + }, + { + "epoch": 0.05868952469207305, + "grad_norm": 1.3494776487350464, + "learning_rate": 0.00019608993912248866, + "loss": 0.3019, + "step": 1520 + }, + { + "epoch": 0.05907563998609985, + "grad_norm": 4.3279900550842285, + "learning_rate": 0.00019606419810288688, + "loss": 0.4807, + "step": 1530 + }, + { + "epoch": 0.05946175528012665, + "grad_norm": 3.8996474742889404, + "learning_rate": 0.0001960384570832851, + "loss": 0.4876, + "step": 1540 + }, + { + "epoch": 0.059847870574153444, + "grad_norm": 5.255978584289551, + "learning_rate": 0.0001960127160636833, + "loss": 0.4661, + "step": 1550 + }, + { + "epoch": 0.06023398586818024, + "grad_norm": 5.172120094299316, + "learning_rate": 0.0001959869750440815, + "loss": 0.4885, + "step": 1560 + }, + { + "epoch": 0.060620101162207035, + "grad_norm": 5.385959625244141, + "learning_rate": 0.00019596123402447973, + "loss": 0.2995, + "step": 1570 + }, + { + "epoch": 0.06100621645623383, + "grad_norm": 3.9922871589660645, + "learning_rate": 0.00019593549300487792, + "loss": 0.4568, + "step": 1580 + }, + { + "epoch": 0.061392331750260626, + "grad_norm": 6.048642158508301, + "learning_rate": 0.00019590975198527616, + "loss": 0.4649, + "step": 1590 + }, + { + "epoch": 0.06177844704428742, + "grad_norm": 1.0315563678741455, + "learning_rate": 0.00019588401096567437, + "loss": 0.5175, + "step": 1600 + }, + { + "epoch": 0.06216456233831422, + "grad_norm": 12.403678894042969, + "learning_rate": 0.00019585826994607258, + "loss": 0.523, + "step": 1610 + }, + { + "epoch": 0.06255067763234101, + "grad_norm": 6.127188205718994, + "learning_rate": 0.0001958325289264708, + "loss": 0.5861, + "step": 1620 + }, + { + "epoch": 0.06293679292636781, + "grad_norm": 6.398592948913574, + "learning_rate": 0.00019580678790686898, + "loss": 0.7471, + "step": 1630 + }, + { + "epoch": 0.06332290822039462, + "grad_norm": 4.127200603485107, + "learning_rate": 0.00019578104688726722, + "loss": 0.4921, + "step": 1640 + }, + { + "epoch": 0.06370902351442141, + "grad_norm": 4.601541042327881, + "learning_rate": 0.0001957553058676654, + "loss": 0.3682, + "step": 1650 + }, + { + "epoch": 0.06409513880844821, + "grad_norm": 6.32781457901001, + "learning_rate": 0.00019572956484806365, + "loss": 0.3748, + "step": 1660 + }, + { + "epoch": 0.064481254102475, + "grad_norm": 3.7280173301696777, + "learning_rate": 0.00019570382382846186, + "loss": 0.5912, + "step": 1670 + }, + { + "epoch": 0.0648673693965018, + "grad_norm": 6.7821946144104, + "learning_rate": 0.00019567808280886008, + 
"loss": 0.4073, + "step": 1680 + }, + { + "epoch": 0.0652534846905286, + "grad_norm": 1.4645791053771973, + "learning_rate": 0.0001956523417892583, + "loss": 0.7164, + "step": 1690 + }, + { + "epoch": 0.06563959998455539, + "grad_norm": 2.367361545562744, + "learning_rate": 0.00019562660076965648, + "loss": 0.3859, + "step": 1700 + }, + { + "epoch": 0.06602571527858218, + "grad_norm": 2.198493480682373, + "learning_rate": 0.00019560085975005472, + "loss": 0.4928, + "step": 1710 + }, + { + "epoch": 0.06641183057260898, + "grad_norm": 1.882567048072815, + "learning_rate": 0.00019557511873045293, + "loss": 0.5861, + "step": 1720 + }, + { + "epoch": 0.06679794586663577, + "grad_norm": 6.324089527130127, + "learning_rate": 0.00019554937771085114, + "loss": 0.6249, + "step": 1730 + }, + { + "epoch": 0.06718406116066257, + "grad_norm": 4.283392906188965, + "learning_rate": 0.00019552363669124936, + "loss": 0.5403, + "step": 1740 + }, + { + "epoch": 0.06757017645468937, + "grad_norm": 4.464428424835205, + "learning_rate": 0.00019549789567164757, + "loss": 0.5815, + "step": 1750 + }, + { + "epoch": 0.06795629174871616, + "grad_norm": 0.32923218607902527, + "learning_rate": 0.00019547215465204578, + "loss": 0.3791, + "step": 1760 + }, + { + "epoch": 0.06834240704274297, + "grad_norm": 5.255763053894043, + "learning_rate": 0.00019544641363244397, + "loss": 0.4252, + "step": 1770 + }, + { + "epoch": 0.06872852233676977, + "grad_norm": 2.1615116596221924, + "learning_rate": 0.0001954206726128422, + "loss": 0.4457, + "step": 1780 + }, + { + "epoch": 0.06911463763079656, + "grad_norm": 2.0209217071533203, + "learning_rate": 0.00019539493159324042, + "loss": 0.4377, + "step": 1790 + }, + { + "epoch": 0.06950075292482336, + "grad_norm": 8.12317943572998, + "learning_rate": 0.00019536919057363864, + "loss": 0.4025, + "step": 1800 + }, + { + "epoch": 0.06988686821885015, + "grad_norm": 1.7034660577774048, + "learning_rate": 0.00019534344955403685, + "loss": 0.2915, + "step": 1810 + }, + { + "epoch": 0.07027298351287695, + "grad_norm": 4.640942096710205, + "learning_rate": 0.00019531770853443506, + "loss": 0.6588, + "step": 1820 + }, + { + "epoch": 0.07065909880690374, + "grad_norm": 4.202883243560791, + "learning_rate": 0.00019529196751483328, + "loss": 0.4442, + "step": 1830 + }, + { + "epoch": 0.07104521410093054, + "grad_norm": 3.26898193359375, + "learning_rate": 0.00019526622649523146, + "loss": 0.5065, + "step": 1840 + }, + { + "epoch": 0.07143132939495733, + "grad_norm": 8.189995765686035, + "learning_rate": 0.0001952404854756297, + "loss": 0.5258, + "step": 1850 + }, + { + "epoch": 0.07181744468898413, + "grad_norm": 3.2618284225463867, + "learning_rate": 0.00019521474445602792, + "loss": 0.5037, + "step": 1860 + }, + { + "epoch": 0.07220355998301092, + "grad_norm": 2.168548583984375, + "learning_rate": 0.00019518900343642613, + "loss": 0.4887, + "step": 1870 + }, + { + "epoch": 0.07258967527703772, + "grad_norm": 2.2029404640197754, + "learning_rate": 0.00019516326241682434, + "loss": 0.4646, + "step": 1880 + }, + { + "epoch": 0.07297579057106451, + "grad_norm": 1.561713695526123, + "learning_rate": 0.00019513752139722256, + "loss": 0.432, + "step": 1890 + }, + { + "epoch": 0.07336190586509132, + "grad_norm": 3.428372621536255, + "learning_rate": 0.00019511178037762077, + "loss": 0.4124, + "step": 1900 + }, + { + "epoch": 0.07374802115911812, + "grad_norm": 5.6706671714782715, + "learning_rate": 0.00019508603935801898, + "loss": 0.4431, + "step": 1910 + }, + { + "epoch": 0.07413413645314491, + 
"grad_norm": 8.872734069824219, + "learning_rate": 0.0001950602983384172, + "loss": 0.7001, + "step": 1920 + }, + { + "epoch": 0.07452025174717171, + "grad_norm": 1.6821974515914917, + "learning_rate": 0.0001950345573188154, + "loss": 0.4204, + "step": 1930 + }, + { + "epoch": 0.0749063670411985, + "grad_norm": 3.117480993270874, + "learning_rate": 0.00019500881629921362, + "loss": 0.3748, + "step": 1940 + }, + { + "epoch": 0.0752924823352253, + "grad_norm": 0.8384984731674194, + "learning_rate": 0.00019498307527961184, + "loss": 0.2636, + "step": 1950 + }, + { + "epoch": 0.0756785976292521, + "grad_norm": 2.8956708908081055, + "learning_rate": 0.00019495733426001005, + "loss": 0.4514, + "step": 1960 + }, + { + "epoch": 0.07606471292327889, + "grad_norm": 11.233087539672852, + "learning_rate": 0.00019493159324040826, + "loss": 0.4002, + "step": 1970 + }, + { + "epoch": 0.07645082821730569, + "grad_norm": 4.066893100738525, + "learning_rate": 0.00019490585222080648, + "loss": 0.4449, + "step": 1980 + }, + { + "epoch": 0.07683694351133248, + "grad_norm": 4.854077339172363, + "learning_rate": 0.0001948801112012047, + "loss": 0.4961, + "step": 1990 + }, + { + "epoch": 0.07722305880535928, + "grad_norm": 2.5722827911376953, + "learning_rate": 0.0001948543701816029, + "loss": 0.3743, + "step": 2000 + }, + { + "epoch": 0.07760917409938607, + "grad_norm": 5.842077255249023, + "learning_rate": 0.00019482862916200112, + "loss": 0.2906, + "step": 2010 + }, + { + "epoch": 0.07799528939341287, + "grad_norm": 6.163092136383057, + "learning_rate": 0.00019480288814239933, + "loss": 0.4374, + "step": 2020 + }, + { + "epoch": 0.07838140468743968, + "grad_norm": 4.589334487915039, + "learning_rate": 0.00019477714712279754, + "loss": 0.484, + "step": 2030 + }, + { + "epoch": 0.07876751998146647, + "grad_norm": 6.951212406158447, + "learning_rate": 0.00019475140610319576, + "loss": 0.5767, + "step": 2040 + }, + { + "epoch": 0.07915363527549327, + "grad_norm": 3.3662521839141846, + "learning_rate": 0.00019472566508359397, + "loss": 0.5566, + "step": 2050 + }, + { + "epoch": 0.07953975056952006, + "grad_norm": 1.6602391004562378, + "learning_rate": 0.00019469992406399218, + "loss": 0.1436, + "step": 2060 + }, + { + "epoch": 0.07992586586354686, + "grad_norm": 6.451857089996338, + "learning_rate": 0.0001946741830443904, + "loss": 0.3778, + "step": 2070 + }, + { + "epoch": 0.08031198115757365, + "grad_norm": 3.7249560356140137, + "learning_rate": 0.0001946484420247886, + "loss": 0.5391, + "step": 2080 + }, + { + "epoch": 0.08069809645160045, + "grad_norm": 4.138098239898682, + "learning_rate": 0.00019462270100518682, + "loss": 0.3598, + "step": 2090 + }, + { + "epoch": 0.08108421174562724, + "grad_norm": 3.224778175354004, + "learning_rate": 0.00019459695998558504, + "loss": 0.2967, + "step": 2100 + }, + { + "epoch": 0.08147032703965404, + "grad_norm": 1.3951358795166016, + "learning_rate": 0.00019457121896598325, + "loss": 0.2698, + "step": 2110 + }, + { + "epoch": 0.08185644233368083, + "grad_norm": 5.956802845001221, + "learning_rate": 0.00019454547794638146, + "loss": 0.451, + "step": 2120 + }, + { + "epoch": 0.08224255762770763, + "grad_norm": 3.456360101699829, + "learning_rate": 0.00019451973692677968, + "loss": 0.3365, + "step": 2130 + }, + { + "epoch": 0.08262867292173443, + "grad_norm": 2.9433653354644775, + "learning_rate": 0.0001944939959071779, + "loss": 0.4424, + "step": 2140 + }, + { + "epoch": 0.08301478821576122, + "grad_norm": 3.136000871658325, + "learning_rate": 0.0001944682548875761, + "loss": 
0.6224, + "step": 2150 + }, + { + "epoch": 0.08340090350978802, + "grad_norm": 2.669084310531616, + "learning_rate": 0.00019444251386797432, + "loss": 0.4435, + "step": 2160 + }, + { + "epoch": 0.08378701880381483, + "grad_norm": 4.573731899261475, + "learning_rate": 0.00019441677284837253, + "loss": 0.5555, + "step": 2170 + }, + { + "epoch": 0.08417313409784162, + "grad_norm": 6.354156017303467, + "learning_rate": 0.00019439103182877074, + "loss": 0.4232, + "step": 2180 + }, + { + "epoch": 0.08455924939186842, + "grad_norm": 2.993691921234131, + "learning_rate": 0.00019436529080916895, + "loss": 0.51, + "step": 2190 + }, + { + "epoch": 0.08494536468589521, + "grad_norm": 3.6496782302856445, + "learning_rate": 0.00019433954978956717, + "loss": 0.4031, + "step": 2200 + }, + { + "epoch": 0.085331479979922, + "grad_norm": 1.9039051532745361, + "learning_rate": 0.00019431380876996538, + "loss": 0.4407, + "step": 2210 + }, + { + "epoch": 0.0857175952739488, + "grad_norm": 2.3211915493011475, + "learning_rate": 0.00019428806775036362, + "loss": 0.4057, + "step": 2220 + }, + { + "epoch": 0.0861037105679756, + "grad_norm": 4.883905410766602, + "learning_rate": 0.0001942623267307618, + "loss": 0.6223, + "step": 2230 + }, + { + "epoch": 0.08648982586200239, + "grad_norm": 2.164484977722168, + "learning_rate": 0.00019423658571116005, + "loss": 0.3141, + "step": 2240 + }, + { + "epoch": 0.08687594115602919, + "grad_norm": 2.2078909873962402, + "learning_rate": 0.00019421084469155823, + "loss": 0.3523, + "step": 2250 + }, + { + "epoch": 0.08726205645005598, + "grad_norm": 1.0987967252731323, + "learning_rate": 0.00019418510367195645, + "loss": 0.4013, + "step": 2260 + }, + { + "epoch": 0.08764817174408278, + "grad_norm": 2.418327569961548, + "learning_rate": 0.00019415936265235466, + "loss": 0.581, + "step": 2270 + }, + { + "epoch": 0.08803428703810957, + "grad_norm": 4.029652118682861, + "learning_rate": 0.00019413362163275287, + "loss": 0.5244, + "step": 2280 + }, + { + "epoch": 0.08842040233213637, + "grad_norm": 3.4661777019500732, + "learning_rate": 0.00019410788061315112, + "loss": 0.4531, + "step": 2290 + }, + { + "epoch": 0.08880651762616318, + "grad_norm": 1.0475856065750122, + "learning_rate": 0.0001940821395935493, + "loss": 0.4362, + "step": 2300 + }, + { + "epoch": 0.08919263292018997, + "grad_norm": 4.021854400634766, + "learning_rate": 0.00019405639857394754, + "loss": 0.4532, + "step": 2310 + }, + { + "epoch": 0.08957874821421677, + "grad_norm": 1.836438536643982, + "learning_rate": 0.00019403065755434573, + "loss": 0.6858, + "step": 2320 + }, + { + "epoch": 0.08996486350824356, + "grad_norm": 2.5251567363739014, + "learning_rate": 0.00019400491653474394, + "loss": 0.3619, + "step": 2330 + }, + { + "epoch": 0.09035097880227036, + "grad_norm": 3.067208766937256, + "learning_rate": 0.00019397917551514215, + "loss": 0.7376, + "step": 2340 + }, + { + "epoch": 0.09073709409629716, + "grad_norm": 0.9124518036842346, + "learning_rate": 0.00019395343449554037, + "loss": 0.4193, + "step": 2350 + }, + { + "epoch": 0.09112320939032395, + "grad_norm": 3.8170812129974365, + "learning_rate": 0.0001939276934759386, + "loss": 0.5393, + "step": 2360 + }, + { + "epoch": 0.09150932468435075, + "grad_norm": 8.19250202178955, + "learning_rate": 0.0001939019524563368, + "loss": 0.424, + "step": 2370 + }, + { + "epoch": 0.09189543997837754, + "grad_norm": 4.459112167358398, + "learning_rate": 0.00019387621143673503, + "loss": 0.3278, + "step": 2380 + }, + { + "epoch": 0.09228155527240434, + "grad_norm": 
5.578339576721191, + "learning_rate": 0.00019385047041713322, + "loss": 0.5223, + "step": 2390 + }, + { + "epoch": 0.09266767056643113, + "grad_norm": 1.3707878589630127, + "learning_rate": 0.00019382472939753143, + "loss": 0.3004, + "step": 2400 + }, + { + "epoch": 0.09305378586045793, + "grad_norm": 5.0041184425354, + "learning_rate": 0.00019379898837792967, + "loss": 0.4378, + "step": 2410 + }, + { + "epoch": 0.09343990115448472, + "grad_norm": 5.668384552001953, + "learning_rate": 0.00019377324735832786, + "loss": 0.499, + "step": 2420 + }, + { + "epoch": 0.09382601644851153, + "grad_norm": 5.605838775634766, + "learning_rate": 0.0001937475063387261, + "loss": 0.5642, + "step": 2430 + }, + { + "epoch": 0.09421213174253833, + "grad_norm": 5.055904865264893, + "learning_rate": 0.0001937217653191243, + "loss": 0.6225, + "step": 2440 + }, + { + "epoch": 0.09459824703656512, + "grad_norm": 3.1779348850250244, + "learning_rate": 0.00019369602429952253, + "loss": 0.3673, + "step": 2450 + }, + { + "epoch": 0.09498436233059192, + "grad_norm": 2.540269136428833, + "learning_rate": 0.00019367028327992071, + "loss": 0.3499, + "step": 2460 + }, + { + "epoch": 0.09537047762461871, + "grad_norm": 2.284114122390747, + "learning_rate": 0.00019364454226031893, + "loss": 0.5405, + "step": 2470 + }, + { + "epoch": 0.09575659291864551, + "grad_norm": 6.752682685852051, + "learning_rate": 0.00019361880124071717, + "loss": 0.5594, + "step": 2480 + }, + { + "epoch": 0.0961427082126723, + "grad_norm": 2.5659310817718506, + "learning_rate": 0.00019359306022111535, + "loss": 0.5872, + "step": 2490 + }, + { + "epoch": 0.0965288235066991, + "grad_norm": 4.503110885620117, + "learning_rate": 0.0001935673192015136, + "loss": 0.5171, + "step": 2500 + }, + { + "epoch": 0.0969149388007259, + "grad_norm": 1.7715080976486206, + "learning_rate": 0.00019354157818191178, + "loss": 0.6131, + "step": 2510 + }, + { + "epoch": 0.09730105409475269, + "grad_norm": 4.479000568389893, + "learning_rate": 0.00019351583716231002, + "loss": 0.4396, + "step": 2520 + }, + { + "epoch": 0.09768716938877949, + "grad_norm": 0.9424387812614441, + "learning_rate": 0.00019349009614270823, + "loss": 0.3166, + "step": 2530 + }, + { + "epoch": 0.09807328468280628, + "grad_norm": 3.792689800262451, + "learning_rate": 0.00019346435512310642, + "loss": 0.2591, + "step": 2540 + }, + { + "epoch": 0.09845939997683308, + "grad_norm": 2.9132003784179688, + "learning_rate": 0.00019343861410350466, + "loss": 0.3523, + "step": 2550 + }, + { + "epoch": 0.09884551527085988, + "grad_norm": 1.6421749591827393, + "learning_rate": 0.00019341287308390285, + "loss": 0.2805, + "step": 2560 + }, + { + "epoch": 0.09923163056488668, + "grad_norm": 1.9469813108444214, + "learning_rate": 0.0001933871320643011, + "loss": 0.3929, + "step": 2570 + }, + { + "epoch": 0.09961774585891348, + "grad_norm": 3.081062078475952, + "learning_rate": 0.00019336139104469927, + "loss": 0.569, + "step": 2580 + }, + { + "epoch": 0.10000386115294027, + "grad_norm": 4.728143215179443, + "learning_rate": 0.00019333565002509751, + "loss": 0.3443, + "step": 2590 + }, + { + "epoch": 0.10038997644696707, + "grad_norm": 2.4117422103881836, + "learning_rate": 0.00019330990900549573, + "loss": 0.4492, + "step": 2600 + }, + { + "epoch": 0.10077609174099386, + "grad_norm": 5.794168472290039, + "learning_rate": 0.00019328416798589391, + "loss": 0.5088, + "step": 2610 + }, + { + "epoch": 0.10116220703502066, + "grad_norm": 1.0662094354629517, + "learning_rate": 0.00019325842696629215, + "loss": 0.4683, + 
"step": 2620 + }, + { + "epoch": 0.10154832232904745, + "grad_norm": 2.13590931892395, + "learning_rate": 0.00019323268594669034, + "loss": 0.5833, + "step": 2630 + }, + { + "epoch": 0.10193443762307425, + "grad_norm": 7.52834415435791, + "learning_rate": 0.00019320694492708858, + "loss": 0.54, + "step": 2640 + }, + { + "epoch": 0.10232055291710104, + "grad_norm": 6.155847072601318, + "learning_rate": 0.00019318120390748677, + "loss": 0.6809, + "step": 2650 + }, + { + "epoch": 0.10270666821112784, + "grad_norm": 6.527890205383301, + "learning_rate": 0.000193155462887885, + "loss": 0.5239, + "step": 2660 + }, + { + "epoch": 0.10309278350515463, + "grad_norm": 3.3918986320495605, + "learning_rate": 0.00019312972186828322, + "loss": 0.4636, + "step": 2670 + }, + { + "epoch": 0.10347889879918143, + "grad_norm": 2.0933191776275635, + "learning_rate": 0.0001931039808486814, + "loss": 0.5288, + "step": 2680 + }, + { + "epoch": 0.10386501409320824, + "grad_norm": 2.4386465549468994, + "learning_rate": 0.00019307823982907965, + "loss": 0.5496, + "step": 2690 + }, + { + "epoch": 0.10425112938723503, + "grad_norm": 2.885315418243408, + "learning_rate": 0.00019305249880947783, + "loss": 0.5928, + "step": 2700 + }, + { + "epoch": 0.10463724468126183, + "grad_norm": 4.986598968505859, + "learning_rate": 0.00019302675778987607, + "loss": 0.3513, + "step": 2710 + }, + { + "epoch": 0.10502335997528862, + "grad_norm": 2.7999277114868164, + "learning_rate": 0.0001930010167702743, + "loss": 0.4484, + "step": 2720 + }, + { + "epoch": 0.10540947526931542, + "grad_norm": 1.6467676162719727, + "learning_rate": 0.0001929752757506725, + "loss": 0.4729, + "step": 2730 + }, + { + "epoch": 0.10579559056334222, + "grad_norm": 2.168477773666382, + "learning_rate": 0.00019294953473107071, + "loss": 0.3579, + "step": 2740 + }, + { + "epoch": 0.10618170585736901, + "grad_norm": 1.5439807176589966, + "learning_rate": 0.00019292379371146893, + "loss": 0.5715, + "step": 2750 + }, + { + "epoch": 0.1065678211513958, + "grad_norm": 1.2175947427749634, + "learning_rate": 0.00019289805269186714, + "loss": 0.4059, + "step": 2760 + }, + { + "epoch": 0.1069539364454226, + "grad_norm": 2.649782419204712, + "learning_rate": 0.00019287231167226533, + "loss": 0.7864, + "step": 2770 + }, + { + "epoch": 0.1073400517394494, + "grad_norm": 3.698002815246582, + "learning_rate": 0.00019284657065266357, + "loss": 0.5107, + "step": 2780 + }, + { + "epoch": 0.10772616703347619, + "grad_norm": 1.7418729066848755, + "learning_rate": 0.00019282082963306178, + "loss": 0.332, + "step": 2790 + }, + { + "epoch": 0.10811228232750299, + "grad_norm": 2.2264151573181152, + "learning_rate": 0.00019279508861346, + "loss": 0.3944, + "step": 2800 + }, + { + "epoch": 0.10849839762152978, + "grad_norm": 1.3407092094421387, + "learning_rate": 0.0001927693475938582, + "loss": 0.4603, + "step": 2810 + }, + { + "epoch": 0.10888451291555659, + "grad_norm": 0.3719189763069153, + "learning_rate": 0.00019274360657425642, + "loss": 0.496, + "step": 2820 + }, + { + "epoch": 0.10927062820958339, + "grad_norm": 4.728814125061035, + "learning_rate": 0.00019271786555465463, + "loss": 0.4079, + "step": 2830 + }, + { + "epoch": 0.10965674350361018, + "grad_norm": 5.007620334625244, + "learning_rate": 0.00019269212453505285, + "loss": 0.5221, + "step": 2840 + }, + { + "epoch": 0.11004285879763698, + "grad_norm": 2.7476320266723633, + "learning_rate": 0.00019266638351545106, + "loss": 0.3705, + "step": 2850 + }, + { + "epoch": 0.11042897409166377, + "grad_norm": 
2.2960126399993896, + "learning_rate": 0.00019264064249584927, + "loss": 0.6656, + "step": 2860 + }, + { + "epoch": 0.11081508938569057, + "grad_norm": 0.9589812755584717, + "learning_rate": 0.0001926149014762475, + "loss": 0.6977, + "step": 2870 + }, + { + "epoch": 0.11120120467971736, + "grad_norm": 2.274984121322632, + "learning_rate": 0.0001925891604566457, + "loss": 0.4237, + "step": 2880 + }, + { + "epoch": 0.11158731997374416, + "grad_norm": 1.8849111795425415, + "learning_rate": 0.00019256341943704391, + "loss": 0.2357, + "step": 2890 + }, + { + "epoch": 0.11197343526777095, + "grad_norm": 2.7264740467071533, + "learning_rate": 0.00019253767841744213, + "loss": 0.3422, + "step": 2900 + }, + { + "epoch": 0.11235955056179775, + "grad_norm": 3.832827568054199, + "learning_rate": 0.00019251193739784034, + "loss": 0.3861, + "step": 2910 + }, + { + "epoch": 0.11274566585582455, + "grad_norm": 2.3612313270568848, + "learning_rate": 0.00019248619637823855, + "loss": 0.3299, + "step": 2920 + }, + { + "epoch": 0.11313178114985134, + "grad_norm": 2.2509396076202393, + "learning_rate": 0.00019246045535863677, + "loss": 0.6027, + "step": 2930 + }, + { + "epoch": 0.11351789644387814, + "grad_norm": 2.7600464820861816, + "learning_rate": 0.00019243471433903498, + "loss": 0.3864, + "step": 2940 + }, + { + "epoch": 0.11390401173790494, + "grad_norm": 5.956289768218994, + "learning_rate": 0.0001924089733194332, + "loss": 0.4669, + "step": 2950 + }, + { + "epoch": 0.11429012703193174, + "grad_norm": 4.651761531829834, + "learning_rate": 0.0001923832322998314, + "loss": 0.5774, + "step": 2960 + }, + { + "epoch": 0.11467624232595854, + "grad_norm": 1.1770590543746948, + "learning_rate": 0.00019235749128022962, + "loss": 0.3951, + "step": 2970 + }, + { + "epoch": 0.11506235761998533, + "grad_norm": 0.8117956519126892, + "learning_rate": 0.00019233175026062783, + "loss": 0.3919, + "step": 2980 + }, + { + "epoch": 0.11544847291401213, + "grad_norm": 1.318812608718872, + "learning_rate": 0.00019230600924102605, + "loss": 0.2905, + "step": 2990 + }, + { + "epoch": 0.11583458820803892, + "grad_norm": 1.981382966041565, + "learning_rate": 0.00019228026822142426, + "loss": 0.5312, + "step": 3000 + }, + { + "epoch": 0.11622070350206572, + "grad_norm": 1.9083003997802734, + "learning_rate": 0.00019225452720182247, + "loss": 0.3129, + "step": 3010 + }, + { + "epoch": 0.11660681879609251, + "grad_norm": 2.7581653594970703, + "learning_rate": 0.0001922287861822207, + "loss": 0.3954, + "step": 3020 + }, + { + "epoch": 0.11699293409011931, + "grad_norm": 1.065090537071228, + "learning_rate": 0.0001922030451626189, + "loss": 0.2722, + "step": 3030 + }, + { + "epoch": 0.1173790493841461, + "grad_norm": 0.600864827632904, + "learning_rate": 0.0001921773041430171, + "loss": 0.493, + "step": 3040 + }, + { + "epoch": 0.1177651646781729, + "grad_norm": 4.4449052810668945, + "learning_rate": 0.00019215156312341533, + "loss": 0.4982, + "step": 3050 + }, + { + "epoch": 0.1181512799721997, + "grad_norm": 3.34476637840271, + "learning_rate": 0.00019212582210381354, + "loss": 0.3204, + "step": 3060 + }, + { + "epoch": 0.11853739526622649, + "grad_norm": 1.7432445287704468, + "learning_rate": 0.00019210008108421175, + "loss": 0.6601, + "step": 3070 + }, + { + "epoch": 0.1189235105602533, + "grad_norm": 1.908324956893921, + "learning_rate": 0.00019207434006460997, + "loss": 0.5947, + "step": 3080 + }, + { + "epoch": 0.1193096258542801, + "grad_norm": 5.373056888580322, + "learning_rate": 0.00019204859904500818, + "loss": 0.5169, + 
"step": 3090 + }, + { + "epoch": 0.11969574114830689, + "grad_norm": 0.861535370349884, + "learning_rate": 0.0001920228580254064, + "loss": 0.3829, + "step": 3100 + }, + { + "epoch": 0.12008185644233368, + "grad_norm": 1.2700462341308594, + "learning_rate": 0.0001919971170058046, + "loss": 0.4475, + "step": 3110 + }, + { + "epoch": 0.12046797173636048, + "grad_norm": 2.9959444999694824, + "learning_rate": 0.00019197137598620282, + "loss": 0.4704, + "step": 3120 + }, + { + "epoch": 0.12085408703038727, + "grad_norm": 0.280109167098999, + "learning_rate": 0.00019194563496660103, + "loss": 0.3732, + "step": 3130 + }, + { + "epoch": 0.12124020232441407, + "grad_norm": 0.9746024012565613, + "learning_rate": 0.00019191989394699925, + "loss": 0.4693, + "step": 3140 + }, + { + "epoch": 0.12162631761844087, + "grad_norm": 1.7267721891403198, + "learning_rate": 0.00019189415292739746, + "loss": 0.4509, + "step": 3150 + }, + { + "epoch": 0.12201243291246766, + "grad_norm": 2.1759033203125, + "learning_rate": 0.00019186841190779567, + "loss": 0.428, + "step": 3160 + }, + { + "epoch": 0.12239854820649446, + "grad_norm": 1.270711064338684, + "learning_rate": 0.0001918426708881939, + "loss": 0.4262, + "step": 3170 + }, + { + "epoch": 0.12278466350052125, + "grad_norm": 3.7549123764038086, + "learning_rate": 0.0001918169298685921, + "loss": 0.4758, + "step": 3180 + }, + { + "epoch": 0.12317077879454805, + "grad_norm": 1.6550017595291138, + "learning_rate": 0.0001917911888489903, + "loss": 0.452, + "step": 3190 + }, + { + "epoch": 0.12355689408857484, + "grad_norm": 3.7151713371276855, + "learning_rate": 0.00019176544782938853, + "loss": 0.4844, + "step": 3200 + }, + { + "epoch": 0.12394300938260165, + "grad_norm": 0.5354440808296204, + "learning_rate": 0.00019173970680978674, + "loss": 0.4432, + "step": 3210 + }, + { + "epoch": 0.12432912467662845, + "grad_norm": 3.2494261264801025, + "learning_rate": 0.00019171396579018498, + "loss": 0.587, + "step": 3220 + }, + { + "epoch": 0.12471523997065524, + "grad_norm": 1.2129877805709839, + "learning_rate": 0.00019168822477058317, + "loss": 0.4662, + "step": 3230 + }, + { + "epoch": 0.12510135526468202, + "grad_norm": 3.723402500152588, + "learning_rate": 0.00019166248375098138, + "loss": 0.5261, + "step": 3240 + }, + { + "epoch": 0.12548747055870882, + "grad_norm": 1.596259593963623, + "learning_rate": 0.0001916367427313796, + "loss": 0.2802, + "step": 3250 + }, + { + "epoch": 0.12587358585273561, + "grad_norm": 5.5710320472717285, + "learning_rate": 0.0001916110017117778, + "loss": 0.5246, + "step": 3260 + }, + { + "epoch": 0.1262597011467624, + "grad_norm": 4.490183353424072, + "learning_rate": 0.00019158526069217602, + "loss": 0.4929, + "step": 3270 + }, + { + "epoch": 0.12664581644078923, + "grad_norm": 2.482572555541992, + "learning_rate": 0.00019155951967257423, + "loss": 0.3677, + "step": 3280 + }, + { + "epoch": 0.12703193173481603, + "grad_norm": 3.348520517349243, + "learning_rate": 0.00019153377865297247, + "loss": 0.6471, + "step": 3290 + }, + { + "epoch": 0.12741804702884282, + "grad_norm": 7.735306262969971, + "learning_rate": 0.00019150803763337066, + "loss": 0.6057, + "step": 3300 + }, + { + "epoch": 0.12780416232286962, + "grad_norm": 2.120649576187134, + "learning_rate": 0.00019148229661376887, + "loss": 0.5408, + "step": 3310 + }, + { + "epoch": 0.12819027761689641, + "grad_norm": 10.259540557861328, + "learning_rate": 0.00019145655559416709, + "loss": 0.4753, + "step": 3320 + }, + { + "epoch": 0.1285763929109232, + "grad_norm": 
4.094576358795166, + "learning_rate": 0.0001914308145745653, + "loss": 0.3832, + "step": 3330 + }, + { + "epoch": 0.12896250820495, + "grad_norm": 3.1248559951782227, + "learning_rate": 0.00019140507355496354, + "loss": 0.5631, + "step": 3340 + }, + { + "epoch": 0.1293486234989768, + "grad_norm": 1.2975168228149414, + "learning_rate": 0.00019137933253536173, + "loss": 0.5158, + "step": 3350 + }, + { + "epoch": 0.1297347387930036, + "grad_norm": 3.2515244483947754, + "learning_rate": 0.00019135359151575997, + "loss": 0.4176, + "step": 3360 + }, + { + "epoch": 0.1301208540870304, + "grad_norm": 2.287757396697998, + "learning_rate": 0.00019132785049615815, + "loss": 0.5316, + "step": 3370 + }, + { + "epoch": 0.1305069693810572, + "grad_norm": 8.668967247009277, + "learning_rate": 0.00019130210947655637, + "loss": 0.6653, + "step": 3380 + }, + { + "epoch": 0.13089308467508398, + "grad_norm": 4.751536846160889, + "learning_rate": 0.00019127636845695458, + "loss": 0.4508, + "step": 3390 + }, + { + "epoch": 0.13127919996911078, + "grad_norm": 3.240792751312256, + "learning_rate": 0.0001912506274373528, + "loss": 0.441, + "step": 3400 + }, + { + "epoch": 0.13166531526313757, + "grad_norm": 2.146261215209961, + "learning_rate": 0.00019122488641775103, + "loss": 0.3394, + "step": 3410 + }, + { + "epoch": 0.13205143055716437, + "grad_norm": 2.259693145751953, + "learning_rate": 0.00019119914539814922, + "loss": 0.4348, + "step": 3420 + }, + { + "epoch": 0.13243754585119116, + "grad_norm": 1.8136098384857178, + "learning_rate": 0.00019117340437854746, + "loss": 0.4441, + "step": 3430 + }, + { + "epoch": 0.13282366114521796, + "grad_norm": 1.7324503660202026, + "learning_rate": 0.00019114766335894565, + "loss": 0.4725, + "step": 3440 + }, + { + "epoch": 0.13320977643924475, + "grad_norm": 4.709383487701416, + "learning_rate": 0.00019112192233934389, + "loss": 0.5383, + "step": 3450 + }, + { + "epoch": 0.13359589173327155, + "grad_norm": 0.3468118906021118, + "learning_rate": 0.00019109618131974207, + "loss": 0.3228, + "step": 3460 + }, + { + "epoch": 0.13398200702729834, + "grad_norm": 6.66448974609375, + "learning_rate": 0.00019107044030014029, + "loss": 0.4128, + "step": 3470 + }, + { + "epoch": 0.13436812232132514, + "grad_norm": 1.6971935033798218, + "learning_rate": 0.00019104469928053853, + "loss": 0.5061, + "step": 3480 + }, + { + "epoch": 0.13475423761535194, + "grad_norm": 0.9180198311805725, + "learning_rate": 0.0001910189582609367, + "loss": 0.4445, + "step": 3490 + }, + { + "epoch": 0.13514035290937873, + "grad_norm": 3.1244235038757324, + "learning_rate": 0.00019099321724133495, + "loss": 0.4099, + "step": 3500 + }, + { + "epoch": 0.13552646820340553, + "grad_norm": 2.7192864418029785, + "learning_rate": 0.00019096747622173314, + "loss": 0.5004, + "step": 3510 + }, + { + "epoch": 0.13591258349743232, + "grad_norm": 2.0905699729919434, + "learning_rate": 0.00019094173520213138, + "loss": 0.4277, + "step": 3520 + }, + { + "epoch": 0.13629869879145912, + "grad_norm": 3.3753092288970947, + "learning_rate": 0.0001909159941825296, + "loss": 0.4129, + "step": 3530 + }, + { + "epoch": 0.13668481408548594, + "grad_norm": 4.199211120605469, + "learning_rate": 0.00019089025316292778, + "loss": 0.5734, + "step": 3540 + }, + { + "epoch": 0.13707092937951273, + "grad_norm": 2.0411245822906494, + "learning_rate": 0.00019086451214332602, + "loss": 0.3018, + "step": 3550 + }, + { + "epoch": 0.13745704467353953, + "grad_norm": 17.236717224121094, + "learning_rate": 0.0001908387711237242, + "loss": 0.4527, + 
"step": 3560 + }, + { + "epoch": 0.13784315996756633, + "grad_norm": 1.4575644731521606, + "learning_rate": 0.00019081303010412245, + "loss": 0.3773, + "step": 3570 + }, + { + "epoch": 0.13822927526159312, + "grad_norm": 3.926090717315674, + "learning_rate": 0.00019078728908452063, + "loss": 0.5316, + "step": 3580 + }, + { + "epoch": 0.13861539055561992, + "grad_norm": 3.1841864585876465, + "learning_rate": 0.00019076154806491887, + "loss": 0.3705, + "step": 3590 + }, + { + "epoch": 0.1390015058496467, + "grad_norm": 4.08506441116333, + "learning_rate": 0.00019073580704531709, + "loss": 0.4941, + "step": 3600 + }, + { + "epoch": 0.1393876211436735, + "grad_norm": 3.063154458999634, + "learning_rate": 0.00019071006602571527, + "loss": 0.4435, + "step": 3610 + }, + { + "epoch": 0.1397737364377003, + "grad_norm": 6.122230529785156, + "learning_rate": 0.0001906843250061135, + "loss": 0.5067, + "step": 3620 + }, + { + "epoch": 0.1401598517317271, + "grad_norm": 3.3089540004730225, + "learning_rate": 0.0001906585839865117, + "loss": 0.4329, + "step": 3630 + }, + { + "epoch": 0.1405459670257539, + "grad_norm": 1.7245008945465088, + "learning_rate": 0.00019063284296690994, + "loss": 0.4502, + "step": 3640 + }, + { + "epoch": 0.1409320823197807, + "grad_norm": 1.7759568691253662, + "learning_rate": 0.00019060710194730813, + "loss": 0.2379, + "step": 3650 + }, + { + "epoch": 0.14131819761380748, + "grad_norm": 0.432452529668808, + "learning_rate": 0.00019058136092770637, + "loss": 0.4277, + "step": 3660 + }, + { + "epoch": 0.14170431290783428, + "grad_norm": 3.311952829360962, + "learning_rate": 0.00019055561990810458, + "loss": 0.4558, + "step": 3670 + }, + { + "epoch": 0.14209042820186107, + "grad_norm": 1.9942964315414429, + "learning_rate": 0.00019052987888850277, + "loss": 0.3349, + "step": 3680 + }, + { + "epoch": 0.14247654349588787, + "grad_norm": 6.226424217224121, + "learning_rate": 0.000190504137868901, + "loss": 0.5809, + "step": 3690 + }, + { + "epoch": 0.14286265878991466, + "grad_norm": 6.223634719848633, + "learning_rate": 0.0001904783968492992, + "loss": 0.5788, + "step": 3700 + }, + { + "epoch": 0.14324877408394146, + "grad_norm": 0.7370914220809937, + "learning_rate": 0.00019045265582969743, + "loss": 0.4834, + "step": 3710 + }, + { + "epoch": 0.14363488937796826, + "grad_norm": 1.745880365371704, + "learning_rate": 0.00019042691481009565, + "loss": 0.6995, + "step": 3720 + }, + { + "epoch": 0.14402100467199505, + "grad_norm": 0.8839595913887024, + "learning_rate": 0.00019040117379049386, + "loss": 0.3526, + "step": 3730 + }, + { + "epoch": 0.14440711996602185, + "grad_norm": 1.1224008798599243, + "learning_rate": 0.00019037543277089207, + "loss": 0.3558, + "step": 3740 + }, + { + "epoch": 0.14479323526004864, + "grad_norm": 1.0473041534423828, + "learning_rate": 0.00019034969175129026, + "loss": 0.2465, + "step": 3750 + }, + { + "epoch": 0.14517935055407544, + "grad_norm": 3.83192777633667, + "learning_rate": 0.0001903239507316885, + "loss": 0.4832, + "step": 3760 + }, + { + "epoch": 0.14556546584810223, + "grad_norm": 3.323885440826416, + "learning_rate": 0.00019029820971208669, + "loss": 0.4924, + "step": 3770 + }, + { + "epoch": 0.14595158114212903, + "grad_norm": 3.2334187030792236, + "learning_rate": 0.00019027246869248493, + "loss": 0.5053, + "step": 3780 + }, + { + "epoch": 0.14633769643615582, + "grad_norm": 2.280498743057251, + "learning_rate": 0.00019024672767288314, + "loss": 0.554, + "step": 3790 + }, + { + "epoch": 0.14672381173018265, + "grad_norm": 
4.546648979187012, + "learning_rate": 0.00019022098665328135, + "loss": 0.3999, + "step": 3800 + }, + { + "epoch": 0.14710992702420944, + "grad_norm": 0.6303244829177856, + "learning_rate": 0.00019019524563367957, + "loss": 0.4481, + "step": 3810 + }, + { + "epoch": 0.14749604231823624, + "grad_norm": 2.605196475982666, + "learning_rate": 0.00019016950461407775, + "loss": 0.3561, + "step": 3820 + }, + { + "epoch": 0.14788215761226303, + "grad_norm": 3.0562639236450195, + "learning_rate": 0.000190143763594476, + "loss": 0.5903, + "step": 3830 + }, + { + "epoch": 0.14826827290628983, + "grad_norm": 11.164155006408691, + "learning_rate": 0.0001901180225748742, + "loss": 0.4299, + "step": 3840 + }, + { + "epoch": 0.14865438820031662, + "grad_norm": 4.996811866760254, + "learning_rate": 0.00019009228155527242, + "loss": 0.4423, + "step": 3850 + }, + { + "epoch": 0.14904050349434342, + "grad_norm": 2.627272844314575, + "learning_rate": 0.00019006654053567063, + "loss": 0.4875, + "step": 3860 + }, + { + "epoch": 0.1494266187883702, + "grad_norm": 2.6532809734344482, + "learning_rate": 0.00019004079951606885, + "loss": 0.5221, + "step": 3870 + }, + { + "epoch": 0.149812734082397, + "grad_norm": 5.821976661682129, + "learning_rate": 0.00019001505849646706, + "loss": 0.4793, + "step": 3880 + }, + { + "epoch": 0.1501988493764238, + "grad_norm": 2.888029098510742, + "learning_rate": 0.00018998931747686524, + "loss": 0.5784, + "step": 3890 + }, + { + "epoch": 0.1505849646704506, + "grad_norm": 0.9147624969482422, + "learning_rate": 0.00018996357645726349, + "loss": 0.5533, + "step": 3900 + }, + { + "epoch": 0.1509710799644774, + "grad_norm": 2.6088199615478516, + "learning_rate": 0.0001899378354376617, + "loss": 0.5028, + "step": 3910 + }, + { + "epoch": 0.1513571952585042, + "grad_norm": 3.8208296298980713, + "learning_rate": 0.0001899120944180599, + "loss": 0.4934, + "step": 3920 + }, + { + "epoch": 0.15174331055253099, + "grad_norm": 2.8711328506469727, + "learning_rate": 0.00018988635339845813, + "loss": 0.4417, + "step": 3930 + }, + { + "epoch": 0.15212942584655778, + "grad_norm": 2.922855854034424, + "learning_rate": 0.00018986061237885634, + "loss": 0.5303, + "step": 3940 + }, + { + "epoch": 0.15251554114058458, + "grad_norm": 2.52575945854187, + "learning_rate": 0.00018983487135925455, + "loss": 0.397, + "step": 3950 + }, + { + "epoch": 0.15290165643461137, + "grad_norm": 3.3369996547698975, + "learning_rate": 0.00018980913033965277, + "loss": 0.4172, + "step": 3960 + }, + { + "epoch": 0.15328777172863817, + "grad_norm": 1.7678214311599731, + "learning_rate": 0.00018978338932005098, + "loss": 0.3122, + "step": 3970 + }, + { + "epoch": 0.15367388702266496, + "grad_norm": 3.3293211460113525, + "learning_rate": 0.0001897576483004492, + "loss": 0.6864, + "step": 3980 + }, + { + "epoch": 0.15406000231669176, + "grad_norm": 1.4911530017852783, + "learning_rate": 0.0001897319072808474, + "loss": 0.3888, + "step": 3990 + }, + { + "epoch": 0.15444611761071855, + "grad_norm": 1.4884055852890015, + "learning_rate": 0.00018970616626124562, + "loss": 0.3952, + "step": 4000 + }, + { + "epoch": 0.15483223290474535, + "grad_norm": 1.2745383977890015, + "learning_rate": 0.00018968042524164383, + "loss": 0.3647, + "step": 4010 + }, + { + "epoch": 0.15521834819877214, + "grad_norm": 7.799386024475098, + "learning_rate": 0.00018965468422204205, + "loss": 0.5554, + "step": 4020 + }, + { + "epoch": 0.15560446349279894, + "grad_norm": 2.4778294563293457, + "learning_rate": 0.00018962894320244026, + "loss": 0.662, + 
"step": 4030 + }, + { + "epoch": 0.15599057878682573, + "grad_norm": 0.8415629267692566, + "learning_rate": 0.00018960320218283847, + "loss": 0.4317, + "step": 4040 + }, + { + "epoch": 0.15637669408085253, + "grad_norm": 4.507715702056885, + "learning_rate": 0.00018957746116323669, + "loss": 0.4512, + "step": 4050 + }, + { + "epoch": 0.15676280937487935, + "grad_norm": 3.5790421962738037, + "learning_rate": 0.0001895517201436349, + "loss": 0.4022, + "step": 4060 + }, + { + "epoch": 0.15714892466890615, + "grad_norm": 3.7266156673431396, + "learning_rate": 0.0001895259791240331, + "loss": 0.3945, + "step": 4070 + }, + { + "epoch": 0.15753503996293294, + "grad_norm": 7.909580230712891, + "learning_rate": 0.00018950023810443133, + "loss": 0.3726, + "step": 4080 + }, + { + "epoch": 0.15792115525695974, + "grad_norm": 2.2439534664154053, + "learning_rate": 0.00018947449708482954, + "loss": 0.4157, + "step": 4090 + }, + { + "epoch": 0.15830727055098653, + "grad_norm": 1.6076972484588623, + "learning_rate": 0.00018944875606522775, + "loss": 0.2363, + "step": 4100 + }, + { + "epoch": 0.15869338584501333, + "grad_norm": 3.7495157718658447, + "learning_rate": 0.00018942301504562596, + "loss": 0.4908, + "step": 4110 + }, + { + "epoch": 0.15907950113904012, + "grad_norm": 0.2942291796207428, + "learning_rate": 0.00018939727402602418, + "loss": 0.4915, + "step": 4120 + }, + { + "epoch": 0.15946561643306692, + "grad_norm": 1.3951829671859741, + "learning_rate": 0.0001893715330064224, + "loss": 0.4585, + "step": 4130 + }, + { + "epoch": 0.15985173172709372, + "grad_norm": 0.4405671954154968, + "learning_rate": 0.0001893457919868206, + "loss": 0.2839, + "step": 4140 + }, + { + "epoch": 0.1602378470211205, + "grad_norm": 1.0917588472366333, + "learning_rate": 0.00018932005096721882, + "loss": 0.401, + "step": 4150 + }, + { + "epoch": 0.1606239623151473, + "grad_norm": 1.6183397769927979, + "learning_rate": 0.00018929430994761703, + "loss": 0.5555, + "step": 4160 + }, + { + "epoch": 0.1610100776091741, + "grad_norm": 2.0909583568573, + "learning_rate": 0.00018926856892801524, + "loss": 0.52, + "step": 4170 + }, + { + "epoch": 0.1613961929032009, + "grad_norm": 2.901456356048584, + "learning_rate": 0.00018924282790841346, + "loss": 0.603, + "step": 4180 + }, + { + "epoch": 0.1617823081972277, + "grad_norm": 7.230431079864502, + "learning_rate": 0.00018921708688881167, + "loss": 0.6189, + "step": 4190 + }, + { + "epoch": 0.1621684234912545, + "grad_norm": 6.773900508880615, + "learning_rate": 0.00018919134586920988, + "loss": 0.2494, + "step": 4200 + }, + { + "epoch": 0.16255453878528128, + "grad_norm": 0.8557988405227661, + "learning_rate": 0.0001891656048496081, + "loss": 0.2617, + "step": 4210 + }, + { + "epoch": 0.16294065407930808, + "grad_norm": 1.3747268915176392, + "learning_rate": 0.00018913986383000634, + "loss": 0.4189, + "step": 4220 + }, + { + "epoch": 0.16332676937333487, + "grad_norm": 4.072261810302734, + "learning_rate": 0.00018911412281040452, + "loss": 0.5473, + "step": 4230 + }, + { + "epoch": 0.16371288466736167, + "grad_norm": 2.7210185527801514, + "learning_rate": 0.00018908838179080274, + "loss": 0.3501, + "step": 4240 + }, + { + "epoch": 0.16409899996138846, + "grad_norm": 2.276454448699951, + "learning_rate": 0.00018906264077120095, + "loss": 0.3078, + "step": 4250 + }, + { + "epoch": 0.16448511525541526, + "grad_norm": 3.586536169052124, + "learning_rate": 0.00018903689975159916, + "loss": 0.3856, + "step": 4260 + }, + { + "epoch": 0.16487123054944205, + "grad_norm": 
2.199673891067505, + "learning_rate": 0.00018901115873199738, + "loss": 0.3677, + "step": 4270 + }, + { + "epoch": 0.16525734584346885, + "grad_norm": 2.8410561084747314, + "learning_rate": 0.0001889854177123956, + "loss": 0.6101, + "step": 4280 + }, + { + "epoch": 0.16564346113749565, + "grad_norm": 3.9638853073120117, + "learning_rate": 0.00018895967669279383, + "loss": 0.5066, + "step": 4290 + }, + { + "epoch": 0.16602957643152244, + "grad_norm": 1.2070738077163696, + "learning_rate": 0.00018893393567319202, + "loss": 0.385, + "step": 4300 + }, + { + "epoch": 0.16641569172554924, + "grad_norm": 1.0531187057495117, + "learning_rate": 0.00018890819465359023, + "loss": 0.3608, + "step": 4310 + }, + { + "epoch": 0.16680180701957603, + "grad_norm": 1.1998246908187866, + "learning_rate": 0.00018888245363398844, + "loss": 0.4624, + "step": 4320 + }, + { + "epoch": 0.16718792231360285, + "grad_norm": 2.126063346862793, + "learning_rate": 0.00018885671261438666, + "loss": 0.6076, + "step": 4330 + }, + { + "epoch": 0.16757403760762965, + "grad_norm": 1.5854765176773071, + "learning_rate": 0.0001888309715947849, + "loss": 0.4817, + "step": 4340 + }, + { + "epoch": 0.16796015290165645, + "grad_norm": 6.630712509155273, + "learning_rate": 0.00018880523057518308, + "loss": 0.4098, + "step": 4350 + }, + { + "epoch": 0.16834626819568324, + "grad_norm": 2.060789108276367, + "learning_rate": 0.00018877948955558132, + "loss": 0.3523, + "step": 4360 + }, + { + "epoch": 0.16873238348971004, + "grad_norm": 2.2551252841949463, + "learning_rate": 0.0001887537485359795, + "loss": 0.3095, + "step": 4370 + }, + { + "epoch": 0.16911849878373683, + "grad_norm": 3.736640453338623, + "learning_rate": 0.00018872800751637772, + "loss": 0.3812, + "step": 4380 + }, + { + "epoch": 0.16950461407776363, + "grad_norm": 1.9971100091934204, + "learning_rate": 0.00018870226649677594, + "loss": 0.3422, + "step": 4390 + }, + { + "epoch": 0.16989072937179042, + "grad_norm": 3.6577255725860596, + "learning_rate": 0.00018867652547717415, + "loss": 0.7857, + "step": 4400 + }, + { + "epoch": 0.17027684466581722, + "grad_norm": 2.166538715362549, + "learning_rate": 0.0001886507844575724, + "loss": 0.5596, + "step": 4410 + }, + { + "epoch": 0.170662959959844, + "grad_norm": 2.0177736282348633, + "learning_rate": 0.00018862504343797058, + "loss": 0.3197, + "step": 4420 + }, + { + "epoch": 0.1710490752538708, + "grad_norm": 0.29447808861732483, + "learning_rate": 0.00018859930241836882, + "loss": 0.5284, + "step": 4430 + }, + { + "epoch": 0.1714351905478976, + "grad_norm": 2.17985200881958, + "learning_rate": 0.000188573561398767, + "loss": 0.5188, + "step": 4440 + }, + { + "epoch": 0.1718213058419244, + "grad_norm": 2.87449049949646, + "learning_rate": 0.00018854782037916522, + "loss": 0.554, + "step": 4450 + }, + { + "epoch": 0.1722074211359512, + "grad_norm": 1.8865265846252441, + "learning_rate": 0.00018852207935956343, + "loss": 0.4338, + "step": 4460 + }, + { + "epoch": 0.172593536429978, + "grad_norm": 2.042337417602539, + "learning_rate": 0.00018849633833996164, + "loss": 0.3924, + "step": 4470 + }, + { + "epoch": 0.17297965172400478, + "grad_norm": 1.4254354238510132, + "learning_rate": 0.00018847059732035988, + "loss": 0.2607, + "step": 4480 + }, + { + "epoch": 0.17336576701803158, + "grad_norm": 2.611560344696045, + "learning_rate": 0.00018844485630075807, + "loss": 0.4967, + "step": 4490 + }, + { + "epoch": 0.17375188231205838, + "grad_norm": 1.1008936166763306, + "learning_rate": 0.0001884191152811563, + "loss": 0.4109, + 
"step": 4500 + }, + { + "epoch": 0.17413799760608517, + "grad_norm": 0.8280178308486938, + "learning_rate": 0.0001883933742615545, + "loss": 0.6632, + "step": 4510 + }, + { + "epoch": 0.17452411290011197, + "grad_norm": 2.226020336151123, + "learning_rate": 0.0001883676332419527, + "loss": 0.4777, + "step": 4520 + }, + { + "epoch": 0.17491022819413876, + "grad_norm": 1.6062042713165283, + "learning_rate": 0.00018834189222235095, + "loss": 0.4671, + "step": 4530 + }, + { + "epoch": 0.17529634348816556, + "grad_norm": 3.9853012561798096, + "learning_rate": 0.00018831615120274914, + "loss": 0.4843, + "step": 4540 + }, + { + "epoch": 0.17568245878219235, + "grad_norm": 0.30268657207489014, + "learning_rate": 0.00018829041018314738, + "loss": 0.3922, + "step": 4550 + }, + { + "epoch": 0.17606857407621915, + "grad_norm": 6.283960342407227, + "learning_rate": 0.00018826466916354556, + "loss": 0.6106, + "step": 4560 + }, + { + "epoch": 0.17645468937024594, + "grad_norm": 1.4164658784866333, + "learning_rate": 0.0001882389281439438, + "loss": 0.3014, + "step": 4570 + }, + { + "epoch": 0.17684080466427274, + "grad_norm": 4.847668170928955, + "learning_rate": 0.000188213187124342, + "loss": 0.5216, + "step": 4580 + }, + { + "epoch": 0.17722691995829956, + "grad_norm": 3.683180332183838, + "learning_rate": 0.0001881874461047402, + "loss": 0.3268, + "step": 4590 + }, + { + "epoch": 0.17761303525232636, + "grad_norm": 1.053144097328186, + "learning_rate": 0.00018816170508513844, + "loss": 0.5229, + "step": 4600 + }, + { + "epoch": 0.17799915054635315, + "grad_norm": 0.29438719153404236, + "learning_rate": 0.00018813596406553663, + "loss": 0.4523, + "step": 4610 + }, + { + "epoch": 0.17838526584037995, + "grad_norm": 1.5682024955749512, + "learning_rate": 0.00018811022304593487, + "loss": 0.4367, + "step": 4620 + }, + { + "epoch": 0.17877138113440674, + "grad_norm": 1.462189793586731, + "learning_rate": 0.00018808448202633306, + "loss": 0.5086, + "step": 4630 + }, + { + "epoch": 0.17915749642843354, + "grad_norm": 0.7927210927009583, + "learning_rate": 0.0001880587410067313, + "loss": 0.4654, + "step": 4640 + }, + { + "epoch": 0.17954361172246033, + "grad_norm": 1.4543548822402954, + "learning_rate": 0.0001880329999871295, + "loss": 0.5005, + "step": 4650 + }, + { + "epoch": 0.17992972701648713, + "grad_norm": 1.5814868211746216, + "learning_rate": 0.00018800725896752772, + "loss": 0.4127, + "step": 4660 + }, + { + "epoch": 0.18031584231051392, + "grad_norm": 1.9244798421859741, + "learning_rate": 0.00018798151794792594, + "loss": 0.3796, + "step": 4670 + }, + { + "epoch": 0.18070195760454072, + "grad_norm": 1.8725996017456055, + "learning_rate": 0.00018795577692832412, + "loss": 0.4112, + "step": 4680 + }, + { + "epoch": 0.18108807289856751, + "grad_norm": 2.8138442039489746, + "learning_rate": 0.00018793003590872236, + "loss": 0.6117, + "step": 4690 + }, + { + "epoch": 0.1814741881925943, + "grad_norm": 3.4465060234069824, + "learning_rate": 0.00018790429488912055, + "loss": 0.4223, + "step": 4700 + }, + { + "epoch": 0.1818603034866211, + "grad_norm": 4.431785583496094, + "learning_rate": 0.0001878785538695188, + "loss": 0.54, + "step": 4710 + }, + { + "epoch": 0.1822464187806479, + "grad_norm": 6.951846599578857, + "learning_rate": 0.000187852812849917, + "loss": 0.3702, + "step": 4720 + }, + { + "epoch": 0.1826325340746747, + "grad_norm": 1.0188024044036865, + "learning_rate": 0.00018782707183031522, + "loss": 0.2715, + "step": 4730 + }, + { + "epoch": 0.1830186493687015, + "grad_norm": 
0.3875834047794342, + "learning_rate": 0.00018780133081071343, + "loss": 0.4208, + "step": 4740 + }, + { + "epoch": 0.1834047646627283, + "grad_norm": 2.7475740909576416, + "learning_rate": 0.00018777558979111162, + "loss": 0.3613, + "step": 4750 + }, + { + "epoch": 0.18379087995675508, + "grad_norm": 2.553227186203003, + "learning_rate": 0.00018774984877150986, + "loss": 0.4781, + "step": 4760 + }, + { + "epoch": 0.18417699525078188, + "grad_norm": 2.005154609680176, + "learning_rate": 0.00018772410775190804, + "loss": 0.3805, + "step": 4770 + }, + { + "epoch": 0.18456311054480867, + "grad_norm": 0.7380127310752869, + "learning_rate": 0.00018769836673230628, + "loss": 0.3679, + "step": 4780 + }, + { + "epoch": 0.18494922583883547, + "grad_norm": 3.6547505855560303, + "learning_rate": 0.0001876726257127045, + "loss": 0.4502, + "step": 4790 + }, + { + "epoch": 0.18533534113286226, + "grad_norm": 2.232980728149414, + "learning_rate": 0.0001876468846931027, + "loss": 0.4628, + "step": 4800 + }, + { + "epoch": 0.18572145642688906, + "grad_norm": 6.521275043487549, + "learning_rate": 0.00018762114367350092, + "loss": 0.4765, + "step": 4810 + }, + { + "epoch": 0.18610757172091585, + "grad_norm": 1.6310979127883911, + "learning_rate": 0.0001875954026538991, + "loss": 0.4039, + "step": 4820 + }, + { + "epoch": 0.18649368701494265, + "grad_norm": 1.1469775438308716, + "learning_rate": 0.00018756966163429735, + "loss": 0.4195, + "step": 4830 + }, + { + "epoch": 0.18687980230896944, + "grad_norm": 0.7688332200050354, + "learning_rate": 0.00018754392061469556, + "loss": 0.264, + "step": 4840 + }, + { + "epoch": 0.18726591760299627, + "grad_norm": 3.3422155380249023, + "learning_rate": 0.00018751817959509378, + "loss": 0.5275, + "step": 4850 + }, + { + "epoch": 0.18765203289702306, + "grad_norm": 1.517876386642456, + "learning_rate": 0.000187492438575492, + "loss": 0.4567, + "step": 4860 + }, + { + "epoch": 0.18803814819104986, + "grad_norm": 1.2196050882339478, + "learning_rate": 0.0001874666975558902, + "loss": 0.4231, + "step": 4870 + }, + { + "epoch": 0.18842426348507665, + "grad_norm": 1.3325402736663818, + "learning_rate": 0.00018744095653628842, + "loss": 0.6325, + "step": 4880 + }, + { + "epoch": 0.18881037877910345, + "grad_norm": 6.098769664764404, + "learning_rate": 0.0001874152155166866, + "loss": 0.576, + "step": 4890 + }, + { + "epoch": 0.18919649407313024, + "grad_norm": 2.602363348007202, + "learning_rate": 0.00018738947449708484, + "loss": 0.3237, + "step": 4900 + }, + { + "epoch": 0.18958260936715704, + "grad_norm": 0.970106303691864, + "learning_rate": 0.00018736373347748306, + "loss": 0.409, + "step": 4910 + }, + { + "epoch": 0.18996872466118384, + "grad_norm": 3.2592012882232666, + "learning_rate": 0.00018733799245788127, + "loss": 0.408, + "step": 4920 + }, + { + "epoch": 0.19035483995521063, + "grad_norm": 0.31132128834724426, + "learning_rate": 0.00018731225143827948, + "loss": 0.2446, + "step": 4930 + }, + { + "epoch": 0.19074095524923743, + "grad_norm": 5.321741104125977, + "learning_rate": 0.0001872865104186777, + "loss": 0.4604, + "step": 4940 + }, + { + "epoch": 0.19112707054326422, + "grad_norm": 1.1165122985839844, + "learning_rate": 0.0001872607693990759, + "loss": 0.3605, + "step": 4950 + }, + { + "epoch": 0.19151318583729102, + "grad_norm": 0.8274110555648804, + "learning_rate": 0.0001872350283794741, + "loss": 0.2669, + "step": 4960 + }, + { + "epoch": 0.1918993011313178, + "grad_norm": 2.8668346405029297, + "learning_rate": 0.00018720928735987234, + "loss": 0.4055, + 
"step": 4970 + }, + { + "epoch": 0.1922854164253446, + "grad_norm": 3.411841630935669, + "learning_rate": 0.00018718354634027055, + "loss": 0.5989, + "step": 4980 + }, + { + "epoch": 0.1926715317193714, + "grad_norm": 0.18740829825401306, + "learning_rate": 0.00018715780532066876, + "loss": 0.3805, + "step": 4990 + }, + { + "epoch": 0.1930576470133982, + "grad_norm": 1.0823473930358887, + "learning_rate": 0.00018713206430106698, + "loss": 0.2854, + "step": 5000 + }, + { + "epoch": 0.193443762307425, + "grad_norm": 1.9816405773162842, + "learning_rate": 0.0001871063232814652, + "loss": 0.3771, + "step": 5010 + }, + { + "epoch": 0.1938298776014518, + "grad_norm": 5.267081260681152, + "learning_rate": 0.0001870805822618634, + "loss": 0.3085, + "step": 5020 + }, + { + "epoch": 0.19421599289547858, + "grad_norm": 5.706038475036621, + "learning_rate": 0.00018705484124226162, + "loss": 0.484, + "step": 5030 + }, + { + "epoch": 0.19460210818950538, + "grad_norm": 1.3357723951339722, + "learning_rate": 0.00018702910022265983, + "loss": 0.2161, + "step": 5040 + }, + { + "epoch": 0.19498822348353217, + "grad_norm": 1.0626447200775146, + "learning_rate": 0.00018700335920305804, + "loss": 0.3491, + "step": 5050 + }, + { + "epoch": 0.19537433877755897, + "grad_norm": 2.441228151321411, + "learning_rate": 0.00018697761818345626, + "loss": 0.3975, + "step": 5060 + }, + { + "epoch": 0.19576045407158577, + "grad_norm": 2.6739327907562256, + "learning_rate": 0.00018695187716385447, + "loss": 0.3418, + "step": 5070 + }, + { + "epoch": 0.19614656936561256, + "grad_norm": 2.3216919898986816, + "learning_rate": 0.00018692613614425268, + "loss": 0.6265, + "step": 5080 + }, + { + "epoch": 0.19653268465963936, + "grad_norm": 3.9119021892547607, + "learning_rate": 0.0001869003951246509, + "loss": 0.2982, + "step": 5090 + }, + { + "epoch": 0.19691879995366615, + "grad_norm": 5.744061470031738, + "learning_rate": 0.0001868746541050491, + "loss": 0.4048, + "step": 5100 + }, + { + "epoch": 0.19730491524769297, + "grad_norm": 8.512910842895508, + "learning_rate": 0.00018684891308544732, + "loss": 0.3598, + "step": 5110 + }, + { + "epoch": 0.19769103054171977, + "grad_norm": 1.6382296085357666, + "learning_rate": 0.00018682317206584554, + "loss": 0.2121, + "step": 5120 + }, + { + "epoch": 0.19807714583574657, + "grad_norm": 2.1593070030212402, + "learning_rate": 0.00018679743104624375, + "loss": 0.4914, + "step": 5130 + }, + { + "epoch": 0.19846326112977336, + "grad_norm": 3.067112445831299, + "learning_rate": 0.00018677169002664196, + "loss": 0.4171, + "step": 5140 + }, + { + "epoch": 0.19884937642380016, + "grad_norm": 1.9954415559768677, + "learning_rate": 0.00018674594900704018, + "loss": 0.5161, + "step": 5150 + }, + { + "epoch": 0.19923549171782695, + "grad_norm": 2.793346643447876, + "learning_rate": 0.0001867202079874384, + "loss": 0.4159, + "step": 5160 + }, + { + "epoch": 0.19962160701185375, + "grad_norm": 4.087403774261475, + "learning_rate": 0.0001866944669678366, + "loss": 0.3339, + "step": 5170 + }, + { + "epoch": 0.20000772230588054, + "grad_norm": 2.10153865814209, + "learning_rate": 0.00018666872594823482, + "loss": 0.4352, + "step": 5180 + }, + { + "epoch": 0.20039383759990734, + "grad_norm": 2.947117805480957, + "learning_rate": 0.00018664298492863303, + "loss": 0.305, + "step": 5190 + }, + { + "epoch": 0.20077995289393413, + "grad_norm": 1.2496302127838135, + "learning_rate": 0.00018661724390903124, + "loss": 0.4578, + "step": 5200 + }, + { + "epoch": 0.20116606818796093, + "grad_norm": 
0.5246118903160095, + "learning_rate": 0.00018659150288942946, + "loss": 0.7531, + "step": 5210 + }, + { + "epoch": 0.20155218348198772, + "grad_norm": 4.099668502807617, + "learning_rate": 0.00018656576186982767, + "loss": 0.3809, + "step": 5220 + }, + { + "epoch": 0.20193829877601452, + "grad_norm": 4.237419128417969, + "learning_rate": 0.00018654002085022588, + "loss": 0.3169, + "step": 5230 + }, + { + "epoch": 0.20232441407004131, + "grad_norm": 1.6228466033935547, + "learning_rate": 0.0001865142798306241, + "loss": 0.5832, + "step": 5240 + }, + { + "epoch": 0.2027105293640681, + "grad_norm": 4.567386627197266, + "learning_rate": 0.0001864885388110223, + "loss": 0.2177, + "step": 5250 + }, + { + "epoch": 0.2030966446580949, + "grad_norm": 1.4991040229797363, + "learning_rate": 0.00018646279779142052, + "loss": 0.3851, + "step": 5260 + }, + { + "epoch": 0.2034827599521217, + "grad_norm": 2.127082586288452, + "learning_rate": 0.00018643705677181874, + "loss": 0.4721, + "step": 5270 + }, + { + "epoch": 0.2038688752461485, + "grad_norm": 2.9149303436279297, + "learning_rate": 0.00018641131575221695, + "loss": 0.2556, + "step": 5280 + }, + { + "epoch": 0.2042549905401753, + "grad_norm": 0.06375914812088013, + "learning_rate": 0.00018638557473261516, + "loss": 0.3599, + "step": 5290 + }, + { + "epoch": 0.20464110583420209, + "grad_norm": 3.338331699371338, + "learning_rate": 0.00018635983371301338, + "loss": 0.4062, + "step": 5300 + }, + { + "epoch": 0.20502722112822888, + "grad_norm": 4.006681442260742, + "learning_rate": 0.0001863340926934116, + "loss": 0.4538, + "step": 5310 + }, + { + "epoch": 0.20541333642225568, + "grad_norm": 1.1406009197235107, + "learning_rate": 0.0001863083516738098, + "loss": 0.8432, + "step": 5320 + }, + { + "epoch": 0.20579945171628247, + "grad_norm": 9.281437873840332, + "learning_rate": 0.00018628261065420802, + "loss": 0.4538, + "step": 5330 + }, + { + "epoch": 0.20618556701030927, + "grad_norm": 3.1884214878082275, + "learning_rate": 0.00018625686963460626, + "loss": 0.3361, + "step": 5340 + }, + { + "epoch": 0.20657168230433606, + "grad_norm": 1.4311977624893188, + "learning_rate": 0.00018623112861500444, + "loss": 0.5519, + "step": 5350 + }, + { + "epoch": 0.20695779759836286, + "grad_norm": 3.574361801147461, + "learning_rate": 0.00018620538759540266, + "loss": 0.518, + "step": 5360 + }, + { + "epoch": 0.20734391289238968, + "grad_norm": 3.0186073780059814, + "learning_rate": 0.00018617964657580087, + "loss": 0.4204, + "step": 5370 + }, + { + "epoch": 0.20773002818641648, + "grad_norm": 2.832859754562378, + "learning_rate": 0.00018615390555619908, + "loss": 0.5736, + "step": 5380 + }, + { + "epoch": 0.20811614348044327, + "grad_norm": 2.2258200645446777, + "learning_rate": 0.0001861281645365973, + "loss": 0.8194, + "step": 5390 + }, + { + "epoch": 0.20850225877447007, + "grad_norm": 1.0975148677825928, + "learning_rate": 0.0001861024235169955, + "loss": 0.5235, + "step": 5400 + }, + { + "epoch": 0.20888837406849686, + "grad_norm": 2.597329616546631, + "learning_rate": 0.00018607668249739375, + "loss": 0.2798, + "step": 5410 + }, + { + "epoch": 0.20927448936252366, + "grad_norm": 1.3780876398086548, + "learning_rate": 0.00018605094147779194, + "loss": 0.4046, + "step": 5420 + }, + { + "epoch": 0.20966060465655045, + "grad_norm": 2.409886598587036, + "learning_rate": 0.00018602520045819018, + "loss": 0.3243, + "step": 5430 + }, + { + "epoch": 0.21004671995057725, + "grad_norm": 1.0368077754974365, + "learning_rate": 0.00018599945943858836, + "loss": 0.4469, 
+ "step": 5440 + }, + { + "epoch": 0.21043283524460404, + "grad_norm": 2.961658000946045, + "learning_rate": 0.00018597371841898658, + "loss": 0.5104, + "step": 5450 + }, + { + "epoch": 0.21081895053863084, + "grad_norm": 1.1599836349487305, + "learning_rate": 0.00018594797739938482, + "loss": 0.3422, + "step": 5460 + }, + { + "epoch": 0.21120506583265763, + "grad_norm": 3.293682336807251, + "learning_rate": 0.000185922236379783, + "loss": 0.3556, + "step": 5470 + }, + { + "epoch": 0.21159118112668443, + "grad_norm": 1.6923863887786865, + "learning_rate": 0.00018589649536018124, + "loss": 0.3084, + "step": 5480 + }, + { + "epoch": 0.21197729642071123, + "grad_norm": 3.7289531230926514, + "learning_rate": 0.00018587075434057943, + "loss": 0.4668, + "step": 5490 + }, + { + "epoch": 0.21236341171473802, + "grad_norm": 1.3744993209838867, + "learning_rate": 0.00018584501332097767, + "loss": 0.2984, + "step": 5500 + }, + { + "epoch": 0.21274952700876482, + "grad_norm": 1.4377775192260742, + "learning_rate": 0.00018581927230137586, + "loss": 0.2622, + "step": 5510 + }, + { + "epoch": 0.2131356423027916, + "grad_norm": 4.957859992980957, + "learning_rate": 0.00018579353128177407, + "loss": 0.5561, + "step": 5520 + }, + { + "epoch": 0.2135217575968184, + "grad_norm": 3.2645647525787354, + "learning_rate": 0.0001857677902621723, + "loss": 0.59, + "step": 5530 + }, + { + "epoch": 0.2139078728908452, + "grad_norm": 1.1365091800689697, + "learning_rate": 0.0001857420492425705, + "loss": 0.443, + "step": 5540 + }, + { + "epoch": 0.214293988184872, + "grad_norm": 3.187476396560669, + "learning_rate": 0.00018571630822296874, + "loss": 0.2612, + "step": 5550 + }, + { + "epoch": 0.2146801034788988, + "grad_norm": 2.6851940155029297, + "learning_rate": 0.00018569056720336692, + "loss": 0.4543, + "step": 5560 + }, + { + "epoch": 0.2150662187729256, + "grad_norm": 2.2613587379455566, + "learning_rate": 0.00018566482618376516, + "loss": 0.3185, + "step": 5570 + }, + { + "epoch": 0.21545233406695238, + "grad_norm": 1.292475700378418, + "learning_rate": 0.00018563908516416335, + "loss": 0.2794, + "step": 5580 + }, + { + "epoch": 0.21583844936097918, + "grad_norm": 2.0878446102142334, + "learning_rate": 0.00018561334414456156, + "loss": 0.3908, + "step": 5590 + }, + { + "epoch": 0.21622456465500597, + "grad_norm": 8.058819770812988, + "learning_rate": 0.0001855876031249598, + "loss": 0.6282, + "step": 5600 + }, + { + "epoch": 0.21661067994903277, + "grad_norm": 1.8231629133224487, + "learning_rate": 0.000185561862105358, + "loss": 0.4973, + "step": 5610 + }, + { + "epoch": 0.21699679524305956, + "grad_norm": 3.947242259979248, + "learning_rate": 0.00018553612108575623, + "loss": 0.4598, + "step": 5620 + }, + { + "epoch": 0.21738291053708636, + "grad_norm": 3.3258073329925537, + "learning_rate": 0.00018551038006615442, + "loss": 0.5266, + "step": 5630 + }, + { + "epoch": 0.21776902583111318, + "grad_norm": 2.301485300064087, + "learning_rate": 0.00018548463904655266, + "loss": 0.4339, + "step": 5640 + }, + { + "epoch": 0.21815514112513998, + "grad_norm": 4.4706878662109375, + "learning_rate": 0.00018545889802695087, + "loss": 0.5233, + "step": 5650 + }, + { + "epoch": 0.21854125641916677, + "grad_norm": 1.1203399896621704, + "learning_rate": 0.00018543315700734906, + "loss": 0.4547, + "step": 5660 + }, + { + "epoch": 0.21892737171319357, + "grad_norm": 0.3744584918022156, + "learning_rate": 0.0001854074159877473, + "loss": 0.2524, + "step": 5670 + }, + { + "epoch": 0.21931348700722036, + "grad_norm": 
2.7888870239257812, + "learning_rate": 0.00018538167496814548, + "loss": 0.411, + "step": 5680 + }, + { + "epoch": 0.21969960230124716, + "grad_norm": 4.9972429275512695, + "learning_rate": 0.00018535593394854372, + "loss": 0.6359, + "step": 5690 + }, + { + "epoch": 0.22008571759527396, + "grad_norm": 1.1321420669555664, + "learning_rate": 0.0001853301929289419, + "loss": 0.4068, + "step": 5700 + }, + { + "epoch": 0.22047183288930075, + "grad_norm": 1.9291785955429077, + "learning_rate": 0.00018530445190934015, + "loss": 0.5428, + "step": 5710 + }, + { + "epoch": 0.22085794818332755, + "grad_norm": 0.8663263916969299, + "learning_rate": 0.00018527871088973836, + "loss": 0.4662, + "step": 5720 + }, + { + "epoch": 0.22124406347735434, + "grad_norm": 3.039782762527466, + "learning_rate": 0.00018525296987013655, + "loss": 0.3045, + "step": 5730 + }, + { + "epoch": 0.22163017877138114, + "grad_norm": 1.3552179336547852, + "learning_rate": 0.0001852272288505348, + "loss": 0.3411, + "step": 5740 + }, + { + "epoch": 0.22201629406540793, + "grad_norm": 1.4136948585510254, + "learning_rate": 0.00018520148783093298, + "loss": 0.5517, + "step": 5750 + }, + { + "epoch": 0.22240240935943473, + "grad_norm": 2.463942766189575, + "learning_rate": 0.00018517574681133122, + "loss": 0.4681, + "step": 5760 + }, + { + "epoch": 0.22278852465346152, + "grad_norm": 0.9063917994499207, + "learning_rate": 0.0001851500057917294, + "loss": 0.4537, + "step": 5770 + }, + { + "epoch": 0.22317463994748832, + "grad_norm": 2.352678060531616, + "learning_rate": 0.00018512426477212764, + "loss": 0.4245, + "step": 5780 + }, + { + "epoch": 0.2235607552415151, + "grad_norm": 2.0424869060516357, + "learning_rate": 0.00018509852375252586, + "loss": 0.2892, + "step": 5790 + }, + { + "epoch": 0.2239468705355419, + "grad_norm": 2.7604904174804688, + "learning_rate": 0.00018507278273292404, + "loss": 0.3606, + "step": 5800 + }, + { + "epoch": 0.2243329858295687, + "grad_norm": 2.827798366546631, + "learning_rate": 0.00018504704171332228, + "loss": 0.3212, + "step": 5810 + }, + { + "epoch": 0.2247191011235955, + "grad_norm": 3.1988680362701416, + "learning_rate": 0.00018502130069372047, + "loss": 0.5649, + "step": 5820 + }, + { + "epoch": 0.2251052164176223, + "grad_norm": 1.8216092586517334, + "learning_rate": 0.0001849955596741187, + "loss": 0.2871, + "step": 5830 + }, + { + "epoch": 0.2254913317116491, + "grad_norm": 2.7595627307891846, + "learning_rate": 0.00018496981865451692, + "loss": 0.665, + "step": 5840 + }, + { + "epoch": 0.22587744700567589, + "grad_norm": 1.2395098209381104, + "learning_rate": 0.00018494407763491514, + "loss": 0.2504, + "step": 5850 + }, + { + "epoch": 0.22626356229970268, + "grad_norm": 0.6991098523139954, + "learning_rate": 0.00018491833661531335, + "loss": 0.2263, + "step": 5860 + }, + { + "epoch": 0.22664967759372948, + "grad_norm": 11.053647994995117, + "learning_rate": 0.00018489259559571156, + "loss": 0.5919, + "step": 5870 + }, + { + "epoch": 0.22703579288775627, + "grad_norm": 2.8663880825042725, + "learning_rate": 0.00018486685457610978, + "loss": 0.3399, + "step": 5880 + }, + { + "epoch": 0.22742190818178307, + "grad_norm": 1.4995262622833252, + "learning_rate": 0.00018484111355650796, + "loss": 0.4474, + "step": 5890 + }, + { + "epoch": 0.2278080234758099, + "grad_norm": 3.275681972503662, + "learning_rate": 0.0001848153725369062, + "loss": 0.4347, + "step": 5900 + }, + { + "epoch": 0.22819413876983669, + "grad_norm": 14.772253036499023, + "learning_rate": 0.00018478963151730442, + "loss": 
0.3705, + "step": 5910 + }, + { + "epoch": 0.22858025406386348, + "grad_norm": 3.184976816177368, + "learning_rate": 0.00018476389049770263, + "loss": 0.3866, + "step": 5920 + }, + { + "epoch": 0.22896636935789028, + "grad_norm": 2.310765504837036, + "learning_rate": 0.00018473814947810084, + "loss": 0.2717, + "step": 5930 + }, + { + "epoch": 0.22935248465191707, + "grad_norm": 2.061189889907837, + "learning_rate": 0.00018471240845849906, + "loss": 0.2054, + "step": 5940 + }, + { + "epoch": 0.22973859994594387, + "grad_norm": 10.815469741821289, + "learning_rate": 0.00018468666743889727, + "loss": 0.5868, + "step": 5950 + }, + { + "epoch": 0.23012471523997066, + "grad_norm": 1.7080497741699219, + "learning_rate": 0.00018466092641929548, + "loss": 0.236, + "step": 5960 + }, + { + "epoch": 0.23051083053399746, + "grad_norm": 7.389080047607422, + "learning_rate": 0.0001846351853996937, + "loss": 0.2752, + "step": 5970 + }, + { + "epoch": 0.23089694582802425, + "grad_norm": 2.9860422611236572, + "learning_rate": 0.0001846094443800919, + "loss": 0.3436, + "step": 5980 + }, + { + "epoch": 0.23128306112205105, + "grad_norm": 13.12328815460205, + "learning_rate": 0.00018458370336049012, + "loss": 0.3952, + "step": 5990 + }, + { + "epoch": 0.23166917641607784, + "grad_norm": 3.7130823135375977, + "learning_rate": 0.00018455796234088834, + "loss": 0.3658, + "step": 6000 + }, + { + "epoch": 0.23205529171010464, + "grad_norm": 1.8329843282699585, + "learning_rate": 0.00018453222132128655, + "loss": 0.4172, + "step": 6010 + }, + { + "epoch": 0.23244140700413143, + "grad_norm": 1.3583799600601196, + "learning_rate": 0.00018450648030168476, + "loss": 0.4005, + "step": 6020 + }, + { + "epoch": 0.23282752229815823, + "grad_norm": 3.1711816787719727, + "learning_rate": 0.00018448073928208297, + "loss": 0.3674, + "step": 6030 + }, + { + "epoch": 0.23321363759218502, + "grad_norm": 1.576937198638916, + "learning_rate": 0.0001844549982624812, + "loss": 0.3444, + "step": 6040 + }, + { + "epoch": 0.23359975288621182, + "grad_norm": 3.922267436981201, + "learning_rate": 0.0001844292572428794, + "loss": 0.5939, + "step": 6050 + }, + { + "epoch": 0.23398586818023862, + "grad_norm": 2.9851067066192627, + "learning_rate": 0.00018440351622327761, + "loss": 0.2387, + "step": 6060 + }, + { + "epoch": 0.2343719834742654, + "grad_norm": 2.1216888427734375, + "learning_rate": 0.00018437777520367583, + "loss": 0.3836, + "step": 6070 + }, + { + "epoch": 0.2347580987682922, + "grad_norm": 2.9788095951080322, + "learning_rate": 0.00018435203418407404, + "loss": 0.474, + "step": 6080 + }, + { + "epoch": 0.235144214062319, + "grad_norm": 1.0204919576644897, + "learning_rate": 0.00018432629316447225, + "loss": 0.2837, + "step": 6090 + }, + { + "epoch": 0.2355303293563458, + "grad_norm": 0.9091696739196777, + "learning_rate": 0.00018430055214487047, + "loss": 0.6203, + "step": 6100 + }, + { + "epoch": 0.2359164446503726, + "grad_norm": 0.25899162888526917, + "learning_rate": 0.00018427481112526868, + "loss": 0.4759, + "step": 6110 + }, + { + "epoch": 0.2363025599443994, + "grad_norm": 1.8625538349151611, + "learning_rate": 0.0001842490701056669, + "loss": 0.2992, + "step": 6120 + }, + { + "epoch": 0.23668867523842618, + "grad_norm": 1.586521863937378, + "learning_rate": 0.0001842233290860651, + "loss": 0.6122, + "step": 6130 + }, + { + "epoch": 0.23707479053245298, + "grad_norm": 2.387650966644287, + "learning_rate": 0.00018419758806646332, + "loss": 0.3276, + "step": 6140 + }, + { + "epoch": 0.23746090582647977, + "grad_norm": 
4.840515613555908, + "learning_rate": 0.00018417184704686153, + "loss": 0.6295, + "step": 6150 + }, + { + "epoch": 0.2378470211205066, + "grad_norm": 1.70024836063385, + "learning_rate": 0.00018414610602725975, + "loss": 0.2047, + "step": 6160 + }, + { + "epoch": 0.2382331364145334, + "grad_norm": 2.791619062423706, + "learning_rate": 0.00018412036500765796, + "loss": 0.4364, + "step": 6170 + }, + { + "epoch": 0.2386192517085602, + "grad_norm": 3.710066318511963, + "learning_rate": 0.00018409462398805617, + "loss": 0.4564, + "step": 6180 + }, + { + "epoch": 0.23900536700258698, + "grad_norm": 2.564347982406616, + "learning_rate": 0.0001840688829684544, + "loss": 0.3156, + "step": 6190 + }, + { + "epoch": 0.23939148229661378, + "grad_norm": 2.3921267986297607, + "learning_rate": 0.0001840431419488526, + "loss": 0.3483, + "step": 6200 + }, + { + "epoch": 0.23977759759064057, + "grad_norm": 1.4785810708999634, + "learning_rate": 0.00018401740092925081, + "loss": 0.4338, + "step": 6210 + }, + { + "epoch": 0.24016371288466737, + "grad_norm": 3.624790906906128, + "learning_rate": 0.00018399165990964903, + "loss": 0.7156, + "step": 6220 + }, + { + "epoch": 0.24054982817869416, + "grad_norm": 3.942161798477173, + "learning_rate": 0.00018396591889004724, + "loss": 0.3932, + "step": 6230 + }, + { + "epoch": 0.24093594347272096, + "grad_norm": 3.2236740589141846, + "learning_rate": 0.00018394017787044545, + "loss": 0.3933, + "step": 6240 + }, + { + "epoch": 0.24132205876674775, + "grad_norm": 2.5040500164031982, + "learning_rate": 0.00018391443685084367, + "loss": 0.5711, + "step": 6250 + }, + { + "epoch": 0.24170817406077455, + "grad_norm": 1.9934203624725342, + "learning_rate": 0.00018388869583124188, + "loss": 0.3074, + "step": 6260 + }, + { + "epoch": 0.24209428935480135, + "grad_norm": 3.702509641647339, + "learning_rate": 0.0001838629548116401, + "loss": 0.3454, + "step": 6270 + }, + { + "epoch": 0.24248040464882814, + "grad_norm": 2.076802968978882, + "learning_rate": 0.0001838372137920383, + "loss": 0.3044, + "step": 6280 + }, + { + "epoch": 0.24286651994285494, + "grad_norm": 5.798679351806641, + "learning_rate": 0.00018381147277243652, + "loss": 0.3396, + "step": 6290 + }, + { + "epoch": 0.24325263523688173, + "grad_norm": 4.698869705200195, + "learning_rate": 0.00018378573175283473, + "loss": 0.3735, + "step": 6300 + }, + { + "epoch": 0.24363875053090853, + "grad_norm": 3.029979705810547, + "learning_rate": 0.00018375999073323295, + "loss": 0.3891, + "step": 6310 + }, + { + "epoch": 0.24402486582493532, + "grad_norm": 2.5507185459136963, + "learning_rate": 0.00018373424971363116, + "loss": 0.4854, + "step": 6320 + }, + { + "epoch": 0.24441098111896212, + "grad_norm": 3.2052571773529053, + "learning_rate": 0.00018370850869402937, + "loss": 0.6789, + "step": 6330 + }, + { + "epoch": 0.2447970964129889, + "grad_norm": 1.9265435934066772, + "learning_rate": 0.00018368276767442761, + "loss": 0.4505, + "step": 6340 + }, + { + "epoch": 0.2451832117070157, + "grad_norm": 0.8391959071159363, + "learning_rate": 0.0001836570266548258, + "loss": 0.3432, + "step": 6350 + }, + { + "epoch": 0.2455693270010425, + "grad_norm": 3.4653851985931396, + "learning_rate": 0.00018363128563522401, + "loss": 0.3571, + "step": 6360 + }, + { + "epoch": 0.2459554422950693, + "grad_norm": 2.3033368587493896, + "learning_rate": 0.00018360554461562223, + "loss": 0.3625, + "step": 6370 + }, + { + "epoch": 0.2463415575890961, + "grad_norm": 1.659408450126648, + "learning_rate": 0.00018357980359602044, + "loss": 0.5311, + 
"step": 6380 + }, + { + "epoch": 0.2467276728831229, + "grad_norm": 1.1839714050292969, + "learning_rate": 0.00018355406257641865, + "loss": 0.3905, + "step": 6390 + }, + { + "epoch": 0.24711378817714968, + "grad_norm": 0.49230822920799255, + "learning_rate": 0.00018352832155681687, + "loss": 0.4021, + "step": 6400 + }, + { + "epoch": 0.24749990347117648, + "grad_norm": 4.451594829559326, + "learning_rate": 0.0001835025805372151, + "loss": 0.4504, + "step": 6410 + }, + { + "epoch": 0.2478860187652033, + "grad_norm": 1.0058324337005615, + "learning_rate": 0.0001834768395176133, + "loss": 0.2636, + "step": 6420 + }, + { + "epoch": 0.2482721340592301, + "grad_norm": 2.7853894233703613, + "learning_rate": 0.0001834510984980115, + "loss": 0.47, + "step": 6430 + }, + { + "epoch": 0.2486582493532569, + "grad_norm": 2.730095148086548, + "learning_rate": 0.00018342535747840972, + "loss": 0.3941, + "step": 6440 + }, + { + "epoch": 0.2490443646472837, + "grad_norm": 2.4993178844451904, + "learning_rate": 0.00018339961645880793, + "loss": 0.5777, + "step": 6450 + }, + { + "epoch": 0.24943047994131048, + "grad_norm": 2.361525297164917, + "learning_rate": 0.00018337387543920617, + "loss": 0.3798, + "step": 6460 + }, + { + "epoch": 0.24981659523533728, + "grad_norm": 2.5558526515960693, + "learning_rate": 0.00018334813441960436, + "loss": 0.3113, + "step": 6470 + }, + { + "epoch": 0.25020271052936405, + "grad_norm": 0.8033503890037537, + "learning_rate": 0.0001833223934000026, + "loss": 0.5254, + "step": 6480 + }, + { + "epoch": 0.25058882582339087, + "grad_norm": 2.721090078353882, + "learning_rate": 0.0001832966523804008, + "loss": 0.393, + "step": 6490 + }, + { + "epoch": 0.25097494111741764, + "grad_norm": 1.7147916555404663, + "learning_rate": 0.000183270911360799, + "loss": 0.3225, + "step": 6500 + }, + { + "epoch": 0.25136105641144446, + "grad_norm": 2.388347864151001, + "learning_rate": 0.00018324517034119721, + "loss": 0.3519, + "step": 6510 + }, + { + "epoch": 0.25174717170547123, + "grad_norm": 2.470891237258911, + "learning_rate": 0.00018321942932159543, + "loss": 0.4384, + "step": 6520 + }, + { + "epoch": 0.25213328699949805, + "grad_norm": 1.4743351936340332, + "learning_rate": 0.00018319368830199367, + "loss": 0.2464, + "step": 6530 + }, + { + "epoch": 0.2525194022935248, + "grad_norm": 1.5889122486114502, + "learning_rate": 0.00018316794728239185, + "loss": 0.3149, + "step": 6540 + }, + { + "epoch": 0.25290551758755164, + "grad_norm": 4.900819778442383, + "learning_rate": 0.0001831422062627901, + "loss": 0.3978, + "step": 6550 + }, + { + "epoch": 0.25329163288157847, + "grad_norm": 5.22566556930542, + "learning_rate": 0.00018311646524318828, + "loss": 0.4473, + "step": 6560 + }, + { + "epoch": 0.25367774817560523, + "grad_norm": 4.7480363845825195, + "learning_rate": 0.0001830907242235865, + "loss": 0.3976, + "step": 6570 + }, + { + "epoch": 0.25406386346963206, + "grad_norm": 1.4711374044418335, + "learning_rate": 0.0001830649832039847, + "loss": 0.5183, + "step": 6580 + }, + { + "epoch": 0.2544499787636588, + "grad_norm": 2.237309217453003, + "learning_rate": 0.00018303924218438292, + "loss": 0.2171, + "step": 6590 + }, + { + "epoch": 0.25483609405768565, + "grad_norm": 4.107303619384766, + "learning_rate": 0.00018301350116478116, + "loss": 0.3918, + "step": 6600 + }, + { + "epoch": 0.2552222093517124, + "grad_norm": 4.7285003662109375, + "learning_rate": 0.00018298776014517935, + "loss": 0.2042, + "step": 6610 + }, + { + "epoch": 0.25560832464573924, + "grad_norm": 2.1333792209625244, 
+ "learning_rate": 0.0001829620191255776, + "loss": 0.3502, + "step": 6620 + }, + { + "epoch": 0.255994439939766, + "grad_norm": 3.062173843383789, + "learning_rate": 0.00018293627810597577, + "loss": 0.3949, + "step": 6630 + }, + { + "epoch": 0.25638055523379283, + "grad_norm": 1.538854956626892, + "learning_rate": 0.00018291053708637401, + "loss": 0.4613, + "step": 6640 + }, + { + "epoch": 0.2567666705278196, + "grad_norm": 2.546586751937866, + "learning_rate": 0.00018288479606677223, + "loss": 0.5868, + "step": 6650 + }, + { + "epoch": 0.2571527858218464, + "grad_norm": 2.7282049655914307, + "learning_rate": 0.00018285905504717041, + "loss": 0.4186, + "step": 6660 + }, + { + "epoch": 0.2575389011158732, + "grad_norm": 3.204634189605713, + "learning_rate": 0.00018283331402756865, + "loss": 0.4072, + "step": 6670 + }, + { + "epoch": 0.2579250164099, + "grad_norm": 2.421846866607666, + "learning_rate": 0.00018280757300796684, + "loss": 0.306, + "step": 6680 + }, + { + "epoch": 0.2583111317039268, + "grad_norm": 4.243416786193848, + "learning_rate": 0.00018278183198836508, + "loss": 0.2631, + "step": 6690 + }, + { + "epoch": 0.2586972469979536, + "grad_norm": 1.0495362281799316, + "learning_rate": 0.00018275609096876327, + "loss": 0.3488, + "step": 6700 + }, + { + "epoch": 0.25908336229198037, + "grad_norm": 1.915279746055603, + "learning_rate": 0.0001827303499491615, + "loss": 0.2589, + "step": 6710 + }, + { + "epoch": 0.2594694775860072, + "grad_norm": 3.724299192428589, + "learning_rate": 0.00018270460892955972, + "loss": 0.5118, + "step": 6720 + }, + { + "epoch": 0.25985559288003396, + "grad_norm": 2.832204580307007, + "learning_rate": 0.0001826788679099579, + "loss": 0.2508, + "step": 6730 + }, + { + "epoch": 0.2602417081740608, + "grad_norm": 1.1942508220672607, + "learning_rate": 0.00018265312689035615, + "loss": 0.4328, + "step": 6740 + }, + { + "epoch": 0.26062782346808755, + "grad_norm": 1.0741711854934692, + "learning_rate": 0.00018262738587075433, + "loss": 0.3514, + "step": 6750 + }, + { + "epoch": 0.2610139387621144, + "grad_norm": 2.9918277263641357, + "learning_rate": 0.00018260164485115257, + "loss": 0.3528, + "step": 6760 + }, + { + "epoch": 0.26140005405614114, + "grad_norm": 1.3773655891418457, + "learning_rate": 0.0001825759038315508, + "loss": 0.365, + "step": 6770 + }, + { + "epoch": 0.26178616935016796, + "grad_norm": 3.5288615226745605, + "learning_rate": 0.000182550162811949, + "loss": 0.3645, + "step": 6780 + }, + { + "epoch": 0.26217228464419473, + "grad_norm": 1.2178785800933838, + "learning_rate": 0.00018252442179234721, + "loss": 0.3742, + "step": 6790 + }, + { + "epoch": 0.26255839993822155, + "grad_norm": 2.7981081008911133, + "learning_rate": 0.0001824986807727454, + "loss": 0.6174, + "step": 6800 + }, + { + "epoch": 0.2629445152322484, + "grad_norm": 1.6766215562820435, + "learning_rate": 0.00018247293975314364, + "loss": 0.3028, + "step": 6810 + }, + { + "epoch": 0.26333063052627514, + "grad_norm": 3.7797629833221436, + "learning_rate": 0.00018244719873354183, + "loss": 0.2633, + "step": 6820 + }, + { + "epoch": 0.26371674582030197, + "grad_norm": 7.794743537902832, + "learning_rate": 0.00018242145771394007, + "loss": 0.3586, + "step": 6830 + }, + { + "epoch": 0.26410286111432874, + "grad_norm": 0.5704814195632935, + "learning_rate": 0.00018239571669433828, + "loss": 0.3506, + "step": 6840 + }, + { + "epoch": 0.26448897640835556, + "grad_norm": 5.771059513092041, + "learning_rate": 0.0001823699756747365, + "loss": 0.3881, + "step": 6850 + }, + { + 
"epoch": 0.2648750917023823, + "grad_norm": 2.723592519760132, + "learning_rate": 0.0001823442346551347, + "loss": 0.3955, + "step": 6860 + }, + { + "epoch": 0.26526120699640915, + "grad_norm": 1.5448215007781982, + "learning_rate": 0.0001823184936355329, + "loss": 0.495, + "step": 6870 + }, + { + "epoch": 0.2656473222904359, + "grad_norm": 2.2980363368988037, + "learning_rate": 0.00018229275261593113, + "loss": 0.2695, + "step": 6880 + }, + { + "epoch": 0.26603343758446274, + "grad_norm": 1.959811806678772, + "learning_rate": 0.00018226701159632932, + "loss": 0.383, + "step": 6890 + }, + { + "epoch": 0.2664195528784895, + "grad_norm": 2.1491482257843018, + "learning_rate": 0.00018224127057672756, + "loss": 0.5655, + "step": 6900 + }, + { + "epoch": 0.26680566817251633, + "grad_norm": 6.472841262817383, + "learning_rate": 0.00018221552955712577, + "loss": 0.4757, + "step": 6910 + }, + { + "epoch": 0.2671917834665431, + "grad_norm": 7.878561496734619, + "learning_rate": 0.000182189788537524, + "loss": 0.3944, + "step": 6920 + }, + { + "epoch": 0.2675778987605699, + "grad_norm": 0.052701435983181, + "learning_rate": 0.0001821640475179222, + "loss": 0.382, + "step": 6930 + }, + { + "epoch": 0.2679640140545967, + "grad_norm": 2.294677972793579, + "learning_rate": 0.00018213830649832039, + "loss": 0.2932, + "step": 6940 + }, + { + "epoch": 0.2683501293486235, + "grad_norm": 1.6058757305145264, + "learning_rate": 0.00018211256547871863, + "loss": 0.4438, + "step": 6950 + }, + { + "epoch": 0.2687362446426503, + "grad_norm": 4.003495693206787, + "learning_rate": 0.00018208682445911684, + "loss": 0.5945, + "step": 6960 + }, + { + "epoch": 0.2691223599366771, + "grad_norm": 1.423017144203186, + "learning_rate": 0.00018206108343951505, + "loss": 0.4356, + "step": 6970 + }, + { + "epoch": 0.26950847523070387, + "grad_norm": 2.206341028213501, + "learning_rate": 0.00018203534241991327, + "loss": 0.344, + "step": 6980 + }, + { + "epoch": 0.2698945905247307, + "grad_norm": 0.6644784212112427, + "learning_rate": 0.00018200960140031148, + "loss": 0.4988, + "step": 6990 + }, + { + "epoch": 0.27028070581875746, + "grad_norm": 2.4569833278656006, + "learning_rate": 0.0001819838603807097, + "loss": 0.3689, + "step": 7000 + }, + { + "epoch": 0.2706668211127843, + "grad_norm": 1.554567575454712, + "learning_rate": 0.00018195811936110788, + "loss": 0.4684, + "step": 7010 + }, + { + "epoch": 0.27105293640681105, + "grad_norm": 3.2556328773498535, + "learning_rate": 0.00018193237834150612, + "loss": 0.611, + "step": 7020 + }, + { + "epoch": 0.2714390517008379, + "grad_norm": 2.9123427867889404, + "learning_rate": 0.00018190663732190433, + "loss": 0.4278, + "step": 7030 + }, + { + "epoch": 0.27182516699486464, + "grad_norm": 2.159273862838745, + "learning_rate": 0.00018188089630230255, + "loss": 0.2384, + "step": 7040 + }, + { + "epoch": 0.27221128228889147, + "grad_norm": 3.4977822303771973, + "learning_rate": 0.00018185515528270076, + "loss": 0.5459, + "step": 7050 + }, + { + "epoch": 0.27259739758291823, + "grad_norm": 1.1822031736373901, + "learning_rate": 0.00018182941426309897, + "loss": 0.4364, + "step": 7060 + }, + { + "epoch": 0.27298351287694506, + "grad_norm": 2.4467339515686035, + "learning_rate": 0.00018180367324349719, + "loss": 0.5198, + "step": 7070 + }, + { + "epoch": 0.2733696281709719, + "grad_norm": 1.0406467914581299, + "learning_rate": 0.0001817779322238954, + "loss": 0.2797, + "step": 7080 + }, + { + "epoch": 0.27375574346499865, + "grad_norm": 1.925830602645874, + "learning_rate": 
0.0001817521912042936, + "loss": 0.4898, + "step": 7090 + }, + { + "epoch": 0.27414185875902547, + "grad_norm": 3.0385682582855225, + "learning_rate": 0.00018172645018469183, + "loss": 0.3867, + "step": 7100 + }, + { + "epoch": 0.27452797405305224, + "grad_norm": 1.5285695791244507, + "learning_rate": 0.00018170070916509004, + "loss": 0.4233, + "step": 7110 + }, + { + "epoch": 0.27491408934707906, + "grad_norm": 1.266693115234375, + "learning_rate": 0.00018167496814548825, + "loss": 0.4724, + "step": 7120 + }, + { + "epoch": 0.27530020464110583, + "grad_norm": 3.371323585510254, + "learning_rate": 0.00018164922712588647, + "loss": 0.533, + "step": 7130 + }, + { + "epoch": 0.27568631993513265, + "grad_norm": 2.662691116333008, + "learning_rate": 0.00018162348610628468, + "loss": 0.3134, + "step": 7140 + }, + { + "epoch": 0.2760724352291594, + "grad_norm": 1.8977057933807373, + "learning_rate": 0.0001815977450866829, + "loss": 0.3038, + "step": 7150 + }, + { + "epoch": 0.27645855052318624, + "grad_norm": 3.1027894020080566, + "learning_rate": 0.0001815720040670811, + "loss": 0.5074, + "step": 7160 + }, + { + "epoch": 0.276844665817213, + "grad_norm": 1.2112785577774048, + "learning_rate": 0.00018154626304747932, + "loss": 0.324, + "step": 7170 + }, + { + "epoch": 0.27723078111123983, + "grad_norm": 1.6500996351242065, + "learning_rate": 0.00018152052202787753, + "loss": 0.2856, + "step": 7180 + }, + { + "epoch": 0.2776168964052666, + "grad_norm": 3.215747833251953, + "learning_rate": 0.00018149478100827575, + "loss": 0.4522, + "step": 7190 + }, + { + "epoch": 0.2780030116992934, + "grad_norm": 4.8541059494018555, + "learning_rate": 0.00018146903998867396, + "loss": 0.4106, + "step": 7200 + }, + { + "epoch": 0.2783891269933202, + "grad_norm": 2.3697152137756348, + "learning_rate": 0.00018144329896907217, + "loss": 0.2673, + "step": 7210 + }, + { + "epoch": 0.278775242287347, + "grad_norm": 2.9693639278411865, + "learning_rate": 0.00018141755794947039, + "loss": 0.3949, + "step": 7220 + }, + { + "epoch": 0.2791613575813738, + "grad_norm": 2.691817283630371, + "learning_rate": 0.0001813918169298686, + "loss": 0.3427, + "step": 7230 + }, + { + "epoch": 0.2795474728754006, + "grad_norm": 5.197331428527832, + "learning_rate": 0.0001813660759102668, + "loss": 0.4331, + "step": 7240 + }, + { + "epoch": 0.27993358816942737, + "grad_norm": 1.5799933671951294, + "learning_rate": 0.00018134033489066503, + "loss": 0.3543, + "step": 7250 + }, + { + "epoch": 0.2803197034634542, + "grad_norm": 1.3614271879196167, + "learning_rate": 0.00018131459387106324, + "loss": 0.5289, + "step": 7260 + }, + { + "epoch": 0.28070581875748096, + "grad_norm": 2.2942802906036377, + "learning_rate": 0.00018128885285146145, + "loss": 0.4318, + "step": 7270 + }, + { + "epoch": 0.2810919340515078, + "grad_norm": 1.1805604696273804, + "learning_rate": 0.00018126311183185967, + "loss": 0.4754, + "step": 7280 + }, + { + "epoch": 0.28147804934553455, + "grad_norm": 0.5108867883682251, + "learning_rate": 0.00018123737081225788, + "loss": 0.4517, + "step": 7290 + }, + { + "epoch": 0.2818641646395614, + "grad_norm": 1.1736596822738647, + "learning_rate": 0.0001812116297926561, + "loss": 0.4538, + "step": 7300 + }, + { + "epoch": 0.28225027993358814, + "grad_norm": 5.497414588928223, + "learning_rate": 0.0001811858887730543, + "loss": 0.5116, + "step": 7310 + }, + { + "epoch": 0.28263639522761497, + "grad_norm": 1.1347368955612183, + "learning_rate": 0.00018116014775345252, + "loss": 0.3848, + "step": 7320 + }, + { + "epoch": 
0.28302251052164173, + "grad_norm": 2.740715742111206, + "learning_rate": 0.00018113440673385073, + "loss": 0.3456, + "step": 7330 + }, + { + "epoch": 0.28340862581566856, + "grad_norm": 1.3853389024734497, + "learning_rate": 0.00018110866571424897, + "loss": 0.3398, + "step": 7340 + }, + { + "epoch": 0.2837947411096954, + "grad_norm": 7.493706703186035, + "learning_rate": 0.00018108292469464716, + "loss": 0.2726, + "step": 7350 + }, + { + "epoch": 0.28418085640372215, + "grad_norm": 1.81704843044281, + "learning_rate": 0.00018105718367504537, + "loss": 0.3818, + "step": 7360 + }, + { + "epoch": 0.28456697169774897, + "grad_norm": 2.4877755641937256, + "learning_rate": 0.00018103144265544359, + "loss": 0.3499, + "step": 7370 + }, + { + "epoch": 0.28495308699177574, + "grad_norm": 1.3704471588134766, + "learning_rate": 0.0001810057016358418, + "loss": 0.2346, + "step": 7380 + }, + { + "epoch": 0.28533920228580256, + "grad_norm": 2.664745569229126, + "learning_rate": 0.00018097996061624, + "loss": 0.4041, + "step": 7390 + }, + { + "epoch": 0.28572531757982933, + "grad_norm": 3.6539089679718018, + "learning_rate": 0.00018095421959663823, + "loss": 0.2885, + "step": 7400 + }, + { + "epoch": 0.28611143287385615, + "grad_norm": 0.8653857707977295, + "learning_rate": 0.00018092847857703647, + "loss": 0.3849, + "step": 7410 + }, + { + "epoch": 0.2864975481678829, + "grad_norm": 2.6319446563720703, + "learning_rate": 0.00018090273755743465, + "loss": 0.2728, + "step": 7420 + }, + { + "epoch": 0.28688366346190974, + "grad_norm": 2.3457818031311035, + "learning_rate": 0.00018087699653783287, + "loss": 0.446, + "step": 7430 + }, + { + "epoch": 0.2872697787559365, + "grad_norm": 0.8546158671379089, + "learning_rate": 0.00018085125551823108, + "loss": 0.2898, + "step": 7440 + }, + { + "epoch": 0.28765589404996333, + "grad_norm": 0.45937278866767883, + "learning_rate": 0.0001808255144986293, + "loss": 0.583, + "step": 7450 + }, + { + "epoch": 0.2880420093439901, + "grad_norm": 1.7129520177841187, + "learning_rate": 0.00018079977347902753, + "loss": 0.4908, + "step": 7460 + }, + { + "epoch": 0.2884281246380169, + "grad_norm": 4.106715679168701, + "learning_rate": 0.00018077403245942572, + "loss": 0.3373, + "step": 7470 + }, + { + "epoch": 0.2888142399320437, + "grad_norm": 3.8112800121307373, + "learning_rate": 0.00018074829143982396, + "loss": 0.392, + "step": 7480 + }, + { + "epoch": 0.2892003552260705, + "grad_norm": 0.5382593274116516, + "learning_rate": 0.00018072255042022215, + "loss": 0.2929, + "step": 7490 + }, + { + "epoch": 0.2895864705200973, + "grad_norm": 2.50888991355896, + "learning_rate": 0.00018069680940062036, + "loss": 0.3361, + "step": 7500 + }, + { + "epoch": 0.2899725858141241, + "grad_norm": 3.3544275760650635, + "learning_rate": 0.00018067106838101857, + "loss": 0.388, + "step": 7510 + }, + { + "epoch": 0.2903587011081509, + "grad_norm": 1.192386507987976, + "learning_rate": 0.00018064532736141679, + "loss": 0.4427, + "step": 7520 + }, + { + "epoch": 0.2907448164021777, + "grad_norm": 1.5527079105377197, + "learning_rate": 0.00018061958634181503, + "loss": 0.4023, + "step": 7530 + }, + { + "epoch": 0.29113093169620446, + "grad_norm": 0.67446368932724, + "learning_rate": 0.0001805938453222132, + "loss": 0.4949, + "step": 7540 + }, + { + "epoch": 0.2915170469902313, + "grad_norm": 1.6349838972091675, + "learning_rate": 0.00018056810430261145, + "loss": 0.3811, + "step": 7550 + }, + { + "epoch": 0.29190316228425806, + "grad_norm": 1.4848904609680176, + "learning_rate": 
0.00018054236328300964, + "loss": 0.3851, + "step": 7560 + }, + { + "epoch": 0.2922892775782849, + "grad_norm": 0.9933151006698608, + "learning_rate": 0.00018051662226340785, + "loss": 0.4699, + "step": 7570 + }, + { + "epoch": 0.29267539287231165, + "grad_norm": 1.1026233434677124, + "learning_rate": 0.00018049088124380607, + "loss": 0.3287, + "step": 7580 + }, + { + "epoch": 0.29306150816633847, + "grad_norm": 1.232954740524292, + "learning_rate": 0.00018046514022420428, + "loss": 0.3722, + "step": 7590 + }, + { + "epoch": 0.2934476234603653, + "grad_norm": 3.8303146362304688, + "learning_rate": 0.00018043939920460252, + "loss": 0.2985, + "step": 7600 + }, + { + "epoch": 0.29383373875439206, + "grad_norm": 1.9358845949172974, + "learning_rate": 0.0001804136581850007, + "loss": 0.4361, + "step": 7610 + }, + { + "epoch": 0.2942198540484189, + "grad_norm": 1.8905962705612183, + "learning_rate": 0.00018038791716539895, + "loss": 0.2835, + "step": 7620 + }, + { + "epoch": 0.29460596934244565, + "grad_norm": 1.9965651035308838, + "learning_rate": 0.00018036217614579713, + "loss": 0.5387, + "step": 7630 + }, + { + "epoch": 0.2949920846364725, + "grad_norm": 4.204270839691162, + "learning_rate": 0.00018033643512619535, + "loss": 0.3498, + "step": 7640 + }, + { + "epoch": 0.29537819993049924, + "grad_norm": 1.4732340574264526, + "learning_rate": 0.00018031069410659359, + "loss": 0.315, + "step": 7650 + }, + { + "epoch": 0.29576431522452606, + "grad_norm": 1.0233594179153442, + "learning_rate": 0.00018028495308699177, + "loss": 0.1536, + "step": 7660 + }, + { + "epoch": 0.29615043051855283, + "grad_norm": 3.1531457901000977, + "learning_rate": 0.00018025921206739, + "loss": 0.3793, + "step": 7670 + }, + { + "epoch": 0.29653654581257965, + "grad_norm": 0.8080945014953613, + "learning_rate": 0.0001802334710477882, + "loss": 0.5589, + "step": 7680 + }, + { + "epoch": 0.2969226611066064, + "grad_norm": 3.1202728748321533, + "learning_rate": 0.00018020773002818644, + "loss": 0.4652, + "step": 7690 + }, + { + "epoch": 0.29730877640063325, + "grad_norm": 2.5934784412384033, + "learning_rate": 0.00018018198900858463, + "loss": 0.4921, + "step": 7700 + }, + { + "epoch": 0.29769489169466, + "grad_norm": 2.858642101287842, + "learning_rate": 0.00018015624798898284, + "loss": 0.2732, + "step": 7710 + }, + { + "epoch": 0.29808100698868684, + "grad_norm": 3.621229887008667, + "learning_rate": 0.00018013050696938108, + "loss": 0.5639, + "step": 7720 + }, + { + "epoch": 0.2984671222827136, + "grad_norm": 3.7943220138549805, + "learning_rate": 0.00018010476594977926, + "loss": 0.3177, + "step": 7730 + }, + { + "epoch": 0.2988532375767404, + "grad_norm": 1.6371623277664185, + "learning_rate": 0.0001800790249301775, + "loss": 0.4211, + "step": 7740 + }, + { + "epoch": 0.2992393528707672, + "grad_norm": 1.9557713270187378, + "learning_rate": 0.0001800532839105757, + "loss": 0.4351, + "step": 7750 + }, + { + "epoch": 0.299625468164794, + "grad_norm": 2.684964895248413, + "learning_rate": 0.00018002754289097393, + "loss": 0.39, + "step": 7760 + }, + { + "epoch": 0.3000115834588208, + "grad_norm": 1.7401316165924072, + "learning_rate": 0.00018000180187137215, + "loss": 0.2844, + "step": 7770 + }, + { + "epoch": 0.3003976987528476, + "grad_norm": 0.6305844187736511, + "learning_rate": 0.00017997606085177033, + "loss": 0.2472, + "step": 7780 + }, + { + "epoch": 0.3007838140468744, + "grad_norm": 2.2880289554595947, + "learning_rate": 0.00017995031983216857, + "loss": 0.3952, + "step": 7790 + }, + { + "epoch": 
0.3011699293409012, + "grad_norm": 3.423980951309204, + "learning_rate": 0.00017992457881256676, + "loss": 0.4459, + "step": 7800 + }, + { + "epoch": 0.30155604463492797, + "grad_norm": 0.6920475363731384, + "learning_rate": 0.000179898837792965, + "loss": 0.2909, + "step": 7810 + }, + { + "epoch": 0.3019421599289548, + "grad_norm": 0.8905349373817444, + "learning_rate": 0.00017987309677336318, + "loss": 0.346, + "step": 7820 + }, + { + "epoch": 0.30232827522298156, + "grad_norm": 1.8836702108383179, + "learning_rate": 0.00017984735575376143, + "loss": 0.4038, + "step": 7830 + }, + { + "epoch": 0.3027143905170084, + "grad_norm": 2.6712753772735596, + "learning_rate": 0.00017982161473415964, + "loss": 0.3452, + "step": 7840 + }, + { + "epoch": 0.30310050581103515, + "grad_norm": 2.344122886657715, + "learning_rate": 0.00017979587371455785, + "loss": 0.5091, + "step": 7850 + }, + { + "epoch": 0.30348662110506197, + "grad_norm": 3.734415054321289, + "learning_rate": 0.00017977013269495607, + "loss": 0.3893, + "step": 7860 + }, + { + "epoch": 0.3038727363990888, + "grad_norm": 1.70572829246521, + "learning_rate": 0.00017974439167535425, + "loss": 0.4829, + "step": 7870 + }, + { + "epoch": 0.30425885169311556, + "grad_norm": 1.779189109802246, + "learning_rate": 0.0001797186506557525, + "loss": 0.5361, + "step": 7880 + }, + { + "epoch": 0.3046449669871424, + "grad_norm": 2.888803482055664, + "learning_rate": 0.00017969290963615068, + "loss": 0.4305, + "step": 7890 + }, + { + "epoch": 0.30503108228116915, + "grad_norm": 1.2247655391693115, + "learning_rate": 0.00017966716861654892, + "loss": 0.3817, + "step": 7900 + }, + { + "epoch": 0.305417197575196, + "grad_norm": 2.995152473449707, + "learning_rate": 0.00017964142759694713, + "loss": 0.4669, + "step": 7910 + }, + { + "epoch": 0.30580331286922274, + "grad_norm": 8.049060821533203, + "learning_rate": 0.00017961568657734535, + "loss": 0.6706, + "step": 7920 + }, + { + "epoch": 0.30618942816324957, + "grad_norm": 2.1181435585021973, + "learning_rate": 0.00017958994555774356, + "loss": 0.4353, + "step": 7930 + }, + { + "epoch": 0.30657554345727633, + "grad_norm": 8.394509315490723, + "learning_rate": 0.00017956420453814174, + "loss": 0.3497, + "step": 7940 + }, + { + "epoch": 0.30696165875130316, + "grad_norm": 2.5140750408172607, + "learning_rate": 0.00017953846351853998, + "loss": 0.5774, + "step": 7950 + }, + { + "epoch": 0.3073477740453299, + "grad_norm": 2.720942974090576, + "learning_rate": 0.0001795127224989382, + "loss": 0.4457, + "step": 7960 + }, + { + "epoch": 0.30773388933935675, + "grad_norm": 1.8155667781829834, + "learning_rate": 0.0001794869814793364, + "loss": 0.4155, + "step": 7970 + }, + { + "epoch": 0.3081200046333835, + "grad_norm": 1.9989752769470215, + "learning_rate": 0.00017946124045973462, + "loss": 0.3233, + "step": 7980 + }, + { + "epoch": 0.30850611992741034, + "grad_norm": 0.7483557462692261, + "learning_rate": 0.00017943549944013284, + "loss": 0.2932, + "step": 7990 + }, + { + "epoch": 0.3088922352214371, + "grad_norm": 0.5750642418861389, + "learning_rate": 0.00017940975842053105, + "loss": 0.401, + "step": 8000 + }, + { + "epoch": 0.30927835051546393, + "grad_norm": 1.2084500789642334, + "learning_rate": 0.00017938401740092924, + "loss": 0.3705, + "step": 8010 + }, + { + "epoch": 0.3096644658094907, + "grad_norm": 1.833434820175171, + "learning_rate": 0.00017935827638132748, + "loss": 0.3507, + "step": 8020 + }, + { + "epoch": 0.3100505811035175, + "grad_norm": 3.147508382797241, + "learning_rate": 
0.0001793325353617257, + "loss": 0.3255, + "step": 8030 + }, + { + "epoch": 0.3104366963975443, + "grad_norm": 2.150932788848877, + "learning_rate": 0.0001793067943421239, + "loss": 0.3401, + "step": 8040 + }, + { + "epoch": 0.3108228116915711, + "grad_norm": 3.3340635299682617, + "learning_rate": 0.00017928105332252212, + "loss": 0.3606, + "step": 8050 + }, + { + "epoch": 0.3112089269855979, + "grad_norm": 5.173205375671387, + "learning_rate": 0.00017925531230292033, + "loss": 0.1695, + "step": 8060 + }, + { + "epoch": 0.3115950422796247, + "grad_norm": 1.0863877534866333, + "learning_rate": 0.00017922957128331854, + "loss": 0.3038, + "step": 8070 + }, + { + "epoch": 0.31198115757365147, + "grad_norm": 1.5977118015289307, + "learning_rate": 0.00017920383026371676, + "loss": 0.2291, + "step": 8080 + }, + { + "epoch": 0.3123672728676783, + "grad_norm": 4.040243625640869, + "learning_rate": 0.00017917808924411497, + "loss": 0.8538, + "step": 8090 + }, + { + "epoch": 0.31275338816170506, + "grad_norm": 1.5926854610443115, + "learning_rate": 0.00017915234822451318, + "loss": 0.4733, + "step": 8100 + }, + { + "epoch": 0.3131395034557319, + "grad_norm": 1.0959421396255493, + "learning_rate": 0.0001791266072049114, + "loss": 0.6076, + "step": 8110 + }, + { + "epoch": 0.3135256187497587, + "grad_norm": 2.786085367202759, + "learning_rate": 0.0001791008661853096, + "loss": 0.3229, + "step": 8120 + }, + { + "epoch": 0.3139117340437855, + "grad_norm": 2.2573914527893066, + "learning_rate": 0.00017907512516570782, + "loss": 0.3676, + "step": 8130 + }, + { + "epoch": 0.3142978493378123, + "grad_norm": 2.271852493286133, + "learning_rate": 0.00017904938414610604, + "loss": 0.6275, + "step": 8140 + }, + { + "epoch": 0.31468396463183906, + "grad_norm": 1.9762821197509766, + "learning_rate": 0.00017902364312650425, + "loss": 0.2232, + "step": 8150 + }, + { + "epoch": 0.3150700799258659, + "grad_norm": 2.9960873126983643, + "learning_rate": 0.00017899790210690246, + "loss": 0.4739, + "step": 8160 + }, + { + "epoch": 0.31545619521989265, + "grad_norm": 1.142216682434082, + "learning_rate": 0.00017897216108730068, + "loss": 0.5983, + "step": 8170 + }, + { + "epoch": 0.3158423105139195, + "grad_norm": 1.7127768993377686, + "learning_rate": 0.0001789464200676989, + "loss": 0.4131, + "step": 8180 + }, + { + "epoch": 0.31622842580794625, + "grad_norm": 1.579793095588684, + "learning_rate": 0.0001789206790480971, + "loss": 0.3119, + "step": 8190 + }, + { + "epoch": 0.31661454110197307, + "grad_norm": 0.9647886157035828, + "learning_rate": 0.00017889493802849532, + "loss": 0.398, + "step": 8200 + }, + { + "epoch": 0.31700065639599984, + "grad_norm": 3.435312032699585, + "learning_rate": 0.00017886919700889353, + "loss": 0.405, + "step": 8210 + }, + { + "epoch": 0.31738677169002666, + "grad_norm": 2.1500205993652344, + "learning_rate": 0.00017884345598929174, + "loss": 0.2519, + "step": 8220 + }, + { + "epoch": 0.3177728869840534, + "grad_norm": 1.3107216358184814, + "learning_rate": 0.00017881771496968996, + "loss": 0.2846, + "step": 8230 + }, + { + "epoch": 0.31815900227808025, + "grad_norm": 0.1899029165506363, + "learning_rate": 0.00017879197395008817, + "loss": 0.4597, + "step": 8240 + }, + { + "epoch": 0.318545117572107, + "grad_norm": 2.329299211502075, + "learning_rate": 0.00017876623293048638, + "loss": 0.6523, + "step": 8250 + }, + { + "epoch": 0.31893123286613384, + "grad_norm": 0.41523978114128113, + "learning_rate": 0.0001787404919108846, + "loss": 0.3266, + "step": 8260 + }, + { + "epoch": 
0.3193173481601606, + "grad_norm": 0.7914639711380005, + "learning_rate": 0.0001787147508912828, + "loss": 0.4029, + "step": 8270 + }, + { + "epoch": 0.31970346345418743, + "grad_norm": 0.6159287691116333, + "learning_rate": 0.00017868900987168102, + "loss": 0.4426, + "step": 8280 + }, + { + "epoch": 0.3200895787482142, + "grad_norm": 1.3690640926361084, + "learning_rate": 0.00017866326885207924, + "loss": 0.2974, + "step": 8290 + }, + { + "epoch": 0.320475694042241, + "grad_norm": 0.8592869639396667, + "learning_rate": 0.00017863752783247745, + "loss": 0.232, + "step": 8300 + }, + { + "epoch": 0.3208618093362678, + "grad_norm": 0.43169018626213074, + "learning_rate": 0.00017861178681287566, + "loss": 0.4033, + "step": 8310 + }, + { + "epoch": 0.3212479246302946, + "grad_norm": 0.8405828475952148, + "learning_rate": 0.00017858604579327388, + "loss": 0.3339, + "step": 8320 + }, + { + "epoch": 0.3216340399243214, + "grad_norm": 2.3412604331970215, + "learning_rate": 0.0001785603047736721, + "loss": 0.2781, + "step": 8330 + }, + { + "epoch": 0.3220201552183482, + "grad_norm": 2.412045478820801, + "learning_rate": 0.0001785345637540703, + "loss": 0.4346, + "step": 8340 + }, + { + "epoch": 0.32240627051237497, + "grad_norm": 3.626305341720581, + "learning_rate": 0.00017850882273446852, + "loss": 0.327, + "step": 8350 + }, + { + "epoch": 0.3227923858064018, + "grad_norm": 0.5645825266838074, + "learning_rate": 0.00017848308171486673, + "loss": 0.234, + "step": 8360 + }, + { + "epoch": 0.32317850110042856, + "grad_norm": 4.27307653427124, + "learning_rate": 0.00017845734069526494, + "loss": 0.5493, + "step": 8370 + }, + { + "epoch": 0.3235646163944554, + "grad_norm": 0.4511154890060425, + "learning_rate": 0.00017843159967566316, + "loss": 0.3501, + "step": 8380 + }, + { + "epoch": 0.3239507316884822, + "grad_norm": 0.314996600151062, + "learning_rate": 0.00017840585865606137, + "loss": 0.3544, + "step": 8390 + }, + { + "epoch": 0.324336846982509, + "grad_norm": 1.6546530723571777, + "learning_rate": 0.00017838011763645958, + "loss": 0.2455, + "step": 8400 + }, + { + "epoch": 0.3247229622765358, + "grad_norm": 3.2812252044677734, + "learning_rate": 0.0001783543766168578, + "loss": 0.3333, + "step": 8410 + }, + { + "epoch": 0.32510907757056257, + "grad_norm": 3.5717616081237793, + "learning_rate": 0.000178328635597256, + "loss": 0.4679, + "step": 8420 + }, + { + "epoch": 0.3254951928645894, + "grad_norm": 1.12017023563385, + "learning_rate": 0.00017830289457765422, + "loss": 0.3481, + "step": 8430 + }, + { + "epoch": 0.32588130815861616, + "grad_norm": 1.869462490081787, + "learning_rate": 0.00017827715355805244, + "loss": 0.4566, + "step": 8440 + }, + { + "epoch": 0.326267423452643, + "grad_norm": 1.4613149166107178, + "learning_rate": 0.00017825141253845065, + "loss": 0.5456, + "step": 8450 + }, + { + "epoch": 0.32665353874666975, + "grad_norm": 0.6842670440673828, + "learning_rate": 0.0001782256715188489, + "loss": 0.2776, + "step": 8460 + }, + { + "epoch": 0.32703965404069657, + "grad_norm": 2.9485504627227783, + "learning_rate": 0.00017819993049924708, + "loss": 0.3204, + "step": 8470 + }, + { + "epoch": 0.32742576933472334, + "grad_norm": 3.2084853649139404, + "learning_rate": 0.0001781741894796453, + "loss": 0.3391, + "step": 8480 + }, + { + "epoch": 0.32781188462875016, + "grad_norm": 1.9732774496078491, + "learning_rate": 0.0001781484484600435, + "loss": 0.3283, + "step": 8490 + }, + { + "epoch": 0.32819799992277693, + "grad_norm": 0.6378610134124756, + "learning_rate": 
0.00017812270744044172, + "loss": 0.4519, + "step": 8500 + }, + { + "epoch": 0.32858411521680375, + "grad_norm": 4.108947277069092, + "learning_rate": 0.00017809696642083993, + "loss": 0.4933, + "step": 8510 + }, + { + "epoch": 0.3289702305108305, + "grad_norm": 2.7623212337493896, + "learning_rate": 0.00017807122540123814, + "loss": 0.4197, + "step": 8520 + }, + { + "epoch": 0.32935634580485734, + "grad_norm": 1.8904645442962646, + "learning_rate": 0.00017804548438163638, + "loss": 0.325, + "step": 8530 + }, + { + "epoch": 0.3297424610988841, + "grad_norm": 0.5131659507751465, + "learning_rate": 0.00017801974336203457, + "loss": 0.2403, + "step": 8540 + }, + { + "epoch": 0.33012857639291093, + "grad_norm": 2.965916633605957, + "learning_rate": 0.0001779940023424328, + "loss": 0.4597, + "step": 8550 + }, + { + "epoch": 0.3305146916869377, + "grad_norm": 1.5409698486328125, + "learning_rate": 0.000177968261322831, + "loss": 0.4594, + "step": 8560 + }, + { + "epoch": 0.3309008069809645, + "grad_norm": 1.1746805906295776, + "learning_rate": 0.0001779425203032292, + "loss": 0.4581, + "step": 8570 + }, + { + "epoch": 0.3312869222749913, + "grad_norm": 4.493356227874756, + "learning_rate": 0.00017791677928362745, + "loss": 0.4699, + "step": 8580 + }, + { + "epoch": 0.3316730375690181, + "grad_norm": 3.506526470184326, + "learning_rate": 0.00017789103826402564, + "loss": 0.3974, + "step": 8590 + }, + { + "epoch": 0.3320591528630449, + "grad_norm": 2.3893234729766846, + "learning_rate": 0.00017786529724442388, + "loss": 0.2823, + "step": 8600 + }, + { + "epoch": 0.3324452681570717, + "grad_norm": 1.6228163242340088, + "learning_rate": 0.00017783955622482206, + "loss": 0.4199, + "step": 8610 + }, + { + "epoch": 0.3328313834510985, + "grad_norm": 3.2869131565093994, + "learning_rate": 0.0001778138152052203, + "loss": 0.3173, + "step": 8620 + }, + { + "epoch": 0.3332174987451253, + "grad_norm": 5.547116279602051, + "learning_rate": 0.0001777880741856185, + "loss": 0.4584, + "step": 8630 + }, + { + "epoch": 0.33360361403915206, + "grad_norm": 1.3338594436645508, + "learning_rate": 0.0001777623331660167, + "loss": 0.4235, + "step": 8640 + }, + { + "epoch": 0.3339897293331789, + "grad_norm": 1.9165093898773193, + "learning_rate": 0.00017773659214641494, + "loss": 0.2989, + "step": 8650 + }, + { + "epoch": 0.3343758446272057, + "grad_norm": 1.968935251235962, + "learning_rate": 0.00017771085112681313, + "loss": 0.4194, + "step": 8660 + }, + { + "epoch": 0.3347619599212325, + "grad_norm": 9.66997241973877, + "learning_rate": 0.00017768511010721137, + "loss": 0.5818, + "step": 8670 + }, + { + "epoch": 0.3351480752152593, + "grad_norm": 2.3636281490325928, + "learning_rate": 0.00017765936908760956, + "loss": 0.3317, + "step": 8680 + }, + { + "epoch": 0.33553419050928607, + "grad_norm": 3.3569977283477783, + "learning_rate": 0.0001776336280680078, + "loss": 0.4388, + "step": 8690 + }, + { + "epoch": 0.3359203058033129, + "grad_norm": 1.2452306747436523, + "learning_rate": 0.00017760788704840598, + "loss": 0.1368, + "step": 8700 + }, + { + "epoch": 0.33630642109733966, + "grad_norm": 0.0380173958837986, + "learning_rate": 0.0001775821460288042, + "loss": 0.3264, + "step": 8710 + }, + { + "epoch": 0.3366925363913665, + "grad_norm": 1.5271002054214478, + "learning_rate": 0.00017755640500920244, + "loss": 0.2943, + "step": 8720 + }, + { + "epoch": 0.33707865168539325, + "grad_norm": 0.9701687693595886, + "learning_rate": 0.00017753066398960062, + "loss": 0.353, + "step": 8730 + }, + { + "epoch": 
0.33746476697942007, + "grad_norm": 1.9296154975891113, + "learning_rate": 0.00017750492296999886, + "loss": 0.3776, + "step": 8740 + }, + { + "epoch": 0.33785088227344684, + "grad_norm": 1.2136276960372925, + "learning_rate": 0.00017747918195039705, + "loss": 0.5126, + "step": 8750 + }, + { + "epoch": 0.33823699756747366, + "grad_norm": 1.7323212623596191, + "learning_rate": 0.0001774534409307953, + "loss": 0.3477, + "step": 8760 + }, + { + "epoch": 0.33862311286150043, + "grad_norm": 1.164534091949463, + "learning_rate": 0.0001774276999111935, + "loss": 0.4053, + "step": 8770 + }, + { + "epoch": 0.33900922815552725, + "grad_norm": 0.42989471554756165, + "learning_rate": 0.0001774019588915917, + "loss": 0.3026, + "step": 8780 + }, + { + "epoch": 0.339395343449554, + "grad_norm": 2.357590436935425, + "learning_rate": 0.00017737621787198993, + "loss": 0.3869, + "step": 8790 + }, + { + "epoch": 0.33978145874358084, + "grad_norm": 1.9374550580978394, + "learning_rate": 0.00017735047685238812, + "loss": 0.2975, + "step": 8800 + }, + { + "epoch": 0.3401675740376076, + "grad_norm": 4.8107428550720215, + "learning_rate": 0.00017732473583278636, + "loss": 0.3959, + "step": 8810 + }, + { + "epoch": 0.34055368933163443, + "grad_norm": 1.938700556755066, + "learning_rate": 0.00017729899481318454, + "loss": 0.3726, + "step": 8820 + }, + { + "epoch": 0.3409398046256612, + "grad_norm": 3.147167682647705, + "learning_rate": 0.00017727325379358278, + "loss": 0.1828, + "step": 8830 + }, + { + "epoch": 0.341325919919688, + "grad_norm": 1.8921313285827637, + "learning_rate": 0.000177247512773981, + "loss": 0.2038, + "step": 8840 + }, + { + "epoch": 0.3417120352137148, + "grad_norm": 0.9098349213600159, + "learning_rate": 0.00017722177175437918, + "loss": 0.2853, + "step": 8850 + }, + { + "epoch": 0.3420981505077416, + "grad_norm": 2.4006853103637695, + "learning_rate": 0.00017719603073477742, + "loss": 0.6054, + "step": 8860 + }, + { + "epoch": 0.3424842658017684, + "grad_norm": 1.9303867816925049, + "learning_rate": 0.0001771702897151756, + "loss": 0.3507, + "step": 8870 + }, + { + "epoch": 0.3428703810957952, + "grad_norm": 0.49361029267311096, + "learning_rate": 0.00017714454869557385, + "loss": 0.4661, + "step": 8880 + }, + { + "epoch": 0.343256496389822, + "grad_norm": 2.542618751525879, + "learning_rate": 0.00017711880767597204, + "loss": 0.6924, + "step": 8890 + }, + { + "epoch": 0.3436426116838488, + "grad_norm": 0.5868918895721436, + "learning_rate": 0.00017709306665637028, + "loss": 0.4507, + "step": 8900 + }, + { + "epoch": 0.3440287269778756, + "grad_norm": 2.4685137271881104, + "learning_rate": 0.0001770673256367685, + "loss": 0.4538, + "step": 8910 + }, + { + "epoch": 0.3444148422719024, + "grad_norm": 2.6662702560424805, + "learning_rate": 0.00017704158461716668, + "loss": 0.6181, + "step": 8920 + }, + { + "epoch": 0.3448009575659292, + "grad_norm": 1.705103874206543, + "learning_rate": 0.00017701584359756492, + "loss": 0.481, + "step": 8930 + }, + { + "epoch": 0.345187072859956, + "grad_norm": 2.0710952281951904, + "learning_rate": 0.0001769901025779631, + "loss": 0.4357, + "step": 8940 + }, + { + "epoch": 0.3455731881539828, + "grad_norm": 3.487117290496826, + "learning_rate": 0.00017696436155836134, + "loss": 0.4572, + "step": 8950 + }, + { + "epoch": 0.34595930344800957, + "grad_norm": 3.03472900390625, + "learning_rate": 0.00017693862053875956, + "loss": 0.4437, + "step": 8960 + }, + { + "epoch": 0.3463454187420364, + "grad_norm": 1.310692548751831, + "learning_rate": 
0.00017691287951915777, + "loss": 0.4218, + "step": 8970 + }, + { + "epoch": 0.34673153403606316, + "grad_norm": 4.131219387054443, + "learning_rate": 0.00017688713849955598, + "loss": 0.445, + "step": 8980 + }, + { + "epoch": 0.34711764933009, + "grad_norm": 1.4199285507202148, + "learning_rate": 0.00017686139747995417, + "loss": 0.3093, + "step": 8990 + }, + { + "epoch": 0.34750376462411675, + "grad_norm": 1.9338914155960083, + "learning_rate": 0.0001768356564603524, + "loss": 0.275, + "step": 9000 + }, + { + "epoch": 0.3478898799181436, + "grad_norm": 3.425877332687378, + "learning_rate": 0.0001768099154407506, + "loss": 0.4621, + "step": 9010 + }, + { + "epoch": 0.34827599521217034, + "grad_norm": 3.6350486278533936, + "learning_rate": 0.00017678417442114884, + "loss": 0.3313, + "step": 9020 + }, + { + "epoch": 0.34866211050619716, + "grad_norm": 3.339202880859375, + "learning_rate": 0.00017675843340154705, + "loss": 0.5958, + "step": 9030 + }, + { + "epoch": 0.34904822580022393, + "grad_norm": 4.148682117462158, + "learning_rate": 0.00017673269238194526, + "loss": 0.2384, + "step": 9040 + }, + { + "epoch": 0.34943434109425076, + "grad_norm": 0.9697182178497314, + "learning_rate": 0.00017670695136234348, + "loss": 0.3119, + "step": 9050 + }, + { + "epoch": 0.3498204563882775, + "grad_norm": 0.53201824426651, + "learning_rate": 0.0001766812103427417, + "loss": 0.4339, + "step": 9060 + }, + { + "epoch": 0.35020657168230435, + "grad_norm": 0.9727185368537903, + "learning_rate": 0.0001766554693231399, + "loss": 0.3289, + "step": 9070 + }, + { + "epoch": 0.3505926869763311, + "grad_norm": 4.32904052734375, + "learning_rate": 0.00017662972830353812, + "loss": 0.4673, + "step": 9080 + }, + { + "epoch": 0.35097880227035794, + "grad_norm": 2.511558771133423, + "learning_rate": 0.00017660398728393633, + "loss": 0.2257, + "step": 9090 + }, + { + "epoch": 0.3513649175643847, + "grad_norm": 1.8378714323043823, + "learning_rate": 0.00017657824626433454, + "loss": 0.3977, + "step": 9100 + }, + { + "epoch": 0.3517510328584115, + "grad_norm": 1.3297137022018433, + "learning_rate": 0.00017655250524473276, + "loss": 0.3541, + "step": 9110 + }, + { + "epoch": 0.3521371481524383, + "grad_norm": 3.253089666366577, + "learning_rate": 0.00017652676422513097, + "loss": 0.6326, + "step": 9120 + }, + { + "epoch": 0.3525232634464651, + "grad_norm": 0.9691923260688782, + "learning_rate": 0.00017650102320552918, + "loss": 0.2206, + "step": 9130 + }, + { + "epoch": 0.3529093787404919, + "grad_norm": 1.570204496383667, + "learning_rate": 0.0001764752821859274, + "loss": 0.2769, + "step": 9140 + }, + { + "epoch": 0.3532954940345187, + "grad_norm": 1.9307161569595337, + "learning_rate": 0.0001764495411663256, + "loss": 0.3149, + "step": 9150 + }, + { + "epoch": 0.3536816093285455, + "grad_norm": 2.783297300338745, + "learning_rate": 0.00017642380014672382, + "loss": 0.3912, + "step": 9160 + }, + { + "epoch": 0.3540677246225723, + "grad_norm": 2.193371057510376, + "learning_rate": 0.00017639805912712204, + "loss": 0.3782, + "step": 9170 + }, + { + "epoch": 0.3544538399165991, + "grad_norm": 2.3460335731506348, + "learning_rate": 0.00017637231810752025, + "loss": 0.5051, + "step": 9180 + }, + { + "epoch": 0.3548399552106259, + "grad_norm": 2.4668326377868652, + "learning_rate": 0.00017634657708791846, + "loss": 0.2899, + "step": 9190 + }, + { + "epoch": 0.3552260705046527, + "grad_norm": 2.004683017730713, + "learning_rate": 0.00017632083606831668, + "loss": 0.3137, + "step": 9200 + }, + { + "epoch": 0.3556121857986795, 
+ "grad_norm": 6.333971977233887, + "learning_rate": 0.0001762950950487149, + "loss": 0.5027, + "step": 9210 + }, + { + "epoch": 0.3559983010927063, + "grad_norm": 1.7840352058410645, + "learning_rate": 0.0001762693540291131, + "loss": 0.3988, + "step": 9220 + }, + { + "epoch": 0.35638441638673307, + "grad_norm": 0.9257024526596069, + "learning_rate": 0.00017624361300951132, + "loss": 0.3662, + "step": 9230 + }, + { + "epoch": 0.3567705316807599, + "grad_norm": 2.582887887954712, + "learning_rate": 0.00017621787198990953, + "loss": 0.2863, + "step": 9240 + }, + { + "epoch": 0.35715664697478666, + "grad_norm": 3.119943380355835, + "learning_rate": 0.00017619213097030774, + "loss": 0.4041, + "step": 9250 + }, + { + "epoch": 0.3575427622688135, + "grad_norm": 2.2561371326446533, + "learning_rate": 0.00017616638995070596, + "loss": 0.3969, + "step": 9260 + }, + { + "epoch": 0.35792887756284025, + "grad_norm": 2.104891538619995, + "learning_rate": 0.00017614064893110417, + "loss": 0.3216, + "step": 9270 + }, + { + "epoch": 0.3583149928568671, + "grad_norm": 1.6922805309295654, + "learning_rate": 0.00017611490791150238, + "loss": 0.3828, + "step": 9280 + }, + { + "epoch": 0.35870110815089384, + "grad_norm": 1.0928469896316528, + "learning_rate": 0.0001760891668919006, + "loss": 0.3225, + "step": 9290 + }, + { + "epoch": 0.35908722344492067, + "grad_norm": 2.4089863300323486, + "learning_rate": 0.0001760634258722988, + "loss": 0.4143, + "step": 9300 + }, + { + "epoch": 0.35947333873894743, + "grad_norm": 0.5562119483947754, + "learning_rate": 0.00017603768485269702, + "loss": 0.4597, + "step": 9310 + }, + { + "epoch": 0.35985945403297426, + "grad_norm": 1.3904486894607544, + "learning_rate": 0.00017601194383309524, + "loss": 0.4462, + "step": 9320 + }, + { + "epoch": 0.360245569327001, + "grad_norm": 2.1393306255340576, + "learning_rate": 0.00017598620281349345, + "loss": 0.2613, + "step": 9330 + }, + { + "epoch": 0.36063168462102785, + "grad_norm": 1.3657029867172241, + "learning_rate": 0.00017596046179389166, + "loss": 0.4968, + "step": 9340 + }, + { + "epoch": 0.3610177999150546, + "grad_norm": 2.424880027770996, + "learning_rate": 0.00017593472077428988, + "loss": 0.5982, + "step": 9350 + }, + { + "epoch": 0.36140391520908144, + "grad_norm": 6.178807735443115, + "learning_rate": 0.0001759089797546881, + "loss": 0.5355, + "step": 9360 + }, + { + "epoch": 0.3617900305031082, + "grad_norm": 1.5572419166564941, + "learning_rate": 0.0001758832387350863, + "loss": 0.4435, + "step": 9370 + }, + { + "epoch": 0.36217614579713503, + "grad_norm": 0.46649104356765747, + "learning_rate": 0.00017585749771548452, + "loss": 0.352, + "step": 9380 + }, + { + "epoch": 0.3625622610911618, + "grad_norm": 1.9611142873764038, + "learning_rate": 0.00017583175669588276, + "loss": 0.2684, + "step": 9390 + }, + { + "epoch": 0.3629483763851886, + "grad_norm": 1.7648595571517944, + "learning_rate": 0.00017580601567628094, + "loss": 0.3186, + "step": 9400 + }, + { + "epoch": 0.3633344916792154, + "grad_norm": 1.7970843315124512, + "learning_rate": 0.00017578027465667916, + "loss": 0.5339, + "step": 9410 + }, + { + "epoch": 0.3637206069732422, + "grad_norm": 3.084897994995117, + "learning_rate": 0.00017575453363707737, + "loss": 0.5143, + "step": 9420 + }, + { + "epoch": 0.36410672226726903, + "grad_norm": 1.440626621246338, + "learning_rate": 0.00017572879261747558, + "loss": 0.4067, + "step": 9430 + }, + { + "epoch": 0.3644928375612958, + "grad_norm": 0.44918450713157654, + "learning_rate": 0.0001757030515978738, + 
"loss": 0.2306, + "step": 9440 + }, + { + "epoch": 0.3648789528553226, + "grad_norm": 2.617272138595581, + "learning_rate": 0.000175677310578272, + "loss": 0.3166, + "step": 9450 + }, + { + "epoch": 0.3652650681493494, + "grad_norm": 2.575073719024658, + "learning_rate": 0.00017565156955867025, + "loss": 0.6645, + "step": 9460 + }, + { + "epoch": 0.3656511834433762, + "grad_norm": 0.9430664777755737, + "learning_rate": 0.00017562582853906844, + "loss": 0.2753, + "step": 9470 + }, + { + "epoch": 0.366037298737403, + "grad_norm": 1.9400445222854614, + "learning_rate": 0.00017560008751946665, + "loss": 0.4689, + "step": 9480 + }, + { + "epoch": 0.3664234140314298, + "grad_norm": 4.0443220138549805, + "learning_rate": 0.00017557434649986486, + "loss": 0.5373, + "step": 9490 + }, + { + "epoch": 0.3668095293254566, + "grad_norm": 3.4999184608459473, + "learning_rate": 0.00017554860548026308, + "loss": 0.3412, + "step": 9500 + }, + { + "epoch": 0.3671956446194834, + "grad_norm": 2.2023515701293945, + "learning_rate": 0.0001755228644606613, + "loss": 0.3385, + "step": 9510 + }, + { + "epoch": 0.36758175991351016, + "grad_norm": 1.213641881942749, + "learning_rate": 0.0001754971234410595, + "loss": 0.4785, + "step": 9520 + }, + { + "epoch": 0.367967875207537, + "grad_norm": 0.4178420603275299, + "learning_rate": 0.00017547138242145774, + "loss": 0.2605, + "step": 9530 + }, + { + "epoch": 0.36835399050156376, + "grad_norm": 2.676564931869507, + "learning_rate": 0.00017544564140185593, + "loss": 0.5297, + "step": 9540 + }, + { + "epoch": 0.3687401057955906, + "grad_norm": 0.8604353070259094, + "learning_rate": 0.00017541990038225414, + "loss": 0.3983, + "step": 9550 + }, + { + "epoch": 0.36912622108961735, + "grad_norm": 1.298893690109253, + "learning_rate": 0.00017539415936265236, + "loss": 0.3229, + "step": 9560 + }, + { + "epoch": 0.36951233638364417, + "grad_norm": 4.109025478363037, + "learning_rate": 0.00017536841834305057, + "loss": 0.519, + "step": 9570 + }, + { + "epoch": 0.36989845167767094, + "grad_norm": 3.440915584564209, + "learning_rate": 0.0001753426773234488, + "loss": 0.4061, + "step": 9580 + }, + { + "epoch": 0.37028456697169776, + "grad_norm": 0.1484186202287674, + "learning_rate": 0.000175316936303847, + "loss": 0.3443, + "step": 9590 + }, + { + "epoch": 0.3706706822657245, + "grad_norm": 3.114328145980835, + "learning_rate": 0.00017529119528424524, + "loss": 0.2451, + "step": 9600 + }, + { + "epoch": 0.37105679755975135, + "grad_norm": 1.8218796253204346, + "learning_rate": 0.00017526545426464342, + "loss": 0.2511, + "step": 9610 + }, + { + "epoch": 0.3714429128537781, + "grad_norm": 1.0732795000076294, + "learning_rate": 0.00017523971324504164, + "loss": 0.1581, + "step": 9620 + }, + { + "epoch": 0.37182902814780494, + "grad_norm": 1.0567959547042847, + "learning_rate": 0.00017521397222543985, + "loss": 0.1924, + "step": 9630 + }, + { + "epoch": 0.3722151434418317, + "grad_norm": 0.3467637896537781, + "learning_rate": 0.00017518823120583806, + "loss": 0.3571, + "step": 9640 + }, + { + "epoch": 0.37260125873585853, + "grad_norm": 2.6293838024139404, + "learning_rate": 0.0001751624901862363, + "loss": 0.3282, + "step": 9650 + }, + { + "epoch": 0.3729873740298853, + "grad_norm": 1.159696102142334, + "learning_rate": 0.0001751367491666345, + "loss": 0.2636, + "step": 9660 + }, + { + "epoch": 0.3733734893239121, + "grad_norm": 0.6884826421737671, + "learning_rate": 0.00017511100814703273, + "loss": 0.2842, + "step": 9670 + }, + { + "epoch": 0.3737596046179389, + "grad_norm": 
3.789825201034546, + "learning_rate": 0.00017508526712743091, + "loss": 0.599, + "step": 9680 + }, + { + "epoch": 0.3741457199119657, + "grad_norm": 1.0705493688583374, + "learning_rate": 0.00017505952610782913, + "loss": 0.1746, + "step": 9690 + }, + { + "epoch": 0.37453183520599254, + "grad_norm": 1.8735803365707397, + "learning_rate": 0.00017503378508822734, + "loss": 0.3259, + "step": 9700 + }, + { + "epoch": 0.3749179505000193, + "grad_norm": 1.2987112998962402, + "learning_rate": 0.00017500804406862555, + "loss": 0.5738, + "step": 9710 + }, + { + "epoch": 0.3753040657940461, + "grad_norm": 1.5362507104873657, + "learning_rate": 0.0001749823030490238, + "loss": 0.3815, + "step": 9720 + }, + { + "epoch": 0.3756901810880729, + "grad_norm": 0.1640123724937439, + "learning_rate": 0.00017495656202942198, + "loss": 0.3672, + "step": 9730 + }, + { + "epoch": 0.3760762963820997, + "grad_norm": 0.6714594960212708, + "learning_rate": 0.00017493082100982022, + "loss": 0.2849, + "step": 9740 + }, + { + "epoch": 0.3764624116761265, + "grad_norm": 4.330246448516846, + "learning_rate": 0.0001749050799902184, + "loss": 0.4023, + "step": 9750 + }, + { + "epoch": 0.3768485269701533, + "grad_norm": 0.8616659641265869, + "learning_rate": 0.00017487933897061665, + "loss": 0.4434, + "step": 9760 + }, + { + "epoch": 0.3772346422641801, + "grad_norm": 2.6581578254699707, + "learning_rate": 0.00017485359795101486, + "loss": 0.4854, + "step": 9770 + }, + { + "epoch": 0.3776207575582069, + "grad_norm": 1.8269850015640259, + "learning_rate": 0.00017482785693141305, + "loss": 0.6033, + "step": 9780 + }, + { + "epoch": 0.37800687285223367, + "grad_norm": 2.256073236465454, + "learning_rate": 0.0001748021159118113, + "loss": 0.5317, + "step": 9790 + }, + { + "epoch": 0.3783929881462605, + "grad_norm": 0.8793076872825623, + "learning_rate": 0.00017477637489220947, + "loss": 0.3883, + "step": 9800 + }, + { + "epoch": 0.37877910344028726, + "grad_norm": 1.71831214427948, + "learning_rate": 0.00017475063387260772, + "loss": 0.2473, + "step": 9810 + }, + { + "epoch": 0.3791652187343141, + "grad_norm": 3.4802069664001465, + "learning_rate": 0.0001747248928530059, + "loss": 0.4847, + "step": 9820 + }, + { + "epoch": 0.37955133402834085, + "grad_norm": 5.419053077697754, + "learning_rate": 0.00017469915183340414, + "loss": 0.3668, + "step": 9830 + }, + { + "epoch": 0.37993744932236767, + "grad_norm": 1.567060112953186, + "learning_rate": 0.00017467341081380236, + "loss": 0.3342, + "step": 9840 + }, + { + "epoch": 0.38032356461639444, + "grad_norm": 3.0100274085998535, + "learning_rate": 0.00017464766979420054, + "loss": 0.476, + "step": 9850 + }, + { + "epoch": 0.38070967991042126, + "grad_norm": 0.7659344673156738, + "learning_rate": 0.00017462192877459878, + "loss": 0.2608, + "step": 9860 + }, + { + "epoch": 0.38109579520444803, + "grad_norm": 3.9540984630584717, + "learning_rate": 0.00017459618775499697, + "loss": 0.763, + "step": 9870 + }, + { + "epoch": 0.38148191049847485, + "grad_norm": 0.8768689036369324, + "learning_rate": 0.0001745704467353952, + "loss": 0.3365, + "step": 9880 + }, + { + "epoch": 0.3818680257925016, + "grad_norm": 0.9985928535461426, + "learning_rate": 0.00017454470571579342, + "loss": 0.3116, + "step": 9890 + }, + { + "epoch": 0.38225414108652844, + "grad_norm": 2.0326671600341797, + "learning_rate": 0.00017451896469619163, + "loss": 0.289, + "step": 9900 + }, + { + "epoch": 0.3826402563805552, + "grad_norm": 3.2696290016174316, + "learning_rate": 0.00017449322367658985, + "loss": 0.4097, + 
"step": 9910 + }, + { + "epoch": 0.38302637167458203, + "grad_norm": 3.048860788345337, + "learning_rate": 0.00017446748265698803, + "loss": 0.5181, + "step": 9920 + }, + { + "epoch": 0.3834124869686088, + "grad_norm": 1.7899913787841797, + "learning_rate": 0.00017444174163738627, + "loss": 0.2166, + "step": 9930 + }, + { + "epoch": 0.3837986022626356, + "grad_norm": 3.6762959957122803, + "learning_rate": 0.00017441600061778446, + "loss": 0.4971, + "step": 9940 + }, + { + "epoch": 0.3841847175566624, + "grad_norm": 0.9108519554138184, + "learning_rate": 0.0001743902595981827, + "loss": 0.4974, + "step": 9950 + }, + { + "epoch": 0.3845708328506892, + "grad_norm": 4.062527656555176, + "learning_rate": 0.00017436451857858091, + "loss": 0.4448, + "step": 9960 + }, + { + "epoch": 0.38495694814471604, + "grad_norm": 3.230902671813965, + "learning_rate": 0.00017433877755897913, + "loss": 0.2977, + "step": 9970 + }, + { + "epoch": 0.3853430634387428, + "grad_norm": 3.8190758228302, + "learning_rate": 0.00017431303653937734, + "loss": 0.4887, + "step": 9980 + }, + { + "epoch": 0.38572917873276963, + "grad_norm": 0.9079695343971252, + "learning_rate": 0.00017428729551977553, + "loss": 0.271, + "step": 9990 + }, + { + "epoch": 0.3861152940267964, + "grad_norm": 3.3730807304382324, + "learning_rate": 0.00017426155450017377, + "loss": 0.3782, + "step": 10000 + }, + { + "epoch": 0.3865014093208232, + "grad_norm": 1.07533860206604, + "learning_rate": 0.00017423581348057195, + "loss": 0.3905, + "step": 10010 + }, + { + "epoch": 0.38688752461485, + "grad_norm": 1.3856415748596191, + "learning_rate": 0.0001742100724609702, + "loss": 0.3757, + "step": 10020 + }, + { + "epoch": 0.3872736399088768, + "grad_norm": 5.751671314239502, + "learning_rate": 0.0001741843314413684, + "loss": 0.6657, + "step": 10030 + }, + { + "epoch": 0.3876597552029036, + "grad_norm": 0.6837680339813232, + "learning_rate": 0.00017415859042176662, + "loss": 0.2318, + "step": 10040 + }, + { + "epoch": 0.3880458704969304, + "grad_norm": 2.770787239074707, + "learning_rate": 0.00017413284940216483, + "loss": 0.3706, + "step": 10050 + }, + { + "epoch": 0.38843198579095717, + "grad_norm": 2.3058855533599854, + "learning_rate": 0.00017410710838256302, + "loss": 0.1641, + "step": 10060 + }, + { + "epoch": 0.388818101084984, + "grad_norm": 1.894718885421753, + "learning_rate": 0.00017408136736296126, + "loss": 0.4752, + "step": 10070 + }, + { + "epoch": 0.38920421637901076, + "grad_norm": 1.8346868753433228, + "learning_rate": 0.00017405562634335947, + "loss": 0.5007, + "step": 10080 + }, + { + "epoch": 0.3895903316730376, + "grad_norm": 5.277680397033691, + "learning_rate": 0.0001740298853237577, + "loss": 0.4399, + "step": 10090 + }, + { + "epoch": 0.38997644696706435, + "grad_norm": 1.306093692779541, + "learning_rate": 0.0001740041443041559, + "loss": 0.371, + "step": 10100 + }, + { + "epoch": 0.3903625622610912, + "grad_norm": 3.0306456089019775, + "learning_rate": 0.00017397840328455411, + "loss": 0.2515, + "step": 10110 + }, + { + "epoch": 0.39074867755511794, + "grad_norm": 0.7951543927192688, + "learning_rate": 0.00017395266226495233, + "loss": 0.3775, + "step": 10120 + }, + { + "epoch": 0.39113479284914476, + "grad_norm": 5.185150146484375, + "learning_rate": 0.00017392692124535051, + "loss": 0.3591, + "step": 10130 + }, + { + "epoch": 0.39152090814317153, + "grad_norm": 1.1718593835830688, + "learning_rate": 0.00017390118022574875, + "loss": 0.5484, + "step": 10140 + }, + { + "epoch": 0.39190702343719835, + "grad_norm": 
1.6352128982543945, + "learning_rate": 0.00017387543920614697, + "loss": 0.2817, + "step": 10150 + }, + { + "epoch": 0.3922931387312251, + "grad_norm": 2.4863786697387695, + "learning_rate": 0.00017384969818654518, + "loss": 0.4027, + "step": 10160 + }, + { + "epoch": 0.39267925402525194, + "grad_norm": 2.069805383682251, + "learning_rate": 0.0001738239571669434, + "loss": 0.3559, + "step": 10170 + }, + { + "epoch": 0.3930653693192787, + "grad_norm": 1.671980619430542, + "learning_rate": 0.0001737982161473416, + "loss": 0.4405, + "step": 10180 + }, + { + "epoch": 0.39345148461330554, + "grad_norm": 4.298947334289551, + "learning_rate": 0.00017377247512773982, + "loss": 0.3005, + "step": 10190 + }, + { + "epoch": 0.3938375999073323, + "grad_norm": 0.4142851233482361, + "learning_rate": 0.000173746734108138, + "loss": 0.4248, + "step": 10200 + }, + { + "epoch": 0.3942237152013591, + "grad_norm": 3.5962865352630615, + "learning_rate": 0.00017372099308853625, + "loss": 0.27, + "step": 10210 + }, + { + "epoch": 0.39460983049538595, + "grad_norm": 2.20154070854187, + "learning_rate": 0.00017369525206893446, + "loss": 0.2858, + "step": 10220 + }, + { + "epoch": 0.3949959457894127, + "grad_norm": 0.2400553673505783, + "learning_rate": 0.00017366951104933267, + "loss": 0.2806, + "step": 10230 + }, + { + "epoch": 0.39538206108343954, + "grad_norm": 1.817741870880127, + "learning_rate": 0.0001736437700297309, + "loss": 0.3647, + "step": 10240 + }, + { + "epoch": 0.3957681763774663, + "grad_norm": 4.890044689178467, + "learning_rate": 0.0001736180290101291, + "loss": 0.4435, + "step": 10250 + }, + { + "epoch": 0.39615429167149313, + "grad_norm": 0.3407624065876007, + "learning_rate": 0.00017359228799052731, + "loss": 0.4857, + "step": 10260 + }, + { + "epoch": 0.3965404069655199, + "grad_norm": 2.4883463382720947, + "learning_rate": 0.00017356654697092553, + "loss": 0.2667, + "step": 10270 + }, + { + "epoch": 0.3969265222595467, + "grad_norm": 2.343823194503784, + "learning_rate": 0.00017354080595132374, + "loss": 0.3711, + "step": 10280 + }, + { + "epoch": 0.3973126375535735, + "grad_norm": 0.2056214064359665, + "learning_rate": 0.00017351506493172195, + "loss": 0.2695, + "step": 10290 + }, + { + "epoch": 0.3976987528476003, + "grad_norm": 0.20321065187454224, + "learning_rate": 0.00017348932391212017, + "loss": 0.3079, + "step": 10300 + }, + { + "epoch": 0.3980848681416271, + "grad_norm": 0.7993821501731873, + "learning_rate": 0.00017346358289251838, + "loss": 0.3599, + "step": 10310 + }, + { + "epoch": 0.3984709834356539, + "grad_norm": 2.0987348556518555, + "learning_rate": 0.0001734378418729166, + "loss": 0.3259, + "step": 10320 + }, + { + "epoch": 0.39885709872968067, + "grad_norm": 2.474246025085449, + "learning_rate": 0.0001734121008533148, + "loss": 0.3398, + "step": 10330 + }, + { + "epoch": 0.3992432140237075, + "grad_norm": 2.341064214706421, + "learning_rate": 0.00017338635983371302, + "loss": 0.5264, + "step": 10340 + }, + { + "epoch": 0.39962932931773426, + "grad_norm": 1.587437629699707, + "learning_rate": 0.00017336061881411123, + "loss": 0.4228, + "step": 10350 + }, + { + "epoch": 0.4000154446117611, + "grad_norm": 0.6692029237747192, + "learning_rate": 0.00017333487779450945, + "loss": 0.3576, + "step": 10360 + }, + { + "epoch": 0.40040155990578785, + "grad_norm": 2.088212251663208, + "learning_rate": 0.00017330913677490766, + "loss": 0.3096, + "step": 10370 + }, + { + "epoch": 0.4007876751998147, + "grad_norm": 1.5051954984664917, + "learning_rate": 0.00017328339575530587, + 
"loss": 0.3753, + "step": 10380 + }, + { + "epoch": 0.40117379049384144, + "grad_norm": 2.02595591545105, + "learning_rate": 0.0001732576547357041, + "loss": 0.3339, + "step": 10390 + }, + { + "epoch": 0.40155990578786827, + "grad_norm": 1.3062909841537476, + "learning_rate": 0.0001732319137161023, + "loss": 0.4301, + "step": 10400 + }, + { + "epoch": 0.40194602108189503, + "grad_norm": 2.5890421867370605, + "learning_rate": 0.00017320617269650051, + "loss": 0.3047, + "step": 10410 + }, + { + "epoch": 0.40233213637592186, + "grad_norm": 1.5994844436645508, + "learning_rate": 0.00017318043167689873, + "loss": 0.4158, + "step": 10420 + }, + { + "epoch": 0.4027182516699486, + "grad_norm": 0.5470211505889893, + "learning_rate": 0.00017315469065729694, + "loss": 0.4513, + "step": 10430 + }, + { + "epoch": 0.40310436696397545, + "grad_norm": 2.216935634613037, + "learning_rate": 0.00017312894963769515, + "loss": 0.5123, + "step": 10440 + }, + { + "epoch": 0.4034904822580022, + "grad_norm": 2.354724645614624, + "learning_rate": 0.00017310320861809337, + "loss": 0.2804, + "step": 10450 + }, + { + "epoch": 0.40387659755202904, + "grad_norm": 4.514159202575684, + "learning_rate": 0.00017307746759849158, + "loss": 0.3317, + "step": 10460 + }, + { + "epoch": 0.4042627128460558, + "grad_norm": 0.9874318242073059, + "learning_rate": 0.0001730517265788898, + "loss": 0.1948, + "step": 10470 + }, + { + "epoch": 0.40464882814008263, + "grad_norm": 2.0725696086883545, + "learning_rate": 0.000173025985559288, + "loss": 0.3627, + "step": 10480 + }, + { + "epoch": 0.40503494343410945, + "grad_norm": 2.4061577320098877, + "learning_rate": 0.00017300024453968622, + "loss": 0.3074, + "step": 10490 + }, + { + "epoch": 0.4054210587281362, + "grad_norm": 1.3369660377502441, + "learning_rate": 0.00017297450352008443, + "loss": 0.533, + "step": 10500 + }, + { + "epoch": 0.40580717402216304, + "grad_norm": 1.2730306386947632, + "learning_rate": 0.00017294876250048265, + "loss": 0.4688, + "step": 10510 + }, + { + "epoch": 0.4061932893161898, + "grad_norm": 0.6753021478652954, + "learning_rate": 0.00017292302148088086, + "loss": 0.4427, + "step": 10520 + }, + { + "epoch": 0.40657940461021663, + "grad_norm": 1.7279945611953735, + "learning_rate": 0.0001728972804612791, + "loss": 0.4921, + "step": 10530 + }, + { + "epoch": 0.4069655199042434, + "grad_norm": 0.9288708567619324, + "learning_rate": 0.0001728715394416773, + "loss": 0.363, + "step": 10540 + }, + { + "epoch": 0.4073516351982702, + "grad_norm": 0.5325084924697876, + "learning_rate": 0.0001728457984220755, + "loss": 0.4095, + "step": 10550 + }, + { + "epoch": 0.407737750492297, + "grad_norm": 1.2030489444732666, + "learning_rate": 0.0001728200574024737, + "loss": 0.3499, + "step": 10560 + }, + { + "epoch": 0.4081238657863238, + "grad_norm": 3.8157269954681396, + "learning_rate": 0.00017279431638287193, + "loss": 0.1622, + "step": 10570 + }, + { + "epoch": 0.4085099810803506, + "grad_norm": 0.6373336911201477, + "learning_rate": 0.00017276857536327017, + "loss": 0.4657, + "step": 10580 + }, + { + "epoch": 0.4088960963743774, + "grad_norm": 2.2850074768066406, + "learning_rate": 0.00017274283434366835, + "loss": 0.3585, + "step": 10590 + }, + { + "epoch": 0.40928221166840417, + "grad_norm": 0.8831659555435181, + "learning_rate": 0.0001727170933240666, + "loss": 0.293, + "step": 10600 + }, + { + "epoch": 0.409668326962431, + "grad_norm": 5.1165995597839355, + "learning_rate": 0.00017269135230446478, + "loss": 0.6539, + "step": 10610 + }, + { + "epoch": 
0.41005444225645776, + "grad_norm": 4.901204586029053, + "learning_rate": 0.000172665611284863, + "loss": 0.4628, + "step": 10620 + }, + { + "epoch": 0.4104405575504846, + "grad_norm": 2.1492419242858887, + "learning_rate": 0.0001726398702652612, + "loss": 0.277, + "step": 10630 + }, + { + "epoch": 0.41082667284451135, + "grad_norm": 3.56510853767395, + "learning_rate": 0.00017261412924565942, + "loss": 0.4696, + "step": 10640 + }, + { + "epoch": 0.4112127881385382, + "grad_norm": 2.054769992828369, + "learning_rate": 0.00017258838822605766, + "loss": 0.4093, + "step": 10650 + }, + { + "epoch": 0.41159890343256494, + "grad_norm": 2.133474826812744, + "learning_rate": 0.00017256264720645585, + "loss": 0.3604, + "step": 10660 + }, + { + "epoch": 0.41198501872659177, + "grad_norm": 2.5062367916107178, + "learning_rate": 0.0001725369061868541, + "loss": 0.3916, + "step": 10670 + }, + { + "epoch": 0.41237113402061853, + "grad_norm": 0.431570827960968, + "learning_rate": 0.00017251116516725227, + "loss": 0.4048, + "step": 10680 + }, + { + "epoch": 0.41275724931464536, + "grad_norm": 1.2092580795288086, + "learning_rate": 0.0001724854241476505, + "loss": 0.602, + "step": 10690 + }, + { + "epoch": 0.4131433646086721, + "grad_norm": 2.712398052215576, + "learning_rate": 0.00017245968312804873, + "loss": 0.4172, + "step": 10700 + }, + { + "epoch": 0.41352947990269895, + "grad_norm": 3.914670467376709, + "learning_rate": 0.0001724339421084469, + "loss": 0.3843, + "step": 10710 + }, + { + "epoch": 0.4139155951967257, + "grad_norm": 1.7062132358551025, + "learning_rate": 0.00017240820108884515, + "loss": 0.343, + "step": 10720 + }, + { + "epoch": 0.41430171049075254, + "grad_norm": 0.5837095379829407, + "learning_rate": 0.00017238246006924334, + "loss": 0.3872, + "step": 10730 + }, + { + "epoch": 0.41468782578477936, + "grad_norm": 1.098900556564331, + "learning_rate": 0.00017235671904964158, + "loss": 0.2062, + "step": 10740 + }, + { + "epoch": 0.41507394107880613, + "grad_norm": 1.2533438205718994, + "learning_rate": 0.00017233097803003977, + "loss": 0.141, + "step": 10750 + }, + { + "epoch": 0.41546005637283295, + "grad_norm": 0.8688085079193115, + "learning_rate": 0.00017230523701043798, + "loss": 0.3686, + "step": 10760 + }, + { + "epoch": 0.4158461716668597, + "grad_norm": 1.868402361869812, + "learning_rate": 0.00017227949599083622, + "loss": 0.449, + "step": 10770 + }, + { + "epoch": 0.41623228696088654, + "grad_norm": 0.7168850898742676, + "learning_rate": 0.0001722537549712344, + "loss": 0.2317, + "step": 10780 + }, + { + "epoch": 0.4166184022549133, + "grad_norm": 3.1062309741973877, + "learning_rate": 0.00017222801395163265, + "loss": 0.4655, + "step": 10790 + }, + { + "epoch": 0.41700451754894013, + "grad_norm": 2.7296605110168457, + "learning_rate": 0.00017220227293203083, + "loss": 0.3934, + "step": 10800 + }, + { + "epoch": 0.4173906328429669, + "grad_norm": 2.3148224353790283, + "learning_rate": 0.00017217653191242907, + "loss": 0.2367, + "step": 10810 + }, + { + "epoch": 0.4177767481369937, + "grad_norm": 0.7049677968025208, + "learning_rate": 0.00017215079089282726, + "loss": 0.3157, + "step": 10820 + }, + { + "epoch": 0.4181628634310205, + "grad_norm": 3.3960344791412354, + "learning_rate": 0.00017212504987322547, + "loss": 0.4945, + "step": 10830 + }, + { + "epoch": 0.4185489787250473, + "grad_norm": 2.606316566467285, + "learning_rate": 0.0001720993088536237, + "loss": 0.4056, + "step": 10840 + }, + { + "epoch": 0.4189350940190741, + "grad_norm": 1.7469319105148315, + 
"learning_rate": 0.0001720735678340219, + "loss": 0.4176, + "step": 10850 + }, + { + "epoch": 0.4193212093131009, + "grad_norm": 0.8538552522659302, + "learning_rate": 0.00017204782681442014, + "loss": 0.3025, + "step": 10860 + }, + { + "epoch": 0.4197073246071277, + "grad_norm": 1.9576159715652466, + "learning_rate": 0.00017202208579481833, + "loss": 0.5626, + "step": 10870 + }, + { + "epoch": 0.4200934399011545, + "grad_norm": 0.8435356616973877, + "learning_rate": 0.00017199634477521657, + "loss": 0.2397, + "step": 10880 + }, + { + "epoch": 0.42047955519518126, + "grad_norm": 1.3026552200317383, + "learning_rate": 0.00017197060375561478, + "loss": 0.4793, + "step": 10890 + }, + { + "epoch": 0.4208656704892081, + "grad_norm": 1.8935116529464722, + "learning_rate": 0.00017194486273601297, + "loss": 0.2459, + "step": 10900 + }, + { + "epoch": 0.42125178578323486, + "grad_norm": 0.7297415137290955, + "learning_rate": 0.0001719191217164112, + "loss": 0.4115, + "step": 10910 + }, + { + "epoch": 0.4216379010772617, + "grad_norm": 2.730445146560669, + "learning_rate": 0.0001718933806968094, + "loss": 0.3467, + "step": 10920 + }, + { + "epoch": 0.42202401637128845, + "grad_norm": 1.5462249517440796, + "learning_rate": 0.00017186763967720763, + "loss": 0.2319, + "step": 10930 + }, + { + "epoch": 0.42241013166531527, + "grad_norm": 2.173388957977295, + "learning_rate": 0.00017184189865760582, + "loss": 0.3664, + "step": 10940 + }, + { + "epoch": 0.42279624695934204, + "grad_norm": 0.9086957573890686, + "learning_rate": 0.00017181615763800406, + "loss": 0.3928, + "step": 10950 + }, + { + "epoch": 0.42318236225336886, + "grad_norm": 1.6344754695892334, + "learning_rate": 0.00017179041661840227, + "loss": 0.32, + "step": 10960 + }, + { + "epoch": 0.4235684775473956, + "grad_norm": 3.7620887756347656, + "learning_rate": 0.00017176467559880049, + "loss": 0.3998, + "step": 10970 + }, + { + "epoch": 0.42395459284142245, + "grad_norm": 2.3914058208465576, + "learning_rate": 0.0001717389345791987, + "loss": 0.3003, + "step": 10980 + }, + { + "epoch": 0.4243407081354492, + "grad_norm": 1.1183325052261353, + "learning_rate": 0.00017171319355959689, + "loss": 0.2408, + "step": 10990 + }, + { + "epoch": 0.42472682342947604, + "grad_norm": 1.5570834875106812, + "learning_rate": 0.00017168745253999513, + "loss": 0.2638, + "step": 11000 + }, + { + "epoch": 0.42511293872350286, + "grad_norm": 1.4825866222381592, + "learning_rate": 0.0001716617115203933, + "loss": 0.2774, + "step": 11010 + }, + { + "epoch": 0.42549905401752963, + "grad_norm": 1.5424071550369263, + "learning_rate": 0.00017163597050079155, + "loss": 0.3636, + "step": 11020 + }, + { + "epoch": 0.42588516931155646, + "grad_norm": 3.9182989597320557, + "learning_rate": 0.00017161022948118977, + "loss": 0.5319, + "step": 11030 + }, + { + "epoch": 0.4262712846055832, + "grad_norm": 3.7870359420776367, + "learning_rate": 0.00017158448846158798, + "loss": 0.316, + "step": 11040 + }, + { + "epoch": 0.42665739989961005, + "grad_norm": 3.6943869590759277, + "learning_rate": 0.0001715587474419862, + "loss": 0.4336, + "step": 11050 + }, + { + "epoch": 0.4270435151936368, + "grad_norm": 1.1681898832321167, + "learning_rate": 0.00017153300642238438, + "loss": 0.328, + "step": 11060 + }, + { + "epoch": 0.42742963048766364, + "grad_norm": 3.6428277492523193, + "learning_rate": 0.00017150726540278262, + "loss": 0.2609, + "step": 11070 + }, + { + "epoch": 0.4278157457816904, + "grad_norm": 0.8946434259414673, + "learning_rate": 0.00017148152438318083, + "loss": 
0.5445, + "step": 11080 + }, + { + "epoch": 0.4282018610757172, + "grad_norm": 1.8038333654403687, + "learning_rate": 0.00017145578336357905, + "loss": 0.3548, + "step": 11090 + }, + { + "epoch": 0.428587976369744, + "grad_norm": 2.3430778980255127, + "learning_rate": 0.00017143004234397726, + "loss": 0.4831, + "step": 11100 + }, + { + "epoch": 0.4289740916637708, + "grad_norm": 1.3243132829666138, + "learning_rate": 0.00017140430132437547, + "loss": 0.5759, + "step": 11110 + }, + { + "epoch": 0.4293602069577976, + "grad_norm": 1.2575668096542358, + "learning_rate": 0.00017137856030477369, + "loss": 0.3289, + "step": 11120 + }, + { + "epoch": 0.4297463222518244, + "grad_norm": 1.5581884384155273, + "learning_rate": 0.00017135281928517187, + "loss": 0.309, + "step": 11130 + }, + { + "epoch": 0.4301324375458512, + "grad_norm": 2.736063241958618, + "learning_rate": 0.0001713270782655701, + "loss": 0.3964, + "step": 11140 + }, + { + "epoch": 0.430518552839878, + "grad_norm": 1.3930561542510986, + "learning_rate": 0.00017130133724596833, + "loss": 0.2277, + "step": 11150 + }, + { + "epoch": 0.43090466813390477, + "grad_norm": 1.7932826280593872, + "learning_rate": 0.00017127559622636654, + "loss": 0.2907, + "step": 11160 + }, + { + "epoch": 0.4312907834279316, + "grad_norm": 1.7017295360565186, + "learning_rate": 0.00017124985520676475, + "loss": 0.2986, + "step": 11170 + }, + { + "epoch": 0.43167689872195836, + "grad_norm": 1.3543587923049927, + "learning_rate": 0.00017122411418716297, + "loss": 0.4872, + "step": 11180 + }, + { + "epoch": 0.4320630140159852, + "grad_norm": 2.3927829265594482, + "learning_rate": 0.00017119837316756118, + "loss": 0.5281, + "step": 11190 + }, + { + "epoch": 0.43244912931001195, + "grad_norm": 2.916257619857788, + "learning_rate": 0.0001711726321479594, + "loss": 0.5364, + "step": 11200 + }, + { + "epoch": 0.43283524460403877, + "grad_norm": 4.717250823974609, + "learning_rate": 0.0001711468911283576, + "loss": 0.3744, + "step": 11210 + }, + { + "epoch": 0.43322135989806554, + "grad_norm": 1.806577444076538, + "learning_rate": 0.00017112115010875582, + "loss": 0.3377, + "step": 11220 + }, + { + "epoch": 0.43360747519209236, + "grad_norm": 3.186603546142578, + "learning_rate": 0.00017109540908915403, + "loss": 0.3421, + "step": 11230 + }, + { + "epoch": 0.43399359048611913, + "grad_norm": 5.388319969177246, + "learning_rate": 0.00017106966806955225, + "loss": 0.3589, + "step": 11240 + }, + { + "epoch": 0.43437970578014595, + "grad_norm": 1.368312954902649, + "learning_rate": 0.00017104392704995046, + "loss": 0.2677, + "step": 11250 + }, + { + "epoch": 0.4347658210741727, + "grad_norm": 0.9010117053985596, + "learning_rate": 0.00017101818603034867, + "loss": 0.3411, + "step": 11260 + }, + { + "epoch": 0.43515193636819954, + "grad_norm": 0.46370139718055725, + "learning_rate": 0.00017099244501074689, + "loss": 0.3531, + "step": 11270 + }, + { + "epoch": 0.43553805166222637, + "grad_norm": 2.778857469558716, + "learning_rate": 0.0001709667039911451, + "loss": 0.3953, + "step": 11280 + }, + { + "epoch": 0.43592416695625313, + "grad_norm": 0.45829036831855774, + "learning_rate": 0.0001709409629715433, + "loss": 0.3117, + "step": 11290 + }, + { + "epoch": 0.43631028225027996, + "grad_norm": 2.2053589820861816, + "learning_rate": 0.00017091522195194153, + "loss": 0.5104, + "step": 11300 + }, + { + "epoch": 0.4366963975443067, + "grad_norm": 5.166933059692383, + "learning_rate": 0.00017088948093233974, + "loss": 0.6913, + "step": 11310 + }, + { + "epoch": 
0.43708251283833355, + "grad_norm": 1.5593189001083374, + "learning_rate": 0.00017086373991273795, + "loss": 0.305, + "step": 11320 + }, + { + "epoch": 0.4374686281323603, + "grad_norm": 2.01481556892395, + "learning_rate": 0.00017083799889313617, + "loss": 0.4576, + "step": 11330 + }, + { + "epoch": 0.43785474342638714, + "grad_norm": 2.200463056564331, + "learning_rate": 0.00017081225787353438, + "loss": 0.6491, + "step": 11340 + }, + { + "epoch": 0.4382408587204139, + "grad_norm": 0.8904009461402893, + "learning_rate": 0.0001707865168539326, + "loss": 0.134, + "step": 11350 + }, + { + "epoch": 0.43862697401444073, + "grad_norm": 0.7481307983398438, + "learning_rate": 0.0001707607758343308, + "loss": 0.5552, + "step": 11360 + }, + { + "epoch": 0.4390130893084675, + "grad_norm": 1.0893138647079468, + "learning_rate": 0.00017073503481472902, + "loss": 0.2369, + "step": 11370 + }, + { + "epoch": 0.4393992046024943, + "grad_norm": 3.3567726612091064, + "learning_rate": 0.00017070929379512723, + "loss": 0.1916, + "step": 11380 + }, + { + "epoch": 0.4397853198965211, + "grad_norm": 0.5970168709754944, + "learning_rate": 0.00017068355277552545, + "loss": 0.6096, + "step": 11390 + }, + { + "epoch": 0.4401714351905479, + "grad_norm": 2.880949020385742, + "learning_rate": 0.00017065781175592366, + "loss": 0.3566, + "step": 11400 + }, + { + "epoch": 0.4405575504845747, + "grad_norm": 0.5541375279426575, + "learning_rate": 0.00017063207073632187, + "loss": 0.4147, + "step": 11410 + }, + { + "epoch": 0.4409436657786015, + "grad_norm": 4.519477844238281, + "learning_rate": 0.00017060632971672009, + "loss": 0.4912, + "step": 11420 + }, + { + "epoch": 0.44132978107262827, + "grad_norm": 1.9959009885787964, + "learning_rate": 0.0001705805886971183, + "loss": 0.295, + "step": 11430 + }, + { + "epoch": 0.4417158963666551, + "grad_norm": 3.843033790588379, + "learning_rate": 0.0001705548476775165, + "loss": 0.2451, + "step": 11440 + }, + { + "epoch": 0.44210201166068186, + "grad_norm": 1.0480101108551025, + "learning_rate": 0.00017052910665791473, + "loss": 0.3113, + "step": 11450 + }, + { + "epoch": 0.4424881269547087, + "grad_norm": 2.0960068702697754, + "learning_rate": 0.00017050336563831294, + "loss": 0.5959, + "step": 11460 + }, + { + "epoch": 0.44287424224873545, + "grad_norm": 1.3062267303466797, + "learning_rate": 0.00017047762461871115, + "loss": 0.3812, + "step": 11470 + }, + { + "epoch": 0.4432603575427623, + "grad_norm": 2.2563138008117676, + "learning_rate": 0.00017045188359910937, + "loss": 0.303, + "step": 11480 + }, + { + "epoch": 0.44364647283678904, + "grad_norm": 1.518556833267212, + "learning_rate": 0.00017042614257950758, + "loss": 0.4349, + "step": 11490 + }, + { + "epoch": 0.44403258813081586, + "grad_norm": 5.704294681549072, + "learning_rate": 0.0001704004015599058, + "loss": 0.3162, + "step": 11500 + }, + { + "epoch": 0.44441870342484263, + "grad_norm": 3.3146274089813232, + "learning_rate": 0.000170374660540304, + "loss": 0.644, + "step": 11510 + }, + { + "epoch": 0.44480481871886945, + "grad_norm": 2.285374879837036, + "learning_rate": 0.00017034891952070222, + "loss": 0.3718, + "step": 11520 + }, + { + "epoch": 0.4451909340128963, + "grad_norm": 0.07299748063087463, + "learning_rate": 0.00017032317850110043, + "loss": 0.3093, + "step": 11530 + }, + { + "epoch": 0.44557704930692305, + "grad_norm": 4.159457683563232, + "learning_rate": 0.00017029743748149865, + "loss": 0.4074, + "step": 11540 + }, + { + "epoch": 0.44596316460094987, + "grad_norm": 2.2241604328155518, + 
"learning_rate": 0.00017027169646189686, + "loss": 0.2908, + "step": 11550 + }, + { + "epoch": 0.44634927989497664, + "grad_norm": 2.2086968421936035, + "learning_rate": 0.00017024595544229507, + "loss": 0.3352, + "step": 11560 + }, + { + "epoch": 0.44673539518900346, + "grad_norm": 0.2843379080295563, + "learning_rate": 0.00017022021442269329, + "loss": 0.2548, + "step": 11570 + }, + { + "epoch": 0.4471215104830302, + "grad_norm": 0.9805948734283447, + "learning_rate": 0.00017019447340309153, + "loss": 0.4134, + "step": 11580 + }, + { + "epoch": 0.44750762577705705, + "grad_norm": 2.3669955730438232, + "learning_rate": 0.0001701687323834897, + "loss": 0.3803, + "step": 11590 + }, + { + "epoch": 0.4478937410710838, + "grad_norm": 2.4062774181365967, + "learning_rate": 0.00017014299136388792, + "loss": 0.4391, + "step": 11600 + }, + { + "epoch": 0.44827985636511064, + "grad_norm": 0.4205828607082367, + "learning_rate": 0.00017011725034428614, + "loss": 0.2793, + "step": 11610 + }, + { + "epoch": 0.4486659716591374, + "grad_norm": 1.3258132934570312, + "learning_rate": 0.00017009150932468435, + "loss": 0.4219, + "step": 11620 + }, + { + "epoch": 0.44905208695316423, + "grad_norm": 3.250332832336426, + "learning_rate": 0.00017006576830508256, + "loss": 0.3638, + "step": 11630 + }, + { + "epoch": 0.449438202247191, + "grad_norm": 1.9546891450881958, + "learning_rate": 0.00017004002728548078, + "loss": 0.5473, + "step": 11640 + }, + { + "epoch": 0.4498243175412178, + "grad_norm": 0.6501532793045044, + "learning_rate": 0.00017001428626587902, + "loss": 0.197, + "step": 11650 + }, + { + "epoch": 0.4502104328352446, + "grad_norm": 0.7169322967529297, + "learning_rate": 0.0001699885452462772, + "loss": 0.2846, + "step": 11660 + }, + { + "epoch": 0.4505965481292714, + "grad_norm": 1.4476008415222168, + "learning_rate": 0.00016996280422667545, + "loss": 0.3601, + "step": 11670 + }, + { + "epoch": 0.4509826634232982, + "grad_norm": 1.562485933303833, + "learning_rate": 0.00016993706320707363, + "loss": 0.2876, + "step": 11680 + }, + { + "epoch": 0.451368778717325, + "grad_norm": 4.490612983703613, + "learning_rate": 0.00016991132218747184, + "loss": 0.5445, + "step": 11690 + }, + { + "epoch": 0.45175489401135177, + "grad_norm": 0.9045882225036621, + "learning_rate": 0.00016988558116787009, + "loss": 0.149, + "step": 11700 + }, + { + "epoch": 0.4521410093053786, + "grad_norm": 2.2920546531677246, + "learning_rate": 0.00016985984014826827, + "loss": 0.3922, + "step": 11710 + }, + { + "epoch": 0.45252712459940536, + "grad_norm": 4.459114074707031, + "learning_rate": 0.0001698340991286665, + "loss": 0.3386, + "step": 11720 + }, + { + "epoch": 0.4529132398934322, + "grad_norm": 3.0614171028137207, + "learning_rate": 0.0001698083581090647, + "loss": 0.3618, + "step": 11730 + }, + { + "epoch": 0.45329935518745895, + "grad_norm": 1.0597162246704102, + "learning_rate": 0.00016978261708946294, + "loss": 0.2012, + "step": 11740 + }, + { + "epoch": 0.4536854704814858, + "grad_norm": 0.9142243266105652, + "learning_rate": 0.00016975687606986112, + "loss": 0.3716, + "step": 11750 + }, + { + "epoch": 0.45407158577551254, + "grad_norm": 1.7402280569076538, + "learning_rate": 0.00016973113505025934, + "loss": 0.3046, + "step": 11760 + }, + { + "epoch": 0.45445770106953937, + "grad_norm": 0.7389079928398132, + "learning_rate": 0.00016970539403065758, + "loss": 0.287, + "step": 11770 + }, + { + "epoch": 0.45484381636356613, + "grad_norm": 2.0596721172332764, + "learning_rate": 0.00016967965301105576, + "loss": 0.5124, + 
"step": 11780 + }, + { + "epoch": 0.45522993165759296, + "grad_norm": 0.26743578910827637, + "learning_rate": 0.000169653911991454, + "loss": 0.6725, + "step": 11790 + }, + { + "epoch": 0.4556160469516198, + "grad_norm": 2.812441825866699, + "learning_rate": 0.0001696281709718522, + "loss": 0.4545, + "step": 11800 + }, + { + "epoch": 0.45600216224564655, + "grad_norm": 2.9857542514801025, + "learning_rate": 0.00016960242995225043, + "loss": 0.2865, + "step": 11810 + }, + { + "epoch": 0.45638827753967337, + "grad_norm": 0.4560181200504303, + "learning_rate": 0.00016957668893264862, + "loss": 0.1623, + "step": 11820 + }, + { + "epoch": 0.45677439283370014, + "grad_norm": 2.2492222785949707, + "learning_rate": 0.00016955094791304683, + "loss": 0.2813, + "step": 11830 + }, + { + "epoch": 0.45716050812772696, + "grad_norm": 1.0125524997711182, + "learning_rate": 0.00016952520689344507, + "loss": 0.2224, + "step": 11840 + }, + { + "epoch": 0.45754662342175373, + "grad_norm": 0.4849410057067871, + "learning_rate": 0.00016949946587384326, + "loss": 0.4838, + "step": 11850 + }, + { + "epoch": 0.45793273871578055, + "grad_norm": 2.313490390777588, + "learning_rate": 0.0001694737248542415, + "loss": 0.3855, + "step": 11860 + }, + { + "epoch": 0.4583188540098073, + "grad_norm": 2.3526558876037598, + "learning_rate": 0.00016944798383463968, + "loss": 0.3081, + "step": 11870 + }, + { + "epoch": 0.45870496930383414, + "grad_norm": 0.5723626613616943, + "learning_rate": 0.00016942224281503792, + "loss": 0.3126, + "step": 11880 + }, + { + "epoch": 0.4590910845978609, + "grad_norm": 4.130553722381592, + "learning_rate": 0.00016939650179543614, + "loss": 0.2295, + "step": 11890 + }, + { + "epoch": 0.45947719989188773, + "grad_norm": 1.3537687063217163, + "learning_rate": 0.00016937076077583432, + "loss": 0.3488, + "step": 11900 + }, + { + "epoch": 0.4598633151859145, + "grad_norm": 1.843441128730774, + "learning_rate": 0.00016934501975623256, + "loss": 0.6298, + "step": 11910 + }, + { + "epoch": 0.4602494304799413, + "grad_norm": 4.685494422912598, + "learning_rate": 0.00016931927873663075, + "loss": 0.4929, + "step": 11920 + }, + { + "epoch": 0.4606355457739681, + "grad_norm": 1.5470925569534302, + "learning_rate": 0.000169293537717029, + "loss": 0.3214, + "step": 11930 + }, + { + "epoch": 0.4610216610679949, + "grad_norm": 2.0928761959075928, + "learning_rate": 0.00016926779669742718, + "loss": 0.317, + "step": 11940 + }, + { + "epoch": 0.4614077763620217, + "grad_norm": 1.0828526020050049, + "learning_rate": 0.00016924205567782542, + "loss": 0.4204, + "step": 11950 + }, + { + "epoch": 0.4617938916560485, + "grad_norm": 2.2898383140563965, + "learning_rate": 0.00016921631465822363, + "loss": 0.4149, + "step": 11960 + }, + { + "epoch": 0.4621800069500753, + "grad_norm": 1.1590880155563354, + "learning_rate": 0.00016919057363862182, + "loss": 0.2372, + "step": 11970 + }, + { + "epoch": 0.4625661222441021, + "grad_norm": 0.6889861226081848, + "learning_rate": 0.00016916483261902006, + "loss": 0.4905, + "step": 11980 + }, + { + "epoch": 0.46295223753812886, + "grad_norm": 0.7974410653114319, + "learning_rate": 0.00016913909159941824, + "loss": 0.3451, + "step": 11990 + }, + { + "epoch": 0.4633383528321557, + "grad_norm": 3.254754066467285, + "learning_rate": 0.00016911335057981648, + "loss": 0.4018, + "step": 12000 + }, + { + "epoch": 0.46372446812618245, + "grad_norm": 2.660983085632324, + "learning_rate": 0.0001690876095602147, + "loss": 0.2112, + "step": 12010 + }, + { + "epoch": 0.4641105834202093, + 
"grad_norm": 1.1055381298065186, + "learning_rate": 0.0001690618685406129, + "loss": 0.2994, + "step": 12020 + }, + { + "epoch": 0.46449669871423604, + "grad_norm": 0.26787269115448, + "learning_rate": 0.00016903612752101112, + "loss": 0.2744, + "step": 12030 + }, + { + "epoch": 0.46488281400826287, + "grad_norm": 0.8832791447639465, + "learning_rate": 0.0001690103865014093, + "loss": 0.4357, + "step": 12040 + }, + { + "epoch": 0.4652689293022897, + "grad_norm": 1.0007046461105347, + "learning_rate": 0.00016898464548180755, + "loss": 0.2873, + "step": 12050 + }, + { + "epoch": 0.46565504459631646, + "grad_norm": 2.0366733074188232, + "learning_rate": 0.00016895890446220574, + "loss": 0.3016, + "step": 12060 + }, + { + "epoch": 0.4660411598903433, + "grad_norm": 2.557812213897705, + "learning_rate": 0.00016893316344260398, + "loss": 0.6375, + "step": 12070 + }, + { + "epoch": 0.46642727518437005, + "grad_norm": 1.9911783933639526, + "learning_rate": 0.0001689074224230022, + "loss": 0.5568, + "step": 12080 + }, + { + "epoch": 0.4668133904783969, + "grad_norm": 0.6905809044837952, + "learning_rate": 0.0001688816814034004, + "loss": 0.3237, + "step": 12090 + }, + { + "epoch": 0.46719950577242364, + "grad_norm": 2.3139610290527344, + "learning_rate": 0.00016885594038379862, + "loss": 0.3656, + "step": 12100 + }, + { + "epoch": 0.46758562106645046, + "grad_norm": 1.580295443534851, + "learning_rate": 0.0001688301993641968, + "loss": 0.3134, + "step": 12110 + }, + { + "epoch": 0.46797173636047723, + "grad_norm": 1.1115995645523071, + "learning_rate": 0.00016880445834459504, + "loss": 0.6167, + "step": 12120 + }, + { + "epoch": 0.46835785165450405, + "grad_norm": 3.0928075313568115, + "learning_rate": 0.00016877871732499323, + "loss": 0.3922, + "step": 12130 + }, + { + "epoch": 0.4687439669485308, + "grad_norm": 2.277163028717041, + "learning_rate": 0.00016875297630539147, + "loss": 0.2395, + "step": 12140 + }, + { + "epoch": 0.46913008224255764, + "grad_norm": 1.6578807830810547, + "learning_rate": 0.00016872723528578968, + "loss": 0.3502, + "step": 12150 + }, + { + "epoch": 0.4695161975365844, + "grad_norm": 2.9669971466064453, + "learning_rate": 0.0001687014942661879, + "loss": 0.3278, + "step": 12160 + }, + { + "epoch": 0.46990231283061124, + "grad_norm": 3.344914674758911, + "learning_rate": 0.0001686757532465861, + "loss": 0.4856, + "step": 12170 + }, + { + "epoch": 0.470288428124638, + "grad_norm": 1.512459635734558, + "learning_rate": 0.00016865001222698432, + "loss": 0.3113, + "step": 12180 + }, + { + "epoch": 0.4706745434186648, + "grad_norm": 1.8372429609298706, + "learning_rate": 0.00016862427120738254, + "loss": 0.3534, + "step": 12190 + }, + { + "epoch": 0.4710606587126916, + "grad_norm": 2.5016984939575195, + "learning_rate": 0.00016859853018778075, + "loss": 0.2129, + "step": 12200 + }, + { + "epoch": 0.4714467740067184, + "grad_norm": 2.083526134490967, + "learning_rate": 0.00016857278916817896, + "loss": 0.3011, + "step": 12210 + }, + { + "epoch": 0.4718328893007452, + "grad_norm": 3.3518013954162598, + "learning_rate": 0.00016854704814857718, + "loss": 0.2652, + "step": 12220 + }, + { + "epoch": 0.472219004594772, + "grad_norm": 0.34668633341789246, + "learning_rate": 0.0001685213071289754, + "loss": 0.3655, + "step": 12230 + }, + { + "epoch": 0.4726051198887988, + "grad_norm": 1.0674203634262085, + "learning_rate": 0.0001684955661093736, + "loss": 0.3217, + "step": 12240 + }, + { + "epoch": 0.4729912351828256, + "grad_norm": 2.9859087467193604, + "learning_rate": 
0.00016846982508977182, + "loss": 0.447, + "step": 12250 + }, + { + "epoch": 0.47337735047685237, + "grad_norm": 0.33996835350990295, + "learning_rate": 0.00016844408407017003, + "loss": 0.1915, + "step": 12260 + }, + { + "epoch": 0.4737634657708792, + "grad_norm": 0.10328155755996704, + "learning_rate": 0.00016841834305056824, + "loss": 0.2588, + "step": 12270 + }, + { + "epoch": 0.47414958106490596, + "grad_norm": 2.0321199893951416, + "learning_rate": 0.00016839260203096646, + "loss": 0.4788, + "step": 12280 + }, + { + "epoch": 0.4745356963589328, + "grad_norm": 1.5690089464187622, + "learning_rate": 0.00016836686101136467, + "loss": 0.5198, + "step": 12290 + }, + { + "epoch": 0.47492181165295955, + "grad_norm": 1.989465355873108, + "learning_rate": 0.00016834111999176288, + "loss": 0.2554, + "step": 12300 + }, + { + "epoch": 0.47530792694698637, + "grad_norm": 1.574174404144287, + "learning_rate": 0.0001683153789721611, + "loss": 0.3703, + "step": 12310 + }, + { + "epoch": 0.4756940422410132, + "grad_norm": 2.726776599884033, + "learning_rate": 0.0001682896379525593, + "loss": 0.7426, + "step": 12320 + }, + { + "epoch": 0.47608015753503996, + "grad_norm": 0.3101334571838379, + "learning_rate": 0.00016826389693295752, + "loss": 0.1861, + "step": 12330 + }, + { + "epoch": 0.4764662728290668, + "grad_norm": 5.30327033996582, + "learning_rate": 0.00016823815591335574, + "loss": 0.4261, + "step": 12340 + }, + { + "epoch": 0.47685238812309355, + "grad_norm": 0.5553661584854126, + "learning_rate": 0.00016821241489375395, + "loss": 0.7326, + "step": 12350 + }, + { + "epoch": 0.4772385034171204, + "grad_norm": 2.2244138717651367, + "learning_rate": 0.00016818667387415216, + "loss": 0.5406, + "step": 12360 + }, + { + "epoch": 0.47762461871114714, + "grad_norm": 0.5314281582832336, + "learning_rate": 0.00016816093285455038, + "loss": 0.3765, + "step": 12370 + }, + { + "epoch": 0.47801073400517397, + "grad_norm": 3.644477128982544, + "learning_rate": 0.0001681351918349486, + "loss": 0.5519, + "step": 12380 + }, + { + "epoch": 0.47839684929920073, + "grad_norm": 1.0556128025054932, + "learning_rate": 0.0001681094508153468, + "loss": 0.2128, + "step": 12390 + }, + { + "epoch": 0.47878296459322756, + "grad_norm": 2.7807135581970215, + "learning_rate": 0.00016808370979574502, + "loss": 0.1564, + "step": 12400 + }, + { + "epoch": 0.4791690798872543, + "grad_norm": 0.7862847447395325, + "learning_rate": 0.00016805796877614323, + "loss": 0.35, + "step": 12410 + }, + { + "epoch": 0.47955519518128115, + "grad_norm": 2.7445156574249268, + "learning_rate": 0.00016803222775654144, + "loss": 0.4167, + "step": 12420 + }, + { + "epoch": 0.4799413104753079, + "grad_norm": 0.6173526644706726, + "learning_rate": 0.00016800648673693966, + "loss": 0.3669, + "step": 12430 + }, + { + "epoch": 0.48032742576933474, + "grad_norm": 0.3762228488922119, + "learning_rate": 0.00016798074571733787, + "loss": 0.179, + "step": 12440 + }, + { + "epoch": 0.4807135410633615, + "grad_norm": 1.2806499004364014, + "learning_rate": 0.00016795500469773608, + "loss": 0.3926, + "step": 12450 + }, + { + "epoch": 0.48109965635738833, + "grad_norm": 1.0579105615615845, + "learning_rate": 0.0001679292636781343, + "loss": 0.3329, + "step": 12460 + }, + { + "epoch": 0.4814857716514151, + "grad_norm": 1.6502699851989746, + "learning_rate": 0.0001679035226585325, + "loss": 0.3149, + "step": 12470 + }, + { + "epoch": 0.4818718869454419, + "grad_norm": 0.8876189589500427, + "learning_rate": 0.00016787778163893072, + "loss": 0.3262, + "step": 12480 + 
}, + { + "epoch": 0.4822580022394687, + "grad_norm": 2.4398770332336426, + "learning_rate": 0.00016785204061932894, + "loss": 0.3834, + "step": 12490 + }, + { + "epoch": 0.4826441175334955, + "grad_norm": 1.275148868560791, + "learning_rate": 0.00016782629959972715, + "loss": 0.3749, + "step": 12500 + }, + { + "epoch": 0.4830302328275223, + "grad_norm": 2.1397783756256104, + "learning_rate": 0.0001678005585801254, + "loss": 0.4726, + "step": 12510 + }, + { + "epoch": 0.4834163481215491, + "grad_norm": 1.6431002616882324, + "learning_rate": 0.00016777481756052358, + "loss": 0.3537, + "step": 12520 + }, + { + "epoch": 0.48380246341557587, + "grad_norm": 1.1193108558654785, + "learning_rate": 0.0001677490765409218, + "loss": 0.3182, + "step": 12530 + }, + { + "epoch": 0.4841885787096027, + "grad_norm": 1.365897297859192, + "learning_rate": 0.00016772333552132, + "loss": 0.3455, + "step": 12540 + }, + { + "epoch": 0.48457469400362946, + "grad_norm": 0.9035172462463379, + "learning_rate": 0.00016769759450171822, + "loss": 0.2268, + "step": 12550 + }, + { + "epoch": 0.4849608092976563, + "grad_norm": 0.15492293238639832, + "learning_rate": 0.00016767185348211643, + "loss": 0.319, + "step": 12560 + }, + { + "epoch": 0.4853469245916831, + "grad_norm": 0.19394727051258087, + "learning_rate": 0.00016764611246251464, + "loss": 0.2073, + "step": 12570 + }, + { + "epoch": 0.48573303988570987, + "grad_norm": 1.4406816959381104, + "learning_rate": 0.00016762037144291288, + "loss": 0.4538, + "step": 12580 + }, + { + "epoch": 0.4861191551797367, + "grad_norm": 2.741548538208008, + "learning_rate": 0.00016759463042331107, + "loss": 0.536, + "step": 12590 + }, + { + "epoch": 0.48650527047376346, + "grad_norm": 0.5203917622566223, + "learning_rate": 0.00016756888940370928, + "loss": 0.2484, + "step": 12600 + }, + { + "epoch": 0.4868913857677903, + "grad_norm": 0.9666195511817932, + "learning_rate": 0.0001675431483841075, + "loss": 0.3834, + "step": 12610 + }, + { + "epoch": 0.48727750106181705, + "grad_norm": 1.7210304737091064, + "learning_rate": 0.0001675174073645057, + "loss": 0.3223, + "step": 12620 + }, + { + "epoch": 0.4876636163558439, + "grad_norm": 0.6125622987747192, + "learning_rate": 0.00016749166634490392, + "loss": 0.394, + "step": 12630 + }, + { + "epoch": 0.48804973164987064, + "grad_norm": 1.9612951278686523, + "learning_rate": 0.00016746592532530214, + "loss": 0.3367, + "step": 12640 + }, + { + "epoch": 0.48843584694389747, + "grad_norm": 1.5395468473434448, + "learning_rate": 0.00016744018430570038, + "loss": 0.5441, + "step": 12650 + }, + { + "epoch": 0.48882196223792423, + "grad_norm": 0.8706358671188354, + "learning_rate": 0.00016741444328609856, + "loss": 0.422, + "step": 12660 + }, + { + "epoch": 0.48920807753195106, + "grad_norm": 4.338102340698242, + "learning_rate": 0.00016738870226649678, + "loss": 0.3932, + "step": 12670 + }, + { + "epoch": 0.4895941928259778, + "grad_norm": 0.9501354694366455, + "learning_rate": 0.000167362961246895, + "loss": 0.563, + "step": 12680 + }, + { + "epoch": 0.48998030812000465, + "grad_norm": 1.3146884441375732, + "learning_rate": 0.0001673372202272932, + "loss": 0.5116, + "step": 12690 + }, + { + "epoch": 0.4903664234140314, + "grad_norm": 2.144622564315796, + "learning_rate": 0.00016731147920769144, + "loss": 0.4792, + "step": 12700 + }, + { + "epoch": 0.49075253870805824, + "grad_norm": 0.9871418476104736, + "learning_rate": 0.00016728573818808963, + "loss": 0.3802, + "step": 12710 + }, + { + "epoch": 0.491138654002085, + "grad_norm": 
1.710766315460205, + "learning_rate": 0.00016725999716848787, + "loss": 0.2859, + "step": 12720 + }, + { + "epoch": 0.49152476929611183, + "grad_norm": 2.528146505355835, + "learning_rate": 0.00016723425614888606, + "loss": 0.4565, + "step": 12730 + }, + { + "epoch": 0.4919108845901386, + "grad_norm": 0.8471786379814148, + "learning_rate": 0.00016720851512928427, + "loss": 0.3455, + "step": 12740 + }, + { + "epoch": 0.4922969998841654, + "grad_norm": 2.6623692512512207, + "learning_rate": 0.00016718277410968248, + "loss": 0.3687, + "step": 12750 + }, + { + "epoch": 0.4926831151781922, + "grad_norm": 2.9324758052825928, + "learning_rate": 0.0001671570330900807, + "loss": 0.2652, + "step": 12760 + }, + { + "epoch": 0.493069230472219, + "grad_norm": 1.7955294847488403, + "learning_rate": 0.00016713129207047894, + "loss": 0.3156, + "step": 12770 + }, + { + "epoch": 0.4934553457662458, + "grad_norm": 0.9923033118247986, + "learning_rate": 0.00016710555105087712, + "loss": 0.3042, + "step": 12780 + }, + { + "epoch": 0.4938414610602726, + "grad_norm": 0.9309022426605225, + "learning_rate": 0.00016707981003127536, + "loss": 0.2742, + "step": 12790 + }, + { + "epoch": 0.49422757635429937, + "grad_norm": 4.864802360534668, + "learning_rate": 0.00016705406901167355, + "loss": 0.6145, + "step": 12800 + }, + { + "epoch": 0.4946136916483262, + "grad_norm": 2.4508230686187744, + "learning_rate": 0.00016702832799207176, + "loss": 0.4218, + "step": 12810 + }, + { + "epoch": 0.49499980694235296, + "grad_norm": 2.0317444801330566, + "learning_rate": 0.00016700258697246998, + "loss": 0.5682, + "step": 12820 + }, + { + "epoch": 0.4953859222363798, + "grad_norm": 4.89669942855835, + "learning_rate": 0.0001669768459528682, + "loss": 0.5654, + "step": 12830 + }, + { + "epoch": 0.4957720375304066, + "grad_norm": 0.602165162563324, + "learning_rate": 0.00016695110493326643, + "loss": 0.2719, + "step": 12840 + }, + { + "epoch": 0.4961581528244334, + "grad_norm": 1.1574476957321167, + "learning_rate": 0.00016692536391366462, + "loss": 0.3872, + "step": 12850 + }, + { + "epoch": 0.4965442681184602, + "grad_norm": 0.4792019724845886, + "learning_rate": 0.00016689962289406286, + "loss": 0.2671, + "step": 12860 + }, + { + "epoch": 0.49693038341248696, + "grad_norm": 1.4611676931381226, + "learning_rate": 0.00016687388187446104, + "loss": 0.3378, + "step": 12870 + }, + { + "epoch": 0.4973164987065138, + "grad_norm": 1.183975338935852, + "learning_rate": 0.00016684814085485928, + "loss": 0.2645, + "step": 12880 + }, + { + "epoch": 0.49770261400054056, + "grad_norm": 2.1447482109069824, + "learning_rate": 0.0001668223998352575, + "loss": 0.1678, + "step": 12890 + }, + { + "epoch": 0.4980887292945674, + "grad_norm": 1.5829964876174927, + "learning_rate": 0.00016679665881565568, + "loss": 0.4113, + "step": 12900 + }, + { + "epoch": 0.49847484458859415, + "grad_norm": 1.329871654510498, + "learning_rate": 0.00016677091779605392, + "loss": 0.2639, + "step": 12910 + }, + { + "epoch": 0.49886095988262097, + "grad_norm": 4.797327518463135, + "learning_rate": 0.0001667451767764521, + "loss": 0.3396, + "step": 12920 + }, + { + "epoch": 0.49924707517664774, + "grad_norm": 2.5864250659942627, + "learning_rate": 0.00016671943575685035, + "loss": 0.4143, + "step": 12930 + }, + { + "epoch": 0.49963319047067456, + "grad_norm": 0.05073557794094086, + "learning_rate": 0.00016669369473724854, + "loss": 0.231, + "step": 12940 + }, + { + "epoch": 0.5000193057647013, + "grad_norm": 0.9530317783355713, + "learning_rate": 0.00016666795371764678, 
+ "loss": 0.3702, + "step": 12950 + }, + { + "epoch": 0.5004054210587281, + "grad_norm": 1.3573989868164062, + "learning_rate": 0.000166642212698045, + "loss": 0.2095, + "step": 12960 + }, + { + "epoch": 0.500791536352755, + "grad_norm": 3.2758514881134033, + "learning_rate": 0.00016661647167844318, + "loss": 0.3901, + "step": 12970 + }, + { + "epoch": 0.5011776516467817, + "grad_norm": 2.359602212905884, + "learning_rate": 0.00016659073065884142, + "loss": 0.6232, + "step": 12980 + }, + { + "epoch": 0.5015637669408085, + "grad_norm": 0.5743809938430786, + "learning_rate": 0.0001665649896392396, + "loss": 0.431, + "step": 12990 + }, + { + "epoch": 0.5019498822348353, + "grad_norm": 2.229215621948242, + "learning_rate": 0.00016653924861963784, + "loss": 0.33, + "step": 13000 + }, + { + "epoch": 0.5023359975288622, + "grad_norm": 3.1233408451080322, + "learning_rate": 0.00016651350760003606, + "loss": 0.3527, + "step": 13010 + }, + { + "epoch": 0.5027221128228889, + "grad_norm": 0.4846508204936981, + "learning_rate": 0.00016648776658043427, + "loss": 0.4514, + "step": 13020 + }, + { + "epoch": 0.5031082281169157, + "grad_norm": 0.807447612285614, + "learning_rate": 0.00016646202556083248, + "loss": 0.3643, + "step": 13030 + }, + { + "epoch": 0.5034943434109425, + "grad_norm": 3.226194143295288, + "learning_rate": 0.00016643628454123067, + "loss": 0.4803, + "step": 13040 + }, + { + "epoch": 0.5038804587049693, + "grad_norm": 6.127805233001709, + "learning_rate": 0.0001664105435216289, + "loss": 0.3501, + "step": 13050 + }, + { + "epoch": 0.5042665739989961, + "grad_norm": 1.3367782831192017, + "learning_rate": 0.0001663848025020271, + "loss": 0.2259, + "step": 13060 + }, + { + "epoch": 0.5046526892930229, + "grad_norm": 1.112602710723877, + "learning_rate": 0.00016635906148242534, + "loss": 0.3868, + "step": 13070 + }, + { + "epoch": 0.5050388045870496, + "grad_norm": 3.188282012939453, + "learning_rate": 0.00016633332046282355, + "loss": 0.2783, + "step": 13080 + }, + { + "epoch": 0.5054249198810765, + "grad_norm": 1.5972063541412354, + "learning_rate": 0.00016630757944322176, + "loss": 0.4661, + "step": 13090 + }, + { + "epoch": 0.5058110351751033, + "grad_norm": 0.8210055232048035, + "learning_rate": 0.00016628183842361998, + "loss": 0.2536, + "step": 13100 + }, + { + "epoch": 0.50619715046913, + "grad_norm": 2.279244899749756, + "learning_rate": 0.00016625609740401816, + "loss": 0.3158, + "step": 13110 + }, + { + "epoch": 0.5065832657631569, + "grad_norm": 2.9017488956451416, + "learning_rate": 0.0001662303563844164, + "loss": 0.4209, + "step": 13120 + }, + { + "epoch": 0.5069693810571837, + "grad_norm": 0.9567920565605164, + "learning_rate": 0.0001662046153648146, + "loss": 0.3869, + "step": 13130 + }, + { + "epoch": 0.5073554963512105, + "grad_norm": 1.3605408668518066, + "learning_rate": 0.00016617887434521283, + "loss": 0.3529, + "step": 13140 + }, + { + "epoch": 0.5077416116452372, + "grad_norm": 0.502921998500824, + "learning_rate": 0.00016615313332561104, + "loss": 0.2584, + "step": 13150 + }, + { + "epoch": 0.5081277269392641, + "grad_norm": 2.9092366695404053, + "learning_rate": 0.00016612739230600926, + "loss": 0.3494, + "step": 13160 + }, + { + "epoch": 0.5085138422332909, + "grad_norm": 0.28300145268440247, + "learning_rate": 0.00016610165128640747, + "loss": 0.3514, + "step": 13170 + }, + { + "epoch": 0.5088999575273176, + "grad_norm": 1.326134204864502, + "learning_rate": 0.00016607591026680566, + "loss": 0.1593, + "step": 13180 + }, + { + "epoch": 0.5092860728213444, + 
"grad_norm": 2.1484436988830566, + "learning_rate": 0.0001660501692472039, + "loss": 0.4468, + "step": 13190 + }, + { + "epoch": 0.5096721881153713, + "grad_norm": 1.9255646467208862, + "learning_rate": 0.0001660244282276021, + "loss": 0.3937, + "step": 13200 + }, + { + "epoch": 0.5100583034093981, + "grad_norm": 0.01497764140367508, + "learning_rate": 0.00016599868720800032, + "loss": 0.2795, + "step": 13210 + }, + { + "epoch": 0.5104444187034248, + "grad_norm": 0.992023766040802, + "learning_rate": 0.00016597294618839854, + "loss": 0.2389, + "step": 13220 + }, + { + "epoch": 0.5108305339974516, + "grad_norm": 1.517337441444397, + "learning_rate": 0.00016594720516879675, + "loss": 0.2821, + "step": 13230 + }, + { + "epoch": 0.5112166492914785, + "grad_norm": 2.355637311935425, + "learning_rate": 0.00016592146414919496, + "loss": 0.5727, + "step": 13240 + }, + { + "epoch": 0.5116027645855052, + "grad_norm": 2.450536012649536, + "learning_rate": 0.00016589572312959315, + "loss": 0.4887, + "step": 13250 + }, + { + "epoch": 0.511988879879532, + "grad_norm": 1.2966598272323608, + "learning_rate": 0.0001658699821099914, + "loss": 0.3605, + "step": 13260 + }, + { + "epoch": 0.5123749951735588, + "grad_norm": 1.3981765508651733, + "learning_rate": 0.0001658442410903896, + "loss": 0.5329, + "step": 13270 + }, + { + "epoch": 0.5127611104675857, + "grad_norm": 0.7260739803314209, + "learning_rate": 0.00016581850007078782, + "loss": 0.3447, + "step": 13280 + }, + { + "epoch": 0.5131472257616124, + "grad_norm": 1.1348093748092651, + "learning_rate": 0.00016579275905118603, + "loss": 0.3086, + "step": 13290 + }, + { + "epoch": 0.5135333410556392, + "grad_norm": 1.3015291690826416, + "learning_rate": 0.00016576701803158424, + "loss": 0.3411, + "step": 13300 + }, + { + "epoch": 0.513919456349666, + "grad_norm": 2.352766990661621, + "learning_rate": 0.00016574127701198246, + "loss": 0.1617, + "step": 13310 + }, + { + "epoch": 0.5143055716436928, + "grad_norm": 2.0096113681793213, + "learning_rate": 0.00016571553599238067, + "loss": 0.3168, + "step": 13320 + }, + { + "epoch": 0.5146916869377196, + "grad_norm": 0.8163488507270813, + "learning_rate": 0.00016568979497277888, + "loss": 0.3318, + "step": 13330 + }, + { + "epoch": 0.5150778022317464, + "grad_norm": 0.8218249082565308, + "learning_rate": 0.0001656640539531771, + "loss": 0.2303, + "step": 13340 + }, + { + "epoch": 0.5154639175257731, + "grad_norm": 3.5414462089538574, + "learning_rate": 0.0001656383129335753, + "loss": 0.2295, + "step": 13350 + }, + { + "epoch": 0.5158500328198, + "grad_norm": 2.176178216934204, + "learning_rate": 0.00016561257191397352, + "loss": 0.5767, + "step": 13360 + }, + { + "epoch": 0.5162361481138268, + "grad_norm": 1.4649319648742676, + "learning_rate": 0.00016558683089437174, + "loss": 0.174, + "step": 13370 + }, + { + "epoch": 0.5166222634078536, + "grad_norm": 2.107895851135254, + "learning_rate": 0.00016556108987476995, + "loss": 0.3444, + "step": 13380 + }, + { + "epoch": 0.5170083787018804, + "grad_norm": 1.3116638660430908, + "learning_rate": 0.00016553534885516816, + "loss": 0.2462, + "step": 13390 + }, + { + "epoch": 0.5173944939959072, + "grad_norm": 2.867553949356079, + "learning_rate": 0.00016550960783556638, + "loss": 0.3106, + "step": 13400 + }, + { + "epoch": 0.517780609289934, + "grad_norm": 3.4331533908843994, + "learning_rate": 0.0001654838668159646, + "loss": 0.5359, + "step": 13410 + }, + { + "epoch": 0.5181667245839607, + "grad_norm": 1.8306528329849243, + "learning_rate": 0.0001654581257963628, + 
"loss": 0.4225, + "step": 13420 + }, + { + "epoch": 0.5185528398779876, + "grad_norm": 1.242026448249817, + "learning_rate": 0.00016543238477676102, + "loss": 0.2679, + "step": 13430 + }, + { + "epoch": 0.5189389551720144, + "grad_norm": 0.5788571834564209, + "learning_rate": 0.00016540664375715923, + "loss": 0.369, + "step": 13440 + }, + { + "epoch": 0.5193250704660411, + "grad_norm": 0.22553350031375885, + "learning_rate": 0.00016538090273755744, + "loss": 0.422, + "step": 13450 + }, + { + "epoch": 0.5197111857600679, + "grad_norm": 1.6932384967803955, + "learning_rate": 0.00016535516171795566, + "loss": 0.3127, + "step": 13460 + }, + { + "epoch": 0.5200973010540948, + "grad_norm": 1.3747683763504028, + "learning_rate": 0.00016532942069835387, + "loss": 0.2751, + "step": 13470 + }, + { + "epoch": 0.5204834163481216, + "grad_norm": 2.0508596897125244, + "learning_rate": 0.00016530367967875208, + "loss": 0.4951, + "step": 13480 + }, + { + "epoch": 0.5208695316421483, + "grad_norm": 1.044503092765808, + "learning_rate": 0.0001652779386591503, + "loss": 0.2329, + "step": 13490 + }, + { + "epoch": 0.5212556469361751, + "grad_norm": 1.6400004625320435, + "learning_rate": 0.0001652521976395485, + "loss": 0.1846, + "step": 13500 + }, + { + "epoch": 0.521641762230202, + "grad_norm": 2.234170913696289, + "learning_rate": 0.00016522645661994672, + "loss": 0.5005, + "step": 13510 + }, + { + "epoch": 0.5220278775242287, + "grad_norm": 1.8167870044708252, + "learning_rate": 0.00016520071560034493, + "loss": 0.4629, + "step": 13520 + }, + { + "epoch": 0.5224139928182555, + "grad_norm": 1.7200794219970703, + "learning_rate": 0.00016517497458074315, + "loss": 0.4546, + "step": 13530 + }, + { + "epoch": 0.5228001081122823, + "grad_norm": 2.845244884490967, + "learning_rate": 0.00016514923356114136, + "loss": 0.2211, + "step": 13540 + }, + { + "epoch": 0.5231862234063092, + "grad_norm": 5.3078389167785645, + "learning_rate": 0.00016512349254153957, + "loss": 0.4903, + "step": 13550 + }, + { + "epoch": 0.5235723387003359, + "grad_norm": 8.109561920166016, + "learning_rate": 0.0001650977515219378, + "loss": 0.3624, + "step": 13560 + }, + { + "epoch": 0.5239584539943627, + "grad_norm": 0.537749171257019, + "learning_rate": 0.000165072010502336, + "loss": 0.2262, + "step": 13570 + }, + { + "epoch": 0.5243445692883895, + "grad_norm": 2.8182802200317383, + "learning_rate": 0.00016504626948273421, + "loss": 0.4173, + "step": 13580 + }, + { + "epoch": 0.5247306845824163, + "grad_norm": 0.6623479127883911, + "learning_rate": 0.00016502052846313243, + "loss": 0.2955, + "step": 13590 + }, + { + "epoch": 0.5251167998764431, + "grad_norm": 3.027707099914551, + "learning_rate": 0.00016499478744353064, + "loss": 0.5272, + "step": 13600 + }, + { + "epoch": 0.5255029151704699, + "grad_norm": 1.6830018758773804, + "learning_rate": 0.00016496904642392885, + "loss": 0.2169, + "step": 13610 + }, + { + "epoch": 0.5258890304644968, + "grad_norm": 3.0182113647460938, + "learning_rate": 0.00016494330540432707, + "loss": 0.5747, + "step": 13620 + }, + { + "epoch": 0.5262751457585235, + "grad_norm": 1.5487585067749023, + "learning_rate": 0.00016491756438472528, + "loss": 0.2585, + "step": 13630 + }, + { + "epoch": 0.5266612610525503, + "grad_norm": 1.2525122165679932, + "learning_rate": 0.0001648918233651235, + "loss": 0.3622, + "step": 13640 + }, + { + "epoch": 0.5270473763465771, + "grad_norm": 1.9910658597946167, + "learning_rate": 0.00016486608234552174, + "loss": 0.2049, + "step": 13650 + }, + { + "epoch": 0.5274334916406039, + 
"grad_norm": 0.7254251837730408, + "learning_rate": 0.00016484034132591992, + "loss": 0.3101, + "step": 13660 + }, + { + "epoch": 0.5278196069346307, + "grad_norm": 0.9839001893997192, + "learning_rate": 0.00016481460030631813, + "loss": 0.4723, + "step": 13670 + }, + { + "epoch": 0.5282057222286575, + "grad_norm": 2.176529884338379, + "learning_rate": 0.00016478885928671635, + "loss": 0.3669, + "step": 13680 + }, + { + "epoch": 0.5285918375226842, + "grad_norm": 2.534996509552002, + "learning_rate": 0.00016476311826711456, + "loss": 0.551, + "step": 13690 + }, + { + "epoch": 0.5289779528167111, + "grad_norm": 2.0100669860839844, + "learning_rate": 0.0001647373772475128, + "loss": 0.445, + "step": 13700 + }, + { + "epoch": 0.5293640681107379, + "grad_norm": 0.7759265899658203, + "learning_rate": 0.000164711636227911, + "loss": 0.2839, + "step": 13710 + }, + { + "epoch": 0.5297501834047647, + "grad_norm": 3.398287057876587, + "learning_rate": 0.00016468589520830923, + "loss": 0.3497, + "step": 13720 + }, + { + "epoch": 0.5301362986987914, + "grad_norm": 2.6792221069335938, + "learning_rate": 0.00016466015418870741, + "loss": 0.3435, + "step": 13730 + }, + { + "epoch": 0.5305224139928183, + "grad_norm": 0.7382081747055054, + "learning_rate": 0.00016463441316910563, + "loss": 0.367, + "step": 13740 + }, + { + "epoch": 0.5309085292868451, + "grad_norm": 0.9496407508850098, + "learning_rate": 0.00016460867214950384, + "loss": 0.305, + "step": 13750 + }, + { + "epoch": 0.5312946445808718, + "grad_norm": 1.2950342893600464, + "learning_rate": 0.00016458293112990205, + "loss": 0.2769, + "step": 13760 + }, + { + "epoch": 0.5316807598748986, + "grad_norm": 1.1744359731674194, + "learning_rate": 0.0001645571901103003, + "loss": 0.4776, + "step": 13770 + }, + { + "epoch": 0.5320668751689255, + "grad_norm": 1.1507617235183716, + "learning_rate": 0.00016453144909069848, + "loss": 0.4486, + "step": 13780 + }, + { + "epoch": 0.5324529904629522, + "grad_norm": 3.200432300567627, + "learning_rate": 0.00016450570807109672, + "loss": 0.4144, + "step": 13790 + }, + { + "epoch": 0.532839105756979, + "grad_norm": 0.991581916809082, + "learning_rate": 0.0001644799670514949, + "loss": 0.2314, + "step": 13800 + }, + { + "epoch": 0.5332252210510058, + "grad_norm": 4.800248622894287, + "learning_rate": 0.00016445422603189312, + "loss": 0.4601, + "step": 13810 + }, + { + "epoch": 0.5336113363450327, + "grad_norm": 1.2141329050064087, + "learning_rate": 0.00016442848501229136, + "loss": 0.257, + "step": 13820 + }, + { + "epoch": 0.5339974516390594, + "grad_norm": 0.8803738951683044, + "learning_rate": 0.00016440274399268955, + "loss": 0.4645, + "step": 13830 + }, + { + "epoch": 0.5343835669330862, + "grad_norm": 1.2020646333694458, + "learning_rate": 0.0001643770029730878, + "loss": 0.3751, + "step": 13840 + }, + { + "epoch": 0.534769682227113, + "grad_norm": 0.9887505173683167, + "learning_rate": 0.00016435126195348597, + "loss": 0.2266, + "step": 13850 + }, + { + "epoch": 0.5351557975211398, + "grad_norm": 0.38067731261253357, + "learning_rate": 0.00016432552093388421, + "loss": 0.3482, + "step": 13860 + }, + { + "epoch": 0.5355419128151666, + "grad_norm": 5.429462909698486, + "learning_rate": 0.0001642997799142824, + "loss": 0.4055, + "step": 13870 + }, + { + "epoch": 0.5359280281091934, + "grad_norm": 2.197861909866333, + "learning_rate": 0.00016427403889468061, + "loss": 0.1701, + "step": 13880 + }, + { + "epoch": 0.5363141434032203, + "grad_norm": 1.467132568359375, + "learning_rate": 0.00016424829787507885, + 
"loss": 0.3536, + "step": 13890 + }, + { + "epoch": 0.536700258697247, + "grad_norm": 2.0702550411224365, + "learning_rate": 0.00016422255685547704, + "loss": 0.5644, + "step": 13900 + }, + { + "epoch": 0.5370863739912738, + "grad_norm": 1.4855132102966309, + "learning_rate": 0.00016419681583587528, + "loss": 0.4068, + "step": 13910 + }, + { + "epoch": 0.5374724892853006, + "grad_norm": 0.6586676239967346, + "learning_rate": 0.00016417107481627347, + "loss": 0.3709, + "step": 13920 + }, + { + "epoch": 0.5378586045793274, + "grad_norm": 0.026774466037750244, + "learning_rate": 0.0001641453337966717, + "loss": 0.2635, + "step": 13930 + }, + { + "epoch": 0.5382447198733542, + "grad_norm": 0.9848103523254395, + "learning_rate": 0.0001641195927770699, + "loss": 0.249, + "step": 13940 + }, + { + "epoch": 0.538630835167381, + "grad_norm": 3.77512526512146, + "learning_rate": 0.0001640938517574681, + "loss": 0.5356, + "step": 13950 + }, + { + "epoch": 0.5390169504614077, + "grad_norm": 5.543573379516602, + "learning_rate": 0.00016406811073786635, + "loss": 0.6471, + "step": 13960 + }, + { + "epoch": 0.5394030657554346, + "grad_norm": 0.8161652684211731, + "learning_rate": 0.00016404236971826453, + "loss": 0.5691, + "step": 13970 + }, + { + "epoch": 0.5397891810494614, + "grad_norm": 1.5539859533309937, + "learning_rate": 0.00016401662869866277, + "loss": 0.4406, + "step": 13980 + }, + { + "epoch": 0.5401752963434882, + "grad_norm": 1.3035658597946167, + "learning_rate": 0.00016399088767906096, + "loss": 0.3086, + "step": 13990 + }, + { + "epoch": 0.5405614116375149, + "grad_norm": 0.9168418645858765, + "learning_rate": 0.0001639651466594592, + "loss": 0.1581, + "step": 14000 + }, + { + "epoch": 0.5409475269315418, + "grad_norm": 1.0382287502288818, + "learning_rate": 0.00016393940563985741, + "loss": 0.4723, + "step": 14010 + }, + { + "epoch": 0.5413336422255686, + "grad_norm": 2.896981716156006, + "learning_rate": 0.0001639136646202556, + "loss": 0.2999, + "step": 14020 + }, + { + "epoch": 0.5417197575195953, + "grad_norm": 0.7354179620742798, + "learning_rate": 0.00016388792360065384, + "loss": 0.4853, + "step": 14030 + }, + { + "epoch": 0.5421058728136221, + "grad_norm": 3.221067190170288, + "learning_rate": 0.00016386218258105203, + "loss": 0.3622, + "step": 14040 + }, + { + "epoch": 0.542491988107649, + "grad_norm": 6.591146469116211, + "learning_rate": 0.00016383644156145027, + "loss": 0.5803, + "step": 14050 + }, + { + "epoch": 0.5428781034016757, + "grad_norm": 3.1521377563476562, + "learning_rate": 0.00016381070054184845, + "loss": 0.3267, + "step": 14060 + }, + { + "epoch": 0.5432642186957025, + "grad_norm": 1.7890762090682983, + "learning_rate": 0.0001637849595222467, + "loss": 0.4584, + "step": 14070 + }, + { + "epoch": 0.5436503339897293, + "grad_norm": 1.6599558591842651, + "learning_rate": 0.0001637592185026449, + "loss": 0.298, + "step": 14080 + }, + { + "epoch": 0.5440364492837562, + "grad_norm": 3.521927833557129, + "learning_rate": 0.00016373347748304312, + "loss": 0.3743, + "step": 14090 + }, + { + "epoch": 0.5444225645777829, + "grad_norm": 3.8942599296569824, + "learning_rate": 0.00016370773646344133, + "loss": 0.3254, + "step": 14100 + }, + { + "epoch": 0.5448086798718097, + "grad_norm": 2.8547496795654297, + "learning_rate": 0.00016368199544383952, + "loss": 0.4073, + "step": 14110 + }, + { + "epoch": 0.5451947951658365, + "grad_norm": 1.0060430765151978, + "learning_rate": 0.00016365625442423776, + "loss": 0.1631, + "step": 14120 + }, + { + "epoch": 0.5455809104598633, + 
"grad_norm": 2.2001001834869385, + "learning_rate": 0.00016363051340463595, + "loss": 0.2854, + "step": 14130 + }, + { + "epoch": 0.5459670257538901, + "grad_norm": 1.3699944019317627, + "learning_rate": 0.0001636047723850342, + "loss": 0.456, + "step": 14140 + }, + { + "epoch": 0.5463531410479169, + "grad_norm": 2.1481733322143555, + "learning_rate": 0.0001635790313654324, + "loss": 0.4085, + "step": 14150 + }, + { + "epoch": 0.5467392563419438, + "grad_norm": 0.40439683198928833, + "learning_rate": 0.00016355329034583061, + "loss": 0.3932, + "step": 14160 + }, + { + "epoch": 0.5471253716359705, + "grad_norm": 2.2773404121398926, + "learning_rate": 0.00016352754932622883, + "loss": 0.2926, + "step": 14170 + }, + { + "epoch": 0.5475114869299973, + "grad_norm": 2.2974839210510254, + "learning_rate": 0.000163501808306627, + "loss": 0.2419, + "step": 14180 + }, + { + "epoch": 0.5478976022240241, + "grad_norm": 1.0429989099502563, + "learning_rate": 0.00016347606728702525, + "loss": 0.2559, + "step": 14190 + }, + { + "epoch": 0.5482837175180509, + "grad_norm": 0.8988879919052124, + "learning_rate": 0.00016345032626742347, + "loss": 0.1677, + "step": 14200 + }, + { + "epoch": 0.5486698328120777, + "grad_norm": 1.2740018367767334, + "learning_rate": 0.00016342458524782168, + "loss": 0.2452, + "step": 14210 + }, + { + "epoch": 0.5490559481061045, + "grad_norm": 1.789467692375183, + "learning_rate": 0.0001633988442282199, + "loss": 0.2272, + "step": 14220 + }, + { + "epoch": 0.5494420634001312, + "grad_norm": 3.2136781215667725, + "learning_rate": 0.0001633731032086181, + "loss": 0.4026, + "step": 14230 + }, + { + "epoch": 0.5498281786941581, + "grad_norm": 2.4747092723846436, + "learning_rate": 0.00016334736218901632, + "loss": 0.2371, + "step": 14240 + }, + { + "epoch": 0.5502142939881849, + "grad_norm": 1.5639567375183105, + "learning_rate": 0.0001633216211694145, + "loss": 0.2801, + "step": 14250 + }, + { + "epoch": 0.5506004092822117, + "grad_norm": 3.9598312377929688, + "learning_rate": 0.00016329588014981275, + "loss": 0.2583, + "step": 14260 + }, + { + "epoch": 0.5509865245762384, + "grad_norm": 1.5873563289642334, + "learning_rate": 0.00016327013913021096, + "loss": 0.2729, + "step": 14270 + }, + { + "epoch": 0.5513726398702653, + "grad_norm": 2.2313668727874756, + "learning_rate": 0.00016324439811060917, + "loss": 0.191, + "step": 14280 + }, + { + "epoch": 0.5517587551642921, + "grad_norm": 1.6087117195129395, + "learning_rate": 0.0001632186570910074, + "loss": 0.2698, + "step": 14290 + }, + { + "epoch": 0.5521448704583188, + "grad_norm": 6.5459675788879395, + "learning_rate": 0.0001631929160714056, + "loss": 0.3632, + "step": 14300 + }, + { + "epoch": 0.5525309857523456, + "grad_norm": 1.2121779918670654, + "learning_rate": 0.00016316717505180381, + "loss": 0.4541, + "step": 14310 + }, + { + "epoch": 0.5529171010463725, + "grad_norm": 2.7277257442474365, + "learning_rate": 0.00016314143403220203, + "loss": 0.1489, + "step": 14320 + }, + { + "epoch": 0.5533032163403993, + "grad_norm": 2.2566685676574707, + "learning_rate": 0.00016311569301260024, + "loss": 0.1838, + "step": 14330 + }, + { + "epoch": 0.553689331634426, + "grad_norm": 0.44783294200897217, + "learning_rate": 0.00016308995199299845, + "loss": 0.4745, + "step": 14340 + }, + { + "epoch": 0.5540754469284528, + "grad_norm": 1.0200363397598267, + "learning_rate": 0.00016306421097339667, + "loss": 0.1251, + "step": 14350 + }, + { + "epoch": 0.5544615622224797, + "grad_norm": 1.1761879920959473, + "learning_rate": 
0.00016303846995379488, + "loss": 0.6837, + "step": 14360 + }, + { + "epoch": 0.5548476775165064, + "grad_norm": 1.8275704383850098, + "learning_rate": 0.0001630127289341931, + "loss": 0.3968, + "step": 14370 + }, + { + "epoch": 0.5552337928105332, + "grad_norm": 0.7219232320785522, + "learning_rate": 0.0001629869879145913, + "loss": 0.3278, + "step": 14380 + }, + { + "epoch": 0.55561990810456, + "grad_norm": 1.9161540269851685, + "learning_rate": 0.00016296124689498952, + "loss": 0.5071, + "step": 14390 + }, + { + "epoch": 0.5560060233985868, + "grad_norm": 2.4773502349853516, + "learning_rate": 0.00016293550587538773, + "loss": 0.3268, + "step": 14400 + }, + { + "epoch": 0.5563921386926136, + "grad_norm": 1.526877760887146, + "learning_rate": 0.00016290976485578595, + "loss": 0.284, + "step": 14410 + }, + { + "epoch": 0.5567782539866404, + "grad_norm": 2.082036018371582, + "learning_rate": 0.00016288402383618416, + "loss": 0.4194, + "step": 14420 + }, + { + "epoch": 0.5571643692806673, + "grad_norm": 4.1033477783203125, + "learning_rate": 0.00016285828281658237, + "loss": 0.329, + "step": 14430 + }, + { + "epoch": 0.557550484574694, + "grad_norm": 3.344879388809204, + "learning_rate": 0.0001628325417969806, + "loss": 0.3599, + "step": 14440 + }, + { + "epoch": 0.5579365998687208, + "grad_norm": 2.6200602054595947, + "learning_rate": 0.0001628068007773788, + "loss": 0.5152, + "step": 14450 + }, + { + "epoch": 0.5583227151627476, + "grad_norm": 1.05362868309021, + "learning_rate": 0.000162781059757777, + "loss": 0.4454, + "step": 14460 + }, + { + "epoch": 0.5587088304567744, + "grad_norm": 2.557406187057495, + "learning_rate": 0.00016275531873817523, + "loss": 0.3779, + "step": 14470 + }, + { + "epoch": 0.5590949457508012, + "grad_norm": 0.8478209376335144, + "learning_rate": 0.00016272957771857344, + "loss": 0.4289, + "step": 14480 + }, + { + "epoch": 0.559481061044828, + "grad_norm": 3.543574094772339, + "learning_rate": 0.00016270383669897165, + "loss": 0.37, + "step": 14490 + }, + { + "epoch": 0.5598671763388547, + "grad_norm": 0.21068768203258514, + "learning_rate": 0.00016267809567936987, + "loss": 0.2602, + "step": 14500 + }, + { + "epoch": 0.5602532916328816, + "grad_norm": 1.1703628301620483, + "learning_rate": 0.00016265235465976808, + "loss": 0.3684, + "step": 14510 + }, + { + "epoch": 0.5606394069269084, + "grad_norm": 1.4498575925827026, + "learning_rate": 0.0001626266136401663, + "loss": 0.4089, + "step": 14520 + }, + { + "epoch": 0.5610255222209352, + "grad_norm": 1.617297887802124, + "learning_rate": 0.0001626008726205645, + "loss": 0.2759, + "step": 14530 + }, + { + "epoch": 0.5614116375149619, + "grad_norm": 0.8708978891372681, + "learning_rate": 0.00016257513160096272, + "loss": 0.2523, + "step": 14540 + }, + { + "epoch": 0.5617977528089888, + "grad_norm": 1.098026156425476, + "learning_rate": 0.00016254939058136093, + "loss": 0.3507, + "step": 14550 + }, + { + "epoch": 0.5621838681030156, + "grad_norm": 3.0867936611175537, + "learning_rate": 0.00016252364956175915, + "loss": 0.3563, + "step": 14560 + }, + { + "epoch": 0.5625699833970423, + "grad_norm": 1.4829964637756348, + "learning_rate": 0.00016249790854215736, + "loss": 0.3586, + "step": 14570 + }, + { + "epoch": 0.5629560986910691, + "grad_norm": 4.029405117034912, + "learning_rate": 0.00016247216752255557, + "loss": 0.5198, + "step": 14580 + }, + { + "epoch": 0.563342213985096, + "grad_norm": 2.5473573207855225, + "learning_rate": 0.00016244642650295379, + "loss": 0.3818, + "step": 14590 + }, + { + "epoch": 
0.5637283292791228, + "grad_norm": 1.387529730796814, + "learning_rate": 0.000162420685483352, + "loss": 0.3453, + "step": 14600 + }, + { + "epoch": 0.5641144445731495, + "grad_norm": 3.2525246143341064, + "learning_rate": 0.0001623949444637502, + "loss": 0.6096, + "step": 14610 + }, + { + "epoch": 0.5645005598671763, + "grad_norm": 1.23868989944458, + "learning_rate": 0.00016236920344414843, + "loss": 0.2785, + "step": 14620 + }, + { + "epoch": 0.5648866751612032, + "grad_norm": 1.763416051864624, + "learning_rate": 0.00016234346242454667, + "loss": 0.2313, + "step": 14630 + }, + { + "epoch": 0.5652727904552299, + "grad_norm": 2.6030027866363525, + "learning_rate": 0.00016231772140494485, + "loss": 0.5002, + "step": 14640 + }, + { + "epoch": 0.5656589057492567, + "grad_norm": 4.345195770263672, + "learning_rate": 0.00016229198038534307, + "loss": 0.3433, + "step": 14650 + }, + { + "epoch": 0.5660450210432835, + "grad_norm": 1.8660558462142944, + "learning_rate": 0.00016226623936574128, + "loss": 0.325, + "step": 14660 + }, + { + "epoch": 0.5664311363373103, + "grad_norm": 2.504354953765869, + "learning_rate": 0.0001622404983461395, + "loss": 0.3013, + "step": 14670 + }, + { + "epoch": 0.5668172516313371, + "grad_norm": 1.715135097503662, + "learning_rate": 0.0001622147573265377, + "loss": 0.2396, + "step": 14680 + }, + { + "epoch": 0.5672033669253639, + "grad_norm": 0.5195931792259216, + "learning_rate": 0.00016218901630693592, + "loss": 0.4992, + "step": 14690 + }, + { + "epoch": 0.5675894822193908, + "grad_norm": 1.076095461845398, + "learning_rate": 0.00016216327528733416, + "loss": 0.4488, + "step": 14700 + }, + { + "epoch": 0.5679755975134175, + "grad_norm": 0.42350637912750244, + "learning_rate": 0.00016213753426773235, + "loss": 0.2798, + "step": 14710 + }, + { + "epoch": 0.5683617128074443, + "grad_norm": 2.8514647483825684, + "learning_rate": 0.00016211179324813056, + "loss": 0.3108, + "step": 14720 + }, + { + "epoch": 0.5687478281014711, + "grad_norm": 1.4496532678604126, + "learning_rate": 0.00016208605222852877, + "loss": 0.4197, + "step": 14730 + }, + { + "epoch": 0.5691339433954979, + "grad_norm": 2.513998031616211, + "learning_rate": 0.00016206031120892699, + "loss": 0.4931, + "step": 14740 + }, + { + "epoch": 0.5695200586895247, + "grad_norm": 1.5905802249908447, + "learning_rate": 0.0001620345701893252, + "loss": 0.2175, + "step": 14750 + }, + { + "epoch": 0.5699061739835515, + "grad_norm": 0.4657856523990631, + "learning_rate": 0.0001620088291697234, + "loss": 0.4753, + "step": 14760 + }, + { + "epoch": 0.5702922892775782, + "grad_norm": 1.4188594818115234, + "learning_rate": 0.00016198308815012165, + "loss": 0.4849, + "step": 14770 + }, + { + "epoch": 0.5706784045716051, + "grad_norm": 0.6742203235626221, + "learning_rate": 0.00016195734713051984, + "loss": 0.3451, + "step": 14780 + }, + { + "epoch": 0.5710645198656319, + "grad_norm": 1.521262526512146, + "learning_rate": 0.00016193160611091805, + "loss": 0.6253, + "step": 14790 + }, + { + "epoch": 0.5714506351596587, + "grad_norm": 0.8657771348953247, + "learning_rate": 0.00016190586509131627, + "loss": 0.3664, + "step": 14800 + }, + { + "epoch": 0.5718367504536854, + "grad_norm": 1.0131505727767944, + "learning_rate": 0.00016188012407171448, + "loss": 0.3395, + "step": 14810 + }, + { + "epoch": 0.5722228657477123, + "grad_norm": 0.8506319522857666, + "learning_rate": 0.00016185438305211272, + "loss": 0.2769, + "step": 14820 + }, + { + "epoch": 0.5726089810417391, + "grad_norm": 3.1080141067504883, + "learning_rate": 
0.0001618286420325109, + "loss": 0.3185, + "step": 14830 + }, + { + "epoch": 0.5729950963357658, + "grad_norm": 0.8805003762245178, + "learning_rate": 0.00016180290101290915, + "loss": 0.3089, + "step": 14840 + }, + { + "epoch": 0.5733812116297926, + "grad_norm": 3.9470136165618896, + "learning_rate": 0.00016177715999330733, + "loss": 0.4552, + "step": 14850 + }, + { + "epoch": 0.5737673269238195, + "grad_norm": 1.10677969455719, + "learning_rate": 0.00016175141897370557, + "loss": 0.4624, + "step": 14860 + }, + { + "epoch": 0.5741534422178463, + "grad_norm": 1.185539960861206, + "learning_rate": 0.00016172567795410376, + "loss": 0.3144, + "step": 14870 + }, + { + "epoch": 0.574539557511873, + "grad_norm": 2.484386920928955, + "learning_rate": 0.00016169993693450197, + "loss": 0.3409, + "step": 14880 + }, + { + "epoch": 0.5749256728058998, + "grad_norm": 2.1621437072753906, + "learning_rate": 0.0001616741959149002, + "loss": 0.4421, + "step": 14890 + }, + { + "epoch": 0.5753117880999267, + "grad_norm": 1.3359025716781616, + "learning_rate": 0.0001616484548952984, + "loss": 0.2826, + "step": 14900 + }, + { + "epoch": 0.5756979033939534, + "grad_norm": 2.828157901763916, + "learning_rate": 0.00016162271387569664, + "loss": 0.4615, + "step": 14910 + }, + { + "epoch": 0.5760840186879802, + "grad_norm": 2.0543019771575928, + "learning_rate": 0.00016159697285609483, + "loss": 0.4307, + "step": 14920 + }, + { + "epoch": 0.5764701339820071, + "grad_norm": 0.3085225522518158, + "learning_rate": 0.00016157123183649307, + "loss": 0.317, + "step": 14930 + }, + { + "epoch": 0.5768562492760339, + "grad_norm": 1.459349274635315, + "learning_rate": 0.00016154549081689125, + "loss": 0.3928, + "step": 14940 + }, + { + "epoch": 0.5772423645700606, + "grad_norm": 0.6684612035751343, + "learning_rate": 0.00016151974979728947, + "loss": 0.4129, + "step": 14950 + }, + { + "epoch": 0.5776284798640874, + "grad_norm": 2.428311586380005, + "learning_rate": 0.0001614940087776877, + "loss": 0.4163, + "step": 14960 + }, + { + "epoch": 0.5780145951581143, + "grad_norm": 1.8885403871536255, + "learning_rate": 0.0001614682677580859, + "loss": 0.4311, + "step": 14970 + }, + { + "epoch": 0.578400710452141, + "grad_norm": 3.9598031044006348, + "learning_rate": 0.00016144252673848413, + "loss": 0.3103, + "step": 14980 + }, + { + "epoch": 0.5787868257461678, + "grad_norm": 1.872383713722229, + "learning_rate": 0.00016141678571888232, + "loss": 0.3592, + "step": 14990 + }, + { + "epoch": 0.5791729410401946, + "grad_norm": 1.023526668548584, + "learning_rate": 0.00016139104469928056, + "loss": 0.4185, + "step": 15000 + }, + { + "epoch": 0.5795590563342214, + "grad_norm": 1.5721429586410522, + "learning_rate": 0.00016136530367967877, + "loss": 0.4017, + "step": 15010 + }, + { + "epoch": 0.5799451716282482, + "grad_norm": 3.502350091934204, + "learning_rate": 0.00016133956266007696, + "loss": 0.397, + "step": 15020 + }, + { + "epoch": 0.580331286922275, + "grad_norm": 2.415985345840454, + "learning_rate": 0.0001613138216404752, + "loss": 0.4041, + "step": 15030 + }, + { + "epoch": 0.5807174022163017, + "grad_norm": 0.5441868901252747, + "learning_rate": 0.00016128808062087339, + "loss": 0.2395, + "step": 15040 + }, + { + "epoch": 0.5811035175103286, + "grad_norm": 2.453216552734375, + "learning_rate": 0.00016126233960127163, + "loss": 0.1586, + "step": 15050 + }, + { + "epoch": 0.5814896328043554, + "grad_norm": 3.108646869659424, + "learning_rate": 0.0001612365985816698, + "loss": 0.3996, + "step": 15060 + }, + { + "epoch": 
0.5818757480983822, + "grad_norm": 0.7707905173301697, + "learning_rate": 0.00016121085756206805, + "loss": 0.1756, + "step": 15070 + }, + { + "epoch": 0.5822618633924089, + "grad_norm": 0.42857447266578674, + "learning_rate": 0.00016118511654246627, + "loss": 0.258, + "step": 15080 + }, + { + "epoch": 0.5826479786864358, + "grad_norm": 0.7148373126983643, + "learning_rate": 0.00016115937552286445, + "loss": 0.3755, + "step": 15090 + }, + { + "epoch": 0.5830340939804626, + "grad_norm": 0.04789021611213684, + "learning_rate": 0.0001611336345032627, + "loss": 0.2087, + "step": 15100 + }, + { + "epoch": 0.5834202092744893, + "grad_norm": 5.012516975402832, + "learning_rate": 0.00016110789348366088, + "loss": 0.5406, + "step": 15110 + }, + { + "epoch": 0.5838063245685161, + "grad_norm": 1.4139299392700195, + "learning_rate": 0.00016108215246405912, + "loss": 0.407, + "step": 15120 + }, + { + "epoch": 0.584192439862543, + "grad_norm": 0.8637074828147888, + "learning_rate": 0.00016105641144445733, + "loss": 0.2987, + "step": 15130 + }, + { + "epoch": 0.5845785551565698, + "grad_norm": 0.9556403160095215, + "learning_rate": 0.00016103067042485555, + "loss": 0.4319, + "step": 15140 + }, + { + "epoch": 0.5849646704505965, + "grad_norm": 2.071455955505371, + "learning_rate": 0.00016100492940525376, + "loss": 0.4824, + "step": 15150 + }, + { + "epoch": 0.5853507857446233, + "grad_norm": 3.8130764961242676, + "learning_rate": 0.00016097918838565195, + "loss": 0.4749, + "step": 15160 + }, + { + "epoch": 0.5857369010386502, + "grad_norm": 1.290738582611084, + "learning_rate": 0.00016095344736605019, + "loss": 0.4486, + "step": 15170 + }, + { + "epoch": 0.5861230163326769, + "grad_norm": 0.9964671730995178, + "learning_rate": 0.00016092770634644837, + "loss": 0.1514, + "step": 15180 + }, + { + "epoch": 0.5865091316267037, + "grad_norm": 0.5267524123191833, + "learning_rate": 0.0001609019653268466, + "loss": 0.2298, + "step": 15190 + }, + { + "epoch": 0.5868952469207306, + "grad_norm": 2.028960704803467, + "learning_rate": 0.00016087622430724483, + "loss": 0.2925, + "step": 15200 + }, + { + "epoch": 0.5872813622147574, + "grad_norm": 0.8423904776573181, + "learning_rate": 0.00016085048328764304, + "loss": 0.4221, + "step": 15210 + }, + { + "epoch": 0.5876674775087841, + "grad_norm": 1.9663047790527344, + "learning_rate": 0.00016082474226804125, + "loss": 0.3595, + "step": 15220 + }, + { + "epoch": 0.5880535928028109, + "grad_norm": 1.2221906185150146, + "learning_rate": 0.00016079900124843944, + "loss": 0.3174, + "step": 15230 + }, + { + "epoch": 0.5884397080968378, + "grad_norm": 2.138437032699585, + "learning_rate": 0.00016077326022883768, + "loss": 0.2993, + "step": 15240 + }, + { + "epoch": 0.5888258233908645, + "grad_norm": 1.8036224842071533, + "learning_rate": 0.00016074751920923586, + "loss": 0.3897, + "step": 15250 + }, + { + "epoch": 0.5892119386848913, + "grad_norm": 2.3102879524230957, + "learning_rate": 0.0001607217781896341, + "loss": 0.3713, + "step": 15260 + }, + { + "epoch": 0.5895980539789181, + "grad_norm": 1.40048348903656, + "learning_rate": 0.00016069603717003232, + "loss": 0.2701, + "step": 15270 + }, + { + "epoch": 0.589984169272945, + "grad_norm": 1.0581787824630737, + "learning_rate": 0.00016067029615043053, + "loss": 0.2529, + "step": 15280 + }, + { + "epoch": 0.5903702845669717, + "grad_norm": 0.668211042881012, + "learning_rate": 0.00016064455513082875, + "loss": 0.221, + "step": 15290 + }, + { + "epoch": 0.5907563998609985, + "grad_norm": 0.7950372099876404, + 
"learning_rate": 0.00016061881411122696, + "loss": 0.2405, + "step": 15300 + }, + { + "epoch": 0.5911425151550252, + "grad_norm": 1.8531723022460938, + "learning_rate": 0.00016059307309162517, + "loss": 0.3423, + "step": 15310 + }, + { + "epoch": 0.5915286304490521, + "grad_norm": 0.2071121335029602, + "learning_rate": 0.00016056733207202339, + "loss": 0.2923, + "step": 15320 + }, + { + "epoch": 0.5919147457430789, + "grad_norm": 2.4298369884490967, + "learning_rate": 0.0001605415910524216, + "loss": 0.531, + "step": 15330 + }, + { + "epoch": 0.5923008610371057, + "grad_norm": 3.2297933101654053, + "learning_rate": 0.0001605158500328198, + "loss": 0.2563, + "step": 15340 + }, + { + "epoch": 0.5926869763311324, + "grad_norm": 1.533225178718567, + "learning_rate": 0.00016049010901321803, + "loss": 0.2712, + "step": 15350 + }, + { + "epoch": 0.5930730916251593, + "grad_norm": 3.6168954372406006, + "learning_rate": 0.00016046436799361624, + "loss": 0.6428, + "step": 15360 + }, + { + "epoch": 0.5934592069191861, + "grad_norm": 0.8912101984024048, + "learning_rate": 0.00016043862697401445, + "loss": 0.2882, + "step": 15370 + }, + { + "epoch": 0.5938453222132128, + "grad_norm": 0.6782923936843872, + "learning_rate": 0.00016041288595441267, + "loss": 0.3451, + "step": 15380 + }, + { + "epoch": 0.5942314375072396, + "grad_norm": 2.7575254440307617, + "learning_rate": 0.00016038714493481088, + "loss": 0.272, + "step": 15390 + }, + { + "epoch": 0.5946175528012665, + "grad_norm": 1.8348017930984497, + "learning_rate": 0.0001603614039152091, + "loss": 0.286, + "step": 15400 + }, + { + "epoch": 0.5950036680952933, + "grad_norm": 3.1459157466888428, + "learning_rate": 0.0001603356628956073, + "loss": 0.2986, + "step": 15410 + }, + { + "epoch": 0.59538978338932, + "grad_norm": 2.0769810676574707, + "learning_rate": 0.00016030992187600552, + "loss": 0.5512, + "step": 15420 + }, + { + "epoch": 0.5957758986833468, + "grad_norm": 0.5503840446472168, + "learning_rate": 0.00016028418085640373, + "loss": 0.4117, + "step": 15430 + }, + { + "epoch": 0.5961620139773737, + "grad_norm": 1.9759409427642822, + "learning_rate": 0.00016025843983680194, + "loss": 0.3619, + "step": 15440 + }, + { + "epoch": 0.5965481292714004, + "grad_norm": 2.2451424598693848, + "learning_rate": 0.00016023269881720016, + "loss": 0.2798, + "step": 15450 + }, + { + "epoch": 0.5969342445654272, + "grad_norm": 1.8537431955337524, + "learning_rate": 0.00016020695779759837, + "loss": 0.3739, + "step": 15460 + }, + { + "epoch": 0.5973203598594541, + "grad_norm": 1.7667044401168823, + "learning_rate": 0.00016018121677799658, + "loss": 0.3694, + "step": 15470 + }, + { + "epoch": 0.5977064751534809, + "grad_norm": 0.8955661654472351, + "learning_rate": 0.0001601554757583948, + "loss": 0.2036, + "step": 15480 + }, + { + "epoch": 0.5980925904475076, + "grad_norm": 0.9526143074035645, + "learning_rate": 0.000160129734738793, + "loss": 0.3728, + "step": 15490 + }, + { + "epoch": 0.5984787057415344, + "grad_norm": 0.5816594958305359, + "learning_rate": 0.00016010399371919122, + "loss": 0.3263, + "step": 15500 + }, + { + "epoch": 0.5988648210355613, + "grad_norm": 0.6841669678688049, + "learning_rate": 0.00016007825269958944, + "loss": 0.3252, + "step": 15510 + }, + { + "epoch": 0.599250936329588, + "grad_norm": 0.9375134706497192, + "learning_rate": 0.00016005251167998765, + "loss": 0.2207, + "step": 15520 + }, + { + "epoch": 0.5996370516236148, + "grad_norm": 0.39535248279571533, + "learning_rate": 0.00016002677066038586, + "loss": 0.3566, + "step": 
15530 + }, + { + "epoch": 0.6000231669176416, + "grad_norm": 0.5440202951431274, + "learning_rate": 0.00016000102964078408, + "loss": 0.2886, + "step": 15540 + }, + { + "epoch": 0.6004092822116684, + "grad_norm": 0.45111024379730225, + "learning_rate": 0.0001599752886211823, + "loss": 0.2697, + "step": 15550 + }, + { + "epoch": 0.6007953975056952, + "grad_norm": 2.372063398361206, + "learning_rate": 0.0001599495476015805, + "loss": 0.4716, + "step": 15560 + }, + { + "epoch": 0.601181512799722, + "grad_norm": 0.5841318368911743, + "learning_rate": 0.00015992380658197872, + "loss": 0.5611, + "step": 15570 + }, + { + "epoch": 0.6015676280937488, + "grad_norm": 0.323010116815567, + "learning_rate": 0.00015989806556237693, + "loss": 0.2733, + "step": 15580 + }, + { + "epoch": 0.6019537433877756, + "grad_norm": 1.4498323202133179, + "learning_rate": 0.00015987232454277514, + "loss": 0.4063, + "step": 15590 + }, + { + "epoch": 0.6023398586818024, + "grad_norm": 0.47180086374282837, + "learning_rate": 0.00015984658352317336, + "loss": 0.3322, + "step": 15600 + }, + { + "epoch": 0.6027259739758292, + "grad_norm": 1.2303547859191895, + "learning_rate": 0.00015982084250357157, + "loss": 0.2985, + "step": 15610 + }, + { + "epoch": 0.6031120892698559, + "grad_norm": 2.5056209564208984, + "learning_rate": 0.00015979510148396978, + "loss": 0.4903, + "step": 15620 + }, + { + "epoch": 0.6034982045638828, + "grad_norm": 1.344814419746399, + "learning_rate": 0.00015976936046436802, + "loss": 0.3806, + "step": 15630 + }, + { + "epoch": 0.6038843198579096, + "grad_norm": 3.2931411266326904, + "learning_rate": 0.0001597436194447662, + "loss": 0.3291, + "step": 15640 + }, + { + "epoch": 0.6042704351519363, + "grad_norm": 0.3108818829059601, + "learning_rate": 0.00015971787842516442, + "loss": 0.2866, + "step": 15650 + }, + { + "epoch": 0.6046565504459631, + "grad_norm": 2.6437489986419678, + "learning_rate": 0.00015969213740556264, + "loss": 0.2853, + "step": 15660 + }, + { + "epoch": 0.60504266573999, + "grad_norm": 0.6996239423751831, + "learning_rate": 0.00015966639638596085, + "loss": 0.2947, + "step": 15670 + }, + { + "epoch": 0.6054287810340168, + "grad_norm": 0.9377492070198059, + "learning_rate": 0.00015964065536635906, + "loss": 0.5502, + "step": 15680 + }, + { + "epoch": 0.6058148963280435, + "grad_norm": 0.3067781627178192, + "learning_rate": 0.00015961491434675728, + "loss": 0.2916, + "step": 15690 + }, + { + "epoch": 0.6062010116220703, + "grad_norm": 1.6191383600234985, + "learning_rate": 0.00015958917332715552, + "loss": 0.2536, + "step": 15700 + }, + { + "epoch": 0.6065871269160972, + "grad_norm": 0.5139639973640442, + "learning_rate": 0.0001595634323075537, + "loss": 0.2809, + "step": 15710 + }, + { + "epoch": 0.6069732422101239, + "grad_norm": 1.6476198434829712, + "learning_rate": 0.00015953769128795192, + "loss": 0.2859, + "step": 15720 + }, + { + "epoch": 0.6073593575041507, + "grad_norm": 3.895970106124878, + "learning_rate": 0.00015951195026835013, + "loss": 0.5254, + "step": 15730 + }, + { + "epoch": 0.6077454727981776, + "grad_norm": 1.1022089719772339, + "learning_rate": 0.00015948620924874834, + "loss": 0.3254, + "step": 15740 + }, + { + "epoch": 0.6081315880922044, + "grad_norm": 1.3811163902282715, + "learning_rate": 0.00015946046822914656, + "loss": 0.3259, + "step": 15750 + }, + { + "epoch": 0.6085177033862311, + "grad_norm": 1.0810881853103638, + "learning_rate": 0.00015943472720954477, + "loss": 0.3689, + "step": 15760 + }, + { + "epoch": 0.6089038186802579, + "grad_norm": 
2.1972954273223877, + "learning_rate": 0.000159408986189943, + "loss": 0.3255, + "step": 15770 + }, + { + "epoch": 0.6092899339742848, + "grad_norm": 3.945215940475464, + "learning_rate": 0.0001593832451703412, + "loss": 0.4317, + "step": 15780 + }, + { + "epoch": 0.6096760492683115, + "grad_norm": 0.5246737599372864, + "learning_rate": 0.0001593575041507394, + "loss": 0.5895, + "step": 15790 + }, + { + "epoch": 0.6100621645623383, + "grad_norm": 0.0722908228635788, + "learning_rate": 0.00015933176313113762, + "loss": 0.2078, + "step": 15800 + }, + { + "epoch": 0.6104482798563651, + "grad_norm": 1.888529658317566, + "learning_rate": 0.00015930602211153584, + "loss": 0.3629, + "step": 15810 + }, + { + "epoch": 0.610834395150392, + "grad_norm": 0.7190912365913391, + "learning_rate": 0.00015928028109193408, + "loss": 0.4715, + "step": 15820 + }, + { + "epoch": 0.6112205104444187, + "grad_norm": 1.1624583005905151, + "learning_rate": 0.00015925454007233226, + "loss": 0.3678, + "step": 15830 + }, + { + "epoch": 0.6116066257384455, + "grad_norm": 1.4570399522781372, + "learning_rate": 0.0001592287990527305, + "loss": 0.3008, + "step": 15840 + }, + { + "epoch": 0.6119927410324723, + "grad_norm": 0.45386505126953125, + "learning_rate": 0.0001592030580331287, + "loss": 0.2919, + "step": 15850 + }, + { + "epoch": 0.6123788563264991, + "grad_norm": 1.9031087160110474, + "learning_rate": 0.0001591773170135269, + "loss": 0.4336, + "step": 15860 + }, + { + "epoch": 0.6127649716205259, + "grad_norm": 0.9156181216239929, + "learning_rate": 0.00015915157599392512, + "loss": 0.2451, + "step": 15870 + }, + { + "epoch": 0.6131510869145527, + "grad_norm": 1.8031158447265625, + "learning_rate": 0.00015912583497432333, + "loss": 0.259, + "step": 15880 + }, + { + "epoch": 0.6135372022085794, + "grad_norm": 1.4521692991256714, + "learning_rate": 0.00015910009395472157, + "loss": 0.279, + "step": 15890 + }, + { + "epoch": 0.6139233175026063, + "grad_norm": 1.5064165592193604, + "learning_rate": 0.00015907435293511976, + "loss": 0.34, + "step": 15900 + }, + { + "epoch": 0.6143094327966331, + "grad_norm": 0.588637113571167, + "learning_rate": 0.000159048611915518, + "loss": 0.4886, + "step": 15910 + }, + { + "epoch": 0.6146955480906598, + "grad_norm": 0.3159797191619873, + "learning_rate": 0.00015902287089591618, + "loss": 0.3265, + "step": 15920 + }, + { + "epoch": 0.6150816633846866, + "grad_norm": 3.3988165855407715, + "learning_rate": 0.0001589971298763144, + "loss": 0.4917, + "step": 15930 + }, + { + "epoch": 0.6154677786787135, + "grad_norm": 0.5139709711074829, + "learning_rate": 0.00015897138885671264, + "loss": 0.2175, + "step": 15940 + }, + { + "epoch": 0.6158538939727403, + "grad_norm": 3.6877944469451904, + "learning_rate": 0.00015894564783711082, + "loss": 0.4674, + "step": 15950 + }, + { + "epoch": 0.616240009266767, + "grad_norm": 1.6468040943145752, + "learning_rate": 0.00015891990681750906, + "loss": 0.4375, + "step": 15960 + }, + { + "epoch": 0.6166261245607938, + "grad_norm": 0.47542962431907654, + "learning_rate": 0.00015889416579790725, + "loss": 0.3593, + "step": 15970 + }, + { + "epoch": 0.6170122398548207, + "grad_norm": 2.210597038269043, + "learning_rate": 0.0001588684247783055, + "loss": 0.3356, + "step": 15980 + }, + { + "epoch": 0.6173983551488474, + "grad_norm": 2.0030908584594727, + "learning_rate": 0.00015884268375870368, + "loss": 0.3367, + "step": 15990 + }, + { + "epoch": 0.6177844704428742, + "grad_norm": 3.438887119293213, + "learning_rate": 0.0001588169427391019, + "loss": 
0.4583, + "step": 16000 + }, + { + "epoch": 0.6181705857369011, + "grad_norm": 2.88147234916687, + "learning_rate": 0.00015879120171950013, + "loss": 0.3837, + "step": 16010 + }, + { + "epoch": 0.6185567010309279, + "grad_norm": 0.9327366948127747, + "learning_rate": 0.00015876546069989832, + "loss": 0.236, + "step": 16020 + }, + { + "epoch": 0.6189428163249546, + "grad_norm": 2.205355405807495, + "learning_rate": 0.00015873971968029656, + "loss": 0.2957, + "step": 16030 + }, + { + "epoch": 0.6193289316189814, + "grad_norm": 2.3861300945281982, + "learning_rate": 0.00015871397866069474, + "loss": 0.427, + "step": 16040 + }, + { + "epoch": 0.6197150469130083, + "grad_norm": 0.3276061415672302, + "learning_rate": 0.00015868823764109298, + "loss": 0.2301, + "step": 16050 + }, + { + "epoch": 0.620101162207035, + "grad_norm": 0.6325292587280273, + "learning_rate": 0.00015866249662149117, + "loss": 0.3915, + "step": 16060 + }, + { + "epoch": 0.6204872775010618, + "grad_norm": 1.1546003818511963, + "learning_rate": 0.0001586367556018894, + "loss": 0.4081, + "step": 16070 + }, + { + "epoch": 0.6208733927950886, + "grad_norm": 1.7260868549346924, + "learning_rate": 0.00015861101458228762, + "loss": 0.4642, + "step": 16080 + }, + { + "epoch": 0.6212595080891155, + "grad_norm": 0.30561816692352295, + "learning_rate": 0.0001585852735626858, + "loss": 0.2065, + "step": 16090 + }, + { + "epoch": 0.6216456233831422, + "grad_norm": 2.440007448196411, + "learning_rate": 0.00015855953254308405, + "loss": 0.3851, + "step": 16100 + }, + { + "epoch": 0.622031738677169, + "grad_norm": 4.06764554977417, + "learning_rate": 0.00015853379152348224, + "loss": 0.3796, + "step": 16110 + }, + { + "epoch": 0.6224178539711958, + "grad_norm": 4.970936298370361, + "learning_rate": 0.00015850805050388048, + "loss": 0.3393, + "step": 16120 + }, + { + "epoch": 0.6228039692652226, + "grad_norm": 2.0721356868743896, + "learning_rate": 0.0001584823094842787, + "loss": 0.2991, + "step": 16130 + }, + { + "epoch": 0.6231900845592494, + "grad_norm": 1.8130602836608887, + "learning_rate": 0.0001584565684646769, + "loss": 0.3132, + "step": 16140 + }, + { + "epoch": 0.6235761998532762, + "grad_norm": 1.1093038320541382, + "learning_rate": 0.00015843082744507512, + "loss": 0.3144, + "step": 16150 + }, + { + "epoch": 0.6239623151473029, + "grad_norm": 2.227937936782837, + "learning_rate": 0.0001584050864254733, + "loss": 0.4955, + "step": 16160 + }, + { + "epoch": 0.6243484304413298, + "grad_norm": 1.2541782855987549, + "learning_rate": 0.00015837934540587154, + "loss": 0.225, + "step": 16170 + }, + { + "epoch": 0.6247345457353566, + "grad_norm": 1.8113441467285156, + "learning_rate": 0.00015835360438626973, + "loss": 0.5393, + "step": 16180 + }, + { + "epoch": 0.6251206610293834, + "grad_norm": 2.198061466217041, + "learning_rate": 0.00015832786336666797, + "loss": 0.2225, + "step": 16190 + }, + { + "epoch": 0.6255067763234101, + "grad_norm": 2.4241714477539062, + "learning_rate": 0.00015830212234706618, + "loss": 0.3294, + "step": 16200 + }, + { + "epoch": 0.625892891617437, + "grad_norm": 3.0632903575897217, + "learning_rate": 0.0001582763813274644, + "loss": 0.3776, + "step": 16210 + }, + { + "epoch": 0.6262790069114638, + "grad_norm": 1.9433149099349976, + "learning_rate": 0.0001582506403078626, + "loss": 0.2964, + "step": 16220 + }, + { + "epoch": 0.6266651222054905, + "grad_norm": 1.6328935623168945, + "learning_rate": 0.0001582248992882608, + "loss": 0.2169, + "step": 16230 + }, + { + "epoch": 0.6270512374995174, + "grad_norm": 
0.926477313041687, + "learning_rate": 0.00015819915826865904, + "loss": 0.271, + "step": 16240 + }, + { + "epoch": 0.6274373527935442, + "grad_norm": 3.4526686668395996, + "learning_rate": 0.00015817341724905722, + "loss": 0.1836, + "step": 16250 + }, + { + "epoch": 0.627823468087571, + "grad_norm": 0.8149943947792053, + "learning_rate": 0.00015814767622945546, + "loss": 0.2494, + "step": 16260 + }, + { + "epoch": 0.6282095833815977, + "grad_norm": 0.8609616756439209, + "learning_rate": 0.00015812193520985368, + "loss": 0.279, + "step": 16270 + }, + { + "epoch": 0.6285956986756246, + "grad_norm": 0.9432594180107117, + "learning_rate": 0.0001580961941902519, + "loss": 0.3485, + "step": 16280 + }, + { + "epoch": 0.6289818139696514, + "grad_norm": 2.8844246864318848, + "learning_rate": 0.0001580704531706501, + "loss": 0.6163, + "step": 16290 + }, + { + "epoch": 0.6293679292636781, + "grad_norm": 3.4325804710388184, + "learning_rate": 0.0001580447121510483, + "loss": 0.3392, + "step": 16300 + }, + { + "epoch": 0.6297540445577049, + "grad_norm": 0.5320155620574951, + "learning_rate": 0.00015801897113144653, + "loss": 0.4664, + "step": 16310 + }, + { + "epoch": 0.6301401598517318, + "grad_norm": 1.075914740562439, + "learning_rate": 0.00015799323011184474, + "loss": 0.3023, + "step": 16320 + }, + { + "epoch": 0.6305262751457585, + "grad_norm": 1.3115136623382568, + "learning_rate": 0.00015796748909224296, + "loss": 0.1741, + "step": 16330 + }, + { + "epoch": 0.6309123904397853, + "grad_norm": 0.18413056433200836, + "learning_rate": 0.00015794174807264117, + "loss": 0.1966, + "step": 16340 + }, + { + "epoch": 0.6312985057338121, + "grad_norm": 1.8707069158554077, + "learning_rate": 0.00015791600705303938, + "loss": 0.3175, + "step": 16350 + }, + { + "epoch": 0.631684621027839, + "grad_norm": 2.2436699867248535, + "learning_rate": 0.0001578902660334376, + "loss": 0.2619, + "step": 16360 + }, + { + "epoch": 0.6320707363218657, + "grad_norm": 2.6100945472717285, + "learning_rate": 0.00015786452501383578, + "loss": 0.2874, + "step": 16370 + }, + { + "epoch": 0.6324568516158925, + "grad_norm": 1.455538034439087, + "learning_rate": 0.00015783878399423402, + "loss": 0.3956, + "step": 16380 + }, + { + "epoch": 0.6328429669099193, + "grad_norm": 1.1950361728668213, + "learning_rate": 0.00015781304297463224, + "loss": 0.3406, + "step": 16390 + }, + { + "epoch": 0.6332290822039461, + "grad_norm": 0.6905789971351624, + "learning_rate": 0.00015778730195503045, + "loss": 0.2788, + "step": 16400 + }, + { + "epoch": 0.6336151974979729, + "grad_norm": 1.8803350925445557, + "learning_rate": 0.00015776156093542866, + "loss": 0.5509, + "step": 16410 + }, + { + "epoch": 0.6340013127919997, + "grad_norm": 4.088913440704346, + "learning_rate": 0.00015773581991582688, + "loss": 0.5238, + "step": 16420 + }, + { + "epoch": 0.6343874280860264, + "grad_norm": 2.9464988708496094, + "learning_rate": 0.0001577100788962251, + "loss": 0.4721, + "step": 16430 + }, + { + "epoch": 0.6347735433800533, + "grad_norm": 2.005481719970703, + "learning_rate": 0.0001576843378766233, + "loss": 0.323, + "step": 16440 + }, + { + "epoch": 0.6351596586740801, + "grad_norm": 0.1693512350320816, + "learning_rate": 0.00015765859685702152, + "loss": 0.3459, + "step": 16450 + }, + { + "epoch": 0.6355457739681069, + "grad_norm": 1.6552183628082275, + "learning_rate": 0.00015763285583741973, + "loss": 0.4299, + "step": 16460 + }, + { + "epoch": 0.6359318892621336, + "grad_norm": 0.8498923182487488, + "learning_rate": 0.00015760711481781794, + "loss": 
0.3665, + "step": 16470 + }, + { + "epoch": 0.6363180045561605, + "grad_norm": 1.098840594291687, + "learning_rate": 0.00015758137379821616, + "loss": 0.318, + "step": 16480 + }, + { + "epoch": 0.6367041198501873, + "grad_norm": 2.69606876373291, + "learning_rate": 0.00015755563277861437, + "loss": 0.3566, + "step": 16490 + }, + { + "epoch": 0.637090235144214, + "grad_norm": 1.4099249839782715, + "learning_rate": 0.00015752989175901258, + "loss": 0.2658, + "step": 16500 + }, + { + "epoch": 0.6374763504382409, + "grad_norm": 0.10336513817310333, + "learning_rate": 0.0001575041507394108, + "loss": 0.4608, + "step": 16510 + }, + { + "epoch": 0.6378624657322677, + "grad_norm": 2.224609136581421, + "learning_rate": 0.000157478409719809, + "loss": 0.2875, + "step": 16520 + }, + { + "epoch": 0.6382485810262944, + "grad_norm": 1.3182893991470337, + "learning_rate": 0.00015745266870020722, + "loss": 0.2972, + "step": 16530 + }, + { + "epoch": 0.6386346963203212, + "grad_norm": 0.8028392195701599, + "learning_rate": 0.00015742692768060544, + "loss": 0.3553, + "step": 16540 + }, + { + "epoch": 0.6390208116143481, + "grad_norm": 2.6714046001434326, + "learning_rate": 0.00015740118666100365, + "loss": 0.3965, + "step": 16550 + }, + { + "epoch": 0.6394069269083749, + "grad_norm": 0.6173273921012878, + "learning_rate": 0.00015737544564140186, + "loss": 0.4278, + "step": 16560 + }, + { + "epoch": 0.6397930422024016, + "grad_norm": 0.9547831416130066, + "learning_rate": 0.00015734970462180008, + "loss": 0.3854, + "step": 16570 + }, + { + "epoch": 0.6401791574964284, + "grad_norm": 1.1336010694503784, + "learning_rate": 0.0001573239636021983, + "loss": 0.3505, + "step": 16580 + }, + { + "epoch": 0.6405652727904553, + "grad_norm": 4.911902904510498, + "learning_rate": 0.0001572982225825965, + "loss": 0.6624, + "step": 16590 + }, + { + "epoch": 0.640951388084482, + "grad_norm": 2.472303628921509, + "learning_rate": 0.00015727248156299472, + "loss": 0.4479, + "step": 16600 + }, + { + "epoch": 0.6413375033785088, + "grad_norm": 1.7077019214630127, + "learning_rate": 0.00015724674054339293, + "loss": 0.3985, + "step": 16610 + }, + { + "epoch": 0.6417236186725356, + "grad_norm": 4.096541404724121, + "learning_rate": 0.00015722099952379114, + "loss": 0.4763, + "step": 16620 + }, + { + "epoch": 0.6421097339665625, + "grad_norm": 1.5275769233703613, + "learning_rate": 0.00015719525850418936, + "loss": 0.3792, + "step": 16630 + }, + { + "epoch": 0.6424958492605892, + "grad_norm": 0.33548179268836975, + "learning_rate": 0.00015716951748458757, + "loss": 0.3276, + "step": 16640 + }, + { + "epoch": 0.642881964554616, + "grad_norm": 4.142831325531006, + "learning_rate": 0.00015714377646498578, + "loss": 0.4906, + "step": 16650 + }, + { + "epoch": 0.6432680798486428, + "grad_norm": 1.5129270553588867, + "learning_rate": 0.000157118035445384, + "loss": 0.3077, + "step": 16660 + }, + { + "epoch": 0.6436541951426696, + "grad_norm": 2.0287647247314453, + "learning_rate": 0.0001570922944257822, + "loss": 0.43, + "step": 16670 + }, + { + "epoch": 0.6440403104366964, + "grad_norm": 2.1278579235076904, + "learning_rate": 0.00015706655340618042, + "loss": 0.3822, + "step": 16680 + }, + { + "epoch": 0.6444264257307232, + "grad_norm": 0.7621383666992188, + "learning_rate": 0.00015704081238657864, + "loss": 0.2556, + "step": 16690 + }, + { + "epoch": 0.6448125410247499, + "grad_norm": 1.888422966003418, + "learning_rate": 0.00015701507136697685, + "loss": 0.4687, + "step": 16700 + }, + { + "epoch": 0.6451986563187768, + "grad_norm": 
2.09405517578125, + "learning_rate": 0.00015698933034737506, + "loss": 0.4447, + "step": 16710 + }, + { + "epoch": 0.6455847716128036, + "grad_norm": 0.8639073967933655, + "learning_rate": 0.00015696358932777328, + "loss": 0.224, + "step": 16720 + }, + { + "epoch": 0.6459708869068304, + "grad_norm": 1.1472411155700684, + "learning_rate": 0.0001569378483081715, + "loss": 0.315, + "step": 16730 + }, + { + "epoch": 0.6463570022008571, + "grad_norm": 2.4987192153930664, + "learning_rate": 0.0001569121072885697, + "loss": 0.3707, + "step": 16740 + }, + { + "epoch": 0.646743117494884, + "grad_norm": 1.661458134651184, + "learning_rate": 0.00015688636626896792, + "loss": 0.4027, + "step": 16750 + }, + { + "epoch": 0.6471292327889108, + "grad_norm": 1.5494720935821533, + "learning_rate": 0.00015686062524936613, + "loss": 0.3737, + "step": 16760 + }, + { + "epoch": 0.6475153480829375, + "grad_norm": 0.3376433253288269, + "learning_rate": 0.00015683488422976437, + "loss": 0.3468, + "step": 16770 + }, + { + "epoch": 0.6479014633769644, + "grad_norm": 0.6496528387069702, + "learning_rate": 0.00015680914321016256, + "loss": 0.2857, + "step": 16780 + }, + { + "epoch": 0.6482875786709912, + "grad_norm": 1.957741618156433, + "learning_rate": 0.00015678340219056077, + "loss": 0.5089, + "step": 16790 + }, + { + "epoch": 0.648673693965018, + "grad_norm": 3.980466842651367, + "learning_rate": 0.00015675766117095898, + "loss": 0.3558, + "step": 16800 + }, + { + "epoch": 0.6490598092590447, + "grad_norm": 3.2516696453094482, + "learning_rate": 0.0001567319201513572, + "loss": 0.6214, + "step": 16810 + }, + { + "epoch": 0.6494459245530716, + "grad_norm": 0.6847260594367981, + "learning_rate": 0.00015670617913175544, + "loss": 0.3681, + "step": 16820 + }, + { + "epoch": 0.6498320398470984, + "grad_norm": 3.0918118953704834, + "learning_rate": 0.00015668043811215362, + "loss": 0.3608, + "step": 16830 + }, + { + "epoch": 0.6502181551411251, + "grad_norm": 1.1295204162597656, + "learning_rate": 0.00015665469709255186, + "loss": 0.3189, + "step": 16840 + }, + { + "epoch": 0.6506042704351519, + "grad_norm": 1.803222417831421, + "learning_rate": 0.00015662895607295005, + "loss": 0.3138, + "step": 16850 + }, + { + "epoch": 0.6509903857291788, + "grad_norm": 3.157122850418091, + "learning_rate": 0.00015660321505334826, + "loss": 0.4342, + "step": 16860 + }, + { + "epoch": 0.6513765010232055, + "grad_norm": 2.6584184169769287, + "learning_rate": 0.00015657747403374648, + "loss": 0.211, + "step": 16870 + }, + { + "epoch": 0.6517626163172323, + "grad_norm": 0.763903021812439, + "learning_rate": 0.0001565517330141447, + "loss": 0.2873, + "step": 16880 + }, + { + "epoch": 0.6521487316112591, + "grad_norm": 2.5033602714538574, + "learning_rate": 0.00015652599199454293, + "loss": 0.4475, + "step": 16890 + }, + { + "epoch": 0.652534846905286, + "grad_norm": 2.088690757751465, + "learning_rate": 0.00015650025097494112, + "loss": 0.3022, + "step": 16900 + }, + { + "epoch": 0.6529209621993127, + "grad_norm": 1.596064567565918, + "learning_rate": 0.00015647450995533936, + "loss": 0.3771, + "step": 16910 + }, + { + "epoch": 0.6533070774933395, + "grad_norm": 1.2658660411834717, + "learning_rate": 0.00015644876893573754, + "loss": 0.4793, + "step": 16920 + }, + { + "epoch": 0.6536931927873663, + "grad_norm": 1.5343844890594482, + "learning_rate": 0.00015642302791613576, + "loss": 0.5026, + "step": 16930 + }, + { + "epoch": 0.6540793080813931, + "grad_norm": 0.4736674129962921, + "learning_rate": 0.000156397286896534, + "loss": 0.2269, 
+ "step": 16940 + }, + { + "epoch": 0.6544654233754199, + "grad_norm": 0.05510171130299568, + "learning_rate": 0.00015637154587693218, + "loss": 0.2398, + "step": 16950 + }, + { + "epoch": 0.6548515386694467, + "grad_norm": 0.641941249370575, + "learning_rate": 0.00015634580485733042, + "loss": 0.3862, + "step": 16960 + }, + { + "epoch": 0.6552376539634734, + "grad_norm": 1.5418890714645386, + "learning_rate": 0.0001563200638377286, + "loss": 0.2688, + "step": 16970 + }, + { + "epoch": 0.6556237692575003, + "grad_norm": 3.46284818649292, + "learning_rate": 0.00015629432281812685, + "loss": 0.5199, + "step": 16980 + }, + { + "epoch": 0.6560098845515271, + "grad_norm": 0.3225530683994293, + "learning_rate": 0.00015626858179852504, + "loss": 0.5035, + "step": 16990 + }, + { + "epoch": 0.6563959998455539, + "grad_norm": 0.9385218620300293, + "learning_rate": 0.00015624284077892325, + "loss": 0.2618, + "step": 17000 + }, + { + "epoch": 0.6567821151395806, + "grad_norm": 0.8849124312400818, + "learning_rate": 0.0001562170997593215, + "loss": 0.3711, + "step": 17010 + }, + { + "epoch": 0.6571682304336075, + "grad_norm": 2.2706375122070312, + "learning_rate": 0.00015619135873971968, + "loss": 0.2666, + "step": 17020 + }, + { + "epoch": 0.6575543457276343, + "grad_norm": 2.1923744678497314, + "learning_rate": 0.00015616561772011792, + "loss": 0.2038, + "step": 17030 + }, + { + "epoch": 0.657940461021661, + "grad_norm": 1.2356051206588745, + "learning_rate": 0.0001561398767005161, + "loss": 0.4103, + "step": 17040 + }, + { + "epoch": 0.6583265763156879, + "grad_norm": 1.583095669746399, + "learning_rate": 0.00015611413568091434, + "loss": 0.4164, + "step": 17050 + }, + { + "epoch": 0.6587126916097147, + "grad_norm": 1.0823155641555786, + "learning_rate": 0.00015608839466131253, + "loss": 0.3898, + "step": 17060 + }, + { + "epoch": 0.6590988069037415, + "grad_norm": 1.9568531513214111, + "learning_rate": 0.00015606265364171074, + "loss": 0.3553, + "step": 17070 + }, + { + "epoch": 0.6594849221977682, + "grad_norm": 3.576362371444702, + "learning_rate": 0.00015603691262210898, + "loss": 0.2693, + "step": 17080 + }, + { + "epoch": 0.6598710374917951, + "grad_norm": 0.2432270646095276, + "learning_rate": 0.00015601117160250717, + "loss": 0.2993, + "step": 17090 + }, + { + "epoch": 0.6602571527858219, + "grad_norm": 1.5935213565826416, + "learning_rate": 0.0001559854305829054, + "loss": 0.3295, + "step": 17100 + }, + { + "epoch": 0.6606432680798486, + "grad_norm": 0.09780561178922653, + "learning_rate": 0.0001559596895633036, + "loss": 0.2891, + "step": 17110 + }, + { + "epoch": 0.6610293833738754, + "grad_norm": 0.5332283973693848, + "learning_rate": 0.00015593394854370184, + "loss": 0.372, + "step": 17120 + }, + { + "epoch": 0.6614154986679023, + "grad_norm": 1.1921123266220093, + "learning_rate": 0.00015590820752410005, + "loss": 0.3155, + "step": 17130 + }, + { + "epoch": 0.661801613961929, + "grad_norm": 0.35267120599746704, + "learning_rate": 0.00015588246650449823, + "loss": 0.3795, + "step": 17140 + }, + { + "epoch": 0.6621877292559558, + "grad_norm": 0.4876207411289215, + "learning_rate": 0.00015585672548489648, + "loss": 0.2717, + "step": 17150 + }, + { + "epoch": 0.6625738445499826, + "grad_norm": 0.9866208434104919, + "learning_rate": 0.00015583098446529466, + "loss": 0.4121, + "step": 17160 + }, + { + "epoch": 0.6629599598440095, + "grad_norm": 3.0264835357666016, + "learning_rate": 0.0001558052434456929, + "loss": 0.5356, + "step": 17170 + }, + { + "epoch": 0.6633460751380362, + 
"grad_norm": 2.4786953926086426, + "learning_rate": 0.0001557795024260911, + "loss": 0.2086, + "step": 17180 + }, + { + "epoch": 0.663732190432063, + "grad_norm": 2.3706555366516113, + "learning_rate": 0.00015575376140648933, + "loss": 0.5224, + "step": 17190 + }, + { + "epoch": 0.6641183057260898, + "grad_norm": 2.6375296115875244, + "learning_rate": 0.00015572802038688754, + "loss": 0.3625, + "step": 17200 + }, + { + "epoch": 0.6645044210201166, + "grad_norm": 0.5592703819274902, + "learning_rate": 0.00015570227936728573, + "loss": 0.3831, + "step": 17210 + }, + { + "epoch": 0.6648905363141434, + "grad_norm": 2.309683322906494, + "learning_rate": 0.00015567653834768397, + "loss": 0.4039, + "step": 17220 + }, + { + "epoch": 0.6652766516081702, + "grad_norm": 2.3134100437164307, + "learning_rate": 0.00015565079732808215, + "loss": 0.1904, + "step": 17230 + }, + { + "epoch": 0.665662766902197, + "grad_norm": 2.232910633087158, + "learning_rate": 0.0001556250563084804, + "loss": 0.34, + "step": 17240 + }, + { + "epoch": 0.6660488821962238, + "grad_norm": 0.4798373579978943, + "learning_rate": 0.0001555993152888786, + "loss": 0.3143, + "step": 17250 + }, + { + "epoch": 0.6664349974902506, + "grad_norm": 2.071753740310669, + "learning_rate": 0.00015557357426927682, + "loss": 0.2645, + "step": 17260 + }, + { + "epoch": 0.6668211127842774, + "grad_norm": 2.1930956840515137, + "learning_rate": 0.00015554783324967504, + "loss": 0.4144, + "step": 17270 + }, + { + "epoch": 0.6672072280783041, + "grad_norm": 1.7874137163162231, + "learning_rate": 0.00015552209223007325, + "loss": 0.273, + "step": 17280 + }, + { + "epoch": 0.667593343372331, + "grad_norm": 1.264596939086914, + "learning_rate": 0.00015549635121047146, + "loss": 0.4186, + "step": 17290 + }, + { + "epoch": 0.6679794586663578, + "grad_norm": 0.5612212419509888, + "learning_rate": 0.00015547061019086965, + "loss": 0.2802, + "step": 17300 + }, + { + "epoch": 0.6683655739603845, + "grad_norm": 1.3782585859298706, + "learning_rate": 0.0001554448691712679, + "loss": 0.3712, + "step": 17310 + }, + { + "epoch": 0.6687516892544114, + "grad_norm": 1.5178605318069458, + "learning_rate": 0.0001554191281516661, + "loss": 0.1694, + "step": 17320 + }, + { + "epoch": 0.6691378045484382, + "grad_norm": 2.1221604347229004, + "learning_rate": 0.00015539338713206432, + "loss": 0.4418, + "step": 17330 + }, + { + "epoch": 0.669523919842465, + "grad_norm": 1.570734977722168, + "learning_rate": 0.00015536764611246253, + "loss": 0.4037, + "step": 17340 + }, + { + "epoch": 0.6699100351364917, + "grad_norm": 0.6928157806396484, + "learning_rate": 0.00015534190509286074, + "loss": 0.5293, + "step": 17350 + }, + { + "epoch": 0.6702961504305186, + "grad_norm": 0.8526401519775391, + "learning_rate": 0.00015531616407325895, + "loss": 0.348, + "step": 17360 + }, + { + "epoch": 0.6706822657245454, + "grad_norm": 1.7482202053070068, + "learning_rate": 0.00015529042305365714, + "loss": 0.352, + "step": 17370 + }, + { + "epoch": 0.6710683810185721, + "grad_norm": 1.724870204925537, + "learning_rate": 0.00015526468203405538, + "loss": 0.3589, + "step": 17380 + }, + { + "epoch": 0.6714544963125989, + "grad_norm": 3.125180721282959, + "learning_rate": 0.0001552389410144536, + "loss": 0.3063, + "step": 17390 + }, + { + "epoch": 0.6718406116066258, + "grad_norm": 2.0817360877990723, + "learning_rate": 0.0001552131999948518, + "loss": 0.2217, + "step": 17400 + }, + { + "epoch": 0.6722267269006525, + "grad_norm": 0.12367000430822372, + "learning_rate": 0.00015518745897525002, + 
"loss": 0.1691, + "step": 17410 + }, + { + "epoch": 0.6726128421946793, + "grad_norm": 0.23093344271183014, + "learning_rate": 0.00015516171795564823, + "loss": 0.2496, + "step": 17420 + }, + { + "epoch": 0.6729989574887061, + "grad_norm": 3.1588997840881348, + "learning_rate": 0.00015513597693604645, + "loss": 0.2868, + "step": 17430 + }, + { + "epoch": 0.673385072782733, + "grad_norm": 1.471999168395996, + "learning_rate": 0.00015511023591644466, + "loss": 0.2785, + "step": 17440 + }, + { + "epoch": 0.6737711880767597, + "grad_norm": 0.4500691294670105, + "learning_rate": 0.00015508449489684287, + "loss": 0.3218, + "step": 17450 + }, + { + "epoch": 0.6741573033707865, + "grad_norm": 2.65533709526062, + "learning_rate": 0.0001550587538772411, + "loss": 0.3194, + "step": 17460 + }, + { + "epoch": 0.6745434186648133, + "grad_norm": 0.45398348569869995, + "learning_rate": 0.0001550330128576393, + "loss": 0.199, + "step": 17470 + }, + { + "epoch": 0.6749295339588401, + "grad_norm": 0.21518200635910034, + "learning_rate": 0.00015500727183803751, + "loss": 0.3043, + "step": 17480 + }, + { + "epoch": 0.6753156492528669, + "grad_norm": 0.13117246329784393, + "learning_rate": 0.00015498153081843573, + "loss": 0.1872, + "step": 17490 + }, + { + "epoch": 0.6757017645468937, + "grad_norm": 0.4857695996761322, + "learning_rate": 0.00015495578979883394, + "loss": 0.5992, + "step": 17500 + }, + { + "epoch": 0.6760878798409204, + "grad_norm": 2.4992752075195312, + "learning_rate": 0.00015493004877923215, + "loss": 0.5057, + "step": 17510 + }, + { + "epoch": 0.6764739951349473, + "grad_norm": 1.9614732265472412, + "learning_rate": 0.00015490430775963037, + "loss": 0.3169, + "step": 17520 + }, + { + "epoch": 0.6768601104289741, + "grad_norm": 0.14168275892734528, + "learning_rate": 0.00015487856674002858, + "loss": 0.271, + "step": 17530 + }, + { + "epoch": 0.6772462257230009, + "grad_norm": 4.064804553985596, + "learning_rate": 0.0001548528257204268, + "loss": 0.3316, + "step": 17540 + }, + { + "epoch": 0.6776323410170277, + "grad_norm": 3.2959964275360107, + "learning_rate": 0.000154827084700825, + "loss": 0.5148, + "step": 17550 + }, + { + "epoch": 0.6780184563110545, + "grad_norm": 0.6234021186828613, + "learning_rate": 0.00015480134368122322, + "loss": 0.362, + "step": 17560 + }, + { + "epoch": 0.6784045716050813, + "grad_norm": 0.642573356628418, + "learning_rate": 0.00015477560266162143, + "loss": 0.2982, + "step": 17570 + }, + { + "epoch": 0.678790686899108, + "grad_norm": 1.098667025566101, + "learning_rate": 0.00015474986164201965, + "loss": 0.1875, + "step": 17580 + }, + { + "epoch": 0.6791768021931349, + "grad_norm": 2.378192186355591, + "learning_rate": 0.00015472412062241786, + "loss": 0.2533, + "step": 17590 + }, + { + "epoch": 0.6795629174871617, + "grad_norm": 1.1783161163330078, + "learning_rate": 0.00015469837960281607, + "loss": 0.402, + "step": 17600 + }, + { + "epoch": 0.6799490327811885, + "grad_norm": 0.1861846148967743, + "learning_rate": 0.0001546726385832143, + "loss": 0.2481, + "step": 17610 + }, + { + "epoch": 0.6803351480752152, + "grad_norm": 0.5785403847694397, + "learning_rate": 0.0001546468975636125, + "loss": 0.375, + "step": 17620 + }, + { + "epoch": 0.6807212633692421, + "grad_norm": 1.9201544523239136, + "learning_rate": 0.00015462115654401071, + "loss": 0.2757, + "step": 17630 + }, + { + "epoch": 0.6811073786632689, + "grad_norm": 2.461735963821411, + "learning_rate": 0.00015459541552440893, + "loss": 0.2193, + "step": 17640 + }, + { + "epoch": 0.6814934939572956, + 
"grad_norm": 2.007638454437256, + "learning_rate": 0.00015456967450480714, + "loss": 0.3561, + "step": 17650 + }, + { + "epoch": 0.6818796092513224, + "grad_norm": 1.3581938743591309, + "learning_rate": 0.00015454393348520535, + "loss": 0.3736, + "step": 17660 + }, + { + "epoch": 0.6822657245453493, + "grad_norm": 0.5637246966362, + "learning_rate": 0.00015451819246560357, + "loss": 0.3116, + "step": 17670 + }, + { + "epoch": 0.682651839839376, + "grad_norm": 1.409740924835205, + "learning_rate": 0.00015449245144600178, + "loss": 0.3281, + "step": 17680 + }, + { + "epoch": 0.6830379551334028, + "grad_norm": 3.2064149379730225, + "learning_rate": 0.0001544667104264, + "loss": 0.4427, + "step": 17690 + }, + { + "epoch": 0.6834240704274296, + "grad_norm": 0.9369992613792419, + "learning_rate": 0.0001544409694067982, + "loss": 0.3424, + "step": 17700 + }, + { + "epoch": 0.6838101857214565, + "grad_norm": 2.4149889945983887, + "learning_rate": 0.00015441522838719642, + "loss": 0.4074, + "step": 17710 + }, + { + "epoch": 0.6841963010154832, + "grad_norm": 0.688360869884491, + "learning_rate": 0.00015438948736759463, + "loss": 0.2205, + "step": 17720 + }, + { + "epoch": 0.68458241630951, + "grad_norm": 2.1444098949432373, + "learning_rate": 0.00015436374634799285, + "loss": 0.5942, + "step": 17730 + }, + { + "epoch": 0.6849685316035368, + "grad_norm": 1.8053444623947144, + "learning_rate": 0.00015433800532839106, + "loss": 0.1665, + "step": 17740 + }, + { + "epoch": 0.6853546468975636, + "grad_norm": 3.5637879371643066, + "learning_rate": 0.0001543122643087893, + "loss": 0.4972, + "step": 17750 + }, + { + "epoch": 0.6857407621915904, + "grad_norm": 1.846845030784607, + "learning_rate": 0.0001542865232891875, + "loss": 0.4105, + "step": 17760 + }, + { + "epoch": 0.6861268774856172, + "grad_norm": 2.2459189891815186, + "learning_rate": 0.0001542607822695857, + "loss": 0.253, + "step": 17770 + }, + { + "epoch": 0.686512992779644, + "grad_norm": 2.3160414695739746, + "learning_rate": 0.00015423504124998391, + "loss": 0.1905, + "step": 17780 + }, + { + "epoch": 0.6868991080736708, + "grad_norm": 1.2804152965545654, + "learning_rate": 0.00015420930023038213, + "loss": 0.2283, + "step": 17790 + }, + { + "epoch": 0.6872852233676976, + "grad_norm": 1.7174758911132812, + "learning_rate": 0.00015418355921078034, + "loss": 0.49, + "step": 17800 + }, + { + "epoch": 0.6876713386617244, + "grad_norm": 3.057098627090454, + "learning_rate": 0.00015415781819117855, + "loss": 0.4398, + "step": 17810 + }, + { + "epoch": 0.6880574539557512, + "grad_norm": 0.9112808704376221, + "learning_rate": 0.0001541320771715768, + "loss": 0.203, + "step": 17820 + }, + { + "epoch": 0.688443569249778, + "grad_norm": 2.899599313735962, + "learning_rate": 0.00015410633615197498, + "loss": 0.4212, + "step": 17830 + }, + { + "epoch": 0.6888296845438048, + "grad_norm": 1.8084157705307007, + "learning_rate": 0.0001540805951323732, + "loss": 0.4948, + "step": 17840 + }, + { + "epoch": 0.6892157998378315, + "grad_norm": 1.2151083946228027, + "learning_rate": 0.0001540548541127714, + "loss": 0.4254, + "step": 17850 + }, + { + "epoch": 0.6896019151318584, + "grad_norm": 1.20271897315979, + "learning_rate": 0.00015402911309316962, + "loss": 0.1753, + "step": 17860 + }, + { + "epoch": 0.6899880304258852, + "grad_norm": 0.7688419222831726, + "learning_rate": 0.00015400337207356783, + "loss": 0.4066, + "step": 17870 + }, + { + "epoch": 0.690374145719912, + "grad_norm": 0.8648087978363037, + "learning_rate": 0.00015397763105396605, + "loss": 
0.5405, + "step": 17880 + }, + { + "epoch": 0.6907602610139387, + "grad_norm": 1.5501036643981934, + "learning_rate": 0.0001539518900343643, + "loss": 0.3761, + "step": 17890 + }, + { + "epoch": 0.6911463763079656, + "grad_norm": 0.5476267337799072, + "learning_rate": 0.00015392614901476247, + "loss": 0.2256, + "step": 17900 + }, + { + "epoch": 0.6915324916019924, + "grad_norm": 2.0248584747314453, + "learning_rate": 0.0001539004079951607, + "loss": 0.5449, + "step": 17910 + }, + { + "epoch": 0.6919186068960191, + "grad_norm": 1.688596248626709, + "learning_rate": 0.0001538746669755589, + "loss": 0.262, + "step": 17920 + }, + { + "epoch": 0.6923047221900459, + "grad_norm": 0.42646175622940063, + "learning_rate": 0.00015384892595595711, + "loss": 0.3714, + "step": 17930 + }, + { + "epoch": 0.6926908374840728, + "grad_norm": 0.9620506167411804, + "learning_rate": 0.00015382318493635535, + "loss": 0.272, + "step": 17940 + }, + { + "epoch": 0.6930769527780996, + "grad_norm": 1.6859287023544312, + "learning_rate": 0.00015379744391675354, + "loss": 0.3123, + "step": 17950 + }, + { + "epoch": 0.6934630680721263, + "grad_norm": 0.6281775832176208, + "learning_rate": 0.00015377170289715178, + "loss": 0.2808, + "step": 17960 + }, + { + "epoch": 0.6938491833661531, + "grad_norm": 3.756242036819458, + "learning_rate": 0.00015374596187754997, + "loss": 0.4971, + "step": 17970 + }, + { + "epoch": 0.69423529866018, + "grad_norm": 0.6022955775260925, + "learning_rate": 0.0001537202208579482, + "loss": 0.2918, + "step": 17980 + }, + { + "epoch": 0.6946214139542067, + "grad_norm": 0.6843704581260681, + "learning_rate": 0.0001536944798383464, + "loss": 0.476, + "step": 17990 + }, + { + "epoch": 0.6950075292482335, + "grad_norm": 1.0234850645065308, + "learning_rate": 0.0001536687388187446, + "loss": 0.2101, + "step": 18000 + }, + { + "epoch": 0.6953936445422603, + "grad_norm": 1.0228936672210693, + "learning_rate": 0.00015364299779914285, + "loss": 0.3958, + "step": 18010 + }, + { + "epoch": 0.6957797598362871, + "grad_norm": 1.1152328252792358, + "learning_rate": 0.00015361725677954103, + "loss": 0.2967, + "step": 18020 + }, + { + "epoch": 0.6961658751303139, + "grad_norm": 1.7190260887145996, + "learning_rate": 0.00015359151575993927, + "loss": 0.5281, + "step": 18030 + }, + { + "epoch": 0.6965519904243407, + "grad_norm": 0.6654171943664551, + "learning_rate": 0.00015356577474033746, + "loss": 0.3467, + "step": 18040 + }, + { + "epoch": 0.6969381057183675, + "grad_norm": 0.3305549621582031, + "learning_rate": 0.0001535400337207357, + "loss": 0.3576, + "step": 18050 + }, + { + "epoch": 0.6973242210123943, + "grad_norm": 0.3116997480392456, + "learning_rate": 0.0001535142927011339, + "loss": 0.5372, + "step": 18060 + }, + { + "epoch": 0.6977103363064211, + "grad_norm": 0.4224954843521118, + "learning_rate": 0.0001534885516815321, + "loss": 0.3866, + "step": 18070 + }, + { + "epoch": 0.6980964516004479, + "grad_norm": 4.249162197113037, + "learning_rate": 0.00015346281066193034, + "loss": 0.4236, + "step": 18080 + }, + { + "epoch": 0.6984825668944747, + "grad_norm": 1.109113335609436, + "learning_rate": 0.00015343706964232853, + "loss": 0.2882, + "step": 18090 + }, + { + "epoch": 0.6988686821885015, + "grad_norm": 1.3546028137207031, + "learning_rate": 0.00015341132862272677, + "loss": 0.3432, + "step": 18100 + }, + { + "epoch": 0.6992547974825283, + "grad_norm": 2.943016290664673, + "learning_rate": 0.00015338558760312495, + "loss": 0.3287, + "step": 18110 + }, + { + "epoch": 0.699640912776555, + 
"grad_norm": 1.1259021759033203, + "learning_rate": 0.0001533598465835232, + "loss": 0.4613, + "step": 18120 + }, + { + "epoch": 0.7000270280705819, + "grad_norm": 1.4867910146713257, + "learning_rate": 0.0001533341055639214, + "loss": 0.4309, + "step": 18130 + }, + { + "epoch": 0.7004131433646087, + "grad_norm": 2.6913414001464844, + "learning_rate": 0.0001533083645443196, + "loss": 0.2154, + "step": 18140 + }, + { + "epoch": 0.7007992586586355, + "grad_norm": 1.495466947555542, + "learning_rate": 0.00015328262352471783, + "loss": 0.3207, + "step": 18150 + }, + { + "epoch": 0.7011853739526622, + "grad_norm": 1.023193120956421, + "learning_rate": 0.00015325688250511602, + "loss": 0.2067, + "step": 18160 + }, + { + "epoch": 0.7015714892466891, + "grad_norm": 1.603235125541687, + "learning_rate": 0.00015323114148551426, + "loss": 0.4577, + "step": 18170 + }, + { + "epoch": 0.7019576045407159, + "grad_norm": 0.5976241230964661, + "learning_rate": 0.00015320540046591245, + "loss": 0.2282, + "step": 18180 + }, + { + "epoch": 0.7023437198347426, + "grad_norm": 2.561659574508667, + "learning_rate": 0.0001531796594463107, + "loss": 0.4045, + "step": 18190 + }, + { + "epoch": 0.7027298351287694, + "grad_norm": 1.3893495798110962, + "learning_rate": 0.0001531539184267089, + "loss": 0.2419, + "step": 18200 + }, + { + "epoch": 0.7031159504227963, + "grad_norm": 0.7786352038383484, + "learning_rate": 0.00015312817740710709, + "loss": 0.1653, + "step": 18210 + }, + { + "epoch": 0.703502065716823, + "grad_norm": 0.6525956988334656, + "learning_rate": 0.00015310243638750533, + "loss": 0.5418, + "step": 18220 + }, + { + "epoch": 0.7038881810108498, + "grad_norm": 0.38933584094047546, + "learning_rate": 0.0001530766953679035, + "loss": 0.2952, + "step": 18230 + }, + { + "epoch": 0.7042742963048766, + "grad_norm": 2.0752692222595215, + "learning_rate": 0.00015305095434830175, + "loss": 0.211, + "step": 18240 + }, + { + "epoch": 0.7046604115989035, + "grad_norm": 0.9095730781555176, + "learning_rate": 0.00015302521332869997, + "loss": 0.2723, + "step": 18250 + }, + { + "epoch": 0.7050465268929302, + "grad_norm": 1.6840119361877441, + "learning_rate": 0.00015299947230909818, + "loss": 0.3362, + "step": 18260 + }, + { + "epoch": 0.705432642186957, + "grad_norm": 2.0353269577026367, + "learning_rate": 0.0001529737312894964, + "loss": 0.2407, + "step": 18270 + }, + { + "epoch": 0.7058187574809838, + "grad_norm": 3.0865590572357178, + "learning_rate": 0.00015294799026989458, + "loss": 0.3426, + "step": 18280 + }, + { + "epoch": 0.7062048727750107, + "grad_norm": 1.6488090753555298, + "learning_rate": 0.00015292224925029282, + "loss": 0.4275, + "step": 18290 + }, + { + "epoch": 0.7065909880690374, + "grad_norm": 0.5494143962860107, + "learning_rate": 0.000152896508230691, + "loss": 0.412, + "step": 18300 + }, + { + "epoch": 0.7069771033630642, + "grad_norm": 3.111301898956299, + "learning_rate": 0.00015287076721108925, + "loss": 0.4615, + "step": 18310 + }, + { + "epoch": 0.707363218657091, + "grad_norm": 1.74229097366333, + "learning_rate": 0.00015284502619148746, + "loss": 0.3194, + "step": 18320 + }, + { + "epoch": 0.7077493339511178, + "grad_norm": 1.8455474376678467, + "learning_rate": 0.00015281928517188567, + "loss": 0.2817, + "step": 18330 + }, + { + "epoch": 0.7081354492451446, + "grad_norm": 1.778723120689392, + "learning_rate": 0.0001527935441522839, + "loss": 0.441, + "step": 18340 + }, + { + "epoch": 0.7085215645391714, + "grad_norm": 0.7885593771934509, + "learning_rate": 0.00015276780313268207, + 
"loss": 0.3121, + "step": 18350 + }, + { + "epoch": 0.7089076798331982, + "grad_norm": 1.9262609481811523, + "learning_rate": 0.0001527420621130803, + "loss": 0.3271, + "step": 18360 + }, + { + "epoch": 0.709293795127225, + "grad_norm": 0.10278096795082092, + "learning_rate": 0.0001527163210934785, + "loss": 0.2602, + "step": 18370 + }, + { + "epoch": 0.7096799104212518, + "grad_norm": 1.2394765615463257, + "learning_rate": 0.00015269058007387674, + "loss": 0.2641, + "step": 18380 + }, + { + "epoch": 0.7100660257152785, + "grad_norm": 2.0335285663604736, + "learning_rate": 0.00015266483905427495, + "loss": 0.1926, + "step": 18390 + }, + { + "epoch": 0.7104521410093054, + "grad_norm": 7.205105781555176, + "learning_rate": 0.00015263909803467317, + "loss": 0.4772, + "step": 18400 + }, + { + "epoch": 0.7108382563033322, + "grad_norm": 1.1549599170684814, + "learning_rate": 0.00015261335701507138, + "loss": 0.2775, + "step": 18410 + }, + { + "epoch": 0.711224371597359, + "grad_norm": 2.8363780975341797, + "learning_rate": 0.00015258761599546957, + "loss": 0.3533, + "step": 18420 + }, + { + "epoch": 0.7116104868913857, + "grad_norm": 0.2606666684150696, + "learning_rate": 0.0001525618749758678, + "loss": 0.1421, + "step": 18430 + }, + { + "epoch": 0.7119966021854126, + "grad_norm": 1.2934225797653198, + "learning_rate": 0.00015253613395626602, + "loss": 0.3066, + "step": 18440 + }, + { + "epoch": 0.7123827174794394, + "grad_norm": 3.8246026039123535, + "learning_rate": 0.00015251039293666423, + "loss": 0.534, + "step": 18450 + }, + { + "epoch": 0.7127688327734661, + "grad_norm": 2.2535433769226074, + "learning_rate": 0.00015248465191706245, + "loss": 0.4795, + "step": 18460 + }, + { + "epoch": 0.7131549480674929, + "grad_norm": 1.6749187707901, + "learning_rate": 0.00015245891089746066, + "loss": 0.4091, + "step": 18470 + }, + { + "epoch": 0.7135410633615198, + "grad_norm": 1.1795039176940918, + "learning_rate": 0.00015243316987785887, + "loss": 0.2612, + "step": 18480 + }, + { + "epoch": 0.7139271786555466, + "grad_norm": 1.9280221462249756, + "learning_rate": 0.00015240742885825709, + "loss": 0.2068, + "step": 18490 + }, + { + "epoch": 0.7143132939495733, + "grad_norm": 1.1188548803329468, + "learning_rate": 0.0001523816878386553, + "loss": 0.3193, + "step": 18500 + }, + { + "epoch": 0.7146994092436001, + "grad_norm": 0.2429720014333725, + "learning_rate": 0.0001523559468190535, + "loss": 0.3129, + "step": 18510 + }, + { + "epoch": 0.715085524537627, + "grad_norm": 4.09410285949707, + "learning_rate": 0.00015233020579945173, + "loss": 0.2437, + "step": 18520 + }, + { + "epoch": 0.7154716398316537, + "grad_norm": 2.3252813816070557, + "learning_rate": 0.00015230446477984994, + "loss": 0.4649, + "step": 18530 + }, + { + "epoch": 0.7158577551256805, + "grad_norm": 0.5725727677345276, + "learning_rate": 0.00015227872376024815, + "loss": 0.3291, + "step": 18540 + }, + { + "epoch": 0.7162438704197073, + "grad_norm": 0.9253637194633484, + "learning_rate": 0.00015225298274064637, + "loss": 0.3486, + "step": 18550 + }, + { + "epoch": 0.7166299857137342, + "grad_norm": 2.3353309631347656, + "learning_rate": 0.00015222724172104458, + "loss": 0.253, + "step": 18560 + }, + { + "epoch": 0.7170161010077609, + "grad_norm": 0.7312389016151428, + "learning_rate": 0.0001522015007014428, + "loss": 0.2817, + "step": 18570 + }, + { + "epoch": 0.7174022163017877, + "grad_norm": 0.6564128994941711, + "learning_rate": 0.000152175759681841, + "loss": 0.2896, + "step": 18580 + }, + { + "epoch": 0.7177883315958145, + 
"grad_norm": 3.4619979858398438, + "learning_rate": 0.00015215001866223922, + "loss": 0.5028, + "step": 18590 + }, + { + "epoch": 0.7181744468898413, + "grad_norm": 6.910060882568359, + "learning_rate": 0.00015212427764263743, + "loss": 0.2467, + "step": 18600 + }, + { + "epoch": 0.7185605621838681, + "grad_norm": 2.022186279296875, + "learning_rate": 0.00015209853662303565, + "loss": 0.3406, + "step": 18610 + }, + { + "epoch": 0.7189466774778949, + "grad_norm": 1.2240760326385498, + "learning_rate": 0.00015207279560343386, + "loss": 0.3391, + "step": 18620 + }, + { + "epoch": 0.7193327927719217, + "grad_norm": 0.7356148958206177, + "learning_rate": 0.00015204705458383207, + "loss": 0.2679, + "step": 18630 + }, + { + "epoch": 0.7197189080659485, + "grad_norm": 0.963387131690979, + "learning_rate": 0.00015202131356423029, + "loss": 0.4088, + "step": 18640 + }, + { + "epoch": 0.7201050233599753, + "grad_norm": 3.0437800884246826, + "learning_rate": 0.0001519955725446285, + "loss": 0.2591, + "step": 18650 + }, + { + "epoch": 0.720491138654002, + "grad_norm": 2.5874569416046143, + "learning_rate": 0.0001519698315250267, + "loss": 0.2377, + "step": 18660 + }, + { + "epoch": 0.7208772539480289, + "grad_norm": 2.3215808868408203, + "learning_rate": 0.00015194409050542493, + "loss": 0.5335, + "step": 18670 + }, + { + "epoch": 0.7212633692420557, + "grad_norm": 1.9501638412475586, + "learning_rate": 0.00015191834948582314, + "loss": 0.4657, + "step": 18680 + }, + { + "epoch": 0.7216494845360825, + "grad_norm": 1.8396021127700806, + "learning_rate": 0.00015189260846622135, + "loss": 0.2719, + "step": 18690 + }, + { + "epoch": 0.7220355998301092, + "grad_norm": 0.9337745904922485, + "learning_rate": 0.00015186686744661957, + "loss": 0.3672, + "step": 18700 + }, + { + "epoch": 0.7224217151241361, + "grad_norm": 1.892098069190979, + "learning_rate": 0.00015184112642701778, + "loss": 0.6085, + "step": 18710 + }, + { + "epoch": 0.7228078304181629, + "grad_norm": 1.051630973815918, + "learning_rate": 0.000151815385407416, + "loss": 0.2422, + "step": 18720 + }, + { + "epoch": 0.7231939457121896, + "grad_norm": 0.8714147210121155, + "learning_rate": 0.0001517896443878142, + "loss": 0.4046, + "step": 18730 + }, + { + "epoch": 0.7235800610062164, + "grad_norm": 0.5002617835998535, + "learning_rate": 0.00015176390336821242, + "loss": 0.3708, + "step": 18740 + }, + { + "epoch": 0.7239661763002433, + "grad_norm": 1.3960262537002563, + "learning_rate": 0.00015173816234861066, + "loss": 0.3206, + "step": 18750 + }, + { + "epoch": 0.7243522915942701, + "grad_norm": 0.7899012565612793, + "learning_rate": 0.00015171242132900885, + "loss": 0.2875, + "step": 18760 + }, + { + "epoch": 0.7247384068882968, + "grad_norm": 0.7216291427612305, + "learning_rate": 0.00015168668030940706, + "loss": 0.3885, + "step": 18770 + }, + { + "epoch": 0.7251245221823236, + "grad_norm": 0.36028966307640076, + "learning_rate": 0.00015166093928980527, + "loss": 0.3542, + "step": 18780 + }, + { + "epoch": 0.7255106374763505, + "grad_norm": 1.378724455833435, + "learning_rate": 0.00015163519827020349, + "loss": 0.2149, + "step": 18790 + }, + { + "epoch": 0.7258967527703772, + "grad_norm": 1.544819712638855, + "learning_rate": 0.0001516094572506017, + "loss": 0.278, + "step": 18800 + }, + { + "epoch": 0.726282868064404, + "grad_norm": 0.5819025039672852, + "learning_rate": 0.0001515837162309999, + "loss": 0.2161, + "step": 18810 + }, + { + "epoch": 0.7266689833584308, + "grad_norm": 0.8350955843925476, + "learning_rate": 
0.00015155797521139815, + "loss": 0.4134, + "step": 18820 + }, + { + "epoch": 0.7270550986524577, + "grad_norm": 2.4110195636749268, + "learning_rate": 0.00015153223419179634, + "loss": 0.3928, + "step": 18830 + }, + { + "epoch": 0.7274412139464844, + "grad_norm": 0.7913835048675537, + "learning_rate": 0.00015150649317219455, + "loss": 0.2336, + "step": 18840 + }, + { + "epoch": 0.7278273292405112, + "grad_norm": 2.13431978225708, + "learning_rate": 0.00015148075215259277, + "loss": 0.4758, + "step": 18850 + }, + { + "epoch": 0.7282134445345381, + "grad_norm": 1.2756295204162598, + "learning_rate": 0.00015145501113299098, + "loss": 0.3139, + "step": 18860 + }, + { + "epoch": 0.7285995598285648, + "grad_norm": 0.3661370277404785, + "learning_rate": 0.0001514292701133892, + "loss": 0.3913, + "step": 18870 + }, + { + "epoch": 0.7289856751225916, + "grad_norm": 1.2238267660140991, + "learning_rate": 0.0001514035290937874, + "loss": 0.1893, + "step": 18880 + }, + { + "epoch": 0.7293717904166184, + "grad_norm": 2.8876595497131348, + "learning_rate": 0.00015137778807418565, + "loss": 0.2567, + "step": 18890 + }, + { + "epoch": 0.7297579057106452, + "grad_norm": 1.248967170715332, + "learning_rate": 0.00015135204705458383, + "loss": 0.2004, + "step": 18900 + }, + { + "epoch": 0.730144021004672, + "grad_norm": 0.9446873068809509, + "learning_rate": 0.00015132630603498205, + "loss": 0.386, + "step": 18910 + }, + { + "epoch": 0.7305301362986988, + "grad_norm": 4.592974662780762, + "learning_rate": 0.00015130056501538026, + "loss": 0.4549, + "step": 18920 + }, + { + "epoch": 0.7309162515927256, + "grad_norm": 0.8000105619430542, + "learning_rate": 0.00015127482399577847, + "loss": 0.2795, + "step": 18930 + }, + { + "epoch": 0.7313023668867524, + "grad_norm": 0.5600059628486633, + "learning_rate": 0.0001512490829761767, + "loss": 0.3682, + "step": 18940 + }, + { + "epoch": 0.7316884821807792, + "grad_norm": 0.4072086811065674, + "learning_rate": 0.0001512233419565749, + "loss": 0.3186, + "step": 18950 + }, + { + "epoch": 0.732074597474806, + "grad_norm": 3.387422561645508, + "learning_rate": 0.00015119760093697314, + "loss": 0.3195, + "step": 18960 + }, + { + "epoch": 0.7324607127688327, + "grad_norm": 0.5224191546440125, + "learning_rate": 0.00015117185991737133, + "loss": 0.1792, + "step": 18970 + }, + { + "epoch": 0.7328468280628596, + "grad_norm": 0.5431543588638306, + "learning_rate": 0.00015114611889776954, + "loss": 0.2775, + "step": 18980 + }, + { + "epoch": 0.7332329433568864, + "grad_norm": 0.13088488578796387, + "learning_rate": 0.00015112037787816775, + "loss": 0.3511, + "step": 18990 + }, + { + "epoch": 0.7336190586509131, + "grad_norm": 0.6414417624473572, + "learning_rate": 0.00015109463685856597, + "loss": 0.4042, + "step": 19000 + }, + { + "epoch": 0.7340051739449399, + "grad_norm": 0.2358855903148651, + "learning_rate": 0.0001510688958389642, + "loss": 0.2567, + "step": 19010 + }, + { + "epoch": 0.7343912892389668, + "grad_norm": 1.343703031539917, + "learning_rate": 0.0001510431548193624, + "loss": 0.3658, + "step": 19020 + }, + { + "epoch": 0.7347774045329936, + "grad_norm": 2.5982301235198975, + "learning_rate": 0.00015101741379976063, + "loss": 0.3664, + "step": 19030 + }, + { + "epoch": 0.7351635198270203, + "grad_norm": 1.3639850616455078, + "learning_rate": 0.00015099167278015882, + "loss": 0.1809, + "step": 19040 + }, + { + "epoch": 0.7355496351210471, + "grad_norm": 1.322572946548462, + "learning_rate": 0.00015096593176055703, + "loss": 0.213, + "step": 19050 + }, + { + 
"epoch": 0.735935750415074, + "grad_norm": 0.6858059763908386, + "learning_rate": 0.00015094019074095527, + "loss": 0.1494, + "step": 19060 + }, + { + "epoch": 0.7363218657091007, + "grad_norm": 3.9815866947174072, + "learning_rate": 0.00015091444972135346, + "loss": 0.4879, + "step": 19070 + }, + { + "epoch": 0.7367079810031275, + "grad_norm": 0.5155348777770996, + "learning_rate": 0.0001508887087017517, + "loss": 0.1951, + "step": 19080 + }, + { + "epoch": 0.7370940962971543, + "grad_norm": 1.1120082139968872, + "learning_rate": 0.00015086296768214988, + "loss": 0.3156, + "step": 19090 + }, + { + "epoch": 0.7374802115911812, + "grad_norm": 2.1396732330322266, + "learning_rate": 0.00015083722666254813, + "loss": 0.3528, + "step": 19100 + }, + { + "epoch": 0.7378663268852079, + "grad_norm": 1.8543074131011963, + "learning_rate": 0.0001508114856429463, + "loss": 0.2778, + "step": 19110 + }, + { + "epoch": 0.7382524421792347, + "grad_norm": 0.2955397963523865, + "learning_rate": 0.00015078574462334452, + "loss": 0.3267, + "step": 19120 + }, + { + "epoch": 0.7386385574732616, + "grad_norm": 0.6277685165405273, + "learning_rate": 0.00015076000360374277, + "loss": 0.3316, + "step": 19130 + }, + { + "epoch": 0.7390246727672883, + "grad_norm": 1.4339113235473633, + "learning_rate": 0.00015073426258414095, + "loss": 0.3227, + "step": 19140 + }, + { + "epoch": 0.7394107880613151, + "grad_norm": 2.74206805229187, + "learning_rate": 0.0001507085215645392, + "loss": 0.2086, + "step": 19150 + }, + { + "epoch": 0.7397969033553419, + "grad_norm": 1.0108954906463623, + "learning_rate": 0.00015068278054493738, + "loss": 0.2697, + "step": 19160 + }, + { + "epoch": 0.7401830186493688, + "grad_norm": 1.726650357246399, + "learning_rate": 0.00015065703952533562, + "loss": 0.1312, + "step": 19170 + }, + { + "epoch": 0.7405691339433955, + "grad_norm": 1.8226735591888428, + "learning_rate": 0.0001506312985057338, + "loss": 0.4488, + "step": 19180 + }, + { + "epoch": 0.7409552492374223, + "grad_norm": 1.4812517166137695, + "learning_rate": 0.00015060555748613205, + "loss": 0.472, + "step": 19190 + }, + { + "epoch": 0.741341364531449, + "grad_norm": 1.184036374092102, + "learning_rate": 0.00015057981646653026, + "loss": 0.1342, + "step": 19200 + }, + { + "epoch": 0.7417274798254759, + "grad_norm": 2.925368309020996, + "learning_rate": 0.00015055407544692844, + "loss": 0.4114, + "step": 19210 + }, + { + "epoch": 0.7421135951195027, + "grad_norm": 1.4891862869262695, + "learning_rate": 0.00015052833442732669, + "loss": 0.3454, + "step": 19220 + }, + { + "epoch": 0.7424997104135295, + "grad_norm": 2.9221529960632324, + "learning_rate": 0.00015050259340772487, + "loss": 0.4622, + "step": 19230 + }, + { + "epoch": 0.7428858257075562, + "grad_norm": 1.3214635848999023, + "learning_rate": 0.0001504768523881231, + "loss": 0.2783, + "step": 19240 + }, + { + "epoch": 0.7432719410015831, + "grad_norm": 1.1919734477996826, + "learning_rate": 0.00015045111136852133, + "loss": 0.3667, + "step": 19250 + }, + { + "epoch": 0.7436580562956099, + "grad_norm": 1.7075424194335938, + "learning_rate": 0.00015042537034891954, + "loss": 0.1867, + "step": 19260 + }, + { + "epoch": 0.7440441715896366, + "grad_norm": 0.6810876727104187, + "learning_rate": 0.00015039962932931775, + "loss": 0.2231, + "step": 19270 + }, + { + "epoch": 0.7444302868836634, + "grad_norm": 1.3421598672866821, + "learning_rate": 0.00015037388830971594, + "loss": 0.2138, + "step": 19280 + }, + { + "epoch": 0.7448164021776903, + "grad_norm": 1.5983080863952637, + 
"learning_rate": 0.00015034814729011418, + "loss": 0.2799, + "step": 19290 + }, + { + "epoch": 0.7452025174717171, + "grad_norm": 2.0319182872772217, + "learning_rate": 0.00015032240627051236, + "loss": 0.213, + "step": 19300 + }, + { + "epoch": 0.7455886327657438, + "grad_norm": 0.6433222889900208, + "learning_rate": 0.0001502966652509106, + "loss": 0.3604, + "step": 19310 + }, + { + "epoch": 0.7459747480597706, + "grad_norm": 1.1373825073242188, + "learning_rate": 0.00015027092423130882, + "loss": 0.3007, + "step": 19320 + }, + { + "epoch": 0.7463608633537975, + "grad_norm": 1.7230875492095947, + "learning_rate": 0.00015024518321170703, + "loss": 0.2471, + "step": 19330 + }, + { + "epoch": 0.7467469786478242, + "grad_norm": 1.73224937915802, + "learning_rate": 0.00015021944219210524, + "loss": 0.4877, + "step": 19340 + }, + { + "epoch": 0.747133093941851, + "grad_norm": 0.9023095369338989, + "learning_rate": 0.00015019370117250343, + "loss": 0.352, + "step": 19350 + }, + { + "epoch": 0.7475192092358778, + "grad_norm": 1.898983120918274, + "learning_rate": 0.00015016796015290167, + "loss": 0.3981, + "step": 19360 + }, + { + "epoch": 0.7479053245299047, + "grad_norm": 1.8544923067092896, + "learning_rate": 0.00015014221913329986, + "loss": 0.3015, + "step": 19370 + }, + { + "epoch": 0.7482914398239314, + "grad_norm": 1.1380795240402222, + "learning_rate": 0.0001501164781136981, + "loss": 0.3173, + "step": 19380 + }, + { + "epoch": 0.7486775551179582, + "grad_norm": 0.5071800351142883, + "learning_rate": 0.0001500907370940963, + "loss": 0.46, + "step": 19390 + }, + { + "epoch": 0.7490636704119851, + "grad_norm": 0.30739355087280273, + "learning_rate": 0.00015006499607449452, + "loss": 0.4942, + "step": 19400 + }, + { + "epoch": 0.7494497857060118, + "grad_norm": 1.1223585605621338, + "learning_rate": 0.00015003925505489274, + "loss": 0.3118, + "step": 19410 + }, + { + "epoch": 0.7498359010000386, + "grad_norm": 1.019545555114746, + "learning_rate": 0.00015001351403529092, + "loss": 0.3115, + "step": 19420 + }, + { + "epoch": 0.7502220162940654, + "grad_norm": 0.4567502439022064, + "learning_rate": 0.00014998777301568916, + "loss": 0.2932, + "step": 19430 + }, + { + "epoch": 0.7506081315880923, + "grad_norm": 1.669258952140808, + "learning_rate": 0.00014996203199608738, + "loss": 0.2889, + "step": 19440 + }, + { + "epoch": 0.750994246882119, + "grad_norm": 0.2787584364414215, + "learning_rate": 0.0001499362909764856, + "loss": 0.3482, + "step": 19450 + }, + { + "epoch": 0.7513803621761458, + "grad_norm": 1.6648303270339966, + "learning_rate": 0.0001499105499568838, + "loss": 0.3136, + "step": 19460 + }, + { + "epoch": 0.7517664774701726, + "grad_norm": 2.6357266902923584, + "learning_rate": 0.00014988480893728202, + "loss": 0.4643, + "step": 19470 + }, + { + "epoch": 0.7521525927641994, + "grad_norm": 1.8017394542694092, + "learning_rate": 0.00014985906791768023, + "loss": 0.469, + "step": 19480 + }, + { + "epoch": 0.7525387080582262, + "grad_norm": 1.8467847108840942, + "learning_rate": 0.00014983332689807842, + "loss": 0.1495, + "step": 19490 + }, + { + "epoch": 0.752924823352253, + "grad_norm": 2.9446980953216553, + "learning_rate": 0.00014980758587847666, + "loss": 0.3162, + "step": 19500 + }, + { + "epoch": 0.7533109386462797, + "grad_norm": 1.4076721668243408, + "learning_rate": 0.00014978184485887487, + "loss": 0.2356, + "step": 19510 + }, + { + "epoch": 0.7536970539403066, + "grad_norm": 1.2669463157653809, + "learning_rate": 0.00014975610383927308, + "loss": 0.3056, + "step": 19520 
+ }, + { + "epoch": 0.7540831692343334, + "grad_norm": 1.1390401124954224, + "learning_rate": 0.0001497303628196713, + "loss": 0.2785, + "step": 19530 + }, + { + "epoch": 0.7544692845283602, + "grad_norm": 3.095099925994873, + "learning_rate": 0.0001497046218000695, + "loss": 0.489, + "step": 19540 + }, + { + "epoch": 0.7548553998223869, + "grad_norm": 1.5737907886505127, + "learning_rate": 0.00014967888078046772, + "loss": 0.3981, + "step": 19550 + }, + { + "epoch": 0.7552415151164138, + "grad_norm": 1.4953045845031738, + "learning_rate": 0.00014965313976086594, + "loss": 0.3153, + "step": 19560 + }, + { + "epoch": 0.7556276304104406, + "grad_norm": 0.7709154486656189, + "learning_rate": 0.00014962739874126415, + "loss": 0.2904, + "step": 19570 + }, + { + "epoch": 0.7560137457044673, + "grad_norm": 2.118950366973877, + "learning_rate": 0.00014960165772166236, + "loss": 0.3736, + "step": 19580 + }, + { + "epoch": 0.7563998609984941, + "grad_norm": 2.9463138580322266, + "learning_rate": 0.00014957591670206058, + "loss": 0.3186, + "step": 19590 + }, + { + "epoch": 0.756785976292521, + "grad_norm": 1.3784689903259277, + "learning_rate": 0.0001495501756824588, + "loss": 0.3589, + "step": 19600 + }, + { + "epoch": 0.7571720915865477, + "grad_norm": 2.35467267036438, + "learning_rate": 0.000149524434662857, + "loss": 0.1282, + "step": 19610 + }, + { + "epoch": 0.7575582068805745, + "grad_norm": 0.7167999148368835, + "learning_rate": 0.00014949869364325522, + "loss": 0.3775, + "step": 19620 + }, + { + "epoch": 0.7579443221746013, + "grad_norm": 1.242785096168518, + "learning_rate": 0.00014947295262365343, + "loss": 0.3814, + "step": 19630 + }, + { + "epoch": 0.7583304374686282, + "grad_norm": 2.7875797748565674, + "learning_rate": 0.00014944721160405164, + "loss": 0.4764, + "step": 19640 + }, + { + "epoch": 0.7587165527626549, + "grad_norm": 1.2169462442398071, + "learning_rate": 0.00014942147058444986, + "loss": 0.3132, + "step": 19650 + }, + { + "epoch": 0.7591026680566817, + "grad_norm": 3.146204948425293, + "learning_rate": 0.00014939572956484807, + "loss": 0.248, + "step": 19660 + }, + { + "epoch": 0.7594887833507086, + "grad_norm": 2.2177276611328125, + "learning_rate": 0.00014936998854524628, + "loss": 0.3755, + "step": 19670 + }, + { + "epoch": 0.7598748986447353, + "grad_norm": 0.8063843250274658, + "learning_rate": 0.0001493442475256445, + "loss": 0.6341, + "step": 19680 + }, + { + "epoch": 0.7602610139387621, + "grad_norm": 0.18064215779304504, + "learning_rate": 0.0001493185065060427, + "loss": 0.3565, + "step": 19690 + }, + { + "epoch": 0.7606471292327889, + "grad_norm": 1.4401954412460327, + "learning_rate": 0.00014929276548644092, + "loss": 0.1873, + "step": 19700 + }, + { + "epoch": 0.7610332445268158, + "grad_norm": 0.7269515991210938, + "learning_rate": 0.00014926702446683914, + "loss": 0.2765, + "step": 19710 + }, + { + "epoch": 0.7614193598208425, + "grad_norm": 1.2779995203018188, + "learning_rate": 0.00014924128344723735, + "loss": 0.3633, + "step": 19720 + }, + { + "epoch": 0.7618054751148693, + "grad_norm": 1.3330426216125488, + "learning_rate": 0.00014921554242763556, + "loss": 0.3736, + "step": 19730 + }, + { + "epoch": 0.7621915904088961, + "grad_norm": 1.4269347190856934, + "learning_rate": 0.00014918980140803378, + "loss": 0.2572, + "step": 19740 + }, + { + "epoch": 0.7625777057029229, + "grad_norm": 0.9075976610183716, + "learning_rate": 0.000149164060388432, + "loss": 0.3609, + "step": 19750 + }, + { + "epoch": 0.7629638209969497, + "grad_norm": 1.7566559314727783, 
+ "learning_rate": 0.0001491383193688302, + "loss": 0.3134, + "step": 19760 + }, + { + "epoch": 0.7633499362909765, + "grad_norm": 2.4620914459228516, + "learning_rate": 0.00014911257834922842, + "loss": 0.3559, + "step": 19770 + }, + { + "epoch": 0.7637360515850032, + "grad_norm": 0.9474597573280334, + "learning_rate": 0.00014908683732962663, + "loss": 0.3821, + "step": 19780 + }, + { + "epoch": 0.7641221668790301, + "grad_norm": 0.6298363208770752, + "learning_rate": 0.00014906109631002484, + "loss": 0.2801, + "step": 19790 + }, + { + "epoch": 0.7645082821730569, + "grad_norm": 0.213288351893425, + "learning_rate": 0.00014903535529042306, + "loss": 0.368, + "step": 19800 + }, + { + "epoch": 0.7648943974670837, + "grad_norm": 1.4412375688552856, + "learning_rate": 0.00014900961427082127, + "loss": 0.3972, + "step": 19810 + }, + { + "epoch": 0.7652805127611104, + "grad_norm": 1.6808812618255615, + "learning_rate": 0.00014898387325121948, + "loss": 0.5302, + "step": 19820 + }, + { + "epoch": 0.7656666280551373, + "grad_norm": 0.922726571559906, + "learning_rate": 0.0001489581322316177, + "loss": 0.3299, + "step": 19830 + }, + { + "epoch": 0.7660527433491641, + "grad_norm": 0.5058152079582214, + "learning_rate": 0.0001489323912120159, + "loss": 0.4485, + "step": 19840 + }, + { + "epoch": 0.7664388586431908, + "grad_norm": 1.3025776147842407, + "learning_rate": 0.00014890665019241412, + "loss": 0.266, + "step": 19850 + }, + { + "epoch": 0.7668249739372176, + "grad_norm": 2.0282516479492188, + "learning_rate": 0.00014888090917281234, + "loss": 0.7752, + "step": 19860 + }, + { + "epoch": 0.7672110892312445, + "grad_norm": 0.6557582020759583, + "learning_rate": 0.00014885516815321058, + "loss": 0.256, + "step": 19870 + }, + { + "epoch": 0.7675972045252712, + "grad_norm": 0.13688494265079498, + "learning_rate": 0.00014882942713360876, + "loss": 0.263, + "step": 19880 + }, + { + "epoch": 0.767983319819298, + "grad_norm": 0.04091642051935196, + "learning_rate": 0.00014880368611400698, + "loss": 0.276, + "step": 19890 + }, + { + "epoch": 0.7683694351133248, + "grad_norm": 3.054969072341919, + "learning_rate": 0.0001487779450944052, + "loss": 0.5198, + "step": 19900 + }, + { + "epoch": 0.7687555504073517, + "grad_norm": 0.5707372426986694, + "learning_rate": 0.0001487522040748034, + "loss": 0.2715, + "step": 19910 + }, + { + "epoch": 0.7691416657013784, + "grad_norm": 0.477830708026886, + "learning_rate": 0.00014872646305520162, + "loss": 0.2517, + "step": 19920 + }, + { + "epoch": 0.7695277809954052, + "grad_norm": 1.0550785064697266, + "learning_rate": 0.00014870072203559983, + "loss": 0.2552, + "step": 19930 + }, + { + "epoch": 0.7699138962894321, + "grad_norm": 1.9678715467453003, + "learning_rate": 0.00014867498101599807, + "loss": 0.4487, + "step": 19940 + }, + { + "epoch": 0.7703000115834588, + "grad_norm": 0.4506283104419708, + "learning_rate": 0.00014864923999639626, + "loss": 0.2632, + "step": 19950 + }, + { + "epoch": 0.7706861268774856, + "grad_norm": 1.8445035219192505, + "learning_rate": 0.0001486234989767945, + "loss": 0.3315, + "step": 19960 + }, + { + "epoch": 0.7710722421715124, + "grad_norm": 2.6394076347351074, + "learning_rate": 0.00014859775795719268, + "loss": 0.3216, + "step": 19970 + }, + { + "epoch": 0.7714583574655393, + "grad_norm": 0.6717782020568848, + "learning_rate": 0.0001485720169375909, + "loss": 0.3531, + "step": 19980 + }, + { + "epoch": 0.771844472759566, + "grad_norm": 0.5959204435348511, + "learning_rate": 0.0001485462759179891, + "loss": 0.2095, + "step": 
19990 + }, + { + "epoch": 0.7722305880535928, + "grad_norm": 1.874375581741333, + "learning_rate": 0.00014852053489838732, + "loss": 0.4845, + "step": 20000 + }, + { + "epoch": 0.7726167033476196, + "grad_norm": 2.6810474395751953, + "learning_rate": 0.00014849479387878556, + "loss": 0.471, + "step": 20010 + }, + { + "epoch": 0.7730028186416464, + "grad_norm": 0.5498594045639038, + "learning_rate": 0.00014846905285918375, + "loss": 0.2265, + "step": 20020 + }, + { + "epoch": 0.7733889339356732, + "grad_norm": 0.7658601999282837, + "learning_rate": 0.000148443311839582, + "loss": 0.4459, + "step": 20030 + }, + { + "epoch": 0.7737750492297, + "grad_norm": 0.5597706437110901, + "learning_rate": 0.00014841757081998018, + "loss": 0.4317, + "step": 20040 + }, + { + "epoch": 0.7741611645237267, + "grad_norm": 0.740342915058136, + "learning_rate": 0.0001483918298003784, + "loss": 0.4158, + "step": 20050 + }, + { + "epoch": 0.7745472798177536, + "grad_norm": 0.6069484949111938, + "learning_rate": 0.00014836608878077663, + "loss": 0.2363, + "step": 20060 + }, + { + "epoch": 0.7749333951117804, + "grad_norm": 0.9331381916999817, + "learning_rate": 0.00014834034776117482, + "loss": 0.2063, + "step": 20070 + }, + { + "epoch": 0.7753195104058072, + "grad_norm": 1.298399806022644, + "learning_rate": 0.00014831460674157306, + "loss": 0.319, + "step": 20080 + }, + { + "epoch": 0.7757056256998339, + "grad_norm": 1.887229323387146, + "learning_rate": 0.00014828886572197124, + "loss": 0.3805, + "step": 20090 + }, + { + "epoch": 0.7760917409938608, + "grad_norm": 0.5734463930130005, + "learning_rate": 0.00014826312470236948, + "loss": 0.392, + "step": 20100 + }, + { + "epoch": 0.7764778562878876, + "grad_norm": 1.10410475730896, + "learning_rate": 0.00014823738368276767, + "loss": 0.3222, + "step": 20110 + }, + { + "epoch": 0.7768639715819143, + "grad_norm": 3.1354923248291016, + "learning_rate": 0.00014821164266316588, + "loss": 0.3434, + "step": 20120 + }, + { + "epoch": 0.7772500868759411, + "grad_norm": 1.9921913146972656, + "learning_rate": 0.00014818590164356412, + "loss": 0.222, + "step": 20130 + }, + { + "epoch": 0.777636202169968, + "grad_norm": 0.38615912199020386, + "learning_rate": 0.0001481601606239623, + "loss": 0.1859, + "step": 20140 + }, + { + "epoch": 0.7780223174639948, + "grad_norm": 0.919252336025238, + "learning_rate": 0.00014813441960436055, + "loss": 0.3889, + "step": 20150 + }, + { + "epoch": 0.7784084327580215, + "grad_norm": 0.9474624991416931, + "learning_rate": 0.00014810867858475874, + "loss": 0.3086, + "step": 20160 + }, + { + "epoch": 0.7787945480520484, + "grad_norm": 1.1992007493972778, + "learning_rate": 0.00014808293756515698, + "loss": 0.3949, + "step": 20170 + }, + { + "epoch": 0.7791806633460752, + "grad_norm": 2.2405364513397217, + "learning_rate": 0.00014805719654555516, + "loss": 0.4963, + "step": 20180 + }, + { + "epoch": 0.7795667786401019, + "grad_norm": 0.9163286685943604, + "learning_rate": 0.00014803145552595338, + "loss": 0.4022, + "step": 20190 + }, + { + "epoch": 0.7799528939341287, + "grad_norm": 0.995309591293335, + "learning_rate": 0.00014800571450635162, + "loss": 0.244, + "step": 20200 + }, + { + "epoch": 0.7803390092281556, + "grad_norm": 3.0966508388519287, + "learning_rate": 0.0001479799734867498, + "loss": 0.4165, + "step": 20210 + }, + { + "epoch": 0.7807251245221823, + "grad_norm": 1.7679264545440674, + "learning_rate": 0.00014795423246714804, + "loss": 0.3264, + "step": 20220 + }, + { + "epoch": 0.7811112398162091, + "grad_norm": 3.405724048614502, 
+ "learning_rate": 0.00014792849144754623, + "loss": 0.5292, + "step": 20230 + }, + { + "epoch": 0.7814973551102359, + "grad_norm": 0.5335774421691895, + "learning_rate": 0.00014790275042794447, + "loss": 0.2901, + "step": 20240 + }, + { + "epoch": 0.7818834704042628, + "grad_norm": 2.332176446914673, + "learning_rate": 0.00014787700940834268, + "loss": 0.3116, + "step": 20250 + }, + { + "epoch": 0.7822695856982895, + "grad_norm": 1.226844072341919, + "learning_rate": 0.00014785126838874087, + "loss": 0.332, + "step": 20260 + }, + { + "epoch": 0.7826557009923163, + "grad_norm": 0.8310544490814209, + "learning_rate": 0.0001478255273691391, + "loss": 0.3929, + "step": 20270 + }, + { + "epoch": 0.7830418162863431, + "grad_norm": 4.065925121307373, + "learning_rate": 0.0001477997863495373, + "loss": 0.3044, + "step": 20280 + }, + { + "epoch": 0.7834279315803699, + "grad_norm": 1.3661054372787476, + "learning_rate": 0.00014777404532993554, + "loss": 0.2396, + "step": 20290 + }, + { + "epoch": 0.7838140468743967, + "grad_norm": 2.047938346862793, + "learning_rate": 0.00014774830431033372, + "loss": 0.2954, + "step": 20300 + }, + { + "epoch": 0.7842001621684235, + "grad_norm": 2.099759817123413, + "learning_rate": 0.00014772256329073196, + "loss": 0.1987, + "step": 20310 + }, + { + "epoch": 0.7845862774624502, + "grad_norm": 4.439823627471924, + "learning_rate": 0.00014769682227113018, + "loss": 0.2449, + "step": 20320 + }, + { + "epoch": 0.7849723927564771, + "grad_norm": 1.894250750541687, + "learning_rate": 0.00014767108125152836, + "loss": 0.3335, + "step": 20330 + }, + { + "epoch": 0.7853585080505039, + "grad_norm": 2.2916924953460693, + "learning_rate": 0.0001476453402319266, + "loss": 0.3828, + "step": 20340 + }, + { + "epoch": 0.7857446233445307, + "grad_norm": 3.7847397327423096, + "learning_rate": 0.0001476195992123248, + "loss": 0.2142, + "step": 20350 + }, + { + "epoch": 0.7861307386385574, + "grad_norm": 2.7507593631744385, + "learning_rate": 0.00014759385819272303, + "loss": 0.39, + "step": 20360 + }, + { + "epoch": 0.7865168539325843, + "grad_norm": 2.500195264816284, + "learning_rate": 0.00014756811717312124, + "loss": 0.4163, + "step": 20370 + }, + { + "epoch": 0.7869029692266111, + "grad_norm": 1.3699760437011719, + "learning_rate": 0.00014754237615351946, + "loss": 0.2407, + "step": 20380 + }, + { + "epoch": 0.7872890845206378, + "grad_norm": 0.9736176133155823, + "learning_rate": 0.00014751663513391767, + "loss": 0.3644, + "step": 20390 + }, + { + "epoch": 0.7876751998146646, + "grad_norm": 0.9103217720985413, + "learning_rate": 0.00014749089411431588, + "loss": 0.1304, + "step": 20400 + }, + { + "epoch": 0.7880613151086915, + "grad_norm": 0.7736025452613831, + "learning_rate": 0.0001474651530947141, + "loss": 0.3848, + "step": 20410 + }, + { + "epoch": 0.7884474304027183, + "grad_norm": 1.6167576313018799, + "learning_rate": 0.00014743941207511228, + "loss": 0.3469, + "step": 20420 + }, + { + "epoch": 0.788833545696745, + "grad_norm": 0.1359117031097412, + "learning_rate": 0.00014741367105551052, + "loss": 0.28, + "step": 20430 + }, + { + "epoch": 0.7892196609907719, + "grad_norm": 0.20857305824756622, + "learning_rate": 0.00014738793003590874, + "loss": 0.2406, + "step": 20440 + }, + { + "epoch": 0.7896057762847987, + "grad_norm": 0.9618992805480957, + "learning_rate": 0.00014736218901630695, + "loss": 0.243, + "step": 20450 + }, + { + "epoch": 0.7899918915788254, + "grad_norm": 0.4663112163543701, + "learning_rate": 0.00014733644799670516, + "loss": 0.214, + "step": 20460 + 
}, + { + "epoch": 0.7903780068728522, + "grad_norm": 1.3922615051269531, + "learning_rate": 0.00014731070697710338, + "loss": 0.4601, + "step": 20470 + }, + { + "epoch": 0.7907641221668791, + "grad_norm": 1.5706521272659302, + "learning_rate": 0.0001472849659575016, + "loss": 0.2187, + "step": 20480 + }, + { + "epoch": 0.7911502374609058, + "grad_norm": 1.9315848350524902, + "learning_rate": 0.00014725922493789978, + "loss": 0.2691, + "step": 20490 + }, + { + "epoch": 0.7915363527549326, + "grad_norm": 2.6131045818328857, + "learning_rate": 0.00014723348391829802, + "loss": 0.335, + "step": 20500 + }, + { + "epoch": 0.7919224680489594, + "grad_norm": 2.6561343669891357, + "learning_rate": 0.00014720774289869623, + "loss": 0.2166, + "step": 20510 + }, + { + "epoch": 0.7923085833429863, + "grad_norm": 1.247574806213379, + "learning_rate": 0.00014718200187909444, + "loss": 0.4276, + "step": 20520 + }, + { + "epoch": 0.792694698637013, + "grad_norm": 0.7353020310401917, + "learning_rate": 0.00014715626085949266, + "loss": 0.3442, + "step": 20530 + }, + { + "epoch": 0.7930808139310398, + "grad_norm": 0.1100919172167778, + "learning_rate": 0.00014713051983989087, + "loss": 0.21, + "step": 20540 + }, + { + "epoch": 0.7934669292250666, + "grad_norm": 0.6608699560165405, + "learning_rate": 0.00014710477882028908, + "loss": 0.2593, + "step": 20550 + }, + { + "epoch": 0.7938530445190934, + "grad_norm": 1.0959700345993042, + "learning_rate": 0.0001470790378006873, + "loss": 0.1864, + "step": 20560 + }, + { + "epoch": 0.7942391598131202, + "grad_norm": 1.469408392906189, + "learning_rate": 0.0001470532967810855, + "loss": 0.251, + "step": 20570 + }, + { + "epoch": 0.794625275107147, + "grad_norm": 1.4239304065704346, + "learning_rate": 0.00014702755576148372, + "loss": 0.1782, + "step": 20580 + }, + { + "epoch": 0.7950113904011737, + "grad_norm": 0.5389681458473206, + "learning_rate": 0.00014700181474188194, + "loss": 0.3228, + "step": 20590 + }, + { + "epoch": 0.7953975056952006, + "grad_norm": 0.4744633436203003, + "learning_rate": 0.00014697607372228015, + "loss": 0.5448, + "step": 20600 + }, + { + "epoch": 0.7957836209892274, + "grad_norm": 1.1155211925506592, + "learning_rate": 0.00014695033270267836, + "loss": 0.3341, + "step": 20610 + }, + { + "epoch": 0.7961697362832542, + "grad_norm": 1.4301745891571045, + "learning_rate": 0.00014692459168307658, + "loss": 0.2376, + "step": 20620 + }, + { + "epoch": 0.7965558515772809, + "grad_norm": 2.0889878273010254, + "learning_rate": 0.0001468988506634748, + "loss": 0.252, + "step": 20630 + }, + { + "epoch": 0.7969419668713078, + "grad_norm": 1.420873761177063, + "learning_rate": 0.000146873109643873, + "loss": 0.4033, + "step": 20640 + }, + { + "epoch": 0.7973280821653346, + "grad_norm": 1.9909567832946777, + "learning_rate": 0.00014684736862427122, + "loss": 0.5206, + "step": 20650 + }, + { + "epoch": 0.7977141974593613, + "grad_norm": 1.3584216833114624, + "learning_rate": 0.00014682162760466943, + "loss": 0.2643, + "step": 20660 + }, + { + "epoch": 0.7981003127533881, + "grad_norm": 1.5488578081130981, + "learning_rate": 0.00014679588658506764, + "loss": 0.4098, + "step": 20670 + }, + { + "epoch": 0.798486428047415, + "grad_norm": 2.0128777027130127, + "learning_rate": 0.00014677014556546586, + "loss": 0.2498, + "step": 20680 + }, + { + "epoch": 0.7988725433414418, + "grad_norm": 0.6591671705245972, + "learning_rate": 0.00014674440454586407, + "loss": 0.3131, + "step": 20690 + }, + { + "epoch": 0.7992586586354685, + "grad_norm": 0.8321843147277832, + 
"learning_rate": 0.00014671866352626228, + "loss": 0.3849, + "step": 20700 + }, + { + "epoch": 0.7996447739294954, + "grad_norm": 1.4672768115997314, + "learning_rate": 0.0001466929225066605, + "loss": 0.3056, + "step": 20710 + }, + { + "epoch": 0.8000308892235222, + "grad_norm": 1.5972867012023926, + "learning_rate": 0.0001466671814870587, + "loss": 0.2699, + "step": 20720 + }, + { + "epoch": 0.8004170045175489, + "grad_norm": 0.685972273349762, + "learning_rate": 0.00014664144046745692, + "loss": 0.2598, + "step": 20730 + }, + { + "epoch": 0.8008031198115757, + "grad_norm": 1.2639611959457397, + "learning_rate": 0.00014661569944785514, + "loss": 0.2158, + "step": 20740 + }, + { + "epoch": 0.8011892351056026, + "grad_norm": 2.0752620697021484, + "learning_rate": 0.00014658995842825335, + "loss": 0.2782, + "step": 20750 + }, + { + "epoch": 0.8015753503996293, + "grad_norm": 4.334362983703613, + "learning_rate": 0.00014656421740865156, + "loss": 0.5083, + "step": 20760 + }, + { + "epoch": 0.8019614656936561, + "grad_norm": 2.233436107635498, + "learning_rate": 0.00014653847638904978, + "loss": 0.4378, + "step": 20770 + }, + { + "epoch": 0.8023475809876829, + "grad_norm": 0.5392621755599976, + "learning_rate": 0.000146512735369448, + "loss": 0.4467, + "step": 20780 + }, + { + "epoch": 0.8027336962817098, + "grad_norm": 0.3768058121204376, + "learning_rate": 0.0001464869943498462, + "loss": 0.3542, + "step": 20790 + }, + { + "epoch": 0.8031198115757365, + "grad_norm": 0.5712292194366455, + "learning_rate": 0.00014646125333024442, + "loss": 0.281, + "step": 20800 + }, + { + "epoch": 0.8035059268697633, + "grad_norm": 0.8674315214157104, + "learning_rate": 0.00014643551231064263, + "loss": 0.2945, + "step": 20810 + }, + { + "epoch": 0.8038920421637901, + "grad_norm": 1.6206015348434448, + "learning_rate": 0.00014640977129104084, + "loss": 0.2627, + "step": 20820 + }, + { + "epoch": 0.804278157457817, + "grad_norm": 1.8807138204574585, + "learning_rate": 0.00014638403027143906, + "loss": 0.3768, + "step": 20830 + }, + { + "epoch": 0.8046642727518437, + "grad_norm": 0.9721212387084961, + "learning_rate": 0.00014635828925183727, + "loss": 0.4511, + "step": 20840 + }, + { + "epoch": 0.8050503880458705, + "grad_norm": 0.569038987159729, + "learning_rate": 0.00014633254823223548, + "loss": 0.2235, + "step": 20850 + }, + { + "epoch": 0.8054365033398972, + "grad_norm": 0.5981199741363525, + "learning_rate": 0.0001463068072126337, + "loss": 0.1413, + "step": 20860 + }, + { + "epoch": 0.8058226186339241, + "grad_norm": 3.696936845779419, + "learning_rate": 0.00014628106619303194, + "loss": 0.3779, + "step": 20870 + }, + { + "epoch": 0.8062087339279509, + "grad_norm": 1.5039314031600952, + "learning_rate": 0.00014625532517343012, + "loss": 0.2117, + "step": 20880 + }, + { + "epoch": 0.8065948492219777, + "grad_norm": 1.7800476551055908, + "learning_rate": 0.00014622958415382834, + "loss": 0.2343, + "step": 20890 + }, + { + "epoch": 0.8069809645160044, + "grad_norm": 0.18587611615657806, + "learning_rate": 0.00014620384313422655, + "loss": 0.2229, + "step": 20900 + }, + { + "epoch": 0.8073670798100313, + "grad_norm": 3.5351600646972656, + "learning_rate": 0.00014617810211462476, + "loss": 0.3632, + "step": 20910 + }, + { + "epoch": 0.8077531951040581, + "grad_norm": 3.9036381244659424, + "learning_rate": 0.00014615236109502298, + "loss": 0.2941, + "step": 20920 + }, + { + "epoch": 0.8081393103980848, + "grad_norm": 1.9554537534713745, + "learning_rate": 0.0001461266200754212, + "loss": 0.2685, + "step": 
20930 + }, + { + "epoch": 0.8085254256921116, + "grad_norm": 1.0424940586090088, + "learning_rate": 0.00014610087905581943, + "loss": 0.2617, + "step": 20940 + }, + { + "epoch": 0.8089115409861385, + "grad_norm": 6.593061923980713, + "learning_rate": 0.00014607513803621762, + "loss": 0.5034, + "step": 20950 + }, + { + "epoch": 0.8092976562801653, + "grad_norm": 11.373255729675293, + "learning_rate": 0.00014604939701661583, + "loss": 0.2694, + "step": 20960 + }, + { + "epoch": 0.809683771574192, + "grad_norm": 1.4843833446502686, + "learning_rate": 0.00014602365599701404, + "loss": 0.4526, + "step": 20970 + }, + { + "epoch": 0.8100698868682189, + "grad_norm": 3.6086366176605225, + "learning_rate": 0.00014599791497741225, + "loss": 0.4536, + "step": 20980 + }, + { + "epoch": 0.8104560021622457, + "grad_norm": 0.6381124258041382, + "learning_rate": 0.00014597217395781047, + "loss": 0.2918, + "step": 20990 + }, + { + "epoch": 0.8108421174562724, + "grad_norm": 1.9507087469100952, + "learning_rate": 0.00014594643293820868, + "loss": 0.1625, + "step": 21000 + }, + { + "epoch": 0.8112282327502992, + "grad_norm": 0.954914391040802, + "learning_rate": 0.00014592069191860692, + "loss": 0.4204, + "step": 21010 + }, + { + "epoch": 0.8116143480443261, + "grad_norm": 2.633601427078247, + "learning_rate": 0.0001458949508990051, + "loss": 0.4017, + "step": 21020 + }, + { + "epoch": 0.8120004633383529, + "grad_norm": 0.7492280602455139, + "learning_rate": 0.00014586920987940332, + "loss": 0.3209, + "step": 21030 + }, + { + "epoch": 0.8123865786323796, + "grad_norm": 1.1670303344726562, + "learning_rate": 0.00014584346885980153, + "loss": 0.3361, + "step": 21040 + }, + { + "epoch": 0.8127726939264064, + "grad_norm": 0.8462283611297607, + "learning_rate": 0.00014581772784019975, + "loss": 0.3156, + "step": 21050 + }, + { + "epoch": 0.8131588092204333, + "grad_norm": 2.151671886444092, + "learning_rate": 0.000145791986820598, + "loss": 0.2081, + "step": 21060 + }, + { + "epoch": 0.81354492451446, + "grad_norm": 1.0742170810699463, + "learning_rate": 0.00014576624580099617, + "loss": 0.2247, + "step": 21070 + }, + { + "epoch": 0.8139310398084868, + "grad_norm": 1.2256931066513062, + "learning_rate": 0.00014574050478139442, + "loss": 0.3542, + "step": 21080 + }, + { + "epoch": 0.8143171551025136, + "grad_norm": 3.740055561065674, + "learning_rate": 0.0001457147637617926, + "loss": 0.1856, + "step": 21090 + }, + { + "epoch": 0.8147032703965404, + "grad_norm": 0.03290783613920212, + "learning_rate": 0.00014568902274219081, + "loss": 0.2616, + "step": 21100 + }, + { + "epoch": 0.8150893856905672, + "grad_norm": 0.13995541632175446, + "learning_rate": 0.00014566328172258903, + "loss": 0.2107, + "step": 21110 + }, + { + "epoch": 0.815475500984594, + "grad_norm": 0.44371533393859863, + "learning_rate": 0.00014563754070298724, + "loss": 0.3091, + "step": 21120 + }, + { + "epoch": 0.8158616162786207, + "grad_norm": 2.7269155979156494, + "learning_rate": 0.00014561179968338548, + "loss": 0.5674, + "step": 21130 + }, + { + "epoch": 0.8162477315726476, + "grad_norm": 0.7148515582084656, + "learning_rate": 0.00014558605866378367, + "loss": 0.4721, + "step": 21140 + }, + { + "epoch": 0.8166338468666744, + "grad_norm": 1.0939961671829224, + "learning_rate": 0.0001455603176441819, + "loss": 0.2666, + "step": 21150 + }, + { + "epoch": 0.8170199621607012, + "grad_norm": 1.7923939228057861, + "learning_rate": 0.0001455345766245801, + "loss": 0.2634, + "step": 21160 + }, + { + "epoch": 0.8174060774547279, + "grad_norm": 
0.725130021572113, + "learning_rate": 0.00014550883560497834, + "loss": 0.1968, + "step": 21170 + }, + { + "epoch": 0.8177921927487548, + "grad_norm": 0.443892240524292, + "learning_rate": 0.00014548309458537655, + "loss": 0.556, + "step": 21180 + }, + { + "epoch": 0.8181783080427816, + "grad_norm": 1.3551362752914429, + "learning_rate": 0.00014545735356577473, + "loss": 0.4115, + "step": 21190 + }, + { + "epoch": 0.8185644233368083, + "grad_norm": 0.6360037922859192, + "learning_rate": 0.00014543161254617297, + "loss": 0.3176, + "step": 21200 + }, + { + "epoch": 0.8189505386308351, + "grad_norm": 2.634549140930176, + "learning_rate": 0.00014540587152657116, + "loss": 0.3662, + "step": 21210 + }, + { + "epoch": 0.819336653924862, + "grad_norm": 3.267479181289673, + "learning_rate": 0.0001453801305069694, + "loss": 0.2925, + "step": 21220 + }, + { + "epoch": 0.8197227692188888, + "grad_norm": 1.3607991933822632, + "learning_rate": 0.0001453543894873676, + "loss": 0.1795, + "step": 21230 + }, + { + "epoch": 0.8201088845129155, + "grad_norm": 0.6499636769294739, + "learning_rate": 0.00014532864846776583, + "loss": 0.4451, + "step": 21240 + }, + { + "epoch": 0.8204949998069424, + "grad_norm": 0.7486141920089722, + "learning_rate": 0.00014530290744816404, + "loss": 0.3982, + "step": 21250 + }, + { + "epoch": 0.8208811151009692, + "grad_norm": 0.6481244564056396, + "learning_rate": 0.00014527716642856223, + "loss": 0.3358, + "step": 21260 + }, + { + "epoch": 0.8212672303949959, + "grad_norm": 1.0736982822418213, + "learning_rate": 0.00014525142540896047, + "loss": 0.5264, + "step": 21270 + }, + { + "epoch": 0.8216533456890227, + "grad_norm": 2.0467801094055176, + "learning_rate": 0.00014522568438935865, + "loss": 0.3645, + "step": 21280 + }, + { + "epoch": 0.8220394609830496, + "grad_norm": 2.671499013900757, + "learning_rate": 0.0001451999433697569, + "loss": 0.4779, + "step": 21290 + }, + { + "epoch": 0.8224255762770764, + "grad_norm": 1.4449695348739624, + "learning_rate": 0.00014517420235015508, + "loss": 0.3555, + "step": 21300 + }, + { + "epoch": 0.8228116915711031, + "grad_norm": 1.7484570741653442, + "learning_rate": 0.00014514846133055332, + "loss": 0.2921, + "step": 21310 + }, + { + "epoch": 0.8231978068651299, + "grad_norm": 0.9985783100128174, + "learning_rate": 0.00014512272031095153, + "loss": 0.1861, + "step": 21320 + }, + { + "epoch": 0.8235839221591568, + "grad_norm": 2.0824766159057617, + "learning_rate": 0.00014509697929134972, + "loss": 0.3582, + "step": 21330 + }, + { + "epoch": 0.8239700374531835, + "grad_norm": 0.8448216915130615, + "learning_rate": 0.00014507123827174796, + "loss": 0.3674, + "step": 21340 + }, + { + "epoch": 0.8243561527472103, + "grad_norm": 2.027111053466797, + "learning_rate": 0.00014504549725214615, + "loss": 0.3297, + "step": 21350 + }, + { + "epoch": 0.8247422680412371, + "grad_norm": 1.560604214668274, + "learning_rate": 0.0001450197562325444, + "loss": 0.3303, + "step": 21360 + }, + { + "epoch": 0.825128383335264, + "grad_norm": 2.179563045501709, + "learning_rate": 0.0001449940152129426, + "loss": 0.1704, + "step": 21370 + }, + { + "epoch": 0.8255144986292907, + "grad_norm": 1.6268993616104126, + "learning_rate": 0.00014496827419334081, + "loss": 0.3316, + "step": 21380 + }, + { + "epoch": 0.8259006139233175, + "grad_norm": 0.8986232280731201, + "learning_rate": 0.00014494253317373903, + "loss": 0.3361, + "step": 21390 + }, + { + "epoch": 0.8262867292173443, + "grad_norm": 0.8017566204071045, + "learning_rate": 0.00014491679215413721, + "loss": 
0.3992, + "step": 21400 + }, + { + "epoch": 0.8266728445113711, + "grad_norm": 0.879162073135376, + "learning_rate": 0.00014489105113453545, + "loss": 0.3747, + "step": 21410 + }, + { + "epoch": 0.8270589598053979, + "grad_norm": 1.948309302330017, + "learning_rate": 0.00014486531011493364, + "loss": 0.2323, + "step": 21420 + }, + { + "epoch": 0.8274450750994247, + "grad_norm": 1.34186851978302, + "learning_rate": 0.00014483956909533188, + "loss": 0.3856, + "step": 21430 + }, + { + "epoch": 0.8278311903934514, + "grad_norm": 1.3884105682373047, + "learning_rate": 0.0001448138280757301, + "loss": 0.3044, + "step": 21440 + }, + { + "epoch": 0.8282173056874783, + "grad_norm": 1.3283358812332153, + "learning_rate": 0.0001447880870561283, + "loss": 0.3624, + "step": 21450 + }, + { + "epoch": 0.8286034209815051, + "grad_norm": 13.829493522644043, + "learning_rate": 0.00014476234603652652, + "loss": 0.2217, + "step": 21460 + }, + { + "epoch": 0.8289895362755318, + "grad_norm": 3.4602255821228027, + "learning_rate": 0.0001447366050169247, + "loss": 0.4964, + "step": 21470 + }, + { + "epoch": 0.8293756515695587, + "grad_norm": 0.42417749762535095, + "learning_rate": 0.00014471086399732295, + "loss": 0.2262, + "step": 21480 + }, + { + "epoch": 0.8297617668635855, + "grad_norm": 3.1674726009368896, + "learning_rate": 0.00014468512297772113, + "loss": 0.3327, + "step": 21490 + }, + { + "epoch": 0.8301478821576123, + "grad_norm": 0.7226410508155823, + "learning_rate": 0.00014465938195811937, + "loss": 0.3152, + "step": 21500 + }, + { + "epoch": 0.830533997451639, + "grad_norm": 0.7477544546127319, + "learning_rate": 0.0001446336409385176, + "loss": 0.3676, + "step": 21510 + }, + { + "epoch": 0.8309201127456659, + "grad_norm": 1.6237748861312866, + "learning_rate": 0.0001446078999189158, + "loss": 0.336, + "step": 21520 + }, + { + "epoch": 0.8313062280396927, + "grad_norm": 2.8118655681610107, + "learning_rate": 0.00014458215889931401, + "loss": 0.3513, + "step": 21530 + }, + { + "epoch": 0.8316923433337194, + "grad_norm": 2.6571335792541504, + "learning_rate": 0.0001445564178797122, + "loss": 0.4008, + "step": 21540 + }, + { + "epoch": 0.8320784586277462, + "grad_norm": 0.7042214870452881, + "learning_rate": 0.00014453067686011044, + "loss": 0.3433, + "step": 21550 + }, + { + "epoch": 0.8324645739217731, + "grad_norm": 0.1551884561777115, + "learning_rate": 0.00014450493584050865, + "loss": 0.1748, + "step": 21560 + }, + { + "epoch": 0.8328506892157999, + "grad_norm": 1.2595586776733398, + "learning_rate": 0.00014447919482090687, + "loss": 0.2567, + "step": 21570 + }, + { + "epoch": 0.8332368045098266, + "grad_norm": 3.800837516784668, + "learning_rate": 0.00014445345380130508, + "loss": 0.31, + "step": 21580 + }, + { + "epoch": 0.8336229198038534, + "grad_norm": 2.2269585132598877, + "learning_rate": 0.0001444277127817033, + "loss": 0.5146, + "step": 21590 + }, + { + "epoch": 0.8340090350978803, + "grad_norm": 0.5263709425926208, + "learning_rate": 0.0001444019717621015, + "loss": 0.2369, + "step": 21600 + }, + { + "epoch": 0.834395150391907, + "grad_norm": 0.27564361691474915, + "learning_rate": 0.00014437623074249972, + "loss": 0.308, + "step": 21610 + }, + { + "epoch": 0.8347812656859338, + "grad_norm": 4.639162540435791, + "learning_rate": 0.00014435048972289793, + "loss": 0.3806, + "step": 21620 + }, + { + "epoch": 0.8351673809799606, + "grad_norm": 0.9607310891151428, + "learning_rate": 0.00014432474870329615, + "loss": 0.2245, + "step": 21630 + }, + { + "epoch": 0.8355534962739875, + 
"grad_norm": 4.01082706451416, + "learning_rate": 0.00014429900768369436, + "loss": 0.3934, + "step": 21640 + }, + { + "epoch": 0.8359396115680142, + "grad_norm": 0.9401382803916931, + "learning_rate": 0.00014427326666409257, + "loss": 0.4726, + "step": 21650 + }, + { + "epoch": 0.836325726862041, + "grad_norm": 2.1189887523651123, + "learning_rate": 0.0001442475256444908, + "loss": 0.5434, + "step": 21660 + }, + { + "epoch": 0.8367118421560678, + "grad_norm": 2.370849132537842, + "learning_rate": 0.000144221784624889, + "loss": 0.3885, + "step": 21670 + }, + { + "epoch": 0.8370979574500946, + "grad_norm": 0.595461368560791, + "learning_rate": 0.00014419604360528721, + "loss": 0.3811, + "step": 21680 + }, + { + "epoch": 0.8374840727441214, + "grad_norm": 0.9013121128082275, + "learning_rate": 0.00014417030258568543, + "loss": 0.2406, + "step": 21690 + }, + { + "epoch": 0.8378701880381482, + "grad_norm": 1.3803203105926514, + "learning_rate": 0.00014414456156608364, + "loss": 0.2197, + "step": 21700 + }, + { + "epoch": 0.8382563033321749, + "grad_norm": 1.6163750886917114, + "learning_rate": 0.00014411882054648185, + "loss": 0.2622, + "step": 21710 + }, + { + "epoch": 0.8386424186262018, + "grad_norm": 3.604384660720825, + "learning_rate": 0.00014409307952688007, + "loss": 0.3688, + "step": 21720 + }, + { + "epoch": 0.8390285339202286, + "grad_norm": 1.4415024518966675, + "learning_rate": 0.00014406733850727828, + "loss": 0.2375, + "step": 21730 + }, + { + "epoch": 0.8394146492142553, + "grad_norm": 1.4819844961166382, + "learning_rate": 0.0001440415974876765, + "loss": 0.4065, + "step": 21740 + }, + { + "epoch": 0.8398007645082822, + "grad_norm": 1.3991562128067017, + "learning_rate": 0.0001440158564680747, + "loss": 0.2942, + "step": 21750 + }, + { + "epoch": 0.840186879802309, + "grad_norm": 2.022538185119629, + "learning_rate": 0.00014399011544847292, + "loss": 0.2699, + "step": 21760 + }, + { + "epoch": 0.8405729950963358, + "grad_norm": 2.418179512023926, + "learning_rate": 0.00014396437442887113, + "loss": 0.3481, + "step": 21770 + }, + { + "epoch": 0.8409591103903625, + "grad_norm": 0.930482029914856, + "learning_rate": 0.00014393863340926935, + "loss": 0.3257, + "step": 21780 + }, + { + "epoch": 0.8413452256843894, + "grad_norm": 3.616676092147827, + "learning_rate": 0.00014391289238966756, + "loss": 0.3844, + "step": 21790 + }, + { + "epoch": 0.8417313409784162, + "grad_norm": 1.7993167638778687, + "learning_rate": 0.00014388715137006577, + "loss": 0.4569, + "step": 21800 + }, + { + "epoch": 0.8421174562724429, + "grad_norm": 1.9243824481964111, + "learning_rate": 0.000143861410350464, + "loss": 0.282, + "step": 21810 + }, + { + "epoch": 0.8425035715664697, + "grad_norm": 1.6578466892242432, + "learning_rate": 0.0001438356693308622, + "loss": 0.314, + "step": 21820 + }, + { + "epoch": 0.8428896868604966, + "grad_norm": 1.4833110570907593, + "learning_rate": 0.00014380992831126041, + "loss": 0.2698, + "step": 21830 + }, + { + "epoch": 0.8432758021545234, + "grad_norm": 1.9081813097000122, + "learning_rate": 0.00014378418729165863, + "loss": 0.2392, + "step": 21840 + }, + { + "epoch": 0.8436619174485501, + "grad_norm": 1.8436548709869385, + "learning_rate": 0.00014375844627205684, + "loss": 0.1906, + "step": 21850 + }, + { + "epoch": 0.8440480327425769, + "grad_norm": 4.679655075073242, + "learning_rate": 0.00014373270525245505, + "loss": 0.6446, + "step": 21860 + }, + { + "epoch": 0.8444341480366038, + "grad_norm": 1.8216800689697266, + "learning_rate": 0.0001437069642328533, + 
"loss": 0.3272, + "step": 21870 + }, + { + "epoch": 0.8448202633306305, + "grad_norm": 1.0107386112213135, + "learning_rate": 0.00014368122321325148, + "loss": 0.3003, + "step": 21880 + }, + { + "epoch": 0.8452063786246573, + "grad_norm": 0.9573041796684265, + "learning_rate": 0.0001436554821936497, + "loss": 0.1757, + "step": 21890 + }, + { + "epoch": 0.8455924939186841, + "grad_norm": 0.9367936253547668, + "learning_rate": 0.0001436297411740479, + "loss": 0.2166, + "step": 21900 + }, + { + "epoch": 0.845978609212711, + "grad_norm": 3.1247951984405518, + "learning_rate": 0.00014360400015444612, + "loss": 0.3488, + "step": 21910 + }, + { + "epoch": 0.8463647245067377, + "grad_norm": 3.9438281059265137, + "learning_rate": 0.00014357825913484433, + "loss": 0.4498, + "step": 21920 + }, + { + "epoch": 0.8467508398007645, + "grad_norm": 0.909572958946228, + "learning_rate": 0.00014355251811524255, + "loss": 0.2698, + "step": 21930 + }, + { + "epoch": 0.8471369550947913, + "grad_norm": 2.6619715690612793, + "learning_rate": 0.0001435267770956408, + "loss": 0.4204, + "step": 21940 + }, + { + "epoch": 0.8475230703888181, + "grad_norm": 0.6143421530723572, + "learning_rate": 0.00014350103607603897, + "loss": 0.3573, + "step": 21950 + }, + { + "epoch": 0.8479091856828449, + "grad_norm": 0.3222682476043701, + "learning_rate": 0.0001434752950564372, + "loss": 0.2172, + "step": 21960 + }, + { + "epoch": 0.8482953009768717, + "grad_norm": 1.772538185119629, + "learning_rate": 0.0001434495540368354, + "loss": 0.4203, + "step": 21970 + }, + { + "epoch": 0.8486814162708984, + "grad_norm": 1.6327133178710938, + "learning_rate": 0.0001434238130172336, + "loss": 0.2153, + "step": 21980 + }, + { + "epoch": 0.8490675315649253, + "grad_norm": 1.0445518493652344, + "learning_rate": 0.00014339807199763183, + "loss": 0.3392, + "step": 21990 + }, + { + "epoch": 0.8494536468589521, + "grad_norm": 3.6096575260162354, + "learning_rate": 0.00014337233097803004, + "loss": 0.2691, + "step": 22000 + }, + { + "epoch": 0.8498397621529789, + "grad_norm": 1.4343204498291016, + "learning_rate": 0.00014334658995842828, + "loss": 0.3118, + "step": 22010 + }, + { + "epoch": 0.8502258774470057, + "grad_norm": 1.0348806381225586, + "learning_rate": 0.00014332084893882647, + "loss": 0.2886, + "step": 22020 + }, + { + "epoch": 0.8506119927410325, + "grad_norm": 0.5164201855659485, + "learning_rate": 0.00014329510791922468, + "loss": 0.2943, + "step": 22030 + }, + { + "epoch": 0.8509981080350593, + "grad_norm": 1.8109897375106812, + "learning_rate": 0.0001432693668996229, + "loss": 0.4021, + "step": 22040 + }, + { + "epoch": 0.851384223329086, + "grad_norm": 2.7065579891204834, + "learning_rate": 0.0001432436258800211, + "loss": 0.371, + "step": 22050 + }, + { + "epoch": 0.8517703386231129, + "grad_norm": 2.3028764724731445, + "learning_rate": 0.00014321788486041935, + "loss": 0.5026, + "step": 22060 + }, + { + "epoch": 0.8521564539171397, + "grad_norm": 1.3945609331130981, + "learning_rate": 0.00014319214384081753, + "loss": 0.4444, + "step": 22070 + }, + { + "epoch": 0.8525425692111664, + "grad_norm": 2.407951593399048, + "learning_rate": 0.00014316640282121577, + "loss": 0.4465, + "step": 22080 + }, + { + "epoch": 0.8529286845051932, + "grad_norm": 4.120944976806641, + "learning_rate": 0.00014314066180161396, + "loss": 0.3142, + "step": 22090 + }, + { + "epoch": 0.8533147997992201, + "grad_norm": 1.8841919898986816, + "learning_rate": 0.00014311492078201217, + "loss": 0.3609, + "step": 22100 + }, + { + "epoch": 0.8537009150932469, + 
"grad_norm": 5.1519951820373535, + "learning_rate": 0.00014308917976241039, + "loss": 0.3062, + "step": 22110 + }, + { + "epoch": 0.8540870303872736, + "grad_norm": 2.7280924320220947, + "learning_rate": 0.0001430634387428086, + "loss": 0.3678, + "step": 22120 + }, + { + "epoch": 0.8544731456813004, + "grad_norm": 0.23237809538841248, + "learning_rate": 0.00014303769772320684, + "loss": 0.2979, + "step": 22130 + }, + { + "epoch": 0.8548592609753273, + "grad_norm": 1.0587934255599976, + "learning_rate": 0.00014301195670360503, + "loss": 0.5672, + "step": 22140 + }, + { + "epoch": 0.855245376269354, + "grad_norm": 1.854447603225708, + "learning_rate": 0.00014298621568400327, + "loss": 0.3657, + "step": 22150 + }, + { + "epoch": 0.8556314915633808, + "grad_norm": 0.9766449332237244, + "learning_rate": 0.00014296047466440145, + "loss": 0.3219, + "step": 22160 + }, + { + "epoch": 0.8560176068574076, + "grad_norm": 1.7281047105789185, + "learning_rate": 0.00014293473364479967, + "loss": 0.3485, + "step": 22170 + }, + { + "epoch": 0.8564037221514345, + "grad_norm": 1.8366886377334595, + "learning_rate": 0.0001429089926251979, + "loss": 0.2929, + "step": 22180 + }, + { + "epoch": 0.8567898374454612, + "grad_norm": 0.8708136677742004, + "learning_rate": 0.0001428832516055961, + "loss": 0.3633, + "step": 22190 + }, + { + "epoch": 0.857175952739488, + "grad_norm": 1.5010342597961426, + "learning_rate": 0.00014285751058599433, + "loss": 0.3646, + "step": 22200 + }, + { + "epoch": 0.8575620680335148, + "grad_norm": 1.7438324689865112, + "learning_rate": 0.00014283176956639252, + "loss": 0.3093, + "step": 22210 + }, + { + "epoch": 0.8579481833275416, + "grad_norm": 1.5954341888427734, + "learning_rate": 0.00014280602854679076, + "loss": 0.2435, + "step": 22220 + }, + { + "epoch": 0.8583342986215684, + "grad_norm": 2.5279555320739746, + "learning_rate": 0.00014278028752718895, + "loss": 0.2526, + "step": 22230 + }, + { + "epoch": 0.8587204139155952, + "grad_norm": 3.4773006439208984, + "learning_rate": 0.00014275454650758716, + "loss": 0.3763, + "step": 22240 + }, + { + "epoch": 0.8591065292096219, + "grad_norm": 0.25110548734664917, + "learning_rate": 0.0001427288054879854, + "loss": 0.2265, + "step": 22250 + }, + { + "epoch": 0.8594926445036488, + "grad_norm": 2.3060946464538574, + "learning_rate": 0.00014270306446838359, + "loss": 0.3756, + "step": 22260 + }, + { + "epoch": 0.8598787597976756, + "grad_norm": 2.206308364868164, + "learning_rate": 0.00014267732344878183, + "loss": 0.295, + "step": 22270 + }, + { + "epoch": 0.8602648750917024, + "grad_norm": 1.1059858798980713, + "learning_rate": 0.00014265158242918, + "loss": 0.2382, + "step": 22280 + }, + { + "epoch": 0.8606509903857292, + "grad_norm": 0.045407798141241074, + "learning_rate": 0.00014262584140957825, + "loss": 0.1725, + "step": 22290 + }, + { + "epoch": 0.861037105679756, + "grad_norm": 0.2532581686973572, + "learning_rate": 0.00014260010038997644, + "loss": 0.3089, + "step": 22300 + }, + { + "epoch": 0.8614232209737828, + "grad_norm": 0.8851459622383118, + "learning_rate": 0.00014257435937037468, + "loss": 0.2721, + "step": 22310 + }, + { + "epoch": 0.8618093362678095, + "grad_norm": 2.9988598823547363, + "learning_rate": 0.0001425486183507729, + "loss": 0.3854, + "step": 22320 + }, + { + "epoch": 0.8621954515618364, + "grad_norm": 1.888629674911499, + "learning_rate": 0.00014252287733117108, + "loss": 0.4472, + "step": 22330 + }, + { + "epoch": 0.8625815668558632, + "grad_norm": 0.9517232179641724, + "learning_rate": 
0.00014249713631156932, + "loss": 0.3094, + "step": 22340 + }, + { + "epoch": 0.86296768214989, + "grad_norm": 1.4752097129821777, + "learning_rate": 0.0001424713952919675, + "loss": 0.2649, + "step": 22350 + }, + { + "epoch": 0.8633537974439167, + "grad_norm": 1.642285704612732, + "learning_rate": 0.00014244565427236575, + "loss": 0.4418, + "step": 22360 + }, + { + "epoch": 0.8637399127379436, + "grad_norm": 2.2177469730377197, + "learning_rate": 0.00014241991325276396, + "loss": 0.6865, + "step": 22370 + }, + { + "epoch": 0.8641260280319704, + "grad_norm": 0.9089158773422241, + "learning_rate": 0.00014239417223316217, + "loss": 0.1842, + "step": 22380 + }, + { + "epoch": 0.8645121433259971, + "grad_norm": 1.108091115951538, + "learning_rate": 0.00014236843121356039, + "loss": 0.3181, + "step": 22390 + }, + { + "epoch": 0.8648982586200239, + "grad_norm": 3.1317670345306396, + "learning_rate": 0.00014234269019395857, + "loss": 0.4798, + "step": 22400 + }, + { + "epoch": 0.8652843739140508, + "grad_norm": 1.0352108478546143, + "learning_rate": 0.0001423169491743568, + "loss": 0.3473, + "step": 22410 + }, + { + "epoch": 0.8656704892080775, + "grad_norm": 0.48546215891838074, + "learning_rate": 0.000142291208154755, + "loss": 0.2731, + "step": 22420 + }, + { + "epoch": 0.8660566045021043, + "grad_norm": 1.1608140468597412, + "learning_rate": 0.00014226546713515324, + "loss": 0.3202, + "step": 22430 + }, + { + "epoch": 0.8664427197961311, + "grad_norm": 0.19237665832042694, + "learning_rate": 0.00014223972611555145, + "loss": 0.4985, + "step": 22440 + }, + { + "epoch": 0.866828835090158, + "grad_norm": 0.12056539207696915, + "learning_rate": 0.00014221398509594967, + "loss": 0.2071, + "step": 22450 + }, + { + "epoch": 0.8672149503841847, + "grad_norm": 1.416548252105713, + "learning_rate": 0.00014218824407634788, + "loss": 0.2572, + "step": 22460 + }, + { + "epoch": 0.8676010656782115, + "grad_norm": 0.816148042678833, + "learning_rate": 0.00014216250305674607, + "loss": 0.2368, + "step": 22470 + }, + { + "epoch": 0.8679871809722383, + "grad_norm": 3.2394118309020996, + "learning_rate": 0.0001421367620371443, + "loss": 0.3768, + "step": 22480 + }, + { + "epoch": 0.8683732962662651, + "grad_norm": 0.7187336087226868, + "learning_rate": 0.00014211102101754252, + "loss": 0.3297, + "step": 22490 + }, + { + "epoch": 0.8687594115602919, + "grad_norm": 0.5154927372932434, + "learning_rate": 0.00014208527999794073, + "loss": 0.3301, + "step": 22500 + }, + { + "epoch": 0.8691455268543187, + "grad_norm": 1.0461368560791016, + "learning_rate": 0.00014205953897833895, + "loss": 0.335, + "step": 22510 + }, + { + "epoch": 0.8695316421483454, + "grad_norm": 0.9720495343208313, + "learning_rate": 0.00014203379795873716, + "loss": 0.2405, + "step": 22520 + }, + { + "epoch": 0.8699177574423723, + "grad_norm": 2.147216558456421, + "learning_rate": 0.00014200805693913537, + "loss": 0.3291, + "step": 22530 + }, + { + "epoch": 0.8703038727363991, + "grad_norm": 1.162614345550537, + "learning_rate": 0.00014198231591953356, + "loss": 0.2705, + "step": 22540 + }, + { + "epoch": 0.8706899880304259, + "grad_norm": 0.5020268559455872, + "learning_rate": 0.0001419565748999318, + "loss": 0.4403, + "step": 22550 + }, + { + "epoch": 0.8710761033244527, + "grad_norm": 3.198425531387329, + "learning_rate": 0.00014193083388033, + "loss": 0.2244, + "step": 22560 + }, + { + "epoch": 0.8714622186184795, + "grad_norm": 2.4578161239624023, + "learning_rate": 0.00014190509286072823, + "loss": 0.3993, + "step": 22570 + }, + { + 
"epoch": 0.8718483339125063, + "grad_norm": 1.4559924602508545, + "learning_rate": 0.00014187935184112644, + "loss": 0.2631, + "step": 22580 + }, + { + "epoch": 0.872234449206533, + "grad_norm": 0.6834856271743774, + "learning_rate": 0.00014185361082152465, + "loss": 0.3183, + "step": 22590 + }, + { + "epoch": 0.8726205645005599, + "grad_norm": 3.9718177318573, + "learning_rate": 0.00014182786980192287, + "loss": 0.439, + "step": 22600 + }, + { + "epoch": 0.8730066797945867, + "grad_norm": 1.7797685861587524, + "learning_rate": 0.00014180212878232105, + "loss": 0.2938, + "step": 22610 + }, + { + "epoch": 0.8733927950886134, + "grad_norm": 2.137479543685913, + "learning_rate": 0.0001417763877627193, + "loss": 0.4733, + "step": 22620 + }, + { + "epoch": 0.8737789103826402, + "grad_norm": 0.7577596306800842, + "learning_rate": 0.0001417506467431175, + "loss": 0.3599, + "step": 22630 + }, + { + "epoch": 0.8741650256766671, + "grad_norm": 0.8944536447525024, + "learning_rate": 0.00014172490572351572, + "loss": 0.3393, + "step": 22640 + }, + { + "epoch": 0.8745511409706939, + "grad_norm": 1.59170663356781, + "learning_rate": 0.00014169916470391393, + "loss": 0.3578, + "step": 22650 + }, + { + "epoch": 0.8749372562647206, + "grad_norm": 0.9852517247200012, + "learning_rate": 0.00014167342368431215, + "loss": 0.4203, + "step": 22660 + }, + { + "epoch": 0.8753233715587474, + "grad_norm": 1.0319880247116089, + "learning_rate": 0.00014164768266471036, + "loss": 0.3332, + "step": 22670 + }, + { + "epoch": 0.8757094868527743, + "grad_norm": 3.713357925415039, + "learning_rate": 0.00014162194164510857, + "loss": 0.412, + "step": 22680 + }, + { + "epoch": 0.876095602146801, + "grad_norm": 1.3197567462921143, + "learning_rate": 0.00014159620062550679, + "loss": 0.286, + "step": 22690 + }, + { + "epoch": 0.8764817174408278, + "grad_norm": 3.604928493499756, + "learning_rate": 0.000141570459605905, + "loss": 0.4206, + "step": 22700 + }, + { + "epoch": 0.8768678327348546, + "grad_norm": 3.1074795722961426, + "learning_rate": 0.0001415447185863032, + "loss": 0.2261, + "step": 22710 + }, + { + "epoch": 0.8772539480288815, + "grad_norm": 2.855581760406494, + "learning_rate": 0.00014151897756670143, + "loss": 0.6825, + "step": 22720 + }, + { + "epoch": 0.8776400633229082, + "grad_norm": 3.5756995677948, + "learning_rate": 0.00014149323654709964, + "loss": 0.4283, + "step": 22730 + }, + { + "epoch": 0.878026178616935, + "grad_norm": 1.4255709648132324, + "learning_rate": 0.00014146749552749785, + "loss": 0.3243, + "step": 22740 + }, + { + "epoch": 0.8784122939109618, + "grad_norm": 0.3095746338367462, + "learning_rate": 0.00014144175450789607, + "loss": 0.1052, + "step": 22750 + }, + { + "epoch": 0.8787984092049886, + "grad_norm": 1.4129611253738403, + "learning_rate": 0.00014141601348829428, + "loss": 0.2746, + "step": 22760 + }, + { + "epoch": 0.8791845244990154, + "grad_norm": 0.6448315382003784, + "learning_rate": 0.0001413902724686925, + "loss": 0.3312, + "step": 22770 + }, + { + "epoch": 0.8795706397930422, + "grad_norm": 1.6328849792480469, + "learning_rate": 0.0001413645314490907, + "loss": 0.2346, + "step": 22780 + }, + { + "epoch": 0.879956755087069, + "grad_norm": 1.0974128246307373, + "learning_rate": 0.00014133879042948892, + "loss": 0.2446, + "step": 22790 + }, + { + "epoch": 0.8803428703810958, + "grad_norm": 2.3657541275024414, + "learning_rate": 0.00014131304940988713, + "loss": 0.3439, + "step": 22800 + }, + { + "epoch": 0.8807289856751226, + "grad_norm": 0.8959445953369141, + "learning_rate": 
0.00014128730839028535, + "loss": 0.2896, + "step": 22810 + }, + { + "epoch": 0.8811151009691494, + "grad_norm": 1.5202107429504395, + "learning_rate": 0.00014126156737068356, + "loss": 0.1951, + "step": 22820 + }, + { + "epoch": 0.8815012162631762, + "grad_norm": 1.3710687160491943, + "learning_rate": 0.00014123582635108177, + "loss": 0.3193, + "step": 22830 + }, + { + "epoch": 0.881887331557203, + "grad_norm": 2.18868088722229, + "learning_rate": 0.00014121008533147999, + "loss": 0.3535, + "step": 22840 + }, + { + "epoch": 0.8822734468512298, + "grad_norm": 0.8251023888587952, + "learning_rate": 0.0001411843443118782, + "loss": 0.2536, + "step": 22850 + }, + { + "epoch": 0.8826595621452565, + "grad_norm": 1.0674525499343872, + "learning_rate": 0.0001411586032922764, + "loss": 0.3482, + "step": 22860 + }, + { + "epoch": 0.8830456774392834, + "grad_norm": 2.1199145317077637, + "learning_rate": 0.00014113286227267463, + "loss": 0.201, + "step": 22870 + }, + { + "epoch": 0.8834317927333102, + "grad_norm": 1.1958723068237305, + "learning_rate": 0.00014110712125307284, + "loss": 0.2813, + "step": 22880 + }, + { + "epoch": 0.883817908027337, + "grad_norm": 1.7805982828140259, + "learning_rate": 0.00014108138023347105, + "loss": 0.5317, + "step": 22890 + }, + { + "epoch": 0.8842040233213637, + "grad_norm": 1.9648222923278809, + "learning_rate": 0.00014105563921386926, + "loss": 0.3022, + "step": 22900 + }, + { + "epoch": 0.8845901386153906, + "grad_norm": 0.9053369164466858, + "learning_rate": 0.00014102989819426748, + "loss": 0.2886, + "step": 22910 + }, + { + "epoch": 0.8849762539094174, + "grad_norm": 1.251861333847046, + "learning_rate": 0.0001410041571746657, + "loss": 0.2632, + "step": 22920 + }, + { + "epoch": 0.8853623692034441, + "grad_norm": 3.8411691188812256, + "learning_rate": 0.0001409784161550639, + "loss": 0.3056, + "step": 22930 + }, + { + "epoch": 0.8857484844974709, + "grad_norm": 0.5969072580337524, + "learning_rate": 0.00014095267513546212, + "loss": 0.3806, + "step": 22940 + }, + { + "epoch": 0.8861345997914978, + "grad_norm": 4.688140392303467, + "learning_rate": 0.00014092693411586033, + "loss": 0.3318, + "step": 22950 + }, + { + "epoch": 0.8865207150855245, + "grad_norm": 4.2694993019104, + "learning_rate": 0.00014090119309625854, + "loss": 0.34, + "step": 22960 + }, + { + "epoch": 0.8869068303795513, + "grad_norm": 2.4169955253601074, + "learning_rate": 0.00014087545207665676, + "loss": 0.466, + "step": 22970 + }, + { + "epoch": 0.8872929456735781, + "grad_norm": 0.044384077191352844, + "learning_rate": 0.00014084971105705497, + "loss": 0.3196, + "step": 22980 + }, + { + "epoch": 0.887679060967605, + "grad_norm": 0.6931707262992859, + "learning_rate": 0.0001408239700374532, + "loss": 0.2166, + "step": 22990 + }, + { + "epoch": 0.8880651762616317, + "grad_norm": 3.195596933364868, + "learning_rate": 0.0001407982290178514, + "loss": 0.404, + "step": 23000 + }, + { + "epoch": 0.8884512915556585, + "grad_norm": 2.055058002471924, + "learning_rate": 0.0001407724879982496, + "loss": 0.3685, + "step": 23010 + }, + { + "epoch": 0.8888374068496853, + "grad_norm": 0.7302665114402771, + "learning_rate": 0.00014074674697864782, + "loss": 0.2247, + "step": 23020 + }, + { + "epoch": 0.8892235221437121, + "grad_norm": 1.1183364391326904, + "learning_rate": 0.00014072100595904604, + "loss": 0.3699, + "step": 23030 + }, + { + "epoch": 0.8896096374377389, + "grad_norm": 1.040553092956543, + "learning_rate": 0.00014069526493944425, + "loss": 0.1791, + "step": 23040 + }, + { + "epoch": 
0.8899957527317657, + "grad_norm": 1.5321402549743652, + "learning_rate": 0.00014066952391984246, + "loss": 0.2533, + "step": 23050 + }, + { + "epoch": 0.8903818680257926, + "grad_norm": 0.298433780670166, + "learning_rate": 0.0001406437829002407, + "loss": 0.2291, + "step": 23060 + }, + { + "epoch": 0.8907679833198193, + "grad_norm": 2.563689947128296, + "learning_rate": 0.0001406180418806389, + "loss": 0.4175, + "step": 23070 + }, + { + "epoch": 0.8911540986138461, + "grad_norm": 3.0614495277404785, + "learning_rate": 0.00014059230086103713, + "loss": 0.2875, + "step": 23080 + }, + { + "epoch": 0.8915402139078729, + "grad_norm": 0.4387970268726349, + "learning_rate": 0.00014056655984143532, + "loss": 0.2982, + "step": 23090 + }, + { + "epoch": 0.8919263292018997, + "grad_norm": 2.5590367317199707, + "learning_rate": 0.00014054081882183353, + "loss": 0.2858, + "step": 23100 + }, + { + "epoch": 0.8923124444959265, + "grad_norm": 0.6369298696517944, + "learning_rate": 0.00014051507780223174, + "loss": 0.2889, + "step": 23110 + }, + { + "epoch": 0.8926985597899533, + "grad_norm": 0.5210187435150146, + "learning_rate": 0.00014048933678262996, + "loss": 0.2442, + "step": 23120 + }, + { + "epoch": 0.89308467508398, + "grad_norm": 3.5073516368865967, + "learning_rate": 0.0001404635957630282, + "loss": 0.1733, + "step": 23130 + }, + { + "epoch": 0.8934707903780069, + "grad_norm": 2.1915247440338135, + "learning_rate": 0.00014043785474342638, + "loss": 0.4282, + "step": 23140 + }, + { + "epoch": 0.8938569056720337, + "grad_norm": 0.8182128071784973, + "learning_rate": 0.00014041211372382462, + "loss": 0.5501, + "step": 23150 + }, + { + "epoch": 0.8942430209660605, + "grad_norm": 0.4261817932128906, + "learning_rate": 0.0001403863727042228, + "loss": 0.2192, + "step": 23160 + }, + { + "epoch": 0.8946291362600872, + "grad_norm": 1.244523525238037, + "learning_rate": 0.00014036063168462102, + "loss": 0.2909, + "step": 23170 + }, + { + "epoch": 0.8950152515541141, + "grad_norm": 1.1659152507781982, + "learning_rate": 0.00014033489066501926, + "loss": 0.3119, + "step": 23180 + }, + { + "epoch": 0.8954013668481409, + "grad_norm": 0.4217310845851898, + "learning_rate": 0.00014030914964541745, + "loss": 0.2454, + "step": 23190 + }, + { + "epoch": 0.8957874821421676, + "grad_norm": 1.5685316324234009, + "learning_rate": 0.0001402834086258157, + "loss": 0.3609, + "step": 23200 + }, + { + "epoch": 0.8961735974361944, + "grad_norm": 2.6524040699005127, + "learning_rate": 0.00014025766760621388, + "loss": 0.2508, + "step": 23210 + }, + { + "epoch": 0.8965597127302213, + "grad_norm": 2.4932234287261963, + "learning_rate": 0.00014023192658661212, + "loss": 0.4047, + "step": 23220 + }, + { + "epoch": 0.896945828024248, + "grad_norm": 0.5093832015991211, + "learning_rate": 0.0001402061855670103, + "loss": 0.1639, + "step": 23230 + }, + { + "epoch": 0.8973319433182748, + "grad_norm": 1.1632994413375854, + "learning_rate": 0.00014018044454740852, + "loss": 0.2457, + "step": 23240 + }, + { + "epoch": 0.8977180586123016, + "grad_norm": 2.181727647781372, + "learning_rate": 0.00014015470352780676, + "loss": 0.3784, + "step": 23250 + }, + { + "epoch": 0.8981041739063285, + "grad_norm": 3.8110599517822266, + "learning_rate": 0.00014012896250820494, + "loss": 0.282, + "step": 23260 + }, + { + "epoch": 0.8984902892003552, + "grad_norm": 2.8994619846343994, + "learning_rate": 0.00014010322148860318, + "loss": 0.3801, + "step": 23270 + }, + { + "epoch": 0.898876404494382, + "grad_norm": 1.2624458074569702, + "learning_rate": 
0.00014007748046900137, + "loss": 0.3718, + "step": 23280 + }, + { + "epoch": 0.8992625197884088, + "grad_norm": 1.5995053052902222, + "learning_rate": 0.0001400517394493996, + "loss": 0.1828, + "step": 23290 + }, + { + "epoch": 0.8996486350824356, + "grad_norm": 2.251941680908203, + "learning_rate": 0.0001400259984297978, + "loss": 0.4434, + "step": 23300 + }, + { + "epoch": 0.9000347503764624, + "grad_norm": 1.1319392919540405, + "learning_rate": 0.000140000257410196, + "loss": 0.2479, + "step": 23310 + }, + { + "epoch": 0.9004208656704892, + "grad_norm": 2.075227737426758, + "learning_rate": 0.00013997451639059425, + "loss": 0.3079, + "step": 23320 + }, + { + "epoch": 0.9008069809645161, + "grad_norm": 0.6504748463630676, + "learning_rate": 0.00013994877537099244, + "loss": 0.2899, + "step": 23330 + }, + { + "epoch": 0.9011930962585428, + "grad_norm": 2.1623177528381348, + "learning_rate": 0.00013992303435139068, + "loss": 0.2651, + "step": 23340 + }, + { + "epoch": 0.9015792115525696, + "grad_norm": 2.159290313720703, + "learning_rate": 0.00013989729333178886, + "loss": 0.2975, + "step": 23350 + }, + { + "epoch": 0.9019653268465964, + "grad_norm": 0.7650458216667175, + "learning_rate": 0.0001398715523121871, + "loss": 0.2699, + "step": 23360 + }, + { + "epoch": 0.9023514421406232, + "grad_norm": 4.838365077972412, + "learning_rate": 0.00013984581129258532, + "loss": 0.4633, + "step": 23370 + }, + { + "epoch": 0.90273755743465, + "grad_norm": 4.598055362701416, + "learning_rate": 0.0001398200702729835, + "loss": 0.5255, + "step": 23380 + }, + { + "epoch": 0.9031236727286768, + "grad_norm": 0.9883280396461487, + "learning_rate": 0.00013979432925338174, + "loss": 0.5096, + "step": 23390 + }, + { + "epoch": 0.9035097880227035, + "grad_norm": 2.1574087142944336, + "learning_rate": 0.00013976858823377993, + "loss": 0.2999, + "step": 23400 + }, + { + "epoch": 0.9038959033167304, + "grad_norm": 1.7071588039398193, + "learning_rate": 0.00013974284721417817, + "loss": 0.4066, + "step": 23410 + }, + { + "epoch": 0.9042820186107572, + "grad_norm": 1.5206272602081299, + "learning_rate": 0.00013971710619457636, + "loss": 0.2841, + "step": 23420 + }, + { + "epoch": 0.904668133904784, + "grad_norm": 0.8649633526802063, + "learning_rate": 0.0001396913651749746, + "loss": 0.2263, + "step": 23430 + }, + { + "epoch": 0.9050542491988107, + "grad_norm": 0.35130754113197327, + "learning_rate": 0.0001396656241553728, + "loss": 0.3575, + "step": 23440 + }, + { + "epoch": 0.9054403644928376, + "grad_norm": 0.6659330725669861, + "learning_rate": 0.000139639883135771, + "loss": 0.2895, + "step": 23450 + }, + { + "epoch": 0.9058264797868644, + "grad_norm": 1.1387370824813843, + "learning_rate": 0.00013961414211616924, + "loss": 0.3574, + "step": 23460 + }, + { + "epoch": 0.9062125950808911, + "grad_norm": 1.8786828517913818, + "learning_rate": 0.00013958840109656742, + "loss": 0.5127, + "step": 23470 + }, + { + "epoch": 0.9065987103749179, + "grad_norm": 1.1299179792404175, + "learning_rate": 0.00013956266007696566, + "loss": 0.2215, + "step": 23480 + }, + { + "epoch": 0.9069848256689448, + "grad_norm": 1.1256846189498901, + "learning_rate": 0.00013953691905736388, + "loss": 0.302, + "step": 23490 + }, + { + "epoch": 0.9073709409629716, + "grad_norm": 0.8697860836982727, + "learning_rate": 0.0001395111780377621, + "loss": 0.2846, + "step": 23500 + }, + { + "epoch": 0.9077570562569983, + "grad_norm": 1.4939324855804443, + "learning_rate": 0.0001394854370181603, + "loss": 0.2376, + "step": 23510 + }, + { + "epoch": 
0.9081431715510251, + "grad_norm": 0.5483170747756958, + "learning_rate": 0.00013945969599855852, + "loss": 0.2546, + "step": 23520 + }, + { + "epoch": 0.908529286845052, + "grad_norm": 1.0681931972503662, + "learning_rate": 0.00013943395497895673, + "loss": 0.2236, + "step": 23530 + }, + { + "epoch": 0.9089154021390787, + "grad_norm": 1.9246234893798828, + "learning_rate": 0.00013940821395935492, + "loss": 0.3332, + "step": 23540 + }, + { + "epoch": 0.9093015174331055, + "grad_norm": 6.114970684051514, + "learning_rate": 0.00013938247293975316, + "loss": 0.3265, + "step": 23550 + }, + { + "epoch": 0.9096876327271323, + "grad_norm": 2.369112968444824, + "learning_rate": 0.00013935673192015137, + "loss": 0.3105, + "step": 23560 + }, + { + "epoch": 0.9100737480211591, + "grad_norm": 4.402872562408447, + "learning_rate": 0.00013933099090054958, + "loss": 0.3496, + "step": 23570 + }, + { + "epoch": 0.9104598633151859, + "grad_norm": 0.6064890027046204, + "learning_rate": 0.0001393052498809478, + "loss": 0.1322, + "step": 23580 + }, + { + "epoch": 0.9108459786092127, + "grad_norm": 0.41702714562416077, + "learning_rate": 0.000139279508861346, + "loss": 0.0886, + "step": 23590 + }, + { + "epoch": 0.9112320939032396, + "grad_norm": 1.1597472429275513, + "learning_rate": 0.00013925376784174422, + "loss": 0.1967, + "step": 23600 + }, + { + "epoch": 0.9116182091972663, + "grad_norm": 1.1049001216888428, + "learning_rate": 0.0001392280268221424, + "loss": 0.2802, + "step": 23610 + }, + { + "epoch": 0.9120043244912931, + "grad_norm": 0.7986807227134705, + "learning_rate": 0.00013920228580254065, + "loss": 0.1872, + "step": 23620 + }, + { + "epoch": 0.9123904397853199, + "grad_norm": 0.548693060874939, + "learning_rate": 0.00013917654478293886, + "loss": 0.3561, + "step": 23630 + }, + { + "epoch": 0.9127765550793467, + "grad_norm": 1.5944240093231201, + "learning_rate": 0.00013915080376333708, + "loss": 0.4619, + "step": 23640 + }, + { + "epoch": 0.9131626703733735, + "grad_norm": 1.9891632795333862, + "learning_rate": 0.0001391250627437353, + "loss": 0.3883, + "step": 23650 + }, + { + "epoch": 0.9135487856674003, + "grad_norm": 0.4564145803451538, + "learning_rate": 0.0001390993217241335, + "loss": 0.233, + "step": 23660 + }, + { + "epoch": 0.913934900961427, + "grad_norm": 1.1683684587478638, + "learning_rate": 0.00013907358070453172, + "loss": 0.4692, + "step": 23670 + }, + { + "epoch": 0.9143210162554539, + "grad_norm": 5.883500099182129, + "learning_rate": 0.00013904783968492993, + "loss": 0.2134, + "step": 23680 + }, + { + "epoch": 0.9147071315494807, + "grad_norm": 0.7426010370254517, + "learning_rate": 0.00013902209866532814, + "loss": 0.3608, + "step": 23690 + }, + { + "epoch": 0.9150932468435075, + "grad_norm": 1.5476068258285522, + "learning_rate": 0.00013899635764572636, + "loss": 0.2194, + "step": 23700 + }, + { + "epoch": 0.9154793621375342, + "grad_norm": 1.5702605247497559, + "learning_rate": 0.00013897061662612457, + "loss": 0.1731, + "step": 23710 + }, + { + "epoch": 0.9158654774315611, + "grad_norm": 2.336073637008667, + "learning_rate": 0.00013894487560652278, + "loss": 0.2703, + "step": 23720 + }, + { + "epoch": 0.9162515927255879, + "grad_norm": 0.4154629111289978, + "learning_rate": 0.000138919134586921, + "loss": 0.2601, + "step": 23730 + }, + { + "epoch": 0.9166377080196146, + "grad_norm": 1.9994091987609863, + "learning_rate": 0.0001388933935673192, + "loss": 0.4536, + "step": 23740 + }, + { + "epoch": 0.9170238233136414, + "grad_norm": 0.4610597491264343, + "learning_rate": 
0.00013886765254771742, + "loss": 0.2726, + "step": 23750 + }, + { + "epoch": 0.9174099386076683, + "grad_norm": 2.19671893119812, + "learning_rate": 0.00013884191152811564, + "loss": 0.3418, + "step": 23760 + }, + { + "epoch": 0.917796053901695, + "grad_norm": 0.619023323059082, + "learning_rate": 0.00013881617050851385, + "loss": 0.2761, + "step": 23770 + }, + { + "epoch": 0.9181821691957218, + "grad_norm": 1.667083978652954, + "learning_rate": 0.00013879042948891206, + "loss": 0.2341, + "step": 23780 + }, + { + "epoch": 0.9185682844897486, + "grad_norm": 0.349020391702652, + "learning_rate": 0.00013876468846931028, + "loss": 0.244, + "step": 23790 + }, + { + "epoch": 0.9189543997837755, + "grad_norm": 3.2495415210723877, + "learning_rate": 0.0001387389474497085, + "loss": 0.2454, + "step": 23800 + }, + { + "epoch": 0.9193405150778022, + "grad_norm": 0.7900146842002869, + "learning_rate": 0.0001387132064301067, + "loss": 0.3209, + "step": 23810 + }, + { + "epoch": 0.919726630371829, + "grad_norm": 1.2435237169265747, + "learning_rate": 0.00013868746541050492, + "loss": 0.3719, + "step": 23820 + }, + { + "epoch": 0.9201127456658558, + "grad_norm": 0.7372536659240723, + "learning_rate": 0.00013866172439090313, + "loss": 0.2984, + "step": 23830 + }, + { + "epoch": 0.9204988609598826, + "grad_norm": 2.814180374145508, + "learning_rate": 0.00013863598337130134, + "loss": 0.5081, + "step": 23840 + }, + { + "epoch": 0.9208849762539094, + "grad_norm": 3.5411558151245117, + "learning_rate": 0.00013861024235169956, + "loss": 0.4475, + "step": 23850 + }, + { + "epoch": 0.9212710915479362, + "grad_norm": 0.41628485918045044, + "learning_rate": 0.00013858450133209777, + "loss": 0.1679, + "step": 23860 + }, + { + "epoch": 0.9216572068419631, + "grad_norm": 0.7951272130012512, + "learning_rate": 0.00013855876031249598, + "loss": 0.4346, + "step": 23870 + }, + { + "epoch": 0.9220433221359898, + "grad_norm": 0.6857497692108154, + "learning_rate": 0.0001385330192928942, + "loss": 0.2262, + "step": 23880 + }, + { + "epoch": 0.9224294374300166, + "grad_norm": 2.732487678527832, + "learning_rate": 0.0001385072782732924, + "loss": 0.3198, + "step": 23890 + }, + { + "epoch": 0.9228155527240434, + "grad_norm": 0.18741728365421295, + "learning_rate": 0.00013848153725369062, + "loss": 0.2409, + "step": 23900 + }, + { + "epoch": 0.9232016680180702, + "grad_norm": 0.46343281865119934, + "learning_rate": 0.00013845579623408884, + "loss": 0.2379, + "step": 23910 + }, + { + "epoch": 0.923587783312097, + "grad_norm": 1.7090940475463867, + "learning_rate": 0.00013843005521448705, + "loss": 0.2274, + "step": 23920 + }, + { + "epoch": 0.9239738986061238, + "grad_norm": 0.2000303715467453, + "learning_rate": 0.00013840431419488526, + "loss": 0.5171, + "step": 23930 + }, + { + "epoch": 0.9243600139001505, + "grad_norm": 1.6152868270874023, + "learning_rate": 0.00013837857317528348, + "loss": 0.1348, + "step": 23940 + }, + { + "epoch": 0.9247461291941774, + "grad_norm": 1.5346245765686035, + "learning_rate": 0.0001383528321556817, + "loss": 0.3708, + "step": 23950 + }, + { + "epoch": 0.9251322444882042, + "grad_norm": 2.1073787212371826, + "learning_rate": 0.0001383270911360799, + "loss": 0.3694, + "step": 23960 + }, + { + "epoch": 0.925518359782231, + "grad_norm": 12.8298921585083, + "learning_rate": 0.00013830135011647812, + "loss": 0.269, + "step": 23970 + }, + { + "epoch": 0.9259044750762577, + "grad_norm": 0.43689021468162537, + "learning_rate": 0.00013827560909687633, + "loss": 0.3099, + "step": 23980 + }, + { + 
"epoch": 0.9262905903702846, + "grad_norm": 2.084096908569336, + "learning_rate": 0.00013824986807727457, + "loss": 0.4423, + "step": 23990 + }, + { + "epoch": 0.9266767056643114, + "grad_norm": 0.9367966651916504, + "learning_rate": 0.00013822412705767276, + "loss": 0.2202, + "step": 24000 + }, + { + "epoch": 0.9270628209583381, + "grad_norm": 0.14286178350448608, + "learning_rate": 0.00013819838603807097, + "loss": 0.2302, + "step": 24010 + }, + { + "epoch": 0.9274489362523649, + "grad_norm": 0.7110779285430908, + "learning_rate": 0.00013817264501846918, + "loss": 0.3598, + "step": 24020 + }, + { + "epoch": 0.9278350515463918, + "grad_norm": 2.352980136871338, + "learning_rate": 0.0001381469039988674, + "loss": 0.4493, + "step": 24030 + }, + { + "epoch": 0.9282211668404186, + "grad_norm": 2.2235450744628906, + "learning_rate": 0.0001381211629792656, + "loss": 0.547, + "step": 24040 + }, + { + "epoch": 0.9286072821344453, + "grad_norm": 2.4419260025024414, + "learning_rate": 0.00013809542195966382, + "loss": 0.4612, + "step": 24050 + }, + { + "epoch": 0.9289933974284721, + "grad_norm": 1.3784935474395752, + "learning_rate": 0.00013806968094006206, + "loss": 0.3996, + "step": 24060 + }, + { + "epoch": 0.929379512722499, + "grad_norm": 1.019810676574707, + "learning_rate": 0.00013804393992046025, + "loss": 0.5654, + "step": 24070 + }, + { + "epoch": 0.9297656280165257, + "grad_norm": 2.399096965789795, + "learning_rate": 0.00013801819890085846, + "loss": 0.3335, + "step": 24080 + }, + { + "epoch": 0.9301517433105525, + "grad_norm": 0.28834161162376404, + "learning_rate": 0.00013799245788125668, + "loss": 0.4946, + "step": 24090 + }, + { + "epoch": 0.9305378586045794, + "grad_norm": 0.10426662117242813, + "learning_rate": 0.0001379667168616549, + "loss": 0.3275, + "step": 24100 + }, + { + "epoch": 0.9309239738986061, + "grad_norm": 2.7809340953826904, + "learning_rate": 0.0001379409758420531, + "loss": 0.3549, + "step": 24110 + }, + { + "epoch": 0.9313100891926329, + "grad_norm": 1.8708065748214722, + "learning_rate": 0.00013791523482245132, + "loss": 0.1429, + "step": 24120 + }, + { + "epoch": 0.9316962044866597, + "grad_norm": 1.9992274045944214, + "learning_rate": 0.00013788949380284956, + "loss": 0.2818, + "step": 24130 + }, + { + "epoch": 0.9320823197806866, + "grad_norm": 1.3665111064910889, + "learning_rate": 0.00013786375278324774, + "loss": 0.3565, + "step": 24140 + }, + { + "epoch": 0.9324684350747133, + "grad_norm": 3.106234550476074, + "learning_rate": 0.00013783801176364596, + "loss": 0.3706, + "step": 24150 + }, + { + "epoch": 0.9328545503687401, + "grad_norm": 1.2186559438705444, + "learning_rate": 0.00013781227074404417, + "loss": 0.1616, + "step": 24160 + }, + { + "epoch": 0.9332406656627669, + "grad_norm": 0.41551148891448975, + "learning_rate": 0.00013778652972444238, + "loss": 0.3073, + "step": 24170 + }, + { + "epoch": 0.9336267809567937, + "grad_norm": 2.015069007873535, + "learning_rate": 0.00013776078870484062, + "loss": 0.3317, + "step": 24180 + }, + { + "epoch": 0.9340128962508205, + "grad_norm": 1.9990328550338745, + "learning_rate": 0.0001377350476852388, + "loss": 0.4937, + "step": 24190 + }, + { + "epoch": 0.9343990115448473, + "grad_norm": 2.351898670196533, + "learning_rate": 0.00013770930666563705, + "loss": 0.3994, + "step": 24200 + }, + { + "epoch": 0.934785126838874, + "grad_norm": 1.4670008420944214, + "learning_rate": 0.00013768356564603524, + "loss": 0.2905, + "step": 24210 + }, + { + "epoch": 0.9351712421329009, + "grad_norm": 0.9890618920326233, + 
"learning_rate": 0.00013765782462643345, + "loss": 0.2512, + "step": 24220 + }, + { + "epoch": 0.9355573574269277, + "grad_norm": 0.3020402491092682, + "learning_rate": 0.00013763208360683166, + "loss": 0.3701, + "step": 24230 + }, + { + "epoch": 0.9359434727209545, + "grad_norm": 0.42218661308288574, + "learning_rate": 0.00013760634258722988, + "loss": 0.3395, + "step": 24240 + }, + { + "epoch": 0.9363295880149812, + "grad_norm": 1.5767306089401245, + "learning_rate": 0.00013758060156762812, + "loss": 0.3941, + "step": 24250 + }, + { + "epoch": 0.9367157033090081, + "grad_norm": 0.5416197180747986, + "learning_rate": 0.0001375548605480263, + "loss": 0.2915, + "step": 24260 + }, + { + "epoch": 0.9371018186030349, + "grad_norm": 1.175347924232483, + "learning_rate": 0.00013752911952842454, + "loss": 0.1284, + "step": 24270 + }, + { + "epoch": 0.9374879338970616, + "grad_norm": 0.8719255924224854, + "learning_rate": 0.00013750337850882273, + "loss": 0.3144, + "step": 24280 + }, + { + "epoch": 0.9378740491910884, + "grad_norm": 0.2711631655693054, + "learning_rate": 0.00013747763748922097, + "loss": 0.1561, + "step": 24290 + }, + { + "epoch": 0.9382601644851153, + "grad_norm": 3.2228004932403564, + "learning_rate": 0.00013745189646961918, + "loss": 0.5494, + "step": 24300 + }, + { + "epoch": 0.9386462797791421, + "grad_norm": 0.05647150054574013, + "learning_rate": 0.00013742615545001737, + "loss": 0.3571, + "step": 24310 + }, + { + "epoch": 0.9390323950731688, + "grad_norm": 0.8459005951881409, + "learning_rate": 0.0001374004144304156, + "loss": 0.1948, + "step": 24320 + }, + { + "epoch": 0.9394185103671956, + "grad_norm": 0.7044252157211304, + "learning_rate": 0.0001373746734108138, + "loss": 0.2511, + "step": 24330 + }, + { + "epoch": 0.9398046256612225, + "grad_norm": 0.979590654373169, + "learning_rate": 0.00013734893239121204, + "loss": 0.2158, + "step": 24340 + }, + { + "epoch": 0.9401907409552492, + "grad_norm": 1.0585628747940063, + "learning_rate": 0.00013732319137161022, + "loss": 0.1339, + "step": 24350 + }, + { + "epoch": 0.940576856249276, + "grad_norm": 0.6700488328933716, + "learning_rate": 0.00013729745035200846, + "loss": 0.2056, + "step": 24360 + }, + { + "epoch": 0.9409629715433029, + "grad_norm": 1.6656709909439087, + "learning_rate": 0.00013727170933240668, + "loss": 0.2957, + "step": 24370 + }, + { + "epoch": 0.9413490868373297, + "grad_norm": 2.963427782058716, + "learning_rate": 0.00013724596831280486, + "loss": 0.2673, + "step": 24380 + }, + { + "epoch": 0.9417352021313564, + "grad_norm": 0.8103615641593933, + "learning_rate": 0.0001372202272932031, + "loss": 0.372, + "step": 24390 + }, + { + "epoch": 0.9421213174253832, + "grad_norm": 2.3593873977661133, + "learning_rate": 0.0001371944862736013, + "loss": 0.431, + "step": 24400 + }, + { + "epoch": 0.9425074327194101, + "grad_norm": 2.8472931385040283, + "learning_rate": 0.00013716874525399953, + "loss": 0.4584, + "step": 24410 + }, + { + "epoch": 0.9428935480134368, + "grad_norm": 1.2765402793884277, + "learning_rate": 0.00013714300423439772, + "loss": 0.2543, + "step": 24420 + }, + { + "epoch": 0.9432796633074636, + "grad_norm": 1.4226797819137573, + "learning_rate": 0.00013711726321479596, + "loss": 0.2914, + "step": 24430 + }, + { + "epoch": 0.9436657786014904, + "grad_norm": 6.906572341918945, + "learning_rate": 0.00013709152219519417, + "loss": 0.4415, + "step": 24440 + }, + { + "epoch": 0.9440518938955172, + "grad_norm": 1.8387972116470337, + "learning_rate": 0.00013706578117559236, + "loss": 0.3018, + "step": 
24450 + }, + { + "epoch": 0.944438009189544, + "grad_norm": 0.7259104251861572, + "learning_rate": 0.0001370400401559906, + "loss": 0.2356, + "step": 24460 + }, + { + "epoch": 0.9448241244835708, + "grad_norm": 0.6452949643135071, + "learning_rate": 0.00013701429913638878, + "loss": 0.2382, + "step": 24470 + }, + { + "epoch": 0.9452102397775975, + "grad_norm": 4.259208679199219, + "learning_rate": 0.00013698855811678702, + "loss": 0.4736, + "step": 24480 + }, + { + "epoch": 0.9455963550716244, + "grad_norm": 2.7305455207824707, + "learning_rate": 0.00013696281709718524, + "loss": 0.5034, + "step": 24490 + }, + { + "epoch": 0.9459824703656512, + "grad_norm": 0.6123724579811096, + "learning_rate": 0.00013693707607758345, + "loss": 0.3638, + "step": 24500 + }, + { + "epoch": 0.946368585659678, + "grad_norm": 0.9821889400482178, + "learning_rate": 0.00013691133505798166, + "loss": 0.3081, + "step": 24510 + }, + { + "epoch": 0.9467547009537047, + "grad_norm": 2.0870277881622314, + "learning_rate": 0.00013688559403837985, + "loss": 0.3116, + "step": 24520 + }, + { + "epoch": 0.9471408162477316, + "grad_norm": 2.495162010192871, + "learning_rate": 0.0001368598530187781, + "loss": 0.3879, + "step": 24530 + }, + { + "epoch": 0.9475269315417584, + "grad_norm": 1.4834142923355103, + "learning_rate": 0.00013683411199917628, + "loss": 0.1975, + "step": 24540 + }, + { + "epoch": 0.9479130468357851, + "grad_norm": 0.10831606388092041, + "learning_rate": 0.00013680837097957452, + "loss": 0.3504, + "step": 24550 + }, + { + "epoch": 0.9482991621298119, + "grad_norm": 1.6975635290145874, + "learning_rate": 0.00013678262995997273, + "loss": 0.3305, + "step": 24560 + }, + { + "epoch": 0.9486852774238388, + "grad_norm": 1.982422947883606, + "learning_rate": 0.00013675688894037094, + "loss": 0.3693, + "step": 24570 + }, + { + "epoch": 0.9490713927178656, + "grad_norm": 0.3487630784511566, + "learning_rate": 0.00013673114792076916, + "loss": 0.1912, + "step": 24580 + }, + { + "epoch": 0.9494575080118923, + "grad_norm": 0.4546245336532593, + "learning_rate": 0.00013670540690116734, + "loss": 0.2519, + "step": 24590 + }, + { + "epoch": 0.9498436233059191, + "grad_norm": 0.42953622341156006, + "learning_rate": 0.00013667966588156558, + "loss": 0.1967, + "step": 24600 + }, + { + "epoch": 0.950229738599946, + "grad_norm": 0.8197507858276367, + "learning_rate": 0.00013665392486196377, + "loss": 0.3057, + "step": 24610 + }, + { + "epoch": 0.9506158538939727, + "grad_norm": 0.8627083897590637, + "learning_rate": 0.000136628183842362, + "loss": 0.1382, + "step": 24620 + }, + { + "epoch": 0.9510019691879995, + "grad_norm": 1.0003200769424438, + "learning_rate": 0.00013660244282276022, + "loss": 0.1608, + "step": 24630 + }, + { + "epoch": 0.9513880844820264, + "grad_norm": 0.4473998546600342, + "learning_rate": 0.00013657670180315844, + "loss": 0.2226, + "step": 24640 + }, + { + "epoch": 0.9517741997760532, + "grad_norm": 1.9413338899612427, + "learning_rate": 0.00013655096078355665, + "loss": 0.2323, + "step": 24650 + }, + { + "epoch": 0.9521603150700799, + "grad_norm": 1.3250267505645752, + "learning_rate": 0.00013652521976395483, + "loss": 0.1617, + "step": 24660 + }, + { + "epoch": 0.9525464303641067, + "grad_norm": 1.756535530090332, + "learning_rate": 0.00013649947874435308, + "loss": 0.2727, + "step": 24670 + }, + { + "epoch": 0.9529325456581336, + "grad_norm": 1.1905356645584106, + "learning_rate": 0.0001364737377247513, + "loss": 0.3169, + "step": 24680 + }, + { + "epoch": 0.9533186609521603, + "grad_norm": 
0.8116361498832703, + "learning_rate": 0.0001364479967051495, + "loss": 0.3999, + "step": 24690 + }, + { + "epoch": 0.9537047762461871, + "grad_norm": 0.8906353712081909, + "learning_rate": 0.00013642225568554772, + "loss": 0.3499, + "step": 24700 + }, + { + "epoch": 0.9540908915402139, + "grad_norm": 0.7422589659690857, + "learning_rate": 0.00013639651466594593, + "loss": 0.3544, + "step": 24710 + }, + { + "epoch": 0.9544770068342407, + "grad_norm": 0.21925519406795502, + "learning_rate": 0.00013637077364634414, + "loss": 0.1957, + "step": 24720 + }, + { + "epoch": 0.9548631221282675, + "grad_norm": 3.2993857860565186, + "learning_rate": 0.00013634503262674236, + "loss": 0.2625, + "step": 24730 + }, + { + "epoch": 0.9552492374222943, + "grad_norm": 1.4352943897247314, + "learning_rate": 0.00013631929160714057, + "loss": 0.1655, + "step": 24740 + }, + { + "epoch": 0.955635352716321, + "grad_norm": 1.4417182207107544, + "learning_rate": 0.00013629355058753878, + "loss": 0.2442, + "step": 24750 + }, + { + "epoch": 0.9560214680103479, + "grad_norm": 0.5024278163909912, + "learning_rate": 0.000136267809567937, + "loss": 0.2035, + "step": 24760 + }, + { + "epoch": 0.9564075833043747, + "grad_norm": 1.553472638130188, + "learning_rate": 0.0001362420685483352, + "loss": 0.22, + "step": 24770 + }, + { + "epoch": 0.9567936985984015, + "grad_norm": 1.655155897140503, + "learning_rate": 0.00013621632752873342, + "loss": 0.4051, + "step": 24780 + }, + { + "epoch": 0.9571798138924282, + "grad_norm": 0.4435586631298065, + "learning_rate": 0.00013619058650913164, + "loss": 0.3032, + "step": 24790 + }, + { + "epoch": 0.9575659291864551, + "grad_norm": 0.38589465618133545, + "learning_rate": 0.00013616484548952985, + "loss": 0.2062, + "step": 24800 + }, + { + "epoch": 0.9579520444804819, + "grad_norm": 0.6986583471298218, + "learning_rate": 0.00013613910446992806, + "loss": 0.6371, + "step": 24810 + }, + { + "epoch": 0.9583381597745086, + "grad_norm": 1.69257652759552, + "learning_rate": 0.00013611336345032627, + "loss": 0.2425, + "step": 24820 + }, + { + "epoch": 0.9587242750685354, + "grad_norm": 0.6136781573295593, + "learning_rate": 0.0001360876224307245, + "loss": 0.313, + "step": 24830 + }, + { + "epoch": 0.9591103903625623, + "grad_norm": 0.9019533395767212, + "learning_rate": 0.0001360618814111227, + "loss": 0.3723, + "step": 24840 + }, + { + "epoch": 0.9594965056565891, + "grad_norm": 4.601387977600098, + "learning_rate": 0.00013603614039152091, + "loss": 0.4228, + "step": 24850 + }, + { + "epoch": 0.9598826209506158, + "grad_norm": 0.35048994421958923, + "learning_rate": 0.00013601039937191913, + "loss": 0.2423, + "step": 24860 + }, + { + "epoch": 0.9602687362446426, + "grad_norm": 1.7177300453186035, + "learning_rate": 0.00013598465835231734, + "loss": 0.3995, + "step": 24870 + }, + { + "epoch": 0.9606548515386695, + "grad_norm": 1.3341178894042969, + "learning_rate": 0.00013595891733271555, + "loss": 0.3422, + "step": 24880 + }, + { + "epoch": 0.9610409668326962, + "grad_norm": 1.1859056949615479, + "learning_rate": 0.00013593317631311377, + "loss": 0.4204, + "step": 24890 + }, + { + "epoch": 0.961427082126723, + "grad_norm": 1.9831022024154663, + "learning_rate": 0.00013590743529351198, + "loss": 0.3623, + "step": 24900 + }, + { + "epoch": 0.9618131974207499, + "grad_norm": 0.8704162240028381, + "learning_rate": 0.0001358816942739102, + "loss": 0.2664, + "step": 24910 + }, + { + "epoch": 0.9621993127147767, + "grad_norm": 2.2464160919189453, + "learning_rate": 0.0001358559532543084, + "loss": 
0.3628, + "step": 24920 + }, + { + "epoch": 0.9625854280088034, + "grad_norm": 2.5081570148468018, + "learning_rate": 0.00013583021223470662, + "loss": 0.314, + "step": 24930 + }, + { + "epoch": 0.9629715433028302, + "grad_norm": 4.44802713394165, + "learning_rate": 0.00013580447121510483, + "loss": 0.4593, + "step": 24940 + }, + { + "epoch": 0.9633576585968571, + "grad_norm": 2.0449907779693604, + "learning_rate": 0.00013577873019550305, + "loss": 0.3941, + "step": 24950 + }, + { + "epoch": 0.9637437738908838, + "grad_norm": 2.090315818786621, + "learning_rate": 0.00013575298917590126, + "loss": 0.3713, + "step": 24960 + }, + { + "epoch": 0.9641298891849106, + "grad_norm": 1.0789872407913208, + "learning_rate": 0.00013572724815629947, + "loss": 0.2411, + "step": 24970 + }, + { + "epoch": 0.9645160044789374, + "grad_norm": 2.0463109016418457, + "learning_rate": 0.0001357015071366977, + "loss": 0.2935, + "step": 24980 + }, + { + "epoch": 0.9649021197729643, + "grad_norm": 1.331299901008606, + "learning_rate": 0.00013567576611709593, + "loss": 0.2162, + "step": 24990 + }, + { + "epoch": 0.965288235066991, + "grad_norm": 3.3949038982391357, + "learning_rate": 0.00013565002509749411, + "loss": 0.4593, + "step": 25000 + }, + { + "epoch": 0.9656743503610178, + "grad_norm": 2.4510934352874756, + "learning_rate": 0.00013562428407789233, + "loss": 0.2476, + "step": 25010 + }, + { + "epoch": 0.9660604656550446, + "grad_norm": 1.1333917379379272, + "learning_rate": 0.00013559854305829054, + "loss": 0.3035, + "step": 25020 + }, + { + "epoch": 0.9664465809490714, + "grad_norm": 2.5525829792022705, + "learning_rate": 0.00013557280203868875, + "loss": 0.4498, + "step": 25030 + }, + { + "epoch": 0.9668326962430982, + "grad_norm": 1.4862838983535767, + "learning_rate": 0.00013554706101908697, + "loss": 0.3109, + "step": 25040 + }, + { + "epoch": 0.967218811537125, + "grad_norm": 1.0053727626800537, + "learning_rate": 0.00013552131999948518, + "loss": 0.3029, + "step": 25050 + }, + { + "epoch": 0.9676049268311517, + "grad_norm": 1.4033957719802856, + "learning_rate": 0.00013549557897988342, + "loss": 0.3096, + "step": 25060 + }, + { + "epoch": 0.9679910421251786, + "grad_norm": 2.1944756507873535, + "learning_rate": 0.0001354698379602816, + "loss": 0.3428, + "step": 25070 + }, + { + "epoch": 0.9683771574192054, + "grad_norm": 3.330143928527832, + "learning_rate": 0.00013544409694067982, + "loss": 0.4026, + "step": 25080 + }, + { + "epoch": 0.9687632727132321, + "grad_norm": 1.9337730407714844, + "learning_rate": 0.00013541835592107803, + "loss": 0.5343, + "step": 25090 + }, + { + "epoch": 0.9691493880072589, + "grad_norm": 4.203855514526367, + "learning_rate": 0.00013539261490147625, + "loss": 0.4461, + "step": 25100 + }, + { + "epoch": 0.9695355033012858, + "grad_norm": 0.6582885980606079, + "learning_rate": 0.0001353668738818745, + "loss": 0.2719, + "step": 25110 + }, + { + "epoch": 0.9699216185953126, + "grad_norm": 0.8788600564002991, + "learning_rate": 0.00013534113286227267, + "loss": 0.4335, + "step": 25120 + }, + { + "epoch": 0.9703077338893393, + "grad_norm": 1.3793160915374756, + "learning_rate": 0.00013531539184267091, + "loss": 0.3126, + "step": 25130 + }, + { + "epoch": 0.9706938491833662, + "grad_norm": 3.5996806621551514, + "learning_rate": 0.0001352896508230691, + "loss": 0.5171, + "step": 25140 + }, + { + "epoch": 0.971079964477393, + "grad_norm": 1.6220872402191162, + "learning_rate": 0.00013526390980346731, + "loss": 0.3992, + "step": 25150 + }, + { + "epoch": 0.9714660797714197, + 
"grad_norm": 1.8351634740829468, + "learning_rate": 0.00013523816878386553, + "loss": 0.3159, + "step": 25160 + }, + { + "epoch": 0.9718521950654465, + "grad_norm": 0.6400974988937378, + "learning_rate": 0.00013521242776426374, + "loss": 0.3187, + "step": 25170 + }, + { + "epoch": 0.9722383103594734, + "grad_norm": 1.3507485389709473, + "learning_rate": 0.00013518668674466198, + "loss": 0.3626, + "step": 25180 + }, + { + "epoch": 0.9726244256535002, + "grad_norm": 1.4778717756271362, + "learning_rate": 0.00013516094572506017, + "loss": 0.3467, + "step": 25190 + }, + { + "epoch": 0.9730105409475269, + "grad_norm": 0.4346179664134979, + "learning_rate": 0.0001351352047054584, + "loss": 0.1995, + "step": 25200 + }, + { + "epoch": 0.9733966562415537, + "grad_norm": 2.8404130935668945, + "learning_rate": 0.0001351094636858566, + "loss": 0.4765, + "step": 25210 + }, + { + "epoch": 0.9737827715355806, + "grad_norm": 0.044492240995168686, + "learning_rate": 0.0001350837226662548, + "loss": 0.3047, + "step": 25220 + }, + { + "epoch": 0.9741688868296073, + "grad_norm": 2.258355140686035, + "learning_rate": 0.00013505798164665302, + "loss": 0.234, + "step": 25230 + }, + { + "epoch": 0.9745550021236341, + "grad_norm": 0.23794110119342804, + "learning_rate": 0.00013503224062705123, + "loss": 0.5178, + "step": 25240 + }, + { + "epoch": 0.9749411174176609, + "grad_norm": 0.5849624872207642, + "learning_rate": 0.00013500649960744947, + "loss": 0.3419, + "step": 25250 + }, + { + "epoch": 0.9753272327116878, + "grad_norm": 1.535228967666626, + "learning_rate": 0.00013498075858784766, + "loss": 0.275, + "step": 25260 + }, + { + "epoch": 0.9757133480057145, + "grad_norm": 4.943759441375732, + "learning_rate": 0.0001349550175682459, + "loss": 0.241, + "step": 25270 + }, + { + "epoch": 0.9760994632997413, + "grad_norm": 1.3046916723251343, + "learning_rate": 0.0001349292765486441, + "loss": 0.2453, + "step": 25280 + }, + { + "epoch": 0.976485578593768, + "grad_norm": 0.20991156995296478, + "learning_rate": 0.0001349035355290423, + "loss": 0.392, + "step": 25290 + }, + { + "epoch": 0.9768716938877949, + "grad_norm": 1.2106267213821411, + "learning_rate": 0.00013487779450944054, + "loss": 0.284, + "step": 25300 + }, + { + "epoch": 0.9772578091818217, + "grad_norm": 0.28197771310806274, + "learning_rate": 0.00013485205348983873, + "loss": 0.2596, + "step": 25310 + }, + { + "epoch": 0.9776439244758485, + "grad_norm": 0.8538393378257751, + "learning_rate": 0.00013482631247023697, + "loss": 0.2519, + "step": 25320 + }, + { + "epoch": 0.9780300397698752, + "grad_norm": 1.9520586729049683, + "learning_rate": 0.00013480057145063515, + "loss": 0.2581, + "step": 25330 + }, + { + "epoch": 0.9784161550639021, + "grad_norm": 0.7613987326622009, + "learning_rate": 0.0001347748304310334, + "loss": 0.1549, + "step": 25340 + }, + { + "epoch": 0.9788022703579289, + "grad_norm": 0.2045626938343048, + "learning_rate": 0.00013474908941143158, + "loss": 0.2587, + "step": 25350 + }, + { + "epoch": 0.9791883856519557, + "grad_norm": 1.634488821029663, + "learning_rate": 0.0001347233483918298, + "loss": 0.2266, + "step": 25360 + }, + { + "epoch": 0.9795745009459824, + "grad_norm": 0.6475266814231873, + "learning_rate": 0.00013469760737222803, + "loss": 0.2548, + "step": 25370 + }, + { + "epoch": 0.9799606162400093, + "grad_norm": 0.25982800126075745, + "learning_rate": 0.00013467186635262622, + "loss": 0.3398, + "step": 25380 + }, + { + "epoch": 0.9803467315340361, + "grad_norm": 2.3229706287384033, + "learning_rate": 
0.00013464612533302446, + "loss": 0.3468, + "step": 25390 + }, + { + "epoch": 0.9807328468280628, + "grad_norm": 0.6465128660202026, + "learning_rate": 0.00013462038431342265, + "loss": 0.2716, + "step": 25400 + }, + { + "epoch": 0.9811189621220897, + "grad_norm": 1.413368821144104, + "learning_rate": 0.0001345946432938209, + "loss": 0.2508, + "step": 25410 + }, + { + "epoch": 0.9815050774161165, + "grad_norm": 0.33577996492385864, + "learning_rate": 0.00013456890227421907, + "loss": 0.3323, + "step": 25420 + }, + { + "epoch": 0.9818911927101432, + "grad_norm": 0.7601230144500732, + "learning_rate": 0.0001345431612546173, + "loss": 0.2682, + "step": 25430 + }, + { + "epoch": 0.98227730800417, + "grad_norm": 4.296112060546875, + "learning_rate": 0.00013451742023501553, + "loss": 0.2886, + "step": 25440 + }, + { + "epoch": 0.9826634232981969, + "grad_norm": 1.2557302713394165, + "learning_rate": 0.00013449167921541371, + "loss": 0.2863, + "step": 25450 + }, + { + "epoch": 0.9830495385922237, + "grad_norm": 0.6168705821037292, + "learning_rate": 0.00013446593819581195, + "loss": 0.195, + "step": 25460 + }, + { + "epoch": 0.9834356538862504, + "grad_norm": 1.9064022302627563, + "learning_rate": 0.00013444019717621014, + "loss": 0.2898, + "step": 25470 + }, + { + "epoch": 0.9838217691802772, + "grad_norm": 0.3549353778362274, + "learning_rate": 0.00013441445615660838, + "loss": 0.1761, + "step": 25480 + }, + { + "epoch": 0.9842078844743041, + "grad_norm": 0.6308786869049072, + "learning_rate": 0.0001343887151370066, + "loss": 0.2169, + "step": 25490 + }, + { + "epoch": 0.9845939997683308, + "grad_norm": 0.9449920058250427, + "learning_rate": 0.0001343629741174048, + "loss": 0.2952, + "step": 25500 + }, + { + "epoch": 0.9849801150623576, + "grad_norm": 1.6993355751037598, + "learning_rate": 0.00013433723309780302, + "loss": 0.3745, + "step": 25510 + }, + { + "epoch": 0.9853662303563844, + "grad_norm": 0.8093920350074768, + "learning_rate": 0.0001343114920782012, + "loss": 0.1725, + "step": 25520 + }, + { + "epoch": 0.9857523456504113, + "grad_norm": 1.4968618154525757, + "learning_rate": 0.00013428575105859945, + "loss": 0.2843, + "step": 25530 + }, + { + "epoch": 0.986138460944438, + "grad_norm": 0.37341028451919556, + "learning_rate": 0.00013426001003899763, + "loss": 0.2462, + "step": 25540 + }, + { + "epoch": 0.9865245762384648, + "grad_norm": 2.0109541416168213, + "learning_rate": 0.00013423426901939587, + "loss": 0.4046, + "step": 25550 + }, + { + "epoch": 0.9869106915324916, + "grad_norm": 2.540151357650757, + "learning_rate": 0.0001342085279997941, + "loss": 0.4331, + "step": 25560 + }, + { + "epoch": 0.9872968068265184, + "grad_norm": 0.9178367257118225, + "learning_rate": 0.0001341827869801923, + "loss": 0.3849, + "step": 25570 + }, + { + "epoch": 0.9876829221205452, + "grad_norm": 1.4229514598846436, + "learning_rate": 0.00013415704596059051, + "loss": 0.326, + "step": 25580 + }, + { + "epoch": 0.988069037414572, + "grad_norm": 0.7699927091598511, + "learning_rate": 0.0001341313049409887, + "loss": 0.237, + "step": 25590 + }, + { + "epoch": 0.9884551527085987, + "grad_norm": 0.3460877239704132, + "learning_rate": 0.00013410556392138694, + "loss": 0.2679, + "step": 25600 + }, + { + "epoch": 0.9888412680026256, + "grad_norm": 0.25363796949386597, + "learning_rate": 0.00013407982290178515, + "loss": 0.1746, + "step": 25610 + }, + { + "epoch": 0.9892273832966524, + "grad_norm": 1.5607961416244507, + "learning_rate": 0.00013405408188218337, + "loss": 0.6328, + "step": 25620 + }, + { + 
"epoch": 0.9896134985906792, + "grad_norm": 2.3751626014709473, + "learning_rate": 0.00013402834086258158, + "loss": 0.3989, + "step": 25630 + }, + { + "epoch": 0.9899996138847059, + "grad_norm": 0.20423173904418945, + "learning_rate": 0.0001340025998429798, + "loss": 0.2836, + "step": 25640 + }, + { + "epoch": 0.9903857291787328, + "grad_norm": 1.4207524061203003, + "learning_rate": 0.000133976858823378, + "loss": 0.348, + "step": 25650 + }, + { + "epoch": 0.9907718444727596, + "grad_norm": 0.12217597663402557, + "learning_rate": 0.0001339511178037762, + "loss": 0.3515, + "step": 25660 + }, + { + "epoch": 0.9911579597667863, + "grad_norm": 0.9259626269340515, + "learning_rate": 0.00013392537678417443, + "loss": 0.3039, + "step": 25670 + }, + { + "epoch": 0.9915440750608132, + "grad_norm": 0.5700181722640991, + "learning_rate": 0.00013389963576457265, + "loss": 0.3266, + "step": 25680 + }, + { + "epoch": 0.99193019035484, + "grad_norm": 1.9731560945510864, + "learning_rate": 0.00013387389474497086, + "loss": 0.2534, + "step": 25690 + }, + { + "epoch": 0.9923163056488667, + "grad_norm": 1.3714967966079712, + "learning_rate": 0.00013384815372536907, + "loss": 0.298, + "step": 25700 + }, + { + "epoch": 0.9927024209428935, + "grad_norm": 1.2377171516418457, + "learning_rate": 0.0001338224127057673, + "loss": 0.2948, + "step": 25710 + }, + { + "epoch": 0.9930885362369204, + "grad_norm": 0.4711095690727234, + "learning_rate": 0.0001337966716861655, + "loss": 0.2295, + "step": 25720 + }, + { + "epoch": 0.9934746515309472, + "grad_norm": 0.46715909242630005, + "learning_rate": 0.00013377093066656369, + "loss": 0.3331, + "step": 25730 + }, + { + "epoch": 0.9938607668249739, + "grad_norm": 1.1080710887908936, + "learning_rate": 0.00013374518964696193, + "loss": 0.4324, + "step": 25740 + }, + { + "epoch": 0.9942468821190007, + "grad_norm": 2.3581650257110596, + "learning_rate": 0.00013371944862736014, + "loss": 0.372, + "step": 25750 + }, + { + "epoch": 0.9946329974130276, + "grad_norm": 2.448678970336914, + "learning_rate": 0.00013369370760775835, + "loss": 0.3396, + "step": 25760 + }, + { + "epoch": 0.9950191127070543, + "grad_norm": 1.4270198345184326, + "learning_rate": 0.00013366796658815657, + "loss": 0.4511, + "step": 25770 + }, + { + "epoch": 0.9954052280010811, + "grad_norm": 0.6360304951667786, + "learning_rate": 0.00013364222556855478, + "loss": 0.2427, + "step": 25780 + }, + { + "epoch": 0.9957913432951079, + "grad_norm": 2.1653332710266113, + "learning_rate": 0.000133616484548953, + "loss": 0.3057, + "step": 25790 + }, + { + "epoch": 0.9961774585891348, + "grad_norm": 2.952923536300659, + "learning_rate": 0.0001335907435293512, + "loss": 0.3076, + "step": 25800 + }, + { + "epoch": 0.9965635738831615, + "grad_norm": 0.5913527607917786, + "learning_rate": 0.00013356500250974942, + "loss": 0.399, + "step": 25810 + }, + { + "epoch": 0.9969496891771883, + "grad_norm": 0.4653400480747223, + "learning_rate": 0.00013353926149014763, + "loss": 0.2452, + "step": 25820 + }, + { + "epoch": 0.9973358044712151, + "grad_norm": 6.321722984313965, + "learning_rate": 0.00013351352047054585, + "loss": 0.3644, + "step": 25830 + }, + { + "epoch": 0.9977219197652419, + "grad_norm": 1.9944865703582764, + "learning_rate": 0.00013348777945094406, + "loss": 0.2915, + "step": 25840 + }, + { + "epoch": 0.9981080350592687, + "grad_norm": 1.1486843824386597, + "learning_rate": 0.00013346203843134227, + "loss": 0.3787, + "step": 25850 + }, + { + "epoch": 0.9984941503532955, + "grad_norm": 0.11532440781593323, + 
"learning_rate": 0.0001334362974117405, + "loss": 0.3053, + "step": 25860 + }, + { + "epoch": 0.9988802656473222, + "grad_norm": 1.3178479671478271, + "learning_rate": 0.0001334105563921387, + "loss": 0.2917, + "step": 25870 + }, + { + "epoch": 0.9992663809413491, + "grad_norm": 0.3511134088039398, + "learning_rate": 0.0001333848153725369, + "loss": 0.2657, + "step": 25880 + }, + { + "epoch": 0.9996524962353759, + "grad_norm": 1.2761729955673218, + "learning_rate": 0.00013335907435293513, + "loss": 0.3913, + "step": 25890 + }, + { + "epoch": 1.0000386115294027, + "grad_norm": 2.540947437286377, + "learning_rate": 0.00013333333333333334, + "loss": 0.3016, + "step": 25900 + }, + { + "epoch": 1.0004247268234294, + "grad_norm": 0.5865538120269775, + "learning_rate": 0.00013330759231373155, + "loss": 0.4184, + "step": 25910 + }, + { + "epoch": 1.0008108421174562, + "grad_norm": 2.7305166721343994, + "learning_rate": 0.00013328185129412977, + "loss": 0.3529, + "step": 25920 + }, + { + "epoch": 1.0011969574114832, + "grad_norm": 1.4203829765319824, + "learning_rate": 0.00013325611027452798, + "loss": 0.3345, + "step": 25930 + }, + { + "epoch": 1.00158307270551, + "grad_norm": 0.7687380313873291, + "learning_rate": 0.0001332303692549262, + "loss": 0.2622, + "step": 25940 + }, + { + "epoch": 1.0019691879995367, + "grad_norm": 0.4958217144012451, + "learning_rate": 0.0001332046282353244, + "loss": 0.3935, + "step": 25950 + }, + { + "epoch": 1.0023553032935635, + "grad_norm": 0.27102500200271606, + "learning_rate": 0.00013317888721572262, + "loss": 0.2642, + "step": 25960 + }, + { + "epoch": 1.0027414185875902, + "grad_norm": 0.6760912537574768, + "learning_rate": 0.00013315314619612083, + "loss": 0.1937, + "step": 25970 + }, + { + "epoch": 1.003127533881617, + "grad_norm": 2.5647270679473877, + "learning_rate": 0.00013312740517651905, + "loss": 0.3344, + "step": 25980 + }, + { + "epoch": 1.0035136491756438, + "grad_norm": 0.4810403883457184, + "learning_rate": 0.00013310166415691726, + "loss": 0.1783, + "step": 25990 + }, + { + "epoch": 1.0038997644696706, + "grad_norm": 2.5404248237609863, + "learning_rate": 0.00013307592313731547, + "loss": 0.3979, + "step": 26000 + }, + { + "epoch": 1.0042858797636975, + "grad_norm": 1.1135408878326416, + "learning_rate": 0.00013305018211771369, + "loss": 0.3681, + "step": 26010 + }, + { + "epoch": 1.0046719950577243, + "grad_norm": 1.2810723781585693, + "learning_rate": 0.0001330244410981119, + "loss": 0.183, + "step": 26020 + }, + { + "epoch": 1.005058110351751, + "grad_norm": 3.3486454486846924, + "learning_rate": 0.0001329987000785101, + "loss": 0.2489, + "step": 26030 + }, + { + "epoch": 1.0054442256457778, + "grad_norm": 0.7915325164794922, + "learning_rate": 0.00013297295905890833, + "loss": 0.3577, + "step": 26040 + }, + { + "epoch": 1.0058303409398046, + "grad_norm": 0.969727098941803, + "learning_rate": 0.00013294721803930654, + "loss": 0.2243, + "step": 26050 + }, + { + "epoch": 1.0062164562338314, + "grad_norm": 1.8932983875274658, + "learning_rate": 0.00013292147701970475, + "loss": 0.245, + "step": 26060 + }, + { + "epoch": 1.0066025715278581, + "grad_norm": 1.2421804666519165, + "learning_rate": 0.00013289573600010297, + "loss": 0.2087, + "step": 26070 + }, + { + "epoch": 1.006988686821885, + "grad_norm": 1.316405177116394, + "learning_rate": 0.00013286999498050118, + "loss": 0.2864, + "step": 26080 + }, + { + "epoch": 1.007374802115912, + "grad_norm": 1.9196691513061523, + "learning_rate": 0.0001328442539608994, + "loss": 0.2098, + "step": 26090 + 
}, + { + "epoch": 1.0077609174099387, + "grad_norm": 0.7522671222686768, + "learning_rate": 0.0001328185129412976, + "loss": 0.3092, + "step": 26100 + }, + { + "epoch": 1.0081470327039654, + "grad_norm": 0.322963684797287, + "learning_rate": 0.00013279277192169585, + "loss": 0.2452, + "step": 26110 + }, + { + "epoch": 1.0085331479979922, + "grad_norm": 2.243734121322632, + "learning_rate": 0.00013276703090209403, + "loss": 0.4387, + "step": 26120 + }, + { + "epoch": 1.008919263292019, + "grad_norm": 1.0588726997375488, + "learning_rate": 0.00013274128988249225, + "loss": 0.3326, + "step": 26130 + }, + { + "epoch": 1.0093053785860457, + "grad_norm": 0.7018478512763977, + "learning_rate": 0.00013271554886289046, + "loss": 0.4248, + "step": 26140 + }, + { + "epoch": 1.0096914938800725, + "grad_norm": 3.692521810531616, + "learning_rate": 0.00013268980784328867, + "loss": 0.2452, + "step": 26150 + }, + { + "epoch": 1.0100776091740993, + "grad_norm": 2.21842885017395, + "learning_rate": 0.00013266406682368689, + "loss": 0.3315, + "step": 26160 + }, + { + "epoch": 1.0104637244681263, + "grad_norm": 1.2321841716766357, + "learning_rate": 0.0001326383258040851, + "loss": 0.1042, + "step": 26170 + }, + { + "epoch": 1.010849839762153, + "grad_norm": 1.6151124238967896, + "learning_rate": 0.00013261258478448334, + "loss": 0.1934, + "step": 26180 + }, + { + "epoch": 1.0112359550561798, + "grad_norm": 0.0949881374835968, + "learning_rate": 0.00013258684376488153, + "loss": 0.1628, + "step": 26190 + }, + { + "epoch": 1.0116220703502066, + "grad_norm": 1.7337597608566284, + "learning_rate": 0.00013256110274527977, + "loss": 0.3275, + "step": 26200 + }, + { + "epoch": 1.0120081856442333, + "grad_norm": 2.1338372230529785, + "learning_rate": 0.00013253536172567795, + "loss": 0.3677, + "step": 26210 + }, + { + "epoch": 1.01239430093826, + "grad_norm": 1.803187370300293, + "learning_rate": 0.00013250962070607617, + "loss": 0.2886, + "step": 26220 + }, + { + "epoch": 1.0127804162322869, + "grad_norm": 2.022825002670288, + "learning_rate": 0.00013248387968647438, + "loss": 0.2618, + "step": 26230 + }, + { + "epoch": 1.0131665315263139, + "grad_norm": 0.48369279503822327, + "learning_rate": 0.0001324581386668726, + "loss": 0.4638, + "step": 26240 + }, + { + "epoch": 1.0135526468203406, + "grad_norm": 3.2530572414398193, + "learning_rate": 0.00013243239764727083, + "loss": 0.3824, + "step": 26250 + }, + { + "epoch": 1.0139387621143674, + "grad_norm": 1.0877609252929688, + "learning_rate": 0.00013240665662766902, + "loss": 0.1732, + "step": 26260 + }, + { + "epoch": 1.0143248774083942, + "grad_norm": 1.5585906505584717, + "learning_rate": 0.00013238091560806726, + "loss": 0.2044, + "step": 26270 + }, + { + "epoch": 1.014710992702421, + "grad_norm": 1.5326491594314575, + "learning_rate": 0.00013235517458846545, + "loss": 0.1759, + "step": 26280 + }, + { + "epoch": 1.0150971079964477, + "grad_norm": 4.376593589782715, + "learning_rate": 0.00013232943356886366, + "loss": 0.6032, + "step": 26290 + }, + { + "epoch": 1.0154832232904745, + "grad_norm": 0.8953253030776978, + "learning_rate": 0.0001323036925492619, + "loss": 0.4057, + "step": 26300 + }, + { + "epoch": 1.0158693385845012, + "grad_norm": 1.271932601928711, + "learning_rate": 0.00013227795152966009, + "loss": 0.1802, + "step": 26310 + }, + { + "epoch": 1.0162554538785282, + "grad_norm": 2.311713457107544, + "learning_rate": 0.00013225221051005833, + "loss": 0.3368, + "step": 26320 + }, + { + "epoch": 1.016641569172555, + "grad_norm": 1.386100172996521, + 
"learning_rate": 0.0001322264694904565, + "loss": 0.3101, + "step": 26330 + }, + { + "epoch": 1.0170276844665818, + "grad_norm": 1.371382236480713, + "learning_rate": 0.00013220072847085475, + "loss": 0.3804, + "step": 26340 + }, + { + "epoch": 1.0174137997606085, + "grad_norm": 0.7098391652107239, + "learning_rate": 0.00013217498745125294, + "loss": 0.2513, + "step": 26350 + }, + { + "epoch": 1.0177999150546353, + "grad_norm": 3.5874531269073486, + "learning_rate": 0.00013214924643165115, + "loss": 0.2467, + "step": 26360 + }, + { + "epoch": 1.018186030348662, + "grad_norm": 1.9853413105010986, + "learning_rate": 0.0001321235054120494, + "loss": 0.2255, + "step": 26370 + }, + { + "epoch": 1.0185721456426888, + "grad_norm": 0.5550156831741333, + "learning_rate": 0.00013209776439244758, + "loss": 0.1558, + "step": 26380 + }, + { + "epoch": 1.0189582609367156, + "grad_norm": 1.92972731590271, + "learning_rate": 0.00013207202337284582, + "loss": 0.4843, + "step": 26390 + }, + { + "epoch": 1.0193443762307426, + "grad_norm": 2.331674814224243, + "learning_rate": 0.000132046282353244, + "loss": 0.2652, + "step": 26400 + }, + { + "epoch": 1.0197304915247694, + "grad_norm": 0.809916615486145, + "learning_rate": 0.00013202054133364225, + "loss": 0.1824, + "step": 26410 + }, + { + "epoch": 1.0201166068187961, + "grad_norm": 2.6432926654815674, + "learning_rate": 0.00013199480031404046, + "loss": 0.2712, + "step": 26420 + }, + { + "epoch": 1.020502722112823, + "grad_norm": 1.6016955375671387, + "learning_rate": 0.00013196905929443865, + "loss": 0.1629, + "step": 26430 + }, + { + "epoch": 1.0208888374068497, + "grad_norm": 0.22881706058979034, + "learning_rate": 0.00013194331827483689, + "loss": 0.2977, + "step": 26440 + }, + { + "epoch": 1.0212749527008764, + "grad_norm": 1.3551994562149048, + "learning_rate": 0.00013191757725523507, + "loss": 0.1207, + "step": 26450 + }, + { + "epoch": 1.0216610679949032, + "grad_norm": 0.19062986969947815, + "learning_rate": 0.0001318918362356333, + "loss": 0.2201, + "step": 26460 + }, + { + "epoch": 1.0220471832889302, + "grad_norm": 2.014047145843506, + "learning_rate": 0.0001318660952160315, + "loss": 0.4991, + "step": 26470 + }, + { + "epoch": 1.022433298582957, + "grad_norm": 0.39503228664398193, + "learning_rate": 0.00013184035419642974, + "loss": 0.3509, + "step": 26480 + }, + { + "epoch": 1.0228194138769837, + "grad_norm": 4.807271957397461, + "learning_rate": 0.00013181461317682795, + "loss": 0.3324, + "step": 26490 + }, + { + "epoch": 1.0232055291710105, + "grad_norm": 1.4785593748092651, + "learning_rate": 0.00013178887215722614, + "loss": 0.269, + "step": 26500 + }, + { + "epoch": 1.0235916444650373, + "grad_norm": 1.851137399673462, + "learning_rate": 0.00013176313113762438, + "loss": 0.2739, + "step": 26510 + }, + { + "epoch": 1.023977759759064, + "grad_norm": 0.6200979351997375, + "learning_rate": 0.00013173739011802257, + "loss": 0.3135, + "step": 26520 + }, + { + "epoch": 1.0243638750530908, + "grad_norm": 2.411592960357666, + "learning_rate": 0.0001317116490984208, + "loss": 0.1675, + "step": 26530 + }, + { + "epoch": 1.0247499903471176, + "grad_norm": 0.6965230703353882, + "learning_rate": 0.000131685908078819, + "loss": 0.2762, + "step": 26540 + }, + { + "epoch": 1.0251361056411445, + "grad_norm": 1.3620637655258179, + "learning_rate": 0.00013166016705921723, + "loss": 0.319, + "step": 26550 + }, + { + "epoch": 1.0255222209351713, + "grad_norm": 1.4016404151916504, + "learning_rate": 0.00013163442603961545, + "loss": 0.1871, + "step": 26560 + }, 
+ { + "epoch": 1.025908336229198, + "grad_norm": 1.234718680381775, + "learning_rate": 0.00013160868502001363, + "loss": 0.1301, + "step": 26570 + }, + { + "epoch": 1.0262944515232248, + "grad_norm": 1.4657102823257446, + "learning_rate": 0.00013158294400041187, + "loss": 0.3814, + "step": 26580 + }, + { + "epoch": 1.0266805668172516, + "grad_norm": 1.606948733329773, + "learning_rate": 0.00013155720298081006, + "loss": 0.2226, + "step": 26590 + }, + { + "epoch": 1.0270666821112784, + "grad_norm": 0.4280283749103546, + "learning_rate": 0.0001315314619612083, + "loss": 0.1506, + "step": 26600 + }, + { + "epoch": 1.0274527974053052, + "grad_norm": 0.141262486577034, + "learning_rate": 0.0001315057209416065, + "loss": 0.1476, + "step": 26610 + }, + { + "epoch": 1.027838912699332, + "grad_norm": 2.7536983489990234, + "learning_rate": 0.00013147997992200473, + "loss": 0.2558, + "step": 26620 + }, + { + "epoch": 1.028225027993359, + "grad_norm": 1.052965760231018, + "learning_rate": 0.00013145423890240294, + "loss": 0.3989, + "step": 26630 + }, + { + "epoch": 1.0286111432873857, + "grad_norm": 0.3157159388065338, + "learning_rate": 0.00013142849788280112, + "loss": 0.2656, + "step": 26640 + }, + { + "epoch": 1.0289972585814124, + "grad_norm": 2.080801248550415, + "learning_rate": 0.00013140275686319937, + "loss": 0.3183, + "step": 26650 + }, + { + "epoch": 1.0293833738754392, + "grad_norm": 1.255540370941162, + "learning_rate": 0.00013137701584359755, + "loss": 0.4555, + "step": 26660 + }, + { + "epoch": 1.029769489169466, + "grad_norm": 1.0504742860794067, + "learning_rate": 0.0001313512748239958, + "loss": 0.17, + "step": 26670 + }, + { + "epoch": 1.0301556044634927, + "grad_norm": 1.8295503854751587, + "learning_rate": 0.000131325533804394, + "loss": 0.2202, + "step": 26680 + }, + { + "epoch": 1.0305417197575195, + "grad_norm": 0.9758415222167969, + "learning_rate": 0.00013129979278479222, + "loss": 0.1187, + "step": 26690 + }, + { + "epoch": 1.0309278350515463, + "grad_norm": 0.6076366901397705, + "learning_rate": 0.00013127405176519043, + "loss": 0.0668, + "step": 26700 + }, + { + "epoch": 1.0313139503455733, + "grad_norm": 0.7663784027099609, + "learning_rate": 0.00013124831074558865, + "loss": 0.2115, + "step": 26710 + }, + { + "epoch": 1.0317000656396, + "grad_norm": 1.814332365989685, + "learning_rate": 0.00013122256972598686, + "loss": 0.0901, + "step": 26720 + }, + { + "epoch": 1.0320861809336268, + "grad_norm": 2.80830454826355, + "learning_rate": 0.00013119682870638504, + "loss": 0.213, + "step": 26730 + }, + { + "epoch": 1.0324722962276536, + "grad_norm": 1.324601411819458, + "learning_rate": 0.00013117108768678328, + "loss": 0.6125, + "step": 26740 + }, + { + "epoch": 1.0328584115216803, + "grad_norm": 1.3301643133163452, + "learning_rate": 0.0001311453466671815, + "loss": 0.1986, + "step": 26750 + }, + { + "epoch": 1.033244526815707, + "grad_norm": 5.361929893493652, + "learning_rate": 0.0001311196056475797, + "loss": 0.5023, + "step": 26760 + }, + { + "epoch": 1.0336306421097339, + "grad_norm": 0.7855739593505859, + "learning_rate": 0.00013109386462797792, + "loss": 0.2549, + "step": 26770 + }, + { + "epoch": 1.0340167574037609, + "grad_norm": 0.05219104886054993, + "learning_rate": 0.00013106812360837614, + "loss": 0.2942, + "step": 26780 + }, + { + "epoch": 1.0344028726977876, + "grad_norm": 0.7680227160453796, + "learning_rate": 0.00013104238258877435, + "loss": 0.2909, + "step": 26790 + }, + { + "epoch": 1.0347889879918144, + "grad_norm": 0.559930682182312, + 
"learning_rate": 0.00013101664156917256, + "loss": 0.2876, + "step": 26800 + }, + { + "epoch": 1.0351751032858412, + "grad_norm": 0.8780495524406433, + "learning_rate": 0.00013099090054957078, + "loss": 0.2837, + "step": 26810 + }, + { + "epoch": 1.035561218579868, + "grad_norm": 2.0212693214416504, + "learning_rate": 0.000130965159529969, + "loss": 0.3008, + "step": 26820 + }, + { + "epoch": 1.0359473338738947, + "grad_norm": 2.2967641353607178, + "learning_rate": 0.0001309394185103672, + "loss": 0.2797, + "step": 26830 + }, + { + "epoch": 1.0363334491679215, + "grad_norm": 0.33352091908454895, + "learning_rate": 0.00013091367749076542, + "loss": 0.4785, + "step": 26840 + }, + { + "epoch": 1.0367195644619482, + "grad_norm": 1.5413645505905151, + "learning_rate": 0.00013088793647116363, + "loss": 0.3248, + "step": 26850 + }, + { + "epoch": 1.0371056797559752, + "grad_norm": 0.36845988035202026, + "learning_rate": 0.00013086219545156184, + "loss": 0.1723, + "step": 26860 + }, + { + "epoch": 1.037491795050002, + "grad_norm": 1.196103811264038, + "learning_rate": 0.00013083645443196006, + "loss": 0.3623, + "step": 26870 + }, + { + "epoch": 1.0378779103440288, + "grad_norm": 2.4172215461730957, + "learning_rate": 0.00013081071341235827, + "loss": 0.3428, + "step": 26880 + }, + { + "epoch": 1.0382640256380555, + "grad_norm": 1.585368275642395, + "learning_rate": 0.00013078497239275648, + "loss": 0.2791, + "step": 26890 + }, + { + "epoch": 1.0386501409320823, + "grad_norm": 2.8708138465881348, + "learning_rate": 0.0001307592313731547, + "loss": 0.3386, + "step": 26900 + }, + { + "epoch": 1.039036256226109, + "grad_norm": 1.1256765127182007, + "learning_rate": 0.0001307334903535529, + "loss": 0.2992, + "step": 26910 + }, + { + "epoch": 1.0394223715201358, + "grad_norm": 0.9846695065498352, + "learning_rate": 0.00013070774933395112, + "loss": 0.2659, + "step": 26920 + }, + { + "epoch": 1.0398084868141626, + "grad_norm": 1.5781525373458862, + "learning_rate": 0.00013068200831434934, + "loss": 0.2825, + "step": 26930 + }, + { + "epoch": 1.0401946021081896, + "grad_norm": 0.6615175008773804, + "learning_rate": 0.00013065626729474755, + "loss": 0.3608, + "step": 26940 + }, + { + "epoch": 1.0405807174022164, + "grad_norm": 1.2707170248031616, + "learning_rate": 0.00013063052627514576, + "loss": 0.3183, + "step": 26950 + }, + { + "epoch": 1.0409668326962431, + "grad_norm": 2.6396381855010986, + "learning_rate": 0.00013060478525554398, + "loss": 0.284, + "step": 26960 + }, + { + "epoch": 1.04135294799027, + "grad_norm": 1.5508041381835938, + "learning_rate": 0.0001305790442359422, + "loss": 0.2861, + "step": 26970 + }, + { + "epoch": 1.0417390632842967, + "grad_norm": 0.32720935344696045, + "learning_rate": 0.0001305533032163404, + "loss": 0.2784, + "step": 26980 + }, + { + "epoch": 1.0421251785783234, + "grad_norm": 2.3695240020751953, + "learning_rate": 0.00013052756219673862, + "loss": 0.3058, + "step": 26990 + }, + { + "epoch": 1.0425112938723502, + "grad_norm": 1.5404917001724243, + "learning_rate": 0.00013050182117713683, + "loss": 0.3636, + "step": 27000 + }, + { + "epoch": 1.0428974091663772, + "grad_norm": 1.3176417350769043, + "learning_rate": 0.00013047608015753504, + "loss": 0.2995, + "step": 27010 + }, + { + "epoch": 1.043283524460404, + "grad_norm": 1.5777894258499146, + "learning_rate": 0.00013045033913793326, + "loss": 0.3244, + "step": 27020 + }, + { + "epoch": 1.0436696397544307, + "grad_norm": 2.209652900695801, + "learning_rate": 0.00013042459811833147, + "loss": 0.2716, + "step": 
27030 + }, + { + "epoch": 1.0440557550484575, + "grad_norm": 1.7991529703140259, + "learning_rate": 0.00013039885709872968, + "loss": 0.2022, + "step": 27040 + }, + { + "epoch": 1.0444418703424843, + "grad_norm": 0.3741607069969177, + "learning_rate": 0.0001303731160791279, + "loss": 0.3808, + "step": 27050 + }, + { + "epoch": 1.044827985636511, + "grad_norm": 0.023122821003198624, + "learning_rate": 0.0001303473750595261, + "loss": 0.3209, + "step": 27060 + }, + { + "epoch": 1.0452141009305378, + "grad_norm": 2.0373151302337646, + "learning_rate": 0.00013032163403992432, + "loss": 0.3105, + "step": 27070 + }, + { + "epoch": 1.0456002162245646, + "grad_norm": 0.5242247581481934, + "learning_rate": 0.00013029589302032254, + "loss": 0.2589, + "step": 27080 + }, + { + "epoch": 1.0459863315185915, + "grad_norm": 0.8598466515541077, + "learning_rate": 0.00013027015200072075, + "loss": 0.3025, + "step": 27090 + }, + { + "epoch": 1.0463724468126183, + "grad_norm": 0.19895502924919128, + "learning_rate": 0.00013024441098111896, + "loss": 0.3278, + "step": 27100 + }, + { + "epoch": 1.046758562106645, + "grad_norm": 1.9250338077545166, + "learning_rate": 0.0001302186699615172, + "loss": 0.3272, + "step": 27110 + }, + { + "epoch": 1.0471446774006719, + "grad_norm": 0.6927512288093567, + "learning_rate": 0.0001301929289419154, + "loss": 0.3256, + "step": 27120 + }, + { + "epoch": 1.0475307926946986, + "grad_norm": 0.5463778376579285, + "learning_rate": 0.0001301671879223136, + "loss": 0.2726, + "step": 27130 + }, + { + "epoch": 1.0479169079887254, + "grad_norm": 0.24690699577331543, + "learning_rate": 0.00013014144690271182, + "loss": 0.1247, + "step": 27140 + }, + { + "epoch": 1.0483030232827522, + "grad_norm": 7.875885009765625, + "learning_rate": 0.00013011570588311003, + "loss": 0.24, + "step": 27150 + }, + { + "epoch": 1.048689138576779, + "grad_norm": 2.0821642875671387, + "learning_rate": 0.00013008996486350824, + "loss": 0.2695, + "step": 27160 + }, + { + "epoch": 1.049075253870806, + "grad_norm": 1.0324410200119019, + "learning_rate": 0.00013006422384390646, + "loss": 0.3156, + "step": 27170 + }, + { + "epoch": 1.0494613691648327, + "grad_norm": 1.3675347566604614, + "learning_rate": 0.0001300384828243047, + "loss": 0.2366, + "step": 27180 + }, + { + "epoch": 1.0498474844588594, + "grad_norm": 0.4514729082584381, + "learning_rate": 0.00013001274180470288, + "loss": 0.1541, + "step": 27190 + }, + { + "epoch": 1.0502335997528862, + "grad_norm": 1.0098782777786255, + "learning_rate": 0.0001299870007851011, + "loss": 0.3059, + "step": 27200 + }, + { + "epoch": 1.050619715046913, + "grad_norm": 0.07944436371326447, + "learning_rate": 0.0001299612597654993, + "loss": 0.1882, + "step": 27210 + }, + { + "epoch": 1.0510058303409398, + "grad_norm": 0.8160178661346436, + "learning_rate": 0.00012993551874589752, + "loss": 0.2657, + "step": 27220 + }, + { + "epoch": 1.0513919456349665, + "grad_norm": 3.881469964981079, + "learning_rate": 0.00012990977772629574, + "loss": 0.4586, + "step": 27230 + }, + { + "epoch": 1.0517780609289935, + "grad_norm": 1.7851450443267822, + "learning_rate": 0.00012988403670669395, + "loss": 0.2945, + "step": 27240 + }, + { + "epoch": 1.0521641762230203, + "grad_norm": 4.176274299621582, + "learning_rate": 0.0001298582956870922, + "loss": 0.3819, + "step": 27250 + }, + { + "epoch": 1.052550291517047, + "grad_norm": 1.43479585647583, + "learning_rate": 0.00012983255466749038, + "loss": 0.3651, + "step": 27260 + }, + { + "epoch": 1.0529364068110738, + "grad_norm": 
0.4839598536491394, + "learning_rate": 0.0001298068136478886, + "loss": 0.186, + "step": 27270 + }, + { + "epoch": 1.0533225221051006, + "grad_norm": 0.8487644791603088, + "learning_rate": 0.0001297810726282868, + "loss": 0.0851, + "step": 27280 + }, + { + "epoch": 1.0537086373991273, + "grad_norm": 3.5912392139434814, + "learning_rate": 0.00012975533160868502, + "loss": 0.3962, + "step": 27290 + }, + { + "epoch": 1.0540947526931541, + "grad_norm": 2.4018168449401855, + "learning_rate": 0.00012972959058908326, + "loss": 0.4, + "step": 27300 + }, + { + "epoch": 1.0544808679871809, + "grad_norm": 3.8987746238708496, + "learning_rate": 0.00012970384956948144, + "loss": 0.3081, + "step": 27310 + }, + { + "epoch": 1.0548669832812079, + "grad_norm": 0.4471427798271179, + "learning_rate": 0.00012967810854987968, + "loss": 0.2227, + "step": 27320 + }, + { + "epoch": 1.0552530985752346, + "grad_norm": 0.6207703351974487, + "learning_rate": 0.00012965236753027787, + "loss": 0.2066, + "step": 27330 + }, + { + "epoch": 1.0556392138692614, + "grad_norm": 1.4381637573242188, + "learning_rate": 0.00012962662651067608, + "loss": 0.2598, + "step": 27340 + }, + { + "epoch": 1.0560253291632882, + "grad_norm": 1.6057437658309937, + "learning_rate": 0.0001296008854910743, + "loss": 0.4003, + "step": 27350 + }, + { + "epoch": 1.056411444457315, + "grad_norm": 1.583121418952942, + "learning_rate": 0.0001295751444714725, + "loss": 0.2231, + "step": 27360 + }, + { + "epoch": 1.0567975597513417, + "grad_norm": 0.9260556101799011, + "learning_rate": 0.00012954940345187075, + "loss": 0.3066, + "step": 27370 + }, + { + "epoch": 1.0571836750453685, + "grad_norm": 0.530800461769104, + "learning_rate": 0.00012952366243226894, + "loss": 0.2063, + "step": 27380 + }, + { + "epoch": 1.0575697903393952, + "grad_norm": 0.3256929814815521, + "learning_rate": 0.00012949792141266718, + "loss": 0.2279, + "step": 27390 + }, + { + "epoch": 1.0579559056334222, + "grad_norm": 1.2628639936447144, + "learning_rate": 0.00012947218039306536, + "loss": 0.179, + "step": 27400 + }, + { + "epoch": 1.058342020927449, + "grad_norm": 1.168662190437317, + "learning_rate": 0.0001294464393734636, + "loss": 0.2862, + "step": 27410 + }, + { + "epoch": 1.0587281362214758, + "grad_norm": 3.8283517360687256, + "learning_rate": 0.00012942069835386182, + "loss": 0.3933, + "step": 27420 + }, + { + "epoch": 1.0591142515155025, + "grad_norm": 0.10550712049007416, + "learning_rate": 0.00012939495733426, + "loss": 0.2015, + "step": 27430 + }, + { + "epoch": 1.0595003668095293, + "grad_norm": 0.7177254557609558, + "learning_rate": 0.00012936921631465824, + "loss": 0.2126, + "step": 27440 + }, + { + "epoch": 1.059886482103556, + "grad_norm": 3.3265602588653564, + "learning_rate": 0.00012934347529505643, + "loss": 0.3734, + "step": 27450 + }, + { + "epoch": 1.0602725973975828, + "grad_norm": 0.9886051416397095, + "learning_rate": 0.00012931773427545467, + "loss": 0.2643, + "step": 27460 + }, + { + "epoch": 1.0606587126916098, + "grad_norm": 0.8335347175598145, + "learning_rate": 0.00012929199325585286, + "loss": 0.2044, + "step": 27470 + }, + { + "epoch": 1.0610448279856366, + "grad_norm": 1.16574227809906, + "learning_rate": 0.0001292662522362511, + "loss": 0.2316, + "step": 27480 + }, + { + "epoch": 1.0614309432796634, + "grad_norm": 0.1635606586933136, + "learning_rate": 0.0001292405112166493, + "loss": 0.2477, + "step": 27490 + }, + { + "epoch": 1.0618170585736901, + "grad_norm": 0.6800632476806641, + "learning_rate": 0.0001292147701970475, + "loss": 0.2831, 
+ "step": 27500 + }, + { + "epoch": 1.062203173867717, + "grad_norm": 0.5231989622116089, + "learning_rate": 0.00012918902917744574, + "loss": 0.1621, + "step": 27510 + }, + { + "epoch": 1.0625892891617437, + "grad_norm": 6.668003559112549, + "learning_rate": 0.00012916328815784392, + "loss": 0.2736, + "step": 27520 + }, + { + "epoch": 1.0629754044557704, + "grad_norm": 0.2980963885784149, + "learning_rate": 0.00012913754713824216, + "loss": 0.2563, + "step": 27530 + }, + { + "epoch": 1.0633615197497972, + "grad_norm": 0.8144646883010864, + "learning_rate": 0.00012911180611864035, + "loss": 0.3821, + "step": 27540 + }, + { + "epoch": 1.063747635043824, + "grad_norm": 0.9781578183174133, + "learning_rate": 0.0001290860650990386, + "loss": 0.3653, + "step": 27550 + }, + { + "epoch": 1.064133750337851, + "grad_norm": 1.5652499198913574, + "learning_rate": 0.0001290603240794368, + "loss": 0.325, + "step": 27560 + }, + { + "epoch": 1.0645198656318777, + "grad_norm": 2.707165002822876, + "learning_rate": 0.000129034583059835, + "loss": 0.3129, + "step": 27570 + }, + { + "epoch": 1.0649059809259045, + "grad_norm": 0.35952532291412354, + "learning_rate": 0.00012900884204023323, + "loss": 0.2093, + "step": 27580 + }, + { + "epoch": 1.0652920962199313, + "grad_norm": 1.2863729000091553, + "learning_rate": 0.00012898310102063142, + "loss": 0.1299, + "step": 27590 + }, + { + "epoch": 1.065678211513958, + "grad_norm": 0.8279618620872498, + "learning_rate": 0.00012895736000102966, + "loss": 0.2072, + "step": 27600 + }, + { + "epoch": 1.0660643268079848, + "grad_norm": 0.4006168246269226, + "learning_rate": 0.00012893161898142787, + "loss": 0.3613, + "step": 27610 + }, + { + "epoch": 1.0664504421020116, + "grad_norm": 1.2714260816574097, + "learning_rate": 0.00012890587796182608, + "loss": 0.2013, + "step": 27620 + }, + { + "epoch": 1.0668365573960386, + "grad_norm": 1.9552396535873413, + "learning_rate": 0.0001288801369422243, + "loss": 0.2837, + "step": 27630 + }, + { + "epoch": 1.0672226726900653, + "grad_norm": 2.179871082305908, + "learning_rate": 0.00012885439592262248, + "loss": 0.1521, + "step": 27640 + }, + { + "epoch": 1.067608787984092, + "grad_norm": 1.1471878290176392, + "learning_rate": 0.00012882865490302072, + "loss": 0.2532, + "step": 27650 + }, + { + "epoch": 1.0679949032781189, + "grad_norm": 3.0032637119293213, + "learning_rate": 0.0001288029138834189, + "loss": 0.467, + "step": 27660 + }, + { + "epoch": 1.0683810185721456, + "grad_norm": 2.480180025100708, + "learning_rate": 0.00012877717286381715, + "loss": 0.3601, + "step": 27670 + }, + { + "epoch": 1.0687671338661724, + "grad_norm": 1.86027991771698, + "learning_rate": 0.00012875143184421536, + "loss": 0.3689, + "step": 27680 + }, + { + "epoch": 1.0691532491601992, + "grad_norm": 0.38396087288856506, + "learning_rate": 0.00012872569082461358, + "loss": 0.1401, + "step": 27690 + }, + { + "epoch": 1.0695393644542261, + "grad_norm": 0.8882033824920654, + "learning_rate": 0.0001286999498050118, + "loss": 0.6004, + "step": 27700 + }, + { + "epoch": 1.069925479748253, + "grad_norm": 1.2111278772354126, + "learning_rate": 0.00012867420878540998, + "loss": 0.2371, + "step": 27710 + }, + { + "epoch": 1.0703115950422797, + "grad_norm": 1.7789413928985596, + "learning_rate": 0.00012864846776580822, + "loss": 0.3142, + "step": 27720 + }, + { + "epoch": 1.0706977103363065, + "grad_norm": 1.6160372495651245, + "learning_rate": 0.00012862272674620643, + "loss": 0.3444, + "step": 27730 + }, + { + "epoch": 1.0710838256303332, + "grad_norm": 
3.336289167404175, + "learning_rate": 0.00012859698572660464, + "loss": 0.1214, + "step": 27740 + }, + { + "epoch": 1.07146994092436, + "grad_norm": 4.104520320892334, + "learning_rate": 0.00012857124470700286, + "loss": 0.1752, + "step": 27750 + }, + { + "epoch": 1.0718560562183868, + "grad_norm": 0.4429762363433838, + "learning_rate": 0.00012854550368740107, + "loss": 0.5001, + "step": 27760 + }, + { + "epoch": 1.0722421715124135, + "grad_norm": 0.557033121585846, + "learning_rate": 0.00012851976266779928, + "loss": 0.3267, + "step": 27770 + }, + { + "epoch": 1.0726282868064403, + "grad_norm": 1.6847301721572876, + "learning_rate": 0.00012849402164819747, + "loss": 0.2305, + "step": 27780 + }, + { + "epoch": 1.0730144021004673, + "grad_norm": 1.0504320859909058, + "learning_rate": 0.0001284682806285957, + "loss": 0.3443, + "step": 27790 + }, + { + "epoch": 1.073400517394494, + "grad_norm": 2.728804349899292, + "learning_rate": 0.00012844253960899392, + "loss": 0.4083, + "step": 27800 + }, + { + "epoch": 1.0737866326885208, + "grad_norm": 0.6164497137069702, + "learning_rate": 0.00012841679858939214, + "loss": 0.2049, + "step": 27810 + }, + { + "epoch": 1.0741727479825476, + "grad_norm": 2.1831917762756348, + "learning_rate": 0.00012839105756979035, + "loss": 0.1674, + "step": 27820 + }, + { + "epoch": 1.0745588632765743, + "grad_norm": 0.5254467129707336, + "learning_rate": 0.00012836531655018856, + "loss": 0.3456, + "step": 27830 + }, + { + "epoch": 1.0749449785706011, + "grad_norm": 2.920846700668335, + "learning_rate": 0.00012833957553058678, + "loss": 0.4071, + "step": 27840 + }, + { + "epoch": 1.0753310938646279, + "grad_norm": 0.6006580591201782, + "learning_rate": 0.00012831383451098496, + "loss": 0.16, + "step": 27850 + }, + { + "epoch": 1.0757172091586549, + "grad_norm": 1.7163684368133545, + "learning_rate": 0.0001282880934913832, + "loss": 0.1821, + "step": 27860 + }, + { + "epoch": 1.0761033244526816, + "grad_norm": 1.8286449909210205, + "learning_rate": 0.00012826235247178142, + "loss": 0.3079, + "step": 27870 + }, + { + "epoch": 1.0764894397467084, + "grad_norm": 2.5178558826446533, + "learning_rate": 0.00012823661145217963, + "loss": 0.2158, + "step": 27880 + }, + { + "epoch": 1.0768755550407352, + "grad_norm": 0.8985245227813721, + "learning_rate": 0.00012821087043257784, + "loss": 0.198, + "step": 27890 + }, + { + "epoch": 1.077261670334762, + "grad_norm": 0.11768722534179688, + "learning_rate": 0.00012818512941297606, + "loss": 0.1661, + "step": 27900 + }, + { + "epoch": 1.0776477856287887, + "grad_norm": 1.0070226192474365, + "learning_rate": 0.00012815938839337427, + "loss": 0.2843, + "step": 27910 + }, + { + "epoch": 1.0780339009228155, + "grad_norm": 1.6243773698806763, + "learning_rate": 0.00012813364737377248, + "loss": 0.3449, + "step": 27920 + }, + { + "epoch": 1.0784200162168422, + "grad_norm": 2.8661181926727295, + "learning_rate": 0.0001281079063541707, + "loss": 0.4921, + "step": 27930 + }, + { + "epoch": 1.0788061315108692, + "grad_norm": 1.5015594959259033, + "learning_rate": 0.0001280821653345689, + "loss": 0.3347, + "step": 27940 + }, + { + "epoch": 1.079192246804896, + "grad_norm": 1.7244246006011963, + "learning_rate": 0.00012805642431496712, + "loss": 0.3342, + "step": 27950 + }, + { + "epoch": 1.0795783620989228, + "grad_norm": 0.17871785163879395, + "learning_rate": 0.00012803068329536534, + "loss": 0.051, + "step": 27960 + }, + { + "epoch": 1.0799644773929495, + "grad_norm": 1.093429684638977, + "learning_rate": 0.00012800494227576355, + "loss": 
0.2461, + "step": 27970 + }, + { + "epoch": 1.0803505926869763, + "grad_norm": 1.0775126218795776, + "learning_rate": 0.00012797920125616176, + "loss": 0.293, + "step": 27980 + }, + { + "epoch": 1.080736707981003, + "grad_norm": 2.0808680057525635, + "learning_rate": 0.00012795346023655998, + "loss": 0.2125, + "step": 27990 + }, + { + "epoch": 1.0811228232750298, + "grad_norm": 3.172473907470703, + "learning_rate": 0.0001279277192169582, + "loss": 0.3289, + "step": 28000 + }, + { + "epoch": 1.0815089385690566, + "grad_norm": 0.8227205872535706, + "learning_rate": 0.0001279019781973564, + "loss": 0.4457, + "step": 28010 + }, + { + "epoch": 1.0818950538630836, + "grad_norm": 0.4987971782684326, + "learning_rate": 0.00012787623717775462, + "loss": 0.2397, + "step": 28020 + }, + { + "epoch": 1.0822811691571104, + "grad_norm": 0.6923367381095886, + "learning_rate": 0.00012785049615815283, + "loss": 0.1833, + "step": 28030 + }, + { + "epoch": 1.0826672844511371, + "grad_norm": 0.3719552457332611, + "learning_rate": 0.00012782475513855104, + "loss": 0.4045, + "step": 28040 + }, + { + "epoch": 1.083053399745164, + "grad_norm": 0.798744261264801, + "learning_rate": 0.00012779901411894926, + "loss": 0.2218, + "step": 28050 + }, + { + "epoch": 1.0834395150391907, + "grad_norm": 0.6289515495300293, + "learning_rate": 0.00012777327309934747, + "loss": 0.2423, + "step": 28060 + }, + { + "epoch": 1.0838256303332174, + "grad_norm": 0.6853532195091248, + "learning_rate": 0.00012774753207974568, + "loss": 0.2759, + "step": 28070 + }, + { + "epoch": 1.0842117456272442, + "grad_norm": 1.3380333185195923, + "learning_rate": 0.0001277217910601439, + "loss": 0.2457, + "step": 28080 + }, + { + "epoch": 1.0845978609212712, + "grad_norm": 1.4076060056686401, + "learning_rate": 0.0001276960500405421, + "loss": 0.3065, + "step": 28090 + }, + { + "epoch": 1.084983976215298, + "grad_norm": 1.790323257446289, + "learning_rate": 0.00012767030902094032, + "loss": 0.182, + "step": 28100 + }, + { + "epoch": 1.0853700915093247, + "grad_norm": 1.9291974306106567, + "learning_rate": 0.00012764456800133854, + "loss": 0.3798, + "step": 28110 + }, + { + "epoch": 1.0857562068033515, + "grad_norm": 1.36685049533844, + "learning_rate": 0.00012761882698173675, + "loss": 0.1893, + "step": 28120 + }, + { + "epoch": 1.0861423220973783, + "grad_norm": 2.52441668510437, + "learning_rate": 0.00012759308596213496, + "loss": 0.2389, + "step": 28130 + }, + { + "epoch": 1.086528437391405, + "grad_norm": 0.9578754901885986, + "learning_rate": 0.00012756734494253318, + "loss": 0.3133, + "step": 28140 + }, + { + "epoch": 1.0869145526854318, + "grad_norm": 0.17957572638988495, + "learning_rate": 0.0001275416039229314, + "loss": 0.1708, + "step": 28150 + }, + { + "epoch": 1.0873006679794586, + "grad_norm": 1.7213740348815918, + "learning_rate": 0.0001275158629033296, + "loss": 0.3831, + "step": 28160 + }, + { + "epoch": 1.0876867832734856, + "grad_norm": 1.2460767030715942, + "learning_rate": 0.00012749012188372782, + "loss": 0.266, + "step": 28170 + }, + { + "epoch": 1.0880728985675123, + "grad_norm": 0.33691835403442383, + "learning_rate": 0.00012746438086412606, + "loss": 0.216, + "step": 28180 + }, + { + "epoch": 1.088459013861539, + "grad_norm": 0.23887981474399567, + "learning_rate": 0.00012743863984452424, + "loss": 0.2331, + "step": 28190 + }, + { + "epoch": 1.0888451291555659, + "grad_norm": 4.753499984741211, + "learning_rate": 0.00012741289882492246, + "loss": 0.1845, + "step": 28200 + }, + { + "epoch": 1.0892312444495926, + "grad_norm": 
1.055097222328186, + "learning_rate": 0.00012738715780532067, + "loss": 0.2978, + "step": 28210 + }, + { + "epoch": 1.0896173597436194, + "grad_norm": 2.3080852031707764, + "learning_rate": 0.00012736141678571888, + "loss": 0.1838, + "step": 28220 + }, + { + "epoch": 1.0900034750376462, + "grad_norm": 0.3733162581920624, + "learning_rate": 0.00012733567576611712, + "loss": 0.1941, + "step": 28230 + }, + { + "epoch": 1.090389590331673, + "grad_norm": 2.247748613357544, + "learning_rate": 0.0001273099347465153, + "loss": 0.2594, + "step": 28240 + }, + { + "epoch": 1.0907757056257, + "grad_norm": 1.644177794456482, + "learning_rate": 0.00012728419372691355, + "loss": 0.392, + "step": 28250 + }, + { + "epoch": 1.0911618209197267, + "grad_norm": 2.3522965908050537, + "learning_rate": 0.00012725845270731174, + "loss": 0.2198, + "step": 28260 + }, + { + "epoch": 1.0915479362137535, + "grad_norm": 0.2335210144519806, + "learning_rate": 0.00012723271168770995, + "loss": 0.3363, + "step": 28270 + }, + { + "epoch": 1.0919340515077802, + "grad_norm": 0.476607084274292, + "learning_rate": 0.00012720697066810816, + "loss": 0.1803, + "step": 28280 + }, + { + "epoch": 1.092320166801807, + "grad_norm": 2.1482882499694824, + "learning_rate": 0.00012718122964850638, + "loss": 0.2567, + "step": 28290 + }, + { + "epoch": 1.0927062820958338, + "grad_norm": 1.6457593441009521, + "learning_rate": 0.00012715548862890462, + "loss": 0.196, + "step": 28300 + }, + { + "epoch": 1.0930923973898605, + "grad_norm": 2.0742087364196777, + "learning_rate": 0.0001271297476093028, + "loss": 0.3955, + "step": 28310 + }, + { + "epoch": 1.0934785126838875, + "grad_norm": 0.1588711142539978, + "learning_rate": 0.00012710400658970104, + "loss": 0.2113, + "step": 28320 + }, + { + "epoch": 1.0938646279779143, + "grad_norm": 4.303687572479248, + "learning_rate": 0.00012707826557009923, + "loss": 0.2941, + "step": 28330 + }, + { + "epoch": 1.094250743271941, + "grad_norm": 2.0096209049224854, + "learning_rate": 0.00012705252455049744, + "loss": 0.3092, + "step": 28340 + }, + { + "epoch": 1.0946368585659678, + "grad_norm": 0.49071142077445984, + "learning_rate": 0.00012702678353089566, + "loss": 0.2387, + "step": 28350 + }, + { + "epoch": 1.0950229738599946, + "grad_norm": 0.9084739089012146, + "learning_rate": 0.00012700104251129387, + "loss": 0.2199, + "step": 28360 + }, + { + "epoch": 1.0954090891540214, + "grad_norm": 2.076706647872925, + "learning_rate": 0.0001269753014916921, + "loss": 0.3652, + "step": 28370 + }, + { + "epoch": 1.0957952044480481, + "grad_norm": 0.13036206364631653, + "learning_rate": 0.0001269495604720903, + "loss": 0.2514, + "step": 28380 + }, + { + "epoch": 1.0961813197420749, + "grad_norm": 0.146321102976799, + "learning_rate": 0.00012692381945248854, + "loss": 0.2721, + "step": 28390 + }, + { + "epoch": 1.0965674350361019, + "grad_norm": 0.8172006607055664, + "learning_rate": 0.00012689807843288672, + "loss": 0.16, + "step": 28400 + }, + { + "epoch": 1.0969535503301286, + "grad_norm": 1.099068522453308, + "learning_rate": 0.00012687233741328494, + "loss": 0.2489, + "step": 28410 + }, + { + "epoch": 1.0973396656241554, + "grad_norm": 0.6757088899612427, + "learning_rate": 0.00012684659639368318, + "loss": 0.1449, + "step": 28420 + }, + { + "epoch": 1.0977257809181822, + "grad_norm": 0.11124458909034729, + "learning_rate": 0.00012682085537408136, + "loss": 0.2202, + "step": 28430 + }, + { + "epoch": 1.098111896212209, + "grad_norm": 2.357466220855713, + "learning_rate": 0.0001267951143544796, + "loss": 0.2669, 
+ "step": 28440 + }, + { + "epoch": 1.0984980115062357, + "grad_norm": 4.569977760314941, + "learning_rate": 0.0001267693733348778, + "loss": 0.3054, + "step": 28450 + }, + { + "epoch": 1.0988841268002625, + "grad_norm": 1.150667667388916, + "learning_rate": 0.00012674363231527603, + "loss": 0.1519, + "step": 28460 + }, + { + "epoch": 1.0992702420942893, + "grad_norm": 2.016101360321045, + "learning_rate": 0.00012671789129567421, + "loss": 0.2307, + "step": 28470 + }, + { + "epoch": 1.0996563573883162, + "grad_norm": 1.2213127613067627, + "learning_rate": 0.00012669215027607243, + "loss": 0.2847, + "step": 28480 + }, + { + "epoch": 1.100042472682343, + "grad_norm": 2.8080902099609375, + "learning_rate": 0.00012666640925647067, + "loss": 0.2295, + "step": 28490 + }, + { + "epoch": 1.1004285879763698, + "grad_norm": 1.4878045320510864, + "learning_rate": 0.00012664066823686885, + "loss": 0.1497, + "step": 28500 + }, + { + "epoch": 1.1008147032703965, + "grad_norm": 0.7453703880310059, + "learning_rate": 0.0001266149272172671, + "loss": 0.2052, + "step": 28510 + }, + { + "epoch": 1.1012008185644233, + "grad_norm": 0.2775499224662781, + "learning_rate": 0.00012658918619766528, + "loss": 0.1576, + "step": 28520 + }, + { + "epoch": 1.10158693385845, + "grad_norm": 1.0527644157409668, + "learning_rate": 0.00012656344517806352, + "loss": 0.2957, + "step": 28530 + }, + { + "epoch": 1.1019730491524768, + "grad_norm": 0.6511454582214355, + "learning_rate": 0.0001265377041584617, + "loss": 0.2336, + "step": 28540 + }, + { + "epoch": 1.1023591644465038, + "grad_norm": 0.32867324352264404, + "learning_rate": 0.00012651196313885992, + "loss": 0.1745, + "step": 28550 + }, + { + "epoch": 1.1027452797405306, + "grad_norm": 2.4408578872680664, + "learning_rate": 0.00012648622211925816, + "loss": 0.2195, + "step": 28560 + }, + { + "epoch": 1.1031313950345574, + "grad_norm": 1.4876518249511719, + "learning_rate": 0.00012646048109965635, + "loss": 0.5123, + "step": 28570 + }, + { + "epoch": 1.1035175103285841, + "grad_norm": 1.9403778314590454, + "learning_rate": 0.0001264347400800546, + "loss": 0.2783, + "step": 28580 + }, + { + "epoch": 1.103903625622611, + "grad_norm": 1.215280532836914, + "learning_rate": 0.00012640899906045277, + "loss": 0.2661, + "step": 28590 + }, + { + "epoch": 1.1042897409166377, + "grad_norm": 0.7179967164993286, + "learning_rate": 0.00012638325804085102, + "loss": 0.1756, + "step": 28600 + }, + { + "epoch": 1.1046758562106644, + "grad_norm": 1.9051718711853027, + "learning_rate": 0.00012635751702124923, + "loss": 0.2724, + "step": 28610 + }, + { + "epoch": 1.1050619715046912, + "grad_norm": 1.5659642219543457, + "learning_rate": 0.00012633177600164744, + "loss": 0.3891, + "step": 28620 + }, + { + "epoch": 1.1054480867987182, + "grad_norm": 0.9210501313209534, + "learning_rate": 0.00012630603498204566, + "loss": 0.1369, + "step": 28630 + }, + { + "epoch": 1.105834202092745, + "grad_norm": 0.5428475141525269, + "learning_rate": 0.00012628029396244384, + "loss": 0.1173, + "step": 28640 + }, + { + "epoch": 1.1062203173867717, + "grad_norm": 1.7399749755859375, + "learning_rate": 0.00012625455294284208, + "loss": 0.3005, + "step": 28650 + }, + { + "epoch": 1.1066064326807985, + "grad_norm": 0.09703828394412994, + "learning_rate": 0.00012622881192324027, + "loss": 0.3346, + "step": 28660 + }, + { + "epoch": 1.1069925479748253, + "grad_norm": 0.5834600329399109, + "learning_rate": 0.0001262030709036385, + "loss": 0.2689, + "step": 28670 + }, + { + "epoch": 1.107378663268852, + "grad_norm": 
1.0677303075790405, + "learning_rate": 0.00012617732988403672, + "loss": 0.5609, + "step": 28680 + }, + { + "epoch": 1.1077647785628788, + "grad_norm": 1.6462419033050537, + "learning_rate": 0.00012615158886443493, + "loss": 0.1991, + "step": 28690 + }, + { + "epoch": 1.1081508938569056, + "grad_norm": 0.933779776096344, + "learning_rate": 0.00012612584784483315, + "loss": 0.2757, + "step": 28700 + }, + { + "epoch": 1.1085370091509326, + "grad_norm": 1.3413206338882446, + "learning_rate": 0.00012610010682523133, + "loss": 0.1798, + "step": 28710 + }, + { + "epoch": 1.1089231244449593, + "grad_norm": 4.479143142700195, + "learning_rate": 0.00012607436580562957, + "loss": 0.1777, + "step": 28720 + }, + { + "epoch": 1.109309239738986, + "grad_norm": 1.5768260955810547, + "learning_rate": 0.0001260486247860278, + "loss": 0.1967, + "step": 28730 + }, + { + "epoch": 1.1096953550330129, + "grad_norm": 2.1206741333007812, + "learning_rate": 0.000126022883766426, + "loss": 0.2399, + "step": 28740 + }, + { + "epoch": 1.1100814703270396, + "grad_norm": 1.4531667232513428, + "learning_rate": 0.00012599714274682421, + "loss": 0.3464, + "step": 28750 + }, + { + "epoch": 1.1104675856210664, + "grad_norm": 1.7988258600234985, + "learning_rate": 0.00012597140172722243, + "loss": 0.285, + "step": 28760 + }, + { + "epoch": 1.1108537009150932, + "grad_norm": 1.094808578491211, + "learning_rate": 0.00012594566070762064, + "loss": 0.2194, + "step": 28770 + }, + { + "epoch": 1.1112398162091202, + "grad_norm": 1.3884358406066895, + "learning_rate": 0.00012591991968801883, + "loss": 0.4267, + "step": 28780 + }, + { + "epoch": 1.111625931503147, + "grad_norm": 2.743480920791626, + "learning_rate": 0.00012589417866841707, + "loss": 0.3333, + "step": 28790 + }, + { + "epoch": 1.1120120467971737, + "grad_norm": 1.0373203754425049, + "learning_rate": 0.00012586843764881528, + "loss": 0.3941, + "step": 28800 + }, + { + "epoch": 1.1123981620912005, + "grad_norm": 2.018101692199707, + "learning_rate": 0.0001258426966292135, + "loss": 0.2928, + "step": 28810 + }, + { + "epoch": 1.1127842773852272, + "grad_norm": 2.567119836807251, + "learning_rate": 0.0001258169556096117, + "loss": 0.3597, + "step": 28820 + }, + { + "epoch": 1.113170392679254, + "grad_norm": 1.1235183477401733, + "learning_rate": 0.00012579121459000992, + "loss": 0.1807, + "step": 28830 + }, + { + "epoch": 1.1135565079732808, + "grad_norm": 1.3740451335906982, + "learning_rate": 0.00012576547357040813, + "loss": 0.2425, + "step": 28840 + }, + { + "epoch": 1.1139426232673075, + "grad_norm": 1.3751258850097656, + "learning_rate": 0.00012573973255080632, + "loss": 0.2116, + "step": 28850 + }, + { + "epoch": 1.1143287385613343, + "grad_norm": 0.9605401158332825, + "learning_rate": 0.00012571399153120456, + "loss": 0.2144, + "step": 28860 + }, + { + "epoch": 1.1147148538553613, + "grad_norm": 0.5127251148223877, + "learning_rate": 0.00012568825051160277, + "loss": 0.1902, + "step": 28870 + }, + { + "epoch": 1.115100969149388, + "grad_norm": 2.6720705032348633, + "learning_rate": 0.000125662509492001, + "loss": 0.3286, + "step": 28880 + }, + { + "epoch": 1.1154870844434148, + "grad_norm": 3.7196574211120605, + "learning_rate": 0.0001256367684723992, + "loss": 0.211, + "step": 28890 + }, + { + "epoch": 1.1158731997374416, + "grad_norm": 0.18796740472316742, + "learning_rate": 0.00012561102745279741, + "loss": 0.3351, + "step": 28900 + }, + { + "epoch": 1.1162593150314684, + "grad_norm": 1.3164410591125488, + "learning_rate": 0.00012558528643319563, + "loss": 
0.1771, + "step": 28910 + }, + { + "epoch": 1.1166454303254951, + "grad_norm": 0.9552701115608215, + "learning_rate": 0.00012555954541359384, + "loss": 0.3302, + "step": 28920 + }, + { + "epoch": 1.117031545619522, + "grad_norm": 1.0072277784347534, + "learning_rate": 0.00012553380439399205, + "loss": 0.2936, + "step": 28930 + }, + { + "epoch": 1.1174176609135489, + "grad_norm": 0.24307872354984283, + "learning_rate": 0.00012550806337439027, + "loss": 0.2034, + "step": 28940 + }, + { + "epoch": 1.1178037762075757, + "grad_norm": 2.985166311264038, + "learning_rate": 0.00012548232235478848, + "loss": 0.2628, + "step": 28950 + }, + { + "epoch": 1.1181898915016024, + "grad_norm": 0.46555295586586, + "learning_rate": 0.0001254565813351867, + "loss": 0.1904, + "step": 28960 + }, + { + "epoch": 1.1185760067956292, + "grad_norm": 0.7813409566879272, + "learning_rate": 0.0001254308403155849, + "loss": 0.33, + "step": 28970 + }, + { + "epoch": 1.118962122089656, + "grad_norm": 2.4459455013275146, + "learning_rate": 0.00012540509929598312, + "loss": 0.3183, + "step": 28980 + }, + { + "epoch": 1.1193482373836827, + "grad_norm": 0.5164415240287781, + "learning_rate": 0.00012537935827638133, + "loss": 0.3959, + "step": 28990 + }, + { + "epoch": 1.1197343526777095, + "grad_norm": 0.3853105306625366, + "learning_rate": 0.00012535361725677955, + "loss": 0.1426, + "step": 29000 + }, + { + "epoch": 1.1201204679717365, + "grad_norm": 0.5817530751228333, + "learning_rate": 0.00012532787623717776, + "loss": 0.2218, + "step": 29010 + }, + { + "epoch": 1.1205065832657632, + "grad_norm": 1.264248251914978, + "learning_rate": 0.00012530213521757597, + "loss": 0.3002, + "step": 29020 + }, + { + "epoch": 1.12089269855979, + "grad_norm": 1.999251127243042, + "learning_rate": 0.0001252763941979742, + "loss": 0.3573, + "step": 29030 + }, + { + "epoch": 1.1212788138538168, + "grad_norm": 1.760797381401062, + "learning_rate": 0.0001252506531783724, + "loss": 0.3829, + "step": 29040 + }, + { + "epoch": 1.1216649291478435, + "grad_norm": 1.4757565259933472, + "learning_rate": 0.00012522491215877061, + "loss": 0.3407, + "step": 29050 + }, + { + "epoch": 1.1220510444418703, + "grad_norm": 0.08838029205799103, + "learning_rate": 0.00012519917113916883, + "loss": 0.1899, + "step": 29060 + }, + { + "epoch": 1.122437159735897, + "grad_norm": 0.6416037678718567, + "learning_rate": 0.00012517343011956704, + "loss": 0.277, + "step": 29070 + }, + { + "epoch": 1.1228232750299239, + "grad_norm": 2.9282822608947754, + "learning_rate": 0.00012514768909996525, + "loss": 0.3001, + "step": 29080 + }, + { + "epoch": 1.1232093903239506, + "grad_norm": 2.5984582901000977, + "learning_rate": 0.00012512194808036347, + "loss": 0.2326, + "step": 29090 + }, + { + "epoch": 1.1235955056179776, + "grad_norm": 1.0622142553329468, + "learning_rate": 0.00012509620706076168, + "loss": 0.1494, + "step": 29100 + }, + { + "epoch": 1.1239816209120044, + "grad_norm": 1.5386018753051758, + "learning_rate": 0.0001250704660411599, + "loss": 0.3392, + "step": 29110 + }, + { + "epoch": 1.1243677362060311, + "grad_norm": 0.8901385068893433, + "learning_rate": 0.0001250447250215581, + "loss": 0.2651, + "step": 29120 + }, + { + "epoch": 1.124753851500058, + "grad_norm": 2.0237483978271484, + "learning_rate": 0.00012501898400195632, + "loss": 0.3764, + "step": 29130 + }, + { + "epoch": 1.1251399667940847, + "grad_norm": 1.8989384174346924, + "learning_rate": 0.00012499324298235453, + "loss": 0.2713, + "step": 29140 + }, + { + "epoch": 1.1255260820881114, + 
"grad_norm": 2.704643487930298, + "learning_rate": 0.00012496750196275275, + "loss": 0.1362, + "step": 29150 + }, + { + "epoch": 1.1259121973821382, + "grad_norm": 1.2598273754119873, + "learning_rate": 0.00012494176094315096, + "loss": 0.331, + "step": 29160 + }, + { + "epoch": 1.1262983126761652, + "grad_norm": 2.2073826789855957, + "learning_rate": 0.00012491601992354917, + "loss": 0.209, + "step": 29170 + }, + { + "epoch": 1.126684427970192, + "grad_norm": 0.8338522911071777, + "learning_rate": 0.0001248902789039474, + "loss": 0.2583, + "step": 29180 + }, + { + "epoch": 1.1270705432642187, + "grad_norm": 0.49807825684547424, + "learning_rate": 0.0001248645378843456, + "loss": 0.2702, + "step": 29190 + }, + { + "epoch": 1.1274566585582455, + "grad_norm": 2.2561802864074707, + "learning_rate": 0.00012483879686474381, + "loss": 0.2667, + "step": 29200 + }, + { + "epoch": 1.1278427738522723, + "grad_norm": 0.6450731754302979, + "learning_rate": 0.00012481305584514203, + "loss": 0.2592, + "step": 29210 + }, + { + "epoch": 1.128228889146299, + "grad_norm": 1.214436650276184, + "learning_rate": 0.00012478731482554024, + "loss": 0.2285, + "step": 29220 + }, + { + "epoch": 1.1286150044403258, + "grad_norm": 1.2677173614501953, + "learning_rate": 0.00012476157380593848, + "loss": 0.1732, + "step": 29230 + }, + { + "epoch": 1.1290011197343528, + "grad_norm": 2.034266471862793, + "learning_rate": 0.00012473583278633667, + "loss": 0.3679, + "step": 29240 + }, + { + "epoch": 1.1293872350283796, + "grad_norm": 2.005913496017456, + "learning_rate": 0.00012471009176673488, + "loss": 0.2703, + "step": 29250 + }, + { + "epoch": 1.1297733503224063, + "grad_norm": 1.818928599357605, + "learning_rate": 0.0001246843507471331, + "loss": 0.3355, + "step": 29260 + }, + { + "epoch": 1.130159465616433, + "grad_norm": 0.5393241047859192, + "learning_rate": 0.0001246586097275313, + "loss": 0.1043, + "step": 29270 + }, + { + "epoch": 1.1305455809104599, + "grad_norm": 0.5508402585983276, + "learning_rate": 0.00012463286870792952, + "loss": 0.2538, + "step": 29280 + }, + { + "epoch": 1.1309316962044866, + "grad_norm": 1.1734035015106201, + "learning_rate": 0.00012460712768832773, + "loss": 0.3082, + "step": 29290 + }, + { + "epoch": 1.1313178114985134, + "grad_norm": 0.977611243724823, + "learning_rate": 0.00012458138666872597, + "loss": 0.1962, + "step": 29300 + }, + { + "epoch": 1.1317039267925402, + "grad_norm": 0.9720492362976074, + "learning_rate": 0.00012455564564912416, + "loss": 0.3462, + "step": 29310 + }, + { + "epoch": 1.132090042086567, + "grad_norm": 1.20888352394104, + "learning_rate": 0.00012452990462952237, + "loss": 0.1229, + "step": 29320 + }, + { + "epoch": 1.132476157380594, + "grad_norm": 0.7969954609870911, + "learning_rate": 0.0001245041636099206, + "loss": 0.4213, + "step": 29330 + }, + { + "epoch": 1.1328622726746207, + "grad_norm": 0.07595942914485931, + "learning_rate": 0.0001244784225903188, + "loss": 0.2148, + "step": 29340 + }, + { + "epoch": 1.1332483879686475, + "grad_norm": 0.15456156432628632, + "learning_rate": 0.000124452681570717, + "loss": 0.4713, + "step": 29350 + }, + { + "epoch": 1.1336345032626742, + "grad_norm": 1.232366681098938, + "learning_rate": 0.00012442694055111523, + "loss": 0.2446, + "step": 29360 + }, + { + "epoch": 1.134020618556701, + "grad_norm": 2.3669209480285645, + "learning_rate": 0.00012440119953151347, + "loss": 0.3025, + "step": 29370 + }, + { + "epoch": 1.1344067338507278, + "grad_norm": 4.639179229736328, + "learning_rate": 0.00012437545851191165, + 
"loss": 0.337, + "step": 29380 + }, + { + "epoch": 1.1347928491447545, + "grad_norm": 0.700533926486969, + "learning_rate": 0.0001243497174923099, + "loss": 0.1747, + "step": 29390 + }, + { + "epoch": 1.1351789644387815, + "grad_norm": 0.5738794803619385, + "learning_rate": 0.00012432397647270808, + "loss": 0.357, + "step": 29400 + }, + { + "epoch": 1.1355650797328083, + "grad_norm": 2.620095729827881, + "learning_rate": 0.0001242982354531063, + "loss": 0.2885, + "step": 29410 + }, + { + "epoch": 1.135951195026835, + "grad_norm": 1.5040203332901, + "learning_rate": 0.00012427249443350453, + "loss": 0.2481, + "step": 29420 + }, + { + "epoch": 1.1363373103208618, + "grad_norm": 0.7409051060676575, + "learning_rate": 0.00012424675341390272, + "loss": 0.3365, + "step": 29430 + }, + { + "epoch": 1.1367234256148886, + "grad_norm": 0.6730226874351501, + "learning_rate": 0.00012422101239430096, + "loss": 0.1508, + "step": 29440 + }, + { + "epoch": 1.1371095409089154, + "grad_norm": 2.1389102935791016, + "learning_rate": 0.00012419527137469915, + "loss": 0.3832, + "step": 29450 + }, + { + "epoch": 1.1374956562029421, + "grad_norm": 0.5423761606216431, + "learning_rate": 0.0001241695303550974, + "loss": 0.2942, + "step": 29460 + }, + { + "epoch": 1.1378817714969691, + "grad_norm": 2.6076724529266357, + "learning_rate": 0.00012414378933549557, + "loss": 0.2291, + "step": 29470 + }, + { + "epoch": 1.1382678867909959, + "grad_norm": 1.0197224617004395, + "learning_rate": 0.0001241180483158938, + "loss": 0.2309, + "step": 29480 + }, + { + "epoch": 1.1386540020850227, + "grad_norm": 1.4430413246154785, + "learning_rate": 0.00012409230729629203, + "loss": 0.258, + "step": 29490 + }, + { + "epoch": 1.1390401173790494, + "grad_norm": 1.43483304977417, + "learning_rate": 0.0001240665662766902, + "loss": 0.1931, + "step": 29500 + }, + { + "epoch": 1.1394262326730762, + "grad_norm": 0.7181301116943359, + "learning_rate": 0.00012404082525708845, + "loss": 0.2675, + "step": 29510 + }, + { + "epoch": 1.139812347967103, + "grad_norm": 2.2020421028137207, + "learning_rate": 0.00012401508423748664, + "loss": 0.5094, + "step": 29520 + }, + { + "epoch": 1.1401984632611297, + "grad_norm": 0.35750746726989746, + "learning_rate": 0.00012398934321788488, + "loss": 0.3825, + "step": 29530 + }, + { + "epoch": 1.1405845785551565, + "grad_norm": 2.1792123317718506, + "learning_rate": 0.0001239636021982831, + "loss": 0.4259, + "step": 29540 + }, + { + "epoch": 1.1409706938491833, + "grad_norm": 1.2699453830718994, + "learning_rate": 0.00012393786117868128, + "loss": 0.2524, + "step": 29550 + }, + { + "epoch": 1.1413568091432102, + "grad_norm": 4.232237339019775, + "learning_rate": 0.00012391212015907952, + "loss": 0.4191, + "step": 29560 + }, + { + "epoch": 1.141742924437237, + "grad_norm": 1.5009098052978516, + "learning_rate": 0.0001238863791394777, + "loss": 0.5748, + "step": 29570 + }, + { + "epoch": 1.1421290397312638, + "grad_norm": 0.8117336630821228, + "learning_rate": 0.00012386063811987595, + "loss": 0.2309, + "step": 29580 + }, + { + "epoch": 1.1425151550252906, + "grad_norm": 0.6417378187179565, + "learning_rate": 0.00012383489710027413, + "loss": 0.3011, + "step": 29590 + }, + { + "epoch": 1.1429012703193173, + "grad_norm": 0.19958554208278656, + "learning_rate": 0.00012380915608067237, + "loss": 0.4943, + "step": 29600 + }, + { + "epoch": 1.143287385613344, + "grad_norm": 1.7980111837387085, + "learning_rate": 0.0001237834150610706, + "loss": 0.2498, + "step": 29610 + }, + { + "epoch": 1.1436735009073709, + 
"grad_norm": 0.7506774663925171, + "learning_rate": 0.00012375767404146877, + "loss": 0.1622, + "step": 29620 + }, + { + "epoch": 1.1440596162013978, + "grad_norm": 0.4346953332424164, + "learning_rate": 0.000123731933021867, + "loss": 0.2608, + "step": 29630 + }, + { + "epoch": 1.1444457314954246, + "grad_norm": 1.4491907358169556, + "learning_rate": 0.0001237061920022652, + "loss": 0.4674, + "step": 29640 + }, + { + "epoch": 1.1448318467894514, + "grad_norm": 0.26328304409980774, + "learning_rate": 0.00012368045098266344, + "loss": 0.4287, + "step": 29650 + }, + { + "epoch": 1.1452179620834781, + "grad_norm": 0.6601302027702332, + "learning_rate": 0.00012365470996306163, + "loss": 0.3235, + "step": 29660 + }, + { + "epoch": 1.145604077377505, + "grad_norm": 3.9714503288269043, + "learning_rate": 0.00012362896894345987, + "loss": 0.2969, + "step": 29670 + }, + { + "epoch": 1.1459901926715317, + "grad_norm": 2.6341910362243652, + "learning_rate": 0.00012360322792385808, + "loss": 0.2771, + "step": 29680 + }, + { + "epoch": 1.1463763079655584, + "grad_norm": 0.04610513150691986, + "learning_rate": 0.00012357748690425627, + "loss": 0.2286, + "step": 29690 + }, + { + "epoch": 1.1467624232595852, + "grad_norm": 0.18892113864421844, + "learning_rate": 0.0001235517458846545, + "loss": 0.1821, + "step": 29700 + }, + { + "epoch": 1.147148538553612, + "grad_norm": 2.186973810195923, + "learning_rate": 0.0001235260048650527, + "loss": 0.2582, + "step": 29710 + }, + { + "epoch": 1.147534653847639, + "grad_norm": 0.23074299097061157, + "learning_rate": 0.00012350026384545093, + "loss": 0.3134, + "step": 29720 + }, + { + "epoch": 1.1479207691416657, + "grad_norm": 4.021410942077637, + "learning_rate": 0.00012347452282584915, + "loss": 0.3161, + "step": 29730 + }, + { + "epoch": 1.1483068844356925, + "grad_norm": 1.0262142419815063, + "learning_rate": 0.00012344878180624736, + "loss": 0.2034, + "step": 29740 + }, + { + "epoch": 1.1486929997297193, + "grad_norm": 1.4075149297714233, + "learning_rate": 0.00012342304078664557, + "loss": 0.2799, + "step": 29750 + }, + { + "epoch": 1.149079115023746, + "grad_norm": 0.9609633684158325, + "learning_rate": 0.00012339729976704376, + "loss": 0.1358, + "step": 29760 + }, + { + "epoch": 1.1494652303177728, + "grad_norm": 2.0923006534576416, + "learning_rate": 0.000123371558747442, + "loss": 0.224, + "step": 29770 + }, + { + "epoch": 1.1498513456117996, + "grad_norm": 0.4723201096057892, + "learning_rate": 0.00012334581772784019, + "loss": 0.1231, + "step": 29780 + }, + { + "epoch": 1.1502374609058266, + "grad_norm": 0.2070016860961914, + "learning_rate": 0.00012332007670823843, + "loss": 0.2998, + "step": 29790 + }, + { + "epoch": 1.1506235761998533, + "grad_norm": 0.3871285021305084, + "learning_rate": 0.00012329433568863664, + "loss": 0.2776, + "step": 29800 + }, + { + "epoch": 1.15100969149388, + "grad_norm": 0.6765030026435852, + "learning_rate": 0.00012326859466903485, + "loss": 0.6047, + "step": 29810 + }, + { + "epoch": 1.1513958067879069, + "grad_norm": 0.22907795011997223, + "learning_rate": 0.00012324285364943307, + "loss": 0.1573, + "step": 29820 + }, + { + "epoch": 1.1517819220819336, + "grad_norm": 0.3838706612586975, + "learning_rate": 0.00012321711262983128, + "loss": 0.1877, + "step": 29830 + }, + { + "epoch": 1.1521680373759604, + "grad_norm": 1.6967968940734863, + "learning_rate": 0.0001231913716102295, + "loss": 0.2314, + "step": 29840 + }, + { + "epoch": 1.1525541526699872, + "grad_norm": 0.6754477024078369, + "learning_rate": 
0.00012316563059062768, + "loss": 0.1997, + "step": 29850 + }, + { + "epoch": 1.1529402679640142, + "grad_norm": 0.8980739712715149, + "learning_rate": 0.00012313988957102592, + "loss": 0.2729, + "step": 29860 + }, + { + "epoch": 1.153326383258041, + "grad_norm": 2.7968523502349854, + "learning_rate": 0.00012311414855142413, + "loss": 0.2805, + "step": 29870 + }, + { + "epoch": 1.1537124985520677, + "grad_norm": 1.5867468118667603, + "learning_rate": 0.00012308840753182235, + "loss": 0.3489, + "step": 29880 + }, + { + "epoch": 1.1540986138460945, + "grad_norm": 3.077193021774292, + "learning_rate": 0.00012306266651222056, + "loss": 0.3397, + "step": 29890 + }, + { + "epoch": 1.1544847291401212, + "grad_norm": 0.49896860122680664, + "learning_rate": 0.00012303692549261877, + "loss": 0.1215, + "step": 29900 + }, + { + "epoch": 1.154870844434148, + "grad_norm": 0.9598873257637024, + "learning_rate": 0.00012301118447301699, + "loss": 0.1618, + "step": 29910 + }, + { + "epoch": 1.1552569597281748, + "grad_norm": 1.2659916877746582, + "learning_rate": 0.0001229854434534152, + "loss": 0.1623, + "step": 29920 + }, + { + "epoch": 1.1556430750222015, + "grad_norm": 1.4009841680526733, + "learning_rate": 0.0001229597024338134, + "loss": 0.2205, + "step": 29930 + }, + { + "epoch": 1.1560291903162283, + "grad_norm": 0.5847800970077515, + "learning_rate": 0.00012293396141421163, + "loss": 0.3173, + "step": 29940 + }, + { + "epoch": 1.1564153056102553, + "grad_norm": 1.2930784225463867, + "learning_rate": 0.00012290822039460984, + "loss": 0.3052, + "step": 29950 + }, + { + "epoch": 1.156801420904282, + "grad_norm": 0.7307919859886169, + "learning_rate": 0.00012288247937500805, + "loss": 0.2293, + "step": 29960 + }, + { + "epoch": 1.1571875361983088, + "grad_norm": 0.2120385468006134, + "learning_rate": 0.00012285673835540627, + "loss": 0.2006, + "step": 29970 + }, + { + "epoch": 1.1575736514923356, + "grad_norm": 1.0932674407958984, + "learning_rate": 0.00012283099733580448, + "loss": 0.1716, + "step": 29980 + }, + { + "epoch": 1.1579597667863624, + "grad_norm": 0.7607210874557495, + "learning_rate": 0.0001228052563162027, + "loss": 0.346, + "step": 29990 + }, + { + "epoch": 1.1583458820803891, + "grad_norm": 4.455061435699463, + "learning_rate": 0.0001227795152966009, + "loss": 0.2588, + "step": 30000 + }, + { + "epoch": 1.158731997374416, + "grad_norm": 2.0812416076660156, + "learning_rate": 0.00012275377427699912, + "loss": 0.2958, + "step": 30010 + }, + { + "epoch": 1.159118112668443, + "grad_norm": 1.8883334398269653, + "learning_rate": 0.00012272803325739733, + "loss": 0.1687, + "step": 30020 + }, + { + "epoch": 1.1595042279624697, + "grad_norm": 1.3935341835021973, + "learning_rate": 0.00012270229223779555, + "loss": 0.2276, + "step": 30030 + }, + { + "epoch": 1.1598903432564964, + "grad_norm": 2.5193631649017334, + "learning_rate": 0.00012267655121819376, + "loss": 0.3598, + "step": 30040 + }, + { + "epoch": 1.1602764585505232, + "grad_norm": 1.9172290563583374, + "learning_rate": 0.00012265081019859197, + "loss": 0.3294, + "step": 30050 + }, + { + "epoch": 1.16066257384455, + "grad_norm": 1.2220836877822876, + "learning_rate": 0.00012262506917899019, + "loss": 0.2053, + "step": 30060 + }, + { + "epoch": 1.1610486891385767, + "grad_norm": 1.9804691076278687, + "learning_rate": 0.0001225993281593884, + "loss": 0.2091, + "step": 30070 + }, + { + "epoch": 1.1614348044326035, + "grad_norm": 1.2194398641586304, + "learning_rate": 0.0001225735871397866, + "loss": 0.2535, + "step": 30080 + }, + { + 
"epoch": 1.1618209197266305, + "grad_norm": 0.7828000783920288, + "learning_rate": 0.00012254784612018483, + "loss": 0.436, + "step": 30090 + }, + { + "epoch": 1.1622070350206573, + "grad_norm": 0.26130637526512146, + "learning_rate": 0.00012252210510058304, + "loss": 0.3351, + "step": 30100 + }, + { + "epoch": 1.162593150314684, + "grad_norm": 0.9175068736076355, + "learning_rate": 0.00012249636408098125, + "loss": 0.0682, + "step": 30110 + }, + { + "epoch": 1.1629792656087108, + "grad_norm": 0.35828933119773865, + "learning_rate": 0.00012247062306137947, + "loss": 0.3081, + "step": 30120 + }, + { + "epoch": 1.1633653809027376, + "grad_norm": 1.2477439641952515, + "learning_rate": 0.00012244488204177768, + "loss": 0.2522, + "step": 30130 + }, + { + "epoch": 1.1637514961967643, + "grad_norm": 1.0479830503463745, + "learning_rate": 0.0001224191410221759, + "loss": 0.3183, + "step": 30140 + }, + { + "epoch": 1.164137611490791, + "grad_norm": 0.9295257329940796, + "learning_rate": 0.0001223934000025741, + "loss": 0.149, + "step": 30150 + }, + { + "epoch": 1.1645237267848179, + "grad_norm": 1.4081065654754639, + "learning_rate": 0.00012236765898297232, + "loss": 0.2403, + "step": 30160 + }, + { + "epoch": 1.1649098420788446, + "grad_norm": 0.6170324087142944, + "learning_rate": 0.00012234191796337053, + "loss": 0.3526, + "step": 30170 + }, + { + "epoch": 1.1652959573728716, + "grad_norm": 3.095670461654663, + "learning_rate": 0.00012231617694376875, + "loss": 0.2873, + "step": 30180 + }, + { + "epoch": 1.1656820726668984, + "grad_norm": 1.453447937965393, + "learning_rate": 0.00012229043592416696, + "loss": 0.2753, + "step": 30190 + }, + { + "epoch": 1.1660681879609252, + "grad_norm": 1.008033275604248, + "learning_rate": 0.00012226469490456517, + "loss": 0.2926, + "step": 30200 + }, + { + "epoch": 1.166454303254952, + "grad_norm": 2.122175693511963, + "learning_rate": 0.00012223895388496339, + "loss": 0.2432, + "step": 30210 + }, + { + "epoch": 1.1668404185489787, + "grad_norm": 1.4835058450698853, + "learning_rate": 0.0001222132128653616, + "loss": 0.2891, + "step": 30220 + }, + { + "epoch": 1.1672265338430055, + "grad_norm": 1.7386225461959839, + "learning_rate": 0.00012218747184575984, + "loss": 0.2371, + "step": 30230 + }, + { + "epoch": 1.1676126491370322, + "grad_norm": 2.1769731044769287, + "learning_rate": 0.00012216173082615803, + "loss": 0.2798, + "step": 30240 + }, + { + "epoch": 1.1679987644310592, + "grad_norm": 3.3941900730133057, + "learning_rate": 0.00012213598980655624, + "loss": 0.4021, + "step": 30250 + }, + { + "epoch": 1.168384879725086, + "grad_norm": 1.099238395690918, + "learning_rate": 0.00012211024878695445, + "loss": 0.2908, + "step": 30260 + }, + { + "epoch": 1.1687709950191127, + "grad_norm": 0.6718109250068665, + "learning_rate": 0.00012208450776735267, + "loss": 0.1791, + "step": 30270 + }, + { + "epoch": 1.1691571103131395, + "grad_norm": 0.25414201617240906, + "learning_rate": 0.00012205876674775088, + "loss": 0.1322, + "step": 30280 + }, + { + "epoch": 1.1695432256071663, + "grad_norm": 2.1115262508392334, + "learning_rate": 0.00012203302572814909, + "loss": 0.3538, + "step": 30290 + }, + { + "epoch": 1.169929340901193, + "grad_norm": 2.3652501106262207, + "learning_rate": 0.00012200728470854732, + "loss": 0.3229, + "step": 30300 + }, + { + "epoch": 1.1703154561952198, + "grad_norm": 1.4749270677566528, + "learning_rate": 0.00012198154368894552, + "loss": 0.1251, + "step": 30310 + }, + { + "epoch": 1.1707015714892468, + "grad_norm": 0.6566292643547058, + 
"learning_rate": 0.00012195580266934375, + "loss": 0.2527, + "step": 30320 + }, + { + "epoch": 1.1710876867832736, + "grad_norm": 1.9602152109146118, + "learning_rate": 0.00012193006164974195, + "loss": 0.1851, + "step": 30330 + }, + { + "epoch": 1.1714738020773003, + "grad_norm": 1.6631299257278442, + "learning_rate": 0.00012190432063014017, + "loss": 0.4715, + "step": 30340 + }, + { + "epoch": 1.171859917371327, + "grad_norm": 1.1554430723190308, + "learning_rate": 0.00012187857961053839, + "loss": 0.361, + "step": 30350 + }, + { + "epoch": 1.1722460326653539, + "grad_norm": 2.5738513469696045, + "learning_rate": 0.00012185283859093659, + "loss": 0.3661, + "step": 30360 + }, + { + "epoch": 1.1726321479593806, + "grad_norm": 0.5713154077529907, + "learning_rate": 0.00012182709757133481, + "loss": 0.2468, + "step": 30370 + }, + { + "epoch": 1.1730182632534074, + "grad_norm": 0.7371454834938049, + "learning_rate": 0.00012180135655173301, + "loss": 0.2949, + "step": 30380 + }, + { + "epoch": 1.1734043785474342, + "grad_norm": 2.5442118644714355, + "learning_rate": 0.00012177561553213124, + "loss": 0.2305, + "step": 30390 + }, + { + "epoch": 1.173790493841461, + "grad_norm": 1.684951663017273, + "learning_rate": 0.00012174987451252944, + "loss": 0.2535, + "step": 30400 + }, + { + "epoch": 1.174176609135488, + "grad_norm": 1.6874382495880127, + "learning_rate": 0.00012172413349292767, + "loss": 0.4069, + "step": 30410 + }, + { + "epoch": 1.1745627244295147, + "grad_norm": 0.46226370334625244, + "learning_rate": 0.00012169839247332588, + "loss": 0.221, + "step": 30420 + }, + { + "epoch": 1.1749488397235415, + "grad_norm": 3.5687646865844727, + "learning_rate": 0.00012167265145372408, + "loss": 0.2749, + "step": 30430 + }, + { + "epoch": 1.1753349550175682, + "grad_norm": 2.9427647590637207, + "learning_rate": 0.0001216469104341223, + "loss": 0.2621, + "step": 30440 + }, + { + "epoch": 1.175721070311595, + "grad_norm": 1.3569320440292358, + "learning_rate": 0.0001216211694145205, + "loss": 0.1711, + "step": 30450 + }, + { + "epoch": 1.1761071856056218, + "grad_norm": 0.5906672477722168, + "learning_rate": 0.00012159542839491873, + "loss": 0.3111, + "step": 30460 + }, + { + "epoch": 1.1764933008996485, + "grad_norm": 1.7809525728225708, + "learning_rate": 0.00012156968737531693, + "loss": 0.198, + "step": 30470 + }, + { + "epoch": 1.1768794161936755, + "grad_norm": 1.5865052938461304, + "learning_rate": 0.00012154394635571516, + "loss": 0.3589, + "step": 30480 + }, + { + "epoch": 1.1772655314877023, + "grad_norm": 0.4852294623851776, + "learning_rate": 0.00012151820533611337, + "loss": 0.203, + "step": 30490 + }, + { + "epoch": 1.177651646781729, + "grad_norm": 4.437458515167236, + "learning_rate": 0.00012149246431651158, + "loss": 0.2886, + "step": 30500 + }, + { + "epoch": 1.1780377620757558, + "grad_norm": 2.204751491546631, + "learning_rate": 0.0001214667232969098, + "loss": 0.313, + "step": 30510 + }, + { + "epoch": 1.1784238773697826, + "grad_norm": 0.9356504678726196, + "learning_rate": 0.000121440982277308, + "loss": 0.2813, + "step": 30520 + }, + { + "epoch": 1.1788099926638094, + "grad_norm": 0.06744952499866486, + "learning_rate": 0.00012141524125770622, + "loss": 0.2009, + "step": 30530 + }, + { + "epoch": 1.1791961079578361, + "grad_norm": 0.49779242277145386, + "learning_rate": 0.00012138950023810445, + "loss": 0.1822, + "step": 30540 + }, + { + "epoch": 1.1795822232518631, + "grad_norm": 1.1115593910217285, + "learning_rate": 0.00012136375921850265, + "loss": 0.4164, + "step": 
30550 + }, + { + "epoch": 1.17996833854589, + "grad_norm": 0.2939944267272949, + "learning_rate": 0.00012133801819890086, + "loss": 0.1356, + "step": 30560 + }, + { + "epoch": 1.1803544538399167, + "grad_norm": 0.15905381739139557, + "learning_rate": 0.00012131227717929908, + "loss": 0.3479, + "step": 30570 + }, + { + "epoch": 1.1807405691339434, + "grad_norm": 3.146277666091919, + "learning_rate": 0.00012128653615969729, + "loss": 0.2722, + "step": 30580 + }, + { + "epoch": 1.1811266844279702, + "grad_norm": 0.60884690284729, + "learning_rate": 0.00012126079514009549, + "loss": 0.1778, + "step": 30590 + }, + { + "epoch": 1.181512799721997, + "grad_norm": 0.053204573690891266, + "learning_rate": 0.00012123505412049372, + "loss": 0.1397, + "step": 30600 + }, + { + "epoch": 1.1818989150160237, + "grad_norm": 1.2908227443695068, + "learning_rate": 0.00012120931310089194, + "loss": 0.1552, + "step": 30610 + }, + { + "epoch": 1.1822850303100505, + "grad_norm": 1.489575743675232, + "learning_rate": 0.00012118357208129014, + "loss": 0.2965, + "step": 30620 + }, + { + "epoch": 1.1826711456040773, + "grad_norm": 2.4103262424468994, + "learning_rate": 0.00012115783106168837, + "loss": 0.2947, + "step": 30630 + }, + { + "epoch": 1.1830572608981043, + "grad_norm": 0.3685878813266754, + "learning_rate": 0.00012113209004208657, + "loss": 0.2144, + "step": 30640 + }, + { + "epoch": 1.183443376192131, + "grad_norm": 0.29953858256340027, + "learning_rate": 0.00012110634902248478, + "loss": 0.2281, + "step": 30650 + }, + { + "epoch": 1.1838294914861578, + "grad_norm": 0.4792311191558838, + "learning_rate": 0.00012108060800288298, + "loss": 0.2727, + "step": 30660 + }, + { + "epoch": 1.1842156067801846, + "grad_norm": 0.17143972218036652, + "learning_rate": 0.00012105486698328121, + "loss": 0.2096, + "step": 30670 + }, + { + "epoch": 1.1846017220742113, + "grad_norm": 0.43678683042526245, + "learning_rate": 0.00012102912596367944, + "loss": 0.3818, + "step": 30680 + }, + { + "epoch": 1.184987837368238, + "grad_norm": 2.0908610820770264, + "learning_rate": 0.00012100338494407764, + "loss": 0.2496, + "step": 30690 + }, + { + "epoch": 1.1853739526622649, + "grad_norm": 1.5331153869628906, + "learning_rate": 0.00012097764392447586, + "loss": 0.2772, + "step": 30700 + }, + { + "epoch": 1.1857600679562919, + "grad_norm": 0.6948639154434204, + "learning_rate": 0.00012095190290487406, + "loss": 0.3896, + "step": 30710 + }, + { + "epoch": 1.1861461832503186, + "grad_norm": 2.3802030086517334, + "learning_rate": 0.00012092616188527228, + "loss": 0.1998, + "step": 30720 + }, + { + "epoch": 1.1865322985443454, + "grad_norm": 2.1358511447906494, + "learning_rate": 0.0001209004208656705, + "loss": 0.2865, + "step": 30730 + }, + { + "epoch": 1.1869184138383722, + "grad_norm": 0.8761110305786133, + "learning_rate": 0.0001208746798460687, + "loss": 0.2574, + "step": 30740 + }, + { + "epoch": 1.187304529132399, + "grad_norm": 0.6293401718139648, + "learning_rate": 0.00012084893882646693, + "loss": 0.4006, + "step": 30750 + }, + { + "epoch": 1.1876906444264257, + "grad_norm": 2.4534378051757812, + "learning_rate": 0.00012082319780686513, + "loss": 0.2281, + "step": 30760 + }, + { + "epoch": 1.1880767597204525, + "grad_norm": 0.38392508029937744, + "learning_rate": 0.00012079745678726336, + "loss": 0.3055, + "step": 30770 + }, + { + "epoch": 1.1884628750144794, + "grad_norm": 1.278108835220337, + "learning_rate": 0.00012077171576766156, + "loss": 0.2376, + "step": 30780 + }, + { + "epoch": 1.1888489903085062, + "grad_norm": 
0.2575186789035797, + "learning_rate": 0.00012074597474805977, + "loss": 0.256, + "step": 30790 + }, + { + "epoch": 1.189235105602533, + "grad_norm": 4.043684959411621, + "learning_rate": 0.000120720233728458, + "loss": 0.3601, + "step": 30800 + }, + { + "epoch": 1.1896212208965598, + "grad_norm": 0.2561960220336914, + "learning_rate": 0.0001206944927088562, + "loss": 0.2698, + "step": 30810 + }, + { + "epoch": 1.1900073361905865, + "grad_norm": 0.3477579355239868, + "learning_rate": 0.00012066875168925442, + "loss": 0.3442, + "step": 30820 + }, + { + "epoch": 1.1903934514846133, + "grad_norm": 1.0357879400253296, + "learning_rate": 0.00012064301066965262, + "loss": 0.2561, + "step": 30830 + }, + { + "epoch": 1.19077956677864, + "grad_norm": 0.14653460681438446, + "learning_rate": 0.00012061726965005085, + "loss": 0.3072, + "step": 30840 + }, + { + "epoch": 1.1911656820726668, + "grad_norm": 1.2516767978668213, + "learning_rate": 0.00012059152863044906, + "loss": 0.4203, + "step": 30850 + }, + { + "epoch": 1.1915517973666936, + "grad_norm": 0.41542065143585205, + "learning_rate": 0.00012056578761084726, + "loss": 0.2018, + "step": 30860 + }, + { + "epoch": 1.1919379126607206, + "grad_norm": 1.7187318801879883, + "learning_rate": 0.00012054004659124549, + "loss": 0.097, + "step": 30870 + }, + { + "epoch": 1.1923240279547473, + "grad_norm": 0.3913117051124573, + "learning_rate": 0.00012051430557164369, + "loss": 0.1655, + "step": 30880 + }, + { + "epoch": 1.1927101432487741, + "grad_norm": 3.708519697189331, + "learning_rate": 0.00012048856455204192, + "loss": 0.2315, + "step": 30890 + }, + { + "epoch": 1.1930962585428009, + "grad_norm": 1.506726861000061, + "learning_rate": 0.00012046282353244012, + "loss": 0.1712, + "step": 30900 + }, + { + "epoch": 1.1934823738368276, + "grad_norm": 1.5069276094436646, + "learning_rate": 0.00012043708251283834, + "loss": 0.399, + "step": 30910 + }, + { + "epoch": 1.1938684891308544, + "grad_norm": 0.09606973081827164, + "learning_rate": 0.00012041134149323656, + "loss": 0.1226, + "step": 30920 + }, + { + "epoch": 1.1942546044248812, + "grad_norm": 1.7927659749984741, + "learning_rate": 0.00012038560047363476, + "loss": 0.0975, + "step": 30930 + }, + { + "epoch": 1.1946407197189082, + "grad_norm": 2.2988364696502686, + "learning_rate": 0.00012035985945403298, + "loss": 0.2742, + "step": 30940 + }, + { + "epoch": 1.195026835012935, + "grad_norm": 0.4412599205970764, + "learning_rate": 0.00012033411843443118, + "loss": 0.2008, + "step": 30950 + }, + { + "epoch": 1.1954129503069617, + "grad_norm": 3.227698564529419, + "learning_rate": 0.00012030837741482941, + "loss": 0.2797, + "step": 30960 + }, + { + "epoch": 1.1957990656009885, + "grad_norm": 1.3904643058776855, + "learning_rate": 0.00012028263639522761, + "loss": 0.1636, + "step": 30970 + }, + { + "epoch": 1.1961851808950152, + "grad_norm": 1.6495708227157593, + "learning_rate": 0.00012025689537562584, + "loss": 0.2768, + "step": 30980 + }, + { + "epoch": 1.196571296189042, + "grad_norm": 0.3142000436782837, + "learning_rate": 0.00012023115435602405, + "loss": 0.1797, + "step": 30990 + }, + { + "epoch": 1.1969574114830688, + "grad_norm": 1.562090516090393, + "learning_rate": 0.00012020541333642225, + "loss": 0.3175, + "step": 31000 + }, + { + "epoch": 1.1973435267770955, + "grad_norm": 1.3837881088256836, + "learning_rate": 0.00012017967231682048, + "loss": 0.3137, + "step": 31010 + }, + { + "epoch": 1.1977296420711223, + "grad_norm": 1.1495468616485596, + "learning_rate": 0.00012015393129721868, + 
"loss": 0.232, + "step": 31020 + }, + { + "epoch": 1.1981157573651493, + "grad_norm": 1.562514305114746, + "learning_rate": 0.0001201281902776169, + "loss": 0.3455, + "step": 31030 + }, + { + "epoch": 1.198501872659176, + "grad_norm": 1.626610517501831, + "learning_rate": 0.00012010244925801513, + "loss": 0.2382, + "step": 31040 + }, + { + "epoch": 1.1988879879532028, + "grad_norm": 2.5863585472106934, + "learning_rate": 0.00012007670823841333, + "loss": 0.288, + "step": 31050 + }, + { + "epoch": 1.1992741032472296, + "grad_norm": 0.9751160740852356, + "learning_rate": 0.00012005096721881154, + "loss": 0.3236, + "step": 31060 + }, + { + "epoch": 1.1996602185412564, + "grad_norm": 1.7428686618804932, + "learning_rate": 0.00012002522619920974, + "loss": 0.3707, + "step": 31070 + }, + { + "epoch": 1.2000463338352831, + "grad_norm": 1.0963342189788818, + "learning_rate": 0.00011999948517960797, + "loss": 0.2991, + "step": 31080 + }, + { + "epoch": 1.20043244912931, + "grad_norm": 2.0942695140838623, + "learning_rate": 0.00011997374416000617, + "loss": 0.2611, + "step": 31090 + }, + { + "epoch": 1.200818564423337, + "grad_norm": 0.11989244073629379, + "learning_rate": 0.0001199480031404044, + "loss": 0.2071, + "step": 31100 + }, + { + "epoch": 1.2012046797173637, + "grad_norm": 0.44892773032188416, + "learning_rate": 0.00011992226212080262, + "loss": 0.2105, + "step": 31110 + }, + { + "epoch": 1.2015907950113904, + "grad_norm": 0.0861232578754425, + "learning_rate": 0.00011989652110120082, + "loss": 0.1906, + "step": 31120 + }, + { + "epoch": 1.2019769103054172, + "grad_norm": 1.9162683486938477, + "learning_rate": 0.00011987078008159904, + "loss": 0.1929, + "step": 31130 + }, + { + "epoch": 1.202363025599444, + "grad_norm": 1.3750224113464355, + "learning_rate": 0.00011984503906199725, + "loss": 0.3072, + "step": 31140 + }, + { + "epoch": 1.2027491408934707, + "grad_norm": 0.49282577633857727, + "learning_rate": 0.00011981929804239546, + "loss": 0.3082, + "step": 31150 + }, + { + "epoch": 1.2031352561874975, + "grad_norm": 1.0578473806381226, + "learning_rate": 0.00011979355702279366, + "loss": 0.2498, + "step": 31160 + }, + { + "epoch": 1.2035213714815245, + "grad_norm": 0.6070756316184998, + "learning_rate": 0.00011976781600319189, + "loss": 0.2324, + "step": 31170 + }, + { + "epoch": 1.2039074867755513, + "grad_norm": 3.2918875217437744, + "learning_rate": 0.00011974207498359012, + "loss": 0.4218, + "step": 31180 + }, + { + "epoch": 1.204293602069578, + "grad_norm": 0.3284684419631958, + "learning_rate": 0.00011971633396398832, + "loss": 0.2713, + "step": 31190 + }, + { + "epoch": 1.2046797173636048, + "grad_norm": 0.8733110427856445, + "learning_rate": 0.00011969059294438653, + "loss": 0.328, + "step": 31200 + }, + { + "epoch": 1.2050658326576316, + "grad_norm": 1.787016749382019, + "learning_rate": 0.00011966485192478474, + "loss": 0.2115, + "step": 31210 + }, + { + "epoch": 1.2054519479516583, + "grad_norm": 0.7539357542991638, + "learning_rate": 0.00011963911090518296, + "loss": 0.3585, + "step": 31220 + }, + { + "epoch": 1.205838063245685, + "grad_norm": 0.32271450757980347, + "learning_rate": 0.00011961336988558118, + "loss": 0.2472, + "step": 31230 + }, + { + "epoch": 1.2062241785397119, + "grad_norm": 0.85898756980896, + "learning_rate": 0.00011958762886597938, + "loss": 0.2367, + "step": 31240 + }, + { + "epoch": 1.2066102938337386, + "grad_norm": 0.6451173424720764, + "learning_rate": 0.00011956188784637761, + "loss": 0.2309, + "step": 31250 + }, + { + "epoch": 1.2069964091277656, 
+ "grad_norm": 2.1043314933776855, + "learning_rate": 0.00011953614682677581, + "loss": 0.316, + "step": 31260 + }, + { + "epoch": 1.2073825244217924, + "grad_norm": 2.9022254943847656, + "learning_rate": 0.00011951040580717404, + "loss": 0.3161, + "step": 31270 + }, + { + "epoch": 1.2077686397158192, + "grad_norm": 0.7839256525039673, + "learning_rate": 0.00011948466478757224, + "loss": 0.4014, + "step": 31280 + }, + { + "epoch": 1.208154755009846, + "grad_norm": 1.024190902709961, + "learning_rate": 0.00011945892376797045, + "loss": 0.2333, + "step": 31290 + }, + { + "epoch": 1.2085408703038727, + "grad_norm": 1.433605670928955, + "learning_rate": 0.00011943318274836868, + "loss": 0.2014, + "step": 31300 + }, + { + "epoch": 1.2089269855978995, + "grad_norm": 0.7208861112594604, + "learning_rate": 0.00011940744172876688, + "loss": 0.1786, + "step": 31310 + }, + { + "epoch": 1.2093131008919262, + "grad_norm": 2.5464839935302734, + "learning_rate": 0.0001193817007091651, + "loss": 0.3646, + "step": 31320 + }, + { + "epoch": 1.2096992161859532, + "grad_norm": 1.6425135135650635, + "learning_rate": 0.0001193559596895633, + "loss": 0.2603, + "step": 31330 + }, + { + "epoch": 1.21008533147998, + "grad_norm": 1.7573461532592773, + "learning_rate": 0.00011933021866996153, + "loss": 0.315, + "step": 31340 + }, + { + "epoch": 1.2104714467740068, + "grad_norm": 1.884445071220398, + "learning_rate": 0.00011930447765035974, + "loss": 0.2326, + "step": 31350 + }, + { + "epoch": 1.2108575620680335, + "grad_norm": 1.2781214714050293, + "learning_rate": 0.00011927873663075794, + "loss": 0.2248, + "step": 31360 + }, + { + "epoch": 1.2112436773620603, + "grad_norm": 0.5210689902305603, + "learning_rate": 0.00011925299561115617, + "loss": 0.2517, + "step": 31370 + }, + { + "epoch": 1.211629792656087, + "grad_norm": 0.4265996515750885, + "learning_rate": 0.00011922725459155437, + "loss": 0.257, + "step": 31380 + }, + { + "epoch": 1.2120159079501138, + "grad_norm": 0.7833511233329773, + "learning_rate": 0.0001192015135719526, + "loss": 0.3117, + "step": 31390 + }, + { + "epoch": 1.2124020232441408, + "grad_norm": 5.922067165374756, + "learning_rate": 0.0001191757725523508, + "loss": 0.2949, + "step": 31400 + }, + { + "epoch": 1.2127881385381676, + "grad_norm": 3.2208797931671143, + "learning_rate": 0.00011915003153274902, + "loss": 0.3616, + "step": 31410 + }, + { + "epoch": 1.2131742538321943, + "grad_norm": 1.6283681392669678, + "learning_rate": 0.00011912429051314724, + "loss": 0.1951, + "step": 31420 + }, + { + "epoch": 1.2135603691262211, + "grad_norm": 1.2450298070907593, + "learning_rate": 0.00011909854949354544, + "loss": 0.2313, + "step": 31430 + }, + { + "epoch": 1.2139464844202479, + "grad_norm": 0.8394368290901184, + "learning_rate": 0.00011907280847394366, + "loss": 0.2993, + "step": 31440 + }, + { + "epoch": 1.2143325997142747, + "grad_norm": 0.6099762320518494, + "learning_rate": 0.00011904706745434186, + "loss": 0.1527, + "step": 31450 + }, + { + "epoch": 1.2147187150083014, + "grad_norm": 0.7743226885795593, + "learning_rate": 0.00011902132643474009, + "loss": 0.2608, + "step": 31460 + }, + { + "epoch": 1.2151048303023282, + "grad_norm": 3.343604326248169, + "learning_rate": 0.00011899558541513829, + "loss": 0.3031, + "step": 31470 + }, + { + "epoch": 1.215490945596355, + "grad_norm": 1.016830563545227, + "learning_rate": 0.00011896984439553652, + "loss": 0.2146, + "step": 31480 + }, + { + "epoch": 1.215877060890382, + "grad_norm": 1.0264688730239868, + "learning_rate": 0.00011894410337593473, 
+ "loss": 0.2905, + "step": 31490 + }, + { + "epoch": 1.2162631761844087, + "grad_norm": 1.5718705654144287, + "learning_rate": 0.00011891836235633293, + "loss": 0.2928, + "step": 31500 + }, + { + "epoch": 1.2166492914784355, + "grad_norm": 0.9886181950569153, + "learning_rate": 0.00011889262133673116, + "loss": 0.2129, + "step": 31510 + }, + { + "epoch": 1.2170354067724622, + "grad_norm": 0.6496497392654419, + "learning_rate": 0.00011886688031712936, + "loss": 0.2388, + "step": 31520 + }, + { + "epoch": 1.217421522066489, + "grad_norm": 2.0419809818267822, + "learning_rate": 0.00011884113929752758, + "loss": 0.1835, + "step": 31530 + }, + { + "epoch": 1.2178076373605158, + "grad_norm": 0.41370299458503723, + "learning_rate": 0.00011881539827792581, + "loss": 0.3099, + "step": 31540 + }, + { + "epoch": 1.2181937526545425, + "grad_norm": 0.6608594655990601, + "learning_rate": 0.00011878965725832401, + "loss": 0.4235, + "step": 31550 + }, + { + "epoch": 1.2185798679485695, + "grad_norm": 0.4206163287162781, + "learning_rate": 0.00011876391623872222, + "loss": 0.2384, + "step": 31560 + }, + { + "epoch": 1.2189659832425963, + "grad_norm": 0.2976207137107849, + "learning_rate": 0.00011873817521912042, + "loss": 0.4792, + "step": 31570 + }, + { + "epoch": 1.219352098536623, + "grad_norm": 2.237607002258301, + "learning_rate": 0.00011871243419951865, + "loss": 0.4694, + "step": 31580 + }, + { + "epoch": 1.2197382138306498, + "grad_norm": 2.1085827350616455, + "learning_rate": 0.00011868669317991685, + "loss": 0.3475, + "step": 31590 + }, + { + "epoch": 1.2201243291246766, + "grad_norm": 1.5770317316055298, + "learning_rate": 0.00011866095216031508, + "loss": 0.3676, + "step": 31600 + }, + { + "epoch": 1.2205104444187034, + "grad_norm": 1.7337925434112549, + "learning_rate": 0.0001186352111407133, + "loss": 0.2663, + "step": 31610 + }, + { + "epoch": 1.2208965597127301, + "grad_norm": 0.23846319317817688, + "learning_rate": 0.0001186094701211115, + "loss": 0.1916, + "step": 31620 + }, + { + "epoch": 1.2212826750067571, + "grad_norm": 1.1316229104995728, + "learning_rate": 0.00011858372910150972, + "loss": 0.1924, + "step": 31630 + }, + { + "epoch": 1.221668790300784, + "grad_norm": 0.7642451524734497, + "learning_rate": 0.00011855798808190792, + "loss": 0.3786, + "step": 31640 + }, + { + "epoch": 1.2220549055948107, + "grad_norm": 1.975182056427002, + "learning_rate": 0.00011853224706230614, + "loss": 0.2646, + "step": 31650 + }, + { + "epoch": 1.2224410208888374, + "grad_norm": 1.5172406435012817, + "learning_rate": 0.00011850650604270437, + "loss": 0.2824, + "step": 31660 + }, + { + "epoch": 1.2228271361828642, + "grad_norm": 1.5113869905471802, + "learning_rate": 0.00011848076502310257, + "loss": 0.2959, + "step": 31670 + }, + { + "epoch": 1.223213251476891, + "grad_norm": 2.380364418029785, + "learning_rate": 0.0001184550240035008, + "loss": 0.2425, + "step": 31680 + }, + { + "epoch": 1.2235993667709177, + "grad_norm": 0.11588902771472931, + "learning_rate": 0.000118429282983899, + "loss": 0.1665, + "step": 31690 + }, + { + "epoch": 1.2239854820649445, + "grad_norm": 2.045466184616089, + "learning_rate": 0.00011840354196429721, + "loss": 0.2478, + "step": 31700 + }, + { + "epoch": 1.2243715973589713, + "grad_norm": 4.201779365539551, + "learning_rate": 0.00011837780094469542, + "loss": 0.3295, + "step": 31710 + }, + { + "epoch": 1.2247577126529983, + "grad_norm": 1.1080553531646729, + "learning_rate": 0.00011835205992509364, + "loss": 0.2102, + "step": 31720 + }, + { + "epoch": 
1.225143827947025, + "grad_norm": 0.3263179659843445, + "learning_rate": 0.00011832631890549186, + "loss": 0.2822, + "step": 31730 + }, + { + "epoch": 1.2255299432410518, + "grad_norm": 0.7899855971336365, + "learning_rate": 0.00011830057788589006, + "loss": 0.1462, + "step": 31740 + }, + { + "epoch": 1.2259160585350786, + "grad_norm": 1.330854058265686, + "learning_rate": 0.00011827483686628829, + "loss": 0.3883, + "step": 31750 + }, + { + "epoch": 1.2263021738291053, + "grad_norm": 0.40446966886520386, + "learning_rate": 0.00011824909584668649, + "loss": 0.1592, + "step": 31760 + }, + { + "epoch": 1.226688289123132, + "grad_norm": 0.49187901616096497, + "learning_rate": 0.0001182233548270847, + "loss": 0.0698, + "step": 31770 + }, + { + "epoch": 1.2270744044171589, + "grad_norm": 4.227303504943848, + "learning_rate": 0.00011819761380748292, + "loss": 0.3945, + "step": 31780 + }, + { + "epoch": 1.2274605197111859, + "grad_norm": 2.520578622817993, + "learning_rate": 0.00011817187278788113, + "loss": 0.4552, + "step": 31790 + }, + { + "epoch": 1.2278466350052126, + "grad_norm": 0.9486772418022156, + "learning_rate": 0.00011814613176827936, + "loss": 0.232, + "step": 31800 + }, + { + "epoch": 1.2282327502992394, + "grad_norm": 0.8336694836616516, + "learning_rate": 0.00011812039074867756, + "loss": 0.183, + "step": 31810 + }, + { + "epoch": 1.2286188655932662, + "grad_norm": 1.576710820198059, + "learning_rate": 0.00011809464972907578, + "loss": 0.4116, + "step": 31820 + }, + { + "epoch": 1.229004980887293, + "grad_norm": 0.2388358861207962, + "learning_rate": 0.00011806890870947398, + "loss": 0.3509, + "step": 31830 + }, + { + "epoch": 1.2293910961813197, + "grad_norm": 1.054880142211914, + "learning_rate": 0.00011804316768987221, + "loss": 0.1669, + "step": 31840 + }, + { + "epoch": 1.2297772114753465, + "grad_norm": 0.7959414720535278, + "learning_rate": 0.00011801742667027042, + "loss": 0.2073, + "step": 31850 + }, + { + "epoch": 1.2301633267693735, + "grad_norm": 2.480940103530884, + "learning_rate": 0.00011799168565066862, + "loss": 0.3589, + "step": 31860 + }, + { + "epoch": 1.2305494420634002, + "grad_norm": 0.9381177425384521, + "learning_rate": 0.00011796594463106685, + "loss": 0.2576, + "step": 31870 + }, + { + "epoch": 1.230935557357427, + "grad_norm": 9.548282623291016, + "learning_rate": 0.00011794020361146505, + "loss": 0.1755, + "step": 31880 + }, + { + "epoch": 1.2313216726514538, + "grad_norm": 0.4922927916049957, + "learning_rate": 0.00011791446259186328, + "loss": 0.2173, + "step": 31890 + }, + { + "epoch": 1.2317077879454805, + "grad_norm": 1.2846627235412598, + "learning_rate": 0.00011788872157226148, + "loss": 0.2039, + "step": 31900 + }, + { + "epoch": 1.2320939032395073, + "grad_norm": 0.9321876764297485, + "learning_rate": 0.0001178629805526597, + "loss": 0.2418, + "step": 31910 + }, + { + "epoch": 1.232480018533534, + "grad_norm": 1.373787760734558, + "learning_rate": 0.00011783723953305792, + "loss": 0.3065, + "step": 31920 + }, + { + "epoch": 1.2328661338275608, + "grad_norm": 1.915208339691162, + "learning_rate": 0.00011781149851345612, + "loss": 0.292, + "step": 31930 + }, + { + "epoch": 1.2332522491215876, + "grad_norm": 1.533010482788086, + "learning_rate": 0.00011778575749385434, + "loss": 0.2234, + "step": 31940 + }, + { + "epoch": 1.2336383644156146, + "grad_norm": 0.5740505456924438, + "learning_rate": 0.00011776001647425254, + "loss": 0.2513, + "step": 31950 + }, + { + "epoch": 1.2340244797096414, + "grad_norm": 3.164320230484009, + "learning_rate": 
0.00011773427545465077, + "loss": 0.4233, + "step": 31960 + }, + { + "epoch": 1.2344105950036681, + "grad_norm": 0.8309218287467957, + "learning_rate": 0.00011770853443504897, + "loss": 0.4629, + "step": 31970 + }, + { + "epoch": 1.2347967102976949, + "grad_norm": 2.0942423343658447, + "learning_rate": 0.0001176827934154472, + "loss": 0.3381, + "step": 31980 + }, + { + "epoch": 1.2351828255917217, + "grad_norm": 0.4961155354976654, + "learning_rate": 0.00011765705239584541, + "loss": 0.1265, + "step": 31990 + }, + { + "epoch": 1.2355689408857484, + "grad_norm": 1.6777870655059814, + "learning_rate": 0.00011763131137624361, + "loss": 0.2503, + "step": 32000 + }, + { + "epoch": 1.2359550561797752, + "grad_norm": 0.6016990542411804, + "learning_rate": 0.00011760557035664184, + "loss": 0.3944, + "step": 32010 + }, + { + "epoch": 1.2363411714738022, + "grad_norm": 1.9123533964157104, + "learning_rate": 0.00011757982933704004, + "loss": 0.463, + "step": 32020 + }, + { + "epoch": 1.236727286767829, + "grad_norm": 1.5328630208969116, + "learning_rate": 0.00011755408831743826, + "loss": 0.3368, + "step": 32030 + }, + { + "epoch": 1.2371134020618557, + "grad_norm": 1.0984220504760742, + "learning_rate": 0.00011752834729783649, + "loss": 0.1987, + "step": 32040 + }, + { + "epoch": 1.2374995173558825, + "grad_norm": 3.1570327281951904, + "learning_rate": 0.00011750260627823469, + "loss": 0.454, + "step": 32050 + }, + { + "epoch": 1.2378856326499093, + "grad_norm": 3.7589874267578125, + "learning_rate": 0.0001174768652586329, + "loss": 0.2953, + "step": 32060 + }, + { + "epoch": 1.238271747943936, + "grad_norm": 1.3289847373962402, + "learning_rate": 0.0001174511242390311, + "loss": 0.3058, + "step": 32070 + }, + { + "epoch": 1.2386578632379628, + "grad_norm": 0.9872431755065918, + "learning_rate": 0.00011742538321942933, + "loss": 0.1312, + "step": 32080 + }, + { + "epoch": 1.2390439785319898, + "grad_norm": 1.799133062362671, + "learning_rate": 0.00011739964219982753, + "loss": 0.3271, + "step": 32090 + }, + { + "epoch": 1.2394300938260165, + "grad_norm": 0.13501858711242676, + "learning_rate": 0.00011737390118022576, + "loss": 0.4462, + "step": 32100 + }, + { + "epoch": 1.2398162091200433, + "grad_norm": 1.53009033203125, + "learning_rate": 0.00011734816016062398, + "loss": 0.1311, + "step": 32110 + }, + { + "epoch": 1.24020232441407, + "grad_norm": 0.5737781524658203, + "learning_rate": 0.00011732241914102218, + "loss": 0.1705, + "step": 32120 + }, + { + "epoch": 1.2405884397080968, + "grad_norm": 2.095947027206421, + "learning_rate": 0.0001172966781214204, + "loss": 0.2195, + "step": 32130 + }, + { + "epoch": 1.2409745550021236, + "grad_norm": 2.2158639430999756, + "learning_rate": 0.0001172709371018186, + "loss": 0.2964, + "step": 32140 + }, + { + "epoch": 1.2413606702961504, + "grad_norm": 0.3154670000076294, + "learning_rate": 0.00011724519608221682, + "loss": 0.245, + "step": 32150 + }, + { + "epoch": 1.2417467855901771, + "grad_norm": 1.4467942714691162, + "learning_rate": 0.00011721945506261505, + "loss": 0.3469, + "step": 32160 + }, + { + "epoch": 1.242132900884204, + "grad_norm": 0.24252896010875702, + "learning_rate": 0.00011719371404301325, + "loss": 0.2217, + "step": 32170 + }, + { + "epoch": 1.242519016178231, + "grad_norm": 2.4256465435028076, + "learning_rate": 0.00011716797302341148, + "loss": 0.4707, + "step": 32180 + }, + { + "epoch": 1.2429051314722577, + "grad_norm": 0.5917278528213501, + "learning_rate": 0.00011714223200380968, + "loss": 0.2284, + "step": 32190 + }, + { + 
"epoch": 1.2432912467662844, + "grad_norm": 1.2977256774902344, + "learning_rate": 0.00011711649098420789, + "loss": 0.3612, + "step": 32200 + }, + { + "epoch": 1.2436773620603112, + "grad_norm": 1.296558141708374, + "learning_rate": 0.00011709074996460609, + "loss": 0.2231, + "step": 32210 + }, + { + "epoch": 1.244063477354338, + "grad_norm": 1.6559596061706543, + "learning_rate": 0.00011706500894500432, + "loss": 0.2636, + "step": 32220 + }, + { + "epoch": 1.2444495926483647, + "grad_norm": 1.6342560052871704, + "learning_rate": 0.00011703926792540254, + "loss": 0.1276, + "step": 32230 + }, + { + "epoch": 1.2448357079423915, + "grad_norm": 1.1173146963119507, + "learning_rate": 0.00011701352690580074, + "loss": 0.1719, + "step": 32240 + }, + { + "epoch": 1.2452218232364185, + "grad_norm": 0.29269275069236755, + "learning_rate": 0.00011698778588619897, + "loss": 0.3323, + "step": 32250 + }, + { + "epoch": 1.2456079385304453, + "grad_norm": 2.518568515777588, + "learning_rate": 0.00011696204486659717, + "loss": 0.3302, + "step": 32260 + }, + { + "epoch": 1.245994053824472, + "grad_norm": 2.535940647125244, + "learning_rate": 0.00011693630384699538, + "loss": 0.1488, + "step": 32270 + }, + { + "epoch": 1.2463801691184988, + "grad_norm": 0.384199321269989, + "learning_rate": 0.00011691056282739358, + "loss": 0.0957, + "step": 32280 + }, + { + "epoch": 1.2467662844125256, + "grad_norm": 3.7460570335388184, + "learning_rate": 0.00011688482180779181, + "loss": 0.311, + "step": 32290 + }, + { + "epoch": 1.2471523997065523, + "grad_norm": 1.285152792930603, + "learning_rate": 0.00011685908078819004, + "loss": 0.1872, + "step": 32300 + }, + { + "epoch": 1.247538515000579, + "grad_norm": 0.6118050217628479, + "learning_rate": 0.00011683333976858823, + "loss": 0.1635, + "step": 32310 + }, + { + "epoch": 1.2479246302946059, + "grad_norm": 2.9890856742858887, + "learning_rate": 0.00011680759874898646, + "loss": 0.2773, + "step": 32320 + }, + { + "epoch": 1.2483107455886326, + "grad_norm": 0.15415504574775696, + "learning_rate": 0.00011678185772938466, + "loss": 0.3014, + "step": 32330 + }, + { + "epoch": 1.2486968608826596, + "grad_norm": 1.2717432975769043, + "learning_rate": 0.00011675611670978287, + "loss": 0.2442, + "step": 32340 + }, + { + "epoch": 1.2490829761766864, + "grad_norm": 0.19885335862636566, + "learning_rate": 0.0001167303756901811, + "loss": 0.1894, + "step": 32350 + }, + { + "epoch": 1.2494690914707132, + "grad_norm": 1.425176978111267, + "learning_rate": 0.0001167046346705793, + "loss": 0.3008, + "step": 32360 + }, + { + "epoch": 1.24985520676474, + "grad_norm": 1.9033544063568115, + "learning_rate": 0.00011667889365097753, + "loss": 0.3072, + "step": 32370 + }, + { + "epoch": 1.2502413220587667, + "grad_norm": 0.6677396297454834, + "learning_rate": 0.00011665315263137573, + "loss": 0.2378, + "step": 32380 + }, + { + "epoch": 1.2506274373527935, + "grad_norm": 0.4491410255432129, + "learning_rate": 0.00011662741161177396, + "loss": 0.1071, + "step": 32390 + }, + { + "epoch": 1.2510135526468202, + "grad_norm": 2.7697031497955322, + "learning_rate": 0.00011660167059217215, + "loss": 0.3276, + "step": 32400 + }, + { + "epoch": 1.2513996679408472, + "grad_norm": 1.4374775886535645, + "learning_rate": 0.00011657592957257037, + "loss": 0.2459, + "step": 32410 + }, + { + "epoch": 1.251785783234874, + "grad_norm": 1.5245740413665771, + "learning_rate": 0.0001165501885529686, + "loss": 0.3841, + "step": 32420 + }, + { + "epoch": 1.2521718985289008, + "grad_norm": 0.5069687366485596, + 
"learning_rate": 0.0001165244475333668, + "loss": 0.1425, + "step": 32430 + }, + { + "epoch": 1.2525580138229275, + "grad_norm": 0.6181765198707581, + "learning_rate": 0.00011649870651376502, + "loss": 0.4184, + "step": 32440 + }, + { + "epoch": 1.2529441291169543, + "grad_norm": 2.007375717163086, + "learning_rate": 0.00011647296549416322, + "loss": 0.1754, + "step": 32450 + }, + { + "epoch": 1.253330244410981, + "grad_norm": 0.27261993288993835, + "learning_rate": 0.00011644722447456145, + "loss": 0.3608, + "step": 32460 + }, + { + "epoch": 1.2537163597050078, + "grad_norm": 0.3452116549015045, + "learning_rate": 0.00011642148345495965, + "loss": 0.1913, + "step": 32470 + }, + { + "epoch": 1.2541024749990348, + "grad_norm": 2.553738594055176, + "learning_rate": 0.00011639574243535787, + "loss": 0.2221, + "step": 32480 + }, + { + "epoch": 1.2544885902930616, + "grad_norm": 0.795706033706665, + "learning_rate": 0.00011637000141575609, + "loss": 0.2348, + "step": 32490 + }, + { + "epoch": 1.2548747055870884, + "grad_norm": 0.519059956073761, + "learning_rate": 0.00011634426039615429, + "loss": 0.2874, + "step": 32500 + }, + { + "epoch": 1.2552608208811151, + "grad_norm": 2.7050392627716064, + "learning_rate": 0.00011631851937655251, + "loss": 0.3821, + "step": 32510 + }, + { + "epoch": 1.255646936175142, + "grad_norm": 2.09036922454834, + "learning_rate": 0.00011629277835695071, + "loss": 0.2562, + "step": 32520 + }, + { + "epoch": 1.2560330514691687, + "grad_norm": 1.156534194946289, + "learning_rate": 0.00011626703733734894, + "loss": 0.1373, + "step": 32530 + }, + { + "epoch": 1.2564191667631954, + "grad_norm": 2.5214719772338867, + "learning_rate": 0.00011624129631774715, + "loss": 0.2955, + "step": 32540 + }, + { + "epoch": 1.2568052820572224, + "grad_norm": 2.71368408203125, + "learning_rate": 0.00011621555529814537, + "loss": 0.2832, + "step": 32550 + }, + { + "epoch": 1.257191397351249, + "grad_norm": 1.488390564918518, + "learning_rate": 0.00011618981427854358, + "loss": 0.2369, + "step": 32560 + }, + { + "epoch": 1.257577512645276, + "grad_norm": 1.7705353498458862, + "learning_rate": 0.00011616407325894178, + "loss": 0.4351, + "step": 32570 + }, + { + "epoch": 1.2579636279393027, + "grad_norm": 4.3224406242370605, + "learning_rate": 0.00011613833223934001, + "loss": 0.3365, + "step": 32580 + }, + { + "epoch": 1.2583497432333295, + "grad_norm": 0.9157351851463318, + "learning_rate": 0.00011611259121973821, + "loss": 0.2882, + "step": 32590 + }, + { + "epoch": 1.2587358585273563, + "grad_norm": 2.9030823707580566, + "learning_rate": 0.00011608685020013643, + "loss": 0.343, + "step": 32600 + }, + { + "epoch": 1.259121973821383, + "grad_norm": 0.807424783706665, + "learning_rate": 0.00011606110918053466, + "loss": 0.2413, + "step": 32610 + }, + { + "epoch": 1.2595080891154098, + "grad_norm": 1.8564451932907104, + "learning_rate": 0.00011603536816093286, + "loss": 0.5403, + "step": 32620 + }, + { + "epoch": 1.2598942044094366, + "grad_norm": 0.8169263005256653, + "learning_rate": 0.00011600962714133107, + "loss": 0.3522, + "step": 32630 + }, + { + "epoch": 1.2602803197034635, + "grad_norm": 0.9638017416000366, + "learning_rate": 0.00011598388612172927, + "loss": 0.3295, + "step": 32640 + }, + { + "epoch": 1.2606664349974903, + "grad_norm": 1.8613898754119873, + "learning_rate": 0.0001159581451021275, + "loss": 0.3107, + "step": 32650 + }, + { + "epoch": 1.261052550291517, + "grad_norm": 2.582638740539551, + "learning_rate": 0.00011593240408252573, + "loss": 0.2767, + "step": 32660 + 
}, + { + "epoch": 1.2614386655855439, + "grad_norm": 1.8227603435516357, + "learning_rate": 0.00011590666306292393, + "loss": 0.2324, + "step": 32670 + }, + { + "epoch": 1.2618247808795706, + "grad_norm": 0.3827721178531647, + "learning_rate": 0.00011588092204332215, + "loss": 0.4926, + "step": 32680 + }, + { + "epoch": 1.2622108961735974, + "grad_norm": 1.5523405075073242, + "learning_rate": 0.00011585518102372035, + "loss": 0.4475, + "step": 32690 + }, + { + "epoch": 1.2625970114676242, + "grad_norm": 0.25254619121551514, + "learning_rate": 0.00011582944000411857, + "loss": 0.2333, + "step": 32700 + }, + { + "epoch": 1.2629831267616511, + "grad_norm": 0.5530809164047241, + "learning_rate": 0.00011580369898451677, + "loss": 0.2781, + "step": 32710 + }, + { + "epoch": 1.2633692420556777, + "grad_norm": 0.3202857971191406, + "learning_rate": 0.000115777957964915, + "loss": 0.2596, + "step": 32720 + }, + { + "epoch": 1.2637553573497047, + "grad_norm": 0.25663653016090393, + "learning_rate": 0.00011575221694531322, + "loss": 0.2096, + "step": 32730 + }, + { + "epoch": 1.2641414726437314, + "grad_norm": 2.423585891723633, + "learning_rate": 0.00011572647592571142, + "loss": 0.3064, + "step": 32740 + }, + { + "epoch": 1.2645275879377582, + "grad_norm": 0.2529740631580353, + "learning_rate": 0.00011570073490610965, + "loss": 0.1426, + "step": 32750 + }, + { + "epoch": 1.264913703231785, + "grad_norm": 0.5238136053085327, + "learning_rate": 0.00011567499388650785, + "loss": 0.1598, + "step": 32760 + }, + { + "epoch": 1.2652998185258117, + "grad_norm": 0.4495049715042114, + "learning_rate": 0.00011564925286690606, + "loss": 0.1712, + "step": 32770 + }, + { + "epoch": 1.2656859338198387, + "grad_norm": 0.3863711953163147, + "learning_rate": 0.00011562351184730426, + "loss": 0.1015, + "step": 32780 + }, + { + "epoch": 1.2660720491138653, + "grad_norm": 1.3139948844909668, + "learning_rate": 0.00011559777082770249, + "loss": 0.3982, + "step": 32790 + }, + { + "epoch": 1.2664581644078923, + "grad_norm": 0.40500277280807495, + "learning_rate": 0.00011557202980810071, + "loss": 0.4211, + "step": 32800 + }, + { + "epoch": 1.266844279701919, + "grad_norm": 0.9749487042427063, + "learning_rate": 0.00011554628878849891, + "loss": 0.2773, + "step": 32810 + }, + { + "epoch": 1.2672303949959458, + "grad_norm": 1.1194144487380981, + "learning_rate": 0.00011552054776889714, + "loss": 0.2771, + "step": 32820 + }, + { + "epoch": 1.2676165102899726, + "grad_norm": 1.4098531007766724, + "learning_rate": 0.00011549480674929534, + "loss": 0.2044, + "step": 32830 + }, + { + "epoch": 1.2680026255839993, + "grad_norm": 1.3184692859649658, + "learning_rate": 0.00011546906572969355, + "loss": 0.2633, + "step": 32840 + }, + { + "epoch": 1.268388740878026, + "grad_norm": 0.9776495099067688, + "learning_rate": 0.00011544332471009178, + "loss": 0.3212, + "step": 32850 + }, + { + "epoch": 1.2687748561720529, + "grad_norm": 1.1577869653701782, + "learning_rate": 0.00011541758369048998, + "loss": 0.4271, + "step": 32860 + }, + { + "epoch": 1.2691609714660799, + "grad_norm": 2.2742063999176025, + "learning_rate": 0.00011539184267088821, + "loss": 0.2044, + "step": 32870 + }, + { + "epoch": 1.2695470867601066, + "grad_norm": 1.2156758308410645, + "learning_rate": 0.00011536610165128641, + "loss": 0.2394, + "step": 32880 + }, + { + "epoch": 1.2699332020541334, + "grad_norm": 1.9160290956497192, + "learning_rate": 0.00011534036063168463, + "loss": 0.3733, + "step": 32890 + }, + { + "epoch": 1.2703193173481602, + "grad_norm": 
1.3484338521957397, + "learning_rate": 0.00011531461961208283, + "loss": 0.3946, + "step": 32900 + }, + { + "epoch": 1.270705432642187, + "grad_norm": 1.0480244159698486, + "learning_rate": 0.00011528887859248105, + "loss": 0.166, + "step": 32910 + }, + { + "epoch": 1.2710915479362137, + "grad_norm": 1.1980483531951904, + "learning_rate": 0.00011526313757287927, + "loss": 0.3134, + "step": 32920 + }, + { + "epoch": 1.2714776632302405, + "grad_norm": 1.2777379751205444, + "learning_rate": 0.00011523739655327747, + "loss": 0.2615, + "step": 32930 + }, + { + "epoch": 1.2718637785242675, + "grad_norm": 1.559792399406433, + "learning_rate": 0.0001152116555336757, + "loss": 0.2996, + "step": 32940 + }, + { + "epoch": 1.272249893818294, + "grad_norm": 1.9589471817016602, + "learning_rate": 0.0001151859145140739, + "loss": 0.2683, + "step": 32950 + }, + { + "epoch": 1.272636009112321, + "grad_norm": 0.23336051404476166, + "learning_rate": 0.00011516017349447213, + "loss": 0.3306, + "step": 32960 + }, + { + "epoch": 1.2730221244063478, + "grad_norm": 1.5898360013961792, + "learning_rate": 0.00011513443247487034, + "loss": 0.2891, + "step": 32970 + }, + { + "epoch": 1.2734082397003745, + "grad_norm": 0.5046207904815674, + "learning_rate": 0.00011510869145526854, + "loss": 0.1843, + "step": 32980 + }, + { + "epoch": 1.2737943549944013, + "grad_norm": 0.16588473320007324, + "learning_rate": 0.00011508295043566677, + "loss": 0.1065, + "step": 32990 + }, + { + "epoch": 1.274180470288428, + "grad_norm": 1.5499800443649292, + "learning_rate": 0.00011505720941606497, + "loss": 0.2795, + "step": 33000 + }, + { + "epoch": 1.274566585582455, + "grad_norm": 0.5019121766090393, + "learning_rate": 0.0001150314683964632, + "loss": 0.1857, + "step": 33010 + }, + { + "epoch": 1.2749527008764816, + "grad_norm": 0.258952260017395, + "learning_rate": 0.0001150057273768614, + "loss": 0.11, + "step": 33020 + }, + { + "epoch": 1.2753388161705086, + "grad_norm": 0.8540348410606384, + "learning_rate": 0.00011497998635725962, + "loss": 0.1852, + "step": 33030 + }, + { + "epoch": 1.2757249314645354, + "grad_norm": 0.08689398318529129, + "learning_rate": 0.00011495424533765783, + "loss": 0.2225, + "step": 33040 + }, + { + "epoch": 1.2761110467585621, + "grad_norm": 0.42253822088241577, + "learning_rate": 0.00011492850431805605, + "loss": 0.0751, + "step": 33050 + }, + { + "epoch": 1.276497162052589, + "grad_norm": 1.2964017391204834, + "learning_rate": 0.00011490276329845426, + "loss": 0.2384, + "step": 33060 + }, + { + "epoch": 1.2768832773466157, + "grad_norm": 0.5337836146354675, + "learning_rate": 0.00011487702227885246, + "loss": 0.1415, + "step": 33070 + }, + { + "epoch": 1.2772693926406424, + "grad_norm": 2.7771682739257812, + "learning_rate": 0.00011485128125925069, + "loss": 0.378, + "step": 33080 + }, + { + "epoch": 1.2776555079346692, + "grad_norm": 1.5107232332229614, + "learning_rate": 0.00011482554023964889, + "loss": 0.2482, + "step": 33090 + }, + { + "epoch": 1.2780416232286962, + "grad_norm": 0.6885499358177185, + "learning_rate": 0.00011479979922004711, + "loss": 0.2122, + "step": 33100 + }, + { + "epoch": 1.278427738522723, + "grad_norm": 0.9016557335853577, + "learning_rate": 0.00011477405820044533, + "loss": 0.2841, + "step": 33110 + }, + { + "epoch": 1.2788138538167497, + "grad_norm": 1.9532525539398193, + "learning_rate": 0.00011474831718084354, + "loss": 0.2281, + "step": 33120 + }, + { + "epoch": 1.2791999691107765, + "grad_norm": 2.1078782081604004, + "learning_rate": 0.00011472257616124175, + 
"loss": 0.2836, + "step": 33130 + }, + { + "epoch": 1.2795860844048033, + "grad_norm": 0.19830390810966492, + "learning_rate": 0.00011469683514163995, + "loss": 0.388, + "step": 33140 + }, + { + "epoch": 1.27997219969883, + "grad_norm": 0.17538850009441376, + "learning_rate": 0.00011467109412203818, + "loss": 0.3274, + "step": 33150 + }, + { + "epoch": 1.2803583149928568, + "grad_norm": 0.7402139902114868, + "learning_rate": 0.00011464535310243641, + "loss": 0.1979, + "step": 33160 + }, + { + "epoch": 1.2807444302868838, + "grad_norm": 0.2097146362066269, + "learning_rate": 0.00011461961208283461, + "loss": 0.2464, + "step": 33170 + }, + { + "epoch": 1.2811305455809103, + "grad_norm": 1.2441083192825317, + "learning_rate": 0.00011459387106323283, + "loss": 0.266, + "step": 33180 + }, + { + "epoch": 1.2815166608749373, + "grad_norm": 2.518852710723877, + "learning_rate": 0.00011456813004363103, + "loss": 0.253, + "step": 33190 + }, + { + "epoch": 1.281902776168964, + "grad_norm": 0.8078998327255249, + "learning_rate": 0.00011454238902402925, + "loss": 0.2361, + "step": 33200 + }, + { + "epoch": 1.2822888914629909, + "grad_norm": 1.2297371625900269, + "learning_rate": 0.00011451664800442745, + "loss": 0.1974, + "step": 33210 + }, + { + "epoch": 1.2826750067570176, + "grad_norm": 0.4303855895996094, + "learning_rate": 0.00011449090698482567, + "loss": 0.3563, + "step": 33220 + }, + { + "epoch": 1.2830611220510444, + "grad_norm": 1.3215210437774658, + "learning_rate": 0.0001144651659652239, + "loss": 0.2818, + "step": 33230 + }, + { + "epoch": 1.2834472373450712, + "grad_norm": 1.546265959739685, + "learning_rate": 0.0001144394249456221, + "loss": 0.5778, + "step": 33240 + }, + { + "epoch": 1.283833352639098, + "grad_norm": 0.8895953297615051, + "learning_rate": 0.00011441368392602033, + "loss": 0.2485, + "step": 33250 + }, + { + "epoch": 1.284219467933125, + "grad_norm": 0.7534870505332947, + "learning_rate": 0.00011438794290641853, + "loss": 0.2825, + "step": 33260 + }, + { + "epoch": 1.2846055832271517, + "grad_norm": 0.052820973098278046, + "learning_rate": 0.00011436220188681674, + "loss": 0.2191, + "step": 33270 + }, + { + "epoch": 1.2849916985211784, + "grad_norm": 0.9264475107192993, + "learning_rate": 0.00011433646086721494, + "loss": 0.181, + "step": 33280 + }, + { + "epoch": 1.2853778138152052, + "grad_norm": 0.2128441333770752, + "learning_rate": 0.00011431071984761317, + "loss": 0.1819, + "step": 33290 + }, + { + "epoch": 1.285763929109232, + "grad_norm": 0.5400950312614441, + "learning_rate": 0.0001142849788280114, + "loss": 0.4316, + "step": 33300 + }, + { + "epoch": 1.2861500444032588, + "grad_norm": 0.8033271431922913, + "learning_rate": 0.00011425923780840959, + "loss": 0.2146, + "step": 33310 + }, + { + "epoch": 1.2865361596972855, + "grad_norm": 2.012575149536133, + "learning_rate": 0.00011423349678880782, + "loss": 0.4335, + "step": 33320 + }, + { + "epoch": 1.2869222749913125, + "grad_norm": 0.7352376580238342, + "learning_rate": 0.00011420775576920602, + "loss": 0.2124, + "step": 33330 + }, + { + "epoch": 1.2873083902853393, + "grad_norm": 0.769036591053009, + "learning_rate": 0.00011418201474960423, + "loss": 0.3602, + "step": 33340 + }, + { + "epoch": 1.287694505579366, + "grad_norm": 0.250592976808548, + "learning_rate": 0.00011415627373000246, + "loss": 0.1692, + "step": 33350 + }, + { + "epoch": 1.2880806208733928, + "grad_norm": 2.43820858001709, + "learning_rate": 0.00011413053271040066, + "loss": 0.2777, + "step": 33360 + }, + { + "epoch": 1.2884667361674196, + 
"grad_norm": 1.3179954290390015, + "learning_rate": 0.00011410479169079889, + "loss": 0.1794, + "step": 33370 + }, + { + "epoch": 1.2888528514614463, + "grad_norm": 1.0040466785430908, + "learning_rate": 0.00011407905067119709, + "loss": 0.3037, + "step": 33380 + }, + { + "epoch": 1.2892389667554731, + "grad_norm": 5.296288013458252, + "learning_rate": 0.00011405330965159531, + "loss": 0.2904, + "step": 33390 + }, + { + "epoch": 1.2896250820495, + "grad_norm": 0.4267273247241974, + "learning_rate": 0.00011402756863199351, + "loss": 0.2263, + "step": 33400 + }, + { + "epoch": 1.2900111973435266, + "grad_norm": 0.8817713260650635, + "learning_rate": 0.00011400182761239173, + "loss": 0.2715, + "step": 33410 + }, + { + "epoch": 1.2903973126375536, + "grad_norm": 2.7891275882720947, + "learning_rate": 0.00011397608659278995, + "loss": 0.2781, + "step": 33420 + }, + { + "epoch": 1.2907834279315804, + "grad_norm": 0.3013952374458313, + "learning_rate": 0.00011395034557318815, + "loss": 0.2563, + "step": 33430 + }, + { + "epoch": 1.2911695432256072, + "grad_norm": 1.766413927078247, + "learning_rate": 0.00011392460455358638, + "loss": 0.1854, + "step": 33440 + }, + { + "epoch": 1.291555658519634, + "grad_norm": 0.25331103801727295, + "learning_rate": 0.00011389886353398458, + "loss": 0.1206, + "step": 33450 + }, + { + "epoch": 1.2919417738136607, + "grad_norm": 0.036400288343429565, + "learning_rate": 0.0001138731225143828, + "loss": 0.4707, + "step": 33460 + }, + { + "epoch": 1.2923278891076875, + "grad_norm": 1.5620888471603394, + "learning_rate": 0.00011384738149478102, + "loss": 0.3312, + "step": 33470 + }, + { + "epoch": 1.2927140044017142, + "grad_norm": 0.6670392155647278, + "learning_rate": 0.00011382164047517922, + "loss": 0.2341, + "step": 33480 + }, + { + "epoch": 1.2931001196957412, + "grad_norm": 2.3108737468719482, + "learning_rate": 0.00011379589945557745, + "loss": 0.3843, + "step": 33490 + }, + { + "epoch": 1.293486234989768, + "grad_norm": 0.8025147318840027, + "learning_rate": 0.00011377015843597565, + "loss": 0.1982, + "step": 33500 + }, + { + "epoch": 1.2938723502837948, + "grad_norm": 1.7835719585418701, + "learning_rate": 0.00011374441741637387, + "loss": 0.3285, + "step": 33510 + }, + { + "epoch": 1.2942584655778215, + "grad_norm": 2.041508913040161, + "learning_rate": 0.00011371867639677207, + "loss": 0.2044, + "step": 33520 + }, + { + "epoch": 1.2946445808718483, + "grad_norm": 1.103378415107727, + "learning_rate": 0.0001136929353771703, + "loss": 0.1682, + "step": 33530 + }, + { + "epoch": 1.295030696165875, + "grad_norm": 0.057376351207494736, + "learning_rate": 0.00011366719435756851, + "loss": 0.1642, + "step": 33540 + }, + { + "epoch": 1.2954168114599018, + "grad_norm": 0.6539410948753357, + "learning_rate": 0.00011364145333796671, + "loss": 0.1549, + "step": 33550 + }, + { + "epoch": 1.2958029267539288, + "grad_norm": 1.250543236732483, + "learning_rate": 0.00011361571231836494, + "loss": 0.3764, + "step": 33560 + }, + { + "epoch": 1.2961890420479556, + "grad_norm": 0.23697887361049652, + "learning_rate": 0.00011358997129876314, + "loss": 0.3999, + "step": 33570 + }, + { + "epoch": 1.2965751573419824, + "grad_norm": 0.9318505525588989, + "learning_rate": 0.00011356423027916137, + "loss": 0.4156, + "step": 33580 + }, + { + "epoch": 1.2969612726360091, + "grad_norm": 1.3910777568817139, + "learning_rate": 0.00011353848925955957, + "loss": 0.3455, + "step": 33590 + }, + { + "epoch": 1.297347387930036, + "grad_norm": 1.6764451265335083, + "learning_rate": 
0.00011351274823995779, + "loss": 0.1884, + "step": 33600 + }, + { + "epoch": 1.2977335032240627, + "grad_norm": 0.9300051927566528, + "learning_rate": 0.000113487007220356, + "loss": 0.119, + "step": 33610 + }, + { + "epoch": 1.2981196185180894, + "grad_norm": 2.447462558746338, + "learning_rate": 0.00011346126620075422, + "loss": 0.4403, + "step": 33620 + }, + { + "epoch": 1.2985057338121164, + "grad_norm": 1.216407060623169, + "learning_rate": 0.00011343552518115243, + "loss": 0.2415, + "step": 33630 + }, + { + "epoch": 1.298891849106143, + "grad_norm": 2.968648910522461, + "learning_rate": 0.00011340978416155063, + "loss": 0.2899, + "step": 33640 + }, + { + "epoch": 1.29927796440017, + "grad_norm": 0.6649970412254333, + "learning_rate": 0.00011338404314194886, + "loss": 0.3809, + "step": 33650 + }, + { + "epoch": 1.2996640796941967, + "grad_norm": 1.7277917861938477, + "learning_rate": 0.00011335830212234709, + "loss": 0.3308, + "step": 33660 + }, + { + "epoch": 1.3000501949882235, + "grad_norm": 1.3269709348678589, + "learning_rate": 0.00011333256110274529, + "loss": 0.3682, + "step": 33670 + }, + { + "epoch": 1.3004363102822503, + "grad_norm": 0.20609407126903534, + "learning_rate": 0.0001133068200831435, + "loss": 0.1379, + "step": 33680 + }, + { + "epoch": 1.300822425576277, + "grad_norm": 0.6592215299606323, + "learning_rate": 0.00011328107906354171, + "loss": 0.2746, + "step": 33690 + }, + { + "epoch": 1.3012085408703038, + "grad_norm": 1.903635859489441, + "learning_rate": 0.00011325533804393993, + "loss": 0.4729, + "step": 33700 + }, + { + "epoch": 1.3015946561643306, + "grad_norm": 0.8432504534721375, + "learning_rate": 0.00011322959702433813, + "loss": 0.2835, + "step": 33710 + }, + { + "epoch": 1.3019807714583576, + "grad_norm": 0.9862542152404785, + "learning_rate": 0.00011320385600473635, + "loss": 0.1823, + "step": 33720 + }, + { + "epoch": 1.3023668867523843, + "grad_norm": 3.845738649368286, + "learning_rate": 0.00011317811498513458, + "loss": 0.2542, + "step": 33730 + }, + { + "epoch": 1.302753002046411, + "grad_norm": 0.6317747235298157, + "learning_rate": 0.00011315237396553278, + "loss": 0.22, + "step": 33740 + }, + { + "epoch": 1.3031391173404379, + "grad_norm": 2.5221354961395264, + "learning_rate": 0.000113126632945931, + "loss": 0.2253, + "step": 33750 + }, + { + "epoch": 1.3035252326344646, + "grad_norm": 1.3326247930526733, + "learning_rate": 0.0001131008919263292, + "loss": 0.2021, + "step": 33760 + }, + { + "epoch": 1.3039113479284914, + "grad_norm": 1.148047685623169, + "learning_rate": 0.00011307515090672742, + "loss": 0.3987, + "step": 33770 + }, + { + "epoch": 1.3042974632225182, + "grad_norm": 0.19721268117427826, + "learning_rate": 0.00011304940988712562, + "loss": 0.2642, + "step": 33780 + }, + { + "epoch": 1.3046835785165452, + "grad_norm": 1.4060617685317993, + "learning_rate": 0.00011302366886752385, + "loss": 0.2736, + "step": 33790 + }, + { + "epoch": 1.305069693810572, + "grad_norm": 1.0736548900604248, + "learning_rate": 0.00011299792784792207, + "loss": 0.2448, + "step": 33800 + }, + { + "epoch": 1.3054558091045987, + "grad_norm": 4.352476119995117, + "learning_rate": 0.00011297218682832027, + "loss": 0.383, + "step": 33810 + }, + { + "epoch": 1.3058419243986255, + "grad_norm": 0.2249228060245514, + "learning_rate": 0.0001129464458087185, + "loss": 0.14, + "step": 33820 + }, + { + "epoch": 1.3062280396926522, + "grad_norm": 0.4820781946182251, + "learning_rate": 0.0001129207047891167, + "loss": 0.248, + "step": 33830 + }, + { + "epoch": 
1.306614154986679, + "grad_norm": 2.2983391284942627, + "learning_rate": 0.00011289496376951491, + "loss": 0.2608, + "step": 33840 + }, + { + "epoch": 1.3070002702807058, + "grad_norm": 1.3315671682357788, + "learning_rate": 0.00011286922274991314, + "loss": 0.1727, + "step": 33850 + }, + { + "epoch": 1.3073863855747327, + "grad_norm": 2.060299873352051, + "learning_rate": 0.00011284348173031134, + "loss": 0.3097, + "step": 33860 + }, + { + "epoch": 1.3077725008687593, + "grad_norm": 2.096285581588745, + "learning_rate": 0.00011281774071070957, + "loss": 0.2305, + "step": 33870 + }, + { + "epoch": 1.3081586161627863, + "grad_norm": 0.4997636675834656, + "learning_rate": 0.00011279199969110777, + "loss": 0.1993, + "step": 33880 + }, + { + "epoch": 1.308544731456813, + "grad_norm": 0.58636474609375, + "learning_rate": 0.00011276625867150599, + "loss": 0.1873, + "step": 33890 + }, + { + "epoch": 1.3089308467508398, + "grad_norm": 0.9128592610359192, + "learning_rate": 0.00011274051765190419, + "loss": 0.1885, + "step": 33900 + }, + { + "epoch": 1.3093169620448666, + "grad_norm": 2.228043794631958, + "learning_rate": 0.0001127147766323024, + "loss": 0.3649, + "step": 33910 + }, + { + "epoch": 1.3097030773388934, + "grad_norm": 1.069002389907837, + "learning_rate": 0.00011268903561270063, + "loss": 0.5454, + "step": 33920 + }, + { + "epoch": 1.3100891926329201, + "grad_norm": 0.6207597851753235, + "learning_rate": 0.00011266329459309883, + "loss": 0.2329, + "step": 33930 + }, + { + "epoch": 1.3104753079269469, + "grad_norm": 1.262247920036316, + "learning_rate": 0.00011263755357349706, + "loss": 0.3437, + "step": 33940 + }, + { + "epoch": 1.3108614232209739, + "grad_norm": 1.7429994344711304, + "learning_rate": 0.00011261181255389526, + "loss": 0.228, + "step": 33950 + }, + { + "epoch": 1.3112475385150006, + "grad_norm": 0.646900475025177, + "learning_rate": 0.00011258607153429349, + "loss": 0.3739, + "step": 33960 + }, + { + "epoch": 1.3116336538090274, + "grad_norm": 1.8228782415390015, + "learning_rate": 0.0001125603305146917, + "loss": 0.2325, + "step": 33970 + }, + { + "epoch": 1.3120197691030542, + "grad_norm": 3.539228916168213, + "learning_rate": 0.0001125345894950899, + "loss": 0.194, + "step": 33980 + }, + { + "epoch": 1.312405884397081, + "grad_norm": 1.2801135778427124, + "learning_rate": 0.00011250884847548813, + "loss": 0.3069, + "step": 33990 + }, + { + "epoch": 1.3127919996911077, + "grad_norm": 3.6265695095062256, + "learning_rate": 0.00011248310745588633, + "loss": 0.3113, + "step": 34000 + }, + { + "epoch": 1.3131781149851345, + "grad_norm": 0.07370063662528992, + "learning_rate": 0.00011245736643628455, + "loss": 0.1449, + "step": 34010 + }, + { + "epoch": 1.3135642302791615, + "grad_norm": 1.0295637845993042, + "learning_rate": 0.00011243162541668275, + "loss": 0.22, + "step": 34020 + }, + { + "epoch": 1.313950345573188, + "grad_norm": 0.8803662061691284, + "learning_rate": 0.00011240588439708098, + "loss": 0.1368, + "step": 34030 + }, + { + "epoch": 1.314336460867215, + "grad_norm": 1.6597707271575928, + "learning_rate": 0.00011238014337747919, + "loss": 0.3038, + "step": 34040 + }, + { + "epoch": 1.3147225761612418, + "grad_norm": 2.115492343902588, + "learning_rate": 0.00011235440235787739, + "loss": 0.1754, + "step": 34050 + }, + { + "epoch": 1.3151086914552685, + "grad_norm": 0.8143919706344604, + "learning_rate": 0.00011232866133827562, + "loss": 0.3764, + "step": 34060 + }, + { + "epoch": 1.3154948067492953, + "grad_norm": 0.14369767904281616, + "learning_rate": 
0.00011230292031867382, + "loss": 0.1742, + "step": 34070 + }, + { + "epoch": 1.315880922043322, + "grad_norm": 1.0129845142364502, + "learning_rate": 0.00011227717929907205, + "loss": 0.1458, + "step": 34080 + }, + { + "epoch": 1.316267037337349, + "grad_norm": 2.7300291061401367, + "learning_rate": 0.00011225143827947025, + "loss": 0.3939, + "step": 34090 + }, + { + "epoch": 1.3166531526313756, + "grad_norm": 0.20205609500408173, + "learning_rate": 0.00011222569725986847, + "loss": 0.29, + "step": 34100 + }, + { + "epoch": 1.3170392679254026, + "grad_norm": 1.8928464651107788, + "learning_rate": 0.00011219995624026669, + "loss": 0.1742, + "step": 34110 + }, + { + "epoch": 1.3174253832194294, + "grad_norm": 0.2639687955379486, + "learning_rate": 0.00011217421522066488, + "loss": 0.1745, + "step": 34120 + }, + { + "epoch": 1.3178114985134561, + "grad_norm": 0.5906389355659485, + "learning_rate": 0.00011214847420106311, + "loss": 0.2134, + "step": 34130 + }, + { + "epoch": 1.318197613807483, + "grad_norm": 0.9190629720687866, + "learning_rate": 0.00011212273318146131, + "loss": 0.2547, + "step": 34140 + }, + { + "epoch": 1.3185837291015097, + "grad_norm": 0.5749151110649109, + "learning_rate": 0.00011209699216185954, + "loss": 0.1688, + "step": 34150 + }, + { + "epoch": 1.3189698443955364, + "grad_norm": 0.82295823097229, + "learning_rate": 0.00011207125114225777, + "loss": 0.2884, + "step": 34160 + }, + { + "epoch": 1.3193559596895632, + "grad_norm": 0.07816460728645325, + "learning_rate": 0.00011204551012265597, + "loss": 0.2418, + "step": 34170 + }, + { + "epoch": 1.3197420749835902, + "grad_norm": 0.6417407393455505, + "learning_rate": 0.00011201976910305418, + "loss": 0.2557, + "step": 34180 + }, + { + "epoch": 1.320128190277617, + "grad_norm": 6.093267440795898, + "learning_rate": 0.00011199402808345238, + "loss": 0.3088, + "step": 34190 + }, + { + "epoch": 1.3205143055716437, + "grad_norm": 1.8861887454986572, + "learning_rate": 0.0001119682870638506, + "loss": 0.2204, + "step": 34200 + }, + { + "epoch": 1.3209004208656705, + "grad_norm": 2.3272714614868164, + "learning_rate": 0.0001119425460442488, + "loss": 0.2236, + "step": 34210 + }, + { + "epoch": 1.3212865361596973, + "grad_norm": 0.9608810544013977, + "learning_rate": 0.00011191680502464703, + "loss": 0.1897, + "step": 34220 + }, + { + "epoch": 1.321672651453724, + "grad_norm": 1.2157350778579712, + "learning_rate": 0.00011189106400504526, + "loss": 0.1526, + "step": 34230 + }, + { + "epoch": 1.3220587667477508, + "grad_norm": 1.6684671640396118, + "learning_rate": 0.00011186532298544346, + "loss": 0.3394, + "step": 34240 + }, + { + "epoch": 1.3224448820417778, + "grad_norm": 2.0432374477386475, + "learning_rate": 0.00011183958196584167, + "loss": 0.2183, + "step": 34250 + }, + { + "epoch": 1.3228309973358043, + "grad_norm": 0.9436892867088318, + "learning_rate": 0.00011181384094623988, + "loss": 0.2947, + "step": 34260 + }, + { + "epoch": 1.3232171126298313, + "grad_norm": 0.23260092735290527, + "learning_rate": 0.0001117880999266381, + "loss": 0.114, + "step": 34270 + }, + { + "epoch": 1.323603227923858, + "grad_norm": 1.2291594743728638, + "learning_rate": 0.0001117623589070363, + "loss": 0.3145, + "step": 34280 + }, + { + "epoch": 1.3239893432178849, + "grad_norm": 0.41411107778549194, + "learning_rate": 0.00011173661788743452, + "loss": 0.2937, + "step": 34290 + }, + { + "epoch": 1.3243754585119116, + "grad_norm": 2.354405164718628, + "learning_rate": 0.00011171087686783275, + "loss": 0.3933, + "step": 34300 + }, + { + 
"epoch": 1.3247615738059384, + "grad_norm": 2.6997978687286377, + "learning_rate": 0.00011168513584823095, + "loss": 0.1494, + "step": 34310 + }, + { + "epoch": 1.3251476890999654, + "grad_norm": 2.8430919647216797, + "learning_rate": 0.00011165939482862916, + "loss": 0.2869, + "step": 34320 + }, + { + "epoch": 1.325533804393992, + "grad_norm": 1.1737356185913086, + "learning_rate": 0.00011163365380902738, + "loss": 0.2792, + "step": 34330 + }, + { + "epoch": 1.325919919688019, + "grad_norm": 4.123973846435547, + "learning_rate": 0.00011160791278942559, + "loss": 0.5211, + "step": 34340 + }, + { + "epoch": 1.3263060349820457, + "grad_norm": 0.8862038850784302, + "learning_rate": 0.00011158217176982382, + "loss": 0.2976, + "step": 34350 + }, + { + "epoch": 1.3266921502760725, + "grad_norm": 1.8690590858459473, + "learning_rate": 0.00011155643075022202, + "loss": 0.2485, + "step": 34360 + }, + { + "epoch": 1.3270782655700992, + "grad_norm": 2.885589599609375, + "learning_rate": 0.00011153068973062024, + "loss": 0.3084, + "step": 34370 + }, + { + "epoch": 1.327464380864126, + "grad_norm": 0.9898788928985596, + "learning_rate": 0.00011150494871101844, + "loss": 0.261, + "step": 34380 + }, + { + "epoch": 1.3278504961581528, + "grad_norm": 0.6879653930664062, + "learning_rate": 0.00011147920769141667, + "loss": 0.2082, + "step": 34390 + }, + { + "epoch": 1.3282366114521795, + "grad_norm": 1.2619003057479858, + "learning_rate": 0.00011145346667181487, + "loss": 0.2402, + "step": 34400 + }, + { + "epoch": 1.3286227267462065, + "grad_norm": 1.1212007999420166, + "learning_rate": 0.00011142772565221308, + "loss": 0.3525, + "step": 34410 + }, + { + "epoch": 1.3290088420402333, + "grad_norm": 1.8431956768035889, + "learning_rate": 0.00011140198463261131, + "loss": 0.2212, + "step": 34420 + }, + { + "epoch": 1.32939495733426, + "grad_norm": 0.6185423731803894, + "learning_rate": 0.00011137624361300951, + "loss": 0.2455, + "step": 34430 + }, + { + "epoch": 1.3297810726282868, + "grad_norm": 2.3791301250457764, + "learning_rate": 0.00011135050259340774, + "loss": 0.1763, + "step": 34440 + }, + { + "epoch": 1.3301671879223136, + "grad_norm": 0.4928603768348694, + "learning_rate": 0.00011132476157380594, + "loss": 0.2381, + "step": 34450 + }, + { + "epoch": 1.3305533032163404, + "grad_norm": 1.5636029243469238, + "learning_rate": 0.00011129902055420416, + "loss": 0.1368, + "step": 34460 + }, + { + "epoch": 1.3309394185103671, + "grad_norm": 0.9425283074378967, + "learning_rate": 0.00011127327953460238, + "loss": 0.2825, + "step": 34470 + }, + { + "epoch": 1.3313255338043941, + "grad_norm": 1.2257115840911865, + "learning_rate": 0.00011124753851500058, + "loss": 0.2547, + "step": 34480 + }, + { + "epoch": 1.3317116490984207, + "grad_norm": 0.9416170716285706, + "learning_rate": 0.0001112217974953988, + "loss": 0.2766, + "step": 34490 + }, + { + "epoch": 1.3320977643924476, + "grad_norm": 0.5123847126960754, + "learning_rate": 0.000111196056475797, + "loss": 0.4733, + "step": 34500 + }, + { + "epoch": 1.3324838796864744, + "grad_norm": 1.5581384897232056, + "learning_rate": 0.00011117031545619523, + "loss": 0.1597, + "step": 34510 + }, + { + "epoch": 1.3328699949805012, + "grad_norm": 2.377333879470825, + "learning_rate": 0.00011114457443659343, + "loss": 0.209, + "step": 34520 + }, + { + "epoch": 1.333256110274528, + "grad_norm": 1.7840913534164429, + "learning_rate": 0.00011111883341699166, + "loss": 0.1759, + "step": 34530 + }, + { + "epoch": 1.3336422255685547, + "grad_norm": 1.1825993061065674, + 
"learning_rate": 0.00011109309239738987, + "loss": 0.2464, + "step": 34540 + }, + { + "epoch": 1.3340283408625815, + "grad_norm": 1.8859659433364868, + "learning_rate": 0.00011106735137778807, + "loss": 0.3539, + "step": 34550 + }, + { + "epoch": 1.3344144561566083, + "grad_norm": 1.9698175191879272, + "learning_rate": 0.0001110416103581863, + "loss": 0.3301, + "step": 34560 + }, + { + "epoch": 1.3348005714506352, + "grad_norm": 0.7649385333061218, + "learning_rate": 0.0001110158693385845, + "loss": 0.232, + "step": 34570 + }, + { + "epoch": 1.335186686744662, + "grad_norm": 0.56386399269104, + "learning_rate": 0.00011099012831898272, + "loss": 0.3425, + "step": 34580 + }, + { + "epoch": 1.3355728020386888, + "grad_norm": 2.956003189086914, + "learning_rate": 0.00011096438729938092, + "loss": 0.1518, + "step": 34590 + }, + { + "epoch": 1.3359589173327155, + "grad_norm": 2.612029552459717, + "learning_rate": 0.00011093864627977915, + "loss": 0.2765, + "step": 34600 + }, + { + "epoch": 1.3363450326267423, + "grad_norm": 0.9674397706985474, + "learning_rate": 0.00011091290526017736, + "loss": 0.303, + "step": 34610 + }, + { + "epoch": 1.336731147920769, + "grad_norm": 0.9578921794891357, + "learning_rate": 0.00011088716424057556, + "loss": 0.1405, + "step": 34620 + }, + { + "epoch": 1.3371172632147958, + "grad_norm": 2.168065071105957, + "learning_rate": 0.00011086142322097379, + "loss": 0.1914, + "step": 34630 + }, + { + "epoch": 1.3375033785088228, + "grad_norm": 1.3166526556015015, + "learning_rate": 0.00011083568220137199, + "loss": 0.4134, + "step": 34640 + }, + { + "epoch": 1.3378894938028496, + "grad_norm": 0.9082283973693848, + "learning_rate": 0.00011080994118177022, + "loss": 0.2693, + "step": 34650 + }, + { + "epoch": 1.3382756090968764, + "grad_norm": 2.203007698059082, + "learning_rate": 0.00011078420016216844, + "loss": 0.1847, + "step": 34660 + }, + { + "epoch": 1.3386617243909031, + "grad_norm": 0.8101674914360046, + "learning_rate": 0.00011075845914256664, + "loss": 0.3111, + "step": 34670 + }, + { + "epoch": 1.33904783968493, + "grad_norm": 1.9545695781707764, + "learning_rate": 0.00011073271812296486, + "loss": 0.3761, + "step": 34680 + }, + { + "epoch": 1.3394339549789567, + "grad_norm": 1.547581672668457, + "learning_rate": 0.00011070697710336306, + "loss": 0.2374, + "step": 34690 + }, + { + "epoch": 1.3398200702729834, + "grad_norm": 3.3519034385681152, + "learning_rate": 0.00011068123608376128, + "loss": 0.1957, + "step": 34700 + }, + { + "epoch": 1.3402061855670104, + "grad_norm": 1.5508599281311035, + "learning_rate": 0.00011065549506415948, + "loss": 0.4171, + "step": 34710 + }, + { + "epoch": 1.340592300861037, + "grad_norm": 1.8547546863555908, + "learning_rate": 0.00011062975404455771, + "loss": 0.1872, + "step": 34720 + }, + { + "epoch": 1.340978416155064, + "grad_norm": 1.4600756168365479, + "learning_rate": 0.00011060401302495594, + "loss": 0.3515, + "step": 34730 + }, + { + "epoch": 1.3413645314490907, + "grad_norm": 0.05774044618010521, + "learning_rate": 0.00011057827200535414, + "loss": 0.1604, + "step": 34740 + }, + { + "epoch": 1.3417506467431175, + "grad_norm": 2.8793342113494873, + "learning_rate": 0.00011055253098575235, + "loss": 0.3095, + "step": 34750 + }, + { + "epoch": 1.3421367620371443, + "grad_norm": 2.241042375564575, + "learning_rate": 0.00011052678996615055, + "loss": 0.2511, + "step": 34760 + }, + { + "epoch": 1.342522877331171, + "grad_norm": 1.9320632219314575, + "learning_rate": 0.00011050104894654878, + "loss": 0.4493, + "step": 34770 + 
}, + { + "epoch": 1.3429089926251978, + "grad_norm": 1.6483882665634155, + "learning_rate": 0.000110475307926947, + "loss": 0.217, + "step": 34780 + }, + { + "epoch": 1.3432951079192246, + "grad_norm": 0.9635765552520752, + "learning_rate": 0.0001104495669073452, + "loss": 0.5458, + "step": 34790 + }, + { + "epoch": 1.3436812232132516, + "grad_norm": 1.2436567544937134, + "learning_rate": 0.00011042382588774343, + "loss": 0.2857, + "step": 34800 + }, + { + "epoch": 1.3440673385072783, + "grad_norm": 2.8082425594329834, + "learning_rate": 0.00011039808486814163, + "loss": 0.3439, + "step": 34810 + }, + { + "epoch": 1.344453453801305, + "grad_norm": 1.0430901050567627, + "learning_rate": 0.00011037234384853984, + "loss": 0.1404, + "step": 34820 + }, + { + "epoch": 1.3448395690953319, + "grad_norm": 1.7387149333953857, + "learning_rate": 0.00011034660282893806, + "loss": 0.395, + "step": 34830 + }, + { + "epoch": 1.3452256843893586, + "grad_norm": 1.2713748216629028, + "learning_rate": 0.00011032086180933627, + "loss": 0.29, + "step": 34840 + }, + { + "epoch": 1.3456117996833854, + "grad_norm": 0.26068204641342163, + "learning_rate": 0.0001102951207897345, + "loss": 0.1814, + "step": 34850 + }, + { + "epoch": 1.3459979149774122, + "grad_norm": 2.4163243770599365, + "learning_rate": 0.0001102693797701327, + "loss": 0.2445, + "step": 34860 + }, + { + "epoch": 1.3463840302714392, + "grad_norm": 2.2439687252044678, + "learning_rate": 0.00011024363875053092, + "loss": 0.3338, + "step": 34870 + }, + { + "epoch": 1.346770145565466, + "grad_norm": 0.2822403609752655, + "learning_rate": 0.00011021789773092912, + "loss": 0.1648, + "step": 34880 + }, + { + "epoch": 1.3471562608594927, + "grad_norm": 0.07319017499685287, + "learning_rate": 0.00011019215671132734, + "loss": 0.107, + "step": 34890 + }, + { + "epoch": 1.3475423761535195, + "grad_norm": 0.9809044003486633, + "learning_rate": 0.00011016641569172555, + "loss": 0.256, + "step": 34900 + }, + { + "epoch": 1.3479284914475462, + "grad_norm": 0.5016226768493652, + "learning_rate": 0.00011014067467212376, + "loss": 0.3229, + "step": 34910 + }, + { + "epoch": 1.348314606741573, + "grad_norm": 1.3026005029678345, + "learning_rate": 0.00011011493365252199, + "loss": 0.2145, + "step": 34920 + }, + { + "epoch": 1.3487007220355998, + "grad_norm": 1.0752215385437012, + "learning_rate": 0.00011008919263292019, + "loss": 0.2355, + "step": 34930 + }, + { + "epoch": 1.3490868373296268, + "grad_norm": 2.2703003883361816, + "learning_rate": 0.00011006345161331842, + "loss": 0.2079, + "step": 34940 + }, + { + "epoch": 1.3494729526236533, + "grad_norm": 1.1323810815811157, + "learning_rate": 0.00011003771059371662, + "loss": 0.1015, + "step": 34950 + }, + { + "epoch": 1.3498590679176803, + "grad_norm": 0.10813555121421814, + "learning_rate": 0.00011001196957411484, + "loss": 0.4214, + "step": 34960 + }, + { + "epoch": 1.350245183211707, + "grad_norm": 0.07815568149089813, + "learning_rate": 0.00010998622855451306, + "loss": 0.1211, + "step": 34970 + }, + { + "epoch": 1.3506312985057338, + "grad_norm": 0.6748234629631042, + "learning_rate": 0.00010996048753491126, + "loss": 0.3508, + "step": 34980 + }, + { + "epoch": 1.3510174137997606, + "grad_norm": 1.8556997776031494, + "learning_rate": 0.00010993474651530948, + "loss": 0.2268, + "step": 34990 + }, + { + "epoch": 1.3514035290937874, + "grad_norm": 0.8696061372756958, + "learning_rate": 0.00010990900549570768, + "loss": 0.4321, + "step": 35000 + }, + { + "epoch": 1.3517896443878141, + "grad_norm": 
0.42442765831947327, + "learning_rate": 0.00010988326447610591, + "loss": 0.1944, + "step": 35010 + }, + { + "epoch": 1.352175759681841, + "grad_norm": 1.0474554300308228, + "learning_rate": 0.00010985752345650411, + "loss": 0.1342, + "step": 35020 + }, + { + "epoch": 1.3525618749758679, + "grad_norm": 0.607037365436554, + "learning_rate": 0.00010983178243690234, + "loss": 0.2965, + "step": 35030 + }, + { + "epoch": 1.3529479902698947, + "grad_norm": 1.8160990476608276, + "learning_rate": 0.00010980604141730055, + "loss": 0.3192, + "step": 35040 + }, + { + "epoch": 1.3533341055639214, + "grad_norm": 2.0026509761810303, + "learning_rate": 0.00010978030039769875, + "loss": 0.3054, + "step": 35050 + }, + { + "epoch": 1.3537202208579482, + "grad_norm": 0.9203600883483887, + "learning_rate": 0.00010975455937809698, + "loss": 0.253, + "step": 35060 + }, + { + "epoch": 1.354106336151975, + "grad_norm": 0.33198195695877075, + "learning_rate": 0.00010972881835849518, + "loss": 0.3885, + "step": 35070 + }, + { + "epoch": 1.3544924514460017, + "grad_norm": 0.3201223611831665, + "learning_rate": 0.0001097030773388934, + "loss": 0.3029, + "step": 35080 + }, + { + "epoch": 1.3548785667400285, + "grad_norm": 1.2589943408966064, + "learning_rate": 0.0001096773363192916, + "loss": 0.4243, + "step": 35090 + }, + { + "epoch": 1.3552646820340555, + "grad_norm": 1.5106219053268433, + "learning_rate": 0.00010965159529968983, + "loss": 0.2585, + "step": 35100 + }, + { + "epoch": 1.3556507973280822, + "grad_norm": 1.429799199104309, + "learning_rate": 0.00010962585428008804, + "loss": 0.1961, + "step": 35110 + }, + { + "epoch": 1.356036912622109, + "grad_norm": 2.1211297512054443, + "learning_rate": 0.00010960011326048624, + "loss": 0.4057, + "step": 35120 + }, + { + "epoch": 1.3564230279161358, + "grad_norm": 2.5154731273651123, + "learning_rate": 0.00010957437224088447, + "loss": 0.3787, + "step": 35130 + }, + { + "epoch": 1.3568091432101625, + "grad_norm": 0.4914834201335907, + "learning_rate": 0.00010954863122128267, + "loss": 0.234, + "step": 35140 + }, + { + "epoch": 1.3571952585041893, + "grad_norm": 0.26685893535614014, + "learning_rate": 0.0001095228902016809, + "loss": 0.2841, + "step": 35150 + }, + { + "epoch": 1.357581373798216, + "grad_norm": 0.15462155640125275, + "learning_rate": 0.00010949714918207912, + "loss": 0.2269, + "step": 35160 + }, + { + "epoch": 1.357967489092243, + "grad_norm": 1.3887063264846802, + "learning_rate": 0.00010947140816247732, + "loss": 0.3455, + "step": 35170 + }, + { + "epoch": 1.3583536043862696, + "grad_norm": 0.786374032497406, + "learning_rate": 0.00010944566714287554, + "loss": 0.2897, + "step": 35180 + }, + { + "epoch": 1.3587397196802966, + "grad_norm": 1.100475549697876, + "learning_rate": 0.00010941992612327374, + "loss": 0.2892, + "step": 35190 + }, + { + "epoch": 1.3591258349743234, + "grad_norm": 0.7676102519035339, + "learning_rate": 0.00010939418510367196, + "loss": 0.1942, + "step": 35200 + }, + { + "epoch": 1.3595119502683501, + "grad_norm": 0.33462053537368774, + "learning_rate": 0.00010936844408407016, + "loss": 0.2872, + "step": 35210 + }, + { + "epoch": 1.359898065562377, + "grad_norm": 0.9294387698173523, + "learning_rate": 0.00010934270306446839, + "loss": 0.2617, + "step": 35220 + }, + { + "epoch": 1.3602841808564037, + "grad_norm": 0.3169979453086853, + "learning_rate": 0.00010931696204486662, + "loss": 0.2942, + "step": 35230 + }, + { + "epoch": 1.3606702961504304, + "grad_norm": 2.1339616775512695, + "learning_rate": 0.00010929122102526482, + 
"loss": 0.4448, + "step": 35240 + }, + { + "epoch": 1.3610564114444572, + "grad_norm": 0.9430062770843506, + "learning_rate": 0.00010926548000566303, + "loss": 0.2051, + "step": 35250 + }, + { + "epoch": 1.3614425267384842, + "grad_norm": 3.1187360286712646, + "learning_rate": 0.00010923973898606123, + "loss": 0.2274, + "step": 35260 + }, + { + "epoch": 1.361828642032511, + "grad_norm": 1.4727579355239868, + "learning_rate": 0.00010921399796645946, + "loss": 0.3757, + "step": 35270 + }, + { + "epoch": 1.3622147573265377, + "grad_norm": 2.157560348510742, + "learning_rate": 0.00010918825694685768, + "loss": 0.3096, + "step": 35280 + }, + { + "epoch": 1.3626008726205645, + "grad_norm": 0.33457377552986145, + "learning_rate": 0.00010916251592725588, + "loss": 0.1489, + "step": 35290 + }, + { + "epoch": 1.3629869879145913, + "grad_norm": 0.9005904197692871, + "learning_rate": 0.00010913677490765411, + "loss": 0.1826, + "step": 35300 + }, + { + "epoch": 1.363373103208618, + "grad_norm": 2.1222829818725586, + "learning_rate": 0.00010911103388805231, + "loss": 0.1965, + "step": 35310 + }, + { + "epoch": 1.3637592185026448, + "grad_norm": 1.3881357908248901, + "learning_rate": 0.00010908529286845052, + "loss": 0.1791, + "step": 35320 + }, + { + "epoch": 1.3641453337966718, + "grad_norm": 1.7574503421783447, + "learning_rate": 0.00010905955184884872, + "loss": 0.3316, + "step": 35330 + }, + { + "epoch": 1.3645314490906983, + "grad_norm": 0.1967727392911911, + "learning_rate": 0.00010903381082924695, + "loss": 0.2331, + "step": 35340 + }, + { + "epoch": 1.3649175643847253, + "grad_norm": 0.8974360823631287, + "learning_rate": 0.00010900806980964518, + "loss": 0.2589, + "step": 35350 + }, + { + "epoch": 1.365303679678752, + "grad_norm": 2.0996744632720947, + "learning_rate": 0.00010898232879004338, + "loss": 0.3663, + "step": 35360 + }, + { + "epoch": 1.3656897949727789, + "grad_norm": 0.5678316354751587, + "learning_rate": 0.0001089565877704416, + "loss": 0.1729, + "step": 35370 + }, + { + "epoch": 1.3660759102668056, + "grad_norm": 2.3381874561309814, + "learning_rate": 0.0001089308467508398, + "loss": 0.1615, + "step": 35380 + }, + { + "epoch": 1.3664620255608324, + "grad_norm": 1.0276836156845093, + "learning_rate": 0.00010890510573123802, + "loss": 0.3359, + "step": 35390 + }, + { + "epoch": 1.3668481408548594, + "grad_norm": 2.4374940395355225, + "learning_rate": 0.00010887936471163622, + "loss": 0.2435, + "step": 35400 + }, + { + "epoch": 1.367234256148886, + "grad_norm": 0.45221665501594543, + "learning_rate": 0.00010885362369203444, + "loss": 0.2555, + "step": 35410 + }, + { + "epoch": 1.367620371442913, + "grad_norm": 2.608090400695801, + "learning_rate": 0.00010882788267243267, + "loss": 0.2465, + "step": 35420 + }, + { + "epoch": 1.3680064867369397, + "grad_norm": 1.4186642169952393, + "learning_rate": 0.00010880214165283087, + "loss": 0.1674, + "step": 35430 + }, + { + "epoch": 1.3683926020309665, + "grad_norm": 0.659479022026062, + "learning_rate": 0.0001087764006332291, + "loss": 0.2926, + "step": 35440 + }, + { + "epoch": 1.3687787173249932, + "grad_norm": 0.9219567179679871, + "learning_rate": 0.0001087506596136273, + "loss": 0.2001, + "step": 35450 + }, + { + "epoch": 1.36916483261902, + "grad_norm": 0.8070804476737976, + "learning_rate": 0.00010872491859402551, + "loss": 0.2178, + "step": 35460 + }, + { + "epoch": 1.3695509479130468, + "grad_norm": 2.9981069564819336, + "learning_rate": 0.00010869917757442374, + "loss": 0.3079, + "step": 35470 + }, + { + "epoch": 
1.3699370632070735, + "grad_norm": 0.7891242504119873, + "learning_rate": 0.00010867343655482194, + "loss": 0.2765, + "step": 35480 + }, + { + "epoch": 1.3703231785011005, + "grad_norm": 1.448637843132019, + "learning_rate": 0.00010864769553522016, + "loss": 0.3521, + "step": 35490 + }, + { + "epoch": 1.3707092937951273, + "grad_norm": 0.07628043740987778, + "learning_rate": 0.00010862195451561836, + "loss": 0.2083, + "step": 35500 + }, + { + "epoch": 1.371095409089154, + "grad_norm": 0.7549735307693481, + "learning_rate": 0.00010859621349601659, + "loss": 0.2536, + "step": 35510 + }, + { + "epoch": 1.3714815243831808, + "grad_norm": 1.3548041582107544, + "learning_rate": 0.00010857047247641479, + "loss": 0.251, + "step": 35520 + }, + { + "epoch": 1.3718676396772076, + "grad_norm": 0.530010998249054, + "learning_rate": 0.000108544731456813, + "loss": 0.1917, + "step": 35530 + }, + { + "epoch": 1.3722537549712344, + "grad_norm": 0.4148992896080017, + "learning_rate": 0.00010851899043721123, + "loss": 0.335, + "step": 35540 + }, + { + "epoch": 1.3726398702652611, + "grad_norm": 1.5118776559829712, + "learning_rate": 0.00010849324941760943, + "loss": 0.2159, + "step": 35550 + }, + { + "epoch": 1.3730259855592881, + "grad_norm": 1.036889910697937, + "learning_rate": 0.00010846750839800766, + "loss": 0.2975, + "step": 35560 + }, + { + "epoch": 1.3734121008533147, + "grad_norm": 1.724263072013855, + "learning_rate": 0.00010844176737840586, + "loss": 0.1476, + "step": 35570 + }, + { + "epoch": 1.3737982161473417, + "grad_norm": 1.599007487297058, + "learning_rate": 0.00010841602635880408, + "loss": 0.2539, + "step": 35580 + }, + { + "epoch": 1.3741843314413684, + "grad_norm": 2.9119279384613037, + "learning_rate": 0.00010839028533920228, + "loss": 0.2688, + "step": 35590 + }, + { + "epoch": 1.3745704467353952, + "grad_norm": 1.8647874593734741, + "learning_rate": 0.00010836454431960051, + "loss": 0.4158, + "step": 35600 + }, + { + "epoch": 1.374956562029422, + "grad_norm": 3.925290822982788, + "learning_rate": 0.00010833880329999872, + "loss": 0.3333, + "step": 35610 + }, + { + "epoch": 1.3753426773234487, + "grad_norm": 0.7124634385108948, + "learning_rate": 0.00010831306228039692, + "loss": 0.1069, + "step": 35620 + }, + { + "epoch": 1.3757287926174757, + "grad_norm": 1.303579330444336, + "learning_rate": 0.00010828732126079515, + "loss": 0.2898, + "step": 35630 + }, + { + "epoch": 1.3761149079115023, + "grad_norm": 3.921804189682007, + "learning_rate": 0.00010826158024119335, + "loss": 0.4212, + "step": 35640 + }, + { + "epoch": 1.3765010232055293, + "grad_norm": 1.3194564580917358, + "learning_rate": 0.00010823583922159158, + "loss": 0.2771, + "step": 35650 + }, + { + "epoch": 1.376887138499556, + "grad_norm": 1.4237637519836426, + "learning_rate": 0.00010821009820198979, + "loss": 0.2463, + "step": 35660 + }, + { + "epoch": 1.3772732537935828, + "grad_norm": 1.8165888786315918, + "learning_rate": 0.000108184357182388, + "loss": 0.291, + "step": 35670 + }, + { + "epoch": 1.3776593690876096, + "grad_norm": 1.1056426763534546, + "learning_rate": 0.00010815861616278622, + "loss": 0.2525, + "step": 35680 + }, + { + "epoch": 1.3780454843816363, + "grad_norm": 1.483189582824707, + "learning_rate": 0.00010813287514318442, + "loss": 0.1569, + "step": 35690 + }, + { + "epoch": 1.378431599675663, + "grad_norm": 1.0666841268539429, + "learning_rate": 0.00010810713412358264, + "loss": 0.235, + "step": 35700 + }, + { + "epoch": 1.3788177149696899, + "grad_norm": 1.0299845933914185, + "learning_rate": 
0.00010808139310398084, + "loss": 0.3892, + "step": 35710 + }, + { + "epoch": 1.3792038302637168, + "grad_norm": 2.3474409580230713, + "learning_rate": 0.00010805565208437907, + "loss": 0.3417, + "step": 35720 + }, + { + "epoch": 1.3795899455577436, + "grad_norm": 1.7456315755844116, + "learning_rate": 0.0001080299110647773, + "loss": 0.2538, + "step": 35730 + }, + { + "epoch": 1.3799760608517704, + "grad_norm": 2.866103410720825, + "learning_rate": 0.0001080041700451755, + "loss": 0.1619, + "step": 35740 + }, + { + "epoch": 1.3803621761457971, + "grad_norm": 0.29136407375335693, + "learning_rate": 0.00010797842902557371, + "loss": 0.2692, + "step": 35750 + }, + { + "epoch": 1.380748291439824, + "grad_norm": 0.8046161532402039, + "learning_rate": 0.00010795268800597191, + "loss": 0.1575, + "step": 35760 + }, + { + "epoch": 1.3811344067338507, + "grad_norm": 0.6451787352561951, + "learning_rate": 0.00010792694698637014, + "loss": 0.4914, + "step": 35770 + }, + { + "epoch": 1.3815205220278775, + "grad_norm": 0.7289161086082458, + "learning_rate": 0.00010790120596676836, + "loss": 0.1895, + "step": 35780 + }, + { + "epoch": 1.3819066373219044, + "grad_norm": 0.8300430178642273, + "learning_rate": 0.00010787546494716656, + "loss": 0.3663, + "step": 35790 + }, + { + "epoch": 1.382292752615931, + "grad_norm": 0.17713364958763123, + "learning_rate": 0.00010784972392756479, + "loss": 0.3189, + "step": 35800 + }, + { + "epoch": 1.382678867909958, + "grad_norm": 0.903222918510437, + "learning_rate": 0.00010782398290796299, + "loss": 0.1577, + "step": 35810 + }, + { + "epoch": 1.3830649832039847, + "grad_norm": 0.08617932349443436, + "learning_rate": 0.0001077982418883612, + "loss": 0.2872, + "step": 35820 + }, + { + "epoch": 1.3834510984980115, + "grad_norm": 1.9590895175933838, + "learning_rate": 0.0001077725008687594, + "loss": 0.2907, + "step": 35830 + }, + { + "epoch": 1.3838372137920383, + "grad_norm": 1.2515161037445068, + "learning_rate": 0.00010774675984915763, + "loss": 0.177, + "step": 35840 + }, + { + "epoch": 1.384223329086065, + "grad_norm": 1.6171292066574097, + "learning_rate": 0.00010772101882955586, + "loss": 0.2321, + "step": 35850 + }, + { + "epoch": 1.3846094443800918, + "grad_norm": 0.13681405782699585, + "learning_rate": 0.00010769527780995406, + "loss": 0.247, + "step": 35860 + }, + { + "epoch": 1.3849955596741186, + "grad_norm": 1.1949968338012695, + "learning_rate": 0.00010766953679035228, + "loss": 0.2978, + "step": 35870 + }, + { + "epoch": 1.3853816749681456, + "grad_norm": 0.17001692950725555, + "learning_rate": 0.00010764379577075048, + "loss": 0.3241, + "step": 35880 + }, + { + "epoch": 1.3857677902621723, + "grad_norm": 0.8227952122688293, + "learning_rate": 0.0001076180547511487, + "loss": 0.3499, + "step": 35890 + }, + { + "epoch": 1.386153905556199, + "grad_norm": 1.4185482263565063, + "learning_rate": 0.0001075923137315469, + "loss": 0.3109, + "step": 35900 + }, + { + "epoch": 1.3865400208502259, + "grad_norm": 0.9533351063728333, + "learning_rate": 0.00010756657271194512, + "loss": 0.24, + "step": 35910 + }, + { + "epoch": 1.3869261361442526, + "grad_norm": 1.077789306640625, + "learning_rate": 0.00010754083169234335, + "loss": 0.2662, + "step": 35920 + }, + { + "epoch": 1.3873122514382794, + "grad_norm": 1.3528363704681396, + "learning_rate": 0.00010751509067274155, + "loss": 0.1623, + "step": 35930 + }, + { + "epoch": 1.3876983667323062, + "grad_norm": 0.25122806429862976, + "learning_rate": 0.00010748934965313978, + "loss": 0.1639, + "step": 35940 + }, + { + 
"epoch": 1.3880844820263332, + "grad_norm": 0.9446159601211548, + "learning_rate": 0.00010746360863353798, + "loss": 0.2035, + "step": 35950 + }, + { + "epoch": 1.38847059732036, + "grad_norm": 1.2258719205856323, + "learning_rate": 0.00010743786761393619, + "loss": 0.4247, + "step": 35960 + }, + { + "epoch": 1.3888567126143867, + "grad_norm": 3.117729663848877, + "learning_rate": 0.00010741212659433442, + "loss": 0.2569, + "step": 35970 + }, + { + "epoch": 1.3892428279084135, + "grad_norm": 0.6333123445510864, + "learning_rate": 0.00010738638557473262, + "loss": 0.1843, + "step": 35980 + }, + { + "epoch": 1.3896289432024402, + "grad_norm": 1.49360191822052, + "learning_rate": 0.00010736064455513084, + "loss": 0.2318, + "step": 35990 + }, + { + "epoch": 1.390015058496467, + "grad_norm": 3.9082753658294678, + "learning_rate": 0.00010733490353552904, + "loss": 0.2037, + "step": 36000 + }, + { + "epoch": 1.3904011737904938, + "grad_norm": 0.5687323808670044, + "learning_rate": 0.00010730916251592727, + "loss": 0.0871, + "step": 36010 + }, + { + "epoch": 1.3907872890845208, + "grad_norm": 0.3480868339538574, + "learning_rate": 0.00010728342149632547, + "loss": 0.4009, + "step": 36020 + }, + { + "epoch": 1.3911734043785473, + "grad_norm": 1.325042486190796, + "learning_rate": 0.00010725768047672368, + "loss": 0.5143, + "step": 36030 + }, + { + "epoch": 1.3915595196725743, + "grad_norm": 2.114786386489868, + "learning_rate": 0.00010723193945712191, + "loss": 0.2944, + "step": 36040 + }, + { + "epoch": 1.391945634966601, + "grad_norm": 1.716272234916687, + "learning_rate": 0.00010720619843752011, + "loss": 0.3793, + "step": 36050 + }, + { + "epoch": 1.3923317502606278, + "grad_norm": 2.057535171508789, + "learning_rate": 0.00010718045741791834, + "loss": 0.199, + "step": 36060 + }, + { + "epoch": 1.3927178655546546, + "grad_norm": 2.371248483657837, + "learning_rate": 0.00010715471639831653, + "loss": 0.3276, + "step": 36070 + }, + { + "epoch": 1.3931039808486814, + "grad_norm": 0.1784186065196991, + "learning_rate": 0.00010712897537871476, + "loss": 0.0922, + "step": 36080 + }, + { + "epoch": 1.3934900961427081, + "grad_norm": 0.7923040390014648, + "learning_rate": 0.00010710323435911298, + "loss": 0.1599, + "step": 36090 + }, + { + "epoch": 1.393876211436735, + "grad_norm": 1.9013831615447998, + "learning_rate": 0.00010707749333951117, + "loss": 0.4997, + "step": 36100 + }, + { + "epoch": 1.394262326730762, + "grad_norm": 1.8659415245056152, + "learning_rate": 0.0001070517523199094, + "loss": 0.1851, + "step": 36110 + }, + { + "epoch": 1.3946484420247887, + "grad_norm": 2.5775375366210938, + "learning_rate": 0.0001070260113003076, + "loss": 0.2384, + "step": 36120 + }, + { + "epoch": 1.3950345573188154, + "grad_norm": 0.21943879127502441, + "learning_rate": 0.00010700027028070583, + "loss": 0.3996, + "step": 36130 + }, + { + "epoch": 1.3954206726128422, + "grad_norm": 1.1734743118286133, + "learning_rate": 0.00010697452926110403, + "loss": 0.1737, + "step": 36140 + }, + { + "epoch": 1.395806787906869, + "grad_norm": 0.697695791721344, + "learning_rate": 0.00010694878824150225, + "loss": 0.2778, + "step": 36150 + }, + { + "epoch": 1.3961929032008957, + "grad_norm": 3.2881579399108887, + "learning_rate": 0.00010692304722190047, + "loss": 0.3198, + "step": 36160 + }, + { + "epoch": 1.3965790184949225, + "grad_norm": 0.1592467725276947, + "learning_rate": 0.00010689730620229868, + "loss": 0.2591, + "step": 36170 + }, + { + "epoch": 1.3969651337889495, + "grad_norm": 0.4579029083251953, + 
"learning_rate": 0.0001068715651826969, + "loss": 0.2984, + "step": 36180 + }, + { + "epoch": 1.3973512490829763, + "grad_norm": 0.4286015033721924, + "learning_rate": 0.0001068458241630951, + "loss": 0.4098, + "step": 36190 + }, + { + "epoch": 1.397737364377003, + "grad_norm": 1.7824127674102783, + "learning_rate": 0.00010682008314349332, + "loss": 0.2446, + "step": 36200 + }, + { + "epoch": 1.3981234796710298, + "grad_norm": 0.8584449887275696, + "learning_rate": 0.00010679434212389152, + "loss": 0.1415, + "step": 36210 + }, + { + "epoch": 1.3985095949650566, + "grad_norm": 1.1699339151382446, + "learning_rate": 0.00010676860110428975, + "loss": 0.1706, + "step": 36220 + }, + { + "epoch": 1.3988957102590833, + "grad_norm": 2.615877389907837, + "learning_rate": 0.00010674286008468796, + "loss": 0.2605, + "step": 36230 + }, + { + "epoch": 1.39928182555311, + "grad_norm": 2.182037591934204, + "learning_rate": 0.00010671711906508617, + "loss": 0.2067, + "step": 36240 + }, + { + "epoch": 1.399667940847137, + "grad_norm": 2.183263063430786, + "learning_rate": 0.00010669137804548439, + "loss": 0.4704, + "step": 36250 + }, + { + "epoch": 1.4000540561411636, + "grad_norm": 3.505791187286377, + "learning_rate": 0.00010666563702588259, + "loss": 0.3385, + "step": 36260 + }, + { + "epoch": 1.4004401714351906, + "grad_norm": 1.2262030839920044, + "learning_rate": 0.00010663989600628081, + "loss": 0.3999, + "step": 36270 + }, + { + "epoch": 1.4008262867292174, + "grad_norm": 2.4024577140808105, + "learning_rate": 0.00010661415498667904, + "loss": 0.1984, + "step": 36280 + }, + { + "epoch": 1.4012124020232442, + "grad_norm": 0.4166090786457062, + "learning_rate": 0.00010658841396707724, + "loss": 0.2448, + "step": 36290 + }, + { + "epoch": 1.401598517317271, + "grad_norm": 0.422590047121048, + "learning_rate": 0.00010656267294747547, + "loss": 0.2464, + "step": 36300 + }, + { + "epoch": 1.4019846326112977, + "grad_norm": 2.287503480911255, + "learning_rate": 0.00010653693192787367, + "loss": 0.1621, + "step": 36310 + }, + { + "epoch": 1.4023707479053245, + "grad_norm": 0.8126110434532166, + "learning_rate": 0.00010651119090827188, + "loss": 0.2097, + "step": 36320 + }, + { + "epoch": 1.4027568631993512, + "grad_norm": 0.683016836643219, + "learning_rate": 0.00010648544988867008, + "loss": 0.1512, + "step": 36330 + }, + { + "epoch": 1.4031429784933782, + "grad_norm": 1.5477893352508545, + "learning_rate": 0.00010645970886906831, + "loss": 0.2169, + "step": 36340 + }, + { + "epoch": 1.403529093787405, + "grad_norm": 2.183166265487671, + "learning_rate": 0.00010643396784946653, + "loss": 0.4307, + "step": 36350 + }, + { + "epoch": 1.4039152090814317, + "grad_norm": 1.5782747268676758, + "learning_rate": 0.00010640822682986473, + "loss": 0.291, + "step": 36360 + }, + { + "epoch": 1.4043013243754585, + "grad_norm": 1.2823392152786255, + "learning_rate": 0.00010638248581026296, + "loss": 0.315, + "step": 36370 + }, + { + "epoch": 1.4046874396694853, + "grad_norm": 2.0126500129699707, + "learning_rate": 0.00010635674479066116, + "loss": 0.2565, + "step": 36380 + }, + { + "epoch": 1.405073554963512, + "grad_norm": 2.490217447280884, + "learning_rate": 0.00010633100377105937, + "loss": 0.2665, + "step": 36390 + }, + { + "epoch": 1.4054596702575388, + "grad_norm": 1.2054855823516846, + "learning_rate": 0.00010630526275145757, + "loss": 0.5455, + "step": 36400 + }, + { + "epoch": 1.4058457855515658, + "grad_norm": 0.7968757748603821, + "learning_rate": 0.0001062795217318558, + "loss": 0.333, + "step": 36410 + }, 
+ { + "epoch": 1.4062319008455926, + "grad_norm": 1.1027718782424927, + "learning_rate": 0.00010625378071225403, + "loss": 0.2019, + "step": 36420 + }, + { + "epoch": 1.4066180161396193, + "grad_norm": 1.987302541732788, + "learning_rate": 0.00010622803969265223, + "loss": 0.3159, + "step": 36430 + }, + { + "epoch": 1.407004131433646, + "grad_norm": 1.5426512956619263, + "learning_rate": 0.00010620229867305045, + "loss": 0.3759, + "step": 36440 + }, + { + "epoch": 1.4073902467276729, + "grad_norm": 0.5228156447410583, + "learning_rate": 0.00010617655765344865, + "loss": 0.2465, + "step": 36450 + }, + { + "epoch": 1.4077763620216996, + "grad_norm": 0.46890121698379517, + "learning_rate": 0.00010615081663384687, + "loss": 0.302, + "step": 36460 + }, + { + "epoch": 1.4081624773157264, + "grad_norm": 2.1506495475769043, + "learning_rate": 0.0001061250756142451, + "loss": 0.2569, + "step": 36470 + }, + { + "epoch": 1.4085485926097534, + "grad_norm": 2.307468891143799, + "learning_rate": 0.0001060993345946433, + "loss": 0.2009, + "step": 36480 + }, + { + "epoch": 1.40893470790378, + "grad_norm": 0.07033026963472366, + "learning_rate": 0.00010607359357504152, + "loss": 0.1728, + "step": 36490 + }, + { + "epoch": 1.409320823197807, + "grad_norm": 0.3262972831726074, + "learning_rate": 0.00010604785255543972, + "loss": 0.2905, + "step": 36500 + }, + { + "epoch": 1.4097069384918337, + "grad_norm": 0.755646824836731, + "learning_rate": 0.00010602211153583795, + "loss": 0.3287, + "step": 36510 + }, + { + "epoch": 1.4100930537858605, + "grad_norm": 1.1180161237716675, + "learning_rate": 0.00010599637051623615, + "loss": 0.2743, + "step": 36520 + }, + { + "epoch": 1.4104791690798872, + "grad_norm": 1.2358392477035522, + "learning_rate": 0.00010597062949663436, + "loss": 0.2672, + "step": 36530 + }, + { + "epoch": 1.410865284373914, + "grad_norm": 1.4188988208770752, + "learning_rate": 0.00010594488847703259, + "loss": 0.2552, + "step": 36540 + }, + { + "epoch": 1.4112513996679408, + "grad_norm": 1.4727978706359863, + "learning_rate": 0.00010591914745743079, + "loss": 0.2256, + "step": 36550 + }, + { + "epoch": 1.4116375149619675, + "grad_norm": 0.08973213285207748, + "learning_rate": 0.00010589340643782901, + "loss": 0.224, + "step": 36560 + }, + { + "epoch": 1.4120236302559945, + "grad_norm": 0.9915102124214172, + "learning_rate": 0.00010586766541822721, + "loss": 0.195, + "step": 36570 + }, + { + "epoch": 1.4124097455500213, + "grad_norm": 0.8524800539016724, + "learning_rate": 0.00010584192439862544, + "loss": 0.1492, + "step": 36580 + }, + { + "epoch": 1.412795860844048, + "grad_norm": 1.8414466381072998, + "learning_rate": 0.00010581618337902365, + "loss": 0.3412, + "step": 36590 + }, + { + "epoch": 1.4131819761380748, + "grad_norm": 2.596547842025757, + "learning_rate": 0.00010579044235942185, + "loss": 0.3469, + "step": 36600 + }, + { + "epoch": 1.4135680914321016, + "grad_norm": 0.5367813110351562, + "learning_rate": 0.00010576470133982008, + "loss": 0.2274, + "step": 36610 + }, + { + "epoch": 1.4139542067261284, + "grad_norm": 2.70858097076416, + "learning_rate": 0.00010573896032021828, + "loss": 0.171, + "step": 36620 + }, + { + "epoch": 1.4143403220201551, + "grad_norm": 1.7077667713165283, + "learning_rate": 0.00010571321930061651, + "loss": 0.2638, + "step": 36630 + }, + { + "epoch": 1.4147264373141821, + "grad_norm": 0.9189953804016113, + "learning_rate": 0.00010568747828101471, + "loss": 0.2283, + "step": 36640 + }, + { + "epoch": 1.4151125526082087, + "grad_norm": 2.0240087509155273, + 
"learning_rate": 0.00010566173726141293, + "loss": 0.3166, + "step": 36650 + }, + { + "epoch": 1.4154986679022357, + "grad_norm": 3.3304507732391357, + "learning_rate": 0.00010563599624181115, + "loss": 0.3046, + "step": 36660 + }, + { + "epoch": 1.4158847831962624, + "grad_norm": 1.955929160118103, + "learning_rate": 0.00010561025522220935, + "loss": 0.2653, + "step": 36670 + }, + { + "epoch": 1.4162708984902892, + "grad_norm": 2.2465381622314453, + "learning_rate": 0.00010558451420260757, + "loss": 0.3119, + "step": 36680 + }, + { + "epoch": 1.416657013784316, + "grad_norm": 2.108614921569824, + "learning_rate": 0.00010555877318300577, + "loss": 0.2872, + "step": 36690 + }, + { + "epoch": 1.4170431290783427, + "grad_norm": 1.35493004322052, + "learning_rate": 0.000105533032163404, + "loss": 0.1491, + "step": 36700 + }, + { + "epoch": 1.4174292443723697, + "grad_norm": 0.9102393984794617, + "learning_rate": 0.0001055072911438022, + "loss": 0.1565, + "step": 36710 + }, + { + "epoch": 1.4178153596663963, + "grad_norm": 1.7878345251083374, + "learning_rate": 0.00010548155012420043, + "loss": 0.4864, + "step": 36720 + }, + { + "epoch": 1.4182014749604233, + "grad_norm": 1.4333637952804565, + "learning_rate": 0.00010545580910459864, + "loss": 0.3246, + "step": 36730 + }, + { + "epoch": 1.41858759025445, + "grad_norm": 0.0966360941529274, + "learning_rate": 0.00010543006808499684, + "loss": 0.1758, + "step": 36740 + }, + { + "epoch": 1.4189737055484768, + "grad_norm": 0.30043545365333557, + "learning_rate": 0.00010540432706539507, + "loss": 0.1811, + "step": 36750 + }, + { + "epoch": 1.4193598208425036, + "grad_norm": 1.7705951929092407, + "learning_rate": 0.00010537858604579327, + "loss": 0.1824, + "step": 36760 + }, + { + "epoch": 1.4197459361365303, + "grad_norm": 1.2025195360183716, + "learning_rate": 0.0001053528450261915, + "loss": 0.1463, + "step": 36770 + }, + { + "epoch": 1.420132051430557, + "grad_norm": 0.3154304325580597, + "learning_rate": 0.00010532710400658972, + "loss": 0.1817, + "step": 36780 + }, + { + "epoch": 1.4205181667245839, + "grad_norm": 3.392331838607788, + "learning_rate": 0.00010530136298698792, + "loss": 0.2938, + "step": 36790 + }, + { + "epoch": 1.4209042820186109, + "grad_norm": 0.7256132364273071, + "learning_rate": 0.00010527562196738613, + "loss": 0.28, + "step": 36800 + }, + { + "epoch": 1.4212903973126376, + "grad_norm": 1.9007991552352905, + "learning_rate": 0.00010524988094778435, + "loss": 0.2306, + "step": 36810 + }, + { + "epoch": 1.4216765126066644, + "grad_norm": 3.21189546585083, + "learning_rate": 0.00010522413992818256, + "loss": 0.2945, + "step": 36820 + }, + { + "epoch": 1.4220626279006912, + "grad_norm": 0.20476600527763367, + "learning_rate": 0.00010519839890858076, + "loss": 0.3129, + "step": 36830 + }, + { + "epoch": 1.422448743194718, + "grad_norm": 1.4225107431411743, + "learning_rate": 0.00010517265788897899, + "loss": 0.2262, + "step": 36840 + }, + { + "epoch": 1.4228348584887447, + "grad_norm": 1.203728437423706, + "learning_rate": 0.00010514691686937721, + "loss": 0.176, + "step": 36850 + }, + { + "epoch": 1.4232209737827715, + "grad_norm": 0.3001759648323059, + "learning_rate": 0.00010512117584977541, + "loss": 0.218, + "step": 36860 + }, + { + "epoch": 1.4236070890767984, + "grad_norm": 2.3143389225006104, + "learning_rate": 0.00010509543483017363, + "loss": 0.1562, + "step": 36870 + }, + { + "epoch": 1.423993204370825, + "grad_norm": 0.5087364912033081, + "learning_rate": 0.00010506969381057184, + "loss": 0.1207, + "step": 36880 + 
}, + { + "epoch": 1.424379319664852, + "grad_norm": 1.6521960496902466, + "learning_rate": 0.00010504395279097005, + "loss": 0.4756, + "step": 36890 + }, + { + "epoch": 1.4247654349588788, + "grad_norm": 0.09236706793308258, + "learning_rate": 0.00010501821177136825, + "loss": 0.1755, + "step": 36900 + }, + { + "epoch": 1.4251515502529055, + "grad_norm": 0.2143094390630722, + "learning_rate": 0.00010499247075176648, + "loss": 0.2126, + "step": 36910 + }, + { + "epoch": 1.4255376655469323, + "grad_norm": 1.210170865058899, + "learning_rate": 0.00010496672973216471, + "loss": 0.1391, + "step": 36920 + }, + { + "epoch": 1.425923780840959, + "grad_norm": 0.6248244047164917, + "learning_rate": 0.00010494098871256291, + "loss": 0.2898, + "step": 36930 + }, + { + "epoch": 1.426309896134986, + "grad_norm": 2.1100337505340576, + "learning_rate": 0.00010491524769296113, + "loss": 0.1198, + "step": 36940 + }, + { + "epoch": 1.4266960114290126, + "grad_norm": 2.5673348903656006, + "learning_rate": 0.00010488950667335933, + "loss": 0.2284, + "step": 36950 + }, + { + "epoch": 1.4270821267230396, + "grad_norm": 1.868195652961731, + "learning_rate": 0.00010486376565375755, + "loss": 0.3738, + "step": 36960 + }, + { + "epoch": 1.4274682420170663, + "grad_norm": 1.0951671600341797, + "learning_rate": 0.00010483802463415577, + "loss": 0.1424, + "step": 36970 + }, + { + "epoch": 1.4278543573110931, + "grad_norm": 0.4791143536567688, + "learning_rate": 0.00010481228361455397, + "loss": 0.1445, + "step": 36980 + }, + { + "epoch": 1.4282404726051199, + "grad_norm": 3.2477540969848633, + "learning_rate": 0.0001047865425949522, + "loss": 0.2676, + "step": 36990 + }, + { + "epoch": 1.4286265878991466, + "grad_norm": 0.8082342147827148, + "learning_rate": 0.0001047608015753504, + "loss": 0.2295, + "step": 37000 + }, + { + "epoch": 1.4290127031931734, + "grad_norm": 2.3048954010009766, + "learning_rate": 0.00010473506055574863, + "loss": 0.1996, + "step": 37010 + }, + { + "epoch": 1.4293988184872002, + "grad_norm": 0.42648783326148987, + "learning_rate": 0.00010470931953614683, + "loss": 0.2549, + "step": 37020 + }, + { + "epoch": 1.4297849337812272, + "grad_norm": 1.8794362545013428, + "learning_rate": 0.00010468357851654504, + "loss": 0.1929, + "step": 37030 + }, + { + "epoch": 1.430171049075254, + "grad_norm": 0.9679039120674133, + "learning_rate": 0.00010465783749694327, + "loss": 0.1544, + "step": 37040 + }, + { + "epoch": 1.4305571643692807, + "grad_norm": 0.7789367437362671, + "learning_rate": 0.00010463209647734147, + "loss": 0.2251, + "step": 37050 + }, + { + "epoch": 1.4309432796633075, + "grad_norm": 1.1705437898635864, + "learning_rate": 0.0001046063554577397, + "loss": 0.1567, + "step": 37060 + }, + { + "epoch": 1.4313293949573342, + "grad_norm": 0.13087430596351624, + "learning_rate": 0.00010458061443813789, + "loss": 0.3237, + "step": 37070 + }, + { + "epoch": 1.431715510251361, + "grad_norm": 1.0658761262893677, + "learning_rate": 0.00010455487341853612, + "loss": 0.24, + "step": 37080 + }, + { + "epoch": 1.4321016255453878, + "grad_norm": 1.686922311782837, + "learning_rate": 0.00010452913239893433, + "loss": 0.1911, + "step": 37090 + }, + { + "epoch": 1.4324877408394148, + "grad_norm": 2.010221481323242, + "learning_rate": 0.00010450339137933253, + "loss": 0.4569, + "step": 37100 + }, + { + "epoch": 1.4328738561334413, + "grad_norm": 0.8007562160491943, + "learning_rate": 0.00010447765035973076, + "loss": 0.1198, + "step": 37110 + }, + { + "epoch": 1.4332599714274683, + "grad_norm": 
0.5455211997032166, + "learning_rate": 0.00010445190934012896, + "loss": 0.2179, + "step": 37120 + }, + { + "epoch": 1.433646086721495, + "grad_norm": 1.4133542776107788, + "learning_rate": 0.00010442616832052719, + "loss": 0.45, + "step": 37130 + }, + { + "epoch": 1.4340322020155218, + "grad_norm": 1.6218222379684448, + "learning_rate": 0.00010440042730092539, + "loss": 0.19, + "step": 37140 + }, + { + "epoch": 1.4344183173095486, + "grad_norm": 0.6425970196723938, + "learning_rate": 0.00010437468628132361, + "loss": 0.2345, + "step": 37150 + }, + { + "epoch": 1.4348044326035754, + "grad_norm": 0.4344918131828308, + "learning_rate": 0.00010434894526172183, + "loss": 0.2532, + "step": 37160 + }, + { + "epoch": 1.4351905478976021, + "grad_norm": 0.6628998517990112, + "learning_rate": 0.00010432320424212003, + "loss": 0.2281, + "step": 37170 + }, + { + "epoch": 1.435576663191629, + "grad_norm": 0.8695842027664185, + "learning_rate": 0.00010429746322251825, + "loss": 0.2796, + "step": 37180 + }, + { + "epoch": 1.435962778485656, + "grad_norm": 0.16481854021549225, + "learning_rate": 0.00010427172220291645, + "loss": 0.2333, + "step": 37190 + }, + { + "epoch": 1.4363488937796827, + "grad_norm": 0.7194454073905945, + "learning_rate": 0.00010424598118331468, + "loss": 0.1413, + "step": 37200 + }, + { + "epoch": 1.4367350090737094, + "grad_norm": 4.845508575439453, + "learning_rate": 0.00010422024016371288, + "loss": 0.3944, + "step": 37210 + }, + { + "epoch": 1.4371211243677362, + "grad_norm": 2.6386618614196777, + "learning_rate": 0.0001041944991441111, + "loss": 0.3037, + "step": 37220 + }, + { + "epoch": 1.437507239661763, + "grad_norm": 0.4089922308921814, + "learning_rate": 0.00010416875812450932, + "loss": 0.2985, + "step": 37230 + }, + { + "epoch": 1.4378933549557897, + "grad_norm": 1.456944465637207, + "learning_rate": 0.00010414301710490752, + "loss": 0.3031, + "step": 37240 + }, + { + "epoch": 1.4382794702498165, + "grad_norm": 1.301829218864441, + "learning_rate": 0.00010411727608530575, + "loss": 0.2578, + "step": 37250 + }, + { + "epoch": 1.4386655855438435, + "grad_norm": 1.2072703838348389, + "learning_rate": 0.00010409153506570395, + "loss": 0.391, + "step": 37260 + }, + { + "epoch": 1.4390517008378703, + "grad_norm": 0.5538531541824341, + "learning_rate": 0.00010406579404610217, + "loss": 0.1867, + "step": 37270 + }, + { + "epoch": 1.439437816131897, + "grad_norm": 1.0898678302764893, + "learning_rate": 0.0001040400530265004, + "loss": 0.2112, + "step": 37280 + }, + { + "epoch": 1.4398239314259238, + "grad_norm": 1.5769239664077759, + "learning_rate": 0.0001040143120068986, + "loss": 0.3121, + "step": 37290 + }, + { + "epoch": 1.4402100467199506, + "grad_norm": 0.3964422345161438, + "learning_rate": 0.00010398857098729681, + "loss": 0.196, + "step": 37300 + }, + { + "epoch": 1.4405961620139773, + "grad_norm": 1.0268182754516602, + "learning_rate": 0.00010396282996769501, + "loss": 0.1785, + "step": 37310 + }, + { + "epoch": 1.440982277308004, + "grad_norm": 1.750826358795166, + "learning_rate": 0.00010393708894809324, + "loss": 0.3654, + "step": 37320 + }, + { + "epoch": 1.441368392602031, + "grad_norm": 1.1231745481491089, + "learning_rate": 0.00010391134792849144, + "loss": 0.2594, + "step": 37330 + }, + { + "epoch": 1.4417545078960576, + "grad_norm": 0.2897786498069763, + "learning_rate": 0.00010388560690888967, + "loss": 0.3483, + "step": 37340 + }, + { + "epoch": 1.4421406231900846, + "grad_norm": 0.07170752435922623, + "learning_rate": 0.00010385986588928789, + "loss": 
0.1462, + "step": 37350 + }, + { + "epoch": 1.4425267384841114, + "grad_norm": 3.326099395751953, + "learning_rate": 0.00010383412486968609, + "loss": 0.3785, + "step": 37360 + }, + { + "epoch": 1.4429128537781382, + "grad_norm": 0.5063263773918152, + "learning_rate": 0.0001038083838500843, + "loss": 0.2974, + "step": 37370 + }, + { + "epoch": 1.443298969072165, + "grad_norm": 1.160088062286377, + "learning_rate": 0.00010378264283048252, + "loss": 0.3003, + "step": 37380 + }, + { + "epoch": 1.4436850843661917, + "grad_norm": 0.7043284177780151, + "learning_rate": 0.00010375690181088073, + "loss": 0.3193, + "step": 37390 + }, + { + "epoch": 1.4440711996602185, + "grad_norm": 0.2916620373725891, + "learning_rate": 0.00010373116079127896, + "loss": 0.235, + "step": 37400 + }, + { + "epoch": 1.4444573149542452, + "grad_norm": 2.1940219402313232, + "learning_rate": 0.00010370541977167716, + "loss": 0.2657, + "step": 37410 + }, + { + "epoch": 1.4448434302482722, + "grad_norm": 1.849794626235962, + "learning_rate": 0.00010367967875207539, + "loss": 0.3121, + "step": 37420 + }, + { + "epoch": 1.445229545542299, + "grad_norm": 1.1139589548110962, + "learning_rate": 0.00010365393773247359, + "loss": 0.2179, + "step": 37430 + }, + { + "epoch": 1.4456156608363258, + "grad_norm": 0.917142927646637, + "learning_rate": 0.0001036281967128718, + "loss": 0.2406, + "step": 37440 + }, + { + "epoch": 1.4460017761303525, + "grad_norm": 1.6973673105239868, + "learning_rate": 0.00010360245569327001, + "loss": 0.3236, + "step": 37450 + }, + { + "epoch": 1.4463878914243793, + "grad_norm": 0.3979933559894562, + "learning_rate": 0.00010357671467366823, + "loss": 0.1477, + "step": 37460 + }, + { + "epoch": 1.446774006718406, + "grad_norm": 0.8938451409339905, + "learning_rate": 0.00010355097365406645, + "loss": 0.116, + "step": 37470 + }, + { + "epoch": 1.4471601220124328, + "grad_norm": 1.068787693977356, + "learning_rate": 0.00010352523263446465, + "loss": 0.2342, + "step": 37480 + }, + { + "epoch": 1.4475462373064598, + "grad_norm": 1.1985591650009155, + "learning_rate": 0.00010349949161486288, + "loss": 0.2423, + "step": 37490 + }, + { + "epoch": 1.4479323526004866, + "grad_norm": 1.0383973121643066, + "learning_rate": 0.00010347375059526108, + "loss": 0.3059, + "step": 37500 + }, + { + "epoch": 1.4483184678945134, + "grad_norm": 0.26652297377586365, + "learning_rate": 0.0001034480095756593, + "loss": 0.262, + "step": 37510 + }, + { + "epoch": 1.4487045831885401, + "grad_norm": 1.8498083353042603, + "learning_rate": 0.0001034222685560575, + "loss": 0.2122, + "step": 37520 + }, + { + "epoch": 1.4490906984825669, + "grad_norm": 1.2896068096160889, + "learning_rate": 0.00010339652753645572, + "loss": 0.2939, + "step": 37530 + }, + { + "epoch": 1.4494768137765937, + "grad_norm": 1.423343300819397, + "learning_rate": 0.00010337078651685395, + "loss": 0.3375, + "step": 37540 + }, + { + "epoch": 1.4498629290706204, + "grad_norm": 1.8248246908187866, + "learning_rate": 0.00010334504549725215, + "loss": 0.3534, + "step": 37550 + }, + { + "epoch": 1.4502490443646474, + "grad_norm": 2.3713393211364746, + "learning_rate": 0.00010331930447765037, + "loss": 0.2795, + "step": 37560 + }, + { + "epoch": 1.450635159658674, + "grad_norm": 2.1431849002838135, + "learning_rate": 0.00010329356345804857, + "loss": 0.2503, + "step": 37570 + }, + { + "epoch": 1.451021274952701, + "grad_norm": 1.6521297693252563, + "learning_rate": 0.0001032678224384468, + "loss": 0.1789, + "step": 37580 + }, + { + "epoch": 1.4514073902467277, + 
"grad_norm": 1.5589754581451416, + "learning_rate": 0.00010324208141884501, + "loss": 0.3201, + "step": 37590 + }, + { + "epoch": 1.4517935055407545, + "grad_norm": 0.4339803159236908, + "learning_rate": 0.00010321634039924321, + "loss": 0.2964, + "step": 37600 + }, + { + "epoch": 1.4521796208347812, + "grad_norm": 0.30054792761802673, + "learning_rate": 0.00010319059937964144, + "loss": 0.3477, + "step": 37610 + }, + { + "epoch": 1.452565736128808, + "grad_norm": 2.2864038944244385, + "learning_rate": 0.00010316485836003964, + "loss": 0.2849, + "step": 37620 + }, + { + "epoch": 1.4529518514228348, + "grad_norm": 1.8392651081085205, + "learning_rate": 0.00010313911734043787, + "loss": 0.2089, + "step": 37630 + }, + { + "epoch": 1.4533379667168616, + "grad_norm": 1.0444347858428955, + "learning_rate": 0.00010311337632083607, + "loss": 0.2306, + "step": 37640 + }, + { + "epoch": 1.4537240820108885, + "grad_norm": 1.265647053718567, + "learning_rate": 0.00010308763530123429, + "loss": 0.2192, + "step": 37650 + }, + { + "epoch": 1.4541101973049153, + "grad_norm": 1.0464913845062256, + "learning_rate": 0.0001030618942816325, + "loss": 0.2857, + "step": 37660 + }, + { + "epoch": 1.454496312598942, + "grad_norm": 0.5931240320205688, + "learning_rate": 0.0001030361532620307, + "loss": 0.1751, + "step": 37670 + }, + { + "epoch": 1.4548824278929688, + "grad_norm": 2.0155346393585205, + "learning_rate": 0.00010301041224242893, + "loss": 0.4124, + "step": 37680 + }, + { + "epoch": 1.4552685431869956, + "grad_norm": 2.843345880508423, + "learning_rate": 0.00010298467122282713, + "loss": 0.2119, + "step": 37690 + }, + { + "epoch": 1.4556546584810224, + "grad_norm": 1.977612018585205, + "learning_rate": 0.00010295893020322536, + "loss": 0.271, + "step": 37700 + }, + { + "epoch": 1.4560407737750491, + "grad_norm": 1.2862237691879272, + "learning_rate": 0.00010293318918362356, + "loss": 0.4147, + "step": 37710 + }, + { + "epoch": 1.4564268890690761, + "grad_norm": 0.8235340118408203, + "learning_rate": 0.00010290744816402179, + "loss": 0.1918, + "step": 37720 + }, + { + "epoch": 1.456813004363103, + "grad_norm": 1.0351753234863281, + "learning_rate": 0.00010288170714442, + "loss": 0.2343, + "step": 37730 + }, + { + "epoch": 1.4571991196571297, + "grad_norm": 0.2567160725593567, + "learning_rate": 0.0001028559661248182, + "loss": 0.2664, + "step": 37740 + }, + { + "epoch": 1.4575852349511564, + "grad_norm": 2.7498669624328613, + "learning_rate": 0.00010283022510521643, + "loss": 0.2839, + "step": 37750 + }, + { + "epoch": 1.4579713502451832, + "grad_norm": 0.7125422358512878, + "learning_rate": 0.00010280448408561463, + "loss": 0.2411, + "step": 37760 + }, + { + "epoch": 1.45835746553921, + "grad_norm": 0.720761239528656, + "learning_rate": 0.00010277874306601285, + "loss": 0.315, + "step": 37770 + }, + { + "epoch": 1.4587435808332367, + "grad_norm": 3.188563346862793, + "learning_rate": 0.00010275300204641108, + "loss": 0.3945, + "step": 37780 + }, + { + "epoch": 1.4591296961272637, + "grad_norm": 1.23422110080719, + "learning_rate": 0.00010272726102680928, + "loss": 0.1828, + "step": 37790 + }, + { + "epoch": 1.4595158114212903, + "grad_norm": 1.1572456359863281, + "learning_rate": 0.00010270152000720749, + "loss": 0.1687, + "step": 37800 + }, + { + "epoch": 1.4599019267153173, + "grad_norm": 0.6565262079238892, + "learning_rate": 0.00010267577898760569, + "loss": 0.3145, + "step": 37810 + }, + { + "epoch": 1.460288042009344, + "grad_norm": 0.6239646673202515, + "learning_rate": 0.00010265003796800392, + 
"loss": 0.2381, + "step": 37820 + }, + { + "epoch": 1.4606741573033708, + "grad_norm": 0.7578912377357483, + "learning_rate": 0.00010262429694840212, + "loss": 0.2019, + "step": 37830 + }, + { + "epoch": 1.4610602725973976, + "grad_norm": 1.0978549718856812, + "learning_rate": 0.00010259855592880035, + "loss": 0.2477, + "step": 37840 + }, + { + "epoch": 1.4614463878914243, + "grad_norm": 0.3615519404411316, + "learning_rate": 0.00010257281490919857, + "loss": 0.4241, + "step": 37850 + }, + { + "epoch": 1.461832503185451, + "grad_norm": 0.10355047881603241, + "learning_rate": 0.00010254707388959677, + "loss": 0.1181, + "step": 37860 + }, + { + "epoch": 1.4622186184794779, + "grad_norm": 0.23957425355911255, + "learning_rate": 0.00010252133286999499, + "loss": 0.1675, + "step": 37870 + }, + { + "epoch": 1.4626047337735049, + "grad_norm": 1.7581062316894531, + "learning_rate": 0.00010249559185039318, + "loss": 0.241, + "step": 37880 + }, + { + "epoch": 1.4629908490675316, + "grad_norm": 1.660989761352539, + "learning_rate": 0.00010246985083079141, + "loss": 0.2099, + "step": 37890 + }, + { + "epoch": 1.4633769643615584, + "grad_norm": 1.1359142065048218, + "learning_rate": 0.00010244410981118964, + "loss": 0.3686, + "step": 37900 + }, + { + "epoch": 1.4637630796555852, + "grad_norm": 0.9489149451255798, + "learning_rate": 0.00010241836879158784, + "loss": 0.3616, + "step": 37910 + }, + { + "epoch": 1.464149194949612, + "grad_norm": 1.5209956169128418, + "learning_rate": 0.00010239262777198607, + "loss": 0.3159, + "step": 37920 + }, + { + "epoch": 1.4645353102436387, + "grad_norm": 0.848943293094635, + "learning_rate": 0.00010236688675238427, + "loss": 0.3228, + "step": 37930 + }, + { + "epoch": 1.4649214255376655, + "grad_norm": 1.1476777791976929, + "learning_rate": 0.00010234114573278248, + "loss": 0.1873, + "step": 37940 + }, + { + "epoch": 1.4653075408316925, + "grad_norm": 1.4093862771987915, + "learning_rate": 0.00010231540471318068, + "loss": 0.5554, + "step": 37950 + }, + { + "epoch": 1.465693656125719, + "grad_norm": 0.17349161207675934, + "learning_rate": 0.0001022896636935789, + "loss": 0.2992, + "step": 37960 + }, + { + "epoch": 1.466079771419746, + "grad_norm": 0.28885993361473083, + "learning_rate": 0.00010226392267397713, + "loss": 0.2618, + "step": 37970 + }, + { + "epoch": 1.4664658867137728, + "grad_norm": 1.1087830066680908, + "learning_rate": 0.00010223818165437533, + "loss": 0.2647, + "step": 37980 + }, + { + "epoch": 1.4668520020077995, + "grad_norm": 0.08338876068592072, + "learning_rate": 0.00010221244063477356, + "loss": 0.3057, + "step": 37990 + }, + { + "epoch": 1.4672381173018263, + "grad_norm": 2.159362316131592, + "learning_rate": 0.00010218669961517176, + "loss": 0.1499, + "step": 38000 + }, + { + "epoch": 1.467624232595853, + "grad_norm": 0.8207988142967224, + "learning_rate": 0.00010216095859556997, + "loss": 0.1482, + "step": 38010 + }, + { + "epoch": 1.46801034788988, + "grad_norm": 0.5458611845970154, + "learning_rate": 0.00010213521757596818, + "loss": 0.2794, + "step": 38020 + }, + { + "epoch": 1.4683964631839066, + "grad_norm": 1.6955047845840454, + "learning_rate": 0.0001021094765563664, + "loss": 0.2627, + "step": 38030 + }, + { + "epoch": 1.4687825784779336, + "grad_norm": 0.9796440601348877, + "learning_rate": 0.00010208373553676463, + "loss": 0.1924, + "step": 38040 + }, + { + "epoch": 1.4691686937719604, + "grad_norm": 0.9906508326530457, + "learning_rate": 0.00010205799451716282, + "loss": 0.1597, + "step": 38050 + }, + { + "epoch": 
1.4695548090659871, + "grad_norm": 1.8590656518936157, + "learning_rate": 0.00010203225349756105, + "loss": 0.2874, + "step": 38060 + }, + { + "epoch": 1.469940924360014, + "grad_norm": 1.6403672695159912, + "learning_rate": 0.00010200651247795925, + "loss": 0.2926, + "step": 38070 + }, + { + "epoch": 1.4703270396540407, + "grad_norm": 0.4410895109176636, + "learning_rate": 0.00010198077145835746, + "loss": 0.1368, + "step": 38080 + }, + { + "epoch": 1.4707131549480674, + "grad_norm": 2.4955286979675293, + "learning_rate": 0.00010195503043875569, + "loss": 0.3195, + "step": 38090 + }, + { + "epoch": 1.4710992702420942, + "grad_norm": 1.1799029111862183, + "learning_rate": 0.00010192928941915389, + "loss": 0.1867, + "step": 38100 + }, + { + "epoch": 1.4714853855361212, + "grad_norm": 0.7959389090538025, + "learning_rate": 0.00010190354839955212, + "loss": 0.2988, + "step": 38110 + }, + { + "epoch": 1.471871500830148, + "grad_norm": 2.7750720977783203, + "learning_rate": 0.00010187780737995032, + "loss": 0.1886, + "step": 38120 + }, + { + "epoch": 1.4722576161241747, + "grad_norm": 1.5834373235702515, + "learning_rate": 0.00010185206636034854, + "loss": 0.3542, + "step": 38130 + }, + { + "epoch": 1.4726437314182015, + "grad_norm": 1.9757747650146484, + "learning_rate": 0.00010182632534074674, + "loss": 0.302, + "step": 38140 + }, + { + "epoch": 1.4730298467122283, + "grad_norm": 1.1752204895019531, + "learning_rate": 0.00010180058432114497, + "loss": 0.3535, + "step": 38150 + }, + { + "epoch": 1.473415962006255, + "grad_norm": 0.3877789378166199, + "learning_rate": 0.00010177484330154318, + "loss": 0.2508, + "step": 38160 + }, + { + "epoch": 1.4738020773002818, + "grad_norm": 0.13749545812606812, + "learning_rate": 0.00010174910228194138, + "loss": 0.2141, + "step": 38170 + }, + { + "epoch": 1.4741881925943088, + "grad_norm": 1.3663641214370728, + "learning_rate": 0.00010172336126233961, + "loss": 0.3231, + "step": 38180 + }, + { + "epoch": 1.4745743078883353, + "grad_norm": 1.6267393827438354, + "learning_rate": 0.00010169762024273781, + "loss": 0.3233, + "step": 38190 + }, + { + "epoch": 1.4749604231823623, + "grad_norm": 0.2993789315223694, + "learning_rate": 0.00010167187922313604, + "loss": 0.28, + "step": 38200 + }, + { + "epoch": 1.475346538476389, + "grad_norm": 0.16693222522735596, + "learning_rate": 0.00010164613820353424, + "loss": 0.188, + "step": 38210 + }, + { + "epoch": 1.4757326537704158, + "grad_norm": 0.6939979791641235, + "learning_rate": 0.00010162039718393246, + "loss": 0.263, + "step": 38220 + }, + { + "epoch": 1.4761187690644426, + "grad_norm": 0.37910985946655273, + "learning_rate": 0.00010159465616433068, + "loss": 0.1963, + "step": 38230 + }, + { + "epoch": 1.4765048843584694, + "grad_norm": 1.782188892364502, + "learning_rate": 0.00010156891514472888, + "loss": 0.3814, + "step": 38240 + }, + { + "epoch": 1.4768909996524964, + "grad_norm": 1.159278392791748, + "learning_rate": 0.0001015431741251271, + "loss": 0.2043, + "step": 38250 + }, + { + "epoch": 1.477277114946523, + "grad_norm": 1.09486985206604, + "learning_rate": 0.0001015174331055253, + "loss": 0.2128, + "step": 38260 + }, + { + "epoch": 1.47766323024055, + "grad_norm": 0.36655205488204956, + "learning_rate": 0.00010149169208592353, + "loss": 0.298, + "step": 38270 + }, + { + "epoch": 1.4780493455345767, + "grad_norm": 0.8908851742744446, + "learning_rate": 0.00010146595106632176, + "loss": 0.3707, + "step": 38280 + }, + { + "epoch": 1.4784354608286034, + "grad_norm": 0.251338928937912, + "learning_rate": 
0.00010144021004671996, + "loss": 0.2495, + "step": 38290 + }, + { + "epoch": 1.4788215761226302, + "grad_norm": 1.0613712072372437, + "learning_rate": 0.00010141446902711817, + "loss": 0.2112, + "step": 38300 + }, + { + "epoch": 1.479207691416657, + "grad_norm": 1.459799885749817, + "learning_rate": 0.00010138872800751637, + "loss": 0.2595, + "step": 38310 + }, + { + "epoch": 1.4795938067106837, + "grad_norm": 2.6898603439331055, + "learning_rate": 0.0001013629869879146, + "loss": 0.2758, + "step": 38320 + }, + { + "epoch": 1.4799799220047105, + "grad_norm": 0.19628773629665375, + "learning_rate": 0.0001013372459683128, + "loss": 0.1843, + "step": 38330 + }, + { + "epoch": 1.4803660372987375, + "grad_norm": 2.0871078968048096, + "learning_rate": 0.00010131150494871102, + "loss": 0.1661, + "step": 38340 + }, + { + "epoch": 1.4807521525927643, + "grad_norm": 0.7689336538314819, + "learning_rate": 0.00010128576392910925, + "loss": 0.157, + "step": 38350 + }, + { + "epoch": 1.481138267886791, + "grad_norm": 1.4471644163131714, + "learning_rate": 0.00010126002290950745, + "loss": 0.2159, + "step": 38360 + }, + { + "epoch": 1.4815243831808178, + "grad_norm": 2.198559522628784, + "learning_rate": 0.00010123428188990566, + "loss": 0.5609, + "step": 38370 + }, + { + "epoch": 1.4819104984748446, + "grad_norm": 0.16012130677700043, + "learning_rate": 0.00010120854087030386, + "loss": 0.1979, + "step": 38380 + }, + { + "epoch": 1.4822966137688713, + "grad_norm": 1.0222225189208984, + "learning_rate": 0.00010118279985070209, + "loss": 0.1547, + "step": 38390 + }, + { + "epoch": 1.482682729062898, + "grad_norm": 2.7192416191101074, + "learning_rate": 0.00010115705883110032, + "loss": 0.4942, + "step": 38400 + }, + { + "epoch": 1.483068844356925, + "grad_norm": 1.887128472328186, + "learning_rate": 0.00010113131781149852, + "loss": 0.177, + "step": 38410 + }, + { + "epoch": 1.4834549596509516, + "grad_norm": 2.7628560066223145, + "learning_rate": 0.00010110557679189674, + "loss": 0.2931, + "step": 38420 + }, + { + "epoch": 1.4838410749449786, + "grad_norm": 0.3852572739124298, + "learning_rate": 0.00010107983577229494, + "loss": 0.3392, + "step": 38430 + }, + { + "epoch": 1.4842271902390054, + "grad_norm": 1.047448992729187, + "learning_rate": 0.00010105409475269316, + "loss": 0.3741, + "step": 38440 + }, + { + "epoch": 1.4846133055330322, + "grad_norm": 1.4930602312088013, + "learning_rate": 0.00010102835373309136, + "loss": 0.2564, + "step": 38450 + }, + { + "epoch": 1.484999420827059, + "grad_norm": 1.3012608289718628, + "learning_rate": 0.00010100261271348958, + "loss": 0.3376, + "step": 38460 + }, + { + "epoch": 1.4853855361210857, + "grad_norm": 2.163942337036133, + "learning_rate": 0.00010097687169388781, + "loss": 0.3548, + "step": 38470 + }, + { + "epoch": 1.4857716514151125, + "grad_norm": 1.864189624786377, + "learning_rate": 0.00010095113067428601, + "loss": 0.165, + "step": 38480 + }, + { + "epoch": 1.4861577667091392, + "grad_norm": 0.5661312341690063, + "learning_rate": 0.00010092538965468424, + "loss": 0.1764, + "step": 38490 + }, + { + "epoch": 1.4865438820031662, + "grad_norm": 0.13517481088638306, + "learning_rate": 0.00010089964863508244, + "loss": 0.5223, + "step": 38500 + }, + { + "epoch": 1.486929997297193, + "grad_norm": 0.665143609046936, + "learning_rate": 0.00010087390761548065, + "loss": 0.1943, + "step": 38510 + }, + { + "epoch": 1.4873161125912198, + "grad_norm": 1.2759610414505005, + "learning_rate": 0.00010084816659587885, + "loss": 0.3023, + "step": 38520 + }, + { + 
"epoch": 1.4877022278852465, + "grad_norm": 1.3209573030471802, + "learning_rate": 0.00010082242557627708, + "loss": 0.1101, + "step": 38530 + }, + { + "epoch": 1.4880883431792733, + "grad_norm": 1.2501552104949951, + "learning_rate": 0.0001007966845566753, + "loss": 0.3931, + "step": 38540 + }, + { + "epoch": 1.4884744584733, + "grad_norm": 0.6862074732780457, + "learning_rate": 0.0001007709435370735, + "loss": 0.4093, + "step": 38550 + }, + { + "epoch": 1.4888605737673268, + "grad_norm": 1.90501070022583, + "learning_rate": 0.00010074520251747173, + "loss": 0.2553, + "step": 38560 + }, + { + "epoch": 1.4892466890613538, + "grad_norm": 1.6547000408172607, + "learning_rate": 0.00010071946149786993, + "loss": 0.1558, + "step": 38570 + }, + { + "epoch": 1.4896328043553806, + "grad_norm": 0.35097751021385193, + "learning_rate": 0.00010069372047826814, + "loss": 0.2253, + "step": 38580 + }, + { + "epoch": 1.4900189196494074, + "grad_norm": 0.15141837298870087, + "learning_rate": 0.00010066797945866637, + "loss": 0.2124, + "step": 38590 + }, + { + "epoch": 1.4904050349434341, + "grad_norm": 1.7070786952972412, + "learning_rate": 0.00010064223843906457, + "loss": 0.3181, + "step": 38600 + }, + { + "epoch": 1.490791150237461, + "grad_norm": 0.5400305390357971, + "learning_rate": 0.0001006164974194628, + "loss": 0.3203, + "step": 38610 + }, + { + "epoch": 1.4911772655314877, + "grad_norm": 1.6475050449371338, + "learning_rate": 0.000100590756399861, + "loss": 0.2965, + "step": 38620 + }, + { + "epoch": 1.4915633808255144, + "grad_norm": 0.21372176706790924, + "learning_rate": 0.00010056501538025922, + "loss": 0.1456, + "step": 38630 + }, + { + "epoch": 1.4919494961195414, + "grad_norm": 0.24179309606552124, + "learning_rate": 0.00010053927436065742, + "loss": 0.2077, + "step": 38640 + }, + { + "epoch": 1.492335611413568, + "grad_norm": 1.2079945802688599, + "learning_rate": 0.00010051353334105564, + "loss": 0.1554, + "step": 38650 + }, + { + "epoch": 1.492721726707595, + "grad_norm": 1.8915836811065674, + "learning_rate": 0.00010048779232145386, + "loss": 0.3815, + "step": 38660 + }, + { + "epoch": 1.4931078420016217, + "grad_norm": 1.8128750324249268, + "learning_rate": 0.00010046205130185206, + "loss": 0.1944, + "step": 38670 + }, + { + "epoch": 1.4934939572956485, + "grad_norm": 2.0955018997192383, + "learning_rate": 0.00010043631028225029, + "loss": 0.2468, + "step": 38680 + }, + { + "epoch": 1.4938800725896753, + "grad_norm": 3.0578064918518066, + "learning_rate": 0.00010041056926264849, + "loss": 0.2186, + "step": 38690 + }, + { + "epoch": 1.494266187883702, + "grad_norm": 0.7817699909210205, + "learning_rate": 0.00010038482824304672, + "loss": 0.1154, + "step": 38700 + }, + { + "epoch": 1.4946523031777288, + "grad_norm": 6.556485176086426, + "learning_rate": 0.00010035908722344493, + "loss": 0.4752, + "step": 38710 + }, + { + "epoch": 1.4950384184717556, + "grad_norm": 2.1970055103302, + "learning_rate": 0.00010033334620384314, + "loss": 0.3234, + "step": 38720 + }, + { + "epoch": 1.4954245337657825, + "grad_norm": 1.8929531574249268, + "learning_rate": 0.00010030760518424136, + "loss": 0.1605, + "step": 38730 + }, + { + "epoch": 1.4958106490598093, + "grad_norm": 0.3992670178413391, + "learning_rate": 0.00010028186416463956, + "loss": 0.2417, + "step": 38740 + }, + { + "epoch": 1.496196764353836, + "grad_norm": 1.7508872747421265, + "learning_rate": 0.00010025612314503778, + "loss": 0.329, + "step": 38750 + }, + { + "epoch": 1.4965828796478629, + "grad_norm": 0.24950659275054932, + 
"learning_rate": 0.00010023038212543598, + "loss": 0.1159, + "step": 38760 + }, + { + "epoch": 1.4969689949418896, + "grad_norm": 1.5187748670578003, + "learning_rate": 0.00010020464110583421, + "loss": 0.1827, + "step": 38770 + }, + { + "epoch": 1.4973551102359164, + "grad_norm": 1.1223959922790527, + "learning_rate": 0.00010017890008623242, + "loss": 0.1935, + "step": 38780 + }, + { + "epoch": 1.4977412255299432, + "grad_norm": 0.9355156421661377, + "learning_rate": 0.00010015315906663064, + "loss": 0.2225, + "step": 38790 + }, + { + "epoch": 1.4981273408239701, + "grad_norm": 0.4834296405315399, + "learning_rate": 0.00010012741804702885, + "loss": 0.3077, + "step": 38800 + }, + { + "epoch": 1.498513456117997, + "grad_norm": 1.0983386039733887, + "learning_rate": 0.00010010167702742705, + "loss": 0.183, + "step": 38810 + }, + { + "epoch": 1.4988995714120237, + "grad_norm": 0.8350847959518433, + "learning_rate": 0.00010007593600782528, + "loss": 0.3972, + "step": 38820 + }, + { + "epoch": 1.4992856867060504, + "grad_norm": 0.8200152516365051, + "learning_rate": 0.00010005019498822348, + "loss": 0.2043, + "step": 38830 + }, + { + "epoch": 1.4996718020000772, + "grad_norm": 0.9136185050010681, + "learning_rate": 0.0001000244539686217, + "loss": 0.1727, + "step": 38840 + }, + { + "epoch": 1.500057917294104, + "grad_norm": 0.8466988205909729, + "learning_rate": 9.999871294901992e-05, + "loss": 0.2119, + "step": 38850 + }, + { + "epoch": 1.5004440325881307, + "grad_norm": 0.4185144305229187, + "learning_rate": 9.997297192941813e-05, + "loss": 0.4046, + "step": 38860 + }, + { + "epoch": 1.5008301478821577, + "grad_norm": 2.232264518737793, + "learning_rate": 9.994723090981634e-05, + "loss": 0.304, + "step": 38870 + }, + { + "epoch": 1.5012162631761843, + "grad_norm": 0.13785889744758606, + "learning_rate": 9.992148989021456e-05, + "loss": 0.1045, + "step": 38880 + }, + { + "epoch": 1.5016023784702113, + "grad_norm": 1.6270711421966553, + "learning_rate": 9.989574887061277e-05, + "loss": 0.2791, + "step": 38890 + }, + { + "epoch": 1.501988493764238, + "grad_norm": 0.08486157655715942, + "learning_rate": 9.987000785101098e-05, + "loss": 0.178, + "step": 38900 + }, + { + "epoch": 1.5023746090582648, + "grad_norm": 1.862197995185852, + "learning_rate": 9.98442668314092e-05, + "loss": 0.2461, + "step": 38910 + }, + { + "epoch": 1.5027607243522916, + "grad_norm": 2.752070903778076, + "learning_rate": 9.981852581180741e-05, + "loss": 0.4299, + "step": 38920 + }, + { + "epoch": 1.5031468396463183, + "grad_norm": 2.08542537689209, + "learning_rate": 9.979278479220562e-05, + "loss": 0.2898, + "step": 38930 + }, + { + "epoch": 1.5035329549403453, + "grad_norm": 0.8629382848739624, + "learning_rate": 9.976704377260384e-05, + "loss": 0.226, + "step": 38940 + }, + { + "epoch": 1.5039190702343719, + "grad_norm": 0.5178211331367493, + "learning_rate": 9.974130275300205e-05, + "loss": 0.2444, + "step": 38950 + }, + { + "epoch": 1.5043051855283989, + "grad_norm": 0.25908491015434265, + "learning_rate": 9.971556173340026e-05, + "loss": 0.1643, + "step": 38960 + }, + { + "epoch": 1.5046913008224254, + "grad_norm": 1.1818209886550903, + "learning_rate": 9.968982071379848e-05, + "loss": 0.3187, + "step": 38970 + }, + { + "epoch": 1.5050774161164524, + "grad_norm": 0.13186976313591003, + "learning_rate": 9.966407969419669e-05, + "loss": 0.1982, + "step": 38980 + }, + { + "epoch": 1.5054635314104792, + "grad_norm": 0.18049825727939606, + "learning_rate": 9.963833867459492e-05, + "loss": 0.1288, + "step": 38990 + }, + 
{ + "epoch": 1.505849646704506, + "grad_norm": 0.30261853337287903, + "learning_rate": 9.961259765499312e-05, + "loss": 0.1704, + "step": 39000 + }, + { + "epoch": 1.5062357619985327, + "grad_norm": 2.1437973976135254, + "learning_rate": 9.958685663539133e-05, + "loss": 0.1272, + "step": 39010 + }, + { + "epoch": 1.5066218772925595, + "grad_norm": 2.2844271659851074, + "learning_rate": 9.956111561578954e-05, + "loss": 0.1314, + "step": 39020 + }, + { + "epoch": 1.5070079925865865, + "grad_norm": 1.5845297574996948, + "learning_rate": 9.953537459618776e-05, + "loss": 0.2023, + "step": 39030 + }, + { + "epoch": 1.507394107880613, + "grad_norm": 0.7256748676300049, + "learning_rate": 9.950963357658597e-05, + "loss": 0.4165, + "step": 39040 + }, + { + "epoch": 1.50778022317464, + "grad_norm": 1.7597005367279053, + "learning_rate": 9.948389255698418e-05, + "loss": 0.4209, + "step": 39050 + }, + { + "epoch": 1.5081663384686668, + "grad_norm": 1.5487171411514282, + "learning_rate": 9.945815153738241e-05, + "loss": 0.1918, + "step": 39060 + }, + { + "epoch": 1.5085524537626935, + "grad_norm": 0.1656871736049652, + "learning_rate": 9.943241051778062e-05, + "loss": 0.3513, + "step": 39070 + }, + { + "epoch": 1.5089385690567203, + "grad_norm": 0.1451992392539978, + "learning_rate": 9.940666949817882e-05, + "loss": 0.1922, + "step": 39080 + }, + { + "epoch": 1.509324684350747, + "grad_norm": 1.1572967767715454, + "learning_rate": 9.938092847857704e-05, + "loss": 0.4263, + "step": 39090 + }, + { + "epoch": 1.509710799644774, + "grad_norm": 0.8721522092819214, + "learning_rate": 9.935518745897525e-05, + "loss": 0.3499, + "step": 39100 + }, + { + "epoch": 1.5100969149388006, + "grad_norm": 2.8486688137054443, + "learning_rate": 9.932944643937346e-05, + "loss": 0.3567, + "step": 39110 + }, + { + "epoch": 1.5104830302328276, + "grad_norm": 1.1864535808563232, + "learning_rate": 9.930370541977169e-05, + "loss": 0.2546, + "step": 39120 + }, + { + "epoch": 1.5108691455268544, + "grad_norm": 0.4636247158050537, + "learning_rate": 9.92779644001699e-05, + "loss": 0.2761, + "step": 39130 + }, + { + "epoch": 1.5112552608208811, + "grad_norm": 0.6326389908790588, + "learning_rate": 9.925222338056812e-05, + "loss": 0.252, + "step": 39140 + }, + { + "epoch": 1.511641376114908, + "grad_norm": 2.1110761165618896, + "learning_rate": 9.922648236096632e-05, + "loss": 0.5427, + "step": 39150 + }, + { + "epoch": 1.5120274914089347, + "grad_norm": 1.0927456617355347, + "learning_rate": 9.920074134136453e-05, + "loss": 0.3336, + "step": 39160 + }, + { + "epoch": 1.5124136067029617, + "grad_norm": 3.6224989891052246, + "learning_rate": 9.917500032176274e-05, + "loss": 0.3689, + "step": 39170 + }, + { + "epoch": 1.5127997219969882, + "grad_norm": 2.422492265701294, + "learning_rate": 9.914925930216097e-05, + "loss": 0.3924, + "step": 39180 + }, + { + "epoch": 1.5131858372910152, + "grad_norm": 0.22901678085327148, + "learning_rate": 9.912351828255918e-05, + "loss": 0.3082, + "step": 39190 + }, + { + "epoch": 1.5135719525850417, + "grad_norm": 0.581598162651062, + "learning_rate": 9.90977772629574e-05, + "loss": 0.3007, + "step": 39200 + }, + { + "epoch": 1.5139580678790687, + "grad_norm": 0.7196664214134216, + "learning_rate": 9.907203624335561e-05, + "loss": 0.2425, + "step": 39210 + }, + { + "epoch": 1.5143441831730955, + "grad_norm": 2.5246760845184326, + "learning_rate": 9.904629522375381e-05, + "loss": 0.3085, + "step": 39220 + }, + { + "epoch": 1.5147302984671223, + "grad_norm": 1.8397210836410522, + "learning_rate": 
9.902055420415202e-05, + "loss": 0.2388, + "step": 39230 + }, + { + "epoch": 1.515116413761149, + "grad_norm": 0.5940410494804382, + "learning_rate": 9.899481318455025e-05, + "loss": 0.1184, + "step": 39240 + }, + { + "epoch": 1.5155025290551758, + "grad_norm": 1.5379250049591064, + "learning_rate": 9.896907216494846e-05, + "loss": 0.3253, + "step": 39250 + }, + { + "epoch": 1.5158886443492028, + "grad_norm": 0.8007088303565979, + "learning_rate": 9.894333114534668e-05, + "loss": 0.3057, + "step": 39260 + }, + { + "epoch": 1.5162747596432293, + "grad_norm": 0.9321600198745728, + "learning_rate": 9.891759012574489e-05, + "loss": 0.3249, + "step": 39270 + }, + { + "epoch": 1.5166608749372563, + "grad_norm": 1.5519977807998657, + "learning_rate": 9.88918491061431e-05, + "loss": 0.3368, + "step": 39280 + }, + { + "epoch": 1.517046990231283, + "grad_norm": 0.37695613503456116, + "learning_rate": 9.88661080865413e-05, + "loss": 0.1305, + "step": 39290 + }, + { + "epoch": 1.5174331055253099, + "grad_norm": 1.9956984519958496, + "learning_rate": 9.884036706693952e-05, + "loss": 0.4854, + "step": 39300 + }, + { + "epoch": 1.5178192208193366, + "grad_norm": 1.6110823154449463, + "learning_rate": 9.881462604733774e-05, + "loss": 0.2504, + "step": 39310 + }, + { + "epoch": 1.5182053361133634, + "grad_norm": 0.41702982783317566, + "learning_rate": 9.878888502773596e-05, + "loss": 0.1532, + "step": 39320 + }, + { + "epoch": 1.5185914514073904, + "grad_norm": 2.3595950603485107, + "learning_rate": 9.876314400813417e-05, + "loss": 0.309, + "step": 39330 + }, + { + "epoch": 1.518977566701417, + "grad_norm": 1.1045889854431152, + "learning_rate": 9.873740298853238e-05, + "loss": 0.2858, + "step": 39340 + }, + { + "epoch": 1.519363681995444, + "grad_norm": 1.4641762971878052, + "learning_rate": 9.87116619689306e-05, + "loss": 0.3159, + "step": 39350 + }, + { + "epoch": 1.5197497972894707, + "grad_norm": 1.0977380275726318, + "learning_rate": 9.868592094932881e-05, + "loss": 0.229, + "step": 39360 + }, + { + "epoch": 1.5201359125834975, + "grad_norm": 0.5620018839836121, + "learning_rate": 9.866017992972702e-05, + "loss": 0.2642, + "step": 39370 + }, + { + "epoch": 1.5205220278775242, + "grad_norm": 0.36996108293533325, + "learning_rate": 9.863443891012524e-05, + "loss": 0.2314, + "step": 39380 + }, + { + "epoch": 1.520908143171551, + "grad_norm": 0.9804339408874512, + "learning_rate": 9.860869789052345e-05, + "loss": 0.2399, + "step": 39390 + }, + { + "epoch": 1.521294258465578, + "grad_norm": 0.4157778024673462, + "learning_rate": 9.858295687092166e-05, + "loss": 0.3006, + "step": 39400 + }, + { + "epoch": 1.5216803737596045, + "grad_norm": 0.5548539161682129, + "learning_rate": 9.855721585131988e-05, + "loss": 0.1816, + "step": 39410 + }, + { + "epoch": 1.5220664890536315, + "grad_norm": 0.9476989507675171, + "learning_rate": 9.853147483171809e-05, + "loss": 0.3943, + "step": 39420 + }, + { + "epoch": 1.522452604347658, + "grad_norm": 0.5183500647544861, + "learning_rate": 9.85057338121163e-05, + "loss": 0.1629, + "step": 39430 + }, + { + "epoch": 1.522838719641685, + "grad_norm": 1.4146567583084106, + "learning_rate": 9.847999279251452e-05, + "loss": 0.3829, + "step": 39440 + }, + { + "epoch": 1.5232248349357118, + "grad_norm": 2.4880552291870117, + "learning_rate": 9.845425177291273e-05, + "loss": 0.3052, + "step": 39450 + }, + { + "epoch": 1.5236109502297386, + "grad_norm": 0.43657195568084717, + "learning_rate": 9.842851075331094e-05, + "loss": 0.1691, + "step": 39460 + }, + { + "epoch": 
1.5239970655237653, + "grad_norm": 0.13798825442790985, + "learning_rate": 9.840276973370916e-05, + "loss": 0.3217, + "step": 39470 + }, + { + "epoch": 1.5243831808177921, + "grad_norm": 0.8712138533592224, + "learning_rate": 9.837702871410737e-05, + "loss": 0.2702, + "step": 39480 + }, + { + "epoch": 1.524769296111819, + "grad_norm": 1.155957579612732, + "learning_rate": 9.83512876945056e-05, + "loss": 0.2693, + "step": 39490 + }, + { + "epoch": 1.5251554114058457, + "grad_norm": 1.194615364074707, + "learning_rate": 9.83255466749038e-05, + "loss": 0.1541, + "step": 39500 + }, + { + "epoch": 1.5255415266998726, + "grad_norm": 1.8287533521652222, + "learning_rate": 9.829980565530201e-05, + "loss": 0.222, + "step": 39510 + }, + { + "epoch": 1.5259276419938994, + "grad_norm": 0.5741322636604309, + "learning_rate": 9.827406463570022e-05, + "loss": 0.2348, + "step": 39520 + }, + { + "epoch": 1.5263137572879262, + "grad_norm": 0.21659214794635773, + "learning_rate": 9.824832361609844e-05, + "loss": 0.2698, + "step": 39530 + }, + { + "epoch": 1.526699872581953, + "grad_norm": 0.953101396560669, + "learning_rate": 9.822258259649665e-05, + "loss": 0.1222, + "step": 39540 + }, + { + "epoch": 1.5270859878759797, + "grad_norm": 2.764655113220215, + "learning_rate": 9.819684157689486e-05, + "loss": 0.4735, + "step": 39550 + }, + { + "epoch": 1.5274721031700067, + "grad_norm": 0.8794540166854858, + "learning_rate": 9.817110055729309e-05, + "loss": 0.1588, + "step": 39560 + }, + { + "epoch": 1.5278582184640332, + "grad_norm": 1.0971317291259766, + "learning_rate": 9.814535953769129e-05, + "loss": 0.302, + "step": 39570 + }, + { + "epoch": 1.5282443337580602, + "grad_norm": 0.6677056550979614, + "learning_rate": 9.81196185180895e-05, + "loss": 0.3318, + "step": 39580 + }, + { + "epoch": 1.528630449052087, + "grad_norm": 1.276684045791626, + "learning_rate": 9.809387749848772e-05, + "loss": 0.2329, + "step": 39590 + }, + { + "epoch": 1.5290165643461138, + "grad_norm": 0.9192230701446533, + "learning_rate": 9.806813647888593e-05, + "loss": 0.3657, + "step": 39600 + }, + { + "epoch": 1.5294026796401405, + "grad_norm": 1.15361750125885, + "learning_rate": 9.804239545928414e-05, + "loss": 0.2462, + "step": 39610 + }, + { + "epoch": 1.5297887949341673, + "grad_norm": 1.0608477592468262, + "learning_rate": 9.801665443968237e-05, + "loss": 0.1601, + "step": 39620 + }, + { + "epoch": 1.5301749102281943, + "grad_norm": 0.07210031896829605, + "learning_rate": 9.799091342008058e-05, + "loss": 0.2454, + "step": 39630 + }, + { + "epoch": 1.5305610255222208, + "grad_norm": 0.9941250681877136, + "learning_rate": 9.796517240047878e-05, + "loss": 0.2783, + "step": 39640 + }, + { + "epoch": 1.5309471408162478, + "grad_norm": 1.3414831161499023, + "learning_rate": 9.7939431380877e-05, + "loss": 0.2342, + "step": 39650 + }, + { + "epoch": 1.5313332561102744, + "grad_norm": 1.5854885578155518, + "learning_rate": 9.791369036127521e-05, + "loss": 0.3444, + "step": 39660 + }, + { + "epoch": 1.5317193714043014, + "grad_norm": 1.068955659866333, + "learning_rate": 9.788794934167342e-05, + "loss": 0.2587, + "step": 39670 + }, + { + "epoch": 1.5321054866983281, + "grad_norm": 4.630382537841797, + "learning_rate": 9.786220832207165e-05, + "loss": 0.2821, + "step": 39680 + }, + { + "epoch": 1.532491601992355, + "grad_norm": 1.2920769453048706, + "learning_rate": 9.783646730246986e-05, + "loss": 0.2169, + "step": 39690 + }, + { + "epoch": 1.5328777172863817, + "grad_norm": 1.6936739683151245, + "learning_rate": 9.781072628286808e-05, + 
"loss": 0.2538, + "step": 39700 + }, + { + "epoch": 1.5332638325804084, + "grad_norm": 4.246237277984619, + "learning_rate": 9.778498526326629e-05, + "loss": 0.305, + "step": 39710 + }, + { + "epoch": 1.5336499478744354, + "grad_norm": 2.638601779937744, + "learning_rate": 9.775924424366449e-05, + "loss": 0.2168, + "step": 39720 + }, + { + "epoch": 1.534036063168462, + "grad_norm": 1.4180443286895752, + "learning_rate": 9.77335032240627e-05, + "loss": 0.2139, + "step": 39730 + }, + { + "epoch": 1.534422178462489, + "grad_norm": 1.251378059387207, + "learning_rate": 9.770776220446093e-05, + "loss": 0.2714, + "step": 39740 + }, + { + "epoch": 1.5348082937565157, + "grad_norm": 0.5204187035560608, + "learning_rate": 9.768202118485914e-05, + "loss": 0.2375, + "step": 39750 + }, + { + "epoch": 1.5351944090505425, + "grad_norm": 0.4135078489780426, + "learning_rate": 9.765628016525736e-05, + "loss": 0.442, + "step": 39760 + }, + { + "epoch": 1.5355805243445693, + "grad_norm": 1.9822927713394165, + "learning_rate": 9.763053914565557e-05, + "loss": 0.2238, + "step": 39770 + }, + { + "epoch": 1.535966639638596, + "grad_norm": 1.6851792335510254, + "learning_rate": 9.760479812605378e-05, + "loss": 0.4886, + "step": 39780 + }, + { + "epoch": 1.536352754932623, + "grad_norm": 1.9185349941253662, + "learning_rate": 9.757905710645198e-05, + "loss": 0.4167, + "step": 39790 + }, + { + "epoch": 1.5367388702266496, + "grad_norm": 0.2348870486021042, + "learning_rate": 9.75533160868502e-05, + "loss": 0.1849, + "step": 39800 + }, + { + "epoch": 1.5371249855206766, + "grad_norm": 0.2750287652015686, + "learning_rate": 9.752757506724842e-05, + "loss": 0.4298, + "step": 39810 + }, + { + "epoch": 1.5375111008147033, + "grad_norm": 0.28703558444976807, + "learning_rate": 9.750183404764664e-05, + "loss": 0.1431, + "step": 39820 + }, + { + "epoch": 1.53789721610873, + "grad_norm": 0.743290364742279, + "learning_rate": 9.747609302804485e-05, + "loss": 0.1993, + "step": 39830 + }, + { + "epoch": 1.5382833314027569, + "grad_norm": 0.8678677082061768, + "learning_rate": 9.745035200844306e-05, + "loss": 0.1695, + "step": 39840 + }, + { + "epoch": 1.5386694466967836, + "grad_norm": 0.18160143494606018, + "learning_rate": 9.742461098884128e-05, + "loss": 0.2615, + "step": 39850 + }, + { + "epoch": 1.5390555619908106, + "grad_norm": 0.08404600620269775, + "learning_rate": 9.739886996923947e-05, + "loss": 0.2523, + "step": 39860 + }, + { + "epoch": 1.5394416772848372, + "grad_norm": 3.393118381500244, + "learning_rate": 9.73731289496377e-05, + "loss": 0.3292, + "step": 39870 + }, + { + "epoch": 1.5398277925788642, + "grad_norm": 0.3148004412651062, + "learning_rate": 9.734738793003591e-05, + "loss": 0.0808, + "step": 39880 + }, + { + "epoch": 1.5402139078728907, + "grad_norm": 0.8423801064491272, + "learning_rate": 9.732164691043413e-05, + "loss": 0.1088, + "step": 39890 + }, + { + "epoch": 1.5406000231669177, + "grad_norm": 2.407147169113159, + "learning_rate": 9.729590589083234e-05, + "loss": 0.286, + "step": 39900 + }, + { + "epoch": 1.5409861384609445, + "grad_norm": 0.8560749292373657, + "learning_rate": 9.727016487123055e-05, + "loss": 0.1674, + "step": 39910 + }, + { + "epoch": 1.5413722537549712, + "grad_norm": 0.9724945425987244, + "learning_rate": 9.724442385162877e-05, + "loss": 0.232, + "step": 39920 + }, + { + "epoch": 1.541758369048998, + "grad_norm": 1.6911234855651855, + "learning_rate": 9.721868283202698e-05, + "loss": 0.3182, + "step": 39930 + }, + { + "epoch": 1.5421444843430248, + "grad_norm": 
0.8703460693359375, + "learning_rate": 9.71929418124252e-05, + "loss": 0.3565, + "step": 39940 + }, + { + "epoch": 1.5425305996370517, + "grad_norm": 1.4052613973617554, + "learning_rate": 9.716720079282341e-05, + "loss": 0.2037, + "step": 39950 + }, + { + "epoch": 1.5429167149310783, + "grad_norm": 0.2802957594394684, + "learning_rate": 9.714145977322162e-05, + "loss": 0.2596, + "step": 39960 + }, + { + "epoch": 1.5433028302251053, + "grad_norm": 0.22114449739456177, + "learning_rate": 9.711571875361983e-05, + "loss": 0.2302, + "step": 39970 + }, + { + "epoch": 1.543688945519132, + "grad_norm": 0.8095982074737549, + "learning_rate": 9.708997773401805e-05, + "loss": 0.2428, + "step": 39980 + }, + { + "epoch": 1.5440750608131588, + "grad_norm": 1.0990866422653198, + "learning_rate": 9.706423671441626e-05, + "loss": 0.3669, + "step": 39990 + }, + { + "epoch": 1.5444611761071856, + "grad_norm": 0.8309730291366577, + "learning_rate": 9.703849569481447e-05, + "loss": 0.2038, + "step": 40000 + }, + { + "epoch": 1.5448472914012124, + "grad_norm": 2.09492826461792, + "learning_rate": 9.701275467521269e-05, + "loss": 0.2934, + "step": 40010 + }, + { + "epoch": 1.5452334066952393, + "grad_norm": 0.33550217747688293, + "learning_rate": 9.69870136556109e-05, + "loss": 0.1582, + "step": 40020 + }, + { + "epoch": 1.5456195219892659, + "grad_norm": 1.6839581727981567, + "learning_rate": 9.696127263600911e-05, + "loss": 0.2084, + "step": 40030 + }, + { + "epoch": 1.5460056372832929, + "grad_norm": 1.293013095855713, + "learning_rate": 9.693553161640733e-05, + "loss": 0.1871, + "step": 40040 + }, + { + "epoch": 1.5463917525773194, + "grad_norm": 0.08080088347196579, + "learning_rate": 9.690979059680554e-05, + "loss": 0.4942, + "step": 40050 + }, + { + "epoch": 1.5467778678713464, + "grad_norm": 1.7121747732162476, + "learning_rate": 9.688404957720377e-05, + "loss": 0.4819, + "step": 40060 + }, + { + "epoch": 1.5471639831653732, + "grad_norm": 0.6426690816879272, + "learning_rate": 9.685830855760197e-05, + "loss": 0.2393, + "step": 40070 + }, + { + "epoch": 1.5475500984594, + "grad_norm": 1.5436782836914062, + "learning_rate": 9.683256753800018e-05, + "loss": 0.2666, + "step": 40080 + }, + { + "epoch": 1.547936213753427, + "grad_norm": 1.9316864013671875, + "learning_rate": 9.68068265183984e-05, + "loss": 0.1629, + "step": 40090 + }, + { + "epoch": 1.5483223290474535, + "grad_norm": 0.7503604292869568, + "learning_rate": 9.678108549879661e-05, + "loss": 0.2038, + "step": 40100 + }, + { + "epoch": 1.5487084443414805, + "grad_norm": 0.9826010465621948, + "learning_rate": 9.675534447919482e-05, + "loss": 0.1219, + "step": 40110 + }, + { + "epoch": 1.549094559635507, + "grad_norm": 1.6828583478927612, + "learning_rate": 9.672960345959305e-05, + "loss": 0.2283, + "step": 40120 + }, + { + "epoch": 1.549480674929534, + "grad_norm": 1.688306212425232, + "learning_rate": 9.670386243999126e-05, + "loss": 0.1785, + "step": 40130 + }, + { + "epoch": 1.5498667902235608, + "grad_norm": 1.6113176345825195, + "learning_rate": 9.667812142038946e-05, + "loss": 0.4171, + "step": 40140 + }, + { + "epoch": 1.5502529055175875, + "grad_norm": 0.33999955654144287, + "learning_rate": 9.665238040078767e-05, + "loss": 0.1528, + "step": 40150 + }, + { + "epoch": 1.5506390208116143, + "grad_norm": 0.7666870355606079, + "learning_rate": 9.662663938118589e-05, + "loss": 0.2258, + "step": 40160 + }, + { + "epoch": 1.551025136105641, + "grad_norm": 1.123090386390686, + "learning_rate": 9.66008983615841e-05, + "loss": 0.207, + "step": 40170 
+ }, + { + "epoch": 1.551411251399668, + "grad_norm": 1.4472588300704956, + "learning_rate": 9.657515734198233e-05, + "loss": 0.2083, + "step": 40180 + }, + { + "epoch": 1.5517973666936946, + "grad_norm": 0.6818589568138123, + "learning_rate": 9.654941632238054e-05, + "loss": 0.1821, + "step": 40190 + }, + { + "epoch": 1.5521834819877216, + "grad_norm": 2.31847882270813, + "learning_rate": 9.652367530277875e-05, + "loss": 0.5128, + "step": 40200 + }, + { + "epoch": 1.5525695972817484, + "grad_norm": 2.492560386657715, + "learning_rate": 9.649793428317695e-05, + "loss": 0.2461, + "step": 40210 + }, + { + "epoch": 1.5529557125757751, + "grad_norm": 0.6917121410369873, + "learning_rate": 9.647219326357517e-05, + "loss": 0.2829, + "step": 40220 + }, + { + "epoch": 1.553341827869802, + "grad_norm": 1.1944900751113892, + "learning_rate": 9.644645224397338e-05, + "loss": 0.2324, + "step": 40230 + }, + { + "epoch": 1.5537279431638287, + "grad_norm": 0.12343896180391312, + "learning_rate": 9.642071122437161e-05, + "loss": 0.1258, + "step": 40240 + }, + { + "epoch": 1.5541140584578557, + "grad_norm": 1.8493744134902954, + "learning_rate": 9.639497020476982e-05, + "loss": 0.2707, + "step": 40250 + }, + { + "epoch": 1.5545001737518822, + "grad_norm": 1.4696533679962158, + "learning_rate": 9.636922918516803e-05, + "loss": 0.199, + "step": 40260 + }, + { + "epoch": 1.5548862890459092, + "grad_norm": 0.7716092467308044, + "learning_rate": 9.634348816556625e-05, + "loss": 0.2869, + "step": 40270 + }, + { + "epoch": 1.5552724043399357, + "grad_norm": 1.3153057098388672, + "learning_rate": 9.631774714596446e-05, + "loss": 0.2164, + "step": 40280 + }, + { + "epoch": 1.5556585196339627, + "grad_norm": 1.2615609169006348, + "learning_rate": 9.629200612636266e-05, + "loss": 0.3418, + "step": 40290 + }, + { + "epoch": 1.5560446349279895, + "grad_norm": 1.7999435663223267, + "learning_rate": 9.626626510676089e-05, + "loss": 0.3062, + "step": 40300 + }, + { + "epoch": 1.5564307502220163, + "grad_norm": 1.5992902517318726, + "learning_rate": 9.62405240871591e-05, + "loss": 0.3037, + "step": 40310 + }, + { + "epoch": 1.556816865516043, + "grad_norm": 0.7515442967414856, + "learning_rate": 9.621478306755731e-05, + "loss": 0.2071, + "step": 40320 + }, + { + "epoch": 1.5572029808100698, + "grad_norm": 2.5413167476654053, + "learning_rate": 9.618904204795553e-05, + "loss": 0.569, + "step": 40330 + }, + { + "epoch": 1.5575890961040968, + "grad_norm": 2.303255319595337, + "learning_rate": 9.616330102835374e-05, + "loss": 0.201, + "step": 40340 + }, + { + "epoch": 1.5579752113981233, + "grad_norm": 1.0700573921203613, + "learning_rate": 9.613756000875195e-05, + "loss": 0.1675, + "step": 40350 + }, + { + "epoch": 1.5583613266921503, + "grad_norm": 1.3028898239135742, + "learning_rate": 9.611181898915015e-05, + "loss": 0.3423, + "step": 40360 + }, + { + "epoch": 1.558747441986177, + "grad_norm": 2.0213449001312256, + "learning_rate": 9.608607796954838e-05, + "loss": 0.161, + "step": 40370 + }, + { + "epoch": 1.5591335572802039, + "grad_norm": 0.15942837297916412, + "learning_rate": 9.60603369499466e-05, + "loss": 0.2067, + "step": 40380 + }, + { + "epoch": 1.5595196725742306, + "grad_norm": 0.8567324280738831, + "learning_rate": 9.603459593034481e-05, + "loss": 0.1156, + "step": 40390 + }, + { + "epoch": 1.5599057878682574, + "grad_norm": 0.598947286605835, + "learning_rate": 9.600885491074302e-05, + "loss": 0.2741, + "step": 40400 + }, + { + "epoch": 1.5602919031622844, + "grad_norm": 0.41864535212516785, + "learning_rate": 
9.598311389114123e-05, + "loss": 0.0979, + "step": 40410 + }, + { + "epoch": 1.560678018456311, + "grad_norm": 1.304883599281311, + "learning_rate": 9.595737287153945e-05, + "loss": 0.1525, + "step": 40420 + }, + { + "epoch": 1.561064133750338, + "grad_norm": 1.666935682296753, + "learning_rate": 9.593163185193766e-05, + "loss": 0.3392, + "step": 40430 + }, + { + "epoch": 1.5614502490443647, + "grad_norm": 0.44640687108039856, + "learning_rate": 9.590589083233587e-05, + "loss": 0.1946, + "step": 40440 + }, + { + "epoch": 1.5618363643383915, + "grad_norm": 0.8123475313186646, + "learning_rate": 9.588014981273409e-05, + "loss": 0.2985, + "step": 40450 + }, + { + "epoch": 1.5622224796324182, + "grad_norm": 1.7682442665100098, + "learning_rate": 9.58544087931323e-05, + "loss": 0.215, + "step": 40460 + }, + { + "epoch": 1.562608594926445, + "grad_norm": 0.8545176982879639, + "learning_rate": 9.582866777353051e-05, + "loss": 0.2513, + "step": 40470 + }, + { + "epoch": 1.562994710220472, + "grad_norm": 0.10042224079370499, + "learning_rate": 9.580292675392873e-05, + "loss": 0.1792, + "step": 40480 + }, + { + "epoch": 1.5633808255144985, + "grad_norm": 1.0059372186660767, + "learning_rate": 9.577718573432694e-05, + "loss": 0.2236, + "step": 40490 + }, + { + "epoch": 1.5637669408085255, + "grad_norm": 1.2795478105545044, + "learning_rate": 9.575144471472515e-05, + "loss": 0.2728, + "step": 40500 + }, + { + "epoch": 1.564153056102552, + "grad_norm": 1.099183201789856, + "learning_rate": 9.572570369512337e-05, + "loss": 0.2855, + "step": 40510 + }, + { + "epoch": 1.564539171396579, + "grad_norm": 1.0743390321731567, + "learning_rate": 9.569996267552158e-05, + "loss": 0.2439, + "step": 40520 + }, + { + "epoch": 1.5649252866906058, + "grad_norm": 1.1840991973876953, + "learning_rate": 9.56742216559198e-05, + "loss": 0.1416, + "step": 40530 + }, + { + "epoch": 1.5653114019846326, + "grad_norm": 0.4638634920120239, + "learning_rate": 9.564848063631801e-05, + "loss": 0.254, + "step": 40540 + }, + { + "epoch": 1.5656975172786594, + "grad_norm": 3.084916830062866, + "learning_rate": 9.562273961671623e-05, + "loss": 0.2098, + "step": 40550 + }, + { + "epoch": 1.5660836325726861, + "grad_norm": 0.666347324848175, + "learning_rate": 9.559699859711443e-05, + "loss": 0.3649, + "step": 40560 + }, + { + "epoch": 1.5664697478667131, + "grad_norm": 1.1770634651184082, + "learning_rate": 9.557125757751265e-05, + "loss": 0.2069, + "step": 40570 + }, + { + "epoch": 1.5668558631607397, + "grad_norm": 0.9030371308326721, + "learning_rate": 9.554551655791086e-05, + "loss": 0.1174, + "step": 40580 + }, + { + "epoch": 1.5672419784547666, + "grad_norm": 0.053270868957042694, + "learning_rate": 9.551977553830907e-05, + "loss": 0.1304, + "step": 40590 + }, + { + "epoch": 1.5676280937487934, + "grad_norm": 2.4098777770996094, + "learning_rate": 9.549403451870729e-05, + "loss": 0.2654, + "step": 40600 + }, + { + "epoch": 1.5680142090428202, + "grad_norm": 0.7078404426574707, + "learning_rate": 9.54682934991055e-05, + "loss": 0.2203, + "step": 40610 + }, + { + "epoch": 1.568400324336847, + "grad_norm": 1.1814978122711182, + "learning_rate": 9.544255247950373e-05, + "loss": 0.2986, + "step": 40620 + }, + { + "epoch": 1.5687864396308737, + "grad_norm": 2.29439377784729, + "learning_rate": 9.541681145990193e-05, + "loss": 0.3143, + "step": 40630 + }, + { + "epoch": 1.5691725549249007, + "grad_norm": 0.07921203970909119, + "learning_rate": 9.539107044030014e-05, + "loss": 0.151, + "step": 40640 + }, + { + "epoch": 1.5695586702189273, 
+ "grad_norm": 0.2058558166027069, + "learning_rate": 9.536532942069835e-05, + "loss": 0.2169, + "step": 40650 + }, + { + "epoch": 1.5699447855129542, + "grad_norm": 0.4650769531726837, + "learning_rate": 9.533958840109657e-05, + "loss": 0.136, + "step": 40660 + }, + { + "epoch": 1.570330900806981, + "grad_norm": 1.0603867769241333, + "learning_rate": 9.531384738149478e-05, + "loss": 0.2396, + "step": 40670 + }, + { + "epoch": 1.5707170161010078, + "grad_norm": 2.279155731201172, + "learning_rate": 9.528810636189301e-05, + "loss": 0.2679, + "step": 40680 + }, + { + "epoch": 1.5711031313950345, + "grad_norm": 1.8186841011047363, + "learning_rate": 9.526236534229122e-05, + "loss": 0.2592, + "step": 40690 + }, + { + "epoch": 1.5714892466890613, + "grad_norm": 1.9804840087890625, + "learning_rate": 9.523662432268943e-05, + "loss": 0.3184, + "step": 40700 + }, + { + "epoch": 1.5718753619830883, + "grad_norm": 2.3474323749542236, + "learning_rate": 9.521088330308763e-05, + "loss": 0.1892, + "step": 40710 + }, + { + "epoch": 1.5722614772771148, + "grad_norm": 0.5251644253730774, + "learning_rate": 9.518514228348585e-05, + "loss": 0.1306, + "step": 40720 + }, + { + "epoch": 1.5726475925711418, + "grad_norm": 0.9444900155067444, + "learning_rate": 9.515940126388406e-05, + "loss": 0.1888, + "step": 40730 + }, + { + "epoch": 1.5730337078651684, + "grad_norm": 1.1808644533157349, + "learning_rate": 9.513366024428229e-05, + "loss": 0.1249, + "step": 40740 + }, + { + "epoch": 1.5734198231591954, + "grad_norm": 2.716831684112549, + "learning_rate": 9.51079192246805e-05, + "loss": 0.2544, + "step": 40750 + }, + { + "epoch": 1.5738059384532221, + "grad_norm": 1.257308006286621, + "learning_rate": 9.508217820507871e-05, + "loss": 0.1292, + "step": 40760 + }, + { + "epoch": 1.574192053747249, + "grad_norm": 2.080162525177002, + "learning_rate": 9.505643718547693e-05, + "loss": 0.3814, + "step": 40770 + }, + { + "epoch": 1.5745781690412757, + "grad_norm": 1.107872486114502, + "learning_rate": 9.503069616587513e-05, + "loss": 0.2074, + "step": 40780 + }, + { + "epoch": 1.5749642843353024, + "grad_norm": 1.0006227493286133, + "learning_rate": 9.500495514627334e-05, + "loss": 0.2384, + "step": 40790 + }, + { + "epoch": 1.5753503996293294, + "grad_norm": 0.3345160484313965, + "learning_rate": 9.497921412667157e-05, + "loss": 0.181, + "step": 40800 + }, + { + "epoch": 1.575736514923356, + "grad_norm": 1.2941926717758179, + "learning_rate": 9.495347310706978e-05, + "loss": 0.2312, + "step": 40810 + }, + { + "epoch": 1.576122630217383, + "grad_norm": 1.3869580030441284, + "learning_rate": 9.4927732087468e-05, + "loss": 0.2107, + "step": 40820 + }, + { + "epoch": 1.5765087455114097, + "grad_norm": 1.9948787689208984, + "learning_rate": 9.49019910678662e-05, + "loss": 0.2736, + "step": 40830 + }, + { + "epoch": 1.5768948608054365, + "grad_norm": 0.22069145739078522, + "learning_rate": 9.487625004826442e-05, + "loss": 0.3173, + "step": 40840 + }, + { + "epoch": 1.5772809760994633, + "grad_norm": 0.61549311876297, + "learning_rate": 9.485050902866262e-05, + "loss": 0.1437, + "step": 40850 + }, + { + "epoch": 1.57766709139349, + "grad_norm": 0.20520836114883423, + "learning_rate": 9.482476800906083e-05, + "loss": 0.1247, + "step": 40860 + }, + { + "epoch": 1.578053206687517, + "grad_norm": 2.1745588779449463, + "learning_rate": 9.479902698945906e-05, + "loss": 0.366, + "step": 40870 + }, + { + "epoch": 1.5784393219815436, + "grad_norm": 1.1127387285232544, + "learning_rate": 9.477328596985727e-05, + "loss": 0.1202, + 
"step": 40880 + }, + { + "epoch": 1.5788254372755706, + "grad_norm": 0.598258376121521, + "learning_rate": 9.474754495025549e-05, + "loss": 0.3917, + "step": 40890 + }, + { + "epoch": 1.5792115525695973, + "grad_norm": 1.0376511812210083, + "learning_rate": 9.47218039306537e-05, + "loss": 0.1372, + "step": 40900 + }, + { + "epoch": 1.579597667863624, + "grad_norm": 1.7164186239242554, + "learning_rate": 9.469606291105191e-05, + "loss": 0.3143, + "step": 40910 + }, + { + "epoch": 1.5799837831576509, + "grad_norm": 1.7929835319519043, + "learning_rate": 9.467032189145013e-05, + "loss": 0.2864, + "step": 40920 + }, + { + "epoch": 1.5803698984516776, + "grad_norm": 0.551222026348114, + "learning_rate": 9.464458087184834e-05, + "loss": 0.1173, + "step": 40930 + }, + { + "epoch": 1.5807560137457046, + "grad_norm": 1.5236003398895264, + "learning_rate": 9.461883985224655e-05, + "loss": 0.2031, + "step": 40940 + }, + { + "epoch": 1.5811421290397312, + "grad_norm": 0.690719723701477, + "learning_rate": 9.459309883264477e-05, + "loss": 0.1913, + "step": 40950 + }, + { + "epoch": 1.5815282443337582, + "grad_norm": 0.4199884533882141, + "learning_rate": 9.456735781304298e-05, + "loss": 0.279, + "step": 40960 + }, + { + "epoch": 1.5819143596277847, + "grad_norm": 0.7393648028373718, + "learning_rate": 9.454161679344119e-05, + "loss": 0.2315, + "step": 40970 + }, + { + "epoch": 1.5823004749218117, + "grad_norm": 0.09372472018003464, + "learning_rate": 9.45158757738394e-05, + "loss": 0.2585, + "step": 40980 + }, + { + "epoch": 1.5826865902158385, + "grad_norm": 0.37245264649391174, + "learning_rate": 9.449013475423762e-05, + "loss": 0.239, + "step": 40990 + }, + { + "epoch": 1.5830727055098652, + "grad_norm": 1.8006244897842407, + "learning_rate": 9.446439373463583e-05, + "loss": 0.3497, + "step": 41000 + }, + { + "epoch": 1.583458820803892, + "grad_norm": 1.4421888589859009, + "learning_rate": 9.443865271503405e-05, + "loss": 0.2014, + "step": 41010 + }, + { + "epoch": 1.5838449360979188, + "grad_norm": 1.8858803510665894, + "learning_rate": 9.441291169543226e-05, + "loss": 0.1816, + "step": 41020 + }, + { + "epoch": 1.5842310513919458, + "grad_norm": 1.108573317527771, + "learning_rate": 9.438717067583047e-05, + "loss": 0.1869, + "step": 41030 + }, + { + "epoch": 1.5846171666859723, + "grad_norm": 0.34841394424438477, + "learning_rate": 9.436142965622869e-05, + "loss": 0.3034, + "step": 41040 + }, + { + "epoch": 1.5850032819799993, + "grad_norm": 7.2845964431762695, + "learning_rate": 9.433568863662691e-05, + "loss": 0.2659, + "step": 41050 + }, + { + "epoch": 1.585389397274026, + "grad_norm": 0.4753032922744751, + "learning_rate": 9.430994761702511e-05, + "loss": 0.2539, + "step": 41060 + }, + { + "epoch": 1.5857755125680528, + "grad_norm": 1.590050458908081, + "learning_rate": 9.428420659742333e-05, + "loss": 0.2373, + "step": 41070 + }, + { + "epoch": 1.5861616278620796, + "grad_norm": 0.6979599595069885, + "learning_rate": 9.425846557782154e-05, + "loss": 0.1931, + "step": 41080 + }, + { + "epoch": 1.5865477431561064, + "grad_norm": 0.6384108066558838, + "learning_rate": 9.423272455821975e-05, + "loss": 0.2922, + "step": 41090 + }, + { + "epoch": 1.5869338584501334, + "grad_norm": 0.4752826392650604, + "learning_rate": 9.420698353861797e-05, + "loss": 0.2616, + "step": 41100 + }, + { + "epoch": 1.58731997374416, + "grad_norm": 0.7333683371543884, + "learning_rate": 9.418124251901618e-05, + "loss": 0.1568, + "step": 41110 + }, + { + "epoch": 1.5877060890381869, + "grad_norm": 0.09763200581073761, + 
"learning_rate": 9.41555014994144e-05, + "loss": 0.1936, + "step": 41120 + }, + { + "epoch": 1.5880922043322137, + "grad_norm": 2.4087512493133545, + "learning_rate": 9.41297604798126e-05, + "loss": 0.2408, + "step": 41130 + }, + { + "epoch": 1.5884783196262404, + "grad_norm": 2.014822244644165, + "learning_rate": 9.410401946021082e-05, + "loss": 0.2883, + "step": 41140 + }, + { + "epoch": 1.5888644349202672, + "grad_norm": 1.1764864921569824, + "learning_rate": 9.407827844060903e-05, + "loss": 0.3198, + "step": 41150 + }, + { + "epoch": 1.589250550214294, + "grad_norm": 0.807905912399292, + "learning_rate": 9.405253742100725e-05, + "loss": 0.3724, + "step": 41160 + }, + { + "epoch": 1.589636665508321, + "grad_norm": 0.9179816842079163, + "learning_rate": 9.402679640140546e-05, + "loss": 0.1429, + "step": 41170 + }, + { + "epoch": 1.5900227808023475, + "grad_norm": 1.3134746551513672, + "learning_rate": 9.400105538180369e-05, + "loss": 0.2973, + "step": 41180 + }, + { + "epoch": 1.5904088960963745, + "grad_norm": 0.08131751418113708, + "learning_rate": 9.39753143622019e-05, + "loss": 0.1627, + "step": 41190 + }, + { + "epoch": 1.590795011390401, + "grad_norm": 0.23568874597549438, + "learning_rate": 9.39495733426001e-05, + "loss": 0.3159, + "step": 41200 + }, + { + "epoch": 1.591181126684428, + "grad_norm": 1.1452207565307617, + "learning_rate": 9.392383232299831e-05, + "loss": 0.1761, + "step": 41210 + }, + { + "epoch": 1.5915672419784548, + "grad_norm": 1.5560107231140137, + "learning_rate": 9.389809130339653e-05, + "loss": 0.2173, + "step": 41220 + }, + { + "epoch": 1.5919533572724816, + "grad_norm": 1.7568162679672241, + "learning_rate": 9.387235028379474e-05, + "loss": 0.1771, + "step": 41230 + }, + { + "epoch": 1.5923394725665083, + "grad_norm": 6.235257148742676, + "learning_rate": 9.384660926419297e-05, + "loss": 0.3207, + "step": 41240 + }, + { + "epoch": 1.592725587860535, + "grad_norm": 1.2336914539337158, + "learning_rate": 9.382086824459118e-05, + "loss": 0.1691, + "step": 41250 + }, + { + "epoch": 1.593111703154562, + "grad_norm": 0.06781507283449173, + "learning_rate": 9.379512722498939e-05, + "loss": 0.1485, + "step": 41260 + }, + { + "epoch": 1.5934978184485886, + "grad_norm": 0.44770199060440063, + "learning_rate": 9.37693862053876e-05, + "loss": 0.133, + "step": 41270 + }, + { + "epoch": 1.5938839337426156, + "grad_norm": 0.3812965750694275, + "learning_rate": 9.37436451857858e-05, + "loss": 0.2305, + "step": 41280 + }, + { + "epoch": 1.5942700490366424, + "grad_norm": 1.7562031745910645, + "learning_rate": 9.371790416618402e-05, + "loss": 0.265, + "step": 41290 + }, + { + "epoch": 1.5946561643306691, + "grad_norm": 0.3089204728603363, + "learning_rate": 9.369216314658225e-05, + "loss": 0.2583, + "step": 41300 + }, + { + "epoch": 1.595042279624696, + "grad_norm": 1.5295588970184326, + "learning_rate": 9.366642212698046e-05, + "loss": 0.1201, + "step": 41310 + }, + { + "epoch": 1.5954283949187227, + "grad_norm": 0.7704429626464844, + "learning_rate": 9.364068110737867e-05, + "loss": 0.1471, + "step": 41320 + }, + { + "epoch": 1.5958145102127497, + "grad_norm": 1.825547218322754, + "learning_rate": 9.361494008777689e-05, + "loss": 0.2964, + "step": 41330 + }, + { + "epoch": 1.5962006255067762, + "grad_norm": 2.4156250953674316, + "learning_rate": 9.35891990681751e-05, + "loss": 0.4332, + "step": 41340 + }, + { + "epoch": 1.5965867408008032, + "grad_norm": 0.8711603879928589, + "learning_rate": 9.35634580485733e-05, + "loss": 0.2037, + "step": 41350 + }, + { + "epoch": 
1.5969728560948298, + "grad_norm": 0.5013506412506104, + "learning_rate": 9.353771702897151e-05, + "loss": 0.1852, + "step": 41360 + }, + { + "epoch": 1.5973589713888567, + "grad_norm": 1.9529963731765747, + "learning_rate": 9.351197600936974e-05, + "loss": 0.2809, + "step": 41370 + }, + { + "epoch": 1.5977450866828835, + "grad_norm": 1.7923181056976318, + "learning_rate": 9.348623498976795e-05, + "loss": 0.3918, + "step": 41380 + }, + { + "epoch": 1.5981312019769103, + "grad_norm": 0.45643335580825806, + "learning_rate": 9.346049397016617e-05, + "loss": 0.4161, + "step": 41390 + }, + { + "epoch": 1.5985173172709373, + "grad_norm": 0.4477383494377136, + "learning_rate": 9.343475295056438e-05, + "loss": 0.202, + "step": 41400 + }, + { + "epoch": 1.5989034325649638, + "grad_norm": 1.1428645849227905, + "learning_rate": 9.340901193096259e-05, + "loss": 0.2271, + "step": 41410 + }, + { + "epoch": 1.5992895478589908, + "grad_norm": 1.8324707746505737, + "learning_rate": 9.338327091136079e-05, + "loss": 0.2261, + "step": 41420 + }, + { + "epoch": 1.5996756631530173, + "grad_norm": 1.3735613822937012, + "learning_rate": 9.335752989175902e-05, + "loss": 0.2394, + "step": 41430 + }, + { + "epoch": 1.6000617784470443, + "grad_norm": 4.176051139831543, + "learning_rate": 9.333178887215723e-05, + "loss": 0.3454, + "step": 41440 + }, + { + "epoch": 1.600447893741071, + "grad_norm": 0.1931024044752121, + "learning_rate": 9.330604785255545e-05, + "loss": 0.3191, + "step": 41450 + }, + { + "epoch": 1.6008340090350979, + "grad_norm": 0.867579996585846, + "learning_rate": 9.328030683295366e-05, + "loss": 0.1951, + "step": 41460 + }, + { + "epoch": 1.6012201243291246, + "grad_norm": 0.4601798355579376, + "learning_rate": 9.325456581335187e-05, + "loss": 0.2682, + "step": 41470 + }, + { + "epoch": 1.6016062396231514, + "grad_norm": 0.5605349540710449, + "learning_rate": 9.322882479375009e-05, + "loss": 0.1963, + "step": 41480 + }, + { + "epoch": 1.6019923549171784, + "grad_norm": 0.4736683964729309, + "learning_rate": 9.32030837741483e-05, + "loss": 0.2347, + "step": 41490 + }, + { + "epoch": 1.602378470211205, + "grad_norm": 2.145426034927368, + "learning_rate": 9.317734275454651e-05, + "loss": 0.2782, + "step": 41500 + }, + { + "epoch": 1.602764585505232, + "grad_norm": 0.34660840034484863, + "learning_rate": 9.315160173494473e-05, + "loss": 0.1966, + "step": 41510 + }, + { + "epoch": 1.6031507007992587, + "grad_norm": 1.3674156665802002, + "learning_rate": 9.312586071534294e-05, + "loss": 0.426, + "step": 41520 + }, + { + "epoch": 1.6035368160932855, + "grad_norm": 1.078046441078186, + "learning_rate": 9.310011969574115e-05, + "loss": 0.2345, + "step": 41530 + }, + { + "epoch": 1.6039229313873122, + "grad_norm": 2.233793258666992, + "learning_rate": 9.307437867613937e-05, + "loss": 0.2605, + "step": 41540 + }, + { + "epoch": 1.604309046681339, + "grad_norm": 0.9344108700752258, + "learning_rate": 9.304863765653758e-05, + "loss": 0.114, + "step": 41550 + }, + { + "epoch": 1.604695161975366, + "grad_norm": 0.29096049070358276, + "learning_rate": 9.302289663693579e-05, + "loss": 0.3319, + "step": 41560 + }, + { + "epoch": 1.6050812772693925, + "grad_norm": 0.41333675384521484, + "learning_rate": 9.2997155617334e-05, + "loss": 0.2392, + "step": 41570 + }, + { + "epoch": 1.6054673925634195, + "grad_norm": 1.3718006610870361, + "learning_rate": 9.297141459773222e-05, + "loss": 0.2304, + "step": 41580 + }, + { + "epoch": 1.605853507857446, + "grad_norm": 1.6650983095169067, + "learning_rate": 
9.294567357813043e-05, + "loss": 0.3319, + "step": 41590 + }, + { + "epoch": 1.606239623151473, + "grad_norm": 3.4465317726135254, + "learning_rate": 9.291993255852865e-05, + "loss": 0.6168, + "step": 41600 + }, + { + "epoch": 1.6066257384454998, + "grad_norm": 1.2659804821014404, + "learning_rate": 9.289419153892687e-05, + "loss": 0.2291, + "step": 41610 + }, + { + "epoch": 1.6070118537395266, + "grad_norm": 0.4997158348560333, + "learning_rate": 9.286845051932509e-05, + "loss": 0.1619, + "step": 41620 + }, + { + "epoch": 1.6073979690335534, + "grad_norm": 0.87920743227005, + "learning_rate": 9.284270949972329e-05, + "loss": 0.1894, + "step": 41630 + }, + { + "epoch": 1.6077840843275801, + "grad_norm": 1.124824047088623, + "learning_rate": 9.28169684801215e-05, + "loss": 0.4755, + "step": 41640 + }, + { + "epoch": 1.6081701996216071, + "grad_norm": 0.2584467828273773, + "learning_rate": 9.279122746051971e-05, + "loss": 0.2281, + "step": 41650 + }, + { + "epoch": 1.6085563149156337, + "grad_norm": 0.6686498522758484, + "learning_rate": 9.276548644091793e-05, + "loss": 0.3244, + "step": 41660 + }, + { + "epoch": 1.6089424302096607, + "grad_norm": 1.991140365600586, + "learning_rate": 9.273974542131614e-05, + "loss": 0.2776, + "step": 41670 + }, + { + "epoch": 1.6093285455036874, + "grad_norm": 0.40963074564933777, + "learning_rate": 9.271400440171437e-05, + "loss": 0.1072, + "step": 41680 + }, + { + "epoch": 1.6097146607977142, + "grad_norm": 1.117253065109253, + "learning_rate": 9.268826338211258e-05, + "loss": 0.2386, + "step": 41690 + }, + { + "epoch": 1.610100776091741, + "grad_norm": 1.2031314373016357, + "learning_rate": 9.266252236251078e-05, + "loss": 0.1967, + "step": 41700 + }, + { + "epoch": 1.6104868913857677, + "grad_norm": 1.4460607767105103, + "learning_rate": 9.263678134290899e-05, + "loss": 0.2471, + "step": 41710 + }, + { + "epoch": 1.6108730066797947, + "grad_norm": 1.5548468828201294, + "learning_rate": 9.26110403233072e-05, + "loss": 0.1846, + "step": 41720 + }, + { + "epoch": 1.6112591219738213, + "grad_norm": 2.042555809020996, + "learning_rate": 9.258529930370542e-05, + "loss": 0.2791, + "step": 41730 + }, + { + "epoch": 1.6116452372678483, + "grad_norm": 0.19565679132938385, + "learning_rate": 9.255955828410365e-05, + "loss": 0.2138, + "step": 41740 + }, + { + "epoch": 1.612031352561875, + "grad_norm": 0.13099287450313568, + "learning_rate": 9.253381726450186e-05, + "loss": 0.239, + "step": 41750 + }, + { + "epoch": 1.6124174678559018, + "grad_norm": 1.7031620740890503, + "learning_rate": 9.250807624490007e-05, + "loss": 0.4618, + "step": 41760 + }, + { + "epoch": 1.6128035831499286, + "grad_norm": 1.3827983140945435, + "learning_rate": 9.248233522529827e-05, + "loss": 0.3837, + "step": 41770 + }, + { + "epoch": 1.6131896984439553, + "grad_norm": 0.29199469089508057, + "learning_rate": 9.245659420569648e-05, + "loss": 0.2054, + "step": 41780 + }, + { + "epoch": 1.6135758137379823, + "grad_norm": 0.8447692394256592, + "learning_rate": 9.24308531860947e-05, + "loss": 0.2001, + "step": 41790 + }, + { + "epoch": 1.6139619290320089, + "grad_norm": 3.2848644256591797, + "learning_rate": 9.240511216649292e-05, + "loss": 0.2177, + "step": 41800 + }, + { + "epoch": 1.6143480443260358, + "grad_norm": 0.7044230699539185, + "learning_rate": 9.237937114689114e-05, + "loss": 0.2592, + "step": 41810 + }, + { + "epoch": 1.6147341596200624, + "grad_norm": 0.6980583667755127, + "learning_rate": 9.235363012728935e-05, + "loss": 0.2133, + "step": 41820 + }, + { + "epoch": 
1.6151202749140894, + "grad_norm": 0.6349130868911743, + "learning_rate": 9.232788910768756e-05, + "loss": 0.1238, + "step": 41830 + }, + { + "epoch": 1.6155063902081161, + "grad_norm": 3.3420283794403076, + "learning_rate": 9.230214808808576e-05, + "loss": 0.4264, + "step": 41840 + }, + { + "epoch": 1.615892505502143, + "grad_norm": 1.1701698303222656, + "learning_rate": 9.227640706848398e-05, + "loss": 0.2886, + "step": 41850 + }, + { + "epoch": 1.6162786207961697, + "grad_norm": 1.7839452028274536, + "learning_rate": 9.22506660488822e-05, + "loss": 0.2431, + "step": 41860 + }, + { + "epoch": 1.6166647360901965, + "grad_norm": 0.5988792181015015, + "learning_rate": 9.222492502928042e-05, + "loss": 0.2162, + "step": 41870 + }, + { + "epoch": 1.6170508513842234, + "grad_norm": 2.511929988861084, + "learning_rate": 9.219918400967863e-05, + "loss": 0.2862, + "step": 41880 + }, + { + "epoch": 1.61743696667825, + "grad_norm": 0.524932861328125, + "learning_rate": 9.217344299007684e-05, + "loss": 0.1984, + "step": 41890 + }, + { + "epoch": 1.617823081972277, + "grad_norm": 0.42112675309181213, + "learning_rate": 9.214770197047506e-05, + "loss": 0.3873, + "step": 41900 + }, + { + "epoch": 1.6182091972663037, + "grad_norm": 2.114396810531616, + "learning_rate": 9.212196095087327e-05, + "loss": 0.209, + "step": 41910 + }, + { + "epoch": 1.6185953125603305, + "grad_norm": 1.023512601852417, + "learning_rate": 9.209621993127147e-05, + "loss": 0.1849, + "step": 41920 + }, + { + "epoch": 1.6189814278543573, + "grad_norm": 0.4894075393676758, + "learning_rate": 9.20704789116697e-05, + "loss": 0.2624, + "step": 41930 + }, + { + "epoch": 1.619367543148384, + "grad_norm": 2.8259129524230957, + "learning_rate": 9.204473789206791e-05, + "loss": 0.1016, + "step": 41940 + }, + { + "epoch": 1.619753658442411, + "grad_norm": 0.30800989270210266, + "learning_rate": 9.201899687246612e-05, + "loss": 0.2056, + "step": 41950 + }, + { + "epoch": 1.6201397737364376, + "grad_norm": 0.9112114906311035, + "learning_rate": 9.199325585286434e-05, + "loss": 0.2935, + "step": 41960 + }, + { + "epoch": 1.6205258890304646, + "grad_norm": 0.9642273783683777, + "learning_rate": 9.196751483326255e-05, + "loss": 0.2782, + "step": 41970 + }, + { + "epoch": 1.6209120043244913, + "grad_norm": 1.4163227081298828, + "learning_rate": 9.194177381366076e-05, + "loss": 0.2259, + "step": 41980 + }, + { + "epoch": 1.621298119618518, + "grad_norm": 1.3390878438949585, + "learning_rate": 9.191603279405898e-05, + "loss": 0.2118, + "step": 41990 + }, + { + "epoch": 1.6216842349125449, + "grad_norm": 0.7193337082862854, + "learning_rate": 9.189029177445719e-05, + "loss": 0.1972, + "step": 42000 + }, + { + "epoch": 1.6220703502065716, + "grad_norm": 0.9605100154876709, + "learning_rate": 9.18645507548554e-05, + "loss": 0.2109, + "step": 42010 + }, + { + "epoch": 1.6224564655005986, + "grad_norm": 1.1802191734313965, + "learning_rate": 9.183880973525362e-05, + "loss": 0.204, + "step": 42020 + }, + { + "epoch": 1.6228425807946252, + "grad_norm": 0.9422736167907715, + "learning_rate": 9.181306871565183e-05, + "loss": 0.2316, + "step": 42030 + }, + { + "epoch": 1.6232286960886522, + "grad_norm": 2.4047484397888184, + "learning_rate": 9.178732769605004e-05, + "loss": 0.3395, + "step": 42040 + }, + { + "epoch": 1.6236148113826787, + "grad_norm": 0.625637948513031, + "learning_rate": 9.176158667644826e-05, + "loss": 0.2339, + "step": 42050 + }, + { + "epoch": 1.6240009266767057, + "grad_norm": 1.628997564315796, + "learning_rate": 9.173584565684647e-05, 
+ "loss": 0.2034, + "step": 42060 + }, + { + "epoch": 1.6243870419707325, + "grad_norm": 0.46910417079925537, + "learning_rate": 9.171010463724468e-05, + "loss": 0.2874, + "step": 42070 + }, + { + "epoch": 1.6247731572647592, + "grad_norm": 1.7646992206573486, + "learning_rate": 9.16843636176429e-05, + "loss": 0.2998, + "step": 42080 + }, + { + "epoch": 1.625159272558786, + "grad_norm": 1.0245219469070435, + "learning_rate": 9.165862259804111e-05, + "loss": 0.3796, + "step": 42090 + }, + { + "epoch": 1.6255453878528128, + "grad_norm": 1.175984501838684, + "learning_rate": 9.163288157843932e-05, + "loss": 0.2683, + "step": 42100 + }, + { + "epoch": 1.6259315031468398, + "grad_norm": 0.5364359617233276, + "learning_rate": 9.160714055883755e-05, + "loss": 0.185, + "step": 42110 + }, + { + "epoch": 1.6263176184408663, + "grad_norm": 2.991917610168457, + "learning_rate": 9.158139953923575e-05, + "loss": 0.4202, + "step": 42120 + }, + { + "epoch": 1.6267037337348933, + "grad_norm": 0.6997040510177612, + "learning_rate": 9.155565851963396e-05, + "loss": 0.2833, + "step": 42130 + }, + { + "epoch": 1.62708984902892, + "grad_norm": 1.2331346273422241, + "learning_rate": 9.152991750003218e-05, + "loss": 0.2001, + "step": 42140 + }, + { + "epoch": 1.6274759643229468, + "grad_norm": 0.5714563131332397, + "learning_rate": 9.150417648043039e-05, + "loss": 0.1649, + "step": 42150 + }, + { + "epoch": 1.6278620796169736, + "grad_norm": 0.18129512667655945, + "learning_rate": 9.14784354608286e-05, + "loss": 0.2042, + "step": 42160 + }, + { + "epoch": 1.6282481949110004, + "grad_norm": 1.0198371410369873, + "learning_rate": 9.145269444122682e-05, + "loss": 0.089, + "step": 42170 + }, + { + "epoch": 1.6286343102050274, + "grad_norm": 1.7465068101882935, + "learning_rate": 9.142695342162504e-05, + "loss": 0.2767, + "step": 42180 + }, + { + "epoch": 1.629020425499054, + "grad_norm": 1.5238598585128784, + "learning_rate": 9.140121240202324e-05, + "loss": 0.2682, + "step": 42190 + }, + { + "epoch": 1.629406540793081, + "grad_norm": 0.21305501461029053, + "learning_rate": 9.137547138242146e-05, + "loss": 0.1656, + "step": 42200 + }, + { + "epoch": 1.6297926560871077, + "grad_norm": 2.7441041469573975, + "learning_rate": 9.134973036281967e-05, + "loss": 0.1734, + "step": 42210 + }, + { + "epoch": 1.6301787713811344, + "grad_norm": 1.0751773118972778, + "learning_rate": 9.132398934321788e-05, + "loss": 0.3272, + "step": 42220 + }, + { + "epoch": 1.6305648866751612, + "grad_norm": 2.0907068252563477, + "learning_rate": 9.12982483236161e-05, + "loss": 0.2322, + "step": 42230 + }, + { + "epoch": 1.630951001969188, + "grad_norm": 0.8943297266960144, + "learning_rate": 9.127250730401432e-05, + "loss": 0.1908, + "step": 42240 + }, + { + "epoch": 1.631337117263215, + "grad_norm": 0.30939817428588867, + "learning_rate": 9.124676628441254e-05, + "loss": 0.2398, + "step": 42250 + }, + { + "epoch": 1.6317232325572415, + "grad_norm": 0.07228074967861176, + "learning_rate": 9.122102526481075e-05, + "loss": 0.2146, + "step": 42260 + }, + { + "epoch": 1.6321093478512685, + "grad_norm": 1.6129286289215088, + "learning_rate": 9.119528424520895e-05, + "loss": 0.2989, + "step": 42270 + }, + { + "epoch": 1.632495463145295, + "grad_norm": 3.1282570362091064, + "learning_rate": 9.116954322560716e-05, + "loss": 0.2296, + "step": 42280 + }, + { + "epoch": 1.632881578439322, + "grad_norm": 1.9693125486373901, + "learning_rate": 9.114380220600538e-05, + "loss": 0.3122, + "step": 42290 + }, + { + "epoch": 1.6332676937333488, + "grad_norm": 
0.7935388088226318, + "learning_rate": 9.11180611864036e-05, + "loss": 0.2738, + "step": 42300 + }, + { + "epoch": 1.6336538090273756, + "grad_norm": 0.6630697250366211, + "learning_rate": 9.109232016680182e-05, + "loss": 0.1349, + "step": 42310 + }, + { + "epoch": 1.6340399243214023, + "grad_norm": 0.6689760684967041, + "learning_rate": 9.106657914720003e-05, + "loss": 0.2881, + "step": 42320 + }, + { + "epoch": 1.634426039615429, + "grad_norm": 1.3075367212295532, + "learning_rate": 9.104083812759824e-05, + "loss": 0.3599, + "step": 42330 + }, + { + "epoch": 1.634812154909456, + "grad_norm": 0.6957948803901672, + "learning_rate": 9.101509710799644e-05, + "loss": 0.2562, + "step": 42340 + }, + { + "epoch": 1.6351982702034826, + "grad_norm": 0.6405411958694458, + "learning_rate": 9.098935608839466e-05, + "loss": 0.3024, + "step": 42350 + }, + { + "epoch": 1.6355843854975096, + "grad_norm": 1.4570382833480835, + "learning_rate": 9.096361506879288e-05, + "loss": 0.2415, + "step": 42360 + }, + { + "epoch": 1.6359705007915364, + "grad_norm": 0.28187355399131775, + "learning_rate": 9.09378740491911e-05, + "loss": 0.3213, + "step": 42370 + }, + { + "epoch": 1.6363566160855632, + "grad_norm": 0.5807194113731384, + "learning_rate": 9.091213302958931e-05, + "loss": 0.2513, + "step": 42380 + }, + { + "epoch": 1.63674273137959, + "grad_norm": 2.750338554382324, + "learning_rate": 9.088639200998752e-05, + "loss": 0.2667, + "step": 42390 + }, + { + "epoch": 1.6371288466736167, + "grad_norm": 2.9717495441436768, + "learning_rate": 9.086065099038574e-05, + "loss": 0.2719, + "step": 42400 + }, + { + "epoch": 1.6375149619676437, + "grad_norm": 0.5903140306472778, + "learning_rate": 9.083490997078394e-05, + "loss": 0.2861, + "step": 42410 + }, + { + "epoch": 1.6379010772616702, + "grad_norm": 0.5791400671005249, + "learning_rate": 9.080916895118215e-05, + "loss": 0.1999, + "step": 42420 + }, + { + "epoch": 1.6382871925556972, + "grad_norm": 0.5550700426101685, + "learning_rate": 9.078342793158038e-05, + "loss": 0.2856, + "step": 42430 + }, + { + "epoch": 1.638673307849724, + "grad_norm": 0.1384456902742386, + "learning_rate": 9.075768691197859e-05, + "loss": 0.1379, + "step": 42440 + }, + { + "epoch": 1.6390594231437507, + "grad_norm": 0.4201198220252991, + "learning_rate": 9.07319458923768e-05, + "loss": 0.2401, + "step": 42450 + }, + { + "epoch": 1.6394455384377775, + "grad_norm": 0.4227651059627533, + "learning_rate": 9.070620487277502e-05, + "loss": 0.1251, + "step": 42460 + }, + { + "epoch": 1.6398316537318043, + "grad_norm": 0.5025180578231812, + "learning_rate": 9.068046385317323e-05, + "loss": 0.1911, + "step": 42470 + }, + { + "epoch": 1.6402177690258313, + "grad_norm": 1.5064163208007812, + "learning_rate": 9.065472283357144e-05, + "loss": 0.2291, + "step": 42480 + }, + { + "epoch": 1.6406038843198578, + "grad_norm": 1.0776429176330566, + "learning_rate": 9.062898181396966e-05, + "loss": 0.1429, + "step": 42490 + }, + { + "epoch": 1.6409899996138848, + "grad_norm": 1.409090280532837, + "learning_rate": 9.060324079436787e-05, + "loss": 0.1768, + "step": 42500 + }, + { + "epoch": 1.6413761149079114, + "grad_norm": 1.4606170654296875, + "learning_rate": 9.057749977476608e-05, + "loss": 0.1657, + "step": 42510 + }, + { + "epoch": 1.6417622302019383, + "grad_norm": 0.1410249024629593, + "learning_rate": 9.05517587551643e-05, + "loss": 0.1768, + "step": 42520 + }, + { + "epoch": 1.6421483454959651, + "grad_norm": 1.3056964874267578, + "learning_rate": 9.052601773556251e-05, + "loss": 0.2001, + "step": 
42530 + }, + { + "epoch": 1.6425344607899919, + "grad_norm": 0.7286831140518188, + "learning_rate": 9.050027671596072e-05, + "loss": 0.2181, + "step": 42540 + }, + { + "epoch": 1.6429205760840186, + "grad_norm": 0.7450721263885498, + "learning_rate": 9.047453569635894e-05, + "loss": 0.1961, + "step": 42550 + }, + { + "epoch": 1.6433066913780454, + "grad_norm": 0.3484252691268921, + "learning_rate": 9.044879467675715e-05, + "loss": 0.1435, + "step": 42560 + }, + { + "epoch": 1.6436928066720724, + "grad_norm": 1.4743714332580566, + "learning_rate": 9.042305365715536e-05, + "loss": 0.218, + "step": 42570 + }, + { + "epoch": 1.644078921966099, + "grad_norm": 1.9126007556915283, + "learning_rate": 9.039731263755358e-05, + "loss": 0.3114, + "step": 42580 + }, + { + "epoch": 1.644465037260126, + "grad_norm": 0.1249854639172554, + "learning_rate": 9.037157161795179e-05, + "loss": 0.1675, + "step": 42590 + }, + { + "epoch": 1.6448511525541527, + "grad_norm": 0.948403537273407, + "learning_rate": 9.034583059835e-05, + "loss": 0.1956, + "step": 42600 + }, + { + "epoch": 1.6452372678481795, + "grad_norm": 1.6507424116134644, + "learning_rate": 9.032008957874823e-05, + "loss": 0.2754, + "step": 42610 + }, + { + "epoch": 1.6456233831422062, + "grad_norm": 2.7686564922332764, + "learning_rate": 9.029434855914643e-05, + "loss": 0.3409, + "step": 42620 + }, + { + "epoch": 1.646009498436233, + "grad_norm": 2.7112228870391846, + "learning_rate": 9.026860753954464e-05, + "loss": 0.3626, + "step": 42630 + }, + { + "epoch": 1.64639561373026, + "grad_norm": 1.400976538658142, + "learning_rate": 9.024286651994286e-05, + "loss": 0.199, + "step": 42640 + }, + { + "epoch": 1.6467817290242865, + "grad_norm": 1.4591180086135864, + "learning_rate": 9.021712550034107e-05, + "loss": 0.4385, + "step": 42650 + }, + { + "epoch": 1.6471678443183135, + "grad_norm": 1.386496901512146, + "learning_rate": 9.019138448073928e-05, + "loss": 0.1433, + "step": 42660 + }, + { + "epoch": 1.64755395961234, + "grad_norm": 2.054412841796875, + "learning_rate": 9.01656434611375e-05, + "loss": 0.1602, + "step": 42670 + }, + { + "epoch": 1.647940074906367, + "grad_norm": 1.5373504161834717, + "learning_rate": 9.013990244153572e-05, + "loss": 0.1329, + "step": 42680 + }, + { + "epoch": 1.6483261902003938, + "grad_norm": 2.131410598754883, + "learning_rate": 9.011416142193392e-05, + "loss": 0.2858, + "step": 42690 + }, + { + "epoch": 1.6487123054944206, + "grad_norm": 1.5870622396469116, + "learning_rate": 9.008842040233214e-05, + "loss": 0.414, + "step": 42700 + }, + { + "epoch": 1.6490984207884476, + "grad_norm": 0.46856650710105896, + "learning_rate": 9.006267938273035e-05, + "loss": 0.1262, + "step": 42710 + }, + { + "epoch": 1.6494845360824741, + "grad_norm": 1.368762731552124, + "learning_rate": 9.003693836312856e-05, + "loss": 0.2341, + "step": 42720 + }, + { + "epoch": 1.6498706513765011, + "grad_norm": 2.3937511444091797, + "learning_rate": 9.001119734352678e-05, + "loss": 0.215, + "step": 42730 + }, + { + "epoch": 1.6502567666705277, + "grad_norm": 0.9359129071235657, + "learning_rate": 8.9985456323925e-05, + "loss": 0.2173, + "step": 42740 + }, + { + "epoch": 1.6506428819645547, + "grad_norm": 1.381408452987671, + "learning_rate": 8.995971530432322e-05, + "loss": 0.1671, + "step": 42750 + }, + { + "epoch": 1.6510289972585814, + "grad_norm": 0.5645018815994263, + "learning_rate": 8.993397428472142e-05, + "loss": 0.1943, + "step": 42760 + }, + { + "epoch": 1.6514151125526082, + "grad_norm": 1.1989009380340576, + "learning_rate": 
8.990823326511963e-05, + "loss": 0.1845, + "step": 42770 + }, + { + "epoch": 1.651801227846635, + "grad_norm": 2.2969398498535156, + "learning_rate": 8.988249224551784e-05, + "loss": 0.3469, + "step": 42780 + }, + { + "epoch": 1.6521873431406617, + "grad_norm": 1.931502342224121, + "learning_rate": 8.985675122591606e-05, + "loss": 0.3857, + "step": 42790 + }, + { + "epoch": 1.6525734584346887, + "grad_norm": 1.2291436195373535, + "learning_rate": 8.983101020631428e-05, + "loss": 0.1948, + "step": 42800 + }, + { + "epoch": 1.6529595737287153, + "grad_norm": 0.3344796597957611, + "learning_rate": 8.98052691867125e-05, + "loss": 0.2707, + "step": 42810 + }, + { + "epoch": 1.6533456890227423, + "grad_norm": 1.9174389839172363, + "learning_rate": 8.977952816711071e-05, + "loss": 0.2318, + "step": 42820 + }, + { + "epoch": 1.653731804316769, + "grad_norm": 0.49493780732154846, + "learning_rate": 8.975378714750892e-05, + "loss": 0.2615, + "step": 42830 + }, + { + "epoch": 1.6541179196107958, + "grad_norm": 0.8160524964332581, + "learning_rate": 8.972804612790712e-05, + "loss": 0.1837, + "step": 42840 + }, + { + "epoch": 1.6545040349048226, + "grad_norm": 0.571782112121582, + "learning_rate": 8.970230510830534e-05, + "loss": 0.1451, + "step": 42850 + }, + { + "epoch": 1.6548901501988493, + "grad_norm": 2.636889934539795, + "learning_rate": 8.967656408870356e-05, + "loss": 0.348, + "step": 42860 + }, + { + "epoch": 1.6552762654928763, + "grad_norm": 0.4561298191547394, + "learning_rate": 8.965082306910178e-05, + "loss": 0.3301, + "step": 42870 + }, + { + "epoch": 1.6556623807869029, + "grad_norm": 0.7691421508789062, + "learning_rate": 8.962508204949999e-05, + "loss": 0.231, + "step": 42880 + }, + { + "epoch": 1.6560484960809299, + "grad_norm": 0.2073112428188324, + "learning_rate": 8.95993410298982e-05, + "loss": 0.1358, + "step": 42890 + }, + { + "epoch": 1.6564346113749564, + "grad_norm": 0.35970741510391235, + "learning_rate": 8.957360001029642e-05, + "loss": 0.3412, + "step": 42900 + }, + { + "epoch": 1.6568207266689834, + "grad_norm": 0.6349666118621826, + "learning_rate": 8.954785899069462e-05, + "loss": 0.3279, + "step": 42910 + }, + { + "epoch": 1.6572068419630102, + "grad_norm": 1.6498395204544067, + "learning_rate": 8.952211797109284e-05, + "loss": 0.3195, + "step": 42920 + }, + { + "epoch": 1.657592957257037, + "grad_norm": 1.200462818145752, + "learning_rate": 8.949637695149106e-05, + "loss": 0.1533, + "step": 42930 + }, + { + "epoch": 1.6579790725510637, + "grad_norm": 3.063417673110962, + "learning_rate": 8.947063593188927e-05, + "loss": 0.1553, + "step": 42940 + }, + { + "epoch": 1.6583651878450905, + "grad_norm": 2.632843494415283, + "learning_rate": 8.944489491228748e-05, + "loss": 0.1819, + "step": 42950 + }, + { + "epoch": 1.6587513031391175, + "grad_norm": 1.2199878692626953, + "learning_rate": 8.94191538926857e-05, + "loss": 0.3631, + "step": 42960 + }, + { + "epoch": 1.659137418433144, + "grad_norm": 4.311095237731934, + "learning_rate": 8.939341287308391e-05, + "loss": 0.2747, + "step": 42970 + }, + { + "epoch": 1.659523533727171, + "grad_norm": 1.0289263725280762, + "learning_rate": 8.936767185348211e-05, + "loss": 0.225, + "step": 42980 + }, + { + "epoch": 1.6599096490211978, + "grad_norm": 0.17542269825935364, + "learning_rate": 8.934193083388034e-05, + "loss": 0.3562, + "step": 42990 + }, + { + "epoch": 1.6602957643152245, + "grad_norm": 1.6451623439788818, + "learning_rate": 8.931618981427855e-05, + "loss": 0.3318, + "step": 43000 + }, + { + "epoch": 1.6606818796092513, 
+ "grad_norm": 0.6164776682853699, + "learning_rate": 8.929044879467676e-05, + "loss": 0.272, + "step": 43010 + }, + { + "epoch": 1.661067994903278, + "grad_norm": 0.8627731800079346, + "learning_rate": 8.926470777507498e-05, + "loss": 0.2037, + "step": 43020 + }, + { + "epoch": 1.661454110197305, + "grad_norm": 0.9663155674934387, + "learning_rate": 8.923896675547319e-05, + "loss": 0.2291, + "step": 43030 + }, + { + "epoch": 1.6618402254913316, + "grad_norm": 1.909785509109497, + "learning_rate": 8.92132257358714e-05, + "loss": 0.357, + "step": 43040 + }, + { + "epoch": 1.6622263407853586, + "grad_norm": 1.4348317384719849, + "learning_rate": 8.918748471626962e-05, + "loss": 0.2494, + "step": 43050 + }, + { + "epoch": 1.6626124560793853, + "grad_norm": 1.8088570833206177, + "learning_rate": 8.916174369666783e-05, + "loss": 0.1663, + "step": 43060 + }, + { + "epoch": 1.6629985713734121, + "grad_norm": 0.4020337462425232, + "learning_rate": 8.913600267706604e-05, + "loss": 0.251, + "step": 43070 + }, + { + "epoch": 1.6633846866674389, + "grad_norm": 1.4719258546829224, + "learning_rate": 8.911026165746426e-05, + "loss": 0.3267, + "step": 43080 + }, + { + "epoch": 1.6637708019614657, + "grad_norm": 0.2420385479927063, + "learning_rate": 8.908452063786247e-05, + "loss": 0.2289, + "step": 43090 + }, + { + "epoch": 1.6641569172554926, + "grad_norm": 0.4697989225387573, + "learning_rate": 8.905877961826068e-05, + "loss": 0.1597, + "step": 43100 + }, + { + "epoch": 1.6645430325495192, + "grad_norm": 0.4061245918273926, + "learning_rate": 8.90330385986589e-05, + "loss": 0.2299, + "step": 43110 + }, + { + "epoch": 1.6649291478435462, + "grad_norm": 1.4116615056991577, + "learning_rate": 8.900729757905711e-05, + "loss": 0.251, + "step": 43120 + }, + { + "epoch": 1.6653152631375727, + "grad_norm": 0.2808239161968231, + "learning_rate": 8.898155655945532e-05, + "loss": 0.2204, + "step": 43130 + }, + { + "epoch": 1.6657013784315997, + "grad_norm": 0.09532297402620316, + "learning_rate": 8.895581553985354e-05, + "loss": 0.2076, + "step": 43140 + }, + { + "epoch": 1.6660874937256265, + "grad_norm": 1.6641709804534912, + "learning_rate": 8.893007452025175e-05, + "loss": 0.1697, + "step": 43150 + }, + { + "epoch": 1.6664736090196532, + "grad_norm": 0.9248488545417786, + "learning_rate": 8.890433350064996e-05, + "loss": 0.4659, + "step": 43160 + }, + { + "epoch": 1.66685972431368, + "grad_norm": 2.1415629386901855, + "learning_rate": 8.887859248104819e-05, + "loss": 0.3056, + "step": 43170 + }, + { + "epoch": 1.6672458396077068, + "grad_norm": 0.39359986782073975, + "learning_rate": 8.885285146144639e-05, + "loss": 0.2799, + "step": 43180 + }, + { + "epoch": 1.6676319549017338, + "grad_norm": 0.09023096412420273, + "learning_rate": 8.88271104418446e-05, + "loss": 0.2777, + "step": 43190 + }, + { + "epoch": 1.6680180701957603, + "grad_norm": 1.8555763959884644, + "learning_rate": 8.880136942224282e-05, + "loss": 0.3152, + "step": 43200 + }, + { + "epoch": 1.6684041854897873, + "grad_norm": 0.18823792040348053, + "learning_rate": 8.877562840264103e-05, + "loss": 0.1625, + "step": 43210 + }, + { + "epoch": 1.668790300783814, + "grad_norm": 1.5633597373962402, + "learning_rate": 8.874988738303924e-05, + "loss": 0.3041, + "step": 43220 + }, + { + "epoch": 1.6691764160778408, + "grad_norm": 1.6646497249603271, + "learning_rate": 8.872414636343746e-05, + "loss": 0.3719, + "step": 43230 + }, + { + "epoch": 1.6695625313718676, + "grad_norm": 1.569008469581604, + "learning_rate": 8.869840534383568e-05, + "loss": 
0.1797, + "step": 43240 + }, + { + "epoch": 1.6699486466658944, + "grad_norm": 0.44778671860694885, + "learning_rate": 8.86726643242339e-05, + "loss": 0.1192, + "step": 43250 + }, + { + "epoch": 1.6703347619599214, + "grad_norm": 1.936880350112915, + "learning_rate": 8.86469233046321e-05, + "loss": 0.262, + "step": 43260 + }, + { + "epoch": 1.670720877253948, + "grad_norm": 2.0454766750335693, + "learning_rate": 8.862118228503031e-05, + "loss": 0.125, + "step": 43270 + }, + { + "epoch": 1.671106992547975, + "grad_norm": 2.3878109455108643, + "learning_rate": 8.859544126542852e-05, + "loss": 0.2755, + "step": 43280 + }, + { + "epoch": 1.6714931078420017, + "grad_norm": 2.1281206607818604, + "learning_rate": 8.856970024582674e-05, + "loss": 0.3056, + "step": 43290 + }, + { + "epoch": 1.6718792231360284, + "grad_norm": 1.1602097749710083, + "learning_rate": 8.854395922622496e-05, + "loss": 0.3131, + "step": 43300 + }, + { + "epoch": 1.6722653384300552, + "grad_norm": 1.444730520248413, + "learning_rate": 8.851821820662318e-05, + "loss": 0.2922, + "step": 43310 + }, + { + "epoch": 1.672651453724082, + "grad_norm": 1.0081762075424194, + "learning_rate": 8.849247718702139e-05, + "loss": 0.2689, + "step": 43320 + }, + { + "epoch": 1.673037569018109, + "grad_norm": 2.532080888748169, + "learning_rate": 8.846673616741959e-05, + "loss": 0.4207, + "step": 43330 + }, + { + "epoch": 1.6734236843121355, + "grad_norm": 1.1733953952789307, + "learning_rate": 8.84409951478178e-05, + "loss": 0.3253, + "step": 43340 + }, + { + "epoch": 1.6738097996061625, + "grad_norm": 0.5781744122505188, + "learning_rate": 8.841525412821602e-05, + "loss": 0.165, + "step": 43350 + }, + { + "epoch": 1.674195914900189, + "grad_norm": 2.3320493698120117, + "learning_rate": 8.838951310861424e-05, + "loss": 0.1466, + "step": 43360 + }, + { + "epoch": 1.674582030194216, + "grad_norm": 0.6514772176742554, + "learning_rate": 8.836377208901246e-05, + "loss": 0.2392, + "step": 43370 + }, + { + "epoch": 1.6749681454882428, + "grad_norm": 2.446646213531494, + "learning_rate": 8.833803106941067e-05, + "loss": 0.2237, + "step": 43380 + }, + { + "epoch": 1.6753542607822696, + "grad_norm": 1.0567893981933594, + "learning_rate": 8.831229004980888e-05, + "loss": 0.4682, + "step": 43390 + }, + { + "epoch": 1.6757403760762963, + "grad_norm": 1.559910774230957, + "learning_rate": 8.828654903020708e-05, + "loss": 0.2283, + "step": 43400 + }, + { + "epoch": 1.676126491370323, + "grad_norm": 2.8934245109558105, + "learning_rate": 8.82608080106053e-05, + "loss": 0.255, + "step": 43410 + }, + { + "epoch": 1.67651260666435, + "grad_norm": 1.592612862586975, + "learning_rate": 8.823506699100352e-05, + "loss": 0.3115, + "step": 43420 + }, + { + "epoch": 1.6768987219583766, + "grad_norm": 0.19438475370407104, + "learning_rate": 8.820932597140174e-05, + "loss": 0.2549, + "step": 43430 + }, + { + "epoch": 1.6772848372524036, + "grad_norm": 1.3219093084335327, + "learning_rate": 8.818358495179995e-05, + "loss": 0.3156, + "step": 43440 + }, + { + "epoch": 1.6776709525464304, + "grad_norm": 0.23484057188034058, + "learning_rate": 8.815784393219816e-05, + "loss": 0.1866, + "step": 43450 + }, + { + "epoch": 1.6780570678404572, + "grad_norm": 1.284556269645691, + "learning_rate": 8.813210291259638e-05, + "loss": 0.2019, + "step": 43460 + }, + { + "epoch": 1.678443183134484, + "grad_norm": 1.4797706604003906, + "learning_rate": 8.810636189299459e-05, + "loss": 0.2423, + "step": 43470 + }, + { + "epoch": 1.6788292984285107, + "grad_norm": 1.6002583503723145, + 
"learning_rate": 8.808062087339279e-05, + "loss": 0.182, + "step": 43480 + }, + { + "epoch": 1.6792154137225377, + "grad_norm": 1.0503334999084473, + "learning_rate": 8.805487985379102e-05, + "loss": 0.2062, + "step": 43490 + }, + { + "epoch": 1.6796015290165642, + "grad_norm": 1.0708048343658447, + "learning_rate": 8.802913883418923e-05, + "loss": 0.1004, + "step": 43500 + }, + { + "epoch": 1.6799876443105912, + "grad_norm": 1.6162430047988892, + "learning_rate": 8.800339781458744e-05, + "loss": 0.1858, + "step": 43510 + }, + { + "epoch": 1.680373759604618, + "grad_norm": 3.5026352405548096, + "learning_rate": 8.797765679498566e-05, + "loss": 0.3431, + "step": 43520 + }, + { + "epoch": 1.6807598748986448, + "grad_norm": 1.1792393922805786, + "learning_rate": 8.795191577538387e-05, + "loss": 0.3389, + "step": 43530 + }, + { + "epoch": 1.6811459901926715, + "grad_norm": 1.0513951778411865, + "learning_rate": 8.792617475578208e-05, + "loss": 0.4476, + "step": 43540 + }, + { + "epoch": 1.6815321054866983, + "grad_norm": 1.3215738534927368, + "learning_rate": 8.79004337361803e-05, + "loss": 0.1624, + "step": 43550 + }, + { + "epoch": 1.6819182207807253, + "grad_norm": 0.4807497262954712, + "learning_rate": 8.787469271657851e-05, + "loss": 0.2777, + "step": 43560 + }, + { + "epoch": 1.6823043360747518, + "grad_norm": 1.1114505529403687, + "learning_rate": 8.784895169697672e-05, + "loss": 0.1405, + "step": 43570 + }, + { + "epoch": 1.6826904513687788, + "grad_norm": 0.8023913502693176, + "learning_rate": 8.782321067737494e-05, + "loss": 0.1744, + "step": 43580 + }, + { + "epoch": 1.6830765666628054, + "grad_norm": 1.1723856925964355, + "learning_rate": 8.779746965777315e-05, + "loss": 0.214, + "step": 43590 + }, + { + "epoch": 1.6834626819568324, + "grad_norm": 2.1813528537750244, + "learning_rate": 8.777172863817136e-05, + "loss": 0.2897, + "step": 43600 + }, + { + "epoch": 1.6838487972508591, + "grad_norm": 0.2930634319782257, + "learning_rate": 8.774598761856958e-05, + "loss": 0.121, + "step": 43610 + }, + { + "epoch": 1.6842349125448859, + "grad_norm": 0.8081830739974976, + "learning_rate": 8.772024659896779e-05, + "loss": 0.2847, + "step": 43620 + }, + { + "epoch": 1.6846210278389127, + "grad_norm": 0.8421902060508728, + "learning_rate": 8.7694505579366e-05, + "loss": 0.3151, + "step": 43630 + }, + { + "epoch": 1.6850071431329394, + "grad_norm": 1.6624525785446167, + "learning_rate": 8.766876455976421e-05, + "loss": 0.1725, + "step": 43640 + }, + { + "epoch": 1.6853932584269664, + "grad_norm": 0.6831340789794922, + "learning_rate": 8.764302354016243e-05, + "loss": 0.432, + "step": 43650 + }, + { + "epoch": 1.685779373720993, + "grad_norm": 1.0642820596694946, + "learning_rate": 8.761728252056064e-05, + "loss": 0.2035, + "step": 43660 + }, + { + "epoch": 1.68616548901502, + "grad_norm": 0.6445997357368469, + "learning_rate": 8.759154150095887e-05, + "loss": 0.2407, + "step": 43670 + }, + { + "epoch": 1.6865516043090467, + "grad_norm": 0.1020059660077095, + "learning_rate": 8.756580048135707e-05, + "loss": 0.1744, + "step": 43680 + }, + { + "epoch": 1.6869377196030735, + "grad_norm": 0.3709975481033325, + "learning_rate": 8.754005946175528e-05, + "loss": 0.3713, + "step": 43690 + }, + { + "epoch": 1.6873238348971002, + "grad_norm": 0.5910777449607849, + "learning_rate": 8.75143184421535e-05, + "loss": 0.4291, + "step": 43700 + }, + { + "epoch": 1.687709950191127, + "grad_norm": 2.377362012863159, + "learning_rate": 8.748857742255171e-05, + "loss": 0.2871, + "step": 43710 + }, + { + "epoch": 
1.688096065485154, + "grad_norm": 0.46812891960144043, + "learning_rate": 8.746283640294992e-05, + "loss": 0.1998, + "step": 43720 + }, + { + "epoch": 1.6884821807791806, + "grad_norm": 1.391098141670227, + "learning_rate": 8.743709538334813e-05, + "loss": 0.3189, + "step": 43730 + }, + { + "epoch": 1.6888682960732075, + "grad_norm": 0.11116664111614227, + "learning_rate": 8.741135436374636e-05, + "loss": 0.2538, + "step": 43740 + }, + { + "epoch": 1.6892544113672343, + "grad_norm": 0.2116546928882599, + "learning_rate": 8.738561334414456e-05, + "loss": 0.2014, + "step": 43750 + }, + { + "epoch": 1.689640526661261, + "grad_norm": 2.3815419673919678, + "learning_rate": 8.735987232454277e-05, + "loss": 0.2395, + "step": 43760 + }, + { + "epoch": 1.6900266419552878, + "grad_norm": 0.11581381410360336, + "learning_rate": 8.733413130494099e-05, + "loss": 0.2699, + "step": 43770 + }, + { + "epoch": 1.6904127572493146, + "grad_norm": 0.25713813304901123, + "learning_rate": 8.73083902853392e-05, + "loss": 0.1651, + "step": 43780 + }, + { + "epoch": 1.6907988725433416, + "grad_norm": 0.3496115803718567, + "learning_rate": 8.728264926573741e-05, + "loss": 0.3175, + "step": 43790 + }, + { + "epoch": 1.6911849878373681, + "grad_norm": 0.4966486394405365, + "learning_rate": 8.725690824613564e-05, + "loss": 0.258, + "step": 43800 + }, + { + "epoch": 1.6915711031313951, + "grad_norm": 1.2760952711105347, + "learning_rate": 8.723116722653385e-05, + "loss": 0.0882, + "step": 43810 + }, + { + "epoch": 1.6919572184254217, + "grad_norm": 1.0095762014389038, + "learning_rate": 8.720542620693207e-05, + "loss": 0.2171, + "step": 43820 + }, + { + "epoch": 1.6923433337194487, + "grad_norm": 3.596276044845581, + "learning_rate": 8.717968518733027e-05, + "loss": 0.238, + "step": 43830 + }, + { + "epoch": 1.6927294490134754, + "grad_norm": 0.3974495530128479, + "learning_rate": 8.715394416772848e-05, + "loss": 0.1459, + "step": 43840 + }, + { + "epoch": 1.6931155643075022, + "grad_norm": 1.1426684856414795, + "learning_rate": 8.71282031481267e-05, + "loss": 0.297, + "step": 43850 + }, + { + "epoch": 1.693501679601529, + "grad_norm": 0.887981116771698, + "learning_rate": 8.710246212852492e-05, + "loss": 0.1999, + "step": 43860 + }, + { + "epoch": 1.6938877948955557, + "grad_norm": 2.5697977542877197, + "learning_rate": 8.707672110892313e-05, + "loss": 0.3186, + "step": 43870 + }, + { + "epoch": 1.6942739101895827, + "grad_norm": 1.0131279230117798, + "learning_rate": 8.705098008932135e-05, + "loss": 0.182, + "step": 43880 + }, + { + "epoch": 1.6946600254836093, + "grad_norm": 0.8319138288497925, + "learning_rate": 8.702523906971956e-05, + "loss": 0.2148, + "step": 43890 + }, + { + "epoch": 1.6950461407776363, + "grad_norm": 0.7270296216011047, + "learning_rate": 8.699949805011776e-05, + "loss": 0.1471, + "step": 43900 + }, + { + "epoch": 1.695432256071663, + "grad_norm": 1.2435482740402222, + "learning_rate": 8.697375703051597e-05, + "loss": 0.1446, + "step": 43910 + }, + { + "epoch": 1.6958183713656898, + "grad_norm": 0.5655059218406677, + "learning_rate": 8.69480160109142e-05, + "loss": 0.2765, + "step": 43920 + }, + { + "epoch": 1.6962044866597166, + "grad_norm": 1.7254856824874878, + "learning_rate": 8.692227499131241e-05, + "loss": 0.3449, + "step": 43930 + }, + { + "epoch": 1.6965906019537433, + "grad_norm": 0.5344254374504089, + "learning_rate": 8.689653397171063e-05, + "loss": 0.3847, + "step": 43940 + }, + { + "epoch": 1.6969767172477703, + "grad_norm": 0.7520522475242615, + "learning_rate": 
8.687079295210884e-05, + "loss": 0.2785, + "step": 43950 + }, + { + "epoch": 1.6973628325417969, + "grad_norm": 2.708897352218628, + "learning_rate": 8.684505193250705e-05, + "loss": 0.3298, + "step": 43960 + }, + { + "epoch": 1.6977489478358239, + "grad_norm": 0.06953504681587219, + "learning_rate": 8.681931091290525e-05, + "loss": 0.094, + "step": 43970 + }, + { + "epoch": 1.6981350631298504, + "grad_norm": 0.586360514163971, + "learning_rate": 8.679356989330347e-05, + "loss": 0.2969, + "step": 43980 + }, + { + "epoch": 1.6985211784238774, + "grad_norm": 0.5543690919876099, + "learning_rate": 8.67678288737017e-05, + "loss": 0.1414, + "step": 43990 + }, + { + "epoch": 1.6989072937179042, + "grad_norm": 1.7767741680145264, + "learning_rate": 8.674208785409991e-05, + "loss": 0.3374, + "step": 44000 + }, + { + "epoch": 1.699293409011931, + "grad_norm": 1.463431477546692, + "learning_rate": 8.671634683449812e-05, + "loss": 0.2547, + "step": 44010 + }, + { + "epoch": 1.699679524305958, + "grad_norm": 0.3490108251571655, + "learning_rate": 8.669060581489633e-05, + "loss": 0.2969, + "step": 44020 + }, + { + "epoch": 1.7000656395999845, + "grad_norm": 0.4525464177131653, + "learning_rate": 8.666486479529455e-05, + "loss": 0.2982, + "step": 44030 + }, + { + "epoch": 1.7004517548940115, + "grad_norm": 0.3723921775817871, + "learning_rate": 8.663912377569276e-05, + "loss": 0.2818, + "step": 44040 + }, + { + "epoch": 1.700837870188038, + "grad_norm": 0.5301186442375183, + "learning_rate": 8.661338275609097e-05, + "loss": 0.1871, + "step": 44050 + }, + { + "epoch": 1.701223985482065, + "grad_norm": 2.890627861022949, + "learning_rate": 8.658764173648919e-05, + "loss": 0.3608, + "step": 44060 + }, + { + "epoch": 1.7016101007760918, + "grad_norm": 1.4061124324798584, + "learning_rate": 8.65619007168874e-05, + "loss": 0.203, + "step": 44070 + }, + { + "epoch": 1.7019962160701185, + "grad_norm": 2.882411241531372, + "learning_rate": 8.653615969728561e-05, + "loss": 0.2203, + "step": 44080 + }, + { + "epoch": 1.7023823313641453, + "grad_norm": 1.3079861402511597, + "learning_rate": 8.651041867768383e-05, + "loss": 0.1199, + "step": 44090 + }, + { + "epoch": 1.702768446658172, + "grad_norm": 4.284554481506348, + "learning_rate": 8.648467765808204e-05, + "loss": 0.2187, + "step": 44100 + }, + { + "epoch": 1.703154561952199, + "grad_norm": 0.14506328105926514, + "learning_rate": 8.645893663848025e-05, + "loss": 0.2822, + "step": 44110 + }, + { + "epoch": 1.7035406772462256, + "grad_norm": 0.48547646403312683, + "learning_rate": 8.643319561887847e-05, + "loss": 0.2239, + "step": 44120 + }, + { + "epoch": 1.7039267925402526, + "grad_norm": 1.2987794876098633, + "learning_rate": 8.640745459927668e-05, + "loss": 0.1554, + "step": 44130 + }, + { + "epoch": 1.7043129078342794, + "grad_norm": 0.25391751527786255, + "learning_rate": 8.63817135796749e-05, + "loss": 0.2635, + "step": 44140 + }, + { + "epoch": 1.7046990231283061, + "grad_norm": 1.7552175521850586, + "learning_rate": 8.635597256007311e-05, + "loss": 0.2762, + "step": 44150 + }, + { + "epoch": 1.705085138422333, + "grad_norm": 0.8608360290527344, + "learning_rate": 8.633023154047132e-05, + "loss": 0.2384, + "step": 44160 + }, + { + "epoch": 1.7054712537163597, + "grad_norm": 2.4618284702301025, + "learning_rate": 8.630449052086955e-05, + "loss": 0.3383, + "step": 44170 + }, + { + "epoch": 1.7058573690103866, + "grad_norm": 0.9636523127555847, + "learning_rate": 8.627874950126775e-05, + "loss": 0.2036, + "step": 44180 + }, + { + "epoch": 
1.7062434843044132, + "grad_norm": 0.3806803226470947, + "learning_rate": 8.625300848166596e-05, + "loss": 0.1378, + "step": 44190 + }, + { + "epoch": 1.7066295995984402, + "grad_norm": 0.31704220175743103, + "learning_rate": 8.622726746206417e-05, + "loss": 0.2171, + "step": 44200 + }, + { + "epoch": 1.7070157148924667, + "grad_norm": 2.3355181217193604, + "learning_rate": 8.620152644246239e-05, + "loss": 0.2983, + "step": 44210 + }, + { + "epoch": 1.7074018301864937, + "grad_norm": 1.3819530010223389, + "learning_rate": 8.61757854228606e-05, + "loss": 0.3944, + "step": 44220 + }, + { + "epoch": 1.7077879454805205, + "grad_norm": 0.5096393823623657, + "learning_rate": 8.615004440325883e-05, + "loss": 0.3142, + "step": 44230 + }, + { + "epoch": 1.7081740607745473, + "grad_norm": 0.7113396525382996, + "learning_rate": 8.612430338365704e-05, + "loss": 0.1644, + "step": 44240 + }, + { + "epoch": 1.708560176068574, + "grad_norm": 0.3259173333644867, + "learning_rate": 8.609856236405524e-05, + "loss": 0.208, + "step": 44250 + }, + { + "epoch": 1.7089462913626008, + "grad_norm": 0.1341869980096817, + "learning_rate": 8.607282134445345e-05, + "loss": 0.1642, + "step": 44260 + }, + { + "epoch": 1.7093324066566278, + "grad_norm": 1.0947731733322144, + "learning_rate": 8.604708032485167e-05, + "loss": 0.3079, + "step": 44270 + }, + { + "epoch": 1.7097185219506543, + "grad_norm": 1.065469741821289, + "learning_rate": 8.602133930524988e-05, + "loss": 0.2845, + "step": 44280 + }, + { + "epoch": 1.7101046372446813, + "grad_norm": 0.23346304893493652, + "learning_rate": 8.59955982856481e-05, + "loss": 0.1428, + "step": 44290 + }, + { + "epoch": 1.710490752538708, + "grad_norm": 0.9997304677963257, + "learning_rate": 8.596985726604632e-05, + "loss": 0.2313, + "step": 44300 + }, + { + "epoch": 1.7108768678327348, + "grad_norm": 0.2540823519229889, + "learning_rate": 8.594411624644453e-05, + "loss": 0.131, + "step": 44310 + }, + { + "epoch": 1.7112629831267616, + "grad_norm": 0.35752159357070923, + "learning_rate": 8.591837522684273e-05, + "loss": 0.2414, + "step": 44320 + }, + { + "epoch": 1.7116490984207884, + "grad_norm": 0.9997861385345459, + "learning_rate": 8.589263420724095e-05, + "loss": 0.2726, + "step": 44330 + }, + { + "epoch": 1.7120352137148154, + "grad_norm": 0.1516636461019516, + "learning_rate": 8.586689318763916e-05, + "loss": 0.0939, + "step": 44340 + }, + { + "epoch": 1.712421329008842, + "grad_norm": 1.5101124048233032, + "learning_rate": 8.584115216803737e-05, + "loss": 0.1331, + "step": 44350 + }, + { + "epoch": 1.712807444302869, + "grad_norm": 1.8988938331604004, + "learning_rate": 8.58154111484356e-05, + "loss": 0.104, + "step": 44360 + }, + { + "epoch": 1.7131935595968957, + "grad_norm": 0.50355464220047, + "learning_rate": 8.578967012883381e-05, + "loss": 0.2283, + "step": 44370 + }, + { + "epoch": 1.7135796748909224, + "grad_norm": 1.3266645669937134, + "learning_rate": 8.576392910923203e-05, + "loss": 0.2639, + "step": 44380 + }, + { + "epoch": 1.7139657901849492, + "grad_norm": 0.5714776515960693, + "learning_rate": 8.573818808963023e-05, + "loss": 0.3018, + "step": 44390 + }, + { + "epoch": 1.714351905478976, + "grad_norm": 2.2676408290863037, + "learning_rate": 8.571244707002844e-05, + "loss": 0.1851, + "step": 44400 + }, + { + "epoch": 1.714738020773003, + "grad_norm": 1.338866114616394, + "learning_rate": 8.568670605042665e-05, + "loss": 0.198, + "step": 44410 + }, + { + "epoch": 1.7151241360670295, + "grad_norm": 0.7841195464134216, + "learning_rate": 
8.566096503082488e-05, + "loss": 0.1689, + "step": 44420 + }, + { + "epoch": 1.7155102513610565, + "grad_norm": 2.0101919174194336, + "learning_rate": 8.56352240112231e-05, + "loss": 0.278, + "step": 44430 + }, + { + "epoch": 1.715896366655083, + "grad_norm": 0.3661075830459595, + "learning_rate": 8.560948299162131e-05, + "loss": 0.19, + "step": 44440 + }, + { + "epoch": 1.71628248194911, + "grad_norm": 1.3221279382705688, + "learning_rate": 8.558374197201952e-05, + "loss": 0.2621, + "step": 44450 + }, + { + "epoch": 1.7166685972431368, + "grad_norm": 1.1006388664245605, + "learning_rate": 8.555800095241773e-05, + "loss": 0.1661, + "step": 44460 + }, + { + "epoch": 1.7170547125371636, + "grad_norm": 0.6990749835968018, + "learning_rate": 8.553225993281593e-05, + "loss": 0.1234, + "step": 44470 + }, + { + "epoch": 1.7174408278311903, + "grad_norm": 1.1236909627914429, + "learning_rate": 8.550651891321416e-05, + "loss": 0.3528, + "step": 44480 + }, + { + "epoch": 1.717826943125217, + "grad_norm": 1.3502060174942017, + "learning_rate": 8.548077789361237e-05, + "loss": 0.1035, + "step": 44490 + }, + { + "epoch": 1.718213058419244, + "grad_norm": 0.6443360447883606, + "learning_rate": 8.545503687401059e-05, + "loss": 0.257, + "step": 44500 + }, + { + "epoch": 1.7185991737132706, + "grad_norm": 1.864953875541687, + "learning_rate": 8.54292958544088e-05, + "loss": 0.1791, + "step": 44510 + }, + { + "epoch": 1.7189852890072976, + "grad_norm": 0.8403190970420837, + "learning_rate": 8.540355483480701e-05, + "loss": 0.1907, + "step": 44520 + }, + { + "epoch": 1.7193714043013244, + "grad_norm": 2.308983325958252, + "learning_rate": 8.537781381520523e-05, + "loss": 0.3784, + "step": 44530 + }, + { + "epoch": 1.7197575195953512, + "grad_norm": 0.3229024112224579, + "learning_rate": 8.535207279560343e-05, + "loss": 0.2405, + "step": 44540 + }, + { + "epoch": 1.720143634889378, + "grad_norm": 0.5669896602630615, + "learning_rate": 8.532633177600165e-05, + "loss": 0.2103, + "step": 44550 + }, + { + "epoch": 1.7205297501834047, + "grad_norm": 0.9105948805809021, + "learning_rate": 8.530059075639987e-05, + "loss": 0.1816, + "step": 44560 + }, + { + "epoch": 1.7209158654774317, + "grad_norm": 0.4213886260986328, + "learning_rate": 8.527484973679808e-05, + "loss": 0.1735, + "step": 44570 + }, + { + "epoch": 1.7213019807714582, + "grad_norm": 2.9891350269317627, + "learning_rate": 8.52491087171963e-05, + "loss": 0.3105, + "step": 44580 + }, + { + "epoch": 1.7216880960654852, + "grad_norm": 2.2383408546447754, + "learning_rate": 8.52233676975945e-05, + "loss": 0.2677, + "step": 44590 + }, + { + "epoch": 1.722074211359512, + "grad_norm": 0.128885418176651, + "learning_rate": 8.519762667799272e-05, + "loss": 0.2, + "step": 44600 + }, + { + "epoch": 1.7224603266535388, + "grad_norm": 1.021690845489502, + "learning_rate": 8.517188565839093e-05, + "loss": 0.3636, + "step": 44610 + }, + { + "epoch": 1.7228464419475655, + "grad_norm": 3.629401445388794, + "learning_rate": 8.514614463878915e-05, + "loss": 0.3223, + "step": 44620 + }, + { + "epoch": 1.7232325572415923, + "grad_norm": 2.4377548694610596, + "learning_rate": 8.512040361918736e-05, + "loss": 0.2448, + "step": 44630 + }, + { + "epoch": 1.7236186725356193, + "grad_norm": 0.6085236668586731, + "learning_rate": 8.509466259958557e-05, + "loss": 0.2202, + "step": 44640 + }, + { + "epoch": 1.7240047878296458, + "grad_norm": 1.4205585718154907, + "learning_rate": 8.506892157998379e-05, + "loss": 0.1697, + "step": 44650 + }, + { + "epoch": 1.7243909031236728, + 
"grad_norm": 2.6096320152282715, + "learning_rate": 8.5043180560382e-05, + "loss": 0.1657, + "step": 44660 + }, + { + "epoch": 1.7247770184176994, + "grad_norm": 0.155767560005188, + "learning_rate": 8.501743954078021e-05, + "loss": 0.162, + "step": 44670 + }, + { + "epoch": 1.7251631337117264, + "grad_norm": 0.30843400955200195, + "learning_rate": 8.499169852117843e-05, + "loss": 0.2844, + "step": 44680 + }, + { + "epoch": 1.7255492490057531, + "grad_norm": 0.7207142114639282, + "learning_rate": 8.496595750157664e-05, + "loss": 0.3617, + "step": 44690 + }, + { + "epoch": 1.72593536429978, + "grad_norm": 0.6151508092880249, + "learning_rate": 8.494021648197485e-05, + "loss": 0.1202, + "step": 44700 + }, + { + "epoch": 1.7263214795938067, + "grad_norm": 1.712505578994751, + "learning_rate": 8.491447546237307e-05, + "loss": 0.3624, + "step": 44710 + }, + { + "epoch": 1.7267075948878334, + "grad_norm": 2.3895373344421387, + "learning_rate": 8.488873444277128e-05, + "loss": 0.3347, + "step": 44720 + }, + { + "epoch": 1.7270937101818604, + "grad_norm": 0.3279499411582947, + "learning_rate": 8.48629934231695e-05, + "loss": 0.1821, + "step": 44730 + }, + { + "epoch": 1.727479825475887, + "grad_norm": 0.9812091588973999, + "learning_rate": 8.48372524035677e-05, + "loss": 0.3173, + "step": 44740 + }, + { + "epoch": 1.727865940769914, + "grad_norm": 0.36781829595565796, + "learning_rate": 8.481151138396592e-05, + "loss": 0.2766, + "step": 44750 + }, + { + "epoch": 1.7282520560639407, + "grad_norm": 2.1118052005767822, + "learning_rate": 8.478577036436413e-05, + "loss": 0.4954, + "step": 44760 + }, + { + "epoch": 1.7286381713579675, + "grad_norm": 0.7159673571586609, + "learning_rate": 8.476002934476235e-05, + "loss": 0.3877, + "step": 44770 + }, + { + "epoch": 1.7290242866519943, + "grad_norm": 0.16327527165412903, + "learning_rate": 8.473428832516056e-05, + "loss": 0.2975, + "step": 44780 + }, + { + "epoch": 1.729410401946021, + "grad_norm": 1.2890655994415283, + "learning_rate": 8.470854730555877e-05, + "loss": 0.2056, + "step": 44790 + }, + { + "epoch": 1.729796517240048, + "grad_norm": 0.7911620140075684, + "learning_rate": 8.4682806285957e-05, + "loss": 0.2162, + "step": 44800 + }, + { + "epoch": 1.7301826325340746, + "grad_norm": 0.1271449327468872, + "learning_rate": 8.465706526635521e-05, + "loss": 0.1821, + "step": 44810 + }, + { + "epoch": 1.7305687478281016, + "grad_norm": 1.1765756607055664, + "learning_rate": 8.463132424675341e-05, + "loss": 0.1826, + "step": 44820 + }, + { + "epoch": 1.7309548631221283, + "grad_norm": 1.5291017293930054, + "learning_rate": 8.460558322715163e-05, + "loss": 0.2125, + "step": 44830 + }, + { + "epoch": 1.731340978416155, + "grad_norm": 1.1815464496612549, + "learning_rate": 8.457984220754984e-05, + "loss": 0.1488, + "step": 44840 + }, + { + "epoch": 1.7317270937101819, + "grad_norm": 2.127589464187622, + "learning_rate": 8.455410118794805e-05, + "loss": 0.3758, + "step": 44850 + }, + { + "epoch": 1.7321132090042086, + "grad_norm": 0.5369740724563599, + "learning_rate": 8.452836016834628e-05, + "loss": 0.2725, + "step": 44860 + }, + { + "epoch": 1.7324993242982356, + "grad_norm": 1.038955807685852, + "learning_rate": 8.450261914874449e-05, + "loss": 0.233, + "step": 44870 + }, + { + "epoch": 1.7328854395922622, + "grad_norm": 1.5458402633666992, + "learning_rate": 8.44768781291427e-05, + "loss": 0.3846, + "step": 44880 + }, + { + "epoch": 1.7332715548862891, + "grad_norm": 0.8488010168075562, + "learning_rate": 8.44511371095409e-05, + "loss": 0.2022, + 
"step": 44890 + }, + { + "epoch": 1.7336576701803157, + "grad_norm": 2.4855663776397705, + "learning_rate": 8.442539608993912e-05, + "loss": 0.2071, + "step": 44900 + }, + { + "epoch": 1.7340437854743427, + "grad_norm": 5.918638229370117, + "learning_rate": 8.439965507033733e-05, + "loss": 0.1962, + "step": 44910 + }, + { + "epoch": 1.7344299007683694, + "grad_norm": 1.9365994930267334, + "learning_rate": 8.437391405073556e-05, + "loss": 0.1313, + "step": 44920 + }, + { + "epoch": 1.7348160160623962, + "grad_norm": 1.1711100339889526, + "learning_rate": 8.434817303113377e-05, + "loss": 0.2938, + "step": 44930 + }, + { + "epoch": 1.735202131356423, + "grad_norm": 1.0350059270858765, + "learning_rate": 8.432243201153199e-05, + "loss": 0.1813, + "step": 44940 + }, + { + "epoch": 1.7355882466504498, + "grad_norm": 0.9831900596618652, + "learning_rate": 8.42966909919302e-05, + "loss": 0.1572, + "step": 44950 + }, + { + "epoch": 1.7359743619444767, + "grad_norm": 2.4830219745635986, + "learning_rate": 8.42709499723284e-05, + "loss": 0.2408, + "step": 44960 + }, + { + "epoch": 1.7363604772385033, + "grad_norm": 2.513667345046997, + "learning_rate": 8.424520895272661e-05, + "loss": 0.3392, + "step": 44970 + }, + { + "epoch": 1.7367465925325303, + "grad_norm": 1.1947827339172363, + "learning_rate": 8.421946793312484e-05, + "loss": 0.1314, + "step": 44980 + }, + { + "epoch": 1.737132707826557, + "grad_norm": 0.9299411773681641, + "learning_rate": 8.419372691352305e-05, + "loss": 0.1691, + "step": 44990 + }, + { + "epoch": 1.7375188231205838, + "grad_norm": 2.9388837814331055, + "learning_rate": 8.416798589392127e-05, + "loss": 0.389, + "step": 45000 + }, + { + "epoch": 1.7379049384146106, + "grad_norm": 0.20148181915283203, + "learning_rate": 8.414224487431948e-05, + "loss": 0.2589, + "step": 45010 + }, + { + "epoch": 1.7382910537086373, + "grad_norm": 1.1276886463165283, + "learning_rate": 8.411650385471769e-05, + "loss": 0.2772, + "step": 45020 + }, + { + "epoch": 1.7386771690026643, + "grad_norm": 0.14743736386299133, + "learning_rate": 8.40907628351159e-05, + "loss": 0.2235, + "step": 45030 + }, + { + "epoch": 1.7390632842966909, + "grad_norm": 1.7028512954711914, + "learning_rate": 8.40650218155141e-05, + "loss": 0.262, + "step": 45040 + }, + { + "epoch": 1.7394493995907179, + "grad_norm": 0.27506566047668457, + "learning_rate": 8.403928079591233e-05, + "loss": 0.1546, + "step": 45050 + }, + { + "epoch": 1.7398355148847446, + "grad_norm": 0.11555200815200806, + "learning_rate": 8.401353977631055e-05, + "loss": 0.2591, + "step": 45060 + }, + { + "epoch": 1.7402216301787714, + "grad_norm": 2.141800880432129, + "learning_rate": 8.398779875670876e-05, + "loss": 0.1974, + "step": 45070 + }, + { + "epoch": 1.7406077454727982, + "grad_norm": 0.8879682421684265, + "learning_rate": 8.396205773710697e-05, + "loss": 0.2192, + "step": 45080 + }, + { + "epoch": 1.740993860766825, + "grad_norm": 0.5697862505912781, + "learning_rate": 8.393631671750519e-05, + "loss": 0.1556, + "step": 45090 + }, + { + "epoch": 1.741379976060852, + "grad_norm": 1.5055205821990967, + "learning_rate": 8.39105756979034e-05, + "loss": 0.2199, + "step": 45100 + }, + { + "epoch": 1.7417660913548785, + "grad_norm": 1.4144301414489746, + "learning_rate": 8.388483467830161e-05, + "loss": 0.1873, + "step": 45110 + }, + { + "epoch": 1.7421522066489055, + "grad_norm": 2.419147253036499, + "learning_rate": 8.385909365869983e-05, + "loss": 0.3444, + "step": 45120 + }, + { + "epoch": 1.742538321942932, + "grad_norm": 1.1189093589782715, + 
"learning_rate": 8.383335263909804e-05, + "loss": 0.2641, + "step": 45130 + }, + { + "epoch": 1.742924437236959, + "grad_norm": 0.44919779896736145, + "learning_rate": 8.380761161949625e-05, + "loss": 0.1945, + "step": 45140 + }, + { + "epoch": 1.7433105525309858, + "grad_norm": 3.4231624603271484, + "learning_rate": 8.378187059989447e-05, + "loss": 0.372, + "step": 45150 + }, + { + "epoch": 1.7436966678250125, + "grad_norm": 1.382497787475586, + "learning_rate": 8.375612958029268e-05, + "loss": 0.1532, + "step": 45160 + }, + { + "epoch": 1.7440827831190393, + "grad_norm": 1.9219565391540527, + "learning_rate": 8.373038856069089e-05, + "loss": 0.3255, + "step": 45170 + }, + { + "epoch": 1.744468898413066, + "grad_norm": 1.2347924709320068, + "learning_rate": 8.37046475410891e-05, + "loss": 0.2786, + "step": 45180 + }, + { + "epoch": 1.744855013707093, + "grad_norm": 0.1424872726202011, + "learning_rate": 8.367890652148732e-05, + "loss": 0.2561, + "step": 45190 + }, + { + "epoch": 1.7452411290011196, + "grad_norm": 0.1864137351512909, + "learning_rate": 8.365316550188553e-05, + "loss": 0.4099, + "step": 45200 + }, + { + "epoch": 1.7456272442951466, + "grad_norm": 0.13210314512252808, + "learning_rate": 8.362742448228375e-05, + "loss": 0.2684, + "step": 45210 + }, + { + "epoch": 1.7460133595891734, + "grad_norm": 0.14317531883716583, + "learning_rate": 8.360168346268196e-05, + "loss": 0.1777, + "step": 45220 + }, + { + "epoch": 1.7463994748832001, + "grad_norm": 0.9194528460502625, + "learning_rate": 8.357594244308019e-05, + "loss": 0.2047, + "step": 45230 + }, + { + "epoch": 1.746785590177227, + "grad_norm": 0.13146016001701355, + "learning_rate": 8.355020142347839e-05, + "loss": 0.1659, + "step": 45240 + }, + { + "epoch": 1.7471717054712537, + "grad_norm": 0.804329514503479, + "learning_rate": 8.35244604038766e-05, + "loss": 0.1685, + "step": 45250 + }, + { + "epoch": 1.7475578207652807, + "grad_norm": 2.3637967109680176, + "learning_rate": 8.349871938427481e-05, + "loss": 0.234, + "step": 45260 + }, + { + "epoch": 1.7479439360593072, + "grad_norm": 0.550857663154602, + "learning_rate": 8.347297836467303e-05, + "loss": 0.2283, + "step": 45270 + }, + { + "epoch": 1.7483300513533342, + "grad_norm": 0.5919561982154846, + "learning_rate": 8.344723734507124e-05, + "loss": 0.229, + "step": 45280 + }, + { + "epoch": 1.7487161666473607, + "grad_norm": 2.1318750381469727, + "learning_rate": 8.342149632546945e-05, + "loss": 0.2161, + "step": 45290 + }, + { + "epoch": 1.7491022819413877, + "grad_norm": 2.0451972484588623, + "learning_rate": 8.339575530586768e-05, + "loss": 0.1845, + "step": 45300 + }, + { + "epoch": 1.7494883972354145, + "grad_norm": 0.1041000485420227, + "learning_rate": 8.337001428626588e-05, + "loss": 0.1473, + "step": 45310 + }, + { + "epoch": 1.7498745125294413, + "grad_norm": 1.8641316890716553, + "learning_rate": 8.334427326666409e-05, + "loss": 0.2287, + "step": 45320 + }, + { + "epoch": 1.7502606278234683, + "grad_norm": 0.5701905488967896, + "learning_rate": 8.33185322470623e-05, + "loss": 0.1356, + "step": 45330 + }, + { + "epoch": 1.7506467431174948, + "grad_norm": 0.78929603099823, + "learning_rate": 8.329279122746052e-05, + "loss": 0.1407, + "step": 45340 + }, + { + "epoch": 1.7510328584115218, + "grad_norm": 1.4745780229568481, + "learning_rate": 8.326705020785873e-05, + "loss": 0.2731, + "step": 45350 + }, + { + "epoch": 1.7514189737055483, + "grad_norm": 0.5546283721923828, + "learning_rate": 8.324130918825696e-05, + "loss": 0.252, + "step": 45360 + }, + { + "epoch": 
1.7518050889995753, + "grad_norm": 0.6111850142478943, + "learning_rate": 8.321556816865517e-05, + "loss": 0.3657, + "step": 45370 + }, + { + "epoch": 1.752191204293602, + "grad_norm": 0.9489399790763855, + "learning_rate": 8.318982714905339e-05, + "loss": 0.2214, + "step": 45380 + }, + { + "epoch": 1.7525773195876289, + "grad_norm": 0.2951168119907379, + "learning_rate": 8.316408612945159e-05, + "loss": 0.2425, + "step": 45390 + }, + { + "epoch": 1.7529634348816556, + "grad_norm": 0.13124701380729675, + "learning_rate": 8.31383451098498e-05, + "loss": 0.1943, + "step": 45400 + }, + { + "epoch": 1.7533495501756824, + "grad_norm": 0.787786066532135, + "learning_rate": 8.311260409024801e-05, + "loss": 0.2009, + "step": 45410 + }, + { + "epoch": 1.7537356654697094, + "grad_norm": 0.30316632986068726, + "learning_rate": 8.308686307064624e-05, + "loss": 0.2426, + "step": 45420 + }, + { + "epoch": 1.754121780763736, + "grad_norm": 1.0703738927841187, + "learning_rate": 8.306112205104445e-05, + "loss": 0.2247, + "step": 45430 + }, + { + "epoch": 1.754507896057763, + "grad_norm": 0.2669302821159363, + "learning_rate": 8.303538103144267e-05, + "loss": 0.2076, + "step": 45440 + }, + { + "epoch": 1.7548940113517897, + "grad_norm": 2.0138015747070312, + "learning_rate": 8.300964001184088e-05, + "loss": 0.3064, + "step": 45450 + }, + { + "epoch": 1.7552801266458165, + "grad_norm": 4.026096820831299, + "learning_rate": 8.298389899223908e-05, + "loss": 0.2664, + "step": 45460 + }, + { + "epoch": 1.7556662419398432, + "grad_norm": 0.7756350636482239, + "learning_rate": 8.295815797263729e-05, + "loss": 0.224, + "step": 45470 + }, + { + "epoch": 1.75605235723387, + "grad_norm": 0.8312183618545532, + "learning_rate": 8.293241695303552e-05, + "loss": 0.393, + "step": 45480 + }, + { + "epoch": 1.756438472527897, + "grad_norm": 1.6933070421218872, + "learning_rate": 8.290667593343373e-05, + "loss": 0.2006, + "step": 45490 + }, + { + "epoch": 1.7568245878219235, + "grad_norm": 0.7471343278884888, + "learning_rate": 8.288093491383195e-05, + "loss": 0.1356, + "step": 45500 + }, + { + "epoch": 1.7572107031159505, + "grad_norm": 2.5663211345672607, + "learning_rate": 8.285519389423016e-05, + "loss": 0.2545, + "step": 45510 + }, + { + "epoch": 1.757596818409977, + "grad_norm": 2.9056997299194336, + "learning_rate": 8.282945287462837e-05, + "loss": 0.2321, + "step": 45520 + }, + { + "epoch": 1.757982933704004, + "grad_norm": 0.6775566935539246, + "learning_rate": 8.280371185502657e-05, + "loss": 0.1703, + "step": 45530 + }, + { + "epoch": 1.7583690489980308, + "grad_norm": 1.0130302906036377, + "learning_rate": 8.27779708354248e-05, + "loss": 0.3204, + "step": 45540 + }, + { + "epoch": 1.7587551642920576, + "grad_norm": 0.8566673398017883, + "learning_rate": 8.275222981582301e-05, + "loss": 0.2069, + "step": 45550 + }, + { + "epoch": 1.7591412795860843, + "grad_norm": 1.0929473638534546, + "learning_rate": 8.272648879622122e-05, + "loss": 0.2631, + "step": 45560 + }, + { + "epoch": 1.7595273948801111, + "grad_norm": 1.7695512771606445, + "learning_rate": 8.270074777661944e-05, + "loss": 0.2028, + "step": 45570 + }, + { + "epoch": 1.759913510174138, + "grad_norm": 1.5217735767364502, + "learning_rate": 8.267500675701765e-05, + "loss": 0.1925, + "step": 45580 + }, + { + "epoch": 1.7602996254681647, + "grad_norm": 0.639868438243866, + "learning_rate": 8.264926573741586e-05, + "loss": 0.3473, + "step": 45590 + }, + { + "epoch": 1.7606857407621916, + "grad_norm": 0.3286214768886566, + "learning_rate": 
8.262352471781408e-05, + "loss": 0.1136, + "step": 45600 + }, + { + "epoch": 1.7610718560562184, + "grad_norm": 1.0162252187728882, + "learning_rate": 8.259778369821229e-05, + "loss": 0.1108, + "step": 45610 + }, + { + "epoch": 1.7614579713502452, + "grad_norm": 0.8821909427642822, + "learning_rate": 8.25720426786105e-05, + "loss": 0.2852, + "step": 45620 + }, + { + "epoch": 1.761844086644272, + "grad_norm": 0.7297468781471252, + "learning_rate": 8.254630165900872e-05, + "loss": 0.187, + "step": 45630 + }, + { + "epoch": 1.7622302019382987, + "grad_norm": 2.8545424938201904, + "learning_rate": 8.252056063940693e-05, + "loss": 0.2441, + "step": 45640 + }, + { + "epoch": 1.7626163172323257, + "grad_norm": 0.5221932530403137, + "learning_rate": 8.249481961980514e-05, + "loss": 0.1934, + "step": 45650 + }, + { + "epoch": 1.7630024325263522, + "grad_norm": 1.1896415948867798, + "learning_rate": 8.246907860020336e-05, + "loss": 0.2149, + "step": 45660 + }, + { + "epoch": 1.7633885478203792, + "grad_norm": 0.28229987621307373, + "learning_rate": 8.244333758060157e-05, + "loss": 0.2514, + "step": 45670 + }, + { + "epoch": 1.763774663114406, + "grad_norm": 1.2415190935134888, + "learning_rate": 8.241759656099978e-05, + "loss": 0.4076, + "step": 45680 + }, + { + "epoch": 1.7641607784084328, + "grad_norm": 2.535752296447754, + "learning_rate": 8.2391855541398e-05, + "loss": 0.1628, + "step": 45690 + }, + { + "epoch": 1.7645468937024595, + "grad_norm": 1.8700125217437744, + "learning_rate": 8.236611452179621e-05, + "loss": 0.2549, + "step": 45700 + }, + { + "epoch": 1.7649330089964863, + "grad_norm": 0.5365914702415466, + "learning_rate": 8.234037350219442e-05, + "loss": 0.0801, + "step": 45710 + }, + { + "epoch": 1.7653191242905133, + "grad_norm": 3.5979111194610596, + "learning_rate": 8.231463248259264e-05, + "loss": 0.3129, + "step": 45720 + }, + { + "epoch": 1.7657052395845398, + "grad_norm": 2.5577943325042725, + "learning_rate": 8.228889146299086e-05, + "loss": 0.2171, + "step": 45730 + }, + { + "epoch": 1.7660913548785668, + "grad_norm": 2.128424882888794, + "learning_rate": 8.226315044338906e-05, + "loss": 0.1687, + "step": 45740 + }, + { + "epoch": 1.7664774701725934, + "grad_norm": 0.41651803255081177, + "learning_rate": 8.223740942378728e-05, + "loss": 0.2342, + "step": 45750 + }, + { + "epoch": 1.7668635854666204, + "grad_norm": 0.8309571146965027, + "learning_rate": 8.221166840418549e-05, + "loss": 0.2406, + "step": 45760 + }, + { + "epoch": 1.7672497007606471, + "grad_norm": 1.7219020128250122, + "learning_rate": 8.21859273845837e-05, + "loss": 0.2764, + "step": 45770 + }, + { + "epoch": 1.767635816054674, + "grad_norm": 1.5574841499328613, + "learning_rate": 8.216018636498192e-05, + "loss": 0.3413, + "step": 45780 + }, + { + "epoch": 1.7680219313487007, + "grad_norm": 0.48963573575019836, + "learning_rate": 8.213444534538014e-05, + "loss": 0.2381, + "step": 45790 + }, + { + "epoch": 1.7684080466427274, + "grad_norm": 2.597996711730957, + "learning_rate": 8.210870432577836e-05, + "loss": 0.2625, + "step": 45800 + }, + { + "epoch": 1.7687941619367544, + "grad_norm": 1.0208812952041626, + "learning_rate": 8.208296330617656e-05, + "loss": 0.3519, + "step": 45810 + }, + { + "epoch": 1.769180277230781, + "grad_norm": 0.6976078748703003, + "learning_rate": 8.205722228657477e-05, + "loss": 0.2075, + "step": 45820 + }, + { + "epoch": 1.769566392524808, + "grad_norm": 1.534698247909546, + "learning_rate": 8.203148126697298e-05, + "loss": 0.1831, + "step": 45830 + }, + { + "epoch": 
1.7699525078188347, + "grad_norm": 1.5414319038391113, + "learning_rate": 8.20057402473712e-05, + "loss": 0.2466, + "step": 45840 + }, + { + "epoch": 1.7703386231128615, + "grad_norm": 0.6082043051719666, + "learning_rate": 8.197999922776941e-05, + "loss": 0.2598, + "step": 45850 + }, + { + "epoch": 1.7707247384068883, + "grad_norm": 1.3977060317993164, + "learning_rate": 8.195425820816764e-05, + "loss": 0.2733, + "step": 45860 + }, + { + "epoch": 1.771110853700915, + "grad_norm": 1.2991368770599365, + "learning_rate": 8.192851718856585e-05, + "loss": 0.3078, + "step": 45870 + }, + { + "epoch": 1.771496968994942, + "grad_norm": 0.2851855754852295, + "learning_rate": 8.190277616896405e-05, + "loss": 0.3315, + "step": 45880 + }, + { + "epoch": 1.7718830842889686, + "grad_norm": 0.7140517234802246, + "learning_rate": 8.187703514936226e-05, + "loss": 0.2454, + "step": 45890 + }, + { + "epoch": 1.7722691995829956, + "grad_norm": 2.3793671131134033, + "learning_rate": 8.185129412976048e-05, + "loss": 0.3749, + "step": 45900 + }, + { + "epoch": 1.7726553148770223, + "grad_norm": 0.6969908475875854, + "learning_rate": 8.182555311015869e-05, + "loss": 0.2849, + "step": 45910 + }, + { + "epoch": 1.773041430171049, + "grad_norm": 0.7409896850585938, + "learning_rate": 8.179981209055692e-05, + "loss": 0.3614, + "step": 45920 + }, + { + "epoch": 1.7734275454650759, + "grad_norm": 2.1562981605529785, + "learning_rate": 8.177407107095513e-05, + "loss": 0.3234, + "step": 45930 + }, + { + "epoch": 1.7738136607591026, + "grad_norm": 0.8214607834815979, + "learning_rate": 8.174833005135334e-05, + "loss": 0.3747, + "step": 45940 + }, + { + "epoch": 1.7741997760531296, + "grad_norm": 1.1292701959609985, + "learning_rate": 8.172258903175154e-05, + "loss": 0.2048, + "step": 45950 + }, + { + "epoch": 1.7745858913471562, + "grad_norm": 0.968303918838501, + "learning_rate": 8.169684801214976e-05, + "loss": 0.1863, + "step": 45960 + }, + { + "epoch": 1.7749720066411832, + "grad_norm": 0.1449265480041504, + "learning_rate": 8.167110699254797e-05, + "loss": 0.2583, + "step": 45970 + }, + { + "epoch": 1.7753581219352097, + "grad_norm": 0.9772675633430481, + "learning_rate": 8.16453659729462e-05, + "loss": 0.372, + "step": 45980 + }, + { + "epoch": 1.7757442372292367, + "grad_norm": 1.8147861957550049, + "learning_rate": 8.161962495334441e-05, + "loss": 0.2404, + "step": 45990 + }, + { + "epoch": 1.7761303525232635, + "grad_norm": 1.828444004058838, + "learning_rate": 8.159388393374262e-05, + "loss": 0.4309, + "step": 46000 + }, + { + "epoch": 1.7765164678172902, + "grad_norm": 0.4886840879917145, + "learning_rate": 8.156814291414084e-05, + "loss": 0.1927, + "step": 46010 + }, + { + "epoch": 1.776902583111317, + "grad_norm": 0.7208076119422913, + "learning_rate": 8.154240189453905e-05, + "loss": 0.2186, + "step": 46020 + }, + { + "epoch": 1.7772886984053438, + "grad_norm": 0.6111328601837158, + "learning_rate": 8.151666087493725e-05, + "loss": 0.2065, + "step": 46030 + }, + { + "epoch": 1.7776748136993707, + "grad_norm": 2.601994514465332, + "learning_rate": 8.149091985533548e-05, + "loss": 0.1138, + "step": 46040 + }, + { + "epoch": 1.7780609289933973, + "grad_norm": 2.0283939838409424, + "learning_rate": 8.146517883573369e-05, + "loss": 0.3461, + "step": 46050 + }, + { + "epoch": 1.7784470442874243, + "grad_norm": 1.329325795173645, + "learning_rate": 8.14394378161319e-05, + "loss": 0.1981, + "step": 46060 + }, + { + "epoch": 1.778833159581451, + "grad_norm": 0.9197964072227478, + "learning_rate": 
8.141369679653012e-05, + "loss": 0.1853, + "step": 46070 + }, + { + "epoch": 1.7792192748754778, + "grad_norm": 0.5444622039794922, + "learning_rate": 8.138795577692833e-05, + "loss": 0.194, + "step": 46080 + }, + { + "epoch": 1.7796053901695046, + "grad_norm": 0.655691385269165, + "learning_rate": 8.136221475732654e-05, + "loss": 0.3279, + "step": 46090 + }, + { + "epoch": 1.7799915054635314, + "grad_norm": 1.0497363805770874, + "learning_rate": 8.133647373772474e-05, + "loss": 0.1208, + "step": 46100 + }, + { + "epoch": 1.7803776207575583, + "grad_norm": 1.5492208003997803, + "learning_rate": 8.131073271812297e-05, + "loss": 0.1155, + "step": 46110 + }, + { + "epoch": 1.780763736051585, + "grad_norm": 0.7815436720848083, + "learning_rate": 8.128499169852118e-05, + "loss": 0.3621, + "step": 46120 + }, + { + "epoch": 1.7811498513456119, + "grad_norm": 0.2011369913816452, + "learning_rate": 8.12592506789194e-05, + "loss": 0.1412, + "step": 46130 + }, + { + "epoch": 1.7815359666396386, + "grad_norm": 0.5177563428878784, + "learning_rate": 8.123350965931761e-05, + "loss": 0.2485, + "step": 46140 + }, + { + "epoch": 1.7819220819336654, + "grad_norm": 1.1141633987426758, + "learning_rate": 8.120776863971582e-05, + "loss": 0.2376, + "step": 46150 + }, + { + "epoch": 1.7823081972276922, + "grad_norm": 0.3764217495918274, + "learning_rate": 8.118202762011404e-05, + "loss": 0.2021, + "step": 46160 + }, + { + "epoch": 1.782694312521719, + "grad_norm": 0.7420102953910828, + "learning_rate": 8.115628660051225e-05, + "loss": 0.374, + "step": 46170 + }, + { + "epoch": 1.783080427815746, + "grad_norm": 1.4147247076034546, + "learning_rate": 8.113054558091046e-05, + "loss": 0.1202, + "step": 46180 + }, + { + "epoch": 1.7834665431097725, + "grad_norm": 0.9888404011726379, + "learning_rate": 8.110480456130868e-05, + "loss": 0.2564, + "step": 46190 + }, + { + "epoch": 1.7838526584037995, + "grad_norm": 1.2556307315826416, + "learning_rate": 8.107906354170689e-05, + "loss": 0.2403, + "step": 46200 + }, + { + "epoch": 1.784238773697826, + "grad_norm": 1.5373485088348389, + "learning_rate": 8.10533225221051e-05, + "loss": 0.23, + "step": 46210 + }, + { + "epoch": 1.784624888991853, + "grad_norm": 1.1403309106826782, + "learning_rate": 8.102758150250332e-05, + "loss": 0.2383, + "step": 46220 + }, + { + "epoch": 1.7850110042858798, + "grad_norm": 1.4236074686050415, + "learning_rate": 8.100184048290153e-05, + "loss": 0.2121, + "step": 46230 + }, + { + "epoch": 1.7853971195799065, + "grad_norm": 1.9537183046340942, + "learning_rate": 8.097609946329974e-05, + "loss": 0.2512, + "step": 46240 + }, + { + "epoch": 1.7857832348739333, + "grad_norm": 0.10802419483661652, + "learning_rate": 8.095035844369796e-05, + "loss": 0.2871, + "step": 46250 + }, + { + "epoch": 1.78616935016796, + "grad_norm": 0.9327038526535034, + "learning_rate": 8.092461742409617e-05, + "loss": 0.0487, + "step": 46260 + }, + { + "epoch": 1.786555465461987, + "grad_norm": 2.114015817642212, + "learning_rate": 8.089887640449438e-05, + "loss": 0.3128, + "step": 46270 + }, + { + "epoch": 1.7869415807560136, + "grad_norm": 1.2681559324264526, + "learning_rate": 8.08731353848926e-05, + "loss": 0.2883, + "step": 46280 + }, + { + "epoch": 1.7873276960500406, + "grad_norm": 1.0767977237701416, + "learning_rate": 8.084739436529082e-05, + "loss": 0.1632, + "step": 46290 + }, + { + "epoch": 1.7877138113440674, + "grad_norm": 0.9782537221908569, + "learning_rate": 8.082165334568902e-05, + "loss": 0.2489, + "step": 46300 + }, + { + "epoch": 1.7880999266380941, 
+ "grad_norm": 0.3115275204181671, + "learning_rate": 8.079591232608724e-05, + "loss": 0.3691, + "step": 46310 + }, + { + "epoch": 1.788486041932121, + "grad_norm": 1.7753130197525024, + "learning_rate": 8.077017130648545e-05, + "loss": 0.2362, + "step": 46320 + }, + { + "epoch": 1.7888721572261477, + "grad_norm": 1.5351624488830566, + "learning_rate": 8.074443028688366e-05, + "loss": 0.4439, + "step": 46330 + }, + { + "epoch": 1.7892582725201747, + "grad_norm": 0.15024085342884064, + "learning_rate": 8.071868926728188e-05, + "loss": 0.2309, + "step": 46340 + }, + { + "epoch": 1.7896443878142012, + "grad_norm": 0.3030475974082947, + "learning_rate": 8.069294824768009e-05, + "loss": 0.1032, + "step": 46350 + }, + { + "epoch": 1.7900305031082282, + "grad_norm": 2.4676828384399414, + "learning_rate": 8.066720722807832e-05, + "loss": 0.4874, + "step": 46360 + }, + { + "epoch": 1.790416618402255, + "grad_norm": 1.4394946098327637, + "learning_rate": 8.064146620847653e-05, + "loss": 0.1635, + "step": 46370 + }, + { + "epoch": 1.7908027336962817, + "grad_norm": 0.05110672488808632, + "learning_rate": 8.061572518887473e-05, + "loss": 0.1001, + "step": 46380 + }, + { + "epoch": 1.7911888489903085, + "grad_norm": 0.9311584830284119, + "learning_rate": 8.058998416927294e-05, + "loss": 0.288, + "step": 46390 + }, + { + "epoch": 1.7915749642843353, + "grad_norm": 0.869853675365448, + "learning_rate": 8.056424314967116e-05, + "loss": 0.3104, + "step": 46400 + }, + { + "epoch": 1.7919610795783623, + "grad_norm": 1.8579514026641846, + "learning_rate": 8.053850213006937e-05, + "loss": 0.1846, + "step": 46410 + }, + { + "epoch": 1.7923471948723888, + "grad_norm": 1.0974096059799194, + "learning_rate": 8.05127611104676e-05, + "loss": 0.2144, + "step": 46420 + }, + { + "epoch": 1.7927333101664158, + "grad_norm": 1.3006702661514282, + "learning_rate": 8.048702009086581e-05, + "loss": 0.4802, + "step": 46430 + }, + { + "epoch": 1.7931194254604423, + "grad_norm": 0.41811448335647583, + "learning_rate": 8.046127907126402e-05, + "loss": 0.3352, + "step": 46440 + }, + { + "epoch": 1.7935055407544693, + "grad_norm": 1.1778554916381836, + "learning_rate": 8.043553805166222e-05, + "loss": 0.2485, + "step": 46450 + }, + { + "epoch": 1.793891656048496, + "grad_norm": 0.10794985294342041, + "learning_rate": 8.040979703206044e-05, + "loss": 0.3235, + "step": 46460 + }, + { + "epoch": 1.7942777713425229, + "grad_norm": 0.6999881267547607, + "learning_rate": 8.038405601245865e-05, + "loss": 0.2148, + "step": 46470 + }, + { + "epoch": 1.7946638866365496, + "grad_norm": 1.2335025072097778, + "learning_rate": 8.035831499285688e-05, + "loss": 0.2128, + "step": 46480 + }, + { + "epoch": 1.7950500019305764, + "grad_norm": 0.23150336742401123, + "learning_rate": 8.033257397325509e-05, + "loss": 0.3316, + "step": 46490 + }, + { + "epoch": 1.7954361172246034, + "grad_norm": 2.0523316860198975, + "learning_rate": 8.03068329536533e-05, + "loss": 0.1892, + "step": 46500 + }, + { + "epoch": 1.79582223251863, + "grad_norm": 1.127738356590271, + "learning_rate": 8.028109193405152e-05, + "loss": 0.2521, + "step": 46510 + }, + { + "epoch": 1.796208347812657, + "grad_norm": 1.5948694944381714, + "learning_rate": 8.025535091444972e-05, + "loss": 0.3407, + "step": 46520 + }, + { + "epoch": 1.7965944631066837, + "grad_norm": 0.6383172273635864, + "learning_rate": 8.022960989484793e-05, + "loss": 0.271, + "step": 46530 + }, + { + "epoch": 1.7969805784007105, + "grad_norm": 1.016988754272461, + "learning_rate": 8.020386887524616e-05, + "loss": 
0.1699, + "step": 46540 + }, + { + "epoch": 1.7973666936947372, + "grad_norm": 0.7680085897445679, + "learning_rate": 8.017812785564437e-05, + "loss": 0.193, + "step": 46550 + }, + { + "epoch": 1.797752808988764, + "grad_norm": 0.36858290433883667, + "learning_rate": 8.015238683604258e-05, + "loss": 0.2332, + "step": 46560 + }, + { + "epoch": 1.798138924282791, + "grad_norm": 6.89350700378418, + "learning_rate": 8.01266458164408e-05, + "loss": 0.6444, + "step": 46570 + }, + { + "epoch": 1.7985250395768175, + "grad_norm": 0.3459351360797882, + "learning_rate": 8.010090479683901e-05, + "loss": 0.2327, + "step": 46580 + }, + { + "epoch": 1.7989111548708445, + "grad_norm": 1.195894718170166, + "learning_rate": 8.007516377723722e-05, + "loss": 0.2167, + "step": 46590 + }, + { + "epoch": 1.799297270164871, + "grad_norm": 3.738119602203369, + "learning_rate": 8.004942275763542e-05, + "loss": 0.1946, + "step": 46600 + }, + { + "epoch": 1.799683385458898, + "grad_norm": 1.207139015197754, + "learning_rate": 8.002368173803365e-05, + "loss": 0.2888, + "step": 46610 + }, + { + "epoch": 1.8000695007529248, + "grad_norm": 0.2352428138256073, + "learning_rate": 7.999794071843186e-05, + "loss": 0.301, + "step": 46620 + }, + { + "epoch": 1.8004556160469516, + "grad_norm": 0.4833974242210388, + "learning_rate": 7.997219969883008e-05, + "loss": 0.1984, + "step": 46630 + }, + { + "epoch": 1.8008417313409786, + "grad_norm": 0.27819526195526123, + "learning_rate": 7.994645867922829e-05, + "loss": 0.229, + "step": 46640 + }, + { + "epoch": 1.8012278466350051, + "grad_norm": 1.2809014320373535, + "learning_rate": 7.99207176596265e-05, + "loss": 0.328, + "step": 46650 + }, + { + "epoch": 1.8016139619290321, + "grad_norm": 3.3877575397491455, + "learning_rate": 7.989497664002472e-05, + "loss": 0.236, + "step": 46660 + }, + { + "epoch": 1.8020000772230587, + "grad_norm": 0.5104680061340332, + "learning_rate": 7.986923562042293e-05, + "loss": 0.1853, + "step": 46670 + }, + { + "epoch": 1.8023861925170857, + "grad_norm": 0.6869563460350037, + "learning_rate": 7.984349460082114e-05, + "loss": 0.1743, + "step": 46680 + }, + { + "epoch": 1.8027723078111124, + "grad_norm": 1.9329041242599487, + "learning_rate": 7.981775358121936e-05, + "loss": 0.3057, + "step": 46690 + }, + { + "epoch": 1.8031584231051392, + "grad_norm": 0.9997574090957642, + "learning_rate": 7.979201256161757e-05, + "loss": 0.2145, + "step": 46700 + }, + { + "epoch": 1.803544538399166, + "grad_norm": 1.173745036125183, + "learning_rate": 7.976627154201578e-05, + "loss": 0.2315, + "step": 46710 + }, + { + "epoch": 1.8039306536931927, + "grad_norm": 0.9252222180366516, + "learning_rate": 7.9740530522414e-05, + "loss": 0.3173, + "step": 46720 + }, + { + "epoch": 1.8043167689872197, + "grad_norm": 1.316101312637329, + "learning_rate": 7.971478950281221e-05, + "loss": 0.1249, + "step": 46730 + }, + { + "epoch": 1.8047028842812463, + "grad_norm": 1.1243810653686523, + "learning_rate": 7.968904848321042e-05, + "loss": 0.3688, + "step": 46740 + }, + { + "epoch": 1.8050889995752732, + "grad_norm": 2.3322315216064453, + "learning_rate": 7.966330746360864e-05, + "loss": 0.2379, + "step": 46750 + }, + { + "epoch": 1.8054751148693, + "grad_norm": 1.534040927886963, + "learning_rate": 7.963756644400685e-05, + "loss": 0.365, + "step": 46760 + }, + { + "epoch": 1.8058612301633268, + "grad_norm": 1.326835036277771, + "learning_rate": 7.961182542440506e-05, + "loss": 0.1909, + "step": 46770 + }, + { + "epoch": 1.8062473454573535, + "grad_norm": 1.8357387781143188, + 
"learning_rate": 7.958608440480328e-05, + "loss": 0.3187, + "step": 46780 + }, + { + "epoch": 1.8066334607513803, + "grad_norm": 0.9270907044410706, + "learning_rate": 7.95603433852015e-05, + "loss": 0.2022, + "step": 46790 + }, + { + "epoch": 1.8070195760454073, + "grad_norm": 0.5300419330596924, + "learning_rate": 7.95346023655997e-05, + "loss": 0.2015, + "step": 46800 + }, + { + "epoch": 1.8074056913394339, + "grad_norm": 0.35859522223472595, + "learning_rate": 7.950886134599792e-05, + "loss": 0.1764, + "step": 46810 + }, + { + "epoch": 1.8077918066334608, + "grad_norm": 0.5568848252296448, + "learning_rate": 7.948312032639613e-05, + "loss": 0.1862, + "step": 46820 + }, + { + "epoch": 1.8081779219274874, + "grad_norm": 0.22550754249095917, + "learning_rate": 7.945737930679434e-05, + "loss": 0.2237, + "step": 46830 + }, + { + "epoch": 1.8085640372215144, + "grad_norm": 1.2444871664047241, + "learning_rate": 7.943163828719256e-05, + "loss": 0.2077, + "step": 46840 + }, + { + "epoch": 1.8089501525155411, + "grad_norm": 0.9609934091567993, + "learning_rate": 7.940589726759077e-05, + "loss": 0.2717, + "step": 46850 + }, + { + "epoch": 1.809336267809568, + "grad_norm": 0.2863350808620453, + "learning_rate": 7.9380156247989e-05, + "loss": 0.3284, + "step": 46860 + }, + { + "epoch": 1.8097223831035947, + "grad_norm": 1.6350208520889282, + "learning_rate": 7.93544152283872e-05, + "loss": 0.0919, + "step": 46870 + }, + { + "epoch": 1.8101084983976214, + "grad_norm": 0.5808874368667603, + "learning_rate": 7.932867420878541e-05, + "loss": 0.165, + "step": 46880 + }, + { + "epoch": 1.8104946136916484, + "grad_norm": 0.5033291578292847, + "learning_rate": 7.930293318918362e-05, + "loss": 0.192, + "step": 46890 + }, + { + "epoch": 1.810880728985675, + "grad_norm": 3.8204874992370605, + "learning_rate": 7.927719216958184e-05, + "loss": 0.3885, + "step": 46900 + }, + { + "epoch": 1.811266844279702, + "grad_norm": 0.9689306020736694, + "learning_rate": 7.925145114998005e-05, + "loss": 0.2609, + "step": 46910 + }, + { + "epoch": 1.8116529595737287, + "grad_norm": 2.7360000610351562, + "learning_rate": 7.922571013037828e-05, + "loss": 0.1955, + "step": 46920 + }, + { + "epoch": 1.8120390748677555, + "grad_norm": 1.2181187868118286, + "learning_rate": 7.919996911077649e-05, + "loss": 0.251, + "step": 46930 + }, + { + "epoch": 1.8124251901617823, + "grad_norm": 0.2421361356973648, + "learning_rate": 7.91742280911747e-05, + "loss": 0.1401, + "step": 46940 + }, + { + "epoch": 1.812811305455809, + "grad_norm": 1.1567500829696655, + "learning_rate": 7.91484870715729e-05, + "loss": 0.2303, + "step": 46950 + }, + { + "epoch": 1.813197420749836, + "grad_norm": 2.173985481262207, + "learning_rate": 7.912274605197112e-05, + "loss": 0.3141, + "step": 46960 + }, + { + "epoch": 1.8135835360438626, + "grad_norm": 0.21923333406448364, + "learning_rate": 7.909700503236933e-05, + "loss": 0.3109, + "step": 46970 + }, + { + "epoch": 1.8139696513378896, + "grad_norm": 2.7012853622436523, + "learning_rate": 7.907126401276756e-05, + "loss": 0.2912, + "step": 46980 + }, + { + "epoch": 1.8143557666319163, + "grad_norm": 1.6185250282287598, + "learning_rate": 7.904552299316577e-05, + "loss": 0.1989, + "step": 46990 + }, + { + "epoch": 1.814741881925943, + "grad_norm": 1.2078602313995361, + "learning_rate": 7.901978197356398e-05, + "loss": 0.2906, + "step": 47000 + }, + { + "epoch": 1.8151279972199699, + "grad_norm": 0.8695535063743591, + "learning_rate": 7.89940409539622e-05, + "loss": 0.2486, + "step": 47010 + }, + { + "epoch": 
1.8155141125139966, + "grad_norm": 1.7880644798278809, + "learning_rate": 7.89682999343604e-05, + "loss": 0.2963, + "step": 47020 + }, + { + "epoch": 1.8159002278080236, + "grad_norm": 1.9220221042633057, + "learning_rate": 7.894255891475861e-05, + "loss": 0.2822, + "step": 47030 + }, + { + "epoch": 1.8162863431020502, + "grad_norm": 0.2507815361022949, + "learning_rate": 7.891681789515684e-05, + "loss": 0.165, + "step": 47040 + }, + { + "epoch": 1.8166724583960772, + "grad_norm": 0.8847838640213013, + "learning_rate": 7.889107687555505e-05, + "loss": 0.1536, + "step": 47050 + }, + { + "epoch": 1.8170585736901037, + "grad_norm": 0.7972745895385742, + "learning_rate": 7.886533585595326e-05, + "loss": 0.4152, + "step": 47060 + }, + { + "epoch": 1.8174446889841307, + "grad_norm": 3.005274534225464, + "learning_rate": 7.883959483635148e-05, + "loss": 0.2491, + "step": 47070 + }, + { + "epoch": 1.8178308042781575, + "grad_norm": 0.19801591336727142, + "learning_rate": 7.881385381674969e-05, + "loss": 0.2671, + "step": 47080 + }, + { + "epoch": 1.8182169195721842, + "grad_norm": 0.43803074955940247, + "learning_rate": 7.878811279714789e-05, + "loss": 0.1101, + "step": 47090 + }, + { + "epoch": 1.818603034866211, + "grad_norm": 2.517371416091919, + "learning_rate": 7.876237177754612e-05, + "loss": 0.2759, + "step": 47100 + }, + { + "epoch": 1.8189891501602378, + "grad_norm": 1.0714353322982788, + "learning_rate": 7.873663075794433e-05, + "loss": 0.2247, + "step": 47110 + }, + { + "epoch": 1.8193752654542648, + "grad_norm": 1.3440790176391602, + "learning_rate": 7.871088973834254e-05, + "loss": 0.1309, + "step": 47120 + }, + { + "epoch": 1.8197613807482913, + "grad_norm": 0.10665372759103775, + "learning_rate": 7.868514871874076e-05, + "loss": 0.3176, + "step": 47130 + }, + { + "epoch": 1.8201474960423183, + "grad_norm": 0.6812877655029297, + "learning_rate": 7.865940769913897e-05, + "loss": 0.1709, + "step": 47140 + }, + { + "epoch": 1.820533611336345, + "grad_norm": 0.3776918649673462, + "learning_rate": 7.863366667953718e-05, + "loss": 0.1949, + "step": 47150 + }, + { + "epoch": 1.8209197266303718, + "grad_norm": 0.7111637592315674, + "learning_rate": 7.860792565993538e-05, + "loss": 0.2463, + "step": 47160 + }, + { + "epoch": 1.8213058419243986, + "grad_norm": 0.5845387578010559, + "learning_rate": 7.858218464033361e-05, + "loss": 0.285, + "step": 47170 + }, + { + "epoch": 1.8216919572184254, + "grad_norm": 0.394326776266098, + "learning_rate": 7.855644362073182e-05, + "loss": 0.1547, + "step": 47180 + }, + { + "epoch": 1.8220780725124524, + "grad_norm": 1.6686782836914062, + "learning_rate": 7.853070260113004e-05, + "loss": 0.2764, + "step": 47190 + }, + { + "epoch": 1.822464187806479, + "grad_norm": 0.4293366074562073, + "learning_rate": 7.850496158152825e-05, + "loss": 0.2277, + "step": 47200 + }, + { + "epoch": 1.8228503031005059, + "grad_norm": 0.5316643118858337, + "learning_rate": 7.847922056192646e-05, + "loss": 0.2015, + "step": 47210 + }, + { + "epoch": 1.8232364183945327, + "grad_norm": 1.1477640867233276, + "learning_rate": 7.845347954232468e-05, + "loss": 0.4081, + "step": 47220 + }, + { + "epoch": 1.8236225336885594, + "grad_norm": 1.0033808946609497, + "learning_rate": 7.842773852272289e-05, + "loss": 0.1356, + "step": 47230 + }, + { + "epoch": 1.8240086489825862, + "grad_norm": 1.0021567344665527, + "learning_rate": 7.84019975031211e-05, + "loss": 0.161, + "step": 47240 + }, + { + "epoch": 1.824394764276613, + "grad_norm": 0.733974277973175, + "learning_rate": 
7.837625648351932e-05, + "loss": 0.1246, + "step": 47250 + }, + { + "epoch": 1.82478087957064, + "grad_norm": 1.1681737899780273, + "learning_rate": 7.835051546391753e-05, + "loss": 0.3287, + "step": 47260 + }, + { + "epoch": 1.8251669948646665, + "grad_norm": 2.490657329559326, + "learning_rate": 7.832477444431574e-05, + "loss": 0.2203, + "step": 47270 + }, + { + "epoch": 1.8255531101586935, + "grad_norm": 1.1667157411575317, + "learning_rate": 7.829903342471396e-05, + "loss": 0.2827, + "step": 47280 + }, + { + "epoch": 1.82593922545272, + "grad_norm": 0.31772786378860474, + "learning_rate": 7.827329240511217e-05, + "loss": 0.1664, + "step": 47290 + }, + { + "epoch": 1.826325340746747, + "grad_norm": 1.1539255380630493, + "learning_rate": 7.824755138551038e-05, + "loss": 0.207, + "step": 47300 + }, + { + "epoch": 1.8267114560407738, + "grad_norm": 0.9592777490615845, + "learning_rate": 7.82218103659086e-05, + "loss": 0.193, + "step": 47310 + }, + { + "epoch": 1.8270975713348006, + "grad_norm": 0.6902135014533997, + "learning_rate": 7.819606934630681e-05, + "loss": 0.2119, + "step": 47320 + }, + { + "epoch": 1.8274836866288273, + "grad_norm": 1.4007855653762817, + "learning_rate": 7.817032832670502e-05, + "loss": 0.2046, + "step": 47330 + }, + { + "epoch": 1.827869801922854, + "grad_norm": 1.520080804824829, + "learning_rate": 7.814458730710324e-05, + "loss": 0.2861, + "step": 47340 + }, + { + "epoch": 1.828255917216881, + "grad_norm": 2.576406478881836, + "learning_rate": 7.811884628750146e-05, + "loss": 0.2615, + "step": 47350 + }, + { + "epoch": 1.8286420325109076, + "grad_norm": 1.499143362045288, + "learning_rate": 7.809310526789968e-05, + "loss": 0.4094, + "step": 47360 + }, + { + "epoch": 1.8290281478049346, + "grad_norm": 0.9561934471130371, + "learning_rate": 7.806736424829787e-05, + "loss": 0.2121, + "step": 47370 + }, + { + "epoch": 1.8294142630989614, + "grad_norm": 1.838971734046936, + "learning_rate": 7.804162322869609e-05, + "loss": 0.325, + "step": 47380 + }, + { + "epoch": 1.8298003783929881, + "grad_norm": 1.1759018898010254, + "learning_rate": 7.80158822090943e-05, + "loss": 0.1095, + "step": 47390 + }, + { + "epoch": 1.830186493687015, + "grad_norm": 0.4624423086643219, + "learning_rate": 7.799014118949251e-05, + "loss": 0.264, + "step": 47400 + }, + { + "epoch": 1.8305726089810417, + "grad_norm": 2.0572640895843506, + "learning_rate": 7.796440016989073e-05, + "loss": 0.3171, + "step": 47410 + }, + { + "epoch": 1.8309587242750687, + "grad_norm": 0.959831714630127, + "learning_rate": 7.793865915028896e-05, + "loss": 0.1588, + "step": 47420 + }, + { + "epoch": 1.8313448395690952, + "grad_norm": 0.7953433394432068, + "learning_rate": 7.791291813068717e-05, + "loss": 0.3992, + "step": 47430 + }, + { + "epoch": 1.8317309548631222, + "grad_norm": 0.8194203972816467, + "learning_rate": 7.788717711108537e-05, + "loss": 0.2718, + "step": 47440 + }, + { + "epoch": 1.832117070157149, + "grad_norm": 1.7283341884613037, + "learning_rate": 7.786143609148358e-05, + "loss": 0.3977, + "step": 47450 + }, + { + "epoch": 1.8325031854511757, + "grad_norm": 0.15646076202392578, + "learning_rate": 7.78356950718818e-05, + "loss": 0.2449, + "step": 47460 + }, + { + "epoch": 1.8328893007452025, + "grad_norm": 1.0211836099624634, + "learning_rate": 7.780995405228001e-05, + "loss": 0.2022, + "step": 47470 + }, + { + "epoch": 1.8332754160392293, + "grad_norm": 0.8658961653709412, + "learning_rate": 7.778421303267823e-05, + "loss": 0.2429, + "step": 47480 + }, + { + "epoch": 1.8336615313332563, + 
"grad_norm": 1.2359483242034912, + "learning_rate": 7.775847201307645e-05, + "loss": 0.3284, + "step": 47490 + }, + { + "epoch": 1.8340476466272828, + "grad_norm": 1.2980804443359375, + "learning_rate": 7.773273099347466e-05, + "loss": 0.1205, + "step": 47500 + }, + { + "epoch": 1.8344337619213098, + "grad_norm": 0.06653542071580887, + "learning_rate": 7.770698997387286e-05, + "loss": 0.1376, + "step": 47510 + }, + { + "epoch": 1.8348198772153363, + "grad_norm": 1.0570144653320312, + "learning_rate": 7.768124895427107e-05, + "loss": 0.1288, + "step": 47520 + }, + { + "epoch": 1.8352059925093633, + "grad_norm": 1.0040183067321777, + "learning_rate": 7.765550793466929e-05, + "loss": 0.1825, + "step": 47530 + }, + { + "epoch": 1.83559210780339, + "grad_norm": 2.5596699714660645, + "learning_rate": 7.762976691506751e-05, + "loss": 0.1764, + "step": 47540 + }, + { + "epoch": 1.8359782230974169, + "grad_norm": 4.138411521911621, + "learning_rate": 7.760402589546573e-05, + "loss": 0.2783, + "step": 47550 + }, + { + "epoch": 1.8363643383914436, + "grad_norm": 0.8152881860733032, + "learning_rate": 7.757828487586394e-05, + "loss": 0.1424, + "step": 47560 + }, + { + "epoch": 1.8367504536854704, + "grad_norm": 2.608886480331421, + "learning_rate": 7.755254385626215e-05, + "loss": 0.161, + "step": 47570 + }, + { + "epoch": 1.8371365689794974, + "grad_norm": 2.870406150817871, + "learning_rate": 7.752680283666037e-05, + "loss": 0.6419, + "step": 47580 + }, + { + "epoch": 1.837522684273524, + "grad_norm": 1.2127447128295898, + "learning_rate": 7.750106181705857e-05, + "loss": 0.1987, + "step": 47590 + }, + { + "epoch": 1.837908799567551, + "grad_norm": 0.44798439741134644, + "learning_rate": 7.74753207974568e-05, + "loss": 0.2313, + "step": 47600 + }, + { + "epoch": 1.8382949148615777, + "grad_norm": 0.434055894613266, + "learning_rate": 7.744957977785501e-05, + "loss": 0.1227, + "step": 47610 + }, + { + "epoch": 1.8386810301556045, + "grad_norm": 1.0160471200942993, + "learning_rate": 7.742383875825322e-05, + "loss": 0.2699, + "step": 47620 + }, + { + "epoch": 1.8390671454496312, + "grad_norm": 2.0095553398132324, + "learning_rate": 7.739809773865143e-05, + "loss": 0.2666, + "step": 47630 + }, + { + "epoch": 1.839453260743658, + "grad_norm": 2.849498748779297, + "learning_rate": 7.737235671904965e-05, + "loss": 0.2798, + "step": 47640 + }, + { + "epoch": 1.839839376037685, + "grad_norm": 2.336435556411743, + "learning_rate": 7.734661569944786e-05, + "loss": 0.3048, + "step": 47650 + }, + { + "epoch": 1.8402254913317115, + "grad_norm": 0.8438564538955688, + "learning_rate": 7.732087467984606e-05, + "loss": 0.2876, + "step": 47660 + }, + { + "epoch": 1.8406116066257385, + "grad_norm": 0.514867901802063, + "learning_rate": 7.729513366024429e-05, + "loss": 0.2249, + "step": 47670 + }, + { + "epoch": 1.8409977219197653, + "grad_norm": 4.391468048095703, + "learning_rate": 7.72693926406425e-05, + "loss": 0.3918, + "step": 47680 + }, + { + "epoch": 1.841383837213792, + "grad_norm": 2.4497931003570557, + "learning_rate": 7.724365162104071e-05, + "loss": 0.3368, + "step": 47690 + }, + { + "epoch": 1.8417699525078188, + "grad_norm": 2.884559392929077, + "learning_rate": 7.721791060143893e-05, + "loss": 0.2406, + "step": 47700 + }, + { + "epoch": 1.8421560678018456, + "grad_norm": 1.099225640296936, + "learning_rate": 7.719216958183714e-05, + "loss": 0.1759, + "step": 47710 + }, + { + "epoch": 1.8425421830958726, + "grad_norm": 0.881747305393219, + "learning_rate": 7.716642856223535e-05, + "loss": 0.2377, + 
"step": 47720 + }, + { + "epoch": 1.8429282983898991, + "grad_norm": 1.7341536283493042, + "learning_rate": 7.714068754263357e-05, + "loss": 0.1593, + "step": 47730 + }, + { + "epoch": 1.8433144136839261, + "grad_norm": 0.8997848629951477, + "learning_rate": 7.711494652303178e-05, + "loss": 0.2198, + "step": 47740 + }, + { + "epoch": 1.8437005289779527, + "grad_norm": 0.38423866033554077, + "learning_rate": 7.708920550343e-05, + "loss": 0.2539, + "step": 47750 + }, + { + "epoch": 1.8440866442719797, + "grad_norm": 1.0857203006744385, + "learning_rate": 7.706346448382821e-05, + "loss": 0.1595, + "step": 47760 + }, + { + "epoch": 1.8444727595660064, + "grad_norm": 1.5009055137634277, + "learning_rate": 7.703772346422642e-05, + "loss": 0.2478, + "step": 47770 + }, + { + "epoch": 1.8448588748600332, + "grad_norm": 0.13288016617298126, + "learning_rate": 7.701198244462463e-05, + "loss": 0.2184, + "step": 47780 + }, + { + "epoch": 1.84524499015406, + "grad_norm": 1.167777180671692, + "learning_rate": 7.698624142502285e-05, + "loss": 0.3157, + "step": 47790 + }, + { + "epoch": 1.8456311054480867, + "grad_norm": 0.5168697834014893, + "learning_rate": 7.696050040542106e-05, + "loss": 0.1543, + "step": 47800 + }, + { + "epoch": 1.8460172207421137, + "grad_norm": 0.7516908645629883, + "learning_rate": 7.693475938581927e-05, + "loss": 0.1869, + "step": 47810 + }, + { + "epoch": 1.8464033360361403, + "grad_norm": 3.0962419509887695, + "learning_rate": 7.690901836621749e-05, + "loss": 0.3891, + "step": 47820 + }, + { + "epoch": 1.8467894513301673, + "grad_norm": 0.05181325227022171, + "learning_rate": 7.68832773466157e-05, + "loss": 0.3427, + "step": 47830 + }, + { + "epoch": 1.847175566624194, + "grad_norm": 2.856113910675049, + "learning_rate": 7.685753632701391e-05, + "loss": 0.2148, + "step": 47840 + }, + { + "epoch": 1.8475616819182208, + "grad_norm": 0.4950585663318634, + "learning_rate": 7.683179530741214e-05, + "loss": 0.2689, + "step": 47850 + }, + { + "epoch": 1.8479477972122476, + "grad_norm": 2.390775680541992, + "learning_rate": 7.680605428781034e-05, + "loss": 0.3389, + "step": 47860 + }, + { + "epoch": 1.8483339125062743, + "grad_norm": 0.1536688208580017, + "learning_rate": 7.678031326820855e-05, + "loss": 0.1811, + "step": 47870 + }, + { + "epoch": 1.8487200278003013, + "grad_norm": 0.26756611466407776, + "learning_rate": 7.675457224860677e-05, + "loss": 0.1735, + "step": 47880 + }, + { + "epoch": 1.8491061430943279, + "grad_norm": 1.3034265041351318, + "learning_rate": 7.672883122900498e-05, + "loss": 0.1554, + "step": 47890 + }, + { + "epoch": 1.8494922583883548, + "grad_norm": 1.1781542301177979, + "learning_rate": 7.67030902094032e-05, + "loss": 0.4027, + "step": 47900 + }, + { + "epoch": 1.8498783736823814, + "grad_norm": 1.2090016603469849, + "learning_rate": 7.667734918980141e-05, + "loss": 0.1873, + "step": 47910 + }, + { + "epoch": 1.8502644889764084, + "grad_norm": 1.5354760885238647, + "learning_rate": 7.665160817019963e-05, + "loss": 0.2218, + "step": 47920 + }, + { + "epoch": 1.8506506042704352, + "grad_norm": 2.093831777572632, + "learning_rate": 7.662586715059785e-05, + "loss": 0.2591, + "step": 47930 + }, + { + "epoch": 1.851036719564462, + "grad_norm": 0.9484484195709229, + "learning_rate": 7.660012613099605e-05, + "loss": 0.252, + "step": 47940 + }, + { + "epoch": 1.851422834858489, + "grad_norm": 0.15661562979221344, + "learning_rate": 7.657438511139426e-05, + "loss": 0.2673, + "step": 47950 + }, + { + "epoch": 1.8518089501525155, + "grad_norm": 0.5501863360404968, + 
"learning_rate": 7.654864409179247e-05, + "loss": 0.2636, + "step": 47960 + }, + { + "epoch": 1.8521950654465424, + "grad_norm": 1.455328345298767, + "learning_rate": 7.652290307219069e-05, + "loss": 0.2085, + "step": 47970 + }, + { + "epoch": 1.852581180740569, + "grad_norm": 0.310106098651886, + "learning_rate": 7.649716205258891e-05, + "loss": 0.1839, + "step": 47980 + }, + { + "epoch": 1.852967296034596, + "grad_norm": 0.444260835647583, + "learning_rate": 7.647142103298713e-05, + "loss": 0.2434, + "step": 47990 + }, + { + "epoch": 1.8533534113286227, + "grad_norm": 0.5278909206390381, + "learning_rate": 7.644568001338534e-05, + "loss": 0.2472, + "step": 48000 + }, + { + "epoch": 1.8537395266226495, + "grad_norm": 0.4239410161972046, + "learning_rate": 7.641993899378354e-05, + "loss": 0.2854, + "step": 48010 + }, + { + "epoch": 1.8541256419166763, + "grad_norm": 0.9763671159744263, + "learning_rate": 7.639419797418175e-05, + "loss": 0.2737, + "step": 48020 + }, + { + "epoch": 1.854511757210703, + "grad_norm": 2.139054775238037, + "learning_rate": 7.636845695457997e-05, + "loss": 0.2553, + "step": 48030 + }, + { + "epoch": 1.85489787250473, + "grad_norm": 1.8417706489562988, + "learning_rate": 7.63427159349782e-05, + "loss": 0.3787, + "step": 48040 + }, + { + "epoch": 1.8552839877987566, + "grad_norm": 1.2950854301452637, + "learning_rate": 7.631697491537641e-05, + "loss": 0.1859, + "step": 48050 + }, + { + "epoch": 1.8556701030927836, + "grad_norm": 0.9440277218818665, + "learning_rate": 7.629123389577462e-05, + "loss": 0.2362, + "step": 48060 + }, + { + "epoch": 1.8560562183868103, + "grad_norm": 2.829890012741089, + "learning_rate": 7.626549287617283e-05, + "loss": 0.264, + "step": 48070 + }, + { + "epoch": 1.856442333680837, + "grad_norm": 1.5721958875656128, + "learning_rate": 7.623975185657103e-05, + "loss": 0.2834, + "step": 48080 + }, + { + "epoch": 1.8568284489748639, + "grad_norm": 0.7574679851531982, + "learning_rate": 7.621401083696925e-05, + "loss": 0.1546, + "step": 48090 + }, + { + "epoch": 1.8572145642688906, + "grad_norm": 0.4562332332134247, + "learning_rate": 7.618826981736747e-05, + "loss": 0.2419, + "step": 48100 + }, + { + "epoch": 1.8576006795629176, + "grad_norm": 1.8063342571258545, + "learning_rate": 7.616252879776569e-05, + "loss": 0.5576, + "step": 48110 + }, + { + "epoch": 1.8579867948569442, + "grad_norm": 1.7112247943878174, + "learning_rate": 7.61367877781639e-05, + "loss": 0.2379, + "step": 48120 + }, + { + "epoch": 1.8583729101509712, + "grad_norm": 0.696594774723053, + "learning_rate": 7.611104675856211e-05, + "loss": 0.2704, + "step": 48130 + }, + { + "epoch": 1.8587590254449977, + "grad_norm": 1.1555263996124268, + "learning_rate": 7.608530573896033e-05, + "loss": 0.2835, + "step": 48140 + }, + { + "epoch": 1.8591451407390247, + "grad_norm": 0.8256335854530334, + "learning_rate": 7.605956471935854e-05, + "loss": 0.3059, + "step": 48150 + }, + { + "epoch": 1.8595312560330515, + "grad_norm": 1.5511350631713867, + "learning_rate": 7.603382369975674e-05, + "loss": 0.2882, + "step": 48160 + }, + { + "epoch": 1.8599173713270782, + "grad_norm": 0.5888099074363708, + "learning_rate": 7.600808268015497e-05, + "loss": 0.2939, + "step": 48170 + }, + { + "epoch": 1.860303486621105, + "grad_norm": 1.3985711336135864, + "learning_rate": 7.598234166055318e-05, + "loss": 0.203, + "step": 48180 + }, + { + "epoch": 1.8606896019151318, + "grad_norm": 2.2253661155700684, + "learning_rate": 7.59566006409514e-05, + "loss": 0.2865, + "step": 48190 + }, + { + "epoch": 
1.8610757172091588, + "grad_norm": 1.062969446182251, + "learning_rate": 7.593085962134961e-05, + "loss": 0.3292, + "step": 48200 + }, + { + "epoch": 1.8614618325031853, + "grad_norm": 1.0917813777923584, + "learning_rate": 7.590511860174782e-05, + "loss": 0.2274, + "step": 48210 + }, + { + "epoch": 1.8618479477972123, + "grad_norm": 1.3360031843185425, + "learning_rate": 7.587937758214603e-05, + "loss": 0.2381, + "step": 48220 + }, + { + "epoch": 1.862234063091239, + "grad_norm": 0.12084411829710007, + "learning_rate": 7.585363656254425e-05, + "loss": 0.1836, + "step": 48230 + }, + { + "epoch": 1.8626201783852658, + "grad_norm": 3.0013840198516846, + "learning_rate": 7.582789554294246e-05, + "loss": 0.2514, + "step": 48240 + }, + { + "epoch": 1.8630062936792926, + "grad_norm": 0.08427372574806213, + "learning_rate": 7.580215452334067e-05, + "loss": 0.4512, + "step": 48250 + }, + { + "epoch": 1.8633924089733194, + "grad_norm": 2.2700986862182617, + "learning_rate": 7.577641350373889e-05, + "loss": 0.4929, + "step": 48260 + }, + { + "epoch": 1.8637785242673464, + "grad_norm": 1.512097716331482, + "learning_rate": 7.57506724841371e-05, + "loss": 0.2595, + "step": 48270 + }, + { + "epoch": 1.864164639561373, + "grad_norm": 0.03086630441248417, + "learning_rate": 7.572493146453531e-05, + "loss": 0.2801, + "step": 48280 + }, + { + "epoch": 1.8645507548554, + "grad_norm": 1.8089312314987183, + "learning_rate": 7.569919044493353e-05, + "loss": 0.3094, + "step": 48290 + }, + { + "epoch": 1.8649368701494267, + "grad_norm": 1.2290606498718262, + "learning_rate": 7.567344942533174e-05, + "loss": 0.2952, + "step": 48300 + }, + { + "epoch": 1.8653229854434534, + "grad_norm": 2.2351982593536377, + "learning_rate": 7.564770840572995e-05, + "loss": 0.1904, + "step": 48310 + }, + { + "epoch": 1.8657091007374802, + "grad_norm": 1.007934808731079, + "learning_rate": 7.562196738612817e-05, + "loss": 0.174, + "step": 48320 + }, + { + "epoch": 1.866095216031507, + "grad_norm": 1.8931010961532593, + "learning_rate": 7.559622636652638e-05, + "loss": 0.2265, + "step": 48330 + }, + { + "epoch": 1.866481331325534, + "grad_norm": 7.616462230682373, + "learning_rate": 7.55704853469246e-05, + "loss": 0.194, + "step": 48340 + }, + { + "epoch": 1.8668674466195605, + "grad_norm": 0.8300217390060425, + "learning_rate": 7.554474432732282e-05, + "loss": 0.172, + "step": 48350 + }, + { + "epoch": 1.8672535619135875, + "grad_norm": 0.9842997789382935, + "learning_rate": 7.551900330772102e-05, + "loss": 0.2279, + "step": 48360 + }, + { + "epoch": 1.867639677207614, + "grad_norm": 1.950230360031128, + "learning_rate": 7.549326228811923e-05, + "loss": 0.2703, + "step": 48370 + }, + { + "epoch": 1.868025792501641, + "grad_norm": 1.9885706901550293, + "learning_rate": 7.546752126851745e-05, + "loss": 0.2584, + "step": 48380 + }, + { + "epoch": 1.8684119077956678, + "grad_norm": 0.6000315546989441, + "learning_rate": 7.544178024891566e-05, + "loss": 0.2729, + "step": 48390 + }, + { + "epoch": 1.8687980230896946, + "grad_norm": 0.922893226146698, + "learning_rate": 7.541603922931387e-05, + "loss": 0.1564, + "step": 48400 + }, + { + "epoch": 1.8691841383837213, + "grad_norm": 0.4918765723705292, + "learning_rate": 7.53902982097121e-05, + "loss": 0.1723, + "step": 48410 + }, + { + "epoch": 1.869570253677748, + "grad_norm": 0.9271582365036011, + "learning_rate": 7.536455719011031e-05, + "loss": 0.2593, + "step": 48420 + }, + { + "epoch": 1.869956368971775, + "grad_norm": 0.5880617499351501, + "learning_rate": 7.533881617050851e-05, + 
"loss": 0.1738, + "step": 48430 + }, + { + "epoch": 1.8703424842658016, + "grad_norm": 2.229809522628784, + "learning_rate": 7.531307515090673e-05, + "loss": 0.2722, + "step": 48440 + }, + { + "epoch": 1.8707285995598286, + "grad_norm": 0.47200268507003784, + "learning_rate": 7.528733413130494e-05, + "loss": 0.1359, + "step": 48450 + }, + { + "epoch": 1.8711147148538554, + "grad_norm": 1.7799588441848755, + "learning_rate": 7.526159311170315e-05, + "loss": 0.1806, + "step": 48460 + }, + { + "epoch": 1.8715008301478822, + "grad_norm": 0.7878087162971497, + "learning_rate": 7.523585209210137e-05, + "loss": 0.128, + "step": 48470 + }, + { + "epoch": 1.871886945441909, + "grad_norm": 0.666887104511261, + "learning_rate": 7.521011107249959e-05, + "loss": 0.3447, + "step": 48480 + }, + { + "epoch": 1.8722730607359357, + "grad_norm": 0.13966748118400574, + "learning_rate": 7.51843700528978e-05, + "loss": 0.2694, + "step": 48490 + }, + { + "epoch": 1.8726591760299627, + "grad_norm": 0.6305252909660339, + "learning_rate": 7.5158629033296e-05, + "loss": 0.1235, + "step": 48500 + }, + { + "epoch": 1.8730452913239892, + "grad_norm": 1.4558709859848022, + "learning_rate": 7.513288801369422e-05, + "loss": 0.1251, + "step": 48510 + }, + { + "epoch": 1.8734314066180162, + "grad_norm": 1.4596049785614014, + "learning_rate": 7.510714699409243e-05, + "loss": 0.3316, + "step": 48520 + }, + { + "epoch": 1.873817521912043, + "grad_norm": 0.6161240935325623, + "learning_rate": 7.508140597449065e-05, + "loss": 0.1035, + "step": 48530 + }, + { + "epoch": 1.8742036372060698, + "grad_norm": 2.402022123336792, + "learning_rate": 7.505566495488887e-05, + "loss": 0.2569, + "step": 48540 + }, + { + "epoch": 1.8745897525000965, + "grad_norm": 1.2332879304885864, + "learning_rate": 7.502992393528709e-05, + "loss": 0.0864, + "step": 48550 + }, + { + "epoch": 1.8749758677941233, + "grad_norm": 1.636063575744629, + "learning_rate": 7.50041829156853e-05, + "loss": 0.2673, + "step": 48560 + }, + { + "epoch": 1.8753619830881503, + "grad_norm": 1.0160930156707764, + "learning_rate": 7.497844189608351e-05, + "loss": 0.2759, + "step": 48570 + }, + { + "epoch": 1.8757480983821768, + "grad_norm": 2.007415771484375, + "learning_rate": 7.495270087648171e-05, + "loss": 0.328, + "step": 48580 + }, + { + "epoch": 1.8761342136762038, + "grad_norm": 2.852415084838867, + "learning_rate": 7.492695985687993e-05, + "loss": 0.2762, + "step": 48590 + }, + { + "epoch": 1.8765203289702304, + "grad_norm": 1.5660792589187622, + "learning_rate": 7.490121883727815e-05, + "loss": 0.3858, + "step": 48600 + }, + { + "epoch": 1.8769064442642573, + "grad_norm": 1.022005319595337, + "learning_rate": 7.487547781767637e-05, + "loss": 0.2508, + "step": 48610 + }, + { + "epoch": 1.8772925595582841, + "grad_norm": 0.9244334101676941, + "learning_rate": 7.484973679807458e-05, + "loss": 0.3577, + "step": 48620 + }, + { + "epoch": 1.8776786748523109, + "grad_norm": 0.6603676080703735, + "learning_rate": 7.482399577847279e-05, + "loss": 0.1577, + "step": 48630 + }, + { + "epoch": 1.8780647901463376, + "grad_norm": 1.8789231777191162, + "learning_rate": 7.4798254758871e-05, + "loss": 0.1705, + "step": 48640 + }, + { + "epoch": 1.8784509054403644, + "grad_norm": 2.0536692142486572, + "learning_rate": 7.47725137392692e-05, + "loss": 0.3087, + "step": 48650 + }, + { + "epoch": 1.8788370207343914, + "grad_norm": 1.1918115615844727, + "learning_rate": 7.474677271966743e-05, + "loss": 0.2027, + "step": 48660 + }, + { + "epoch": 1.879223136028418, + "grad_norm": 
0.374523788690567, + "learning_rate": 7.472103170006565e-05, + "loss": 0.193, + "step": 48670 + }, + { + "epoch": 1.879609251322445, + "grad_norm": 1.481998324394226, + "learning_rate": 7.469529068046386e-05, + "loss": 0.246, + "step": 48680 + }, + { + "epoch": 1.8799953666164717, + "grad_norm": 0.9611921310424805, + "learning_rate": 7.466954966086207e-05, + "loss": 0.3557, + "step": 48690 + }, + { + "epoch": 1.8803814819104985, + "grad_norm": 2.2604222297668457, + "learning_rate": 7.464380864126029e-05, + "loss": 0.168, + "step": 48700 + }, + { + "epoch": 1.8807675972045252, + "grad_norm": 0.11883547157049179, + "learning_rate": 7.46180676216585e-05, + "loss": 0.1078, + "step": 48710 + }, + { + "epoch": 1.881153712498552, + "grad_norm": 1.0063214302062988, + "learning_rate": 7.45923266020567e-05, + "loss": 0.2871, + "step": 48720 + }, + { + "epoch": 1.881539827792579, + "grad_norm": 0.32539430260658264, + "learning_rate": 7.456658558245493e-05, + "loss": 0.2641, + "step": 48730 + }, + { + "epoch": 1.8819259430866055, + "grad_norm": 0.05262208729982376, + "learning_rate": 7.454084456285314e-05, + "loss": 0.2313, + "step": 48740 + }, + { + "epoch": 1.8823120583806325, + "grad_norm": 1.5337389707565308, + "learning_rate": 7.451510354325135e-05, + "loss": 0.2202, + "step": 48750 + }, + { + "epoch": 1.8826981736746593, + "grad_norm": 2.8400349617004395, + "learning_rate": 7.448936252364957e-05, + "loss": 0.2719, + "step": 48760 + }, + { + "epoch": 1.883084288968686, + "grad_norm": 1.0065114498138428, + "learning_rate": 7.446362150404778e-05, + "loss": 0.1904, + "step": 48770 + }, + { + "epoch": 1.8834704042627128, + "grad_norm": 3.514146089553833, + "learning_rate": 7.443788048444599e-05, + "loss": 0.4121, + "step": 48780 + }, + { + "epoch": 1.8838565195567396, + "grad_norm": 1.5249392986297607, + "learning_rate": 7.44121394648442e-05, + "loss": 0.3192, + "step": 48790 + }, + { + "epoch": 1.8842426348507666, + "grad_norm": 2.1075022220611572, + "learning_rate": 7.438639844524242e-05, + "loss": 0.257, + "step": 48800 + }, + { + "epoch": 1.8846287501447931, + "grad_norm": 1.368531584739685, + "learning_rate": 7.436065742564063e-05, + "loss": 0.246, + "step": 48810 + }, + { + "epoch": 1.8850148654388201, + "grad_norm": 0.7900007367134094, + "learning_rate": 7.433491640603885e-05, + "loss": 0.2635, + "step": 48820 + }, + { + "epoch": 1.8854009807328467, + "grad_norm": 0.8597519397735596, + "learning_rate": 7.430917538643706e-05, + "loss": 0.3996, + "step": 48830 + }, + { + "epoch": 1.8857870960268737, + "grad_norm": 1.4207600355148315, + "learning_rate": 7.428343436683527e-05, + "loss": 0.0985, + "step": 48840 + }, + { + "epoch": 1.8861732113209004, + "grad_norm": 0.43486616015434265, + "learning_rate": 7.425769334723349e-05, + "loss": 0.0675, + "step": 48850 + }, + { + "epoch": 1.8865593266149272, + "grad_norm": 0.5899690389633179, + "learning_rate": 7.42319523276317e-05, + "loss": 0.3184, + "step": 48860 + }, + { + "epoch": 1.886945441908954, + "grad_norm": 0.3843490481376648, + "learning_rate": 7.420621130802991e-05, + "loss": 0.3173, + "step": 48870 + }, + { + "epoch": 1.8873315572029807, + "grad_norm": 1.5994783639907837, + "learning_rate": 7.418047028842813e-05, + "loss": 0.1545, + "step": 48880 + }, + { + "epoch": 1.8877176724970077, + "grad_norm": 0.4738117456436157, + "learning_rate": 7.415472926882634e-05, + "loss": 0.2209, + "step": 48890 + }, + { + "epoch": 1.8881037877910343, + "grad_norm": 0.8965383768081665, + "learning_rate": 7.412898824922455e-05, + "loss": 0.0784, + "step": 48900 
+ }, + { + "epoch": 1.8884899030850613, + "grad_norm": 0.5122581124305725, + "learning_rate": 7.410324722962278e-05, + "loss": 0.2346, + "step": 48910 + }, + { + "epoch": 1.888876018379088, + "grad_norm": 0.8720236420631409, + "learning_rate": 7.407750621002099e-05, + "loss": 0.1616, + "step": 48920 + }, + { + "epoch": 1.8892621336731148, + "grad_norm": 0.8678966760635376, + "learning_rate": 7.405176519041919e-05, + "loss": 0.2346, + "step": 48930 + }, + { + "epoch": 1.8896482489671416, + "grad_norm": 1.5904022455215454, + "learning_rate": 7.40260241708174e-05, + "loss": 0.2383, + "step": 48940 + }, + { + "epoch": 1.8900343642611683, + "grad_norm": 0.8917766213417053, + "learning_rate": 7.400028315121562e-05, + "loss": 0.0761, + "step": 48950 + }, + { + "epoch": 1.8904204795551953, + "grad_norm": 2.0660765171051025, + "learning_rate": 7.397454213161383e-05, + "loss": 0.1417, + "step": 48960 + }, + { + "epoch": 1.8908065948492219, + "grad_norm": 1.085541009902954, + "learning_rate": 7.394880111201205e-05, + "loss": 0.116, + "step": 48970 + }, + { + "epoch": 1.8911927101432489, + "grad_norm": 0.5287320613861084, + "learning_rate": 7.392306009241027e-05, + "loss": 0.2035, + "step": 48980 + }, + { + "epoch": 1.8915788254372756, + "grad_norm": 0.12603731453418732, + "learning_rate": 7.389731907280849e-05, + "loss": 0.2288, + "step": 48990 + }, + { + "epoch": 1.8919649407313024, + "grad_norm": 1.6294454336166382, + "learning_rate": 7.387157805320669e-05, + "loss": 0.3106, + "step": 49000 + }, + { + "epoch": 1.8923510560253292, + "grad_norm": 0.12466654926538467, + "learning_rate": 7.38458370336049e-05, + "loss": 0.2728, + "step": 49010 + }, + { + "epoch": 1.892737171319356, + "grad_norm": 0.44524601101875305, + "learning_rate": 7.382009601400311e-05, + "loss": 0.1293, + "step": 49020 + }, + { + "epoch": 1.893123286613383, + "grad_norm": 0.7164571285247803, + "learning_rate": 7.379435499440133e-05, + "loss": 0.2331, + "step": 49030 + }, + { + "epoch": 1.8935094019074095, + "grad_norm": 0.6479294300079346, + "learning_rate": 7.376861397479955e-05, + "loss": 0.2716, + "step": 49040 + }, + { + "epoch": 1.8938955172014365, + "grad_norm": 0.5055733323097229, + "learning_rate": 7.374287295519777e-05, + "loss": 0.253, + "step": 49050 + }, + { + "epoch": 1.894281632495463, + "grad_norm": 0.14443042874336243, + "learning_rate": 7.371713193559598e-05, + "loss": 0.211, + "step": 49060 + }, + { + "epoch": 1.89466774778949, + "grad_norm": 0.21934077143669128, + "learning_rate": 7.369139091599418e-05, + "loss": 0.4524, + "step": 49070 + }, + { + "epoch": 1.8950538630835168, + "grad_norm": 0.7723036408424377, + "learning_rate": 7.366564989639239e-05, + "loss": 0.3333, + "step": 49080 + }, + { + "epoch": 1.8954399783775435, + "grad_norm": 0.561475932598114, + "learning_rate": 7.36399088767906e-05, + "loss": 0.2446, + "step": 49090 + }, + { + "epoch": 1.8958260936715703, + "grad_norm": 0.36831262707710266, + "learning_rate": 7.361416785718883e-05, + "loss": 0.3203, + "step": 49100 + }, + { + "epoch": 1.896212208965597, + "grad_norm": 1.3542941808700562, + "learning_rate": 7.358842683758705e-05, + "loss": 0.1927, + "step": 49110 + }, + { + "epoch": 1.896598324259624, + "grad_norm": 0.778232991695404, + "learning_rate": 7.356268581798526e-05, + "loss": 0.1714, + "step": 49120 + }, + { + "epoch": 1.8969844395536506, + "grad_norm": 0.1421511471271515, + "learning_rate": 7.353694479838347e-05, + "loss": 0.2391, + "step": 49130 + }, + { + "epoch": 1.8973705548476776, + "grad_norm": 0.5509871244430542, + 
"learning_rate": 7.351120377878169e-05, + "loss": 0.251, + "step": 49140 + }, + { + "epoch": 1.8977566701417043, + "grad_norm": 1.068138599395752, + "learning_rate": 7.348546275917989e-05, + "loss": 0.3913, + "step": 49150 + }, + { + "epoch": 1.8981427854357311, + "grad_norm": 0.8679132461547852, + "learning_rate": 7.345972173957811e-05, + "loss": 0.2243, + "step": 49160 + }, + { + "epoch": 1.8985289007297579, + "grad_norm": 1.1966150999069214, + "learning_rate": 7.343398071997633e-05, + "loss": 0.1426, + "step": 49170 + }, + { + "epoch": 1.8989150160237847, + "grad_norm": 2.415524482727051, + "learning_rate": 7.340823970037454e-05, + "loss": 0.2193, + "step": 49180 + }, + { + "epoch": 1.8993011313178116, + "grad_norm": 0.3287011384963989, + "learning_rate": 7.338249868077275e-05, + "loss": 0.1409, + "step": 49190 + }, + { + "epoch": 1.8996872466118382, + "grad_norm": 0.47298726439476013, + "learning_rate": 7.335675766117097e-05, + "loss": 0.3534, + "step": 49200 + }, + { + "epoch": 1.9000733619058652, + "grad_norm": 0.25453588366508484, + "learning_rate": 7.333101664156918e-05, + "loss": 0.2457, + "step": 49210 + }, + { + "epoch": 1.9004594771998917, + "grad_norm": 1.78682279586792, + "learning_rate": 7.330527562196738e-05, + "loss": 0.4379, + "step": 49220 + }, + { + "epoch": 1.9008455924939187, + "grad_norm": 0.9073999524116516, + "learning_rate": 7.32795346023656e-05, + "loss": 0.27, + "step": 49230 + }, + { + "epoch": 1.9012317077879455, + "grad_norm": 0.7788071036338806, + "learning_rate": 7.325379358276382e-05, + "loss": 0.1992, + "step": 49240 + }, + { + "epoch": 1.9016178230819722, + "grad_norm": 0.151946023106575, + "learning_rate": 7.322805256316203e-05, + "loss": 0.1526, + "step": 49250 + }, + { + "epoch": 1.9020039383759992, + "grad_norm": 1.655206561088562, + "learning_rate": 7.320231154356025e-05, + "loss": 0.2899, + "step": 49260 + }, + { + "epoch": 1.9023900536700258, + "grad_norm": 0.6136038303375244, + "learning_rate": 7.317657052395846e-05, + "loss": 0.2118, + "step": 49270 + }, + { + "epoch": 1.9027761689640528, + "grad_norm": 2.720750093460083, + "learning_rate": 7.315082950435667e-05, + "loss": 0.316, + "step": 49280 + }, + { + "epoch": 1.9031622842580793, + "grad_norm": 0.7502691149711609, + "learning_rate": 7.312508848475488e-05, + "loss": 0.2368, + "step": 49290 + }, + { + "epoch": 1.9035483995521063, + "grad_norm": 1.0178804397583008, + "learning_rate": 7.30993474651531e-05, + "loss": 0.1193, + "step": 49300 + }, + { + "epoch": 1.903934514846133, + "grad_norm": 1.0318552255630493, + "learning_rate": 7.307360644555131e-05, + "loss": 0.1236, + "step": 49310 + }, + { + "epoch": 1.9043206301401598, + "grad_norm": 0.4833224415779114, + "learning_rate": 7.304786542594952e-05, + "loss": 0.1505, + "step": 49320 + }, + { + "epoch": 1.9047067454341866, + "grad_norm": 0.8481758832931519, + "learning_rate": 7.302212440634774e-05, + "loss": 0.318, + "step": 49330 + }, + { + "epoch": 1.9050928607282134, + "grad_norm": 0.8141576051712036, + "learning_rate": 7.299638338674595e-05, + "loss": 0.2936, + "step": 49340 + }, + { + "epoch": 1.9054789760222404, + "grad_norm": 0.28531432151794434, + "learning_rate": 7.297064236714416e-05, + "loss": 0.0794, + "step": 49350 + }, + { + "epoch": 1.905865091316267, + "grad_norm": 1.2908906936645508, + "learning_rate": 7.294490134754238e-05, + "loss": 0.2753, + "step": 49360 + }, + { + "epoch": 1.906251206610294, + "grad_norm": 0.27395737171173096, + "learning_rate": 7.291916032794059e-05, + "loss": 0.0869, + "step": 49370 + }, + { + "epoch": 
1.9066373219043207, + "grad_norm": 0.5168110728263855, + "learning_rate": 7.28934193083388e-05, + "loss": 0.2033, + "step": 49380 + }, + { + "epoch": 1.9070234371983474, + "grad_norm": 0.384120911359787, + "learning_rate": 7.286767828873702e-05, + "loss": 0.2012, + "step": 49390 + }, + { + "epoch": 1.9074095524923742, + "grad_norm": 0.86110919713974, + "learning_rate": 7.284193726913523e-05, + "loss": 0.254, + "step": 49400 + }, + { + "epoch": 1.907795667786401, + "grad_norm": 0.04631857946515083, + "learning_rate": 7.281619624953346e-05, + "loss": 0.1196, + "step": 49410 + }, + { + "epoch": 1.908181783080428, + "grad_norm": 0.1448020040988922, + "learning_rate": 7.279045522993166e-05, + "loss": 0.2028, + "step": 49420 + }, + { + "epoch": 1.9085678983744545, + "grad_norm": 0.11122003197669983, + "learning_rate": 7.276471421032987e-05, + "loss": 0.2106, + "step": 49430 + }, + { + "epoch": 1.9089540136684815, + "grad_norm": 1.0059682130813599, + "learning_rate": 7.273897319072808e-05, + "loss": 0.2273, + "step": 49440 + }, + { + "epoch": 1.909340128962508, + "grad_norm": 0.5203434824943542, + "learning_rate": 7.27132321711263e-05, + "loss": 0.1357, + "step": 49450 + }, + { + "epoch": 1.909726244256535, + "grad_norm": 0.5997903347015381, + "learning_rate": 7.268749115152451e-05, + "loss": 0.204, + "step": 49460 + }, + { + "epoch": 1.9101123595505618, + "grad_norm": 0.1968044638633728, + "learning_rate": 7.266175013192272e-05, + "loss": 0.0851, + "step": 49470 + }, + { + "epoch": 1.9104984748445886, + "grad_norm": 2.3479251861572266, + "learning_rate": 7.263600911232095e-05, + "loss": 0.2232, + "step": 49480 + }, + { + "epoch": 1.9108845901386153, + "grad_norm": 1.1804332733154297, + "learning_rate": 7.261026809271916e-05, + "loss": 0.1622, + "step": 49490 + }, + { + "epoch": 1.911270705432642, + "grad_norm": 1.4964795112609863, + "learning_rate": 7.258452707311736e-05, + "loss": 0.2701, + "step": 49500 + }, + { + "epoch": 1.911656820726669, + "grad_norm": 2.1407668590545654, + "learning_rate": 7.255878605351558e-05, + "loss": 0.2944, + "step": 49510 + }, + { + "epoch": 1.9120429360206956, + "grad_norm": 0.5795183181762695, + "learning_rate": 7.253304503391379e-05, + "loss": 0.2761, + "step": 49520 + }, + { + "epoch": 1.9124290513147226, + "grad_norm": 0.8205333352088928, + "learning_rate": 7.2507304014312e-05, + "loss": 0.3897, + "step": 49530 + }, + { + "epoch": 1.9128151666087494, + "grad_norm": 4.536723613739014, + "learning_rate": 7.248156299471023e-05, + "loss": 0.3429, + "step": 49540 + }, + { + "epoch": 1.9132012819027762, + "grad_norm": 0.7611442804336548, + "learning_rate": 7.245582197510844e-05, + "loss": 0.3098, + "step": 49550 + }, + { + "epoch": 1.913587397196803, + "grad_norm": 0.7911695241928101, + "learning_rate": 7.243008095550666e-05, + "loss": 0.28, + "step": 49560 + }, + { + "epoch": 1.9139735124908297, + "grad_norm": 0.11662279069423676, + "learning_rate": 7.240433993590486e-05, + "loss": 0.1577, + "step": 49570 + }, + { + "epoch": 1.9143596277848567, + "grad_norm": 2.140101194381714, + "learning_rate": 7.237859891630307e-05, + "loss": 0.3705, + "step": 49580 + }, + { + "epoch": 1.9147457430788832, + "grad_norm": 1.8803783655166626, + "learning_rate": 7.235285789670128e-05, + "loss": 0.3168, + "step": 49590 + }, + { + "epoch": 1.9151318583729102, + "grad_norm": 0.25436753034591675, + "learning_rate": 7.232711687709951e-05, + "loss": 0.3037, + "step": 49600 + }, + { + "epoch": 1.915517973666937, + "grad_norm": 1.1993011236190796, + "learning_rate": 7.230137585749772e-05, + 
"loss": 0.2218, + "step": 49610 + }, + { + "epoch": 1.9159040889609638, + "grad_norm": 0.4995238780975342, + "learning_rate": 7.227563483789594e-05, + "loss": 0.1821, + "step": 49620 + }, + { + "epoch": 1.9162902042549905, + "grad_norm": 1.8584142923355103, + "learning_rate": 7.224989381829415e-05, + "loss": 0.2694, + "step": 49630 + }, + { + "epoch": 1.9166763195490173, + "grad_norm": 0.5736681818962097, + "learning_rate": 7.222415279869235e-05, + "loss": 0.32, + "step": 49640 + }, + { + "epoch": 1.9170624348430443, + "grad_norm": 0.2963573634624481, + "learning_rate": 7.219841177909056e-05, + "loss": 0.1986, + "step": 49650 + }, + { + "epoch": 1.9174485501370708, + "grad_norm": 0.6248067617416382, + "learning_rate": 7.217267075948879e-05, + "loss": 0.2461, + "step": 49660 + }, + { + "epoch": 1.9178346654310978, + "grad_norm": 1.037135362625122, + "learning_rate": 7.2146929739887e-05, + "loss": 0.176, + "step": 49670 + }, + { + "epoch": 1.9182207807251244, + "grad_norm": 7.093240261077881, + "learning_rate": 7.212118872028522e-05, + "loss": 0.263, + "step": 49680 + }, + { + "epoch": 1.9186068960191514, + "grad_norm": 0.9378503561019897, + "learning_rate": 7.209544770068343e-05, + "loss": 0.4105, + "step": 49690 + }, + { + "epoch": 1.9189930113131781, + "grad_norm": 2.801452875137329, + "learning_rate": 7.206970668108164e-05, + "loss": 0.1693, + "step": 49700 + }, + { + "epoch": 1.9193791266072049, + "grad_norm": 0.6257158517837524, + "learning_rate": 7.204396566147984e-05, + "loss": 0.2095, + "step": 49710 + }, + { + "epoch": 1.9197652419012317, + "grad_norm": 1.0623574256896973, + "learning_rate": 7.201822464187807e-05, + "loss": 0.2335, + "step": 49720 + }, + { + "epoch": 1.9201513571952584, + "grad_norm": 1.958388328552246, + "learning_rate": 7.199248362227628e-05, + "loss": 0.2545, + "step": 49730 + }, + { + "epoch": 1.9205374724892854, + "grad_norm": 1.790643334388733, + "learning_rate": 7.19667426026745e-05, + "loss": 0.2342, + "step": 49740 + }, + { + "epoch": 1.920923587783312, + "grad_norm": 0.19096235930919647, + "learning_rate": 7.194100158307271e-05, + "loss": 0.1812, + "step": 49750 + }, + { + "epoch": 1.921309703077339, + "grad_norm": 1.8985120058059692, + "learning_rate": 7.191526056347092e-05, + "loss": 0.294, + "step": 49760 + }, + { + "epoch": 1.9216958183713657, + "grad_norm": 2.9629859924316406, + "learning_rate": 7.188951954386914e-05, + "loss": 0.3423, + "step": 49770 + }, + { + "epoch": 1.9220819336653925, + "grad_norm": 1.475262999534607, + "learning_rate": 7.186377852426735e-05, + "loss": 0.1478, + "step": 49780 + }, + { + "epoch": 1.9224680489594193, + "grad_norm": 1.5917531251907349, + "learning_rate": 7.183803750466556e-05, + "loss": 0.1964, + "step": 49790 + }, + { + "epoch": 1.922854164253446, + "grad_norm": 0.20593854784965515, + "learning_rate": 7.181229648506378e-05, + "loss": 0.1775, + "step": 49800 + }, + { + "epoch": 1.923240279547473, + "grad_norm": 0.5509443879127502, + "learning_rate": 7.178655546546199e-05, + "loss": 0.2411, + "step": 49810 + }, + { + "epoch": 1.9236263948414996, + "grad_norm": 0.8016191720962524, + "learning_rate": 7.17608144458602e-05, + "loss": 0.0837, + "step": 49820 + }, + { + "epoch": 1.9240125101355265, + "grad_norm": 0.8763396143913269, + "learning_rate": 7.173507342625842e-05, + "loss": 0.1932, + "step": 49830 + }, + { + "epoch": 1.9243986254295533, + "grad_norm": 0.25457191467285156, + "learning_rate": 7.170933240665663e-05, + "loss": 0.0869, + "step": 49840 + }, + { + "epoch": 1.92478474072358, + "grad_norm": 
2.61993145942688, + "learning_rate": 7.168359138705484e-05, + "loss": 0.1839, + "step": 49850 + }, + { + "epoch": 1.9251708560176068, + "grad_norm": 2.2821877002716064, + "learning_rate": 7.165785036745306e-05, + "loss": 0.3863, + "step": 49860 + }, + { + "epoch": 1.9255569713116336, + "grad_norm": 1.4072798490524292, + "learning_rate": 7.163210934785127e-05, + "loss": 0.2477, + "step": 49870 + }, + { + "epoch": 1.9259430866056606, + "grad_norm": 0.8962070941925049, + "learning_rate": 7.160636832824948e-05, + "loss": 0.2936, + "step": 49880 + }, + { + "epoch": 1.9263292018996871, + "grad_norm": 3.0801923274993896, + "learning_rate": 7.15806273086477e-05, + "loss": 0.2368, + "step": 49890 + }, + { + "epoch": 1.9267153171937141, + "grad_norm": 0.5756659507751465, + "learning_rate": 7.155488628904591e-05, + "loss": 0.2774, + "step": 49900 + }, + { + "epoch": 1.9271014324877407, + "grad_norm": 2.669837236404419, + "learning_rate": 7.152914526944414e-05, + "loss": 0.2742, + "step": 49910 + }, + { + "epoch": 1.9274875477817677, + "grad_norm": 1.3064197301864624, + "learning_rate": 7.150340424984234e-05, + "loss": 0.3908, + "step": 49920 + }, + { + "epoch": 1.9278736630757944, + "grad_norm": 1.54086434841156, + "learning_rate": 7.147766323024055e-05, + "loss": 0.2302, + "step": 49930 + }, + { + "epoch": 1.9282597783698212, + "grad_norm": 2.6577224731445312, + "learning_rate": 7.145192221063876e-05, + "loss": 0.2667, + "step": 49940 + }, + { + "epoch": 1.928645893663848, + "grad_norm": 1.0387258529663086, + "learning_rate": 7.142618119103698e-05, + "loss": 0.2418, + "step": 49950 + }, + { + "epoch": 1.9290320089578747, + "grad_norm": 0.592282772064209, + "learning_rate": 7.140044017143519e-05, + "loss": 0.2499, + "step": 49960 + }, + { + "epoch": 1.9294181242519017, + "grad_norm": 0.364241361618042, + "learning_rate": 7.137469915183342e-05, + "loss": 0.2056, + "step": 49970 + }, + { + "epoch": 1.9298042395459283, + "grad_norm": 2.9593188762664795, + "learning_rate": 7.134895813223163e-05, + "loss": 0.2514, + "step": 49980 + }, + { + "epoch": 1.9301903548399553, + "grad_norm": 1.9135371446609497, + "learning_rate": 7.132321711262983e-05, + "loss": 0.124, + "step": 49990 + }, + { + "epoch": 1.930576470133982, + "grad_norm": 0.5927162170410156, + "learning_rate": 7.129747609302804e-05, + "loss": 0.2257, + "step": 50000 + }, + { + "epoch": 1.9309625854280088, + "grad_norm": 1.7300679683685303, + "learning_rate": 7.127173507342626e-05, + "loss": 0.2302, + "step": 50010 + }, + { + "epoch": 1.9313487007220356, + "grad_norm": 1.48344087600708, + "learning_rate": 7.124599405382447e-05, + "loss": 0.3121, + "step": 50020 + }, + { + "epoch": 1.9317348160160623, + "grad_norm": 1.275780439376831, + "learning_rate": 7.122025303422268e-05, + "loss": 0.2202, + "step": 50030 + }, + { + "epoch": 1.9321209313100893, + "grad_norm": 1.2625102996826172, + "learning_rate": 7.119451201462091e-05, + "loss": 0.1819, + "step": 50040 + }, + { + "epoch": 1.9325070466041159, + "grad_norm": 2.6306488513946533, + "learning_rate": 7.116877099501912e-05, + "loss": 0.3846, + "step": 50050 + }, + { + "epoch": 1.9328931618981429, + "grad_norm": 2.507249355316162, + "learning_rate": 7.114302997541732e-05, + "loss": 0.3167, + "step": 50060 + }, + { + "epoch": 1.9332792771921696, + "grad_norm": 0.6122744679450989, + "learning_rate": 7.111728895581554e-05, + "loss": 0.2486, + "step": 50070 + }, + { + "epoch": 1.9336653924861964, + "grad_norm": 0.9299182295799255, + "learning_rate": 7.109154793621375e-05, + "loss": 0.2156, + "step": 50080 
+ }, + { + "epoch": 1.9340515077802232, + "grad_norm": 2.0560238361358643, + "learning_rate": 7.106580691661196e-05, + "loss": 0.1319, + "step": 50090 + }, + { + "epoch": 1.93443762307425, + "grad_norm": 0.9602612257003784, + "learning_rate": 7.104006589701019e-05, + "loss": 0.1433, + "step": 50100 + }, + { + "epoch": 1.934823738368277, + "grad_norm": 1.2760334014892578, + "learning_rate": 7.10143248774084e-05, + "loss": 0.2019, + "step": 50110 + }, + { + "epoch": 1.9352098536623035, + "grad_norm": 0.27404239773750305, + "learning_rate": 7.098858385780662e-05, + "loss": 0.5303, + "step": 50120 + }, + { + "epoch": 1.9355959689563305, + "grad_norm": 0.6597281694412231, + "learning_rate": 7.096284283820483e-05, + "loss": 0.2328, + "step": 50130 + }, + { + "epoch": 1.935982084250357, + "grad_norm": 3.6417131423950195, + "learning_rate": 7.093710181860303e-05, + "loss": 0.343, + "step": 50140 + }, + { + "epoch": 1.936368199544384, + "grad_norm": 0.9950355887413025, + "learning_rate": 7.091136079900124e-05, + "loss": 0.1877, + "step": 50150 + }, + { + "epoch": 1.9367543148384108, + "grad_norm": 0.3848172426223755, + "learning_rate": 7.088561977939947e-05, + "loss": 0.1541, + "step": 50160 + }, + { + "epoch": 1.9371404301324375, + "grad_norm": 0.6884573698043823, + "learning_rate": 7.085987875979768e-05, + "loss": 0.1744, + "step": 50170 + }, + { + "epoch": 1.9375265454264643, + "grad_norm": 1.5233834981918335, + "learning_rate": 7.08341377401959e-05, + "loss": 0.3595, + "step": 50180 + }, + { + "epoch": 1.937912660720491, + "grad_norm": 0.11241710186004639, + "learning_rate": 7.080839672059411e-05, + "loss": 0.1313, + "step": 50190 + }, + { + "epoch": 1.938298776014518, + "grad_norm": 1.3029096126556396, + "learning_rate": 7.078265570099232e-05, + "loss": 0.3459, + "step": 50200 + }, + { + "epoch": 1.9386848913085446, + "grad_norm": 1.1150782108306885, + "learning_rate": 7.075691468139052e-05, + "loss": 0.2662, + "step": 50210 + }, + { + "epoch": 1.9390710066025716, + "grad_norm": 0.09661692380905151, + "learning_rate": 7.073117366178875e-05, + "loss": 0.2377, + "step": 50220 + }, + { + "epoch": 1.9394571218965984, + "grad_norm": 2.488790512084961, + "learning_rate": 7.070543264218696e-05, + "loss": 0.2677, + "step": 50230 + }, + { + "epoch": 1.9398432371906251, + "grad_norm": 0.407704621553421, + "learning_rate": 7.067969162258518e-05, + "loss": 0.1208, + "step": 50240 + }, + { + "epoch": 1.940229352484652, + "grad_norm": 0.4769364297389984, + "learning_rate": 7.065395060298339e-05, + "loss": 0.1748, + "step": 50250 + }, + { + "epoch": 1.9406154677786787, + "grad_norm": 2.900118112564087, + "learning_rate": 7.06282095833816e-05, + "loss": 0.1948, + "step": 50260 + }, + { + "epoch": 1.9410015830727056, + "grad_norm": 1.1749001741409302, + "learning_rate": 7.060246856377982e-05, + "loss": 0.1688, + "step": 50270 + }, + { + "epoch": 1.9413876983667322, + "grad_norm": 0.5052315592765808, + "learning_rate": 7.057672754417802e-05, + "loss": 0.4284, + "step": 50280 + }, + { + "epoch": 1.9417738136607592, + "grad_norm": 0.614936113357544, + "learning_rate": 7.055098652457624e-05, + "loss": 0.2109, + "step": 50290 + }, + { + "epoch": 1.942159928954786, + "grad_norm": 1.9683163166046143, + "learning_rate": 7.052524550497446e-05, + "loss": 0.1518, + "step": 50300 + }, + { + "epoch": 1.9425460442488127, + "grad_norm": 0.8502413630485535, + "learning_rate": 7.049950448537267e-05, + "loss": 0.281, + "step": 50310 + }, + { + "epoch": 1.9429321595428395, + "grad_norm": 2.8081016540527344, + "learning_rate": 
7.047376346577088e-05, + "loss": 0.1802, + "step": 50320 + }, + { + "epoch": 1.9433182748368663, + "grad_norm": 1.9322141408920288, + "learning_rate": 7.04480224461691e-05, + "loss": 0.2218, + "step": 50330 + }, + { + "epoch": 1.9437043901308932, + "grad_norm": 0.8338032960891724, + "learning_rate": 7.042228142656731e-05, + "loss": 0.1836, + "step": 50340 + }, + { + "epoch": 1.9440905054249198, + "grad_norm": 1.232925295829773, + "learning_rate": 7.039654040696552e-05, + "loss": 0.2025, + "step": 50350 + }, + { + "epoch": 1.9444766207189468, + "grad_norm": 0.5655641555786133, + "learning_rate": 7.037079938736374e-05, + "loss": 0.2061, + "step": 50360 + }, + { + "epoch": 1.9448627360129733, + "grad_norm": 1.398917317390442, + "learning_rate": 7.034505836776195e-05, + "loss": 0.3471, + "step": 50370 + }, + { + "epoch": 1.9452488513070003, + "grad_norm": 0.8988509178161621, + "learning_rate": 7.031931734816016e-05, + "loss": 0.3504, + "step": 50380 + }, + { + "epoch": 1.945634966601027, + "grad_norm": 0.10333681106567383, + "learning_rate": 7.029357632855838e-05, + "loss": 0.352, + "step": 50390 + }, + { + "epoch": 1.9460210818950539, + "grad_norm": 1.3678967952728271, + "learning_rate": 7.026783530895659e-05, + "loss": 0.1763, + "step": 50400 + }, + { + "epoch": 1.9464071971890806, + "grad_norm": 3.1605618000030518, + "learning_rate": 7.02420942893548e-05, + "loss": 0.4636, + "step": 50410 + }, + { + "epoch": 1.9467933124831074, + "grad_norm": 0.02575235441327095, + "learning_rate": 7.021635326975302e-05, + "loss": 0.2352, + "step": 50420 + }, + { + "epoch": 1.9471794277771344, + "grad_norm": 1.789573311805725, + "learning_rate": 7.019061225015123e-05, + "loss": 0.3358, + "step": 50430 + }, + { + "epoch": 1.947565543071161, + "grad_norm": 1.0206273794174194, + "learning_rate": 7.016487123054944e-05, + "loss": 0.1509, + "step": 50440 + }, + { + "epoch": 1.947951658365188, + "grad_norm": 3.0096218585968018, + "learning_rate": 7.013913021094766e-05, + "loss": 0.3393, + "step": 50450 + }, + { + "epoch": 1.9483377736592147, + "grad_norm": 0.7118330001831055, + "learning_rate": 7.011338919134587e-05, + "loss": 0.2159, + "step": 50460 + }, + { + "epoch": 1.9487238889532414, + "grad_norm": 0.2753995954990387, + "learning_rate": 7.00876481717441e-05, + "loss": 0.2659, + "step": 50470 + }, + { + "epoch": 1.9491100042472682, + "grad_norm": 2.5077409744262695, + "learning_rate": 7.006190715214231e-05, + "loss": 0.3032, + "step": 50480 + }, + { + "epoch": 1.949496119541295, + "grad_norm": 0.3444388806819916, + "learning_rate": 7.003616613254051e-05, + "loss": 0.2625, + "step": 50490 + }, + { + "epoch": 1.949882234835322, + "grad_norm": 1.1488401889801025, + "learning_rate": 7.001042511293872e-05, + "loss": 0.2443, + "step": 50500 + }, + { + "epoch": 1.9502683501293485, + "grad_norm": 0.6464126706123352, + "learning_rate": 6.998468409333694e-05, + "loss": 0.1268, + "step": 50510 + }, + { + "epoch": 1.9506544654233755, + "grad_norm": 0.6716893911361694, + "learning_rate": 6.995894307373515e-05, + "loss": 0.1346, + "step": 50520 + }, + { + "epoch": 1.951040580717402, + "grad_norm": 1.7599986791610718, + "learning_rate": 6.993320205413336e-05, + "loss": 0.2365, + "step": 50530 + }, + { + "epoch": 1.951426696011429, + "grad_norm": 0.7483705282211304, + "learning_rate": 6.990746103453159e-05, + "loss": 0.326, + "step": 50540 + }, + { + "epoch": 1.9518128113054558, + "grad_norm": 1.9978541135787964, + "learning_rate": 6.98817200149298e-05, + "loss": 0.2166, + "step": 50550 + }, + { + "epoch": 
1.9521989265994826, + "grad_norm": 0.50310218334198, + "learning_rate": 6.9855978995328e-05, + "loss": 0.0868, + "step": 50560 + }, + { + "epoch": 1.9525850418935096, + "grad_norm": 0.4358873963356018, + "learning_rate": 6.983023797572622e-05, + "loss": 0.2483, + "step": 50570 + }, + { + "epoch": 1.952971157187536, + "grad_norm": 1.1612942218780518, + "learning_rate": 6.980449695612443e-05, + "loss": 0.1993, + "step": 50580 + }, + { + "epoch": 1.953357272481563, + "grad_norm": 2.2015364170074463, + "learning_rate": 6.977875593652264e-05, + "loss": 0.3102, + "step": 50590 + }, + { + "epoch": 1.9537433877755896, + "grad_norm": 0.2711980938911438, + "learning_rate": 6.975301491692087e-05, + "loss": 0.1243, + "step": 50600 + }, + { + "epoch": 1.9541295030696166, + "grad_norm": 1.2215690612792969, + "learning_rate": 6.972727389731908e-05, + "loss": 0.1812, + "step": 50610 + }, + { + "epoch": 1.9545156183636434, + "grad_norm": 1.5580382347106934, + "learning_rate": 6.97015328777173e-05, + "loss": 0.2582, + "step": 50620 + }, + { + "epoch": 1.9549017336576702, + "grad_norm": 4.351020336151123, + "learning_rate": 6.96757918581155e-05, + "loss": 0.2451, + "step": 50630 + }, + { + "epoch": 1.955287848951697, + "grad_norm": 0.8245100975036621, + "learning_rate": 6.965005083851371e-05, + "loss": 0.0861, + "step": 50640 + }, + { + "epoch": 1.9556739642457237, + "grad_norm": 3.2745001316070557, + "learning_rate": 6.962430981891192e-05, + "loss": 0.3108, + "step": 50650 + }, + { + "epoch": 1.9560600795397507, + "grad_norm": 1.2234485149383545, + "learning_rate": 6.959856879931015e-05, + "loss": 0.2457, + "step": 50660 + }, + { + "epoch": 1.9564461948337772, + "grad_norm": 0.8801009058952332, + "learning_rate": 6.957282777970836e-05, + "loss": 0.1758, + "step": 50670 + }, + { + "epoch": 1.9568323101278042, + "grad_norm": 1.0892245769500732, + "learning_rate": 6.954708676010658e-05, + "loss": 0.217, + "step": 50680 + }, + { + "epoch": 1.957218425421831, + "grad_norm": 0.47810041904449463, + "learning_rate": 6.952134574050479e-05, + "loss": 0.1797, + "step": 50690 + }, + { + "epoch": 1.9576045407158578, + "grad_norm": 2.988180160522461, + "learning_rate": 6.9495604720903e-05, + "loss": 0.2899, + "step": 50700 + }, + { + "epoch": 1.9579906560098845, + "grad_norm": 1.7291783094406128, + "learning_rate": 6.94698637013012e-05, + "loss": 0.214, + "step": 50710 + }, + { + "epoch": 1.9583767713039113, + "grad_norm": 1.1876074075698853, + "learning_rate": 6.944412268169943e-05, + "loss": 0.1801, + "step": 50720 + }, + { + "epoch": 1.9587628865979383, + "grad_norm": 1.5710748434066772, + "learning_rate": 6.941838166209764e-05, + "loss": 0.393, + "step": 50730 + }, + { + "epoch": 1.9591490018919648, + "grad_norm": 1.422935128211975, + "learning_rate": 6.939264064249586e-05, + "loss": 0.2637, + "step": 50740 + }, + { + "epoch": 1.9595351171859918, + "grad_norm": 0.26971349120140076, + "learning_rate": 6.936689962289407e-05, + "loss": 0.2616, + "step": 50750 + }, + { + "epoch": 1.9599212324800184, + "grad_norm": 0.02176385000348091, + "learning_rate": 6.934115860329228e-05, + "loss": 0.2458, + "step": 50760 + }, + { + "epoch": 1.9603073477740454, + "grad_norm": 1.587498664855957, + "learning_rate": 6.93154175836905e-05, + "loss": 0.2505, + "step": 50770 + }, + { + "epoch": 1.9606934630680721, + "grad_norm": 0.7178042531013489, + "learning_rate": 6.92896765640887e-05, + "loss": 0.1028, + "step": 50780 + }, + { + "epoch": 1.961079578362099, + "grad_norm": 0.4361552894115448, + "learning_rate": 6.926393554448692e-05, + 
"loss": 0.1327, + "step": 50790 + }, + { + "epoch": 1.9614656936561257, + "grad_norm": 0.1252552568912506, + "learning_rate": 6.923819452488514e-05, + "loss": 0.2598, + "step": 50800 + }, + { + "epoch": 1.9618518089501524, + "grad_norm": 1.0288604497909546, + "learning_rate": 6.921245350528335e-05, + "loss": 0.1415, + "step": 50810 + }, + { + "epoch": 1.9622379242441794, + "grad_norm": 1.633277416229248, + "learning_rate": 6.918671248568156e-05, + "loss": 0.3346, + "step": 50820 + }, + { + "epoch": 1.962624039538206, + "grad_norm": 1.034558653831482, + "learning_rate": 6.916097146607978e-05, + "loss": 0.3273, + "step": 50830 + }, + { + "epoch": 1.963010154832233, + "grad_norm": 0.4945419132709503, + "learning_rate": 6.913523044647799e-05, + "loss": 0.2027, + "step": 50840 + }, + { + "epoch": 1.9633962701262597, + "grad_norm": 0.49989691376686096, + "learning_rate": 6.91094894268762e-05, + "loss": 0.3082, + "step": 50850 + }, + { + "epoch": 1.9637823854202865, + "grad_norm": 1.8456840515136719, + "learning_rate": 6.908374840727442e-05, + "loss": 0.102, + "step": 50860 + }, + { + "epoch": 1.9641685007143133, + "grad_norm": 1.256460428237915, + "learning_rate": 6.905800738767263e-05, + "loss": 0.1375, + "step": 50870 + }, + { + "epoch": 1.96455461600834, + "grad_norm": 2.0318634510040283, + "learning_rate": 6.903226636807084e-05, + "loss": 0.2662, + "step": 50880 + }, + { + "epoch": 1.964940731302367, + "grad_norm": 0.6381733417510986, + "learning_rate": 6.900652534846906e-05, + "loss": 0.176, + "step": 50890 + }, + { + "epoch": 1.9653268465963936, + "grad_norm": 0.2860821485519409, + "learning_rate": 6.898078432886727e-05, + "loss": 0.1307, + "step": 50900 + }, + { + "epoch": 1.9657129618904206, + "grad_norm": 1.2890506982803345, + "learning_rate": 6.895504330926548e-05, + "loss": 0.1305, + "step": 50910 + }, + { + "epoch": 1.9660990771844473, + "grad_norm": 0.13067105412483215, + "learning_rate": 6.89293022896637e-05, + "loss": 0.1777, + "step": 50920 + }, + { + "epoch": 1.966485192478474, + "grad_norm": 0.7632800340652466, + "learning_rate": 6.890356127006191e-05, + "loss": 0.1248, + "step": 50930 + }, + { + "epoch": 1.9668713077725009, + "grad_norm": 0.10640933364629745, + "learning_rate": 6.887782025046012e-05, + "loss": 0.2368, + "step": 50940 + }, + { + "epoch": 1.9672574230665276, + "grad_norm": 0.8060460686683655, + "learning_rate": 6.885207923085834e-05, + "loss": 0.1205, + "step": 50950 + }, + { + "epoch": 1.9676435383605546, + "grad_norm": 0.028001902624964714, + "learning_rate": 6.882633821125655e-05, + "loss": 0.1134, + "step": 50960 + }, + { + "epoch": 1.9680296536545812, + "grad_norm": 1.236852765083313, + "learning_rate": 6.880059719165478e-05, + "loss": 0.3189, + "step": 50970 + }, + { + "epoch": 1.9684157689486081, + "grad_norm": 2.090635299682617, + "learning_rate": 6.877485617205298e-05, + "loss": 0.265, + "step": 50980 + }, + { + "epoch": 1.9688018842426347, + "grad_norm": 1.046667218208313, + "learning_rate": 6.874911515245119e-05, + "loss": 0.2501, + "step": 50990 + }, + { + "epoch": 1.9691879995366617, + "grad_norm": 2.0770325660705566, + "learning_rate": 6.87233741328494e-05, + "loss": 0.2136, + "step": 51000 + }, + { + "epoch": 1.9695741148306884, + "grad_norm": 0.7211881279945374, + "learning_rate": 6.869763311324762e-05, + "loss": 0.2434, + "step": 51010 + }, + { + "epoch": 1.9699602301247152, + "grad_norm": 0.8447550535202026, + "learning_rate": 6.867189209364583e-05, + "loss": 0.3904, + "step": 51020 + }, + { + "epoch": 1.970346345418742, + "grad_norm": 
0.2205502986907959, + "learning_rate": 6.864615107404406e-05, + "loss": 0.1804, + "step": 51030 + }, + { + "epoch": 1.9707324607127688, + "grad_norm": 0.5322203636169434, + "learning_rate": 6.862041005444227e-05, + "loss": 0.2106, + "step": 51040 + }, + { + "epoch": 1.9711185760067957, + "grad_norm": 3.1091675758361816, + "learning_rate": 6.859466903484048e-05, + "loss": 0.3203, + "step": 51050 + }, + { + "epoch": 1.9715046913008223, + "grad_norm": 2.670405864715576, + "learning_rate": 6.856892801523868e-05, + "loss": 0.2011, + "step": 51060 + }, + { + "epoch": 1.9718908065948493, + "grad_norm": 1.0524908304214478, + "learning_rate": 6.85431869956369e-05, + "loss": 0.1144, + "step": 51070 + }, + { + "epoch": 1.972276921888876, + "grad_norm": 3.9005608558654785, + "learning_rate": 6.851744597603511e-05, + "loss": 0.3813, + "step": 51080 + }, + { + "epoch": 1.9726630371829028, + "grad_norm": 1.1112456321716309, + "learning_rate": 6.849170495643332e-05, + "loss": 0.1755, + "step": 51090 + }, + { + "epoch": 1.9730491524769296, + "grad_norm": 0.5004397630691528, + "learning_rate": 6.846596393683155e-05, + "loss": 0.17, + "step": 51100 + }, + { + "epoch": 1.9734352677709563, + "grad_norm": 0.7849172353744507, + "learning_rate": 6.844022291722976e-05, + "loss": 0.1669, + "step": 51110 + }, + { + "epoch": 1.9738213830649833, + "grad_norm": 0.7871361970901489, + "learning_rate": 6.841448189762798e-05, + "loss": 0.1466, + "step": 51120 + }, + { + "epoch": 1.9742074983590099, + "grad_norm": 1.1090983152389526, + "learning_rate": 6.838874087802617e-05, + "loss": 0.117, + "step": 51130 + }, + { + "epoch": 1.9745936136530369, + "grad_norm": 0.8283473253250122, + "learning_rate": 6.836299985842439e-05, + "loss": 0.1792, + "step": 51140 + }, + { + "epoch": 1.9749797289470636, + "grad_norm": 1.0861999988555908, + "learning_rate": 6.83372588388226e-05, + "loss": 0.1357, + "step": 51150 + }, + { + "epoch": 1.9753658442410904, + "grad_norm": 1.0975921154022217, + "learning_rate": 6.831151781922083e-05, + "loss": 0.2618, + "step": 51160 + }, + { + "epoch": 1.9757519595351172, + "grad_norm": 1.579583764076233, + "learning_rate": 6.828577679961904e-05, + "loss": 0.3458, + "step": 51170 + }, + { + "epoch": 1.976138074829144, + "grad_norm": 0.48704493045806885, + "learning_rate": 6.826003578001726e-05, + "loss": 0.1892, + "step": 51180 + }, + { + "epoch": 1.976524190123171, + "grad_norm": 0.4196261465549469, + "learning_rate": 6.823429476041547e-05, + "loss": 0.2072, + "step": 51190 + }, + { + "epoch": 1.9769103054171975, + "grad_norm": 0.5368069410324097, + "learning_rate": 6.820855374081367e-05, + "loss": 0.1758, + "step": 51200 + }, + { + "epoch": 1.9772964207112245, + "grad_norm": 3.1401877403259277, + "learning_rate": 6.818281272121188e-05, + "loss": 0.2463, + "step": 51210 + }, + { + "epoch": 1.977682536005251, + "grad_norm": 2.333087682723999, + "learning_rate": 6.815707170161011e-05, + "loss": 0.1826, + "step": 51220 + }, + { + "epoch": 1.978068651299278, + "grad_norm": 0.4700605869293213, + "learning_rate": 6.813133068200832e-05, + "loss": 0.1761, + "step": 51230 + }, + { + "epoch": 1.9784547665933048, + "grad_norm": 0.23108141124248505, + "learning_rate": 6.810558966240653e-05, + "loss": 0.3299, + "step": 51240 + }, + { + "epoch": 1.9788408818873315, + "grad_norm": 1.1515973806381226, + "learning_rate": 6.807984864280475e-05, + "loss": 0.2352, + "step": 51250 + }, + { + "epoch": 1.9792269971813583, + "grad_norm": 3.1671624183654785, + "learning_rate": 6.805410762320296e-05, + "loss": 0.18, + "step": 
51260 + }, + { + "epoch": 1.979613112475385, + "grad_norm": 1.9045623540878296, + "learning_rate": 6.802836660360116e-05, + "loss": 0.1498, + "step": 51270 + }, + { + "epoch": 1.979999227769412, + "grad_norm": 3.6761367321014404, + "learning_rate": 6.800262558399939e-05, + "loss": 0.3242, + "step": 51280 + }, + { + "epoch": 1.9803853430634386, + "grad_norm": 0.8505986332893372, + "learning_rate": 6.79768845643976e-05, + "loss": 0.1563, + "step": 51290 + }, + { + "epoch": 1.9807714583574656, + "grad_norm": 0.5191363096237183, + "learning_rate": 6.795114354479581e-05, + "loss": 0.1699, + "step": 51300 + }, + { + "epoch": 1.9811575736514924, + "grad_norm": 1.0205559730529785, + "learning_rate": 6.792540252519403e-05, + "loss": 0.1785, + "step": 51310 + }, + { + "epoch": 1.9815436889455191, + "grad_norm": 0.929551899433136, + "learning_rate": 6.789966150559224e-05, + "loss": 0.2618, + "step": 51320 + }, + { + "epoch": 1.981929804239546, + "grad_norm": 0.3799718916416168, + "learning_rate": 6.787392048599045e-05, + "loss": 0.2574, + "step": 51330 + }, + { + "epoch": 1.9823159195335727, + "grad_norm": 0.8543326258659363, + "learning_rate": 6.784817946638867e-05, + "loss": 0.2277, + "step": 51340 + }, + { + "epoch": 1.9827020348275997, + "grad_norm": 1.2967779636383057, + "learning_rate": 6.782243844678688e-05, + "loss": 0.1693, + "step": 51350 + }, + { + "epoch": 1.9830881501216262, + "grad_norm": 0.9887800216674805, + "learning_rate": 6.77966974271851e-05, + "loss": 0.151, + "step": 51360 + }, + { + "epoch": 1.9834742654156532, + "grad_norm": 1.197924256324768, + "learning_rate": 6.777095640758331e-05, + "loss": 0.2184, + "step": 51370 + }, + { + "epoch": 1.98386038070968, + "grad_norm": 1.3980039358139038, + "learning_rate": 6.774521538798152e-05, + "loss": 0.3999, + "step": 51380 + }, + { + "epoch": 1.9842464960037067, + "grad_norm": 2.041222095489502, + "learning_rate": 6.771947436837973e-05, + "loss": 0.203, + "step": 51390 + }, + { + "epoch": 1.9846326112977335, + "grad_norm": 1.3461644649505615, + "learning_rate": 6.769373334877795e-05, + "loss": 0.2735, + "step": 51400 + }, + { + "epoch": 1.9850187265917603, + "grad_norm": 1.046863079071045, + "learning_rate": 6.766799232917616e-05, + "loss": 0.3281, + "step": 51410 + }, + { + "epoch": 1.9854048418857873, + "grad_norm": 1.3338502645492554, + "learning_rate": 6.764225130957437e-05, + "loss": 0.2538, + "step": 51420 + }, + { + "epoch": 1.9857909571798138, + "grad_norm": 1.3392490148544312, + "learning_rate": 6.761651028997259e-05, + "loss": 0.2737, + "step": 51430 + }, + { + "epoch": 1.9861770724738408, + "grad_norm": 1.5787503719329834, + "learning_rate": 6.75907692703708e-05, + "loss": 0.185, + "step": 51440 + }, + { + "epoch": 1.9865631877678673, + "grad_norm": 0.5758817791938782, + "learning_rate": 6.756502825076901e-05, + "loss": 0.1879, + "step": 51450 + }, + { + "epoch": 1.9869493030618943, + "grad_norm": 1.6144100427627563, + "learning_rate": 6.753928723116723e-05, + "loss": 0.2082, + "step": 51460 + }, + { + "epoch": 1.987335418355921, + "grad_norm": 0.7562010884284973, + "learning_rate": 6.751354621156545e-05, + "loss": 0.1859, + "step": 51470 + }, + { + "epoch": 1.9877215336499479, + "grad_norm": 1.3656364679336548, + "learning_rate": 6.748780519196365e-05, + "loss": 0.1591, + "step": 51480 + }, + { + "epoch": 1.9881076489439746, + "grad_norm": 2.7086987495422363, + "learning_rate": 6.746206417236187e-05, + "loss": 0.2364, + "step": 51490 + }, + { + "epoch": 1.9884937642380014, + "grad_norm": 2.2941534519195557, + 
"learning_rate": 6.743632315276008e-05, + "loss": 0.2207, + "step": 51500 + }, + { + "epoch": 1.9888798795320284, + "grad_norm": 1.2723060846328735, + "learning_rate": 6.74105821331583e-05, + "loss": 0.2617, + "step": 51510 + }, + { + "epoch": 1.989265994826055, + "grad_norm": 2.308548927307129, + "learning_rate": 6.738484111355651e-05, + "loss": 0.1709, + "step": 51520 + }, + { + "epoch": 1.989652110120082, + "grad_norm": 0.7583028078079224, + "learning_rate": 6.735910009395473e-05, + "loss": 0.2998, + "step": 51530 + }, + { + "epoch": 1.9900382254141087, + "grad_norm": 2.3491339683532715, + "learning_rate": 6.733335907435295e-05, + "loss": 0.209, + "step": 51540 + }, + { + "epoch": 1.9904243407081355, + "grad_norm": 3.154418468475342, + "learning_rate": 6.730761805475115e-05, + "loss": 0.3855, + "step": 51550 + }, + { + "epoch": 1.9908104560021622, + "grad_norm": 0.6829432845115662, + "learning_rate": 6.728187703514936e-05, + "loss": 0.1427, + "step": 51560 + }, + { + "epoch": 1.991196571296189, + "grad_norm": 1.331217646598816, + "learning_rate": 6.725613601554757e-05, + "loss": 0.1843, + "step": 51570 + }, + { + "epoch": 1.991582686590216, + "grad_norm": 2.35197114944458, + "learning_rate": 6.723039499594579e-05, + "loss": 0.2557, + "step": 51580 + }, + { + "epoch": 1.9919688018842425, + "grad_norm": 0.5971415638923645, + "learning_rate": 6.7204653976344e-05, + "loss": 0.1782, + "step": 51590 + }, + { + "epoch": 1.9923549171782695, + "grad_norm": 1.0941762924194336, + "learning_rate": 6.717891295674223e-05, + "loss": 0.1377, + "step": 51600 + }, + { + "epoch": 1.9927410324722963, + "grad_norm": 0.43831324577331543, + "learning_rate": 6.715317193714044e-05, + "loss": 0.181, + "step": 51610 + }, + { + "epoch": 1.993127147766323, + "grad_norm": 1.4920772314071655, + "learning_rate": 6.712743091753864e-05, + "loss": 0.2724, + "step": 51620 + }, + { + "epoch": 1.9935132630603498, + "grad_norm": 0.6723024845123291, + "learning_rate": 6.710168989793685e-05, + "loss": 0.1732, + "step": 51630 + }, + { + "epoch": 1.9938993783543766, + "grad_norm": 0.925322413444519, + "learning_rate": 6.707594887833507e-05, + "loss": 0.2794, + "step": 51640 + }, + { + "epoch": 1.9942854936484036, + "grad_norm": 0.27710771560668945, + "learning_rate": 6.705020785873328e-05, + "loss": 0.2379, + "step": 51650 + }, + { + "epoch": 1.9946716089424301, + "grad_norm": 0.24498054385185242, + "learning_rate": 6.702446683913151e-05, + "loss": 0.2685, + "step": 51660 + }, + { + "epoch": 1.995057724236457, + "grad_norm": 0.43449532985687256, + "learning_rate": 6.699872581952972e-05, + "loss": 0.2799, + "step": 51670 + }, + { + "epoch": 1.9954438395304837, + "grad_norm": 1.3820387125015259, + "learning_rate": 6.697298479992793e-05, + "loss": 0.3061, + "step": 51680 + }, + { + "epoch": 1.9958299548245106, + "grad_norm": 1.3658883571624756, + "learning_rate": 6.694724378032615e-05, + "loss": 0.2033, + "step": 51690 + }, + { + "epoch": 1.9962160701185374, + "grad_norm": 0.905115008354187, + "learning_rate": 6.692150276072435e-05, + "loss": 0.1817, + "step": 51700 + }, + { + "epoch": 1.9966021854125642, + "grad_norm": 0.6158314347267151, + "learning_rate": 6.689576174112256e-05, + "loss": 0.1853, + "step": 51710 + }, + { + "epoch": 1.996988300706591, + "grad_norm": 0.145170658826828, + "learning_rate": 6.687002072152079e-05, + "loss": 0.1351, + "step": 51720 + }, + { + "epoch": 1.9973744160006177, + "grad_norm": 0.6830449104309082, + "learning_rate": 6.6844279701919e-05, + "loss": 0.1196, + "step": 51730 + }, + { + "epoch": 
1.9977605312946447, + "grad_norm": 0.5634799599647522, + "learning_rate": 6.681853868231721e-05, + "loss": 0.1919, + "step": 51740 + }, + { + "epoch": 1.9981466465886712, + "grad_norm": 1.7590057849884033, + "learning_rate": 6.679279766271543e-05, + "loss": 0.5667, + "step": 51750 + }, + { + "epoch": 1.9985327618826982, + "grad_norm": 0.638635516166687, + "learning_rate": 6.676705664311364e-05, + "loss": 0.2653, + "step": 51760 + }, + { + "epoch": 1.998918877176725, + "grad_norm": 2.008990526199341, + "learning_rate": 6.674131562351184e-05, + "loss": 0.3168, + "step": 51770 + }, + { + "epoch": 1.9993049924707518, + "grad_norm": 0.6151747107505798, + "learning_rate": 6.671557460391007e-05, + "loss": 0.2078, + "step": 51780 + }, + { + "epoch": 1.9996911077647785, + "grad_norm": 1.702333688735962, + "learning_rate": 6.668983358430828e-05, + "loss": 0.3154, + "step": 51790 + }, + { + "epoch": 2.0000772230588053, + "grad_norm": 1.1011065244674683, + "learning_rate": 6.66640925647065e-05, + "loss": 0.2318, + "step": 51800 + }, + { + "epoch": 2.0004633383528323, + "grad_norm": 0.7088577747344971, + "learning_rate": 6.663835154510471e-05, + "loss": 0.2106, + "step": 51810 + }, + { + "epoch": 2.000849453646859, + "grad_norm": 0.9032210111618042, + "learning_rate": 6.661261052550292e-05, + "loss": 0.2235, + "step": 51820 + }, + { + "epoch": 2.001235568940886, + "grad_norm": 1.8217551708221436, + "learning_rate": 6.658686950590113e-05, + "loss": 0.2385, + "step": 51830 + }, + { + "epoch": 2.0016216842349124, + "grad_norm": 0.3937009871006012, + "learning_rate": 6.656112848629933e-05, + "loss": 0.0739, + "step": 51840 + }, + { + "epoch": 2.0020077995289394, + "grad_norm": 0.1690339297056198, + "learning_rate": 6.653538746669756e-05, + "loss": 0.2361, + "step": 51850 + }, + { + "epoch": 2.0023939148229664, + "grad_norm": 0.4467180073261261, + "learning_rate": 6.650964644709577e-05, + "loss": 0.1675, + "step": 51860 + }, + { + "epoch": 2.002780030116993, + "grad_norm": 0.6363991498947144, + "learning_rate": 6.648390542749399e-05, + "loss": 0.1565, + "step": 51870 + }, + { + "epoch": 2.00316614541102, + "grad_norm": 0.6243847012519836, + "learning_rate": 6.64581644078922e-05, + "loss": 0.3038, + "step": 51880 + }, + { + "epoch": 2.0035522607050464, + "grad_norm": 1.705432415008545, + "learning_rate": 6.643242338829041e-05, + "loss": 0.1347, + "step": 51890 + }, + { + "epoch": 2.0039383759990734, + "grad_norm": 0.8589022159576416, + "learning_rate": 6.640668236868863e-05, + "loss": 0.1867, + "step": 51900 + }, + { + "epoch": 2.0043244912931, + "grad_norm": 1.468563437461853, + "learning_rate": 6.638094134908684e-05, + "loss": 0.3603, + "step": 51910 + }, + { + "epoch": 2.004710606587127, + "grad_norm": 2.0505447387695312, + "learning_rate": 6.635520032948505e-05, + "loss": 0.3336, + "step": 51920 + }, + { + "epoch": 2.0050967218811535, + "grad_norm": 0.06167216598987579, + "learning_rate": 6.632945930988327e-05, + "loss": 0.0434, + "step": 51930 + }, + { + "epoch": 2.0054828371751805, + "grad_norm": 0.43672385811805725, + "learning_rate": 6.630371829028148e-05, + "loss": 0.2008, + "step": 51940 + }, + { + "epoch": 2.0058689524692075, + "grad_norm": 0.20339979231357574, + "learning_rate": 6.62779772706797e-05, + "loss": 0.1828, + "step": 51950 + }, + { + "epoch": 2.006255067763234, + "grad_norm": 0.9884247779846191, + "learning_rate": 6.625223625107791e-05, + "loss": 0.1913, + "step": 51960 + }, + { + "epoch": 2.006641183057261, + "grad_norm": 1.9355684518814087, + "learning_rate": 6.622649523147612e-05, + 
"loss": 0.2164, + "step": 51970 + }, + { + "epoch": 2.0070272983512876, + "grad_norm": 0.6647536754608154, + "learning_rate": 6.620075421187433e-05, + "loss": 0.1993, + "step": 51980 + }, + { + "epoch": 2.0074134136453146, + "grad_norm": 0.5795693397521973, + "learning_rate": 6.617501319227255e-05, + "loss": 0.1887, + "step": 51990 + }, + { + "epoch": 2.007799528939341, + "grad_norm": 1.605940341949463, + "learning_rate": 6.614927217267076e-05, + "loss": 0.247, + "step": 52000 + }, + { + "epoch": 2.008185644233368, + "grad_norm": 1.3359249830245972, + "learning_rate": 6.612353115306897e-05, + "loss": 0.2031, + "step": 52010 + }, + { + "epoch": 2.008571759527395, + "grad_norm": 3.889131784439087, + "learning_rate": 6.609779013346719e-05, + "loss": 0.1584, + "step": 52020 + }, + { + "epoch": 2.0089578748214216, + "grad_norm": 1.9775649309158325, + "learning_rate": 6.607204911386541e-05, + "loss": 0.1041, + "step": 52030 + }, + { + "epoch": 2.0093439901154486, + "grad_norm": 0.13655538856983185, + "learning_rate": 6.604630809426363e-05, + "loss": 0.1621, + "step": 52040 + }, + { + "epoch": 2.009730105409475, + "grad_norm": 0.659899115562439, + "learning_rate": 6.602056707466183e-05, + "loss": 0.1603, + "step": 52050 + }, + { + "epoch": 2.010116220703502, + "grad_norm": 0.5897572040557861, + "learning_rate": 6.599482605506004e-05, + "loss": 0.1807, + "step": 52060 + }, + { + "epoch": 2.0105023359975287, + "grad_norm": 1.9831010103225708, + "learning_rate": 6.596908503545825e-05, + "loss": 0.1517, + "step": 52070 + }, + { + "epoch": 2.0108884512915557, + "grad_norm": 0.16144464910030365, + "learning_rate": 6.594334401585647e-05, + "loss": 0.0999, + "step": 52080 + }, + { + "epoch": 2.0112745665855822, + "grad_norm": 1.6258141994476318, + "learning_rate": 6.591760299625468e-05, + "loss": 0.2152, + "step": 52090 + }, + { + "epoch": 2.0116606818796092, + "grad_norm": 1.0857526063919067, + "learning_rate": 6.589186197665291e-05, + "loss": 0.3286, + "step": 52100 + }, + { + "epoch": 2.012046797173636, + "grad_norm": 0.2918669581413269, + "learning_rate": 6.586612095705112e-05, + "loss": 0.0839, + "step": 52110 + }, + { + "epoch": 2.0124329124676628, + "grad_norm": 0.7994667887687683, + "learning_rate": 6.584037993744932e-05, + "loss": 0.0723, + "step": 52120 + }, + { + "epoch": 2.0128190277616897, + "grad_norm": 1.1056885719299316, + "learning_rate": 6.581463891784753e-05, + "loss": 0.2936, + "step": 52130 + }, + { + "epoch": 2.0132051430557163, + "grad_norm": 1.6628743410110474, + "learning_rate": 6.578889789824575e-05, + "loss": 0.1095, + "step": 52140 + }, + { + "epoch": 2.0135912583497433, + "grad_norm": 0.7514179348945618, + "learning_rate": 6.576315687864396e-05, + "loss": 0.2834, + "step": 52150 + }, + { + "epoch": 2.01397737364377, + "grad_norm": 0.17141447961330414, + "learning_rate": 6.573741585904219e-05, + "loss": 0.0834, + "step": 52160 + }, + { + "epoch": 2.014363488937797, + "grad_norm": 1.6377034187316895, + "learning_rate": 6.57116748394404e-05, + "loss": 0.1515, + "step": 52170 + }, + { + "epoch": 2.014749604231824, + "grad_norm": 2.604389190673828, + "learning_rate": 6.568593381983861e-05, + "loss": 0.0964, + "step": 52180 + }, + { + "epoch": 2.0151357195258504, + "grad_norm": 0.737349808216095, + "learning_rate": 6.566019280023681e-05, + "loss": 0.1861, + "step": 52190 + }, + { + "epoch": 2.0155218348198773, + "grad_norm": 0.499905526638031, + "learning_rate": 6.563445178063503e-05, + "loss": 0.2146, + "step": 52200 + }, + { + "epoch": 2.015907950113904, + "grad_norm": 
1.0351229906082153, + "learning_rate": 6.560871076103324e-05, + "loss": 0.2461, + "step": 52210 + }, + { + "epoch": 2.016294065407931, + "grad_norm": 1.6009700298309326, + "learning_rate": 6.558296974143147e-05, + "loss": 0.2325, + "step": 52220 + }, + { + "epoch": 2.0166801807019574, + "grad_norm": 1.5281599760055542, + "learning_rate": 6.555722872182968e-05, + "loss": 0.2172, + "step": 52230 + }, + { + "epoch": 2.0170662959959844, + "grad_norm": 0.7039555907249451, + "learning_rate": 6.553148770222789e-05, + "loss": 0.3143, + "step": 52240 + }, + { + "epoch": 2.0174524112900114, + "grad_norm": 1.310943365097046, + "learning_rate": 6.55057466826261e-05, + "loss": 0.1785, + "step": 52250 + }, + { + "epoch": 2.017838526584038, + "grad_norm": 0.6066591143608093, + "learning_rate": 6.548000566302432e-05, + "loss": 0.1295, + "step": 52260 + }, + { + "epoch": 2.018224641878065, + "grad_norm": 2.3664653301239014, + "learning_rate": 6.545426464342252e-05, + "loss": 0.229, + "step": 52270 + }, + { + "epoch": 2.0186107571720915, + "grad_norm": 1.0997484922409058, + "learning_rate": 6.542852362382075e-05, + "loss": 0.1494, + "step": 52280 + }, + { + "epoch": 2.0189968724661185, + "grad_norm": 3.237204074859619, + "learning_rate": 6.540278260421896e-05, + "loss": 0.1952, + "step": 52290 + }, + { + "epoch": 2.019382987760145, + "grad_norm": 1.9585202932357788, + "learning_rate": 6.537704158461717e-05, + "loss": 0.1986, + "step": 52300 + }, + { + "epoch": 2.019769103054172, + "grad_norm": 0.18385589122772217, + "learning_rate": 6.535130056501539e-05, + "loss": 0.1373, + "step": 52310 + }, + { + "epoch": 2.0201552183481986, + "grad_norm": 1.4465802907943726, + "learning_rate": 6.53255595454136e-05, + "loss": 0.2492, + "step": 52320 + }, + { + "epoch": 2.0205413336422255, + "grad_norm": 0.3525356352329254, + "learning_rate": 6.529981852581181e-05, + "loss": 0.1046, + "step": 52330 + }, + { + "epoch": 2.0209274489362525, + "grad_norm": 0.10468830168247223, + "learning_rate": 6.527407750621003e-05, + "loss": 0.0702, + "step": 52340 + }, + { + "epoch": 2.021313564230279, + "grad_norm": 0.2023550570011139, + "learning_rate": 6.524833648660824e-05, + "loss": 0.2134, + "step": 52350 + }, + { + "epoch": 2.021699679524306, + "grad_norm": 1.8294217586517334, + "learning_rate": 6.522259546700645e-05, + "loss": 0.2443, + "step": 52360 + }, + { + "epoch": 2.0220857948183326, + "grad_norm": 0.3425254225730896, + "learning_rate": 6.519685444740467e-05, + "loss": 0.151, + "step": 52370 + }, + { + "epoch": 2.0224719101123596, + "grad_norm": 0.3099939227104187, + "learning_rate": 6.517111342780288e-05, + "loss": 0.1133, + "step": 52380 + }, + { + "epoch": 2.022858025406386, + "grad_norm": 2.0373382568359375, + "learning_rate": 6.514537240820109e-05, + "loss": 0.1773, + "step": 52390 + }, + { + "epoch": 2.023244140700413, + "grad_norm": 0.5817141532897949, + "learning_rate": 6.51196313885993e-05, + "loss": 0.1276, + "step": 52400 + }, + { + "epoch": 2.02363025599444, + "grad_norm": 2.0622966289520264, + "learning_rate": 6.509389036899752e-05, + "loss": 0.1889, + "step": 52410 + }, + { + "epoch": 2.0240163712884667, + "grad_norm": 1.1906920671463013, + "learning_rate": 6.506814934939573e-05, + "loss": 0.096, + "step": 52420 + }, + { + "epoch": 2.0244024865824937, + "grad_norm": 0.1317962408065796, + "learning_rate": 6.504240832979395e-05, + "loss": 0.2232, + "step": 52430 + }, + { + "epoch": 2.02478860187652, + "grad_norm": 0.13029718399047852, + "learning_rate": 6.501666731019216e-05, + "loss": 0.1511, + "step": 52440 + 
}, + { + "epoch": 2.025174717170547, + "grad_norm": 0.793836772441864, + "learning_rate": 6.499092629059037e-05, + "loss": 0.1654, + "step": 52450 + }, + { + "epoch": 2.0255608324645737, + "grad_norm": 0.5743208527565002, + "learning_rate": 6.496518527098859e-05, + "loss": 0.2694, + "step": 52460 + }, + { + "epoch": 2.0259469477586007, + "grad_norm": 0.9897276759147644, + "learning_rate": 6.49394442513868e-05, + "loss": 0.2149, + "step": 52470 + }, + { + "epoch": 2.0263330630526277, + "grad_norm": 2.601984977722168, + "learning_rate": 6.491370323178501e-05, + "loss": 0.1285, + "step": 52480 + }, + { + "epoch": 2.0267191783466543, + "grad_norm": 1.3703612089157104, + "learning_rate": 6.488796221218323e-05, + "loss": 0.1226, + "step": 52490 + }, + { + "epoch": 2.0271052936406813, + "grad_norm": 0.8976957201957703, + "learning_rate": 6.486222119258144e-05, + "loss": 0.3069, + "step": 52500 + }, + { + "epoch": 2.027491408934708, + "grad_norm": 1.4867346286773682, + "learning_rate": 6.483648017297965e-05, + "loss": 0.1804, + "step": 52510 + }, + { + "epoch": 2.027877524228735, + "grad_norm": 0.9173004031181335, + "learning_rate": 6.481073915337787e-05, + "loss": 0.193, + "step": 52520 + }, + { + "epoch": 2.0282636395227613, + "grad_norm": 1.4100719690322876, + "learning_rate": 6.478499813377609e-05, + "loss": 0.2443, + "step": 52530 + }, + { + "epoch": 2.0286497548167883, + "grad_norm": 0.08620387315750122, + "learning_rate": 6.475925711417429e-05, + "loss": 0.2858, + "step": 52540 + }, + { + "epoch": 2.029035870110815, + "grad_norm": 0.6817231178283691, + "learning_rate": 6.47335160945725e-05, + "loss": 0.1421, + "step": 52550 + }, + { + "epoch": 2.029421985404842, + "grad_norm": 0.7937541604042053, + "learning_rate": 6.470777507497072e-05, + "loss": 0.1856, + "step": 52560 + }, + { + "epoch": 2.029808100698869, + "grad_norm": 0.3391193151473999, + "learning_rate": 6.468203405536893e-05, + "loss": 0.2714, + "step": 52570 + }, + { + "epoch": 2.0301942159928954, + "grad_norm": 0.45617592334747314, + "learning_rate": 6.465629303576715e-05, + "loss": 0.1528, + "step": 52580 + }, + { + "epoch": 2.0305803312869224, + "grad_norm": 0.6412602663040161, + "learning_rate": 6.463055201616537e-05, + "loss": 0.0908, + "step": 52590 + }, + { + "epoch": 2.030966446580949, + "grad_norm": 0.5588594079017639, + "learning_rate": 6.460481099656359e-05, + "loss": 0.1448, + "step": 52600 + }, + { + "epoch": 2.031352561874976, + "grad_norm": 0.5726466774940491, + "learning_rate": 6.457906997696179e-05, + "loss": 0.2771, + "step": 52610 + }, + { + "epoch": 2.0317386771690025, + "grad_norm": 1.625110387802124, + "learning_rate": 6.455332895736e-05, + "loss": 0.1147, + "step": 52620 + }, + { + "epoch": 2.0321247924630295, + "grad_norm": 0.9171527624130249, + "learning_rate": 6.452758793775821e-05, + "loss": 0.0847, + "step": 52630 + }, + { + "epoch": 2.0325109077570565, + "grad_norm": 0.1905253678560257, + "learning_rate": 6.450184691815643e-05, + "loss": 0.2086, + "step": 52640 + }, + { + "epoch": 2.032897023051083, + "grad_norm": 0.2381320744752884, + "learning_rate": 6.447610589855464e-05, + "loss": 0.1767, + "step": 52650 + }, + { + "epoch": 2.03328313834511, + "grad_norm": 2.2482104301452637, + "learning_rate": 6.445036487895287e-05, + "loss": 0.2626, + "step": 52660 + }, + { + "epoch": 2.0336692536391365, + "grad_norm": 2.5901641845703125, + "learning_rate": 6.442462385935108e-05, + "loss": 0.21, + "step": 52670 + }, + { + "epoch": 2.0340553689331635, + "grad_norm": 3.242913007736206, + "learning_rate": 
6.439888283974929e-05, + "loss": 0.3508, + "step": 52680 + }, + { + "epoch": 2.03444148422719, + "grad_norm": 0.12099716812372208, + "learning_rate": 6.437314182014749e-05, + "loss": 0.1109, + "step": 52690 + }, + { + "epoch": 2.034827599521217, + "grad_norm": 1.384753704071045, + "learning_rate": 6.43474008005457e-05, + "loss": 0.2023, + "step": 52700 + }, + { + "epoch": 2.035213714815244, + "grad_norm": 1.1507978439331055, + "learning_rate": 6.432165978094392e-05, + "loss": 0.2656, + "step": 52710 + }, + { + "epoch": 2.0355998301092706, + "grad_norm": 0.43186309933662415, + "learning_rate": 6.429591876134215e-05, + "loss": 0.2265, + "step": 52720 + }, + { + "epoch": 2.0359859454032976, + "grad_norm": 1.2791942358016968, + "learning_rate": 6.427017774174036e-05, + "loss": 0.189, + "step": 52730 + }, + { + "epoch": 2.036372060697324, + "grad_norm": 2.613863229751587, + "learning_rate": 6.424443672213857e-05, + "loss": 0.1636, + "step": 52740 + }, + { + "epoch": 2.036758175991351, + "grad_norm": 0.9245595932006836, + "learning_rate": 6.421869570253679e-05, + "loss": 0.2844, + "step": 52750 + }, + { + "epoch": 2.0371442912853777, + "grad_norm": 0.5449417233467102, + "learning_rate": 6.419295468293499e-05, + "loss": 0.24, + "step": 52760 + }, + { + "epoch": 2.0375304065794047, + "grad_norm": 0.24012671411037445, + "learning_rate": 6.41672136633332e-05, + "loss": 0.226, + "step": 52770 + }, + { + "epoch": 2.037916521873431, + "grad_norm": 0.07777285575866699, + "learning_rate": 6.414147264373143e-05, + "loss": 0.1351, + "step": 52780 + }, + { + "epoch": 2.038302637167458, + "grad_norm": 1.4423730373382568, + "learning_rate": 6.411573162412964e-05, + "loss": 0.1358, + "step": 52790 + }, + { + "epoch": 2.038688752461485, + "grad_norm": 0.9842507243156433, + "learning_rate": 6.408999060452785e-05, + "loss": 0.1947, + "step": 52800 + }, + { + "epoch": 2.0390748677555117, + "grad_norm": 0.23728783428668976, + "learning_rate": 6.406424958492607e-05, + "loss": 0.1655, + "step": 52810 + }, + { + "epoch": 2.0394609830495387, + "grad_norm": 1.0350521802902222, + "learning_rate": 6.403850856532428e-05, + "loss": 0.2269, + "step": 52820 + }, + { + "epoch": 2.0398470983435653, + "grad_norm": 0.7008020877838135, + "learning_rate": 6.401276754572248e-05, + "loss": 0.0694, + "step": 52830 + }, + { + "epoch": 2.0402332136375922, + "grad_norm": 0.2844768464565277, + "learning_rate": 6.39870265261207e-05, + "loss": 0.2871, + "step": 52840 + }, + { + "epoch": 2.040619328931619, + "grad_norm": 0.10209562629461288, + "learning_rate": 6.396128550651892e-05, + "loss": 0.1556, + "step": 52850 + }, + { + "epoch": 2.041005444225646, + "grad_norm": 0.3833054006099701, + "learning_rate": 6.393554448691713e-05, + "loss": 0.2414, + "step": 52860 + }, + { + "epoch": 2.0413915595196728, + "grad_norm": 1.023119330406189, + "learning_rate": 6.390980346731535e-05, + "loss": 0.3514, + "step": 52870 + }, + { + "epoch": 2.0417776748136993, + "grad_norm": 0.2640077769756317, + "learning_rate": 6.388406244771356e-05, + "loss": 0.1651, + "step": 52880 + }, + { + "epoch": 2.0421637901077263, + "grad_norm": 0.5331079363822937, + "learning_rate": 6.385832142811177e-05, + "loss": 0.225, + "step": 52890 + }, + { + "epoch": 2.042549905401753, + "grad_norm": 1.8062942028045654, + "learning_rate": 6.383258040850999e-05, + "loss": 0.145, + "step": 52900 + }, + { + "epoch": 2.04293602069578, + "grad_norm": 0.5096505284309387, + "learning_rate": 6.38068393889082e-05, + "loss": 0.1353, + "step": 52910 + }, + { + "epoch": 2.0433221359898064, + 
"grad_norm": 0.6749324798583984, + "learning_rate": 6.378109836930641e-05, + "loss": 0.2563, + "step": 52920 + }, + { + "epoch": 2.0437082512838334, + "grad_norm": 2.487567186355591, + "learning_rate": 6.375535734970463e-05, + "loss": 0.1616, + "step": 52930 + }, + { + "epoch": 2.0440943665778604, + "grad_norm": 0.7904801964759827, + "learning_rate": 6.372961633010284e-05, + "loss": 0.1579, + "step": 52940 + }, + { + "epoch": 2.044480481871887, + "grad_norm": 2.078003406524658, + "learning_rate": 6.370387531050105e-05, + "loss": 0.2368, + "step": 52950 + }, + { + "epoch": 2.044866597165914, + "grad_norm": 0.778519332408905, + "learning_rate": 6.367813429089927e-05, + "loss": 0.1509, + "step": 52960 + }, + { + "epoch": 2.0452527124599404, + "grad_norm": 0.919970691204071, + "learning_rate": 6.365239327129748e-05, + "loss": 0.1493, + "step": 52970 + }, + { + "epoch": 2.0456388277539674, + "grad_norm": 0.06400478631258011, + "learning_rate": 6.362665225169569e-05, + "loss": 0.2599, + "step": 52980 + }, + { + "epoch": 2.046024943047994, + "grad_norm": 3.479253053665161, + "learning_rate": 6.36009112320939e-05, + "loss": 0.2582, + "step": 52990 + }, + { + "epoch": 2.046411058342021, + "grad_norm": 0.5447397232055664, + "learning_rate": 6.357517021249212e-05, + "loss": 0.0906, + "step": 53000 + }, + { + "epoch": 2.0467971736360475, + "grad_norm": 1.1518877744674683, + "learning_rate": 6.354942919289033e-05, + "loss": 0.1255, + "step": 53010 + }, + { + "epoch": 2.0471832889300745, + "grad_norm": 1.782228708267212, + "learning_rate": 6.352368817328855e-05, + "loss": 0.4931, + "step": 53020 + }, + { + "epoch": 2.0475694042241015, + "grad_norm": 1.1501574516296387, + "learning_rate": 6.349794715368677e-05, + "loss": 0.1242, + "step": 53030 + }, + { + "epoch": 2.047955519518128, + "grad_norm": 2.69543719291687, + "learning_rate": 6.347220613408497e-05, + "loss": 0.2079, + "step": 53040 + }, + { + "epoch": 2.048341634812155, + "grad_norm": 0.6990146040916443, + "learning_rate": 6.344646511448318e-05, + "loss": 0.1033, + "step": 53050 + }, + { + "epoch": 2.0487277501061816, + "grad_norm": 2.3171396255493164, + "learning_rate": 6.34207240948814e-05, + "loss": 0.2182, + "step": 53060 + }, + { + "epoch": 2.0491138654002086, + "grad_norm": 0.11202628165483475, + "learning_rate": 6.339498307527961e-05, + "loss": 0.1862, + "step": 53070 + }, + { + "epoch": 2.049499980694235, + "grad_norm": 0.0545661523938179, + "learning_rate": 6.336924205567782e-05, + "loss": 0.1641, + "step": 53080 + }, + { + "epoch": 2.049886095988262, + "grad_norm": 0.9861188530921936, + "learning_rate": 6.334350103607605e-05, + "loss": 0.1862, + "step": 53090 + }, + { + "epoch": 2.050272211282289, + "grad_norm": 0.6137722730636597, + "learning_rate": 6.331776001647427e-05, + "loss": 0.1213, + "step": 53100 + }, + { + "epoch": 2.0506583265763156, + "grad_norm": 0.3138205409049988, + "learning_rate": 6.329201899687246e-05, + "loss": 0.2589, + "step": 53110 + }, + { + "epoch": 2.0510444418703426, + "grad_norm": 1.4361293315887451, + "learning_rate": 6.326627797727068e-05, + "loss": 0.0854, + "step": 53120 + }, + { + "epoch": 2.051430557164369, + "grad_norm": 0.18099132180213928, + "learning_rate": 6.324053695766889e-05, + "loss": 0.173, + "step": 53130 + }, + { + "epoch": 2.051816672458396, + "grad_norm": 1.630255103111267, + "learning_rate": 6.32147959380671e-05, + "loss": 0.2573, + "step": 53140 + }, + { + "epoch": 2.0522027877524227, + "grad_norm": 1.0745834112167358, + "learning_rate": 6.318905491846532e-05, + "loss": 0.1249, + "step": 
53150 + }, + { + "epoch": 2.0525889030464497, + "grad_norm": 0.15289072692394257, + "learning_rate": 6.316331389886354e-05, + "loss": 0.204, + "step": 53160 + }, + { + "epoch": 2.0529750183404767, + "grad_norm": 0.7459616661071777, + "learning_rate": 6.313757287926176e-05, + "loss": 0.1632, + "step": 53170 + }, + { + "epoch": 2.0533611336345032, + "grad_norm": 2.7093374729156494, + "learning_rate": 6.311183185965996e-05, + "loss": 0.1477, + "step": 53180 + }, + { + "epoch": 2.0537472489285302, + "grad_norm": 0.1735665500164032, + "learning_rate": 6.308609084005817e-05, + "loss": 0.1736, + "step": 53190 + }, + { + "epoch": 2.0541333642225568, + "grad_norm": 0.3297293186187744, + "learning_rate": 6.306034982045638e-05, + "loss": 0.0974, + "step": 53200 + }, + { + "epoch": 2.0545194795165838, + "grad_norm": 0.023612016811966896, + "learning_rate": 6.30346088008546e-05, + "loss": 0.1451, + "step": 53210 + }, + { + "epoch": 2.0549055948106103, + "grad_norm": 1.3713831901550293, + "learning_rate": 6.300886778125282e-05, + "loss": 0.1103, + "step": 53220 + }, + { + "epoch": 2.0552917101046373, + "grad_norm": 0.5888402462005615, + "learning_rate": 6.298312676165104e-05, + "loss": 0.1708, + "step": 53230 + }, + { + "epoch": 2.055677825398664, + "grad_norm": 1.8231903314590454, + "learning_rate": 6.295738574204925e-05, + "loss": 0.2337, + "step": 53240 + }, + { + "epoch": 2.056063940692691, + "grad_norm": 0.5738593935966492, + "learning_rate": 6.293164472244746e-05, + "loss": 0.1951, + "step": 53250 + }, + { + "epoch": 2.056450055986718, + "grad_norm": 0.13595734536647797, + "learning_rate": 6.290590370284566e-05, + "loss": 0.1381, + "step": 53260 + }, + { + "epoch": 2.0568361712807444, + "grad_norm": 0.9126518368721008, + "learning_rate": 6.288016268324388e-05, + "loss": 0.1247, + "step": 53270 + }, + { + "epoch": 2.0572222865747714, + "grad_norm": 0.5245015621185303, + "learning_rate": 6.28544216636421e-05, + "loss": 0.2851, + "step": 53280 + }, + { + "epoch": 2.057608401868798, + "grad_norm": 0.08704059571027756, + "learning_rate": 6.282868064404032e-05, + "loss": 0.1425, + "step": 53290 + }, + { + "epoch": 2.057994517162825, + "grad_norm": 0.3915903866291046, + "learning_rate": 6.280293962443853e-05, + "loss": 0.1683, + "step": 53300 + }, + { + "epoch": 2.0583806324568514, + "grad_norm": 1.5422857999801636, + "learning_rate": 6.277719860483674e-05, + "loss": 0.178, + "step": 53310 + }, + { + "epoch": 2.0587667477508784, + "grad_norm": 0.6210331320762634, + "learning_rate": 6.275145758523496e-05, + "loss": 0.1279, + "step": 53320 + }, + { + "epoch": 2.0591528630449054, + "grad_norm": 1.8133548498153687, + "learning_rate": 6.272571656563316e-05, + "loss": 0.1276, + "step": 53330 + }, + { + "epoch": 2.059538978338932, + "grad_norm": 1.8391001224517822, + "learning_rate": 6.269997554603138e-05, + "loss": 0.1533, + "step": 53340 + }, + { + "epoch": 2.059925093632959, + "grad_norm": 0.48838570713996887, + "learning_rate": 6.26742345264296e-05, + "loss": 0.1334, + "step": 53350 + }, + { + "epoch": 2.0603112089269855, + "grad_norm": 1.6755399703979492, + "learning_rate": 6.264849350682781e-05, + "loss": 0.1874, + "step": 53360 + }, + { + "epoch": 2.0606973242210125, + "grad_norm": 0.6416677236557007, + "learning_rate": 6.262275248722602e-05, + "loss": 0.0775, + "step": 53370 + }, + { + "epoch": 2.061083439515039, + "grad_norm": 1.258190393447876, + "learning_rate": 6.259701146762424e-05, + "loss": 0.2183, + "step": 53380 + }, + { + "epoch": 2.061469554809066, + "grad_norm": 1.543530821800232, + 
"learning_rate": 6.257127044802245e-05, + "loss": 0.2124, + "step": 53390 + }, + { + "epoch": 2.0618556701030926, + "grad_norm": 1.7885076999664307, + "learning_rate": 6.254552942842065e-05, + "loss": 0.1746, + "step": 53400 + }, + { + "epoch": 2.0622417853971196, + "grad_norm": 0.7699170708656311, + "learning_rate": 6.251978840881888e-05, + "loss": 0.0489, + "step": 53410 + }, + { + "epoch": 2.0626279006911465, + "grad_norm": 0.6853735446929932, + "learning_rate": 6.249404738921709e-05, + "loss": 0.0828, + "step": 53420 + }, + { + "epoch": 2.063014015985173, + "grad_norm": 1.250551462173462, + "learning_rate": 6.24683063696153e-05, + "loss": 0.1361, + "step": 53430 + }, + { + "epoch": 2.0634001312792, + "grad_norm": 0.20269331336021423, + "learning_rate": 6.244256535001352e-05, + "loss": 0.1916, + "step": 53440 + }, + { + "epoch": 2.0637862465732266, + "grad_norm": 0.868029773235321, + "learning_rate": 6.241682433041173e-05, + "loss": 0.1665, + "step": 53450 + }, + { + "epoch": 2.0641723618672536, + "grad_norm": 0.5697484016418457, + "learning_rate": 6.239108331080994e-05, + "loss": 0.1873, + "step": 53460 + }, + { + "epoch": 2.06455847716128, + "grad_norm": 2.3145039081573486, + "learning_rate": 6.236534229120816e-05, + "loss": 0.3618, + "step": 53470 + }, + { + "epoch": 2.064944592455307, + "grad_norm": 0.3517427444458008, + "learning_rate": 6.233960127160637e-05, + "loss": 0.1844, + "step": 53480 + }, + { + "epoch": 2.065330707749334, + "grad_norm": 0.6968696117401123, + "learning_rate": 6.231386025200458e-05, + "loss": 0.1229, + "step": 53490 + }, + { + "epoch": 2.0657168230433607, + "grad_norm": 1.4328505992889404, + "learning_rate": 6.22881192324028e-05, + "loss": 0.1921, + "step": 53500 + }, + { + "epoch": 2.0661029383373877, + "grad_norm": 0.0647716224193573, + "learning_rate": 6.226237821280101e-05, + "loss": 0.1157, + "step": 53510 + }, + { + "epoch": 2.066489053631414, + "grad_norm": 0.1405775398015976, + "learning_rate": 6.223663719319922e-05, + "loss": 0.1593, + "step": 53520 + }, + { + "epoch": 2.066875168925441, + "grad_norm": 0.8191878795623779, + "learning_rate": 6.221089617359744e-05, + "loss": 0.3237, + "step": 53530 + }, + { + "epoch": 2.0672612842194678, + "grad_norm": 0.16709165275096893, + "learning_rate": 6.218515515399565e-05, + "loss": 0.1098, + "step": 53540 + }, + { + "epoch": 2.0676473995134947, + "grad_norm": 1.2044661045074463, + "learning_rate": 6.215941413439386e-05, + "loss": 0.2014, + "step": 53550 + }, + { + "epoch": 2.0680335148075217, + "grad_norm": 0.7808303236961365, + "learning_rate": 6.213367311479208e-05, + "loss": 0.1122, + "step": 53560 + }, + { + "epoch": 2.0684196301015483, + "grad_norm": 0.8508942723274231, + "learning_rate": 6.210793209519029e-05, + "loss": 0.2162, + "step": 53570 + }, + { + "epoch": 2.0688057453955753, + "grad_norm": 0.1512947827577591, + "learning_rate": 6.20821910755885e-05, + "loss": 0.1382, + "step": 53580 + }, + { + "epoch": 2.069191860689602, + "grad_norm": 0.10546062141656876, + "learning_rate": 6.205645005598673e-05, + "loss": 0.0962, + "step": 53590 + }, + { + "epoch": 2.069577975983629, + "grad_norm": 0.7787978053092957, + "learning_rate": 6.203070903638494e-05, + "loss": 0.2142, + "step": 53600 + }, + { + "epoch": 2.0699640912776553, + "grad_norm": 0.1247776597738266, + "learning_rate": 6.200496801678314e-05, + "loss": 0.1788, + "step": 53610 + }, + { + "epoch": 2.0703502065716823, + "grad_norm": 1.5636411905288696, + "learning_rate": 6.197922699718136e-05, + "loss": 0.2244, + "step": 53620 + }, + { + "epoch": 
2.0707363218657093, + "grad_norm": 1.5239825248718262, + "learning_rate": 6.195348597757957e-05, + "loss": 0.152, + "step": 53630 + }, + { + "epoch": 2.071122437159736, + "grad_norm": 0.6676948070526123, + "learning_rate": 6.192774495797778e-05, + "loss": 0.1007, + "step": 53640 + }, + { + "epoch": 2.071508552453763, + "grad_norm": 2.0146231651306152, + "learning_rate": 6.190200393837601e-05, + "loss": 0.1881, + "step": 53650 + }, + { + "epoch": 2.0718946677477894, + "grad_norm": 1.3102779388427734, + "learning_rate": 6.187626291877422e-05, + "loss": 0.2036, + "step": 53660 + }, + { + "epoch": 2.0722807830418164, + "grad_norm": 1.230490803718567, + "learning_rate": 6.185052189917244e-05, + "loss": 0.235, + "step": 53670 + }, + { + "epoch": 2.072666898335843, + "grad_norm": 1.2379542589187622, + "learning_rate": 6.182478087957064e-05, + "loss": 0.2005, + "step": 53680 + }, + { + "epoch": 2.07305301362987, + "grad_norm": 0.9768775701522827, + "learning_rate": 6.179903985996885e-05, + "loss": 0.1885, + "step": 53690 + }, + { + "epoch": 2.0734391289238965, + "grad_norm": 1.2973617315292358, + "learning_rate": 6.177329884036706e-05, + "loss": 0.1218, + "step": 53700 + }, + { + "epoch": 2.0738252442179235, + "grad_norm": 0.6314378380775452, + "learning_rate": 6.174755782076528e-05, + "loss": 0.2194, + "step": 53710 + }, + { + "epoch": 2.0742113595119505, + "grad_norm": 1.7972043752670288, + "learning_rate": 6.17218168011635e-05, + "loss": 0.2668, + "step": 53720 + }, + { + "epoch": 2.074597474805977, + "grad_norm": 1.1307156085968018, + "learning_rate": 6.169607578156172e-05, + "loss": 0.2094, + "step": 53730 + }, + { + "epoch": 2.074983590100004, + "grad_norm": 0.07750687003135681, + "learning_rate": 6.167033476195993e-05, + "loss": 0.1556, + "step": 53740 + }, + { + "epoch": 2.0753697053940305, + "grad_norm": 0.33502569794654846, + "learning_rate": 6.164459374235813e-05, + "loss": 0.1849, + "step": 53750 + }, + { + "epoch": 2.0757558206880575, + "grad_norm": 0.462332546710968, + "learning_rate": 6.161885272275634e-05, + "loss": 0.1704, + "step": 53760 + }, + { + "epoch": 2.076141935982084, + "grad_norm": 0.7019705772399902, + "learning_rate": 6.159311170315456e-05, + "loss": 0.155, + "step": 53770 + }, + { + "epoch": 2.076528051276111, + "grad_norm": 1.2807412147521973, + "learning_rate": 6.156737068355278e-05, + "loss": 0.2956, + "step": 53780 + }, + { + "epoch": 2.076914166570138, + "grad_norm": 0.9197677373886108, + "learning_rate": 6.1541629663951e-05, + "loss": 0.0723, + "step": 53790 + }, + { + "epoch": 2.0773002818641646, + "grad_norm": 0.15677478909492493, + "learning_rate": 6.151588864434921e-05, + "loss": 0.2835, + "step": 53800 + }, + { + "epoch": 2.0776863971581916, + "grad_norm": 1.7673814296722412, + "learning_rate": 6.149014762474742e-05, + "loss": 0.1127, + "step": 53810 + }, + { + "epoch": 2.078072512452218, + "grad_norm": 1.9662418365478516, + "learning_rate": 6.146440660514562e-05, + "loss": 0.1314, + "step": 53820 + }, + { + "epoch": 2.078458627746245, + "grad_norm": 0.11253755539655685, + "learning_rate": 6.143866558554384e-05, + "loss": 0.2855, + "step": 53830 + }, + { + "epoch": 2.0788447430402717, + "grad_norm": 2.4275155067443848, + "learning_rate": 6.141292456594206e-05, + "loss": 0.2794, + "step": 53840 + }, + { + "epoch": 2.0792308583342987, + "grad_norm": 0.4845966100692749, + "learning_rate": 6.138718354634028e-05, + "loss": 0.1129, + "step": 53850 + }, + { + "epoch": 2.079616973628325, + "grad_norm": 0.2127775400876999, + "learning_rate": 6.136144252673849e-05, + 
"loss": 0.1089, + "step": 53860 + }, + { + "epoch": 2.080003088922352, + "grad_norm": 1.554506778717041, + "learning_rate": 6.13357015071367e-05, + "loss": 0.1196, + "step": 53870 + }, + { + "epoch": 2.080389204216379, + "grad_norm": 0.08301983028650284, + "learning_rate": 6.130996048753492e-05, + "loss": 0.3858, + "step": 53880 + }, + { + "epoch": 2.0807753195104057, + "grad_norm": 1.0547988414764404, + "learning_rate": 6.128421946793313e-05, + "loss": 0.1348, + "step": 53890 + }, + { + "epoch": 2.0811614348044327, + "grad_norm": 0.3954383432865143, + "learning_rate": 6.125847844833134e-05, + "loss": 0.1347, + "step": 53900 + }, + { + "epoch": 2.0815475500984593, + "grad_norm": 0.25809749960899353, + "learning_rate": 6.123273742872956e-05, + "loss": 0.4701, + "step": 53910 + }, + { + "epoch": 2.0819336653924863, + "grad_norm": 0.9337195754051208, + "learning_rate": 6.120699640912777e-05, + "loss": 0.1335, + "step": 53920 + }, + { + "epoch": 2.082319780686513, + "grad_norm": 0.9139271378517151, + "learning_rate": 6.118125538952598e-05, + "loss": 0.1095, + "step": 53930 + }, + { + "epoch": 2.08270589598054, + "grad_norm": 0.31789037585258484, + "learning_rate": 6.11555143699242e-05, + "loss": 0.1116, + "step": 53940 + }, + { + "epoch": 2.083092011274567, + "grad_norm": 1.4585286378860474, + "learning_rate": 6.112977335032241e-05, + "loss": 0.2283, + "step": 53950 + }, + { + "epoch": 2.0834781265685933, + "grad_norm": 1.2569290399551392, + "learning_rate": 6.110403233072062e-05, + "loss": 0.238, + "step": 53960 + }, + { + "epoch": 2.0838642418626203, + "grad_norm": 0.08168485015630722, + "learning_rate": 6.107829131111884e-05, + "loss": 0.0965, + "step": 53970 + }, + { + "epoch": 2.084250357156647, + "grad_norm": 0.42119330167770386, + "learning_rate": 6.105255029151705e-05, + "loss": 0.288, + "step": 53980 + }, + { + "epoch": 2.084636472450674, + "grad_norm": 0.04520781710743904, + "learning_rate": 6.102680927191526e-05, + "loss": 0.1616, + "step": 53990 + }, + { + "epoch": 2.0850225877447004, + "grad_norm": 0.9019898176193237, + "learning_rate": 6.100106825231348e-05, + "loss": 0.1313, + "step": 54000 + }, + { + "epoch": 2.0854087030387274, + "grad_norm": 2.3572826385498047, + "learning_rate": 6.097532723271169e-05, + "loss": 0.219, + "step": 54010 + }, + { + "epoch": 2.0857948183327544, + "grad_norm": 0.6147291660308838, + "learning_rate": 6.0949586213109896e-05, + "loss": 0.1005, + "step": 54020 + }, + { + "epoch": 2.086180933626781, + "grad_norm": 0.3989221751689911, + "learning_rate": 6.092384519350812e-05, + "loss": 0.1923, + "step": 54030 + }, + { + "epoch": 2.086567048920808, + "grad_norm": 0.04854296147823334, + "learning_rate": 6.089810417390634e-05, + "loss": 0.3081, + "step": 54040 + }, + { + "epoch": 2.0869531642148345, + "grad_norm": 1.1020113229751587, + "learning_rate": 6.087236315430454e-05, + "loss": 0.1325, + "step": 54050 + }, + { + "epoch": 2.0873392795088614, + "grad_norm": 1.2404685020446777, + "learning_rate": 6.0846622134702756e-05, + "loss": 0.2657, + "step": 54060 + }, + { + "epoch": 2.087725394802888, + "grad_norm": 0.5539906620979309, + "learning_rate": 6.082088111510097e-05, + "loss": 0.1379, + "step": 54070 + }, + { + "epoch": 2.088111510096915, + "grad_norm": 0.550548791885376, + "learning_rate": 6.079514009549918e-05, + "loss": 0.2004, + "step": 54080 + }, + { + "epoch": 2.0884976253909415, + "grad_norm": 0.5012397766113281, + "learning_rate": 6.07693990758974e-05, + "loss": 0.1582, + "step": 54090 + }, + { + "epoch": 2.0888837406849685, + "grad_norm": 
0.7319992780685425, + "learning_rate": 6.0743658056295617e-05, + "loss": 0.1923, + "step": 54100 + }, + { + "epoch": 2.0892698559789955, + "grad_norm": 0.05006573721766472, + "learning_rate": 6.071791703669383e-05, + "loss": 0.2457, + "step": 54110 + }, + { + "epoch": 2.089655971273022, + "grad_norm": 0.06044507771730423, + "learning_rate": 6.0692176017092036e-05, + "loss": 0.1328, + "step": 54120 + }, + { + "epoch": 2.090042086567049, + "grad_norm": 1.4530203342437744, + "learning_rate": 6.066643499749025e-05, + "loss": 0.1344, + "step": 54130 + }, + { + "epoch": 2.0904282018610756, + "grad_norm": 1.0805295705795288, + "learning_rate": 6.064069397788846e-05, + "loss": 0.137, + "step": 54140 + }, + { + "epoch": 2.0908143171551026, + "grad_norm": 0.7313231825828552, + "learning_rate": 6.061495295828668e-05, + "loss": 0.1859, + "step": 54150 + }, + { + "epoch": 2.091200432449129, + "grad_norm": 1.4634814262390137, + "learning_rate": 6.0589211938684896e-05, + "loss": 0.1396, + "step": 54160 + }, + { + "epoch": 2.091586547743156, + "grad_norm": 1.1281847953796387, + "learning_rate": 6.056347091908311e-05, + "loss": 0.1935, + "step": 54170 + }, + { + "epoch": 2.091972663037183, + "grad_norm": 0.3002813756465912, + "learning_rate": 6.053772989948132e-05, + "loss": 0.2053, + "step": 54180 + }, + { + "epoch": 2.0923587783312096, + "grad_norm": 1.521639347076416, + "learning_rate": 6.0511988879879536e-05, + "loss": 0.2228, + "step": 54190 + }, + { + "epoch": 2.0927448936252366, + "grad_norm": 1.6338810920715332, + "learning_rate": 6.048624786027774e-05, + "loss": 0.0525, + "step": 54200 + }, + { + "epoch": 2.093131008919263, + "grad_norm": 1.9877723455429077, + "learning_rate": 6.0460506840675956e-05, + "loss": 0.3764, + "step": 54210 + }, + { + "epoch": 2.09351712421329, + "grad_norm": 1.1026666164398193, + "learning_rate": 6.0434765821074176e-05, + "loss": 0.0845, + "step": 54220 + }, + { + "epoch": 2.0939032395073167, + "grad_norm": 0.17072628438472748, + "learning_rate": 6.040902480147239e-05, + "loss": 0.2619, + "step": 54230 + }, + { + "epoch": 2.0942893548013437, + "grad_norm": 1.0765973329544067, + "learning_rate": 6.03832837818706e-05, + "loss": 0.2227, + "step": 54240 + }, + { + "epoch": 2.0946754700953703, + "grad_norm": 0.10422563552856445, + "learning_rate": 6.0357542762268816e-05, + "loss": 0.1371, + "step": 54250 + }, + { + "epoch": 2.0950615853893972, + "grad_norm": 0.7437000870704651, + "learning_rate": 6.033180174266703e-05, + "loss": 0.0981, + "step": 54260 + }, + { + "epoch": 2.0954477006834242, + "grad_norm": 0.12045181542634964, + "learning_rate": 6.0306060723065236e-05, + "loss": 0.1583, + "step": 54270 + }, + { + "epoch": 2.0958338159774508, + "grad_norm": 0.5264570713043213, + "learning_rate": 6.028031970346346e-05, + "loss": 0.2561, + "step": 54280 + }, + { + "epoch": 2.0962199312714778, + "grad_norm": 0.8104095458984375, + "learning_rate": 6.0254578683861676e-05, + "loss": 0.1338, + "step": 54290 + }, + { + "epoch": 2.0966060465655043, + "grad_norm": 1.8734989166259766, + "learning_rate": 6.022883766425988e-05, + "loss": 0.1911, + "step": 54300 + }, + { + "epoch": 2.0969921618595313, + "grad_norm": 0.09730927646160126, + "learning_rate": 6.0203096644658096e-05, + "loss": 0.2272, + "step": 54310 + }, + { + "epoch": 2.097378277153558, + "grad_norm": 2.5745980739593506, + "learning_rate": 6.017735562505631e-05, + "loss": 0.2252, + "step": 54320 + }, + { + "epoch": 2.097764392447585, + "grad_norm": 0.4371737539768219, + "learning_rate": 6.015161460545452e-05, + "loss": 
0.1397, + "step": 54330 + }, + { + "epoch": 2.098150507741612, + "grad_norm": 0.267517626285553, + "learning_rate": 6.012587358585274e-05, + "loss": 0.0745, + "step": 54340 + }, + { + "epoch": 2.0985366230356384, + "grad_norm": 0.8187986016273499, + "learning_rate": 6.0100132566250956e-05, + "loss": 0.2474, + "step": 54350 + }, + { + "epoch": 2.0989227383296654, + "grad_norm": 1.1416966915130615, + "learning_rate": 6.007439154664917e-05, + "loss": 0.1445, + "step": 54360 + }, + { + "epoch": 2.099308853623692, + "grad_norm": 0.4858175814151764, + "learning_rate": 6.0048650527047376e-05, + "loss": 0.1115, + "step": 54370 + }, + { + "epoch": 2.099694968917719, + "grad_norm": 0.17463591694831848, + "learning_rate": 6.002290950744559e-05, + "loss": 0.128, + "step": 54380 + }, + { + "epoch": 2.1000810842117454, + "grad_norm": 1.2394402027130127, + "learning_rate": 5.99971684878438e-05, + "loss": 0.2724, + "step": 54390 + }, + { + "epoch": 2.1004671995057724, + "grad_norm": 0.2638779282569885, + "learning_rate": 5.997142746824202e-05, + "loss": 0.0761, + "step": 54400 + }, + { + "epoch": 2.1008533147997994, + "grad_norm": 0.34836921095848083, + "learning_rate": 5.9945686448640236e-05, + "loss": 0.1303, + "step": 54410 + }, + { + "epoch": 2.101239430093826, + "grad_norm": 1.0886906385421753, + "learning_rate": 5.991994542903845e-05, + "loss": 0.1448, + "step": 54420 + }, + { + "epoch": 2.101625545387853, + "grad_norm": 1.4876662492752075, + "learning_rate": 5.989420440943666e-05, + "loss": 0.1355, + "step": 54430 + }, + { + "epoch": 2.1020116606818795, + "grad_norm": 0.5606863498687744, + "learning_rate": 5.986846338983487e-05, + "loss": 0.2719, + "step": 54440 + }, + { + "epoch": 2.1023977759759065, + "grad_norm": 2.005791664123535, + "learning_rate": 5.984272237023308e-05, + "loss": 0.2387, + "step": 54450 + }, + { + "epoch": 2.102783891269933, + "grad_norm": 1.1652408838272095, + "learning_rate": 5.9816981350631296e-05, + "loss": 0.2491, + "step": 54460 + }, + { + "epoch": 2.10317000656396, + "grad_norm": 1.9928478002548218, + "learning_rate": 5.9791240331029516e-05, + "loss": 0.2429, + "step": 54470 + }, + { + "epoch": 2.103556121857987, + "grad_norm": 1.4620413780212402, + "learning_rate": 5.976549931142773e-05, + "loss": 0.2706, + "step": 54480 + }, + { + "epoch": 2.1039422371520136, + "grad_norm": 0.40234237909317017, + "learning_rate": 5.973975829182594e-05, + "loss": 0.1181, + "step": 54490 + }, + { + "epoch": 2.1043283524460406, + "grad_norm": 0.2635735273361206, + "learning_rate": 5.9714017272224156e-05, + "loss": 0.1217, + "step": 54500 + }, + { + "epoch": 2.104714467740067, + "grad_norm": 0.5525489449501038, + "learning_rate": 5.968827625262237e-05, + "loss": 0.2949, + "step": 54510 + }, + { + "epoch": 2.105100583034094, + "grad_norm": 1.1089653968811035, + "learning_rate": 5.9662535233020576e-05, + "loss": 0.1418, + "step": 54520 + }, + { + "epoch": 2.1054866983281206, + "grad_norm": 0.21478118002414703, + "learning_rate": 5.96367942134188e-05, + "loss": 0.0913, + "step": 54530 + }, + { + "epoch": 2.1058728136221476, + "grad_norm": 1.0484806299209595, + "learning_rate": 5.9611053193817016e-05, + "loss": 0.1215, + "step": 54540 + }, + { + "epoch": 2.106258928916174, + "grad_norm": 2.487285852432251, + "learning_rate": 5.958531217421522e-05, + "loss": 0.2268, + "step": 54550 + }, + { + "epoch": 2.106645044210201, + "grad_norm": 5.35322904586792, + "learning_rate": 5.9559571154613436e-05, + "loss": 0.0894, + "step": 54560 + }, + { + "epoch": 2.107031159504228, + "grad_norm": 
3.2365424633026123, + "learning_rate": 5.953383013501165e-05, + "loss": 0.3387, + "step": 54570 + }, + { + "epoch": 2.1074172747982547, + "grad_norm": 0.9013198614120483, + "learning_rate": 5.950808911540986e-05, + "loss": 0.103, + "step": 54580 + }, + { + "epoch": 2.1078033900922817, + "grad_norm": 0.7987234592437744, + "learning_rate": 5.948234809580808e-05, + "loss": 0.1964, + "step": 54590 + }, + { + "epoch": 2.1081895053863082, + "grad_norm": 0.6315350532531738, + "learning_rate": 5.9456607076206296e-05, + "loss": 0.1523, + "step": 54600 + }, + { + "epoch": 2.108575620680335, + "grad_norm": 2.4431264400482178, + "learning_rate": 5.943086605660451e-05, + "loss": 0.255, + "step": 54610 + }, + { + "epoch": 2.1089617359743618, + "grad_norm": 1.1002070903778076, + "learning_rate": 5.9405125037002715e-05, + "loss": 0.3092, + "step": 54620 + }, + { + "epoch": 2.1093478512683888, + "grad_norm": 1.1607320308685303, + "learning_rate": 5.937938401740093e-05, + "loss": 0.1338, + "step": 54630 + }, + { + "epoch": 2.1097339665624157, + "grad_norm": 1.9413435459136963, + "learning_rate": 5.935364299779914e-05, + "loss": 0.2128, + "step": 54640 + }, + { + "epoch": 2.1101200818564423, + "grad_norm": 1.6216448545455933, + "learning_rate": 5.932790197819736e-05, + "loss": 0.2688, + "step": 54650 + }, + { + "epoch": 2.1105061971504693, + "grad_norm": 0.9825085997581482, + "learning_rate": 5.9302160958595576e-05, + "loss": 0.146, + "step": 54660 + }, + { + "epoch": 2.110892312444496, + "grad_norm": 2.0620877742767334, + "learning_rate": 5.927641993899379e-05, + "loss": 0.1987, + "step": 54670 + }, + { + "epoch": 2.111278427738523, + "grad_norm": 0.6155973076820374, + "learning_rate": 5.9250678919392e-05, + "loss": 0.0886, + "step": 54680 + }, + { + "epoch": 2.1116645430325494, + "grad_norm": 0.08519631624221802, + "learning_rate": 5.922493789979021e-05, + "loss": 0.1087, + "step": 54690 + }, + { + "epoch": 2.1120506583265763, + "grad_norm": 1.9819930791854858, + "learning_rate": 5.919919688018842e-05, + "loss": 0.2588, + "step": 54700 + }, + { + "epoch": 2.112436773620603, + "grad_norm": 0.32515060901641846, + "learning_rate": 5.9173455860586635e-05, + "loss": 0.2297, + "step": 54710 + }, + { + "epoch": 2.11282288891463, + "grad_norm": 2.5351369380950928, + "learning_rate": 5.9147714840984855e-05, + "loss": 0.1391, + "step": 54720 + }, + { + "epoch": 2.113209004208657, + "grad_norm": 0.3489625155925751, + "learning_rate": 5.912197382138307e-05, + "loss": 0.1012, + "step": 54730 + }, + { + "epoch": 2.1135951195026834, + "grad_norm": 0.4030207693576813, + "learning_rate": 5.909623280178128e-05, + "loss": 0.2788, + "step": 54740 + }, + { + "epoch": 2.1139812347967104, + "grad_norm": 1.3358521461486816, + "learning_rate": 5.9070491782179495e-05, + "loss": 0.4126, + "step": 54750 + }, + { + "epoch": 2.114367350090737, + "grad_norm": 1.6924939155578613, + "learning_rate": 5.90447507625777e-05, + "loss": 0.1319, + "step": 54760 + }, + { + "epoch": 2.114753465384764, + "grad_norm": 0.08663685619831085, + "learning_rate": 5.9019009742975915e-05, + "loss": 0.3066, + "step": 54770 + }, + { + "epoch": 2.1151395806787905, + "grad_norm": 0.2018699198961258, + "learning_rate": 5.899326872337414e-05, + "loss": 0.1316, + "step": 54780 + }, + { + "epoch": 2.1155256959728175, + "grad_norm": 1.8767002820968628, + "learning_rate": 5.896752770377235e-05, + "loss": 0.2638, + "step": 54790 + }, + { + "epoch": 2.1159118112668445, + "grad_norm": 2.719196081161499, + "learning_rate": 5.894178668417056e-05, + "loss": 0.166, + 
"step": 54800 + }, + { + "epoch": 2.116297926560871, + "grad_norm": 1.5541603565216064, + "learning_rate": 5.8916045664568775e-05, + "loss": 0.1502, + "step": 54810 + }, + { + "epoch": 2.116684041854898, + "grad_norm": 2.0174572467803955, + "learning_rate": 5.889030464496699e-05, + "loss": 0.2778, + "step": 54820 + }, + { + "epoch": 2.1170701571489245, + "grad_norm": 1.4575814008712769, + "learning_rate": 5.88645636253652e-05, + "loss": 0.2026, + "step": 54830 + }, + { + "epoch": 2.1174562724429515, + "grad_norm": 2.583587646484375, + "learning_rate": 5.883882260576342e-05, + "loss": 0.1695, + "step": 54840 + }, + { + "epoch": 2.117842387736978, + "grad_norm": 1.3352335691452026, + "learning_rate": 5.8813081586161635e-05, + "loss": 0.2557, + "step": 54850 + }, + { + "epoch": 2.118228503031005, + "grad_norm": 1.0539675951004028, + "learning_rate": 5.878734056655985e-05, + "loss": 0.2358, + "step": 54860 + }, + { + "epoch": 2.118614618325032, + "grad_norm": 1.6472233533859253, + "learning_rate": 5.8761599546958055e-05, + "loss": 0.1166, + "step": 54870 + }, + { + "epoch": 2.1190007336190586, + "grad_norm": 1.6128703355789185, + "learning_rate": 5.873585852735627e-05, + "loss": 0.1584, + "step": 54880 + }, + { + "epoch": 2.1193868489130856, + "grad_norm": 2.4982826709747314, + "learning_rate": 5.871011750775448e-05, + "loss": 0.2192, + "step": 54890 + }, + { + "epoch": 2.119772964207112, + "grad_norm": 1.7372159957885742, + "learning_rate": 5.86843764881527e-05, + "loss": 0.1824, + "step": 54900 + }, + { + "epoch": 2.120159079501139, + "grad_norm": 1.9102532863616943, + "learning_rate": 5.8658635468550915e-05, + "loss": 0.1234, + "step": 54910 + }, + { + "epoch": 2.1205451947951657, + "grad_norm": 0.9978908896446228, + "learning_rate": 5.863289444894913e-05, + "loss": 0.2699, + "step": 54920 + }, + { + "epoch": 2.1209313100891927, + "grad_norm": 0.8557146787643433, + "learning_rate": 5.860715342934734e-05, + "loss": 0.0984, + "step": 54930 + }, + { + "epoch": 2.1213174253832197, + "grad_norm": 2.5358450412750244, + "learning_rate": 5.858141240974555e-05, + "loss": 0.2159, + "step": 54940 + }, + { + "epoch": 2.121703540677246, + "grad_norm": 2.588324785232544, + "learning_rate": 5.855567139014376e-05, + "loss": 0.1311, + "step": 54950 + }, + { + "epoch": 2.122089655971273, + "grad_norm": 3.9097461700439453, + "learning_rate": 5.852993037054199e-05, + "loss": 0.2061, + "step": 54960 + }, + { + "epoch": 2.1224757712652997, + "grad_norm": 0.992247998714447, + "learning_rate": 5.8504189350940195e-05, + "loss": 0.0774, + "step": 54970 + }, + { + "epoch": 2.1228618865593267, + "grad_norm": 0.9253148436546326, + "learning_rate": 5.847844833133841e-05, + "loss": 0.1274, + "step": 54980 + }, + { + "epoch": 2.1232480018533533, + "grad_norm": 0.8236201405525208, + "learning_rate": 5.845270731173662e-05, + "loss": 0.0951, + "step": 54990 + }, + { + "epoch": 2.1236341171473803, + "grad_norm": 0.9370753169059753, + "learning_rate": 5.8426966292134835e-05, + "loss": 0.1832, + "step": 55000 + }, + { + "epoch": 2.124020232441407, + "grad_norm": 2.7415149211883545, + "learning_rate": 5.840122527253304e-05, + "loss": 0.1798, + "step": 55010 + }, + { + "epoch": 2.124406347735434, + "grad_norm": 1.4576952457427979, + "learning_rate": 5.8375484252931255e-05, + "loss": 0.2384, + "step": 55020 + }, + { + "epoch": 2.124792463029461, + "grad_norm": 2.303542137145996, + "learning_rate": 5.834974323332948e-05, + "loss": 0.2166, + "step": 55030 + }, + { + "epoch": 2.1251785783234873, + "grad_norm": 1.1065007448196411, + 
"learning_rate": 5.832400221372769e-05, + "loss": 0.1723, + "step": 55040 + }, + { + "epoch": 2.1255646936175143, + "grad_norm": 4.247042655944824, + "learning_rate": 5.82982611941259e-05, + "loss": 0.3005, + "step": 55050 + }, + { + "epoch": 2.125950808911541, + "grad_norm": 1.3860160112380981, + "learning_rate": 5.8272520174524115e-05, + "loss": 0.2504, + "step": 55060 + }, + { + "epoch": 2.126336924205568, + "grad_norm": 0.2923658788204193, + "learning_rate": 5.824677915492233e-05, + "loss": 0.1933, + "step": 55070 + }, + { + "epoch": 2.1267230394995944, + "grad_norm": 2.0818326473236084, + "learning_rate": 5.822103813532054e-05, + "loss": 0.2383, + "step": 55080 + }, + { + "epoch": 2.1271091547936214, + "grad_norm": 2.5381951332092285, + "learning_rate": 5.819529711571876e-05, + "loss": 0.1791, + "step": 55090 + }, + { + "epoch": 2.127495270087648, + "grad_norm": 0.7398497462272644, + "learning_rate": 5.8169556096116975e-05, + "loss": 0.1255, + "step": 55100 + }, + { + "epoch": 2.127881385381675, + "grad_norm": 1.7903372049331665, + "learning_rate": 5.814381507651518e-05, + "loss": 0.2066, + "step": 55110 + }, + { + "epoch": 2.128267500675702, + "grad_norm": 0.8950181007385254, + "learning_rate": 5.8118074056913395e-05, + "loss": 0.2158, + "step": 55120 + }, + { + "epoch": 2.1286536159697285, + "grad_norm": 0.39259612560272217, + "learning_rate": 5.809233303731161e-05, + "loss": 0.0752, + "step": 55130 + }, + { + "epoch": 2.1290397312637555, + "grad_norm": 1.3042824268341064, + "learning_rate": 5.806659201770982e-05, + "loss": 0.2185, + "step": 55140 + }, + { + "epoch": 2.129425846557782, + "grad_norm": 2.0668983459472656, + "learning_rate": 5.804085099810804e-05, + "loss": 0.1751, + "step": 55150 + }, + { + "epoch": 2.129811961851809, + "grad_norm": 0.634894609451294, + "learning_rate": 5.8015109978506255e-05, + "loss": 0.1275, + "step": 55160 + }, + { + "epoch": 2.1301980771458355, + "grad_norm": 0.9348855018615723, + "learning_rate": 5.798936895890447e-05, + "loss": 0.2011, + "step": 55170 + }, + { + "epoch": 2.1305841924398625, + "grad_norm": 0.686715841293335, + "learning_rate": 5.796362793930268e-05, + "loss": 0.1728, + "step": 55180 + }, + { + "epoch": 2.1309703077338895, + "grad_norm": 2.238306999206543, + "learning_rate": 5.793788691970089e-05, + "loss": 0.1944, + "step": 55190 + }, + { + "epoch": 2.131356423027916, + "grad_norm": 0.7057651281356812, + "learning_rate": 5.79121459000991e-05, + "loss": 0.1892, + "step": 55200 + }, + { + "epoch": 2.131742538321943, + "grad_norm": 2.050670862197876, + "learning_rate": 5.788640488049733e-05, + "loss": 0.1539, + "step": 55210 + }, + { + "epoch": 2.1321286536159696, + "grad_norm": 1.924126386642456, + "learning_rate": 5.7860663860895534e-05, + "loss": 0.3623, + "step": 55220 + }, + { + "epoch": 2.1325147689099966, + "grad_norm": 0.32087692618370056, + "learning_rate": 5.783492284129375e-05, + "loss": 0.1179, + "step": 55230 + }, + { + "epoch": 2.132900884204023, + "grad_norm": 2.266429901123047, + "learning_rate": 5.780918182169196e-05, + "loss": 0.1164, + "step": 55240 + }, + { + "epoch": 2.13328699949805, + "grad_norm": 0.5455263257026672, + "learning_rate": 5.7783440802090174e-05, + "loss": 0.1361, + "step": 55250 + }, + { + "epoch": 2.133673114792077, + "grad_norm": 0.6196660399436951, + "learning_rate": 5.775769978248838e-05, + "loss": 0.1986, + "step": 55260 + }, + { + "epoch": 2.1340592300861037, + "grad_norm": 0.4529377222061157, + "learning_rate": 5.7731958762886594e-05, + "loss": 0.3009, + "step": 55270 + }, + { + "epoch": 
2.1344453453801306, + "grad_norm": 0.44304555654525757, + "learning_rate": 5.770621774328482e-05, + "loss": 0.2522, + "step": 55280 + }, + { + "epoch": 2.134831460674157, + "grad_norm": 2.3878743648529053, + "learning_rate": 5.768047672368303e-05, + "loss": 0.1762, + "step": 55290 + }, + { + "epoch": 2.135217575968184, + "grad_norm": 1.3650730848312378, + "learning_rate": 5.765473570408124e-05, + "loss": 0.1596, + "step": 55300 + }, + { + "epoch": 2.1356036912622107, + "grad_norm": 1.0818227529525757, + "learning_rate": 5.7628994684479454e-05, + "loss": 0.0982, + "step": 55310 + }, + { + "epoch": 2.1359898065562377, + "grad_norm": 0.639480710029602, + "learning_rate": 5.760325366487767e-05, + "loss": 0.1165, + "step": 55320 + }, + { + "epoch": 2.1363759218502647, + "grad_norm": 0.9253720641136169, + "learning_rate": 5.7577512645275874e-05, + "loss": 0.3411, + "step": 55330 + }, + { + "epoch": 2.1367620371442912, + "grad_norm": 1.2035890817642212, + "learning_rate": 5.75517716256741e-05, + "loss": 0.2002, + "step": 55340 + }, + { + "epoch": 2.1371481524383182, + "grad_norm": 1.8806023597717285, + "learning_rate": 5.7526030606072314e-05, + "loss": 0.1834, + "step": 55350 + }, + { + "epoch": 2.137534267732345, + "grad_norm": 1.9944851398468018, + "learning_rate": 5.750028958647052e-05, + "loss": 0.2019, + "step": 55360 + }, + { + "epoch": 2.1379203830263718, + "grad_norm": 0.5148534774780273, + "learning_rate": 5.7474548566868734e-05, + "loss": 0.263, + "step": 55370 + }, + { + "epoch": 2.1383064983203983, + "grad_norm": 1.1325627565383911, + "learning_rate": 5.744880754726695e-05, + "loss": 0.1479, + "step": 55380 + }, + { + "epoch": 2.1386926136144253, + "grad_norm": 0.9628505706787109, + "learning_rate": 5.742306652766516e-05, + "loss": 0.1216, + "step": 55390 + }, + { + "epoch": 2.1390787289084523, + "grad_norm": 0.1893000602722168, + "learning_rate": 5.739732550806338e-05, + "loss": 0.161, + "step": 55400 + }, + { + "epoch": 2.139464844202479, + "grad_norm": 0.9227676391601562, + "learning_rate": 5.7371584488461594e-05, + "loss": 0.1336, + "step": 55410 + }, + { + "epoch": 2.139850959496506, + "grad_norm": 0.3534090220928192, + "learning_rate": 5.734584346885981e-05, + "loss": 0.1184, + "step": 55420 + }, + { + "epoch": 2.1402370747905324, + "grad_norm": 0.9937344193458557, + "learning_rate": 5.7320102449258014e-05, + "loss": 0.1053, + "step": 55430 + }, + { + "epoch": 2.1406231900845594, + "grad_norm": 1.7365370988845825, + "learning_rate": 5.729436142965623e-05, + "loss": 0.1863, + "step": 55440 + }, + { + "epoch": 2.141009305378586, + "grad_norm": 1.2345154285430908, + "learning_rate": 5.726862041005444e-05, + "loss": 0.3243, + "step": 55450 + }, + { + "epoch": 2.141395420672613, + "grad_norm": 0.36491262912750244, + "learning_rate": 5.724287939045266e-05, + "loss": 0.3046, + "step": 55460 + }, + { + "epoch": 2.1417815359666394, + "grad_norm": 0.6416808366775513, + "learning_rate": 5.7217138370850874e-05, + "loss": 0.1608, + "step": 55470 + }, + { + "epoch": 2.1421676512606664, + "grad_norm": 1.3553400039672852, + "learning_rate": 5.719139735124909e-05, + "loss": 0.2223, + "step": 55480 + }, + { + "epoch": 2.1425537665546934, + "grad_norm": 1.049273133277893, + "learning_rate": 5.71656563316473e-05, + "loss": 0.1196, + "step": 55490 + }, + { + "epoch": 2.14293988184872, + "grad_norm": 0.8493034243583679, + "learning_rate": 5.7139915312045514e-05, + "loss": 0.2165, + "step": 55500 + }, + { + "epoch": 2.143325997142747, + "grad_norm": 0.6411147117614746, + "learning_rate": 
5.711417429244372e-05, + "loss": 0.1442, + "step": 55510 + }, + { + "epoch": 2.1437121124367735, + "grad_norm": 0.7366828322410583, + "learning_rate": 5.7088433272841934e-05, + "loss": 0.3625, + "step": 55520 + }, + { + "epoch": 2.1440982277308005, + "grad_norm": 0.7757991552352905, + "learning_rate": 5.706269225324016e-05, + "loss": 0.1393, + "step": 55530 + }, + { + "epoch": 2.144484343024827, + "grad_norm": 1.496213436126709, + "learning_rate": 5.703695123363837e-05, + "loss": 0.2351, + "step": 55540 + }, + { + "epoch": 2.144870458318854, + "grad_norm": 1.1395788192749023, + "learning_rate": 5.701121021403658e-05, + "loss": 0.1933, + "step": 55550 + }, + { + "epoch": 2.1452565736128806, + "grad_norm": 3.914621353149414, + "learning_rate": 5.6985469194434794e-05, + "loss": 0.2532, + "step": 55560 + }, + { + "epoch": 2.1456426889069076, + "grad_norm": 2.1900668144226074, + "learning_rate": 5.695972817483301e-05, + "loss": 0.2192, + "step": 55570 + }, + { + "epoch": 2.1460288042009346, + "grad_norm": 2.3883585929870605, + "learning_rate": 5.6933987155231214e-05, + "loss": 0.2819, + "step": 55580 + }, + { + "epoch": 2.146414919494961, + "grad_norm": 0.28431379795074463, + "learning_rate": 5.690824613562944e-05, + "loss": 0.2381, + "step": 55590 + }, + { + "epoch": 2.146801034788988, + "grad_norm": 0.39490944147109985, + "learning_rate": 5.6882505116027654e-05, + "loss": 0.1224, + "step": 55600 + }, + { + "epoch": 2.1471871500830146, + "grad_norm": 0.5814546346664429, + "learning_rate": 5.685676409642586e-05, + "loss": 0.0878, + "step": 55610 + }, + { + "epoch": 2.1475732653770416, + "grad_norm": 3.863250255584717, + "learning_rate": 5.6831023076824074e-05, + "loss": 0.2972, + "step": 55620 + }, + { + "epoch": 2.147959380671068, + "grad_norm": 1.5579304695129395, + "learning_rate": 5.680528205722229e-05, + "loss": 0.2035, + "step": 55630 + }, + { + "epoch": 2.148345495965095, + "grad_norm": 1.3953123092651367, + "learning_rate": 5.67795410376205e-05, + "loss": 0.2039, + "step": 55640 + }, + { + "epoch": 2.148731611259122, + "grad_norm": 0.30549386143684387, + "learning_rate": 5.675380001801872e-05, + "loss": 0.1311, + "step": 55650 + }, + { + "epoch": 2.1491177265531487, + "grad_norm": 1.704403281211853, + "learning_rate": 5.6728058998416934e-05, + "loss": 0.1683, + "step": 55660 + }, + { + "epoch": 2.1495038418471757, + "grad_norm": 0.47322070598602295, + "learning_rate": 5.670231797881515e-05, + "loss": 0.0995, + "step": 55670 + }, + { + "epoch": 2.1498899571412022, + "grad_norm": 0.8710082769393921, + "learning_rate": 5.6676576959213354e-05, + "loss": 0.1737, + "step": 55680 + }, + { + "epoch": 2.1502760724352292, + "grad_norm": 3.141096353530884, + "learning_rate": 5.665083593961157e-05, + "loss": 0.1693, + "step": 55690 + }, + { + "epoch": 2.1506621877292558, + "grad_norm": 1.8428922891616821, + "learning_rate": 5.662509492000978e-05, + "loss": 0.0868, + "step": 55700 + }, + { + "epoch": 2.1510483030232828, + "grad_norm": 0.3056959807872772, + "learning_rate": 5.6599353900408e-05, + "loss": 0.2256, + "step": 55710 + }, + { + "epoch": 2.1514344183173097, + "grad_norm": 0.14365683495998383, + "learning_rate": 5.6573612880806214e-05, + "loss": 0.2421, + "step": 55720 + }, + { + "epoch": 2.1518205336113363, + "grad_norm": 0.34138041734695435, + "learning_rate": 5.654787186120443e-05, + "loss": 0.2089, + "step": 55730 + }, + { + "epoch": 2.1522066489053633, + "grad_norm": 1.2336843013763428, + "learning_rate": 5.652213084160264e-05, + "loss": 0.2282, + "step": 55740 + }, + { + "epoch": 
2.15259276419939, + "grad_norm": 0.121715247631073, + "learning_rate": 5.6496389822000854e-05, + "loss": 0.341, + "step": 55750 + }, + { + "epoch": 2.152978879493417, + "grad_norm": 1.271396279335022, + "learning_rate": 5.647064880239906e-05, + "loss": 0.346, + "step": 55760 + }, + { + "epoch": 2.1533649947874434, + "grad_norm": 0.39105209708213806, + "learning_rate": 5.6444907782797273e-05, + "loss": 0.2369, + "step": 55770 + }, + { + "epoch": 2.1537511100814704, + "grad_norm": 2.396703004837036, + "learning_rate": 5.6419166763195493e-05, + "loss": 0.1595, + "step": 55780 + }, + { + "epoch": 2.1541372253754973, + "grad_norm": 1.4576066732406616, + "learning_rate": 5.639342574359371e-05, + "loss": 0.1757, + "step": 55790 + }, + { + "epoch": 2.154523340669524, + "grad_norm": 0.5846558213233948, + "learning_rate": 5.636768472399192e-05, + "loss": 0.0961, + "step": 55800 + }, + { + "epoch": 2.154909455963551, + "grad_norm": 0.2891974449157715, + "learning_rate": 5.6341943704390133e-05, + "loss": 0.1381, + "step": 55810 + }, + { + "epoch": 2.1552955712575774, + "grad_norm": 1.4091805219650269, + "learning_rate": 5.631620268478835e-05, + "loss": 0.1934, + "step": 55820 + }, + { + "epoch": 2.1556816865516044, + "grad_norm": 0.5410944223403931, + "learning_rate": 5.629046166518655e-05, + "loss": 0.2414, + "step": 55830 + }, + { + "epoch": 2.156067801845631, + "grad_norm": 0.7418326735496521, + "learning_rate": 5.626472064558478e-05, + "loss": 0.0405, + "step": 55840 + }, + { + "epoch": 2.156453917139658, + "grad_norm": 0.03413806110620499, + "learning_rate": 5.6238979625982993e-05, + "loss": 0.1131, + "step": 55850 + }, + { + "epoch": 2.1568400324336845, + "grad_norm": 0.11657452583312988, + "learning_rate": 5.62132386063812e-05, + "loss": 0.2259, + "step": 55860 + }, + { + "epoch": 2.1572261477277115, + "grad_norm": 0.9248818755149841, + "learning_rate": 5.618749758677941e-05, + "loss": 0.1091, + "step": 55870 + }, + { + "epoch": 2.1576122630217385, + "grad_norm": 0.19540861248970032, + "learning_rate": 5.6161756567177627e-05, + "loss": 0.1831, + "step": 55880 + }, + { + "epoch": 2.157998378315765, + "grad_norm": 1.0880403518676758, + "learning_rate": 5.613601554757584e-05, + "loss": 0.2051, + "step": 55890 + }, + { + "epoch": 2.158384493609792, + "grad_norm": 0.766243577003479, + "learning_rate": 5.611027452797406e-05, + "loss": 0.123, + "step": 55900 + }, + { + "epoch": 2.1587706089038186, + "grad_norm": 0.7406583428382874, + "learning_rate": 5.608453350837227e-05, + "loss": 0.1137, + "step": 55910 + }, + { + "epoch": 2.1591567241978455, + "grad_norm": 0.5550261735916138, + "learning_rate": 5.605879248877049e-05, + "loss": 0.1978, + "step": 55920 + }, + { + "epoch": 2.159542839491872, + "grad_norm": 1.202231526374817, + "learning_rate": 5.603305146916869e-05, + "loss": 0.2157, + "step": 55930 + }, + { + "epoch": 2.159928954785899, + "grad_norm": 0.7214229702949524, + "learning_rate": 5.6007310449566906e-05, + "loss": 0.3131, + "step": 55940 + }, + { + "epoch": 2.160315070079926, + "grad_norm": 0.6656380295753479, + "learning_rate": 5.598156942996512e-05, + "loss": 0.1293, + "step": 55950 + }, + { + "epoch": 2.1607011853739526, + "grad_norm": 0.15865078568458557, + "learning_rate": 5.595582841036334e-05, + "loss": 0.045, + "step": 55960 + }, + { + "epoch": 2.1610873006679796, + "grad_norm": 0.6749983429908752, + "learning_rate": 5.593008739076155e-05, + "loss": 0.1891, + "step": 55970 + }, + { + "epoch": 2.161473415962006, + "grad_norm": 1.1434985399246216, + "learning_rate": 
5.5904346371159766e-05, + "loss": 0.0775, + "step": 55980 + }, + { + "epoch": 2.161859531256033, + "grad_norm": 1.1395485401153564, + "learning_rate": 5.587860535155798e-05, + "loss": 0.1444, + "step": 55990 + }, + { + "epoch": 2.1622456465500597, + "grad_norm": 0.44319289922714233, + "learning_rate": 5.5852864331956186e-05, + "loss": 0.2506, + "step": 56000 + }, + { + "epoch": 2.1626317618440867, + "grad_norm": 0.9017069935798645, + "learning_rate": 5.58271233123544e-05, + "loss": 0.2104, + "step": 56010 + }, + { + "epoch": 2.163017877138113, + "grad_norm": 2.420107126235962, + "learning_rate": 5.580138229275261e-05, + "loss": 0.2541, + "step": 56020 + }, + { + "epoch": 2.16340399243214, + "grad_norm": 0.5543047785758972, + "learning_rate": 5.577564127315083e-05, + "loss": 0.1408, + "step": 56030 + }, + { + "epoch": 2.163790107726167, + "grad_norm": 0.8099603652954102, + "learning_rate": 5.5749900253549046e-05, + "loss": 0.0781, + "step": 56040 + }, + { + "epoch": 2.1641762230201937, + "grad_norm": 0.972820520401001, + "learning_rate": 5.572415923394726e-05, + "loss": 0.0957, + "step": 56050 + }, + { + "epoch": 2.1645623383142207, + "grad_norm": 0.5426781177520752, + "learning_rate": 5.569841821434547e-05, + "loss": 0.1632, + "step": 56060 + }, + { + "epoch": 2.1649484536082473, + "grad_norm": 2.088747501373291, + "learning_rate": 5.5672677194743686e-05, + "loss": 0.143, + "step": 56070 + }, + { + "epoch": 2.1653345689022743, + "grad_norm": 0.3575989007949829, + "learning_rate": 5.564693617514189e-05, + "loss": 0.2365, + "step": 56080 + }, + { + "epoch": 2.165720684196301, + "grad_norm": 1.2159044742584229, + "learning_rate": 5.562119515554012e-05, + "loss": 0.2034, + "step": 56090 + }, + { + "epoch": 2.166106799490328, + "grad_norm": 1.2994232177734375, + "learning_rate": 5.559545413593833e-05, + "loss": 0.074, + "step": 56100 + }, + { + "epoch": 2.166492914784355, + "grad_norm": 0.2585364878177643, + "learning_rate": 5.556971311633654e-05, + "loss": 0.1147, + "step": 56110 + }, + { + "epoch": 2.1668790300783813, + "grad_norm": 0.18736127018928528, + "learning_rate": 5.554397209673475e-05, + "loss": 0.0895, + "step": 56120 + }, + { + "epoch": 2.1672651453724083, + "grad_norm": 0.43447959423065186, + "learning_rate": 5.5518231077132966e-05, + "loss": 0.1981, + "step": 56130 + }, + { + "epoch": 2.167651260666435, + "grad_norm": 1.2077672481536865, + "learning_rate": 5.549249005753118e-05, + "loss": 0.1432, + "step": 56140 + }, + { + "epoch": 2.168037375960462, + "grad_norm": 1.672919511795044, + "learning_rate": 5.54667490379294e-05, + "loss": 0.1381, + "step": 56150 + }, + { + "epoch": 2.1684234912544884, + "grad_norm": 0.6553566455841064, + "learning_rate": 5.544100801832761e-05, + "loss": 0.1089, + "step": 56160 + }, + { + "epoch": 2.1688096065485154, + "grad_norm": 0.4206780791282654, + "learning_rate": 5.5415266998725826e-05, + "loss": 0.1392, + "step": 56170 + }, + { + "epoch": 2.1691957218425424, + "grad_norm": 1.482874870300293, + "learning_rate": 5.538952597912403e-05, + "loss": 0.2027, + "step": 56180 + }, + { + "epoch": 2.169581837136569, + "grad_norm": 1.802695631980896, + "learning_rate": 5.5363784959522246e-05, + "loss": 0.2954, + "step": 56190 + }, + { + "epoch": 2.169967952430596, + "grad_norm": 0.7268577218055725, + "learning_rate": 5.533804393992046e-05, + "loss": 0.182, + "step": 56200 + }, + { + "epoch": 2.1703540677246225, + "grad_norm": 0.604767918586731, + "learning_rate": 5.531230292031868e-05, + "loss": 0.1247, + "step": 56210 + }, + { + "epoch": 
2.1707401830186495, + "grad_norm": 2.211203098297119, + "learning_rate": 5.528656190071689e-05, + "loss": 0.2143, + "step": 56220 + }, + { + "epoch": 2.171126298312676, + "grad_norm": 0.03701888397336006, + "learning_rate": 5.5260820881115106e-05, + "loss": 0.2246, + "step": 56230 + }, + { + "epoch": 2.171512413606703, + "grad_norm": 3.4111924171447754, + "learning_rate": 5.523507986151332e-05, + "loss": 0.3147, + "step": 56240 + }, + { + "epoch": 2.17189852890073, + "grad_norm": 0.509873628616333, + "learning_rate": 5.5209338841911526e-05, + "loss": 0.1482, + "step": 56250 + }, + { + "epoch": 2.1722846441947565, + "grad_norm": 1.0144810676574707, + "learning_rate": 5.518359782230974e-05, + "loss": 0.161, + "step": 56260 + }, + { + "epoch": 2.1726707594887835, + "grad_norm": 1.7236958742141724, + "learning_rate": 5.5157856802707966e-05, + "loss": 0.2863, + "step": 56270 + }, + { + "epoch": 2.17305687478281, + "grad_norm": 2.028493881225586, + "learning_rate": 5.513211578310617e-05, + "loss": 0.1263, + "step": 56280 + }, + { + "epoch": 2.173442990076837, + "grad_norm": 0.18114915490150452, + "learning_rate": 5.5106374763504386e-05, + "loss": 0.1941, + "step": 56290 + }, + { + "epoch": 2.1738291053708636, + "grad_norm": 2.091604471206665, + "learning_rate": 5.50806337439026e-05, + "loss": 0.1418, + "step": 56300 + }, + { + "epoch": 2.1742152206648906, + "grad_norm": 3.5671277046203613, + "learning_rate": 5.505489272430081e-05, + "loss": 0.1645, + "step": 56310 + }, + { + "epoch": 2.174601335958917, + "grad_norm": 2.093780040740967, + "learning_rate": 5.502915170469902e-05, + "loss": 0.2723, + "step": 56320 + }, + { + "epoch": 2.174987451252944, + "grad_norm": 1.0060350894927979, + "learning_rate": 5.500341068509723e-05, + "loss": 0.1432, + "step": 56330 + }, + { + "epoch": 2.175373566546971, + "grad_norm": 1.280118465423584, + "learning_rate": 5.497766966549546e-05, + "loss": 0.2152, + "step": 56340 + }, + { + "epoch": 2.1757596818409977, + "grad_norm": 0.14088940620422363, + "learning_rate": 5.4951928645893666e-05, + "loss": 0.2147, + "step": 56350 + }, + { + "epoch": 2.1761457971350247, + "grad_norm": 1.0671783685684204, + "learning_rate": 5.492618762629188e-05, + "loss": 0.1269, + "step": 56360 + }, + { + "epoch": 2.176531912429051, + "grad_norm": 0.20585323870182037, + "learning_rate": 5.490044660669009e-05, + "loss": 0.1455, + "step": 56370 + }, + { + "epoch": 2.176918027723078, + "grad_norm": 1.8759623765945435, + "learning_rate": 5.4874705587088306e-05, + "loss": 0.3711, + "step": 56380 + }, + { + "epoch": 2.1773041430171047, + "grad_norm": 1.1874949932098389, + "learning_rate": 5.484896456748652e-05, + "loss": 0.1062, + "step": 56390 + }, + { + "epoch": 2.1776902583111317, + "grad_norm": 1.0083370208740234, + "learning_rate": 5.482322354788474e-05, + "loss": 0.1121, + "step": 56400 + }, + { + "epoch": 2.1780763736051583, + "grad_norm": 0.7510607838630676, + "learning_rate": 5.479748252828295e-05, + "loss": 0.2032, + "step": 56410 + }, + { + "epoch": 2.1784624888991853, + "grad_norm": 0.307444304227829, + "learning_rate": 5.4771741508681166e-05, + "loss": 0.1786, + "step": 56420 + }, + { + "epoch": 2.1788486041932122, + "grad_norm": 0.1072758212685585, + "learning_rate": 5.474600048907937e-05, + "loss": 0.3205, + "step": 56430 + }, + { + "epoch": 2.179234719487239, + "grad_norm": 0.78147292137146, + "learning_rate": 5.4720259469477586e-05, + "loss": 0.1552, + "step": 56440 + }, + { + "epoch": 2.179620834781266, + "grad_norm": 0.4287649393081665, + "learning_rate": 
5.46945184498758e-05, + "loss": 0.0457, + "step": 56450 + }, + { + "epoch": 2.1800069500752923, + "grad_norm": 2.8946595191955566, + "learning_rate": 5.466877743027402e-05, + "loss": 0.1814, + "step": 56460 + }, + { + "epoch": 2.1803930653693193, + "grad_norm": 0.446044385433197, + "learning_rate": 5.464303641067223e-05, + "loss": 0.1898, + "step": 56470 + }, + { + "epoch": 2.180779180663346, + "grad_norm": 2.351010799407959, + "learning_rate": 5.4617295391070446e-05, + "loss": 0.1929, + "step": 56480 + }, + { + "epoch": 2.181165295957373, + "grad_norm": 1.1475882530212402, + "learning_rate": 5.459155437146866e-05, + "loss": 0.0972, + "step": 56490 + }, + { + "epoch": 2.1815514112514, + "grad_norm": 1.1613543033599854, + "learning_rate": 5.4565813351866865e-05, + "loss": 0.1397, + "step": 56500 + }, + { + "epoch": 2.1819375265454264, + "grad_norm": 1.2021968364715576, + "learning_rate": 5.454007233226508e-05, + "loss": 0.2538, + "step": 56510 + }, + { + "epoch": 2.1823236418394534, + "grad_norm": 1.1156634092330933, + "learning_rate": 5.4514331312663306e-05, + "loss": 0.1325, + "step": 56520 + }, + { + "epoch": 2.18270975713348, + "grad_norm": 3.0149824619293213, + "learning_rate": 5.448859029306151e-05, + "loss": 0.1596, + "step": 56530 + }, + { + "epoch": 2.183095872427507, + "grad_norm": 2.628236770629883, + "learning_rate": 5.4462849273459725e-05, + "loss": 0.3042, + "step": 56540 + }, + { + "epoch": 2.1834819877215335, + "grad_norm": 3.887352705001831, + "learning_rate": 5.443710825385794e-05, + "loss": 0.2112, + "step": 56550 + }, + { + "epoch": 2.1838681030155604, + "grad_norm": 1.9219342470169067, + "learning_rate": 5.441136723425615e-05, + "loss": 0.1709, + "step": 56560 + }, + { + "epoch": 2.1842542183095874, + "grad_norm": 1.5730615854263306, + "learning_rate": 5.438562621465436e-05, + "loss": 0.184, + "step": 56570 + }, + { + "epoch": 2.184640333603614, + "grad_norm": 1.306178331375122, + "learning_rate": 5.435988519505257e-05, + "loss": 0.2147, + "step": 56580 + }, + { + "epoch": 2.185026448897641, + "grad_norm": 0.3093883693218231, + "learning_rate": 5.43341441754508e-05, + "loss": 0.189, + "step": 56590 + }, + { + "epoch": 2.1854125641916675, + "grad_norm": 1.672884225845337, + "learning_rate": 5.4308403155849005e-05, + "loss": 0.1502, + "step": 56600 + }, + { + "epoch": 2.1857986794856945, + "grad_norm": 0.44754695892333984, + "learning_rate": 5.428266213624722e-05, + "loss": 0.1941, + "step": 56610 + }, + { + "epoch": 2.186184794779721, + "grad_norm": 0.3943333625793457, + "learning_rate": 5.425692111664543e-05, + "loss": 0.1494, + "step": 56620 + }, + { + "epoch": 2.186570910073748, + "grad_norm": 1.8399711847305298, + "learning_rate": 5.4231180097043645e-05, + "loss": 0.2462, + "step": 56630 + }, + { + "epoch": 2.186957025367775, + "grad_norm": 0.7934846878051758, + "learning_rate": 5.420543907744186e-05, + "loss": 0.1305, + "step": 56640 + }, + { + "epoch": 2.1873431406618016, + "grad_norm": 3.170630693435669, + "learning_rate": 5.417969805784008e-05, + "loss": 0.1825, + "step": 56650 + }, + { + "epoch": 2.1877292559558286, + "grad_norm": 1.4730361700057983, + "learning_rate": 5.415395703823829e-05, + "loss": 0.1929, + "step": 56660 + }, + { + "epoch": 2.188115371249855, + "grad_norm": 0.38193902373313904, + "learning_rate": 5.41282160186365e-05, + "loss": 0.2001, + "step": 56670 + }, + { + "epoch": 2.188501486543882, + "grad_norm": 1.1314163208007812, + "learning_rate": 5.410247499903471e-05, + "loss": 0.1913, + "step": 56680 + }, + { + "epoch": 2.1888876018379086, + 
"grad_norm": 0.40177929401397705, + "learning_rate": 5.4076733979432925e-05, + "loss": 0.1646, + "step": 56690 + }, + { + "epoch": 2.1892737171319356, + "grad_norm": 4.792402744293213, + "learning_rate": 5.405099295983114e-05, + "loss": 0.2481, + "step": 56700 + }, + { + "epoch": 2.1896598324259626, + "grad_norm": 2.82281756401062, + "learning_rate": 5.402525194022936e-05, + "loss": 0.142, + "step": 56710 + }, + { + "epoch": 2.190045947719989, + "grad_norm": 2.4064247608184814, + "learning_rate": 5.399951092062757e-05, + "loss": 0.2333, + "step": 56720 + }, + { + "epoch": 2.190432063014016, + "grad_norm": 1.3720029592514038, + "learning_rate": 5.3973769901025785e-05, + "loss": 0.1812, + "step": 56730 + }, + { + "epoch": 2.1908181783080427, + "grad_norm": 0.5120772123336792, + "learning_rate": 5.3948028881424e-05, + "loss": 0.2093, + "step": 56740 + }, + { + "epoch": 2.1912042936020697, + "grad_norm": 1.755660057067871, + "learning_rate": 5.3922287861822205e-05, + "loss": 0.1866, + "step": 56750 + }, + { + "epoch": 2.1915904088960962, + "grad_norm": 0.6418548226356506, + "learning_rate": 5.389654684222042e-05, + "loss": 0.1122, + "step": 56760 + }, + { + "epoch": 2.1919765241901232, + "grad_norm": 0.29100701212882996, + "learning_rate": 5.3870805822618645e-05, + "loss": 0.1508, + "step": 56770 + }, + { + "epoch": 2.1923626394841498, + "grad_norm": 1.2336047887802124, + "learning_rate": 5.384506480301685e-05, + "loss": 0.2027, + "step": 56780 + }, + { + "epoch": 2.1927487547781768, + "grad_norm": 1.7961387634277344, + "learning_rate": 5.3819323783415065e-05, + "loss": 0.0698, + "step": 56790 + }, + { + "epoch": 2.1931348700722038, + "grad_norm": 0.45203906297683716, + "learning_rate": 5.379358276381328e-05, + "loss": 0.1201, + "step": 56800 + }, + { + "epoch": 2.1935209853662303, + "grad_norm": 2.4944546222686768, + "learning_rate": 5.376784174421149e-05, + "loss": 0.252, + "step": 56810 + }, + { + "epoch": 2.1939071006602573, + "grad_norm": 0.6468565464019775, + "learning_rate": 5.37421007246097e-05, + "loss": 0.3302, + "step": 56820 + }, + { + "epoch": 2.194293215954284, + "grad_norm": 0.6524060368537903, + "learning_rate": 5.371635970500791e-05, + "loss": 0.1588, + "step": 56830 + }, + { + "epoch": 2.194679331248311, + "grad_norm": 1.2810111045837402, + "learning_rate": 5.369061868540614e-05, + "loss": 0.2929, + "step": 56840 + }, + { + "epoch": 2.1950654465423374, + "grad_norm": 1.5758986473083496, + "learning_rate": 5.3664877665804345e-05, + "loss": 0.2013, + "step": 56850 + }, + { + "epoch": 2.1954515618363644, + "grad_norm": 0.8895549774169922, + "learning_rate": 5.363913664620256e-05, + "loss": 0.1539, + "step": 56860 + }, + { + "epoch": 2.195837677130391, + "grad_norm": 0.0427737757563591, + "learning_rate": 5.361339562660077e-05, + "loss": 0.0949, + "step": 56870 + }, + { + "epoch": 2.196223792424418, + "grad_norm": 0.9843714237213135, + "learning_rate": 5.3587654606998985e-05, + "loss": 0.1784, + "step": 56880 + }, + { + "epoch": 2.196609907718445, + "grad_norm": 0.9936504364013672, + "learning_rate": 5.356191358739719e-05, + "loss": 0.1075, + "step": 56890 + }, + { + "epoch": 2.1969960230124714, + "grad_norm": 0.3362007737159729, + "learning_rate": 5.353617256779542e-05, + "loss": 0.1496, + "step": 56900 + }, + { + "epoch": 2.1973821383064984, + "grad_norm": 0.34201017022132874, + "learning_rate": 5.351043154819363e-05, + "loss": 0.1036, + "step": 56910 + }, + { + "epoch": 2.197768253600525, + "grad_norm": 1.8447175025939941, + "learning_rate": 5.348469052859184e-05, + "loss": 
0.1161, + "step": 56920 + }, + { + "epoch": 2.198154368894552, + "grad_norm": 1.2634321451187134, + "learning_rate": 5.345894950899005e-05, + "loss": 0.1302, + "step": 56930 + }, + { + "epoch": 2.1985404841885785, + "grad_norm": 1.3948713541030884, + "learning_rate": 5.3433208489388265e-05, + "loss": 0.2252, + "step": 56940 + }, + { + "epoch": 2.1989265994826055, + "grad_norm": 0.7020501494407654, + "learning_rate": 5.340746746978648e-05, + "loss": 0.1938, + "step": 56950 + }, + { + "epoch": 2.1993127147766325, + "grad_norm": 4.047187805175781, + "learning_rate": 5.33817264501847e-05, + "loss": 0.2477, + "step": 56960 + }, + { + "epoch": 2.199698830070659, + "grad_norm": 0.5025122761726379, + "learning_rate": 5.335598543058291e-05, + "loss": 0.1044, + "step": 56970 + }, + { + "epoch": 2.200084945364686, + "grad_norm": 1.8918673992156982, + "learning_rate": 5.3330244410981125e-05, + "loss": 0.3172, + "step": 56980 + }, + { + "epoch": 2.2004710606587126, + "grad_norm": 3.515730857849121, + "learning_rate": 5.330450339137933e-05, + "loss": 0.0818, + "step": 56990 + }, + { + "epoch": 2.2008571759527396, + "grad_norm": 2.531858444213867, + "learning_rate": 5.3278762371777545e-05, + "loss": 0.2849, + "step": 57000 + }, + { + "epoch": 2.201243291246766, + "grad_norm": 3.145490884780884, + "learning_rate": 5.325302135217576e-05, + "loss": 0.2319, + "step": 57010 + }, + { + "epoch": 2.201629406540793, + "grad_norm": 1.8957561254501343, + "learning_rate": 5.322728033257398e-05, + "loss": 0.2513, + "step": 57020 + }, + { + "epoch": 2.20201552183482, + "grad_norm": 1.326156497001648, + "learning_rate": 5.320153931297219e-05, + "loss": 0.295, + "step": 57030 + }, + { + "epoch": 2.2024016371288466, + "grad_norm": 0.8725142478942871, + "learning_rate": 5.3175798293370405e-05, + "loss": 0.1235, + "step": 57040 + }, + { + "epoch": 2.2027877524228736, + "grad_norm": 0.8360647559165955, + "learning_rate": 5.315005727376862e-05, + "loss": 0.224, + "step": 57050 + }, + { + "epoch": 2.2031738677169, + "grad_norm": 1.381373643875122, + "learning_rate": 5.312431625416683e-05, + "loss": 0.1003, + "step": 57060 + }, + { + "epoch": 2.203559983010927, + "grad_norm": 2.6999964714050293, + "learning_rate": 5.309857523456504e-05, + "loss": 0.2118, + "step": 57070 + }, + { + "epoch": 2.2039460983049537, + "grad_norm": 2.1584982872009277, + "learning_rate": 5.307283421496325e-05, + "loss": 0.2965, + "step": 57080 + }, + { + "epoch": 2.2043322135989807, + "grad_norm": 1.3775367736816406, + "learning_rate": 5.304709319536148e-05, + "loss": 0.2494, + "step": 57090 + }, + { + "epoch": 2.2047183288930077, + "grad_norm": 2.1069607734680176, + "learning_rate": 5.3021352175759684e-05, + "loss": 0.3412, + "step": 57100 + }, + { + "epoch": 2.205104444187034, + "grad_norm": 1.3556911945343018, + "learning_rate": 5.29956111561579e-05, + "loss": 0.2076, + "step": 57110 + }, + { + "epoch": 2.205490559481061, + "grad_norm": 0.4328407049179077, + "learning_rate": 5.296987013655611e-05, + "loss": 0.0691, + "step": 57120 + }, + { + "epoch": 2.2058766747750878, + "grad_norm": 0.24479885399341583, + "learning_rate": 5.2944129116954324e-05, + "loss": 0.3407, + "step": 57130 + }, + { + "epoch": 2.2062627900691147, + "grad_norm": 0.4531087279319763, + "learning_rate": 5.291838809735253e-05, + "loss": 0.1471, + "step": 57140 + }, + { + "epoch": 2.2066489053631413, + "grad_norm": 1.258487582206726, + "learning_rate": 5.289264707775076e-05, + "loss": 0.2213, + "step": 57150 + }, + { + "epoch": 2.2070350206571683, + "grad_norm": 1.8605122566223145, 
+ "learning_rate": 5.286690605814897e-05, + "loss": 0.1839, + "step": 57160 + }, + { + "epoch": 2.207421135951195, + "grad_norm": 0.20423386991024017, + "learning_rate": 5.284116503854718e-05, + "loss": 0.1898, + "step": 57170 + }, + { + "epoch": 2.207807251245222, + "grad_norm": 1.366576910018921, + "learning_rate": 5.281542401894539e-05, + "loss": 0.1533, + "step": 57180 + }, + { + "epoch": 2.208193366539249, + "grad_norm": 2.0091841220855713, + "learning_rate": 5.2789682999343604e-05, + "loss": 0.2834, + "step": 57190 + }, + { + "epoch": 2.2085794818332753, + "grad_norm": 1.271532654762268, + "learning_rate": 5.276394197974182e-05, + "loss": 0.2215, + "step": 57200 + }, + { + "epoch": 2.2089655971273023, + "grad_norm": 1.3751137256622314, + "learning_rate": 5.273820096014004e-05, + "loss": 0.1753, + "step": 57210 + }, + { + "epoch": 2.209351712421329, + "grad_norm": 1.6233354806900024, + "learning_rate": 5.271245994053825e-05, + "loss": 0.425, + "step": 57220 + }, + { + "epoch": 2.209737827715356, + "grad_norm": 0.05391040816903114, + "learning_rate": 5.2686718920936464e-05, + "loss": 0.1759, + "step": 57230 + }, + { + "epoch": 2.2101239430093824, + "grad_norm": 0.0809585228562355, + "learning_rate": 5.266097790133467e-05, + "loss": 0.1902, + "step": 57240 + }, + { + "epoch": 2.2105100583034094, + "grad_norm": 0.05576219782233238, + "learning_rate": 5.2635236881732884e-05, + "loss": 0.1104, + "step": 57250 + }, + { + "epoch": 2.2108961735974364, + "grad_norm": 0.2295994609594345, + "learning_rate": 5.26094958621311e-05, + "loss": 0.1119, + "step": 57260 + }, + { + "epoch": 2.211282288891463, + "grad_norm": 2.042689085006714, + "learning_rate": 5.258375484252932e-05, + "loss": 0.1703, + "step": 57270 + }, + { + "epoch": 2.21166840418549, + "grad_norm": 0.13086611032485962, + "learning_rate": 5.255801382292753e-05, + "loss": 0.2181, + "step": 57280 + }, + { + "epoch": 2.2120545194795165, + "grad_norm": 0.7772855758666992, + "learning_rate": 5.2532272803325744e-05, + "loss": 0.2975, + "step": 57290 + }, + { + "epoch": 2.2124406347735435, + "grad_norm": 1.100147008895874, + "learning_rate": 5.250653178372396e-05, + "loss": 0.1446, + "step": 57300 + }, + { + "epoch": 2.21282675006757, + "grad_norm": 2.2804689407348633, + "learning_rate": 5.248079076412217e-05, + "loss": 0.1779, + "step": 57310 + }, + { + "epoch": 2.213212865361597, + "grad_norm": 0.909376323223114, + "learning_rate": 5.245504974452038e-05, + "loss": 0.1557, + "step": 57320 + }, + { + "epoch": 2.2135989806556235, + "grad_norm": 0.808570921421051, + "learning_rate": 5.242930872491859e-05, + "loss": 0.2107, + "step": 57330 + }, + { + "epoch": 2.2139850959496505, + "grad_norm": 1.6458532810211182, + "learning_rate": 5.240356770531681e-05, + "loss": 0.1729, + "step": 57340 + }, + { + "epoch": 2.2143712112436775, + "grad_norm": 1.928828477859497, + "learning_rate": 5.2377826685715024e-05, + "loss": 0.1234, + "step": 57350 + }, + { + "epoch": 2.214757326537704, + "grad_norm": 1.076491117477417, + "learning_rate": 5.235208566611324e-05, + "loss": 0.2457, + "step": 57360 + }, + { + "epoch": 2.215143441831731, + "grad_norm": 3.043621778488159, + "learning_rate": 5.232634464651145e-05, + "loss": 0.2592, + "step": 57370 + }, + { + "epoch": 2.2155295571257576, + "grad_norm": 0.1770264357328415, + "learning_rate": 5.2300603626909664e-05, + "loss": 0.1214, + "step": 57380 + }, + { + "epoch": 2.2159156724197846, + "grad_norm": 0.12781330943107605, + "learning_rate": 5.227486260730787e-05, + "loss": 0.2213, + "step": 57390 + }, + { + 
"epoch": 2.216301787713811, + "grad_norm": 1.3419160842895508, + "learning_rate": 5.22491215877061e-05, + "loss": 0.1806, + "step": 57400 + }, + { + "epoch": 2.216687903007838, + "grad_norm": 0.9998745322227478, + "learning_rate": 5.222338056810431e-05, + "loss": 0.1452, + "step": 57410 + }, + { + "epoch": 2.217074018301865, + "grad_norm": 0.5411838889122009, + "learning_rate": 5.219763954850252e-05, + "loss": 0.0978, + "step": 57420 + }, + { + "epoch": 2.2174601335958917, + "grad_norm": 0.505660891532898, + "learning_rate": 5.217189852890073e-05, + "loss": 0.0553, + "step": 57430 + }, + { + "epoch": 2.2178462488899187, + "grad_norm": 2.207895278930664, + "learning_rate": 5.2146157509298944e-05, + "loss": 0.2327, + "step": 57440 + }, + { + "epoch": 2.218232364183945, + "grad_norm": 2.272740364074707, + "learning_rate": 5.212041648969716e-05, + "loss": 0.1773, + "step": 57450 + }, + { + "epoch": 2.218618479477972, + "grad_norm": 1.0580307245254517, + "learning_rate": 5.209467547009538e-05, + "loss": 0.1024, + "step": 57460 + }, + { + "epoch": 2.2190045947719987, + "grad_norm": 0.3714104890823364, + "learning_rate": 5.206893445049359e-05, + "loss": 0.1082, + "step": 57470 + }, + { + "epoch": 2.2193907100660257, + "grad_norm": 0.35416433215141296, + "learning_rate": 5.2043193430891804e-05, + "loss": 0.1082, + "step": 57480 + }, + { + "epoch": 2.2197768253600527, + "grad_norm": 1.1985892057418823, + "learning_rate": 5.201745241129001e-05, + "loss": 0.1227, + "step": 57490 + }, + { + "epoch": 2.2201629406540793, + "grad_norm": 0.7527439594268799, + "learning_rate": 5.1991711391688224e-05, + "loss": 0.2785, + "step": 57500 + }, + { + "epoch": 2.2205490559481063, + "grad_norm": 2.3120546340942383, + "learning_rate": 5.196597037208644e-05, + "loss": 0.1503, + "step": 57510 + }, + { + "epoch": 2.220935171242133, + "grad_norm": 1.3414112329483032, + "learning_rate": 5.194022935248466e-05, + "loss": 0.1436, + "step": 57520 + }, + { + "epoch": 2.22132128653616, + "grad_norm": 0.9937017560005188, + "learning_rate": 5.191448833288287e-05, + "loss": 0.1424, + "step": 57530 + }, + { + "epoch": 2.2217074018301863, + "grad_norm": 0.952620267868042, + "learning_rate": 5.1888747313281084e-05, + "loss": 0.1701, + "step": 57540 + }, + { + "epoch": 2.2220935171242133, + "grad_norm": 0.421085000038147, + "learning_rate": 5.18630062936793e-05, + "loss": 0.1187, + "step": 57550 + }, + { + "epoch": 2.2224796324182403, + "grad_norm": 1.5724862813949585, + "learning_rate": 5.1837265274077504e-05, + "loss": 0.1509, + "step": 57560 + }, + { + "epoch": 2.222865747712267, + "grad_norm": 1.705536127090454, + "learning_rate": 5.181152425447572e-05, + "loss": 0.1546, + "step": 57570 + }, + { + "epoch": 2.223251863006294, + "grad_norm": 0.6752486824989319, + "learning_rate": 5.1785783234873944e-05, + "loss": 0.1785, + "step": 57580 + }, + { + "epoch": 2.2236379783003204, + "grad_norm": 0.1227736845612526, + "learning_rate": 5.176004221527215e-05, + "loss": 0.1198, + "step": 57590 + }, + { + "epoch": 2.2240240935943474, + "grad_norm": 1.0653119087219238, + "learning_rate": 5.1734301195670364e-05, + "loss": 0.2688, + "step": 57600 + }, + { + "epoch": 2.224410208888374, + "grad_norm": 2.1538949012756348, + "learning_rate": 5.170856017606858e-05, + "loss": 0.1402, + "step": 57610 + }, + { + "epoch": 2.224796324182401, + "grad_norm": 2.1059763431549072, + "learning_rate": 5.168281915646679e-05, + "loss": 0.1319, + "step": 57620 + }, + { + "epoch": 2.2251824394764275, + "grad_norm": 1.8453232049942017, + "learning_rate": 
5.1657078136865004e-05, + "loss": 0.154, + "step": 57630 + }, + { + "epoch": 2.2255685547704545, + "grad_norm": 1.7324992418289185, + "learning_rate": 5.163133711726321e-05, + "loss": 0.2807, + "step": 57640 + }, + { + "epoch": 2.2259546700644814, + "grad_norm": 0.4680674374103546, + "learning_rate": 5.160559609766144e-05, + "loss": 0.1648, + "step": 57650 + }, + { + "epoch": 2.226340785358508, + "grad_norm": 0.2356865406036377, + "learning_rate": 5.1579855078059643e-05, + "loss": 0.2068, + "step": 57660 + }, + { + "epoch": 2.226726900652535, + "grad_norm": 1.2691845893859863, + "learning_rate": 5.155411405845786e-05, + "loss": 0.425, + "step": 57670 + }, + { + "epoch": 2.2271130159465615, + "grad_norm": 0.09415816515684128, + "learning_rate": 5.152837303885607e-05, + "loss": 0.1015, + "step": 57680 + }, + { + "epoch": 2.2274991312405885, + "grad_norm": 1.1072195768356323, + "learning_rate": 5.1502632019254283e-05, + "loss": 0.2324, + "step": 57690 + }, + { + "epoch": 2.227885246534615, + "grad_norm": 1.774086594581604, + "learning_rate": 5.14768909996525e-05, + "loss": 0.1382, + "step": 57700 + }, + { + "epoch": 2.228271361828642, + "grad_norm": 1.3065643310546875, + "learning_rate": 5.145114998005072e-05, + "loss": 0.1843, + "step": 57710 + }, + { + "epoch": 2.2286574771226686, + "grad_norm": 1.4786202907562256, + "learning_rate": 5.142540896044893e-05, + "loss": 0.2328, + "step": 57720 + }, + { + "epoch": 2.2290435924166956, + "grad_norm": 1.2334314584732056, + "learning_rate": 5.1399667940847143e-05, + "loss": 0.2064, + "step": 57730 + }, + { + "epoch": 2.2294297077107226, + "grad_norm": 0.6082472801208496, + "learning_rate": 5.137392692124535e-05, + "loss": 0.1278, + "step": 57740 + }, + { + "epoch": 2.229815823004749, + "grad_norm": 1.2659168243408203, + "learning_rate": 5.134818590164356e-05, + "loss": 0.1877, + "step": 57750 + }, + { + "epoch": 2.230201938298776, + "grad_norm": 1.652754783630371, + "learning_rate": 5.1322444882041777e-05, + "loss": 0.2277, + "step": 57760 + }, + { + "epoch": 2.2305880535928027, + "grad_norm": 1.6349531412124634, + "learning_rate": 5.129670386244e-05, + "loss": 0.2374, + "step": 57770 + }, + { + "epoch": 2.2309741688868296, + "grad_norm": 1.2513495683670044, + "learning_rate": 5.127096284283821e-05, + "loss": 0.2055, + "step": 57780 + }, + { + "epoch": 2.231360284180856, + "grad_norm": 2.0995755195617676, + "learning_rate": 5.124522182323642e-05, + "loss": 0.1968, + "step": 57790 + }, + { + "epoch": 2.231746399474883, + "grad_norm": 2.2957067489624023, + "learning_rate": 5.1219480803634637e-05, + "loss": 0.233, + "step": 57800 + }, + { + "epoch": 2.23213251476891, + "grad_norm": 2.303072452545166, + "learning_rate": 5.119373978403284e-05, + "loss": 0.0924, + "step": 57810 + }, + { + "epoch": 2.2325186300629367, + "grad_norm": 0.6360287070274353, + "learning_rate": 5.1167998764431056e-05, + "loss": 0.1557, + "step": 57820 + }, + { + "epoch": 2.2329047453569637, + "grad_norm": 0.720551073551178, + "learning_rate": 5.114225774482928e-05, + "loss": 0.1619, + "step": 57830 + }, + { + "epoch": 2.2332908606509903, + "grad_norm": 0.332627534866333, + "learning_rate": 5.111651672522749e-05, + "loss": 0.1529, + "step": 57840 + }, + { + "epoch": 2.2336769759450172, + "grad_norm": 2.1180593967437744, + "learning_rate": 5.10907757056257e-05, + "loss": 0.2647, + "step": 57850 + }, + { + "epoch": 2.234063091239044, + "grad_norm": 0.023406701162457466, + "learning_rate": 5.1065034686023916e-05, + "loss": 0.2461, + "step": 57860 + }, + { + "epoch": 
2.2344492065330708, + "grad_norm": 0.887008011341095, + "learning_rate": 5.103929366642213e-05, + "loss": 0.191, + "step": 57870 + }, + { + "epoch": 2.2348353218270978, + "grad_norm": 0.9116653203964233, + "learning_rate": 5.1013552646820336e-05, + "loss": 0.2199, + "step": 57880 + }, + { + "epoch": 2.2352214371211243, + "grad_norm": 2.72094464302063, + "learning_rate": 5.098781162721855e-05, + "loss": 0.1219, + "step": 57890 + }, + { + "epoch": 2.2356075524151513, + "grad_norm": 1.7832390069961548, + "learning_rate": 5.0962070607616776e-05, + "loss": 0.1516, + "step": 57900 + }, + { + "epoch": 2.235993667709178, + "grad_norm": 1.892029881477356, + "learning_rate": 5.093632958801498e-05, + "loss": 0.1943, + "step": 57910 + }, + { + "epoch": 2.236379783003205, + "grad_norm": 1.284244418144226, + "learning_rate": 5.0910588568413196e-05, + "loss": 0.0762, + "step": 57920 + }, + { + "epoch": 2.2367658982972314, + "grad_norm": 2.9343578815460205, + "learning_rate": 5.088484754881141e-05, + "loss": 0.1623, + "step": 57930 + }, + { + "epoch": 2.2371520135912584, + "grad_norm": 2.6697938442230225, + "learning_rate": 5.085910652920962e-05, + "loss": 0.1781, + "step": 57940 + }, + { + "epoch": 2.2375381288852854, + "grad_norm": 0.14455921947956085, + "learning_rate": 5.0833365509607836e-05, + "loss": 0.3026, + "step": 57950 + }, + { + "epoch": 2.237924244179312, + "grad_norm": 0.3427145183086395, + "learning_rate": 5.0807624490006056e-05, + "loss": 0.047, + "step": 57960 + }, + { + "epoch": 2.238310359473339, + "grad_norm": 0.4725586473941803, + "learning_rate": 5.078188347040427e-05, + "loss": 0.1794, + "step": 57970 + }, + { + "epoch": 2.2386964747673654, + "grad_norm": 2.0606446266174316, + "learning_rate": 5.075614245080248e-05, + "loss": 0.0948, + "step": 57980 + }, + { + "epoch": 2.2390825900613924, + "grad_norm": 0.08911284059286118, + "learning_rate": 5.073040143120069e-05, + "loss": 0.1171, + "step": 57990 + }, + { + "epoch": 2.239468705355419, + "grad_norm": 0.05132399871945381, + "learning_rate": 5.07046604115989e-05, + "loss": 0.1201, + "step": 58000 + }, + { + "epoch": 2.239854820649446, + "grad_norm": 0.5799759030342102, + "learning_rate": 5.0678919391997116e-05, + "loss": 0.1702, + "step": 58010 + }, + { + "epoch": 2.240240935943473, + "grad_norm": 1.5331569910049438, + "learning_rate": 5.0653178372395336e-05, + "loss": 0.2916, + "step": 58020 + }, + { + "epoch": 2.2406270512374995, + "grad_norm": 0.31285667419433594, + "learning_rate": 5.062743735279355e-05, + "loss": 0.1659, + "step": 58030 + }, + { + "epoch": 2.2410131665315265, + "grad_norm": 1.9137883186340332, + "learning_rate": 5.060169633319176e-05, + "loss": 0.0994, + "step": 58040 + }, + { + "epoch": 2.241399281825553, + "grad_norm": 0.0040522972121834755, + "learning_rate": 5.0575955313589976e-05, + "loss": 0.1047, + "step": 58050 + }, + { + "epoch": 2.24178539711958, + "grad_norm": 1.4532781839370728, + "learning_rate": 5.055021429398818e-05, + "loss": 0.1351, + "step": 58060 + }, + { + "epoch": 2.2421715124136066, + "grad_norm": 1.1458393335342407, + "learning_rate": 5.0524473274386396e-05, + "loss": 0.0966, + "step": 58070 + }, + { + "epoch": 2.2425576277076336, + "grad_norm": 0.4871302545070648, + "learning_rate": 5.049873225478462e-05, + "loss": 0.2297, + "step": 58080 + }, + { + "epoch": 2.24294374300166, + "grad_norm": 0.8895847201347351, + "learning_rate": 5.047299123518283e-05, + "loss": 0.1101, + "step": 58090 + }, + { + "epoch": 2.243329858295687, + "grad_norm": 1.5819259881973267, + "learning_rate": 
5.044725021558104e-05, + "loss": 0.165, + "step": 58100 + }, + { + "epoch": 2.243715973589714, + "grad_norm": 0.4520101249217987, + "learning_rate": 5.0421509195979256e-05, + "loss": 0.2857, + "step": 58110 + }, + { + "epoch": 2.2441020888837406, + "grad_norm": 0.6979352235794067, + "learning_rate": 5.039576817637747e-05, + "loss": 0.1135, + "step": 58120 + }, + { + "epoch": 2.2444882041777676, + "grad_norm": 0.10534228384494781, + "learning_rate": 5.0370027156775676e-05, + "loss": 0.1788, + "step": 58130 + }, + { + "epoch": 2.244874319471794, + "grad_norm": 1.593078851699829, + "learning_rate": 5.034428613717389e-05, + "loss": 0.0948, + "step": 58140 + }, + { + "epoch": 2.245260434765821, + "grad_norm": 0.7897083163261414, + "learning_rate": 5.0318545117572116e-05, + "loss": 0.1155, + "step": 58150 + }, + { + "epoch": 2.2456465500598477, + "grad_norm": 0.17938394844532013, + "learning_rate": 5.029280409797032e-05, + "loss": 0.246, + "step": 58160 + }, + { + "epoch": 2.2460326653538747, + "grad_norm": 0.9242120385169983, + "learning_rate": 5.0267063078368536e-05, + "loss": 0.2686, + "step": 58170 + }, + { + "epoch": 2.2464187806479012, + "grad_norm": 0.46744218468666077, + "learning_rate": 5.024132205876675e-05, + "loss": 0.105, + "step": 58180 + }, + { + "epoch": 2.2468048959419282, + "grad_norm": 1.0429635047912598, + "learning_rate": 5.021558103916496e-05, + "loss": 0.0922, + "step": 58190 + }, + { + "epoch": 2.247191011235955, + "grad_norm": 2.889759063720703, + "learning_rate": 5.018984001956317e-05, + "loss": 0.2586, + "step": 58200 + }, + { + "epoch": 2.2475771265299818, + "grad_norm": 1.0298150777816772, + "learning_rate": 5.0164098999961396e-05, + "loss": 0.1373, + "step": 58210 + }, + { + "epoch": 2.2479632418240088, + "grad_norm": 0.14992554485797882, + "learning_rate": 5.013835798035961e-05, + "loss": 0.2644, + "step": 58220 + }, + { + "epoch": 2.2483493571180353, + "grad_norm": 0.8929703831672668, + "learning_rate": 5.0112616960757816e-05, + "loss": 0.2109, + "step": 58230 + }, + { + "epoch": 2.2487354724120623, + "grad_norm": 0.8829396367073059, + "learning_rate": 5.008687594115603e-05, + "loss": 0.2353, + "step": 58240 + }, + { + "epoch": 2.249121587706089, + "grad_norm": 0.3709293305873871, + "learning_rate": 5.006113492155424e-05, + "loss": 0.099, + "step": 58250 + }, + { + "epoch": 2.249507703000116, + "grad_norm": 0.17572759091854095, + "learning_rate": 5.0035393901952456e-05, + "loss": 0.1161, + "step": 58260 + }, + { + "epoch": 2.249893818294143, + "grad_norm": 0.3241714537143707, + "learning_rate": 5.0009652882350676e-05, + "loss": 0.2081, + "step": 58270 + }, + { + "epoch": 2.2502799335881694, + "grad_norm": 0.5595920085906982, + "learning_rate": 4.998391186274888e-05, + "loss": 0.2376, + "step": 58280 + }, + { + "epoch": 2.2506660488821963, + "grad_norm": 0.8801298141479492, + "learning_rate": 4.99581708431471e-05, + "loss": 0.1423, + "step": 58290 + }, + { + "epoch": 2.251052164176223, + "grad_norm": 1.4857895374298096, + "learning_rate": 4.9932429823545316e-05, + "loss": 0.169, + "step": 58300 + }, + { + "epoch": 2.25143827947025, + "grad_norm": 1.0327515602111816, + "learning_rate": 4.990668880394352e-05, + "loss": 0.1127, + "step": 58310 + }, + { + "epoch": 2.2518243947642764, + "grad_norm": 0.19778093695640564, + "learning_rate": 4.988094778434174e-05, + "loss": 0.2655, + "step": 58320 + }, + { + "epoch": 2.2522105100583034, + "grad_norm": 1.3672188520431519, + "learning_rate": 4.9855206764739956e-05, + "loss": 0.1252, + "step": 58330 + }, + { + "epoch": 
2.2525966253523304, + "grad_norm": 3.6712214946746826, + "learning_rate": 4.982946574513816e-05, + "loss": 0.2721, + "step": 58340 + }, + { + "epoch": 2.252982740646357, + "grad_norm": 0.19810612499713898, + "learning_rate": 4.980372472553638e-05, + "loss": 0.1008, + "step": 58350 + }, + { + "epoch": 2.253368855940384, + "grad_norm": 0.5414086580276489, + "learning_rate": 4.9777983705934596e-05, + "loss": 0.1898, + "step": 58360 + }, + { + "epoch": 2.2537549712344105, + "grad_norm": 1.883710503578186, + "learning_rate": 4.975224268633281e-05, + "loss": 0.1841, + "step": 58370 + }, + { + "epoch": 2.2541410865284375, + "grad_norm": 0.3979630172252655, + "learning_rate": 4.972650166673102e-05, + "loss": 0.1712, + "step": 58380 + }, + { + "epoch": 2.254527201822464, + "grad_norm": 1.2606881856918335, + "learning_rate": 4.9700760647129236e-05, + "loss": 0.1772, + "step": 58390 + }, + { + "epoch": 2.254913317116491, + "grad_norm": 0.6021280288696289, + "learning_rate": 4.967501962752745e-05, + "loss": 0.1662, + "step": 58400 + }, + { + "epoch": 2.255299432410518, + "grad_norm": 0.4324108362197876, + "learning_rate": 4.964927860792566e-05, + "loss": 0.138, + "step": 58410 + }, + { + "epoch": 2.2556855477045445, + "grad_norm": 1.147596001625061, + "learning_rate": 4.9623537588323875e-05, + "loss": 0.1956, + "step": 58420 + }, + { + "epoch": 2.2560716629985715, + "grad_norm": 2.516636371612549, + "learning_rate": 4.959779656872209e-05, + "loss": 0.2031, + "step": 58430 + }, + { + "epoch": 2.256457778292598, + "grad_norm": 1.1109521389007568, + "learning_rate": 4.95720555491203e-05, + "loss": 0.2845, + "step": 58440 + }, + { + "epoch": 2.256843893586625, + "grad_norm": 0.3227555453777313, + "learning_rate": 4.9546314529518515e-05, + "loss": 0.0596, + "step": 58450 + }, + { + "epoch": 2.2572300088806516, + "grad_norm": 2.5064280033111572, + "learning_rate": 4.952057350991673e-05, + "loss": 0.289, + "step": 58460 + }, + { + "epoch": 2.2576161241746786, + "grad_norm": 1.0245225429534912, + "learning_rate": 4.949483249031495e-05, + "loss": 0.1458, + "step": 58470 + }, + { + "epoch": 2.2580022394687056, + "grad_norm": 0.058567408472299576, + "learning_rate": 4.9469091470713155e-05, + "loss": 0.2719, + "step": 58480 + }, + { + "epoch": 2.258388354762732, + "grad_norm": 2.1362061500549316, + "learning_rate": 4.944335045111137e-05, + "loss": 0.3814, + "step": 58490 + }, + { + "epoch": 2.258774470056759, + "grad_norm": 0.363843709230423, + "learning_rate": 4.941760943150959e-05, + "loss": 0.0669, + "step": 58500 + }, + { + "epoch": 2.2591605853507857, + "grad_norm": 0.1753295511007309, + "learning_rate": 4.93918684119078e-05, + "loss": 0.1246, + "step": 58510 + }, + { + "epoch": 2.2595467006448127, + "grad_norm": 1.6673377752304077, + "learning_rate": 4.936612739230601e-05, + "loss": 0.2781, + "step": 58520 + }, + { + "epoch": 2.259932815938839, + "grad_norm": 3.1135804653167725, + "learning_rate": 4.934038637270422e-05, + "loss": 0.1951, + "step": 58530 + }, + { + "epoch": 2.260318931232866, + "grad_norm": 0.8234933614730835, + "learning_rate": 4.931464535310244e-05, + "loss": 0.11, + "step": 58540 + }, + { + "epoch": 2.2607050465268927, + "grad_norm": 1.0099560022354126, + "learning_rate": 4.928890433350065e-05, + "loss": 0.2741, + "step": 58550 + }, + { + "epoch": 2.2610911618209197, + "grad_norm": 2.2589969635009766, + "learning_rate": 4.926316331389886e-05, + "loss": 0.1243, + "step": 58560 + }, + { + "epoch": 2.2614772771149463, + "grad_norm": 0.5491199493408203, + "learning_rate": 
4.923742229429708e-05, + "loss": 0.1302, + "step": 58570 + }, + { + "epoch": 2.2618633924089733, + "grad_norm": 0.9286119937896729, + "learning_rate": 4.9211681274695295e-05, + "loss": 0.1047, + "step": 58580 + }, + { + "epoch": 2.2622495077030003, + "grad_norm": 0.6178199052810669, + "learning_rate": 4.91859402550935e-05, + "loss": 0.1143, + "step": 58590 + }, + { + "epoch": 2.262635622997027, + "grad_norm": 1.6250818967819214, + "learning_rate": 4.916019923549172e-05, + "loss": 0.2461, + "step": 58600 + }, + { + "epoch": 2.263021738291054, + "grad_norm": 1.1366840600967407, + "learning_rate": 4.9134458215889935e-05, + "loss": 0.2128, + "step": 58610 + }, + { + "epoch": 2.2634078535850803, + "grad_norm": 0.38859716057777405, + "learning_rate": 4.910871719628815e-05, + "loss": 0.1476, + "step": 58620 + }, + { + "epoch": 2.2637939688791073, + "grad_norm": 0.02930479310452938, + "learning_rate": 4.908297617668636e-05, + "loss": 0.1901, + "step": 58630 + }, + { + "epoch": 2.264180084173134, + "grad_norm": 1.4426459074020386, + "learning_rate": 4.9057235157084575e-05, + "loss": 0.1736, + "step": 58640 + }, + { + "epoch": 2.264566199467161, + "grad_norm": 1.103959321975708, + "learning_rate": 4.903149413748279e-05, + "loss": 0.2207, + "step": 58650 + }, + { + "epoch": 2.264952314761188, + "grad_norm": 3.1351921558380127, + "learning_rate": 4.9005753117881e-05, + "loss": 0.394, + "step": 58660 + }, + { + "epoch": 2.2653384300552144, + "grad_norm": 0.33870574831962585, + "learning_rate": 4.8980012098279215e-05, + "loss": 0.1958, + "step": 58670 + }, + { + "epoch": 2.2657245453492414, + "grad_norm": 0.08599444478750229, + "learning_rate": 4.895427107867743e-05, + "loss": 0.0842, + "step": 58680 + }, + { + "epoch": 2.266110660643268, + "grad_norm": 0.7150046229362488, + "learning_rate": 4.892853005907564e-05, + "loss": 0.1917, + "step": 58690 + }, + { + "epoch": 2.266496775937295, + "grad_norm": 1.561062216758728, + "learning_rate": 4.8902789039473855e-05, + "loss": 0.2327, + "step": 58700 + }, + { + "epoch": 2.2668828912313215, + "grad_norm": 1.3899431228637695, + "learning_rate": 4.887704801987207e-05, + "loss": 0.1456, + "step": 58710 + }, + { + "epoch": 2.2672690065253485, + "grad_norm": 0.5647567510604858, + "learning_rate": 4.885130700027029e-05, + "loss": 0.2036, + "step": 58720 + }, + { + "epoch": 2.2676551218193755, + "grad_norm": 0.2155967652797699, + "learning_rate": 4.8825565980668495e-05, + "loss": 0.1159, + "step": 58730 + }, + { + "epoch": 2.268041237113402, + "grad_norm": 1.0128939151763916, + "learning_rate": 4.879982496106671e-05, + "loss": 0.1342, + "step": 58740 + }, + { + "epoch": 2.268427352407429, + "grad_norm": 1.079142689704895, + "learning_rate": 4.877408394146493e-05, + "loss": 0.1689, + "step": 58750 + }, + { + "epoch": 2.2688134677014555, + "grad_norm": 1.2162476778030396, + "learning_rate": 4.874834292186314e-05, + "loss": 0.2256, + "step": 58760 + }, + { + "epoch": 2.2691995829954825, + "grad_norm": 1.6972836256027222, + "learning_rate": 4.872260190226135e-05, + "loss": 0.1517, + "step": 58770 + }, + { + "epoch": 2.269585698289509, + "grad_norm": 1.4847822189331055, + "learning_rate": 4.869686088265956e-05, + "loss": 0.2296, + "step": 58780 + }, + { + "epoch": 2.269971813583536, + "grad_norm": 1.7321871519088745, + "learning_rate": 4.867111986305778e-05, + "loss": 0.2396, + "step": 58790 + }, + { + "epoch": 2.270357928877563, + "grad_norm": 1.468248724937439, + "learning_rate": 4.864537884345599e-05, + "loss": 0.1501, + "step": 58800 + }, + { + "epoch": 
2.2707440441715896, + "grad_norm": 1.125684380531311, + "learning_rate": 4.86196378238542e-05, + "loss": 0.2444, + "step": 58810 + }, + { + "epoch": 2.2711301594656166, + "grad_norm": 2.3958170413970947, + "learning_rate": 4.859389680425242e-05, + "loss": 0.3088, + "step": 58820 + }, + { + "epoch": 2.271516274759643, + "grad_norm": 0.8419416546821594, + "learning_rate": 4.8568155784650635e-05, + "loss": 0.1215, + "step": 58830 + }, + { + "epoch": 2.27190239005367, + "grad_norm": 0.3124147057533264, + "learning_rate": 4.854241476504884e-05, + "loss": 0.2069, + "step": 58840 + }, + { + "epoch": 2.2722885053476967, + "grad_norm": 0.6150888204574585, + "learning_rate": 4.851667374544706e-05, + "loss": 0.0483, + "step": 58850 + }, + { + "epoch": 2.2726746206417237, + "grad_norm": 0.7708920836448669, + "learning_rate": 4.8490932725845275e-05, + "loss": 0.2787, + "step": 58860 + }, + { + "epoch": 2.2730607359357506, + "grad_norm": 1.123910665512085, + "learning_rate": 4.846519170624348e-05, + "loss": 0.1875, + "step": 58870 + }, + { + "epoch": 2.273446851229777, + "grad_norm": 1.7842246294021606, + "learning_rate": 4.84394506866417e-05, + "loss": 0.2003, + "step": 58880 + }, + { + "epoch": 2.273832966523804, + "grad_norm": 0.09984418004751205, + "learning_rate": 4.8413709667039915e-05, + "loss": 0.0445, + "step": 58890 + }, + { + "epoch": 2.2742190818178307, + "grad_norm": 0.9539859890937805, + "learning_rate": 4.838796864743813e-05, + "loss": 0.1308, + "step": 58900 + }, + { + "epoch": 2.2746051971118577, + "grad_norm": 2.2655584812164307, + "learning_rate": 4.836222762783634e-05, + "loss": 0.1728, + "step": 58910 + }, + { + "epoch": 2.2749913124058843, + "grad_norm": 0.8873695731163025, + "learning_rate": 4.8336486608234555e-05, + "loss": 0.2559, + "step": 58920 + }, + { + "epoch": 2.2753774276999112, + "grad_norm": 0.6130178570747375, + "learning_rate": 4.831074558863277e-05, + "loss": 0.0859, + "step": 58930 + }, + { + "epoch": 2.2757635429939382, + "grad_norm": 2.61225962638855, + "learning_rate": 4.828500456903098e-05, + "loss": 0.1612, + "step": 58940 + }, + { + "epoch": 2.276149658287965, + "grad_norm": 1.187378168106079, + "learning_rate": 4.8259263549429195e-05, + "loss": 0.1222, + "step": 58950 + }, + { + "epoch": 2.2765357735819918, + "grad_norm": 0.3115352392196655, + "learning_rate": 4.823352252982741e-05, + "loss": 0.2081, + "step": 58960 + }, + { + "epoch": 2.2769218888760183, + "grad_norm": 1.1406041383743286, + "learning_rate": 4.820778151022563e-05, + "loss": 0.1137, + "step": 58970 + }, + { + "epoch": 2.2773080041700453, + "grad_norm": 2.415290355682373, + "learning_rate": 4.8182040490623834e-05, + "loss": 0.2498, + "step": 58980 + }, + { + "epoch": 2.277694119464072, + "grad_norm": 0.7312545776367188, + "learning_rate": 4.815629947102205e-05, + "loss": 0.2019, + "step": 58990 + }, + { + "epoch": 2.278080234758099, + "grad_norm": 0.3095935583114624, + "learning_rate": 4.813055845142027e-05, + "loss": 0.0833, + "step": 59000 + }, + { + "epoch": 2.2784663500521254, + "grad_norm": 2.551358938217163, + "learning_rate": 4.8104817431818474e-05, + "loss": 0.1599, + "step": 59010 + }, + { + "epoch": 2.2788524653461524, + "grad_norm": 0.8552582859992981, + "learning_rate": 4.807907641221669e-05, + "loss": 0.1445, + "step": 59020 + }, + { + "epoch": 2.279238580640179, + "grad_norm": 0.5667589902877808, + "learning_rate": 4.80533353926149e-05, + "loss": 0.1746, + "step": 59030 + }, + { + "epoch": 2.279624695934206, + "grad_norm": 1.4083415269851685, + "learning_rate": 
4.802759437301312e-05, + "loss": 0.1645, + "step": 59040 + }, + { + "epoch": 2.280010811228233, + "grad_norm": 0.04097180813550949, + "learning_rate": 4.800185335341133e-05, + "loss": 0.1898, + "step": 59050 + }, + { + "epoch": 2.2803969265222594, + "grad_norm": 0.019375400617718697, + "learning_rate": 4.797611233380954e-05, + "loss": 0.1723, + "step": 59060 + }, + { + "epoch": 2.2807830418162864, + "grad_norm": 1.4372104406356812, + "learning_rate": 4.795037131420776e-05, + "loss": 0.1243, + "step": 59070 + }, + { + "epoch": 2.281169157110313, + "grad_norm": 0.9807009696960449, + "learning_rate": 4.7924630294605974e-05, + "loss": 0.2997, + "step": 59080 + }, + { + "epoch": 2.28155527240434, + "grad_norm": 0.48975950479507446, + "learning_rate": 4.789888927500418e-05, + "loss": 0.1613, + "step": 59090 + }, + { + "epoch": 2.2819413876983665, + "grad_norm": 2.779517412185669, + "learning_rate": 4.78731482554024e-05, + "loss": 0.2774, + "step": 59100 + }, + { + "epoch": 2.2823275029923935, + "grad_norm": 1.5717260837554932, + "learning_rate": 4.7847407235800614e-05, + "loss": 0.1473, + "step": 59110 + }, + { + "epoch": 2.2827136182864205, + "grad_norm": 0.19590778648853302, + "learning_rate": 4.782166621619882e-05, + "loss": 0.2223, + "step": 59120 + }, + { + "epoch": 2.283099733580447, + "grad_norm": 1.9272565841674805, + "learning_rate": 4.779592519659704e-05, + "loss": 0.1101, + "step": 59130 + }, + { + "epoch": 2.283485848874474, + "grad_norm": 1.9476497173309326, + "learning_rate": 4.7770184176995254e-05, + "loss": 0.1627, + "step": 59140 + }, + { + "epoch": 2.2838719641685006, + "grad_norm": 4.072344779968262, + "learning_rate": 4.774444315739347e-05, + "loss": 0.2169, + "step": 59150 + }, + { + "epoch": 2.2842580794625276, + "grad_norm": 2.180009603500366, + "learning_rate": 4.771870213779168e-05, + "loss": 0.1348, + "step": 59160 + }, + { + "epoch": 2.284644194756554, + "grad_norm": 1.6288330554962158, + "learning_rate": 4.7692961118189894e-05, + "loss": 0.157, + "step": 59170 + }, + { + "epoch": 2.285030310050581, + "grad_norm": 0.8936790227890015, + "learning_rate": 4.766722009858811e-05, + "loss": 0.2966, + "step": 59180 + }, + { + "epoch": 2.285416425344608, + "grad_norm": 0.34459662437438965, + "learning_rate": 4.764147907898632e-05, + "loss": 0.2497, + "step": 59190 + }, + { + "epoch": 2.2858025406386346, + "grad_norm": 1.2364985942840576, + "learning_rate": 4.7615738059384534e-05, + "loss": 0.1951, + "step": 59200 + }, + { + "epoch": 2.2861886559326616, + "grad_norm": 0.43685224652290344, + "learning_rate": 4.758999703978275e-05, + "loss": 0.1707, + "step": 59210 + }, + { + "epoch": 2.286574771226688, + "grad_norm": 1.5797430276870728, + "learning_rate": 4.756425602018096e-05, + "loss": 0.1328, + "step": 59220 + }, + { + "epoch": 2.286960886520715, + "grad_norm": 1.0080262422561646, + "learning_rate": 4.7538515000579174e-05, + "loss": 0.2743, + "step": 59230 + }, + { + "epoch": 2.2873470018147417, + "grad_norm": 2.561823844909668, + "learning_rate": 4.751277398097739e-05, + "loss": 0.3414, + "step": 59240 + }, + { + "epoch": 2.2877331171087687, + "grad_norm": 1.4492895603179932, + "learning_rate": 4.748703296137561e-05, + "loss": 0.1942, + "step": 59250 + }, + { + "epoch": 2.2881192324027957, + "grad_norm": 2.6348021030426025, + "learning_rate": 4.7461291941773814e-05, + "loss": 0.2293, + "step": 59260 + }, + { + "epoch": 2.2885053476968222, + "grad_norm": 0.5275348424911499, + "learning_rate": 4.743555092217203e-05, + "loss": 0.1436, + "step": 59270 + }, + { + "epoch": 
2.2888914629908492, + "grad_norm": 0.3320735991001129, + "learning_rate": 4.740980990257025e-05, + "loss": 0.3213, + "step": 59280 + }, + { + "epoch": 2.2892775782848758, + "grad_norm": 0.33580052852630615, + "learning_rate": 4.738406888296846e-05, + "loss": 0.0988, + "step": 59290 + }, + { + "epoch": 2.2896636935789028, + "grad_norm": 0.6209644079208374, + "learning_rate": 4.735832786336667e-05, + "loss": 0.0969, + "step": 59300 + }, + { + "epoch": 2.2900498088729293, + "grad_norm": 0.8504312038421631, + "learning_rate": 4.733258684376488e-05, + "loss": 0.1328, + "step": 59310 + }, + { + "epoch": 2.2904359241669563, + "grad_norm": 1.8116223812103271, + "learning_rate": 4.73068458241631e-05, + "loss": 0.2032, + "step": 59320 + }, + { + "epoch": 2.2908220394609833, + "grad_norm": 3.810727596282959, + "learning_rate": 4.728110480456131e-05, + "loss": 0.2162, + "step": 59330 + }, + { + "epoch": 2.29120815475501, + "grad_norm": 0.39549925923347473, + "learning_rate": 4.725536378495952e-05, + "loss": 0.1421, + "step": 59340 + }, + { + "epoch": 2.291594270049037, + "grad_norm": 0.6922689080238342, + "learning_rate": 4.722962276535774e-05, + "loss": 0.23, + "step": 59350 + }, + { + "epoch": 2.2919803853430634, + "grad_norm": 2.8557536602020264, + "learning_rate": 4.7203881745755954e-05, + "loss": 0.2824, + "step": 59360 + }, + { + "epoch": 2.2923665006370904, + "grad_norm": 1.2936065196990967, + "learning_rate": 4.717814072615416e-05, + "loss": 0.2515, + "step": 59370 + }, + { + "epoch": 2.292752615931117, + "grad_norm": 0.17199493944644928, + "learning_rate": 4.715239970655238e-05, + "loss": 0.1029, + "step": 59380 + }, + { + "epoch": 2.293138731225144, + "grad_norm": 0.06957222521305084, + "learning_rate": 4.7126658686950594e-05, + "loss": 0.1535, + "step": 59390 + }, + { + "epoch": 2.2935248465191704, + "grad_norm": 0.24227716028690338, + "learning_rate": 4.710091766734881e-05, + "loss": 0.2497, + "step": 59400 + }, + { + "epoch": 2.2939109618131974, + "grad_norm": 0.49250227212905884, + "learning_rate": 4.707517664774702e-05, + "loss": 0.1416, + "step": 59410 + }, + { + "epoch": 2.294297077107224, + "grad_norm": 0.2832399010658264, + "learning_rate": 4.7049435628145234e-05, + "loss": 0.1156, + "step": 59420 + }, + { + "epoch": 2.294683192401251, + "grad_norm": 0.6916882395744324, + "learning_rate": 4.702369460854345e-05, + "loss": 0.315, + "step": 59430 + }, + { + "epoch": 2.295069307695278, + "grad_norm": 2.099567174911499, + "learning_rate": 4.699795358894166e-05, + "loss": 0.2208, + "step": 59440 + }, + { + "epoch": 2.2954554229893045, + "grad_norm": 0.9960312843322754, + "learning_rate": 4.6972212569339874e-05, + "loss": 0.2152, + "step": 59450 + }, + { + "epoch": 2.2958415382833315, + "grad_norm": 0.8338409662246704, + "learning_rate": 4.694647154973809e-05, + "loss": 0.1238, + "step": 59460 + }, + { + "epoch": 2.296227653577358, + "grad_norm": 1.1573169231414795, + "learning_rate": 4.69207305301363e-05, + "loss": 0.1141, + "step": 59470 + }, + { + "epoch": 2.296613768871385, + "grad_norm": 0.639020562171936, + "learning_rate": 4.6894989510534514e-05, + "loss": 0.1143, + "step": 59480 + }, + { + "epoch": 2.2969998841654116, + "grad_norm": 1.2339776754379272, + "learning_rate": 4.686924849093273e-05, + "loss": 0.0832, + "step": 59490 + }, + { + "epoch": 2.2973859994594386, + "grad_norm": 0.5958113074302673, + "learning_rate": 4.684350747133095e-05, + "loss": 0.1435, + "step": 59500 + }, + { + "epoch": 2.2977721147534655, + "grad_norm": 0.36563804745674133, + "learning_rate": 
4.6817766451729154e-05, + "loss": 0.21, + "step": 59510 + }, + { + "epoch": 2.298158230047492, + "grad_norm": 0.8008506894111633, + "learning_rate": 4.679202543212737e-05, + "loss": 0.2525, + "step": 59520 + }, + { + "epoch": 2.298544345341519, + "grad_norm": 1.644982099533081, + "learning_rate": 4.676628441252559e-05, + "loss": 0.1489, + "step": 59530 + }, + { + "epoch": 2.2989304606355456, + "grad_norm": 0.8418545126914978, + "learning_rate": 4.6740543392923793e-05, + "loss": 0.1275, + "step": 59540 + }, + { + "epoch": 2.2993165759295726, + "grad_norm": 1.7769790887832642, + "learning_rate": 4.671480237332201e-05, + "loss": 0.2149, + "step": 59550 + }, + { + "epoch": 2.299702691223599, + "grad_norm": 1.3088769912719727, + "learning_rate": 4.668906135372022e-05, + "loss": 0.3798, + "step": 59560 + }, + { + "epoch": 2.300088806517626, + "grad_norm": 0.0827949270606041, + "learning_rate": 4.666332033411844e-05, + "loss": 0.2128, + "step": 59570 + }, + { + "epoch": 2.300474921811653, + "grad_norm": 0.7772637605667114, + "learning_rate": 4.663757931451665e-05, + "loss": 0.1731, + "step": 59580 + }, + { + "epoch": 2.3008610371056797, + "grad_norm": 3.8065452575683594, + "learning_rate": 4.661183829491486e-05, + "loss": 0.3464, + "step": 59590 + }, + { + "epoch": 2.3012471523997067, + "grad_norm": 0.10512294620275497, + "learning_rate": 4.658609727531308e-05, + "loss": 0.1584, + "step": 59600 + }, + { + "epoch": 2.301633267693733, + "grad_norm": 1.227293848991394, + "learning_rate": 4.6560356255711293e-05, + "loss": 0.2381, + "step": 59610 + }, + { + "epoch": 2.30201938298776, + "grad_norm": 0.2790459394454956, + "learning_rate": 4.65346152361095e-05, + "loss": 0.162, + "step": 59620 + }, + { + "epoch": 2.3024054982817868, + "grad_norm": 0.571444034576416, + "learning_rate": 4.650887421650772e-05, + "loss": 0.1354, + "step": 59630 + }, + { + "epoch": 2.3027916135758137, + "grad_norm": 0.6388635635375977, + "learning_rate": 4.648313319690593e-05, + "loss": 0.0969, + "step": 59640 + }, + { + "epoch": 2.3031777288698407, + "grad_norm": 3.020249843597412, + "learning_rate": 4.645739217730414e-05, + "loss": 0.15, + "step": 59650 + }, + { + "epoch": 2.3035638441638673, + "grad_norm": 1.6712716817855835, + "learning_rate": 4.643165115770236e-05, + "loss": 0.1445, + "step": 59660 + }, + { + "epoch": 2.3039499594578943, + "grad_norm": 0.4937114119529724, + "learning_rate": 4.640591013810057e-05, + "loss": 0.0823, + "step": 59670 + }, + { + "epoch": 2.304336074751921, + "grad_norm": 1.7938934564590454, + "learning_rate": 4.6380169118498787e-05, + "loss": 0.164, + "step": 59680 + }, + { + "epoch": 2.304722190045948, + "grad_norm": 1.9819937944412231, + "learning_rate": 4.6354428098897e-05, + "loss": 0.1753, + "step": 59690 + }, + { + "epoch": 2.3051083053399744, + "grad_norm": 1.2775839567184448, + "learning_rate": 4.632868707929521e-05, + "loss": 0.1875, + "step": 59700 + }, + { + "epoch": 2.3054944206340013, + "grad_norm": 0.775601863861084, + "learning_rate": 4.6302946059693427e-05, + "loss": 0.0737, + "step": 59710 + }, + { + "epoch": 2.3058805359280283, + "grad_norm": 0.9071961045265198, + "learning_rate": 4.627720504009164e-05, + "loss": 0.1632, + "step": 59720 + }, + { + "epoch": 2.306266651222055, + "grad_norm": 1.4232882261276245, + "learning_rate": 4.625146402048985e-05, + "loss": 0.2169, + "step": 59730 + }, + { + "epoch": 2.306652766516082, + "grad_norm": 0.5946634411811829, + "learning_rate": 4.6225723000888066e-05, + "loss": 0.1919, + "step": 59740 + }, + { + "epoch": 2.3070388818101084, + 
"grad_norm": 0.14276131987571716, + "learning_rate": 4.6199981981286287e-05, + "loss": 0.1415, + "step": 59750 + }, + { + "epoch": 2.3074249971041354, + "grad_norm": 1.5616459846496582, + "learning_rate": 4.617424096168449e-05, + "loss": 0.1668, + "step": 59760 + }, + { + "epoch": 2.307811112398162, + "grad_norm": 0.34254691004753113, + "learning_rate": 4.6148499942082706e-05, + "loss": 0.2122, + "step": 59770 + }, + { + "epoch": 2.308197227692189, + "grad_norm": 0.34764161705970764, + "learning_rate": 4.6122758922480926e-05, + "loss": 0.1604, + "step": 59780 + }, + { + "epoch": 2.308583342986216, + "grad_norm": 3.1887755393981934, + "learning_rate": 4.609701790287913e-05, + "loss": 0.124, + "step": 59790 + }, + { + "epoch": 2.3089694582802425, + "grad_norm": 0.8155665397644043, + "learning_rate": 4.6071276883277346e-05, + "loss": 0.1095, + "step": 59800 + }, + { + "epoch": 2.3093555735742695, + "grad_norm": 0.2734861373901367, + "learning_rate": 4.6045535863675566e-05, + "loss": 0.2389, + "step": 59810 + }, + { + "epoch": 2.309741688868296, + "grad_norm": 0.5696996450424194, + "learning_rate": 4.601979484407378e-05, + "loss": 0.0787, + "step": 59820 + }, + { + "epoch": 2.310127804162323, + "grad_norm": 0.49777019023895264, + "learning_rate": 4.5994053824471986e-05, + "loss": 0.1815, + "step": 59830 + }, + { + "epoch": 2.3105139194563495, + "grad_norm": 0.001843929523602128, + "learning_rate": 4.59683128048702e-05, + "loss": 0.0834, + "step": 59840 + }, + { + "epoch": 2.3109000347503765, + "grad_norm": 0.1088326945900917, + "learning_rate": 4.594257178526842e-05, + "loss": 0.1821, + "step": 59850 + }, + { + "epoch": 2.311286150044403, + "grad_norm": 0.4867718815803528, + "learning_rate": 4.591683076566663e-05, + "loss": 0.2175, + "step": 59860 + }, + { + "epoch": 2.31167226533843, + "grad_norm": 0.759501576423645, + "learning_rate": 4.589108974606484e-05, + "loss": 0.1041, + "step": 59870 + }, + { + "epoch": 2.3120583806324566, + "grad_norm": 1.3260136842727661, + "learning_rate": 4.586534872646306e-05, + "loss": 0.2017, + "step": 59880 + }, + { + "epoch": 2.3124444959264836, + "grad_norm": 3.664853572845459, + "learning_rate": 4.583960770686127e-05, + "loss": 0.2322, + "step": 59890 + }, + { + "epoch": 2.3128306112205106, + "grad_norm": 0.9447748064994812, + "learning_rate": 4.581386668725948e-05, + "loss": 0.1124, + "step": 59900 + }, + { + "epoch": 2.313216726514537, + "grad_norm": 2.8684070110321045, + "learning_rate": 4.57881256676577e-05, + "loss": 0.1271, + "step": 59910 + }, + { + "epoch": 2.313602841808564, + "grad_norm": 2.4595460891723633, + "learning_rate": 4.576238464805591e-05, + "loss": 0.1695, + "step": 59920 + }, + { + "epoch": 2.3139889571025907, + "grad_norm": 0.8317721486091614, + "learning_rate": 4.5736643628454126e-05, + "loss": 0.1341, + "step": 59930 + }, + { + "epoch": 2.3143750723966177, + "grad_norm": 1.8924741744995117, + "learning_rate": 4.571090260885234e-05, + "loss": 0.3389, + "step": 59940 + }, + { + "epoch": 2.314761187690644, + "grad_norm": 0.22777517139911652, + "learning_rate": 4.568516158925055e-05, + "loss": 0.1406, + "step": 59950 + }, + { + "epoch": 2.315147302984671, + "grad_norm": 1.9150850772857666, + "learning_rate": 4.5659420569648766e-05, + "loss": 0.1615, + "step": 59960 + }, + { + "epoch": 2.315533418278698, + "grad_norm": 1.361603856086731, + "learning_rate": 4.563367955004698e-05, + "loss": 0.1616, + "step": 59970 + }, + { + "epoch": 2.3159195335727247, + "grad_norm": 1.1094087362289429, + "learning_rate": 4.560793853044519e-05, + "loss": 
0.1923, + "step": 59980 + }, + { + "epoch": 2.3163056488667517, + "grad_norm": 0.5429170727729797, + "learning_rate": 4.5582197510843406e-05, + "loss": 0.2126, + "step": 59990 + }, + { + "epoch": 2.3166917641607783, + "grad_norm": 0.8391672968864441, + "learning_rate": 4.555645649124162e-05, + "loss": 0.246, + "step": 60000 + }, + { + "epoch": 2.3170778794548053, + "grad_norm": 0.6325327157974243, + "learning_rate": 4.553071547163983e-05, + "loss": 0.1385, + "step": 60010 + }, + { + "epoch": 2.317463994748832, + "grad_norm": 0.4999966025352478, + "learning_rate": 4.5504974452038046e-05, + "loss": 0.2083, + "step": 60020 + }, + { + "epoch": 2.317850110042859, + "grad_norm": 2.6003923416137695, + "learning_rate": 4.5479233432436266e-05, + "loss": 0.1553, + "step": 60030 + }, + { + "epoch": 2.318236225336886, + "grad_norm": 2.107546091079712, + "learning_rate": 4.545349241283447e-05, + "loss": 0.2252, + "step": 60040 + }, + { + "epoch": 2.3186223406309123, + "grad_norm": 0.4469972550868988, + "learning_rate": 4.5427751393232686e-05, + "loss": 0.1913, + "step": 60050 + }, + { + "epoch": 2.3190084559249393, + "grad_norm": 1.0861988067626953, + "learning_rate": 4.5402010373630906e-05, + "loss": 0.16, + "step": 60060 + }, + { + "epoch": 2.319394571218966, + "grad_norm": 1.6397405862808228, + "learning_rate": 4.537626935402912e-05, + "loss": 0.1527, + "step": 60070 + }, + { + "epoch": 2.319780686512993, + "grad_norm": 2.6963932514190674, + "learning_rate": 4.5350528334427326e-05, + "loss": 0.2445, + "step": 60080 + }, + { + "epoch": 2.3201668018070194, + "grad_norm": 1.8489919900894165, + "learning_rate": 4.532478731482554e-05, + "loss": 0.1972, + "step": 60090 + }, + { + "epoch": 2.3205529171010464, + "grad_norm": 1.108892560005188, + "learning_rate": 4.529904629522376e-05, + "loss": 0.1301, + "step": 60100 + }, + { + "epoch": 2.3209390323950734, + "grad_norm": 1.8046364784240723, + "learning_rate": 4.5273305275621966e-05, + "loss": 0.184, + "step": 60110 + }, + { + "epoch": 2.3213251476891, + "grad_norm": 0.346176415681839, + "learning_rate": 4.524756425602018e-05, + "loss": 0.0723, + "step": 60120 + }, + { + "epoch": 2.321711262983127, + "grad_norm": 0.8505159020423889, + "learning_rate": 4.52218232364184e-05, + "loss": 0.1994, + "step": 60130 + }, + { + "epoch": 2.3220973782771535, + "grad_norm": 1.0866034030914307, + "learning_rate": 4.519608221681661e-05, + "loss": 0.097, + "step": 60140 + }, + { + "epoch": 2.3224834935711804, + "grad_norm": 2.6288974285125732, + "learning_rate": 4.517034119721482e-05, + "loss": 0.1419, + "step": 60150 + }, + { + "epoch": 2.322869608865207, + "grad_norm": 0.4779375195503235, + "learning_rate": 4.514460017761304e-05, + "loss": 0.0999, + "step": 60160 + }, + { + "epoch": 2.323255724159234, + "grad_norm": 1.1789641380310059, + "learning_rate": 4.511885915801125e-05, + "loss": 0.1184, + "step": 60170 + }, + { + "epoch": 2.323641839453261, + "grad_norm": 0.409001886844635, + "learning_rate": 4.5093118138409466e-05, + "loss": 0.1913, + "step": 60180 + }, + { + "epoch": 2.3240279547472875, + "grad_norm": 1.5853062868118286, + "learning_rate": 4.506737711880768e-05, + "loss": 0.1941, + "step": 60190 + }, + { + "epoch": 2.3244140700413145, + "grad_norm": 3.547631025314331, + "learning_rate": 4.504163609920589e-05, + "loss": 0.2272, + "step": 60200 + }, + { + "epoch": 2.324800185335341, + "grad_norm": 0.3084076941013336, + "learning_rate": 4.5015895079604106e-05, + "loss": 0.1543, + "step": 60210 + }, + { + "epoch": 2.325186300629368, + "grad_norm": 
0.055422622710466385, + "learning_rate": 4.499015406000232e-05, + "loss": 0.1878, + "step": 60220 + }, + { + "epoch": 2.3255724159233946, + "grad_norm": 0.7743698954582214, + "learning_rate": 4.496441304040053e-05, + "loss": 0.2342, + "step": 60230 + }, + { + "epoch": 2.3259585312174216, + "grad_norm": 0.940558671951294, + "learning_rate": 4.4938672020798746e-05, + "loss": 0.3158, + "step": 60240 + }, + { + "epoch": 2.3263446465114486, + "grad_norm": 0.35941219329833984, + "learning_rate": 4.491293100119696e-05, + "loss": 0.15, + "step": 60250 + }, + { + "epoch": 2.326730761805475, + "grad_norm": 0.5877255201339722, + "learning_rate": 4.488718998159517e-05, + "loss": 0.2713, + "step": 60260 + }, + { + "epoch": 2.327116877099502, + "grad_norm": 1.0649851560592651, + "learning_rate": 4.4861448961993386e-05, + "loss": 0.1789, + "step": 60270 + }, + { + "epoch": 2.3275029923935286, + "grad_norm": 0.9025186896324158, + "learning_rate": 4.4835707942391606e-05, + "loss": 0.0883, + "step": 60280 + }, + { + "epoch": 2.3278891076875556, + "grad_norm": 0.08007670938968658, + "learning_rate": 4.480996692278981e-05, + "loss": 0.2019, + "step": 60290 + }, + { + "epoch": 2.328275222981582, + "grad_norm": 0.2300902009010315, + "learning_rate": 4.4784225903188025e-05, + "loss": 0.1448, + "step": 60300 + }, + { + "epoch": 2.328661338275609, + "grad_norm": 0.3912908732891083, + "learning_rate": 4.4758484883586246e-05, + "loss": 0.1869, + "step": 60310 + }, + { + "epoch": 2.3290474535696357, + "grad_norm": 0.5869823098182678, + "learning_rate": 4.473274386398445e-05, + "loss": 0.0829, + "step": 60320 + }, + { + "epoch": 2.3294335688636627, + "grad_norm": 0.5597032308578491, + "learning_rate": 4.4707002844382665e-05, + "loss": 0.1639, + "step": 60330 + }, + { + "epoch": 2.3298196841576893, + "grad_norm": 0.3138620853424072, + "learning_rate": 4.468126182478088e-05, + "loss": 0.1028, + "step": 60340 + }, + { + "epoch": 2.3302057994517162, + "grad_norm": 0.10838694870471954, + "learning_rate": 4.46555208051791e-05, + "loss": 0.1406, + "step": 60350 + }, + { + "epoch": 2.3305919147457432, + "grad_norm": 1.1807544231414795, + "learning_rate": 4.4629779785577305e-05, + "loss": 0.1183, + "step": 60360 + }, + { + "epoch": 2.3309780300397698, + "grad_norm": 0.2713087499141693, + "learning_rate": 4.460403876597552e-05, + "loss": 0.108, + "step": 60370 + }, + { + "epoch": 2.3313641453337968, + "grad_norm": 0.8787125945091248, + "learning_rate": 4.457829774637374e-05, + "loss": 0.2446, + "step": 60380 + }, + { + "epoch": 2.3317502606278233, + "grad_norm": 1.3905388116836548, + "learning_rate": 4.455255672677195e-05, + "loss": 0.1485, + "step": 60390 + }, + { + "epoch": 2.3321363759218503, + "grad_norm": 1.5408064126968384, + "learning_rate": 4.452681570717016e-05, + "loss": 0.2076, + "step": 60400 + }, + { + "epoch": 2.332522491215877, + "grad_norm": 0.034131214022636414, + "learning_rate": 4.450107468756838e-05, + "loss": 0.0905, + "step": 60410 + }, + { + "epoch": 2.332908606509904, + "grad_norm": 1.067578673362732, + "learning_rate": 4.447533366796659e-05, + "loss": 0.1221, + "step": 60420 + }, + { + "epoch": 2.333294721803931, + "grad_norm": 1.5126134157180786, + "learning_rate": 4.44495926483648e-05, + "loss": 0.5139, + "step": 60430 + }, + { + "epoch": 2.3336808370979574, + "grad_norm": 3.148021697998047, + "learning_rate": 4.442385162876302e-05, + "loss": 0.2504, + "step": 60440 + }, + { + "epoch": 2.3340669523919844, + "grad_norm": 0.7882575392723083, + "learning_rate": 4.439811060916123e-05, + "loss": 0.2593, + 
"step": 60450 + }, + { + "epoch": 2.334453067686011, + "grad_norm": 1.124514102935791, + "learning_rate": 4.4372369589559445e-05, + "loss": 0.303, + "step": 60460 + }, + { + "epoch": 2.334839182980038, + "grad_norm": 0.064745232462883, + "learning_rate": 4.434662856995766e-05, + "loss": 0.1734, + "step": 60470 + }, + { + "epoch": 2.3352252982740644, + "grad_norm": 0.08392655104398727, + "learning_rate": 4.432088755035587e-05, + "loss": 0.1492, + "step": 60480 + }, + { + "epoch": 2.3356114135680914, + "grad_norm": 0.9123765230178833, + "learning_rate": 4.4295146530754085e-05, + "loss": 0.1689, + "step": 60490 + }, + { + "epoch": 2.3359975288621184, + "grad_norm": 1.0740617513656616, + "learning_rate": 4.42694055111523e-05, + "loss": 0.1954, + "step": 60500 + }, + { + "epoch": 2.336383644156145, + "grad_norm": 0.409637987613678, + "learning_rate": 4.424366449155051e-05, + "loss": 0.2164, + "step": 60510 + }, + { + "epoch": 2.336769759450172, + "grad_norm": 2.4724161624908447, + "learning_rate": 4.4217923471948725e-05, + "loss": 0.2736, + "step": 60520 + }, + { + "epoch": 2.3371558747441985, + "grad_norm": 1.966937780380249, + "learning_rate": 4.4192182452346945e-05, + "loss": 0.2463, + "step": 60530 + }, + { + "epoch": 2.3375419900382255, + "grad_norm": 1.0093505382537842, + "learning_rate": 4.416644143274515e-05, + "loss": 0.1524, + "step": 60540 + }, + { + "epoch": 2.337928105332252, + "grad_norm": 0.9283536076545715, + "learning_rate": 4.4140700413143365e-05, + "loss": 0.1174, + "step": 60550 + }, + { + "epoch": 2.338314220626279, + "grad_norm": 1.4851809740066528, + "learning_rate": 4.4114959393541585e-05, + "loss": 0.1088, + "step": 60560 + }, + { + "epoch": 2.338700335920306, + "grad_norm": 2.666454315185547, + "learning_rate": 4.408921837393979e-05, + "loss": 0.186, + "step": 60570 + }, + { + "epoch": 2.3390864512143326, + "grad_norm": 0.34516963362693787, + "learning_rate": 4.4063477354338005e-05, + "loss": 0.1364, + "step": 60580 + }, + { + "epoch": 2.3394725665083596, + "grad_norm": 1.697920560836792, + "learning_rate": 4.4037736334736225e-05, + "loss": 0.1708, + "step": 60590 + }, + { + "epoch": 2.339858681802386, + "grad_norm": 1.542663812637329, + "learning_rate": 4.401199531513444e-05, + "loss": 0.2926, + "step": 60600 + }, + { + "epoch": 2.340244797096413, + "grad_norm": 1.080894112586975, + "learning_rate": 4.3986254295532645e-05, + "loss": 0.1114, + "step": 60610 + }, + { + "epoch": 2.3406309123904396, + "grad_norm": 0.7464519739151001, + "learning_rate": 4.396051327593086e-05, + "loss": 0.2846, + "step": 60620 + }, + { + "epoch": 2.3410170276844666, + "grad_norm": 1.4161779880523682, + "learning_rate": 4.393477225632908e-05, + "loss": 0.1181, + "step": 60630 + }, + { + "epoch": 2.3414031429784936, + "grad_norm": 0.5969855785369873, + "learning_rate": 4.390903123672729e-05, + "loss": 0.2574, + "step": 60640 + }, + { + "epoch": 2.34178925827252, + "grad_norm": 2.8204824924468994, + "learning_rate": 4.38832902171255e-05, + "loss": 0.1496, + "step": 60650 + }, + { + "epoch": 2.342175373566547, + "grad_norm": 0.5998751521110535, + "learning_rate": 4.385754919752372e-05, + "loss": 0.1849, + "step": 60660 + }, + { + "epoch": 2.3425614888605737, + "grad_norm": 0.5183271169662476, + "learning_rate": 4.383180817792193e-05, + "loss": 0.1577, + "step": 60670 + }, + { + "epoch": 2.3429476041546007, + "grad_norm": 3.008211374282837, + "learning_rate": 4.380606715832014e-05, + "loss": 0.2106, + "step": 60680 + }, + { + "epoch": 2.3433337194486272, + "grad_norm": 0.026534082368016243, + 
"learning_rate": 4.378032613871836e-05, + "loss": 0.0859, + "step": 60690 + }, + { + "epoch": 2.343719834742654, + "grad_norm": 0.7964476943016052, + "learning_rate": 4.375458511911657e-05, + "loss": 0.2276, + "step": 60700 + }, + { + "epoch": 2.3441059500366808, + "grad_norm": 0.9255203008651733, + "learning_rate": 4.3728844099514785e-05, + "loss": 0.1278, + "step": 60710 + }, + { + "epoch": 2.3444920653307078, + "grad_norm": 0.06721694767475128, + "learning_rate": 4.3703103079913e-05, + "loss": 0.188, + "step": 60720 + }, + { + "epoch": 2.3448781806247343, + "grad_norm": 1.5369101762771606, + "learning_rate": 4.367736206031121e-05, + "loss": 0.2286, + "step": 60730 + }, + { + "epoch": 2.3452642959187613, + "grad_norm": 2.3550243377685547, + "learning_rate": 4.3651621040709425e-05, + "loss": 0.1924, + "step": 60740 + }, + { + "epoch": 2.3456504112127883, + "grad_norm": 1.436240553855896, + "learning_rate": 4.362588002110764e-05, + "loss": 0.179, + "step": 60750 + }, + { + "epoch": 2.346036526506815, + "grad_norm": 4.520357131958008, + "learning_rate": 4.360013900150585e-05, + "loss": 0.3386, + "step": 60760 + }, + { + "epoch": 2.346422641800842, + "grad_norm": 2.983982563018799, + "learning_rate": 4.3574397981904065e-05, + "loss": 0.178, + "step": 60770 + }, + { + "epoch": 2.3468087570948684, + "grad_norm": 0.429884672164917, + "learning_rate": 4.354865696230228e-05, + "loss": 0.0962, + "step": 60780 + }, + { + "epoch": 2.3471948723888953, + "grad_norm": 0.5854440927505493, + "learning_rate": 4.352291594270049e-05, + "loss": 0.1934, + "step": 60790 + }, + { + "epoch": 2.347580987682922, + "grad_norm": 1.087031602859497, + "learning_rate": 4.3497174923098705e-05, + "loss": 0.1191, + "step": 60800 + }, + { + "epoch": 2.347967102976949, + "grad_norm": 1.5751805305480957, + "learning_rate": 4.3471433903496925e-05, + "loss": 0.147, + "step": 60810 + }, + { + "epoch": 2.348353218270976, + "grad_norm": 0.03129373490810394, + "learning_rate": 4.344569288389513e-05, + "loss": 0.1388, + "step": 60820 + }, + { + "epoch": 2.3487393335650024, + "grad_norm": 1.380611777305603, + "learning_rate": 4.3419951864293344e-05, + "loss": 0.2517, + "step": 60830 + }, + { + "epoch": 2.3491254488590294, + "grad_norm": 0.5196431279182434, + "learning_rate": 4.3394210844691565e-05, + "loss": 0.1825, + "step": 60840 + }, + { + "epoch": 2.349511564153056, + "grad_norm": 0.729242205619812, + "learning_rate": 4.336846982508978e-05, + "loss": 0.1716, + "step": 60850 + }, + { + "epoch": 2.349897679447083, + "grad_norm": 0.4489123225212097, + "learning_rate": 4.3342728805487984e-05, + "loss": 0.1318, + "step": 60860 + }, + { + "epoch": 2.3502837947411095, + "grad_norm": 0.41065114736557007, + "learning_rate": 4.33169877858862e-05, + "loss": 0.1579, + "step": 60870 + }, + { + "epoch": 2.3506699100351365, + "grad_norm": 1.2845816612243652, + "learning_rate": 4.329124676628442e-05, + "loss": 0.1613, + "step": 60880 + }, + { + "epoch": 2.3510560253291635, + "grad_norm": 1.17366623878479, + "learning_rate": 4.3265505746682624e-05, + "loss": 0.147, + "step": 60890 + }, + { + "epoch": 2.35144214062319, + "grad_norm": 1.3020472526550293, + "learning_rate": 4.323976472708084e-05, + "loss": 0.284, + "step": 60900 + }, + { + "epoch": 2.351828255917217, + "grad_norm": 0.9030712842941284, + "learning_rate": 4.321402370747906e-05, + "loss": 0.2745, + "step": 60910 + }, + { + "epoch": 2.3522143712112435, + "grad_norm": 0.5040395259857178, + "learning_rate": 4.318828268787727e-05, + "loss": 0.1001, + "step": 60920 + }, + { + "epoch": 
2.3526004865052705, + "grad_norm": 0.2728300094604492, + "learning_rate": 4.316254166827548e-05, + "loss": 0.1126, + "step": 60930 + }, + { + "epoch": 2.352986601799297, + "grad_norm": 0.5255390405654907, + "learning_rate": 4.31368006486737e-05, + "loss": 0.1743, + "step": 60940 + }, + { + "epoch": 2.353372717093324, + "grad_norm": 0.05678205192089081, + "learning_rate": 4.311105962907191e-05, + "loss": 0.2214, + "step": 60950 + }, + { + "epoch": 2.353758832387351, + "grad_norm": 0.9271873235702515, + "learning_rate": 4.3085318609470124e-05, + "loss": 0.2263, + "step": 60960 + }, + { + "epoch": 2.3541449476813776, + "grad_norm": 1.0438083410263062, + "learning_rate": 4.305957758986834e-05, + "loss": 0.3506, + "step": 60970 + }, + { + "epoch": 2.3545310629754046, + "grad_norm": 1.231704831123352, + "learning_rate": 4.303383657026655e-05, + "loss": 0.2058, + "step": 60980 + }, + { + "epoch": 2.354917178269431, + "grad_norm": 1.4421464204788208, + "learning_rate": 4.3008095550664764e-05, + "loss": 0.182, + "step": 60990 + }, + { + "epoch": 2.355303293563458, + "grad_norm": 0.883007287979126, + "learning_rate": 4.298235453106298e-05, + "loss": 0.1714, + "step": 61000 + }, + { + "epoch": 2.3556894088574847, + "grad_norm": 0.49014022946357727, + "learning_rate": 4.295661351146119e-05, + "loss": 0.0846, + "step": 61010 + }, + { + "epoch": 2.3560755241515117, + "grad_norm": 1.3218421936035156, + "learning_rate": 4.2930872491859404e-05, + "loss": 0.0946, + "step": 61020 + }, + { + "epoch": 2.3564616394455387, + "grad_norm": 0.08953634649515152, + "learning_rate": 4.290513147225762e-05, + "loss": 0.2867, + "step": 61030 + }, + { + "epoch": 2.356847754739565, + "grad_norm": 1.4117354154586792, + "learning_rate": 4.287939045265583e-05, + "loss": 0.2493, + "step": 61040 + }, + { + "epoch": 2.357233870033592, + "grad_norm": 0.7844822406768799, + "learning_rate": 4.2853649433054044e-05, + "loss": 0.2385, + "step": 61050 + }, + { + "epoch": 2.3576199853276187, + "grad_norm": 0.0865604355931282, + "learning_rate": 4.2827908413452264e-05, + "loss": 0.0795, + "step": 61060 + }, + { + "epoch": 2.3580061006216457, + "grad_norm": 0.49625343084335327, + "learning_rate": 4.280216739385047e-05, + "loss": 0.0894, + "step": 61070 + }, + { + "epoch": 2.3583922159156723, + "grad_norm": 0.039102040231227875, + "learning_rate": 4.2776426374248684e-05, + "loss": 0.1134, + "step": 61080 + }, + { + "epoch": 2.3587783312096993, + "grad_norm": 0.6378281116485596, + "learning_rate": 4.2750685354646904e-05, + "loss": 0.2716, + "step": 61090 + }, + { + "epoch": 2.3591644465037263, + "grad_norm": 1.9550119638442993, + "learning_rate": 4.272494433504511e-05, + "loss": 0.2008, + "step": 61100 + }, + { + "epoch": 2.359550561797753, + "grad_norm": 0.18372145295143127, + "learning_rate": 4.2699203315443324e-05, + "loss": 0.1762, + "step": 61110 + }, + { + "epoch": 2.35993667709178, + "grad_norm": 0.24232423305511475, + "learning_rate": 4.2673462295841544e-05, + "loss": 0.3316, + "step": 61120 + }, + { + "epoch": 2.3603227923858063, + "grad_norm": 1.474071741104126, + "learning_rate": 4.264772127623976e-05, + "loss": 0.1938, + "step": 61130 + }, + { + "epoch": 2.3607089076798333, + "grad_norm": 0.43742164969444275, + "learning_rate": 4.2621980256637964e-05, + "loss": 0.1667, + "step": 61140 + }, + { + "epoch": 2.36109502297386, + "grad_norm": 0.505805253982544, + "learning_rate": 4.259623923703618e-05, + "loss": 0.2649, + "step": 61150 + }, + { + "epoch": 2.361481138267887, + "grad_norm": 0.8793296813964844, + "learning_rate": 
4.25704982174344e-05, + "loss": 0.1231, + "step": 61160 + }, + { + "epoch": 2.3618672535619134, + "grad_norm": 0.19883646070957184, + "learning_rate": 4.254475719783261e-05, + "loss": 0.163, + "step": 61170 + }, + { + "epoch": 2.3622533688559404, + "grad_norm": 0.4133305847644806, + "learning_rate": 4.251901617823082e-05, + "loss": 0.1632, + "step": 61180 + }, + { + "epoch": 2.362639484149967, + "grad_norm": 0.8530174493789673, + "learning_rate": 4.249327515862904e-05, + "loss": 0.0851, + "step": 61190 + }, + { + "epoch": 2.363025599443994, + "grad_norm": 1.6462198495864868, + "learning_rate": 4.246753413902725e-05, + "loss": 0.2051, + "step": 61200 + }, + { + "epoch": 2.363411714738021, + "grad_norm": 1.284153699874878, + "learning_rate": 4.244179311942546e-05, + "loss": 0.227, + "step": 61210 + }, + { + "epoch": 2.3637978300320475, + "grad_norm": 0.5583304166793823, + "learning_rate": 4.241605209982368e-05, + "loss": 0.0665, + "step": 61220 + }, + { + "epoch": 2.3641839453260745, + "grad_norm": 1.0726197957992554, + "learning_rate": 4.239031108022189e-05, + "loss": 0.1465, + "step": 61230 + }, + { + "epoch": 2.364570060620101, + "grad_norm": 0.15589381754398346, + "learning_rate": 4.2364570060620104e-05, + "loss": 0.1599, + "step": 61240 + }, + { + "epoch": 2.364956175914128, + "grad_norm": 1.9759862422943115, + "learning_rate": 4.233882904101832e-05, + "loss": 0.2904, + "step": 61250 + }, + { + "epoch": 2.3653422912081545, + "grad_norm": 0.20566493272781372, + "learning_rate": 4.231308802141653e-05, + "loss": 0.2447, + "step": 61260 + }, + { + "epoch": 2.3657284065021815, + "grad_norm": 0.33343741297721863, + "learning_rate": 4.2287347001814744e-05, + "loss": 0.2157, + "step": 61270 + }, + { + "epoch": 2.3661145217962085, + "grad_norm": 0.6890573501586914, + "learning_rate": 4.226160598221296e-05, + "loss": 0.1866, + "step": 61280 + }, + { + "epoch": 2.366500637090235, + "grad_norm": 0.1372109055519104, + "learning_rate": 4.223586496261117e-05, + "loss": 0.1851, + "step": 61290 + }, + { + "epoch": 2.366886752384262, + "grad_norm": 0.8812543749809265, + "learning_rate": 4.2210123943009384e-05, + "loss": 0.1186, + "step": 61300 + }, + { + "epoch": 2.3672728676782886, + "grad_norm": 0.7651077508926392, + "learning_rate": 4.2184382923407604e-05, + "loss": 0.1168, + "step": 61310 + }, + { + "epoch": 2.3676589829723156, + "grad_norm": 0.886715292930603, + "learning_rate": 4.215864190380581e-05, + "loss": 0.1403, + "step": 61320 + }, + { + "epoch": 2.368045098266342, + "grad_norm": 1.4525467157363892, + "learning_rate": 4.2132900884204024e-05, + "loss": 0.0951, + "step": 61330 + }, + { + "epoch": 2.368431213560369, + "grad_norm": 1.490551233291626, + "learning_rate": 4.2107159864602244e-05, + "loss": 0.1127, + "step": 61340 + }, + { + "epoch": 2.368817328854396, + "grad_norm": 1.7452077865600586, + "learning_rate": 4.208141884500045e-05, + "loss": 0.0958, + "step": 61350 + }, + { + "epoch": 2.3692034441484227, + "grad_norm": 1.6857271194458008, + "learning_rate": 4.2055677825398664e-05, + "loss": 0.1731, + "step": 61360 + }, + { + "epoch": 2.3695895594424496, + "grad_norm": 0.5354145765304565, + "learning_rate": 4.2029936805796884e-05, + "loss": 0.1051, + "step": 61370 + }, + { + "epoch": 2.369975674736476, + "grad_norm": 0.18171580135822296, + "learning_rate": 4.20041957861951e-05, + "loss": 0.1761, + "step": 61380 + }, + { + "epoch": 2.370361790030503, + "grad_norm": 1.021549940109253, + "learning_rate": 4.1978454766593303e-05, + "loss": 0.1949, + "step": 61390 + }, + { + "epoch": 
2.3707479053245297, + "grad_norm": 1.4387668371200562, + "learning_rate": 4.195271374699152e-05, + "loss": 0.1864, + "step": 61400 + }, + { + "epoch": 2.3711340206185567, + "grad_norm": 0.24176666140556335, + "learning_rate": 4.192697272738974e-05, + "loss": 0.2683, + "step": 61410 + }, + { + "epoch": 2.3715201359125837, + "grad_norm": 1.2240315675735474, + "learning_rate": 4.190123170778795e-05, + "loss": 0.195, + "step": 61420 + }, + { + "epoch": 2.3719062512066102, + "grad_norm": 2.242389440536499, + "learning_rate": 4.187549068818616e-05, + "loss": 0.1074, + "step": 61430 + }, + { + "epoch": 2.3722923665006372, + "grad_norm": 0.7379412055015564, + "learning_rate": 4.184974966858438e-05, + "loss": 0.1394, + "step": 61440 + }, + { + "epoch": 2.372678481794664, + "grad_norm": 1.3384835720062256, + "learning_rate": 4.182400864898259e-05, + "loss": 0.248, + "step": 61450 + }, + { + "epoch": 2.3730645970886908, + "grad_norm": 0.23063971102237701, + "learning_rate": 4.17982676293808e-05, + "loss": 0.1458, + "step": 61460 + }, + { + "epoch": 2.3734507123827173, + "grad_norm": 0.6873703598976135, + "learning_rate": 4.177252660977902e-05, + "loss": 0.1315, + "step": 61470 + }, + { + "epoch": 2.3738368276767443, + "grad_norm": 1.462497591972351, + "learning_rate": 4.174678559017723e-05, + "loss": 0.1031, + "step": 61480 + }, + { + "epoch": 2.3742229429707713, + "grad_norm": 1.403594732284546, + "learning_rate": 4.172104457057544e-05, + "loss": 0.1962, + "step": 61490 + }, + { + "epoch": 2.374609058264798, + "grad_norm": 3.3132827281951904, + "learning_rate": 4.169530355097366e-05, + "loss": 0.243, + "step": 61500 + }, + { + "epoch": 2.374995173558825, + "grad_norm": 0.5474012494087219, + "learning_rate": 4.166956253137187e-05, + "loss": 0.1087, + "step": 61510 + }, + { + "epoch": 2.3753812888528514, + "grad_norm": 1.2518501281738281, + "learning_rate": 4.164382151177008e-05, + "loss": 0.1119, + "step": 61520 + }, + { + "epoch": 2.3757674041468784, + "grad_norm": 0.10591934621334076, + "learning_rate": 4.16180804921683e-05, + "loss": 0.238, + "step": 61530 + }, + { + "epoch": 2.376153519440905, + "grad_norm": 0.7095358967781067, + "learning_rate": 4.159233947256651e-05, + "loss": 0.1082, + "step": 61540 + }, + { + "epoch": 2.376539634734932, + "grad_norm": 0.09203200787305832, + "learning_rate": 4.156659845296472e-05, + "loss": 0.1178, + "step": 61550 + }, + { + "epoch": 2.376925750028959, + "grad_norm": 1.4663885831832886, + "learning_rate": 4.1540857433362937e-05, + "loss": 0.2273, + "step": 61560 + }, + { + "epoch": 2.3773118653229854, + "grad_norm": 0.9895615577697754, + "learning_rate": 4.151511641376115e-05, + "loss": 0.1222, + "step": 61570 + }, + { + "epoch": 2.3776979806170124, + "grad_norm": 0.7987017631530762, + "learning_rate": 4.148937539415936e-05, + "loss": 0.2083, + "step": 61580 + }, + { + "epoch": 2.378084095911039, + "grad_norm": 0.662470817565918, + "learning_rate": 4.146363437455758e-05, + "loss": 0.1489, + "step": 61590 + }, + { + "epoch": 2.378470211205066, + "grad_norm": 0.703076183795929, + "learning_rate": 4.143789335495579e-05, + "loss": 0.2176, + "step": 61600 + }, + { + "epoch": 2.3788563264990925, + "grad_norm": 0.6900975704193115, + "learning_rate": 4.1412152335354e-05, + "loss": 0.0967, + "step": 61610 + }, + { + "epoch": 2.3792424417931195, + "grad_norm": 0.4006218910217285, + "learning_rate": 4.138641131575222e-05, + "loss": 0.1943, + "step": 61620 + }, + { + "epoch": 2.379628557087146, + "grad_norm": 0.8131549954414368, + "learning_rate": 4.1360670296150437e-05, 
+ "loss": 0.1458, + "step": 61630 + }, + { + "epoch": 2.380014672381173, + "grad_norm": 2.572120189666748, + "learning_rate": 4.133492927654864e-05, + "loss": 0.1892, + "step": 61640 + }, + { + "epoch": 2.3804007876751996, + "grad_norm": 1.40338134765625, + "learning_rate": 4.1309188256946856e-05, + "loss": 0.1081, + "step": 61650 + }, + { + "epoch": 2.3807869029692266, + "grad_norm": 2.1713101863861084, + "learning_rate": 4.1283447237345076e-05, + "loss": 0.1839, + "step": 61660 + }, + { + "epoch": 2.3811730182632536, + "grad_norm": 1.1773313283920288, + "learning_rate": 4.125770621774328e-05, + "loss": 0.1065, + "step": 61670 + }, + { + "epoch": 2.38155913355728, + "grad_norm": 2.314040184020996, + "learning_rate": 4.1231965198141496e-05, + "loss": 0.2298, + "step": 61680 + }, + { + "epoch": 2.381945248851307, + "grad_norm": 1.7708461284637451, + "learning_rate": 4.1206224178539716e-05, + "loss": 0.1068, + "step": 61690 + }, + { + "epoch": 2.3823313641453336, + "grad_norm": 0.168818861246109, + "learning_rate": 4.118048315893793e-05, + "loss": 0.0979, + "step": 61700 + }, + { + "epoch": 2.3827174794393606, + "grad_norm": 2.80302357673645, + "learning_rate": 4.1154742139336136e-05, + "loss": 0.3275, + "step": 61710 + }, + { + "epoch": 2.383103594733387, + "grad_norm": 0.5709852576255798, + "learning_rate": 4.1129001119734356e-05, + "loss": 0.1523, + "step": 61720 + }, + { + "epoch": 2.383489710027414, + "grad_norm": 0.4733193516731262, + "learning_rate": 4.110326010013257e-05, + "loss": 0.0958, + "step": 61730 + }, + { + "epoch": 2.383875825321441, + "grad_norm": 0.3468289375305176, + "learning_rate": 4.107751908053078e-05, + "loss": 0.1437, + "step": 61740 + }, + { + "epoch": 2.3842619406154677, + "grad_norm": 0.7683085203170776, + "learning_rate": 4.1051778060928996e-05, + "loss": 0.1915, + "step": 61750 + }, + { + "epoch": 2.3846480559094947, + "grad_norm": 0.9539376497268677, + "learning_rate": 4.102603704132721e-05, + "loss": 0.13, + "step": 61760 + }, + { + "epoch": 2.3850341712035212, + "grad_norm": 1.247579574584961, + "learning_rate": 4.100029602172542e-05, + "loss": 0.1212, + "step": 61770 + }, + { + "epoch": 2.3854202864975482, + "grad_norm": 0.5379541516304016, + "learning_rate": 4.0974555002123636e-05, + "loss": 0.4552, + "step": 61780 + }, + { + "epoch": 2.3858064017915748, + "grad_norm": 1.4555822610855103, + "learning_rate": 4.094881398252185e-05, + "loss": 0.1677, + "step": 61790 + }, + { + "epoch": 2.3861925170856018, + "grad_norm": 2.210245132446289, + "learning_rate": 4.092307296292006e-05, + "loss": 0.1099, + "step": 61800 + }, + { + "epoch": 2.3865786323796288, + "grad_norm": 0.2830033004283905, + "learning_rate": 4.0897331943318276e-05, + "loss": 0.0844, + "step": 61810 + }, + { + "epoch": 2.3869647476736553, + "grad_norm": 0.5923789739608765, + "learning_rate": 4.087159092371649e-05, + "loss": 0.0668, + "step": 61820 + }, + { + "epoch": 2.3873508629676823, + "grad_norm": 2.1576321125030518, + "learning_rate": 4.08458499041147e-05, + "loss": 0.2333, + "step": 61830 + }, + { + "epoch": 2.387736978261709, + "grad_norm": 0.09188230335712433, + "learning_rate": 4.082010888451292e-05, + "loss": 0.1849, + "step": 61840 + }, + { + "epoch": 2.388123093555736, + "grad_norm": 0.5188024640083313, + "learning_rate": 4.079436786491113e-05, + "loss": 0.0781, + "step": 61850 + }, + { + "epoch": 2.3885092088497624, + "grad_norm": 0.33355507254600525, + "learning_rate": 4.076862684530934e-05, + "loss": 0.1886, + "step": 61860 + }, + { + "epoch": 2.3888953241437894, + "grad_norm": 
0.25753054022789, + "learning_rate": 4.074288582570756e-05, + "loss": 0.1492, + "step": 61870 + }, + { + "epoch": 2.3892814394378163, + "grad_norm": 0.47389110922813416, + "learning_rate": 4.071714480610577e-05, + "loss": 0.1576, + "step": 61880 + }, + { + "epoch": 2.389667554731843, + "grad_norm": 0.6938667297363281, + "learning_rate": 4.069140378650398e-05, + "loss": 0.1554, + "step": 61890 + }, + { + "epoch": 2.39005367002587, + "grad_norm": 0.5270907282829285, + "learning_rate": 4.06656627669022e-05, + "loss": 0.0905, + "step": 61900 + }, + { + "epoch": 2.3904397853198964, + "grad_norm": 0.972940981388092, + "learning_rate": 4.0639921747300416e-05, + "loss": 0.1008, + "step": 61910 + }, + { + "epoch": 2.3908259006139234, + "grad_norm": 0.3339834213256836, + "learning_rate": 4.061418072769862e-05, + "loss": 0.124, + "step": 61920 + }, + { + "epoch": 2.39121201590795, + "grad_norm": 0.40384066104888916, + "learning_rate": 4.0588439708096836e-05, + "loss": 0.1318, + "step": 61930 + }, + { + "epoch": 2.391598131201977, + "grad_norm": 1.3399138450622559, + "learning_rate": 4.0562698688495056e-05, + "loss": 0.2285, + "step": 61940 + }, + { + "epoch": 2.391984246496004, + "grad_norm": 1.2441486120224, + "learning_rate": 4.053695766889327e-05, + "loss": 0.1842, + "step": 61950 + }, + { + "epoch": 2.3923703617900305, + "grad_norm": 0.18500332534313202, + "learning_rate": 4.0511216649291476e-05, + "loss": 0.124, + "step": 61960 + }, + { + "epoch": 2.3927564770840575, + "grad_norm": 4.335320949554443, + "learning_rate": 4.0485475629689696e-05, + "loss": 0.2101, + "step": 61970 + }, + { + "epoch": 2.393142592378084, + "grad_norm": 1.7215917110443115, + "learning_rate": 4.045973461008791e-05, + "loss": 0.1828, + "step": 61980 + }, + { + "epoch": 2.393528707672111, + "grad_norm": 1.3829667568206787, + "learning_rate": 4.0433993590486116e-05, + "loss": 0.1872, + "step": 61990 + }, + { + "epoch": 2.3939148229661376, + "grad_norm": 0.8047557473182678, + "learning_rate": 4.0408252570884336e-05, + "loss": 0.195, + "step": 62000 + }, + { + "epoch": 2.3943009382601645, + "grad_norm": 0.04885184019804001, + "learning_rate": 4.038251155128255e-05, + "loss": 0.1502, + "step": 62010 + }, + { + "epoch": 2.394687053554191, + "grad_norm": 2.9263839721679688, + "learning_rate": 4.035677053168076e-05, + "loss": 0.2479, + "step": 62020 + }, + { + "epoch": 2.395073168848218, + "grad_norm": 1.2394524812698364, + "learning_rate": 4.0331029512078976e-05, + "loss": 0.1221, + "step": 62030 + }, + { + "epoch": 2.3954592841422446, + "grad_norm": 1.1224110126495361, + "learning_rate": 4.030528849247719e-05, + "loss": 0.1172, + "step": 62040 + }, + { + "epoch": 2.3958453994362716, + "grad_norm": 1.0132677555084229, + "learning_rate": 4.02795474728754e-05, + "loss": 0.2585, + "step": 62050 + }, + { + "epoch": 2.3962315147302986, + "grad_norm": 1.7612736225128174, + "learning_rate": 4.0253806453273616e-05, + "loss": 0.1449, + "step": 62060 + }, + { + "epoch": 2.396617630024325, + "grad_norm": 2.687474012374878, + "learning_rate": 4.022806543367183e-05, + "loss": 0.2116, + "step": 62070 + }, + { + "epoch": 2.397003745318352, + "grad_norm": 0.9632325768470764, + "learning_rate": 4.020232441407004e-05, + "loss": 0.233, + "step": 62080 + }, + { + "epoch": 2.3973898606123787, + "grad_norm": 0.385966956615448, + "learning_rate": 4.017658339446826e-05, + "loss": 0.2557, + "step": 62090 + }, + { + "epoch": 2.3977759759064057, + "grad_norm": 0.18261398375034332, + "learning_rate": 4.015084237486647e-05, + "loss": 0.0792, + "step": 62100 
+ }, + { + "epoch": 2.3981620912004322, + "grad_norm": 0.07081570476293564, + "learning_rate": 4.012510135526468e-05, + "loss": 0.121, + "step": 62110 + }, + { + "epoch": 2.398548206494459, + "grad_norm": 1.5726689100265503, + "learning_rate": 4.00993603356629e-05, + "loss": 0.235, + "step": 62120 + }, + { + "epoch": 2.398934321788486, + "grad_norm": 1.1276930570602417, + "learning_rate": 4.007361931606111e-05, + "loss": 0.2191, + "step": 62130 + }, + { + "epoch": 2.3993204370825127, + "grad_norm": 1.8213441371917725, + "learning_rate": 4.004787829645932e-05, + "loss": 0.2588, + "step": 62140 + }, + { + "epoch": 2.3997065523765397, + "grad_norm": 1.2340245246887207, + "learning_rate": 4.002213727685754e-05, + "loss": 0.2816, + "step": 62150 + }, + { + "epoch": 2.4000926676705663, + "grad_norm": 1.6360499858856201, + "learning_rate": 3.9996396257255756e-05, + "loss": 0.1142, + "step": 62160 + }, + { + "epoch": 2.4004787829645933, + "grad_norm": 0.48215198516845703, + "learning_rate": 3.997065523765396e-05, + "loss": 0.1013, + "step": 62170 + }, + { + "epoch": 2.40086489825862, + "grad_norm": 0.04493289813399315, + "learning_rate": 3.9944914218052175e-05, + "loss": 0.2127, + "step": 62180 + }, + { + "epoch": 2.401251013552647, + "grad_norm": 0.3863857686519623, + "learning_rate": 3.9919173198450396e-05, + "loss": 0.1712, + "step": 62190 + }, + { + "epoch": 2.401637128846674, + "grad_norm": 2.209010362625122, + "learning_rate": 3.98934321788486e-05, + "loss": 0.1541, + "step": 62200 + }, + { + "epoch": 2.4020232441407003, + "grad_norm": 0.5304957032203674, + "learning_rate": 3.9867691159246815e-05, + "loss": 0.2147, + "step": 62210 + }, + { + "epoch": 2.4024093594347273, + "grad_norm": 0.660261332988739, + "learning_rate": 3.9841950139645035e-05, + "loss": 0.054, + "step": 62220 + }, + { + "epoch": 2.402795474728754, + "grad_norm": 0.3104497194290161, + "learning_rate": 3.981620912004325e-05, + "loss": 0.1164, + "step": 62230 + }, + { + "epoch": 2.403181590022781, + "grad_norm": 0.6475027799606323, + "learning_rate": 3.9790468100441455e-05, + "loss": 0.1258, + "step": 62240 + }, + { + "epoch": 2.4035677053168074, + "grad_norm": 0.16110478341579437, + "learning_rate": 3.9764727080839675e-05, + "loss": 0.1574, + "step": 62250 + }, + { + "epoch": 2.4039538206108344, + "grad_norm": 2.295118808746338, + "learning_rate": 3.973898606123789e-05, + "loss": 0.1732, + "step": 62260 + }, + { + "epoch": 2.4043399359048614, + "grad_norm": 1.4980134963989258, + "learning_rate": 3.97132450416361e-05, + "loss": 0.2441, + "step": 62270 + }, + { + "epoch": 2.404726051198888, + "grad_norm": 1.1637049913406372, + "learning_rate": 3.9687504022034315e-05, + "loss": 0.2177, + "step": 62280 + }, + { + "epoch": 2.405112166492915, + "grad_norm": 0.2586102783679962, + "learning_rate": 3.966176300243253e-05, + "loss": 0.1486, + "step": 62290 + }, + { + "epoch": 2.4054982817869415, + "grad_norm": 1.9430426359176636, + "learning_rate": 3.963602198283074e-05, + "loss": 0.1609, + "step": 62300 + }, + { + "epoch": 2.4058843970809685, + "grad_norm": 1.1216020584106445, + "learning_rate": 3.9610280963228955e-05, + "loss": 0.1192, + "step": 62310 + }, + { + "epoch": 2.406270512374995, + "grad_norm": 0.34984323382377625, + "learning_rate": 3.958453994362717e-05, + "loss": 0.104, + "step": 62320 + }, + { + "epoch": 2.406656627669022, + "grad_norm": 3.057056427001953, + "learning_rate": 3.955879892402538e-05, + "loss": 0.259, + "step": 62330 + }, + { + "epoch": 2.407042742963049, + "grad_norm": 1.8370370864868164, + 
"learning_rate": 3.9533057904423595e-05, + "loss": 0.2345, + "step": 62340 + }, + { + "epoch": 2.4074288582570755, + "grad_norm": 0.6045883297920227, + "learning_rate": 3.950731688482181e-05, + "loss": 0.188, + "step": 62350 + }, + { + "epoch": 2.4078149735511025, + "grad_norm": 0.20036596059799194, + "learning_rate": 3.948157586522002e-05, + "loss": 0.1039, + "step": 62360 + }, + { + "epoch": 2.408201088845129, + "grad_norm": 0.1816219538450241, + "learning_rate": 3.945583484561824e-05, + "loss": 0.1453, + "step": 62370 + }, + { + "epoch": 2.408587204139156, + "grad_norm": 2.150385856628418, + "learning_rate": 3.943009382601645e-05, + "loss": 0.1803, + "step": 62380 + }, + { + "epoch": 2.4089733194331826, + "grad_norm": 1.5039875507354736, + "learning_rate": 3.940435280641466e-05, + "loss": 0.1951, + "step": 62390 + }, + { + "epoch": 2.4093594347272096, + "grad_norm": 1.9124608039855957, + "learning_rate": 3.937861178681288e-05, + "loss": 0.1488, + "step": 62400 + }, + { + "epoch": 2.4097455500212366, + "grad_norm": 0.5086666345596313, + "learning_rate": 3.9352870767211095e-05, + "loss": 0.2198, + "step": 62410 + }, + { + "epoch": 2.410131665315263, + "grad_norm": 0.7198240160942078, + "learning_rate": 3.93271297476093e-05, + "loss": 0.104, + "step": 62420 + }, + { + "epoch": 2.41051778060929, + "grad_norm": 0.22373056411743164, + "learning_rate": 3.930138872800752e-05, + "loss": 0.1572, + "step": 62430 + }, + { + "epoch": 2.4109038959033167, + "grad_norm": 0.58324134349823, + "learning_rate": 3.9275647708405735e-05, + "loss": 0.1558, + "step": 62440 + }, + { + "epoch": 2.4112900111973437, + "grad_norm": 0.5554331541061401, + "learning_rate": 3.924990668880394e-05, + "loss": 0.1776, + "step": 62450 + }, + { + "epoch": 2.41167612649137, + "grad_norm": 0.4414098262786865, + "learning_rate": 3.9224165669202155e-05, + "loss": 0.1029, + "step": 62460 + }, + { + "epoch": 2.412062241785397, + "grad_norm": 1.496374487876892, + "learning_rate": 3.9198424649600375e-05, + "loss": 0.1948, + "step": 62470 + }, + { + "epoch": 2.4124483570794237, + "grad_norm": 1.6268385648727417, + "learning_rate": 3.917268362999859e-05, + "loss": 0.1853, + "step": 62480 + }, + { + "epoch": 2.4128344723734507, + "grad_norm": 2.2693099975585938, + "learning_rate": 3.9146942610396795e-05, + "loss": 0.2076, + "step": 62490 + }, + { + "epoch": 2.4132205876674773, + "grad_norm": 1.1219795942306519, + "learning_rate": 3.9121201590795015e-05, + "loss": 0.1186, + "step": 62500 + }, + { + "epoch": 2.4136067029615043, + "grad_norm": 0.7887373566627502, + "learning_rate": 3.909546057119323e-05, + "loss": 0.1996, + "step": 62510 + }, + { + "epoch": 2.4139928182555312, + "grad_norm": 0.256099671125412, + "learning_rate": 3.906971955159144e-05, + "loss": 0.0926, + "step": 62520 + }, + { + "epoch": 2.414378933549558, + "grad_norm": 0.6383737921714783, + "learning_rate": 3.9043978531989655e-05, + "loss": 0.1383, + "step": 62530 + }, + { + "epoch": 2.414765048843585, + "grad_norm": 1.5678856372833252, + "learning_rate": 3.901823751238787e-05, + "loss": 0.1486, + "step": 62540 + }, + { + "epoch": 2.4151511641376113, + "grad_norm": 0.11567826569080353, + "learning_rate": 3.899249649278608e-05, + "loss": 0.1031, + "step": 62550 + }, + { + "epoch": 2.4155372794316383, + "grad_norm": 1.3567986488342285, + "learning_rate": 3.8966755473184295e-05, + "loss": 0.1964, + "step": 62560 + }, + { + "epoch": 2.415923394725665, + "grad_norm": 0.1885988563299179, + "learning_rate": 3.894101445358251e-05, + "loss": 0.0998, + "step": 62570 + }, + { + 
"epoch": 2.416309510019692, + "grad_norm": 0.7068611979484558, + "learning_rate": 3.891527343398072e-05, + "loss": 0.161, + "step": 62580 + }, + { + "epoch": 2.416695625313719, + "grad_norm": 0.34557539224624634, + "learning_rate": 3.8889532414378935e-05, + "loss": 0.143, + "step": 62590 + }, + { + "epoch": 2.4170817406077454, + "grad_norm": 1.6846903562545776, + "learning_rate": 3.886379139477715e-05, + "loss": 0.189, + "step": 62600 + }, + { + "epoch": 2.4174678559017724, + "grad_norm": 0.9236536026000977, + "learning_rate": 3.883805037517536e-05, + "loss": 0.2385, + "step": 62610 + }, + { + "epoch": 2.417853971195799, + "grad_norm": 2.0287728309631348, + "learning_rate": 3.881230935557358e-05, + "loss": 0.3649, + "step": 62620 + }, + { + "epoch": 2.418240086489826, + "grad_norm": 1.3650734424591064, + "learning_rate": 3.878656833597179e-05, + "loss": 0.1766, + "step": 62630 + }, + { + "epoch": 2.4186262017838525, + "grad_norm": 0.44368478655815125, + "learning_rate": 3.876082731637e-05, + "loss": 0.1456, + "step": 62640 + }, + { + "epoch": 2.4190123170778794, + "grad_norm": 1.93278169631958, + "learning_rate": 3.873508629676822e-05, + "loss": 0.1568, + "step": 62650 + }, + { + "epoch": 2.4193984323719064, + "grad_norm": 0.22828684747219086, + "learning_rate": 3.870934527716643e-05, + "loss": 0.1003, + "step": 62660 + }, + { + "epoch": 2.419784547665933, + "grad_norm": 0.796909749507904, + "learning_rate": 3.868360425756464e-05, + "loss": 0.0777, + "step": 62670 + }, + { + "epoch": 2.42017066295996, + "grad_norm": 0.8624141812324524, + "learning_rate": 3.865786323796286e-05, + "loss": 0.2598, + "step": 62680 + }, + { + "epoch": 2.4205567782539865, + "grad_norm": 1.9208048582077026, + "learning_rate": 3.8632122218361075e-05, + "loss": 0.3543, + "step": 62690 + }, + { + "epoch": 2.4209428935480135, + "grad_norm": 0.8410032987594604, + "learning_rate": 3.860638119875928e-05, + "loss": 0.2511, + "step": 62700 + }, + { + "epoch": 2.42132900884204, + "grad_norm": 0.2925935387611389, + "learning_rate": 3.8580640179157494e-05, + "loss": 0.1085, + "step": 62710 + }, + { + "epoch": 2.421715124136067, + "grad_norm": 2.3135852813720703, + "learning_rate": 3.8554899159555715e-05, + "loss": 0.235, + "step": 62720 + }, + { + "epoch": 2.422101239430094, + "grad_norm": 1.9191985130310059, + "learning_rate": 3.852915813995393e-05, + "loss": 0.1893, + "step": 62730 + }, + { + "epoch": 2.4224873547241206, + "grad_norm": 1.2339379787445068, + "learning_rate": 3.8503417120352134e-05, + "loss": 0.1778, + "step": 62740 + }, + { + "epoch": 2.4228734700181476, + "grad_norm": 0.09992465376853943, + "learning_rate": 3.8477676100750355e-05, + "loss": 0.1301, + "step": 62750 + }, + { + "epoch": 2.423259585312174, + "grad_norm": 0.13811521232128143, + "learning_rate": 3.845193508114857e-05, + "loss": 0.1062, + "step": 62760 + }, + { + "epoch": 2.423645700606201, + "grad_norm": 2.9451167583465576, + "learning_rate": 3.8426194061546774e-05, + "loss": 0.086, + "step": 62770 + }, + { + "epoch": 2.4240318159002276, + "grad_norm": 0.23093783855438232, + "learning_rate": 3.8400453041944994e-05, + "loss": 0.0962, + "step": 62780 + }, + { + "epoch": 2.4244179311942546, + "grad_norm": 0.10238637775182724, + "learning_rate": 3.837471202234321e-05, + "loss": 0.1338, + "step": 62790 + }, + { + "epoch": 2.4248040464882816, + "grad_norm": 0.4013136029243469, + "learning_rate": 3.834897100274142e-05, + "loss": 0.3002, + "step": 62800 + }, + { + "epoch": 2.425190161782308, + "grad_norm": 0.3969825804233551, + "learning_rate": 
3.8323229983139634e-05, + "loss": 0.2349, + "step": 62810 + }, + { + "epoch": 2.425576277076335, + "grad_norm": 0.9879517555236816, + "learning_rate": 3.829748896353785e-05, + "loss": 0.2564, + "step": 62820 + }, + { + "epoch": 2.4259623923703617, + "grad_norm": 1.5865511894226074, + "learning_rate": 3.827174794393606e-05, + "loss": 0.19, + "step": 62830 + }, + { + "epoch": 2.4263485076643887, + "grad_norm": 2.8750438690185547, + "learning_rate": 3.8246006924334274e-05, + "loss": 0.1515, + "step": 62840 + }, + { + "epoch": 2.4267346229584152, + "grad_norm": 0.9555363059043884, + "learning_rate": 3.822026590473249e-05, + "loss": 0.137, + "step": 62850 + }, + { + "epoch": 2.4271207382524422, + "grad_norm": 0.10716754198074341, + "learning_rate": 3.81945248851307e-05, + "loss": 0.2019, + "step": 62860 + }, + { + "epoch": 2.4275068535464692, + "grad_norm": 2.1091630458831787, + "learning_rate": 3.8168783865528914e-05, + "loss": 0.2126, + "step": 62870 + }, + { + "epoch": 2.4278929688404958, + "grad_norm": 2.2296948432922363, + "learning_rate": 3.814304284592713e-05, + "loss": 0.2098, + "step": 62880 + }, + { + "epoch": 2.4282790841345228, + "grad_norm": 1.1145920753479004, + "learning_rate": 3.811730182632534e-05, + "loss": 0.0998, + "step": 62890 + }, + { + "epoch": 2.4286651994285493, + "grad_norm": 0.6394558548927307, + "learning_rate": 3.809156080672356e-05, + "loss": 0.1766, + "step": 62900 + }, + { + "epoch": 2.4290513147225763, + "grad_norm": 0.16177436709403992, + "learning_rate": 3.806581978712177e-05, + "loss": 0.1776, + "step": 62910 + }, + { + "epoch": 2.429437430016603, + "grad_norm": 1.643153190612793, + "learning_rate": 3.804007876751998e-05, + "loss": 0.1908, + "step": 62920 + }, + { + "epoch": 2.42982354531063, + "grad_norm": 3.011587619781494, + "learning_rate": 3.80143377479182e-05, + "loss": 0.1595, + "step": 62930 + }, + { + "epoch": 2.4302096606046564, + "grad_norm": 1.1857898235321045, + "learning_rate": 3.7988596728316414e-05, + "loss": 0.1117, + "step": 62940 + }, + { + "epoch": 2.4305957758986834, + "grad_norm": 0.984836995601654, + "learning_rate": 3.796285570871462e-05, + "loss": 0.0888, + "step": 62950 + }, + { + "epoch": 2.43098189119271, + "grad_norm": 0.2273918092250824, + "learning_rate": 3.7937114689112834e-05, + "loss": 0.0998, + "step": 62960 + }, + { + "epoch": 2.431368006486737, + "grad_norm": 0.6913338303565979, + "learning_rate": 3.7911373669511054e-05, + "loss": 0.1549, + "step": 62970 + }, + { + "epoch": 2.431754121780764, + "grad_norm": 0.2670879364013672, + "learning_rate": 3.788563264990926e-05, + "loss": 0.1931, + "step": 62980 + }, + { + "epoch": 2.4321402370747904, + "grad_norm": 0.3435567021369934, + "learning_rate": 3.7859891630307474e-05, + "loss": 0.1872, + "step": 62990 + }, + { + "epoch": 2.4325263523688174, + "grad_norm": 2.289534091949463, + "learning_rate": 3.7834150610705694e-05, + "loss": 0.1371, + "step": 63000 + }, + { + "epoch": 2.432912467662844, + "grad_norm": 0.5831142067909241, + "learning_rate": 3.780840959110391e-05, + "loss": 0.1779, + "step": 63010 + }, + { + "epoch": 2.433298582956871, + "grad_norm": 0.4865301847457886, + "learning_rate": 3.7782668571502114e-05, + "loss": 0.2069, + "step": 63020 + }, + { + "epoch": 2.4336846982508975, + "grad_norm": 0.9294113516807556, + "learning_rate": 3.7756927551900334e-05, + "loss": 0.2059, + "step": 63030 + }, + { + "epoch": 2.4340708135449245, + "grad_norm": 1.044704794883728, + "learning_rate": 3.773118653229855e-05, + "loss": 0.2075, + "step": 63040 + }, + { + "epoch": 
2.4344569288389515, + "grad_norm": 0.07156316190958023, + "learning_rate": 3.770544551269676e-05, + "loss": 0.0827, + "step": 63050 + }, + { + "epoch": 2.434843044132978, + "grad_norm": 1.0131940841674805, + "learning_rate": 3.7679704493094974e-05, + "loss": 0.0786, + "step": 63060 + }, + { + "epoch": 2.435229159427005, + "grad_norm": 0.8649851679801941, + "learning_rate": 3.765396347349319e-05, + "loss": 0.1476, + "step": 63070 + }, + { + "epoch": 2.4356152747210316, + "grad_norm": 4.939096927642822, + "learning_rate": 3.76282224538914e-05, + "loss": 0.2188, + "step": 63080 + }, + { + "epoch": 2.4360013900150586, + "grad_norm": 0.4139706790447235, + "learning_rate": 3.7602481434289614e-05, + "loss": 0.281, + "step": 63090 + }, + { + "epoch": 2.436387505309085, + "grad_norm": 0.0399312861263752, + "learning_rate": 3.757674041468783e-05, + "loss": 0.3068, + "step": 63100 + }, + { + "epoch": 2.436773620603112, + "grad_norm": 0.7237934470176697, + "learning_rate": 3.755099939508604e-05, + "loss": 0.2222, + "step": 63110 + }, + { + "epoch": 2.437159735897139, + "grad_norm": 0.16770142316818237, + "learning_rate": 3.7525258375484254e-05, + "loss": 0.1302, + "step": 63120 + }, + { + "epoch": 2.4375458511911656, + "grad_norm": 1.9887669086456299, + "learning_rate": 3.749951735588247e-05, + "loss": 0.0794, + "step": 63130 + }, + { + "epoch": 2.4379319664851926, + "grad_norm": 2.856318712234497, + "learning_rate": 3.747377633628068e-05, + "loss": 0.0996, + "step": 63140 + }, + { + "epoch": 2.438318081779219, + "grad_norm": 0.9471076130867004, + "learning_rate": 3.74480353166789e-05, + "loss": 0.1915, + "step": 63150 + }, + { + "epoch": 2.438704197073246, + "grad_norm": 1.4671097993850708, + "learning_rate": 3.742229429707711e-05, + "loss": 0.1851, + "step": 63160 + }, + { + "epoch": 2.4390903123672727, + "grad_norm": 0.3654942810535431, + "learning_rate": 3.739655327747532e-05, + "loss": 0.1091, + "step": 63170 + }, + { + "epoch": 2.4394764276612997, + "grad_norm": 0.8947110176086426, + "learning_rate": 3.737081225787354e-05, + "loss": 0.1741, + "step": 63180 + }, + { + "epoch": 2.4398625429553267, + "grad_norm": 1.3367135524749756, + "learning_rate": 3.7345071238271754e-05, + "loss": 0.2007, + "step": 63190 + }, + { + "epoch": 2.440248658249353, + "grad_norm": 2.484145164489746, + "learning_rate": 3.731933021866996e-05, + "loss": 0.1794, + "step": 63200 + }, + { + "epoch": 2.44063477354338, + "grad_norm": 1.0260628461837769, + "learning_rate": 3.729358919906818e-05, + "loss": 0.3053, + "step": 63210 + }, + { + "epoch": 2.4410208888374068, + "grad_norm": 1.0322519540786743, + "learning_rate": 3.7267848179466394e-05, + "loss": 0.1826, + "step": 63220 + }, + { + "epoch": 2.4414070041314337, + "grad_norm": 0.6321549415588379, + "learning_rate": 3.72421071598646e-05, + "loss": 0.1561, + "step": 63230 + }, + { + "epoch": 2.4417931194254603, + "grad_norm": 1.2646952867507935, + "learning_rate": 3.7216366140262814e-05, + "loss": 0.0833, + "step": 63240 + }, + { + "epoch": 2.4421792347194873, + "grad_norm": 1.1415789127349854, + "learning_rate": 3.7190625120661034e-05, + "loss": 0.3039, + "step": 63250 + }, + { + "epoch": 2.4425653500135143, + "grad_norm": 1.143133521080017, + "learning_rate": 3.716488410105925e-05, + "loss": 0.1585, + "step": 63260 + }, + { + "epoch": 2.442951465307541, + "grad_norm": 0.32217004895210266, + "learning_rate": 3.7139143081457453e-05, + "loss": 0.1776, + "step": 63270 + }, + { + "epoch": 2.443337580601568, + "grad_norm": 0.04204453527927399, + "learning_rate": 
3.7113402061855674e-05, + "loss": 0.1966, + "step": 63280 + }, + { + "epoch": 2.4437236958955943, + "grad_norm": 3.0139830112457275, + "learning_rate": 3.708766104225389e-05, + "loss": 0.2323, + "step": 63290 + }, + { + "epoch": 2.4441098111896213, + "grad_norm": 0.6170455813407898, + "learning_rate": 3.70619200226521e-05, + "loss": 0.18, + "step": 63300 + }, + { + "epoch": 2.444495926483648, + "grad_norm": 0.08699564635753632, + "learning_rate": 3.7036179003050313e-05, + "loss": 0.2548, + "step": 63310 + }, + { + "epoch": 2.444882041777675, + "grad_norm": 0.18672427535057068, + "learning_rate": 3.701043798344853e-05, + "loss": 0.2081, + "step": 63320 + }, + { + "epoch": 2.4452681570717014, + "grad_norm": 0.32661938667297363, + "learning_rate": 3.698469696384674e-05, + "loss": 0.1391, + "step": 63330 + }, + { + "epoch": 2.4456542723657284, + "grad_norm": 0.521452009677887, + "learning_rate": 3.6958955944244953e-05, + "loss": 0.1688, + "step": 63340 + }, + { + "epoch": 2.446040387659755, + "grad_norm": 1.4832172393798828, + "learning_rate": 3.693321492464317e-05, + "loss": 0.1051, + "step": 63350 + }, + { + "epoch": 2.446426502953782, + "grad_norm": 0.4200538694858551, + "learning_rate": 3.690747390504138e-05, + "loss": 0.1822, + "step": 63360 + }, + { + "epoch": 2.446812618247809, + "grad_norm": 0.48908549547195435, + "learning_rate": 3.688173288543959e-05, + "loss": 0.0922, + "step": 63370 + }, + { + "epoch": 2.4471987335418355, + "grad_norm": 1.988203525543213, + "learning_rate": 3.685599186583781e-05, + "loss": 0.1412, + "step": 63380 + }, + { + "epoch": 2.4475848488358625, + "grad_norm": 0.8950991630554199, + "learning_rate": 3.683025084623602e-05, + "loss": 0.2411, + "step": 63390 + }, + { + "epoch": 2.447970964129889, + "grad_norm": 1.0535110235214233, + "learning_rate": 3.680450982663424e-05, + "loss": 0.1539, + "step": 63400 + }, + { + "epoch": 2.448357079423916, + "grad_norm": 1.7036796808242798, + "learning_rate": 3.6778768807032447e-05, + "loss": 0.2954, + "step": 63410 + }, + { + "epoch": 2.4487431947179426, + "grad_norm": 0.32762983441352844, + "learning_rate": 3.675302778743066e-05, + "loss": 0.1721, + "step": 63420 + }, + { + "epoch": 2.4491293100119695, + "grad_norm": 3.3198564052581787, + "learning_rate": 3.672728676782888e-05, + "loss": 0.3087, + "step": 63430 + }, + { + "epoch": 2.4495154253059965, + "grad_norm": 2.290881872177124, + "learning_rate": 3.6701545748227087e-05, + "loss": 0.0957, + "step": 63440 + }, + { + "epoch": 2.449901540600023, + "grad_norm": 1.985274076461792, + "learning_rate": 3.66758047286253e-05, + "loss": 0.146, + "step": 63450 + }, + { + "epoch": 2.45028765589405, + "grad_norm": 1.5870091915130615, + "learning_rate": 3.665006370902352e-05, + "loss": 0.179, + "step": 63460 + }, + { + "epoch": 2.4506737711880766, + "grad_norm": 0.29243603348731995, + "learning_rate": 3.662432268942173e-05, + "loss": 0.2421, + "step": 63470 + }, + { + "epoch": 2.4510598864821036, + "grad_norm": 1.050746202468872, + "learning_rate": 3.659858166981994e-05, + "loss": 0.1841, + "step": 63480 + }, + { + "epoch": 2.45144600177613, + "grad_norm": 0.9252954721450806, + "learning_rate": 3.657284065021815e-05, + "loss": 0.294, + "step": 63490 + }, + { + "epoch": 2.451832117070157, + "grad_norm": 1.6930452585220337, + "learning_rate": 3.654709963061637e-05, + "loss": 0.083, + "step": 63500 + }, + { + "epoch": 2.452218232364184, + "grad_norm": 1.2885856628417969, + "learning_rate": 3.6521358611014586e-05, + "loss": 0.1526, + "step": 63510 + }, + { + "epoch": 2.4526043476582107, 
+ "grad_norm": 0.19372563064098358, + "learning_rate": 3.649561759141279e-05, + "loss": 0.1417, + "step": 63520 + }, + { + "epoch": 2.4529904629522377, + "grad_norm": 0.9957149624824524, + "learning_rate": 3.646987657181101e-05, + "loss": 0.2321, + "step": 63530 + }, + { + "epoch": 2.453376578246264, + "grad_norm": 0.8055297136306763, + "learning_rate": 3.6444135552209226e-05, + "loss": 0.178, + "step": 63540 + }, + { + "epoch": 2.453762693540291, + "grad_norm": 0.37042102217674255, + "learning_rate": 3.641839453260743e-05, + "loss": 0.1533, + "step": 63550 + }, + { + "epoch": 2.4541488088343177, + "grad_norm": 0.09681963920593262, + "learning_rate": 3.639265351300565e-05, + "loss": 0.0807, + "step": 63560 + }, + { + "epoch": 2.4545349241283447, + "grad_norm": 0.17449086904525757, + "learning_rate": 3.6366912493403866e-05, + "loss": 0.0819, + "step": 63570 + }, + { + "epoch": 2.4549210394223717, + "grad_norm": 0.9616051316261292, + "learning_rate": 3.634117147380208e-05, + "loss": 0.0952, + "step": 63580 + }, + { + "epoch": 2.4553071547163983, + "grad_norm": 3.450044870376587, + "learning_rate": 3.631543045420029e-05, + "loss": 0.247, + "step": 63590 + }, + { + "epoch": 2.4556932700104253, + "grad_norm": 0.5883270502090454, + "learning_rate": 3.6289689434598506e-05, + "loss": 0.105, + "step": 63600 + }, + { + "epoch": 2.456079385304452, + "grad_norm": 1.2351908683776855, + "learning_rate": 3.626394841499672e-05, + "loss": 0.1978, + "step": 63610 + }, + { + "epoch": 2.456465500598479, + "grad_norm": 0.08615940809249878, + "learning_rate": 3.623820739539493e-05, + "loss": 0.0366, + "step": 63620 + }, + { + "epoch": 2.4568516158925053, + "grad_norm": 0.758092999458313, + "learning_rate": 3.6212466375793146e-05, + "loss": 0.0538, + "step": 63630 + }, + { + "epoch": 2.4572377311865323, + "grad_norm": 0.2542964220046997, + "learning_rate": 3.618672535619136e-05, + "loss": 0.217, + "step": 63640 + }, + { + "epoch": 2.4576238464805593, + "grad_norm": 1.8404854536056519, + "learning_rate": 3.616098433658957e-05, + "loss": 0.1246, + "step": 63650 + }, + { + "epoch": 2.458009961774586, + "grad_norm": 0.06438548862934113, + "learning_rate": 3.6135243316987786e-05, + "loss": 0.1816, + "step": 63660 + }, + { + "epoch": 2.458396077068613, + "grad_norm": 1.4888163805007935, + "learning_rate": 3.6109502297386e-05, + "loss": 0.1496, + "step": 63670 + }, + { + "epoch": 2.4587821923626394, + "grad_norm": 0.06935808062553406, + "learning_rate": 3.608376127778422e-05, + "loss": 0.1095, + "step": 63680 + }, + { + "epoch": 2.4591683076566664, + "grad_norm": 0.9536172151565552, + "learning_rate": 3.6058020258182426e-05, + "loss": 0.1572, + "step": 63690 + }, + { + "epoch": 2.459554422950693, + "grad_norm": 3.4437315464019775, + "learning_rate": 3.603227923858064e-05, + "loss": 0.2611, + "step": 63700 + }, + { + "epoch": 2.45994053824472, + "grad_norm": 2.3220934867858887, + "learning_rate": 3.600653821897886e-05, + "loss": 0.2999, + "step": 63710 + }, + { + "epoch": 2.460326653538747, + "grad_norm": 1.1044209003448486, + "learning_rate": 3.598079719937707e-05, + "loss": 0.0936, + "step": 63720 + }, + { + "epoch": 2.4607127688327735, + "grad_norm": 2.439162015914917, + "learning_rate": 3.595505617977528e-05, + "loss": 0.2898, + "step": 63730 + }, + { + "epoch": 2.4610988841268004, + "grad_norm": 0.9153732061386108, + "learning_rate": 3.59293151601735e-05, + "loss": 0.0976, + "step": 63740 + }, + { + "epoch": 2.461484999420827, + "grad_norm": 0.019965412095189095, + "learning_rate": 3.590357414057171e-05, + "loss": 
0.1132, + "step": 63750 + }, + { + "epoch": 2.461871114714854, + "grad_norm": 1.2829655408859253, + "learning_rate": 3.587783312096992e-05, + "loss": 0.109, + "step": 63760 + }, + { + "epoch": 2.4622572300088805, + "grad_norm": 1.2905359268188477, + "learning_rate": 3.585209210136813e-05, + "loss": 0.288, + "step": 63770 + }, + { + "epoch": 2.4626433453029075, + "grad_norm": 0.6175247430801392, + "learning_rate": 3.582635108176635e-05, + "loss": 0.1877, + "step": 63780 + }, + { + "epoch": 2.463029460596934, + "grad_norm": 0.15092957019805908, + "learning_rate": 3.5800610062164566e-05, + "loss": 0.1871, + "step": 63790 + }, + { + "epoch": 2.463415575890961, + "grad_norm": 0.4543861746788025, + "learning_rate": 3.577486904256277e-05, + "loss": 0.1566, + "step": 63800 + }, + { + "epoch": 2.4638016911849876, + "grad_norm": 2.4032862186431885, + "learning_rate": 3.574912802296099e-05, + "loss": 0.338, + "step": 63810 + }, + { + "epoch": 2.4641878064790146, + "grad_norm": 0.4644568860530853, + "learning_rate": 3.5723387003359206e-05, + "loss": 0.4347, + "step": 63820 + }, + { + "epoch": 2.4645739217730416, + "grad_norm": 2.190516233444214, + "learning_rate": 3.569764598375742e-05, + "loss": 0.1055, + "step": 63830 + }, + { + "epoch": 2.464960037067068, + "grad_norm": 0.18661876022815704, + "learning_rate": 3.567190496415563e-05, + "loss": 0.4476, + "step": 63840 + }, + { + "epoch": 2.465346152361095, + "grad_norm": 0.7265095114707947, + "learning_rate": 3.5646163944553846e-05, + "loss": 0.1121, + "step": 63850 + }, + { + "epoch": 2.4657322676551217, + "grad_norm": 0.2888505160808563, + "learning_rate": 3.562042292495206e-05, + "loss": 0.1208, + "step": 63860 + }, + { + "epoch": 2.4661183829491486, + "grad_norm": 0.9269798398017883, + "learning_rate": 3.559468190535027e-05, + "loss": 0.1376, + "step": 63870 + }, + { + "epoch": 2.466504498243175, + "grad_norm": 0.048346057534217834, + "learning_rate": 3.5568940885748486e-05, + "loss": 0.3057, + "step": 63880 + }, + { + "epoch": 2.466890613537202, + "grad_norm": 1.5524492263793945, + "learning_rate": 3.55431998661467e-05, + "loss": 0.077, + "step": 63890 + }, + { + "epoch": 2.467276728831229, + "grad_norm": 0.7234669327735901, + "learning_rate": 3.551745884654491e-05, + "loss": 0.1701, + "step": 63900 + }, + { + "epoch": 2.4676628441252557, + "grad_norm": 0.9384757280349731, + "learning_rate": 3.5491717826943126e-05, + "loss": 0.1085, + "step": 63910 + }, + { + "epoch": 2.4680489594192827, + "grad_norm": 1.6186715364456177, + "learning_rate": 3.546597680734134e-05, + "loss": 0.1556, + "step": 63920 + }, + { + "epoch": 2.4684350747133093, + "grad_norm": 1.2746638059616089, + "learning_rate": 3.544023578773956e-05, + "loss": 0.2306, + "step": 63930 + }, + { + "epoch": 2.4688211900073362, + "grad_norm": 2.7122621536254883, + "learning_rate": 3.5414494768137766e-05, + "loss": 0.1482, + "step": 63940 + }, + { + "epoch": 2.469207305301363, + "grad_norm": 1.9252265691757202, + "learning_rate": 3.538875374853598e-05, + "loss": 0.2278, + "step": 63950 + }, + { + "epoch": 2.4695934205953898, + "grad_norm": 1.1436023712158203, + "learning_rate": 3.53630127289342e-05, + "loss": 0.1518, + "step": 63960 + }, + { + "epoch": 2.4699795358894168, + "grad_norm": 0.40255314111709595, + "learning_rate": 3.533727170933241e-05, + "loss": 0.067, + "step": 63970 + }, + { + "epoch": 2.4703656511834433, + "grad_norm": 0.5762872695922852, + "learning_rate": 3.531153068973062e-05, + "loss": 0.2584, + "step": 63980 + }, + { + "epoch": 2.4707517664774703, + "grad_norm": 
0.977530837059021, + "learning_rate": 3.528578967012884e-05, + "loss": 0.113, + "step": 63990 + }, + { + "epoch": 2.471137881771497, + "grad_norm": 0.5977995991706848, + "learning_rate": 3.526004865052705e-05, + "loss": 0.2179, + "step": 64000 + }, + { + "epoch": 2.471523997065524, + "grad_norm": 0.33924487233161926, + "learning_rate": 3.523430763092526e-05, + "loss": 0.1102, + "step": 64010 + }, + { + "epoch": 2.4719101123595504, + "grad_norm": 1.6377263069152832, + "learning_rate": 3.520856661132347e-05, + "loss": 0.1848, + "step": 64020 + }, + { + "epoch": 2.4722962276535774, + "grad_norm": 0.3962012827396393, + "learning_rate": 3.518282559172169e-05, + "loss": 0.2144, + "step": 64030 + }, + { + "epoch": 2.4726823429476044, + "grad_norm": 2.4445207118988037, + "learning_rate": 3.5157084572119906e-05, + "loss": 0.0798, + "step": 64040 + }, + { + "epoch": 2.473068458241631, + "grad_norm": 1.1374174356460571, + "learning_rate": 3.513134355251811e-05, + "loss": 0.0999, + "step": 64050 + }, + { + "epoch": 2.473454573535658, + "grad_norm": 1.128503680229187, + "learning_rate": 3.510560253291633e-05, + "loss": 0.0811, + "step": 64060 + }, + { + "epoch": 2.4738406888296844, + "grad_norm": 0.7874391078948975, + "learning_rate": 3.5079861513314545e-05, + "loss": 0.1042, + "step": 64070 + }, + { + "epoch": 2.4742268041237114, + "grad_norm": 0.36891883611679077, + "learning_rate": 3.505412049371276e-05, + "loss": 0.2693, + "step": 64080 + }, + { + "epoch": 2.474612919417738, + "grad_norm": 1.4663618803024292, + "learning_rate": 3.502837947411097e-05, + "loss": 0.1469, + "step": 64090 + }, + { + "epoch": 2.474999034711765, + "grad_norm": 1.6910697221755981, + "learning_rate": 3.5002638454509185e-05, + "loss": 0.1506, + "step": 64100 + }, + { + "epoch": 2.475385150005792, + "grad_norm": 2.6914491653442383, + "learning_rate": 3.49768974349074e-05, + "loss": 0.1182, + "step": 64110 + }, + { + "epoch": 2.4757712652998185, + "grad_norm": 0.7562038898468018, + "learning_rate": 3.495115641530561e-05, + "loss": 0.1935, + "step": 64120 + }, + { + "epoch": 2.4761573805938455, + "grad_norm": 0.9019849300384521, + "learning_rate": 3.4925415395703825e-05, + "loss": 0.1456, + "step": 64130 + }, + { + "epoch": 2.476543495887872, + "grad_norm": 1.4131752252578735, + "learning_rate": 3.489967437610204e-05, + "loss": 0.1502, + "step": 64140 + }, + { + "epoch": 2.476929611181899, + "grad_norm": 1.1619501113891602, + "learning_rate": 3.487393335650025e-05, + "loss": 0.1559, + "step": 64150 + }, + { + "epoch": 2.4773157264759256, + "grad_norm": 0.38123640418052673, + "learning_rate": 3.4848192336898465e-05, + "loss": 0.1273, + "step": 64160 + }, + { + "epoch": 2.4777018417699526, + "grad_norm": 2.8837268352508545, + "learning_rate": 3.482245131729668e-05, + "loss": 0.1264, + "step": 64170 + }, + { + "epoch": 2.4780879570639796, + "grad_norm": 4.011549949645996, + "learning_rate": 3.47967102976949e-05, + "loss": 0.3243, + "step": 64180 + }, + { + "epoch": 2.478474072358006, + "grad_norm": 1.187883973121643, + "learning_rate": 3.4770969278093105e-05, + "loss": 0.1948, + "step": 64190 + }, + { + "epoch": 2.478860187652033, + "grad_norm": 1.1129032373428345, + "learning_rate": 3.474522825849132e-05, + "loss": 0.1226, + "step": 64200 + }, + { + "epoch": 2.4792463029460596, + "grad_norm": 0.3412770926952362, + "learning_rate": 3.471948723888954e-05, + "loss": 0.1282, + "step": 64210 + }, + { + "epoch": 2.4796324182400866, + "grad_norm": 1.1567891836166382, + "learning_rate": 3.4693746219287745e-05, + "loss": 0.2034, + "step": 
64220 + }, + { + "epoch": 2.480018533534113, + "grad_norm": 0.05797566846013069, + "learning_rate": 3.466800519968596e-05, + "loss": 0.1438, + "step": 64230 + }, + { + "epoch": 2.48040464882814, + "grad_norm": 0.8097573518753052, + "learning_rate": 3.464226418008418e-05, + "loss": 0.2128, + "step": 64240 + }, + { + "epoch": 2.4807907641221667, + "grad_norm": 1.383527398109436, + "learning_rate": 3.461652316048239e-05, + "loss": 0.2379, + "step": 64250 + }, + { + "epoch": 2.4811768794161937, + "grad_norm": 1.1194959878921509, + "learning_rate": 3.45907821408806e-05, + "loss": 0.3002, + "step": 64260 + }, + { + "epoch": 2.4815629947102202, + "grad_norm": 2.7457661628723145, + "learning_rate": 3.456504112127881e-05, + "loss": 0.2035, + "step": 64270 + }, + { + "epoch": 2.4819491100042472, + "grad_norm": 1.852704405784607, + "learning_rate": 3.453930010167703e-05, + "loss": 0.1338, + "step": 64280 + }, + { + "epoch": 2.482335225298274, + "grad_norm": 1.2378255128860474, + "learning_rate": 3.4513559082075245e-05, + "loss": 0.1083, + "step": 64290 + }, + { + "epoch": 2.4827213405923008, + "grad_norm": 1.2217592000961304, + "learning_rate": 3.448781806247345e-05, + "loss": 0.2626, + "step": 64300 + }, + { + "epoch": 2.4831074558863278, + "grad_norm": 0.8216773271560669, + "learning_rate": 3.446207704287167e-05, + "loss": 0.1455, + "step": 64310 + }, + { + "epoch": 2.4834935711803543, + "grad_norm": 1.122636318206787, + "learning_rate": 3.4436336023269885e-05, + "loss": 0.2029, + "step": 64320 + }, + { + "epoch": 2.4838796864743813, + "grad_norm": 0.4117072820663452, + "learning_rate": 3.441059500366809e-05, + "loss": 0.2123, + "step": 64330 + }, + { + "epoch": 2.484265801768408, + "grad_norm": 0.04222499951720238, + "learning_rate": 3.438485398406631e-05, + "loss": 0.2791, + "step": 64340 + }, + { + "epoch": 2.484651917062435, + "grad_norm": 1.2753795385360718, + "learning_rate": 3.4359112964464525e-05, + "loss": 0.213, + "step": 64350 + }, + { + "epoch": 2.485038032356462, + "grad_norm": 0.8051297068595886, + "learning_rate": 3.433337194486274e-05, + "loss": 0.2523, + "step": 64360 + }, + { + "epoch": 2.4854241476504884, + "grad_norm": 2.218684196472168, + "learning_rate": 3.430763092526095e-05, + "loss": 0.2185, + "step": 64370 + }, + { + "epoch": 2.4858102629445153, + "grad_norm": 2.2917842864990234, + "learning_rate": 3.4281889905659165e-05, + "loss": 0.1521, + "step": 64380 + }, + { + "epoch": 2.486196378238542, + "grad_norm": 1.0776972770690918, + "learning_rate": 3.425614888605738e-05, + "loss": 0.2217, + "step": 64390 + }, + { + "epoch": 2.486582493532569, + "grad_norm": 1.0344847440719604, + "learning_rate": 3.423040786645559e-05, + "loss": 0.1871, + "step": 64400 + }, + { + "epoch": 2.4869686088265954, + "grad_norm": 0.6094161868095398, + "learning_rate": 3.4204666846853805e-05, + "loss": 0.1073, + "step": 64410 + }, + { + "epoch": 2.4873547241206224, + "grad_norm": 0.8258315920829773, + "learning_rate": 3.417892582725202e-05, + "loss": 0.1897, + "step": 64420 + }, + { + "epoch": 2.4877408394146494, + "grad_norm": 0.6779630184173584, + "learning_rate": 3.415318480765023e-05, + "loss": 0.0847, + "step": 64430 + }, + { + "epoch": 2.488126954708676, + "grad_norm": 1.7514374256134033, + "learning_rate": 3.4127443788048445e-05, + "loss": 0.2243, + "step": 64440 + }, + { + "epoch": 2.488513070002703, + "grad_norm": 0.5367060899734497, + "learning_rate": 3.410170276844666e-05, + "loss": 0.3064, + "step": 64450 + }, + { + "epoch": 2.4888991852967295, + "grad_norm": 1.0501765012741089, + 
"learning_rate": 3.407596174884488e-05, + "loss": 0.1908, + "step": 64460 + }, + { + "epoch": 2.4892853005907565, + "grad_norm": 0.8501892685890198, + "learning_rate": 3.4050220729243085e-05, + "loss": 0.1238, + "step": 64470 + }, + { + "epoch": 2.489671415884783, + "grad_norm": 0.2177915871143341, + "learning_rate": 3.40244797096413e-05, + "loss": 0.114, + "step": 64480 + }, + { + "epoch": 2.49005753117881, + "grad_norm": 0.04332759603857994, + "learning_rate": 3.399873869003952e-05, + "loss": 0.2304, + "step": 64490 + }, + { + "epoch": 2.490443646472837, + "grad_norm": 0.6862795352935791, + "learning_rate": 3.397299767043773e-05, + "loss": 0.0624, + "step": 64500 + }, + { + "epoch": 2.4908297617668635, + "grad_norm": 0.7046298980712891, + "learning_rate": 3.394725665083594e-05, + "loss": 0.1957, + "step": 64510 + }, + { + "epoch": 2.4912158770608905, + "grad_norm": 0.5339323878288269, + "learning_rate": 3.392151563123416e-05, + "loss": 0.2889, + "step": 64520 + }, + { + "epoch": 2.491601992354917, + "grad_norm": 0.28813856840133667, + "learning_rate": 3.389577461163237e-05, + "loss": 0.0679, + "step": 64530 + }, + { + "epoch": 2.491988107648944, + "grad_norm": 0.6499680876731873, + "learning_rate": 3.387003359203058e-05, + "loss": 0.1949, + "step": 64540 + }, + { + "epoch": 2.4923742229429706, + "grad_norm": 0.26736152172088623, + "learning_rate": 3.384429257242879e-05, + "loss": 0.1647, + "step": 64550 + }, + { + "epoch": 2.4927603382369976, + "grad_norm": 2.9154460430145264, + "learning_rate": 3.381855155282701e-05, + "loss": 0.2578, + "step": 64560 + }, + { + "epoch": 2.4931464535310246, + "grad_norm": 0.23644079267978668, + "learning_rate": 3.3792810533225225e-05, + "loss": 0.1555, + "step": 64570 + }, + { + "epoch": 2.493532568825051, + "grad_norm": 1.459173560142517, + "learning_rate": 3.376706951362343e-05, + "loss": 0.1637, + "step": 64580 + }, + { + "epoch": 2.493918684119078, + "grad_norm": 0.5812413692474365, + "learning_rate": 3.374132849402165e-05, + "loss": 0.1067, + "step": 64590 + }, + { + "epoch": 2.4943047994131047, + "grad_norm": 1.358742594718933, + "learning_rate": 3.3715587474419865e-05, + "loss": 0.2211, + "step": 64600 + }, + { + "epoch": 2.4946909147071317, + "grad_norm": 1.94925856590271, + "learning_rate": 3.368984645481808e-05, + "loss": 0.4372, + "step": 64610 + }, + { + "epoch": 2.495077030001158, + "grad_norm": 0.4507911801338196, + "learning_rate": 3.366410543521629e-05, + "loss": 0.1736, + "step": 64620 + }, + { + "epoch": 2.495463145295185, + "grad_norm": 0.7500709891319275, + "learning_rate": 3.3638364415614504e-05, + "loss": 0.2025, + "step": 64630 + }, + { + "epoch": 2.4958492605892117, + "grad_norm": 2.6973438262939453, + "learning_rate": 3.361262339601272e-05, + "loss": 0.1173, + "step": 64640 + }, + { + "epoch": 2.4962353758832387, + "grad_norm": 2.0454306602478027, + "learning_rate": 3.358688237641093e-05, + "loss": 0.1304, + "step": 64650 + }, + { + "epoch": 2.4966214911772653, + "grad_norm": 2.469876527786255, + "learning_rate": 3.3561141356809144e-05, + "loss": 0.1758, + "step": 64660 + }, + { + "epoch": 2.4970076064712923, + "grad_norm": 0.8067110180854797, + "learning_rate": 3.353540033720736e-05, + "loss": 0.0747, + "step": 64670 + }, + { + "epoch": 2.4973937217653193, + "grad_norm": 1.2773382663726807, + "learning_rate": 3.350965931760557e-05, + "loss": 0.1151, + "step": 64680 + }, + { + "epoch": 2.497779837059346, + "grad_norm": 0.1975592076778412, + "learning_rate": 3.3483918298003784e-05, + "loss": 0.2104, + "step": 64690 + }, + { + 
"epoch": 2.498165952353373, + "grad_norm": 0.6423165202140808, + "learning_rate": 3.3458177278402e-05, + "loss": 0.1885, + "step": 64700 + }, + { + "epoch": 2.4985520676473993, + "grad_norm": 1.818458914756775, + "learning_rate": 3.343243625880022e-05, + "loss": 0.1506, + "step": 64710 + }, + { + "epoch": 2.4989381829414263, + "grad_norm": 0.30381113290786743, + "learning_rate": 3.3406695239198424e-05, + "loss": 0.1791, + "step": 64720 + }, + { + "epoch": 2.499324298235453, + "grad_norm": 2.867393732070923, + "learning_rate": 3.338095421959664e-05, + "loss": 0.1854, + "step": 64730 + }, + { + "epoch": 2.49971041352948, + "grad_norm": 0.22897863388061523, + "learning_rate": 3.335521319999486e-05, + "loss": 0.1313, + "step": 64740 + }, + { + "epoch": 2.500096528823507, + "grad_norm": 2.5336554050445557, + "learning_rate": 3.332947218039307e-05, + "loss": 0.1814, + "step": 64750 + }, + { + "epoch": 2.5004826441175334, + "grad_norm": 0.10779833793640137, + "learning_rate": 3.330373116079128e-05, + "loss": 0.1563, + "step": 64760 + }, + { + "epoch": 2.5008687594115604, + "grad_norm": 1.1376827955245972, + "learning_rate": 3.32779901411895e-05, + "loss": 0.1495, + "step": 64770 + }, + { + "epoch": 2.501254874705587, + "grad_norm": 0.7156823873519897, + "learning_rate": 3.325224912158771e-05, + "loss": 0.2531, + "step": 64780 + }, + { + "epoch": 2.501640989999614, + "grad_norm": 0.7690051198005676, + "learning_rate": 3.322650810198592e-05, + "loss": 0.1204, + "step": 64790 + }, + { + "epoch": 2.5020271052936405, + "grad_norm": 0.0718945860862732, + "learning_rate": 3.320076708238413e-05, + "loss": 0.1044, + "step": 64800 + }, + { + "epoch": 2.5024132205876675, + "grad_norm": 0.12632794678211212, + "learning_rate": 3.317502606278235e-05, + "loss": 0.1007, + "step": 64810 + }, + { + "epoch": 2.5027993358816945, + "grad_norm": 0.9840032458305359, + "learning_rate": 3.3149285043180564e-05, + "loss": 0.2399, + "step": 64820 + }, + { + "epoch": 2.503185451175721, + "grad_norm": 0.3271815776824951, + "learning_rate": 3.312354402357877e-05, + "loss": 0.117, + "step": 64830 + }, + { + "epoch": 2.503571566469748, + "grad_norm": 2.1266753673553467, + "learning_rate": 3.309780300397699e-05, + "loss": 0.1819, + "step": 64840 + }, + { + "epoch": 2.5039576817637745, + "grad_norm": 0.5041390061378479, + "learning_rate": 3.3072061984375204e-05, + "loss": 0.0579, + "step": 64850 + }, + { + "epoch": 2.5043437970578015, + "grad_norm": 0.2855200469493866, + "learning_rate": 3.304632096477341e-05, + "loss": 0.1814, + "step": 64860 + }, + { + "epoch": 2.504729912351828, + "grad_norm": 1.9890060424804688, + "learning_rate": 3.302057994517163e-05, + "loss": 0.3038, + "step": 64870 + }, + { + "epoch": 2.505116027645855, + "grad_norm": 0.5688171982765198, + "learning_rate": 3.2994838925569844e-05, + "loss": 0.0947, + "step": 64880 + }, + { + "epoch": 2.505502142939882, + "grad_norm": 3.265097141265869, + "learning_rate": 3.296909790596806e-05, + "loss": 0.1722, + "step": 64890 + }, + { + "epoch": 2.5058882582339086, + "grad_norm": 1.0011316537857056, + "learning_rate": 3.294335688636627e-05, + "loss": 0.217, + "step": 64900 + }, + { + "epoch": 2.5062743735279356, + "grad_norm": 2.054866075515747, + "learning_rate": 3.2917615866764484e-05, + "loss": 0.1862, + "step": 64910 + }, + { + "epoch": 2.506660488821962, + "grad_norm": 1.2730998992919922, + "learning_rate": 3.28918748471627e-05, + "loss": 0.1023, + "step": 64920 + }, + { + "epoch": 2.507046604115989, + "grad_norm": 1.5231776237487793, + "learning_rate": 
3.286613382756091e-05, + "loss": 0.1922, + "step": 64930 + }, + { + "epoch": 2.5074327194100157, + "grad_norm": 0.7604933977127075, + "learning_rate": 3.2840392807959124e-05, + "loss": 0.1329, + "step": 64940 + }, + { + "epoch": 2.5078188347040427, + "grad_norm": 1.243506669998169, + "learning_rate": 3.281465178835734e-05, + "loss": 0.2142, + "step": 64950 + }, + { + "epoch": 2.5082049499980696, + "grad_norm": 3.0195231437683105, + "learning_rate": 3.278891076875556e-05, + "loss": 0.3474, + "step": 64960 + }, + { + "epoch": 2.508591065292096, + "grad_norm": 2.2186105251312256, + "learning_rate": 3.2763169749153764e-05, + "loss": 0.2125, + "step": 64970 + }, + { + "epoch": 2.508977180586123, + "grad_norm": 1.1483558416366577, + "learning_rate": 3.273742872955198e-05, + "loss": 0.1427, + "step": 64980 + }, + { + "epoch": 2.5093632958801497, + "grad_norm": 1.3046457767486572, + "learning_rate": 3.27116877099502e-05, + "loss": 0.0859, + "step": 64990 + }, + { + "epoch": 2.5097494111741767, + "grad_norm": 0.04392600804567337, + "learning_rate": 3.2685946690348404e-05, + "loss": 0.146, + "step": 65000 + }, + { + "epoch": 2.5101355264682033, + "grad_norm": 0.8138188123703003, + "learning_rate": 3.266020567074662e-05, + "loss": 0.1809, + "step": 65010 + }, + { + "epoch": 2.5105216417622302, + "grad_norm": 2.0875182151794434, + "learning_rate": 3.263446465114484e-05, + "loss": 0.1925, + "step": 65020 + }, + { + "epoch": 2.5109077570562572, + "grad_norm": 0.4890693426132202, + "learning_rate": 3.260872363154305e-05, + "loss": 0.1189, + "step": 65030 + }, + { + "epoch": 2.511293872350284, + "grad_norm": 0.21475251019001007, + "learning_rate": 3.258298261194126e-05, + "loss": 0.2583, + "step": 65040 + }, + { + "epoch": 2.5116799876443103, + "grad_norm": 1.272985577583313, + "learning_rate": 3.255724159233947e-05, + "loss": 0.3891, + "step": 65050 + }, + { + "epoch": 2.5120661029383373, + "grad_norm": 0.8311867713928223, + "learning_rate": 3.253150057273769e-05, + "loss": 0.1536, + "step": 65060 + }, + { + "epoch": 2.5124522182323643, + "grad_norm": 2.999817371368408, + "learning_rate": 3.2505759553135904e-05, + "loss": 0.2331, + "step": 65070 + }, + { + "epoch": 2.512838333526391, + "grad_norm": 0.4586697518825531, + "learning_rate": 3.248001853353411e-05, + "loss": 0.1496, + "step": 65080 + }, + { + "epoch": 2.513224448820418, + "grad_norm": 0.38037627935409546, + "learning_rate": 3.245427751393233e-05, + "loss": 0.1769, + "step": 65090 + }, + { + "epoch": 2.513610564114445, + "grad_norm": 0.9884114265441895, + "learning_rate": 3.2428536494330544e-05, + "loss": 0.157, + "step": 65100 + }, + { + "epoch": 2.5139966794084714, + "grad_norm": 2.8576581478118896, + "learning_rate": 3.240279547472875e-05, + "loss": 0.1484, + "step": 65110 + }, + { + "epoch": 2.514382794702498, + "grad_norm": 1.7361352443695068, + "learning_rate": 3.237705445512697e-05, + "loss": 0.1315, + "step": 65120 + }, + { + "epoch": 2.514768909996525, + "grad_norm": 1.4289588928222656, + "learning_rate": 3.2351313435525184e-05, + "loss": 0.1235, + "step": 65130 + }, + { + "epoch": 2.515155025290552, + "grad_norm": 2.256578207015991, + "learning_rate": 3.23255724159234e-05, + "loss": 0.1026, + "step": 65140 + }, + { + "epoch": 2.5155411405845785, + "grad_norm": 2.3927180767059326, + "learning_rate": 3.229983139632161e-05, + "loss": 0.1264, + "step": 65150 + }, + { + "epoch": 2.5159272558786054, + "grad_norm": 0.7600728869438171, + "learning_rate": 3.2274090376719824e-05, + "loss": 0.1091, + "step": 65160 + }, + { + "epoch": 
2.516313371172632, + "grad_norm": 1.196343183517456, + "learning_rate": 3.224834935711804e-05, + "loss": 0.1419, + "step": 65170 + }, + { + "epoch": 2.516699486466659, + "grad_norm": 1.8273401260375977, + "learning_rate": 3.222260833751625e-05, + "loss": 0.1519, + "step": 65180 + }, + { + "epoch": 2.5170856017606855, + "grad_norm": 0.586053192615509, + "learning_rate": 3.2196867317914463e-05, + "loss": 0.1348, + "step": 65190 + }, + { + "epoch": 2.5174717170547125, + "grad_norm": 0.34410399198532104, + "learning_rate": 3.217112629831268e-05, + "loss": 0.2406, + "step": 65200 + }, + { + "epoch": 2.5178578323487395, + "grad_norm": 1.9117828607559204, + "learning_rate": 3.214538527871089e-05, + "loss": 0.1669, + "step": 65210 + }, + { + "epoch": 2.518243947642766, + "grad_norm": 0.3416088819503784, + "learning_rate": 3.2119644259109103e-05, + "loss": 0.0738, + "step": 65220 + }, + { + "epoch": 2.518630062936793, + "grad_norm": 2.7562408447265625, + "learning_rate": 3.209390323950732e-05, + "loss": 0.2793, + "step": 65230 + }, + { + "epoch": 2.5190161782308196, + "grad_norm": 0.18978220224380493, + "learning_rate": 3.206816221990554e-05, + "loss": 0.2145, + "step": 65240 + }, + { + "epoch": 2.5194022935248466, + "grad_norm": 1.9624252319335938, + "learning_rate": 3.204242120030374e-05, + "loss": 0.2203, + "step": 65250 + }, + { + "epoch": 2.519788408818873, + "grad_norm": 1.9377204179763794, + "learning_rate": 3.201668018070196e-05, + "loss": 0.1587, + "step": 65260 + }, + { + "epoch": 2.5201745241129, + "grad_norm": 0.5279117226600647, + "learning_rate": 3.199093916110018e-05, + "loss": 0.1874, + "step": 65270 + }, + { + "epoch": 2.520560639406927, + "grad_norm": 1.3398821353912354, + "learning_rate": 3.196519814149839e-05, + "loss": 0.1258, + "step": 65280 + }, + { + "epoch": 2.5209467547009536, + "grad_norm": 3.5301899909973145, + "learning_rate": 3.1939457121896597e-05, + "loss": 0.2119, + "step": 65290 + }, + { + "epoch": 2.5213328699949806, + "grad_norm": 1.9934186935424805, + "learning_rate": 3.191371610229482e-05, + "loss": 0.1813, + "step": 65300 + }, + { + "epoch": 2.521718985289007, + "grad_norm": 0.049756068736314774, + "learning_rate": 3.188797508269303e-05, + "loss": 0.1224, + "step": 65310 + }, + { + "epoch": 2.522105100583034, + "grad_norm": 0.14521420001983643, + "learning_rate": 3.1862234063091237e-05, + "loss": 0.2149, + "step": 65320 + }, + { + "epoch": 2.5224912158770607, + "grad_norm": 1.602318525314331, + "learning_rate": 3.183649304348945e-05, + "loss": 0.2355, + "step": 65330 + }, + { + "epoch": 2.5228773311710877, + "grad_norm": 0.8942916989326477, + "learning_rate": 3.181075202388767e-05, + "loss": 0.0873, + "step": 65340 + }, + { + "epoch": 2.5232634464651147, + "grad_norm": 1.184981107711792, + "learning_rate": 3.178501100428588e-05, + "loss": 0.2573, + "step": 65350 + }, + { + "epoch": 2.5236495617591412, + "grad_norm": 1.4945077896118164, + "learning_rate": 3.175926998468409e-05, + "loss": 0.131, + "step": 65360 + }, + { + "epoch": 2.5240356770531682, + "grad_norm": 0.2049487978219986, + "learning_rate": 3.173352896508231e-05, + "loss": 0.0353, + "step": 65370 + }, + { + "epoch": 2.5244217923471948, + "grad_norm": 0.9006851315498352, + "learning_rate": 3.170778794548052e-05, + "loss": 0.1341, + "step": 65380 + }, + { + "epoch": 2.5248079076412218, + "grad_norm": 0.08327820897102356, + "learning_rate": 3.1682046925878736e-05, + "loss": 0.1907, + "step": 65390 + }, + { + "epoch": 2.5251940229352483, + "grad_norm": 0.19701172411441803, + "learning_rate": 
3.165630590627695e-05, + "loss": 0.131, + "step": 65400 + }, + { + "epoch": 2.5255801382292753, + "grad_norm": 2.7013745307922363, + "learning_rate": 3.163056488667516e-05, + "loss": 0.3517, + "step": 65410 + }, + { + "epoch": 2.5259662535233023, + "grad_norm": 0.20807845890522003, + "learning_rate": 3.1604823867073376e-05, + "loss": 0.0998, + "step": 65420 + }, + { + "epoch": 2.526352368817329, + "grad_norm": 0.6886903643608093, + "learning_rate": 3.157908284747159e-05, + "loss": 0.1197, + "step": 65430 + }, + { + "epoch": 2.5267384841113554, + "grad_norm": 0.42514756321907043, + "learning_rate": 3.15533418278698e-05, + "loss": 0.1692, + "step": 65440 + }, + { + "epoch": 2.5271245994053824, + "grad_norm": 0.26355358958244324, + "learning_rate": 3.1527600808268016e-05, + "loss": 0.121, + "step": 65450 + }, + { + "epoch": 2.5275107146994094, + "grad_norm": 0.14379052817821503, + "learning_rate": 3.150185978866623e-05, + "loss": 0.2221, + "step": 65460 + }, + { + "epoch": 2.527896829993436, + "grad_norm": 0.0977015420794487, + "learning_rate": 3.147611876906444e-05, + "loss": 0.158, + "step": 65470 + }, + { + "epoch": 2.528282945287463, + "grad_norm": 0.6228841543197632, + "learning_rate": 3.1450377749462656e-05, + "loss": 0.2043, + "step": 65480 + }, + { + "epoch": 2.52866906058149, + "grad_norm": 0.10137589275836945, + "learning_rate": 3.1424636729860876e-05, + "loss": 0.2369, + "step": 65490 + }, + { + "epoch": 2.5290551758755164, + "grad_norm": 0.20083734393119812, + "learning_rate": 3.139889571025908e-05, + "loss": 0.1989, + "step": 65500 + }, + { + "epoch": 2.529441291169543, + "grad_norm": 0.35362812876701355, + "learning_rate": 3.1373154690657296e-05, + "loss": 0.2544, + "step": 65510 + }, + { + "epoch": 2.52982740646357, + "grad_norm": 0.8700111508369446, + "learning_rate": 3.1347413671055516e-05, + "loss": 0.112, + "step": 65520 + }, + { + "epoch": 2.530213521757597, + "grad_norm": 0.0802641287446022, + "learning_rate": 3.132167265145372e-05, + "loss": 0.0596, + "step": 65530 + }, + { + "epoch": 2.5305996370516235, + "grad_norm": 1.380710244178772, + "learning_rate": 3.1295931631851936e-05, + "loss": 0.1295, + "step": 65540 + }, + { + "epoch": 2.5309857523456505, + "grad_norm": 0.9784635901451111, + "learning_rate": 3.1270190612250156e-05, + "loss": 0.2425, + "step": 65550 + }, + { + "epoch": 2.5313718676396775, + "grad_norm": 0.09004099667072296, + "learning_rate": 3.124444959264837e-05, + "loss": 0.0659, + "step": 65560 + }, + { + "epoch": 2.531757982933704, + "grad_norm": 0.33668985962867737, + "learning_rate": 3.1218708573046576e-05, + "loss": 0.1979, + "step": 65570 + }, + { + "epoch": 2.5321440982277306, + "grad_norm": 0.39336487650871277, + "learning_rate": 3.119296755344479e-05, + "loss": 0.17, + "step": 65580 + }, + { + "epoch": 2.5325302135217576, + "grad_norm": 0.6237707734107971, + "learning_rate": 3.116722653384301e-05, + "loss": 0.0943, + "step": 65590 + }, + { + "epoch": 2.5329163288157845, + "grad_norm": 0.1977011263370514, + "learning_rate": 3.114148551424122e-05, + "loss": 0.1265, + "step": 65600 + }, + { + "epoch": 2.533302444109811, + "grad_norm": 0.7314585447311401, + "learning_rate": 3.111574449463943e-05, + "loss": 0.1159, + "step": 65610 + }, + { + "epoch": 2.533688559403838, + "grad_norm": 1.6389861106872559, + "learning_rate": 3.109000347503765e-05, + "loss": 0.1515, + "step": 65620 + }, + { + "epoch": 2.5340746746978646, + "grad_norm": 0.25401124358177185, + "learning_rate": 3.106426245543586e-05, + "loss": 0.2054, + "step": 65630 + }, + { + "epoch": 
2.5344607899918916, + "grad_norm": 0.7747787833213806, + "learning_rate": 3.103852143583407e-05, + "loss": 0.0944, + "step": 65640 + }, + { + "epoch": 2.534846905285918, + "grad_norm": 2.0066640377044678, + "learning_rate": 3.101278041623229e-05, + "loss": 0.1223, + "step": 65650 + }, + { + "epoch": 2.535233020579945, + "grad_norm": 0.3246127665042877, + "learning_rate": 3.09870393966305e-05, + "loss": 0.2998, + "step": 65660 + }, + { + "epoch": 2.535619135873972, + "grad_norm": 0.6192534565925598, + "learning_rate": 3.0961298377028716e-05, + "loss": 0.0864, + "step": 65670 + }, + { + "epoch": 2.5360052511679987, + "grad_norm": 1.500116229057312, + "learning_rate": 3.093555735742693e-05, + "loss": 0.1384, + "step": 65680 + }, + { + "epoch": 2.5363913664620257, + "grad_norm": 1.661163091659546, + "learning_rate": 3.090981633782514e-05, + "loss": 0.1907, + "step": 65690 + }, + { + "epoch": 2.536777481756052, + "grad_norm": 0.46657463908195496, + "learning_rate": 3.0884075318223356e-05, + "loss": 0.2575, + "step": 65700 + }, + { + "epoch": 2.537163597050079, + "grad_norm": 1.0207927227020264, + "learning_rate": 3.085833429862157e-05, + "loss": 0.1799, + "step": 65710 + }, + { + "epoch": 2.5375497123441058, + "grad_norm": 0.5044147372245789, + "learning_rate": 3.083259327901978e-05, + "loss": 0.2646, + "step": 65720 + }, + { + "epoch": 2.5379358276381327, + "grad_norm": 0.6330855488777161, + "learning_rate": 3.0806852259417996e-05, + "loss": 0.2243, + "step": 65730 + }, + { + "epoch": 2.5383219429321597, + "grad_norm": 0.12088367342948914, + "learning_rate": 3.0781111239816216e-05, + "loss": 0.1685, + "step": 65740 + }, + { + "epoch": 2.5387080582261863, + "grad_norm": 1.5174821615219116, + "learning_rate": 3.075537022021442e-05, + "loss": 0.081, + "step": 65750 + }, + { + "epoch": 2.5390941735202133, + "grad_norm": 0.5644610524177551, + "learning_rate": 3.0729629200612636e-05, + "loss": 0.1046, + "step": 65760 + }, + { + "epoch": 2.53948028881424, + "grad_norm": 1.9365503787994385, + "learning_rate": 3.0703888181010856e-05, + "loss": 0.1162, + "step": 65770 + }, + { + "epoch": 2.539866404108267, + "grad_norm": 0.46223533153533936, + "learning_rate": 3.067814716140906e-05, + "loss": 0.1218, + "step": 65780 + }, + { + "epoch": 2.5402525194022934, + "grad_norm": 2.0017831325531006, + "learning_rate": 3.0652406141807276e-05, + "loss": 0.2885, + "step": 65790 + }, + { + "epoch": 2.5406386346963203, + "grad_norm": 1.7874171733856201, + "learning_rate": 3.0626665122205496e-05, + "loss": 0.1561, + "step": 65800 + }, + { + "epoch": 2.5410247499903473, + "grad_norm": 0.6481704115867615, + "learning_rate": 3.060092410260371e-05, + "loss": 0.188, + "step": 65810 + }, + { + "epoch": 2.541410865284374, + "grad_norm": 1.0805027484893799, + "learning_rate": 3.0575183083001916e-05, + "loss": 0.1335, + "step": 65820 + }, + { + "epoch": 2.541796980578401, + "grad_norm": 2.5876686573028564, + "learning_rate": 3.0549442063400136e-05, + "loss": 0.0836, + "step": 65830 + }, + { + "epoch": 2.5421830958724274, + "grad_norm": 0.9550105929374695, + "learning_rate": 3.052370104379835e-05, + "loss": 0.2, + "step": 65840 + }, + { + "epoch": 2.5425692111664544, + "grad_norm": 0.48512884974479675, + "learning_rate": 3.049796002419656e-05, + "loss": 0.089, + "step": 65850 + }, + { + "epoch": 2.542955326460481, + "grad_norm": 0.8065679669380188, + "learning_rate": 3.0472219004594772e-05, + "loss": 0.1408, + "step": 65860 + }, + { + "epoch": 2.543341441754508, + "grad_norm": 0.32501792907714844, + "learning_rate": 
3.044647798499299e-05, + "loss": 0.3102, + "step": 65870 + }, + { + "epoch": 2.543727557048535, + "grad_norm": 0.521720826625824, + "learning_rate": 3.04207369653912e-05, + "loss": 0.1617, + "step": 65880 + }, + { + "epoch": 2.5441136723425615, + "grad_norm": 0.4469434320926666, + "learning_rate": 3.0394995945789412e-05, + "loss": 0.0619, + "step": 65890 + }, + { + "epoch": 2.544499787636588, + "grad_norm": 1.0342704057693481, + "learning_rate": 3.036925492618763e-05, + "loss": 0.1, + "step": 65900 + }, + { + "epoch": 2.544885902930615, + "grad_norm": 0.5880559086799622, + "learning_rate": 3.0343513906585842e-05, + "loss": 0.1031, + "step": 65910 + }, + { + "epoch": 2.545272018224642, + "grad_norm": 0.065493643283844, + "learning_rate": 3.0317772886984052e-05, + "loss": 0.1029, + "step": 65920 + }, + { + "epoch": 2.5456581335186685, + "grad_norm": 0.6159729361534119, + "learning_rate": 3.029203186738227e-05, + "loss": 0.0852, + "step": 65930 + }, + { + "epoch": 2.5460442488126955, + "grad_norm": 1.618788242340088, + "learning_rate": 3.0266290847780482e-05, + "loss": 0.2189, + "step": 65940 + }, + { + "epoch": 2.5464303641067225, + "grad_norm": 4.030269145965576, + "learning_rate": 3.0240549828178692e-05, + "loss": 0.3216, + "step": 65950 + }, + { + "epoch": 2.546816479400749, + "grad_norm": 1.813103437423706, + "learning_rate": 3.0214808808576912e-05, + "loss": 0.2134, + "step": 65960 + }, + { + "epoch": 2.5472025946947756, + "grad_norm": 0.5134888887405396, + "learning_rate": 3.0189067788975122e-05, + "loss": 0.1133, + "step": 65970 + }, + { + "epoch": 2.5475887099888026, + "grad_norm": 0.027485152706503868, + "learning_rate": 3.0163326769373335e-05, + "loss": 0.0954, + "step": 65980 + }, + { + "epoch": 2.5479748252828296, + "grad_norm": 1.0274910926818848, + "learning_rate": 3.0137585749771552e-05, + "loss": 0.3188, + "step": 65990 + }, + { + "epoch": 2.548360940576856, + "grad_norm": 0.556398332118988, + "learning_rate": 3.0111844730169762e-05, + "loss": 0.0638, + "step": 66000 + }, + { + "epoch": 2.548747055870883, + "grad_norm": 0.16730284690856934, + "learning_rate": 3.0086103710567975e-05, + "loss": 0.1002, + "step": 66010 + }, + { + "epoch": 2.54913317116491, + "grad_norm": 0.401558518409729, + "learning_rate": 3.0060362690966192e-05, + "loss": 0.1537, + "step": 66020 + }, + { + "epoch": 2.5495192864589367, + "grad_norm": 0.8240194916725159, + "learning_rate": 3.0034621671364405e-05, + "loss": 0.2183, + "step": 66030 + }, + { + "epoch": 2.549905401752963, + "grad_norm": 0.34985408186912537, + "learning_rate": 3.0008880651762615e-05, + "loss": 0.2507, + "step": 66040 + }, + { + "epoch": 2.55029151704699, + "grad_norm": 1.4406944513320923, + "learning_rate": 2.9983139632160835e-05, + "loss": 0.2086, + "step": 66050 + }, + { + "epoch": 2.550677632341017, + "grad_norm": 1.0392922163009644, + "learning_rate": 2.9957398612559045e-05, + "loss": 0.2622, + "step": 66060 + }, + { + "epoch": 2.5510637476350437, + "grad_norm": 0.07720334827899933, + "learning_rate": 2.993165759295726e-05, + "loss": 0.2646, + "step": 66070 + }, + { + "epoch": 2.5514498629290707, + "grad_norm": 1.9083623886108398, + "learning_rate": 2.9905916573355475e-05, + "loss": 0.1832, + "step": 66080 + }, + { + "epoch": 2.5518359782230973, + "grad_norm": 0.7062809467315674, + "learning_rate": 2.9880175553753685e-05, + "loss": 0.2515, + "step": 66090 + }, + { + "epoch": 2.5522220935171243, + "grad_norm": 2.6205830574035645, + "learning_rate": 2.98544345341519e-05, + "loss": 0.1872, + "step": 66100 + }, + { + "epoch": 
2.552608208811151, + "grad_norm": 0.6591269373893738, + "learning_rate": 2.982869351455011e-05, + "loss": 0.2101, + "step": 66110 + }, + { + "epoch": 2.552994324105178, + "grad_norm": 0.11720894277095795, + "learning_rate": 2.980295249494833e-05, + "loss": 0.0773, + "step": 66120 + }, + { + "epoch": 2.553380439399205, + "grad_norm": 0.4956444799900055, + "learning_rate": 2.977721147534654e-05, + "loss": 0.0854, + "step": 66130 + }, + { + "epoch": 2.5537665546932313, + "grad_norm": 0.11073987185955048, + "learning_rate": 2.9751470455744752e-05, + "loss": 0.0968, + "step": 66140 + }, + { + "epoch": 2.5541526699872583, + "grad_norm": 0.1464674174785614, + "learning_rate": 2.972572943614297e-05, + "loss": 0.1609, + "step": 66150 + }, + { + "epoch": 2.554538785281285, + "grad_norm": 1.9735543727874756, + "learning_rate": 2.9699988416541182e-05, + "loss": 0.2884, + "step": 66160 + }, + { + "epoch": 2.554924900575312, + "grad_norm": 0.5370622873306274, + "learning_rate": 2.9674247396939392e-05, + "loss": 0.1345, + "step": 66170 + }, + { + "epoch": 2.5553110158693384, + "grad_norm": 1.845221996307373, + "learning_rate": 2.964850637733761e-05, + "loss": 0.1761, + "step": 66180 + }, + { + "epoch": 2.5556971311633654, + "grad_norm": 0.051780425012111664, + "learning_rate": 2.9622765357735822e-05, + "loss": 0.1886, + "step": 66190 + }, + { + "epoch": 2.5560832464573924, + "grad_norm": 2.163306713104248, + "learning_rate": 2.959702433813403e-05, + "loss": 0.2559, + "step": 66200 + }, + { + "epoch": 2.556469361751419, + "grad_norm": 1.9034422636032104, + "learning_rate": 2.9571283318532252e-05, + "loss": 0.1655, + "step": 66210 + }, + { + "epoch": 2.556855477045446, + "grad_norm": 0.1645793616771698, + "learning_rate": 2.954554229893046e-05, + "loss": 0.0944, + "step": 66220 + }, + { + "epoch": 2.5572415923394725, + "grad_norm": 0.0783633440732956, + "learning_rate": 2.9519801279328675e-05, + "loss": 0.1487, + "step": 66230 + }, + { + "epoch": 2.5576277076334994, + "grad_norm": 1.8718386888504028, + "learning_rate": 2.949406025972689e-05, + "loss": 0.1607, + "step": 66240 + }, + { + "epoch": 2.558013822927526, + "grad_norm": 0.23518329858779907, + "learning_rate": 2.94683192401251e-05, + "loss": 0.0665, + "step": 66250 + }, + { + "epoch": 2.558399938221553, + "grad_norm": 2.0301125049591064, + "learning_rate": 2.9442578220523315e-05, + "loss": 0.2067, + "step": 66260 + }, + { + "epoch": 2.55878605351558, + "grad_norm": 0.8637131452560425, + "learning_rate": 2.941683720092153e-05, + "loss": 0.1247, + "step": 66270 + }, + { + "epoch": 2.5591721688096065, + "grad_norm": 3.191856861114502, + "learning_rate": 2.9391096181319745e-05, + "loss": 0.1793, + "step": 66280 + }, + { + "epoch": 2.5595582841036335, + "grad_norm": 2.5240116119384766, + "learning_rate": 2.9365355161717955e-05, + "loss": 0.2405, + "step": 66290 + }, + { + "epoch": 2.55994439939766, + "grad_norm": 0.651969313621521, + "learning_rate": 2.933961414211617e-05, + "loss": 0.236, + "step": 66300 + }, + { + "epoch": 2.560330514691687, + "grad_norm": 0.6139543056488037, + "learning_rate": 2.9313873122514385e-05, + "loss": 0.0929, + "step": 66310 + }, + { + "epoch": 2.5607166299857136, + "grad_norm": 2.219248056411743, + "learning_rate": 2.9288132102912598e-05, + "loss": 0.1579, + "step": 66320 + }, + { + "epoch": 2.5611027452797406, + "grad_norm": 1.0484322309494019, + "learning_rate": 2.9262391083310815e-05, + "loss": 0.0948, + "step": 66330 + }, + { + "epoch": 2.5614888605737676, + "grad_norm": 4.444892406463623, + "learning_rate": 
2.9236650063709025e-05, + "loss": 0.2459, + "step": 66340 + }, + { + "epoch": 2.561874975867794, + "grad_norm": 0.5715140700340271, + "learning_rate": 2.9210909044107238e-05, + "loss": 0.1527, + "step": 66350 + }, + { + "epoch": 2.5622610911618207, + "grad_norm": 1.5631526708602905, + "learning_rate": 2.9185168024505448e-05, + "loss": 0.2262, + "step": 66360 + }, + { + "epoch": 2.5626472064558476, + "grad_norm": 0.5048274397850037, + "learning_rate": 2.9159427004903668e-05, + "loss": 0.1175, + "step": 66370 + }, + { + "epoch": 2.5630333217498746, + "grad_norm": 1.8589451313018799, + "learning_rate": 2.9133685985301878e-05, + "loss": 0.1788, + "step": 66380 + }, + { + "epoch": 2.563419437043901, + "grad_norm": 0.7230979800224304, + "learning_rate": 2.910794496570009e-05, + "loss": 0.1978, + "step": 66390 + }, + { + "epoch": 2.563805552337928, + "grad_norm": 0.34469518065452576, + "learning_rate": 2.9082203946098308e-05, + "loss": 0.2338, + "step": 66400 + }, + { + "epoch": 2.564191667631955, + "grad_norm": 0.396876722574234, + "learning_rate": 2.9056462926496518e-05, + "loss": 0.1059, + "step": 66410 + }, + { + "epoch": 2.5645777829259817, + "grad_norm": 0.10169263184070587, + "learning_rate": 2.903072190689473e-05, + "loss": 0.1637, + "step": 66420 + }, + { + "epoch": 2.5649638982200083, + "grad_norm": 0.9067368507385254, + "learning_rate": 2.9004980887292948e-05, + "loss": 0.1774, + "step": 66430 + }, + { + "epoch": 2.5653500135140352, + "grad_norm": 0.03508066385984421, + "learning_rate": 2.897923986769116e-05, + "loss": 0.1696, + "step": 66440 + }, + { + "epoch": 2.5657361288080622, + "grad_norm": 0.8411409258842468, + "learning_rate": 2.895349884808937e-05, + "loss": 0.1355, + "step": 66450 + }, + { + "epoch": 2.566122244102089, + "grad_norm": 9.54201602935791, + "learning_rate": 2.8927757828487588e-05, + "loss": 0.2578, + "step": 66460 + }, + { + "epoch": 2.5665083593961158, + "grad_norm": 0.5490165948867798, + "learning_rate": 2.89020168088858e-05, + "loss": 0.2095, + "step": 66470 + }, + { + "epoch": 2.5668944746901423, + "grad_norm": 0.1423688530921936, + "learning_rate": 2.8876275789284015e-05, + "loss": 0.1465, + "step": 66480 + }, + { + "epoch": 2.5672805899841693, + "grad_norm": 1.456730604171753, + "learning_rate": 2.885053476968223e-05, + "loss": 0.1963, + "step": 66490 + }, + { + "epoch": 2.567666705278196, + "grad_norm": 1.5556591749191284, + "learning_rate": 2.882479375008044e-05, + "loss": 0.2851, + "step": 66500 + }, + { + "epoch": 2.568052820572223, + "grad_norm": 1.825986385345459, + "learning_rate": 2.8799052730478654e-05, + "loss": 0.2956, + "step": 66510 + }, + { + "epoch": 2.56843893586625, + "grad_norm": 1.5602234601974487, + "learning_rate": 2.877331171087687e-05, + "loss": 0.1656, + "step": 66520 + }, + { + "epoch": 2.5688250511602764, + "grad_norm": 0.17269374430179596, + "learning_rate": 2.8747570691275084e-05, + "loss": 0.2931, + "step": 66530 + }, + { + "epoch": 2.5692111664543034, + "grad_norm": 2.0602357387542725, + "learning_rate": 2.8721829671673294e-05, + "loss": 0.1741, + "step": 66540 + }, + { + "epoch": 2.56959728174833, + "grad_norm": 0.8411749005317688, + "learning_rate": 2.869608865207151e-05, + "loss": 0.2062, + "step": 66550 + }, + { + "epoch": 2.569983397042357, + "grad_norm": 1.9933018684387207, + "learning_rate": 2.8670347632469724e-05, + "loss": 0.0564, + "step": 66560 + }, + { + "epoch": 2.5703695123363834, + "grad_norm": 0.6383324861526489, + "learning_rate": 2.8644606612867934e-05, + "loss": 0.1954, + "step": 66570 + }, + { + "epoch": 
2.5707556276304104, + "grad_norm": 0.6832618713378906, + "learning_rate": 2.8618865593266154e-05, + "loss": 0.1364, + "step": 66580 + }, + { + "epoch": 2.5711417429244374, + "grad_norm": 0.13984490931034088, + "learning_rate": 2.8593124573664364e-05, + "loss": 0.2051, + "step": 66590 + }, + { + "epoch": 2.571527858218464, + "grad_norm": 0.9289367198944092, + "learning_rate": 2.8567383554062578e-05, + "loss": 0.167, + "step": 66600 + }, + { + "epoch": 2.571913973512491, + "grad_norm": 2.27067232131958, + "learning_rate": 2.8541642534460794e-05, + "loss": 0.2125, + "step": 66610 + }, + { + "epoch": 2.5723000888065175, + "grad_norm": 2.3731513023376465, + "learning_rate": 2.8515901514859004e-05, + "loss": 0.1417, + "step": 66620 + }, + { + "epoch": 2.5726862041005445, + "grad_norm": 2.030726194381714, + "learning_rate": 2.8490160495257218e-05, + "loss": 0.1292, + "step": 66630 + }, + { + "epoch": 2.573072319394571, + "grad_norm": 2.3475704193115234, + "learning_rate": 2.846441947565543e-05, + "loss": 0.2046, + "step": 66640 + }, + { + "epoch": 2.573458434688598, + "grad_norm": 1.8660598993301392, + "learning_rate": 2.8438678456053648e-05, + "loss": 0.1539, + "step": 66650 + }, + { + "epoch": 2.573844549982625, + "grad_norm": 2.3513095378875732, + "learning_rate": 2.8412937436451858e-05, + "loss": 0.3371, + "step": 66660 + }, + { + "epoch": 2.5742306652766516, + "grad_norm": 0.40551525354385376, + "learning_rate": 2.838719641685007e-05, + "loss": 0.1595, + "step": 66670 + }, + { + "epoch": 2.5746167805706786, + "grad_norm": 0.5435059070587158, + "learning_rate": 2.8361455397248288e-05, + "loss": 0.1139, + "step": 66680 + }, + { + "epoch": 2.575002895864705, + "grad_norm": 0.13456226885318756, + "learning_rate": 2.83357143776465e-05, + "loss": 0.1576, + "step": 66690 + }, + { + "epoch": 2.575389011158732, + "grad_norm": 0.8292468786239624, + "learning_rate": 2.830997335804471e-05, + "loss": 0.3232, + "step": 66700 + }, + { + "epoch": 2.5757751264527586, + "grad_norm": 2.613285541534424, + "learning_rate": 2.8284232338442927e-05, + "loss": 0.3047, + "step": 66710 + }, + { + "epoch": 2.5761612417467856, + "grad_norm": 0.11251250654459, + "learning_rate": 2.825849131884114e-05, + "loss": 0.2049, + "step": 66720 + }, + { + "epoch": 2.5765473570408126, + "grad_norm": 0.4605161249637604, + "learning_rate": 2.823275029923935e-05, + "loss": 0.0768, + "step": 66730 + }, + { + "epoch": 2.576933472334839, + "grad_norm": 1.2005031108856201, + "learning_rate": 2.820700927963757e-05, + "loss": 0.2257, + "step": 66740 + }, + { + "epoch": 2.5773195876288657, + "grad_norm": 1.1498386859893799, + "learning_rate": 2.818126826003578e-05, + "loss": 0.0817, + "step": 66750 + }, + { + "epoch": 2.5777057029228927, + "grad_norm": 0.5442838072776794, + "learning_rate": 2.8155527240433994e-05, + "loss": 0.1027, + "step": 66760 + }, + { + "epoch": 2.5780918182169197, + "grad_norm": 0.026386337354779243, + "learning_rate": 2.812978622083221e-05, + "loss": 0.2398, + "step": 66770 + }, + { + "epoch": 2.5784779335109462, + "grad_norm": 1.8325613737106323, + "learning_rate": 2.810404520123042e-05, + "loss": 0.1616, + "step": 66780 + }, + { + "epoch": 2.578864048804973, + "grad_norm": 4.571000576019287, + "learning_rate": 2.8078304181628634e-05, + "loss": 0.327, + "step": 66790 + }, + { + "epoch": 2.579250164099, + "grad_norm": 0.88929682970047, + "learning_rate": 2.805256316202685e-05, + "loss": 0.128, + "step": 66800 + }, + { + "epoch": 2.5796362793930268, + "grad_norm": 0.5320045351982117, + "learning_rate": 
2.8026822142425064e-05, + "loss": 0.043, + "step": 66810 + }, + { + "epoch": 2.5800223946870533, + "grad_norm": 2.667931079864502, + "learning_rate": 2.8001081122823274e-05, + "loss": 0.1023, + "step": 66820 + }, + { + "epoch": 2.5804085099810803, + "grad_norm": 0.5052358508110046, + "learning_rate": 2.7975340103221494e-05, + "loss": 0.1913, + "step": 66830 + }, + { + "epoch": 2.5807946252751073, + "grad_norm": 0.03477906435728073, + "learning_rate": 2.7949599083619704e-05, + "loss": 0.0771, + "step": 66840 + }, + { + "epoch": 2.581180740569134, + "grad_norm": 1.756790280342102, + "learning_rate": 2.7923858064017917e-05, + "loss": 0.1997, + "step": 66850 + }, + { + "epoch": 2.581566855863161, + "grad_norm": 0.2687755227088928, + "learning_rate": 2.7898117044416134e-05, + "loss": 0.1979, + "step": 66860 + }, + { + "epoch": 2.581952971157188, + "grad_norm": 0.3334960341453552, + "learning_rate": 2.7872376024814344e-05, + "loss": 0.1232, + "step": 66870 + }, + { + "epoch": 2.5823390864512143, + "grad_norm": 0.06918884068727493, + "learning_rate": 2.7846635005212557e-05, + "loss": 0.1476, + "step": 66880 + }, + { + "epoch": 2.582725201745241, + "grad_norm": 2.113374710083008, + "learning_rate": 2.7820893985610767e-05, + "loss": 0.2019, + "step": 66890 + }, + { + "epoch": 2.583111317039268, + "grad_norm": 0.7421366572380066, + "learning_rate": 2.7795152966008987e-05, + "loss": 0.1316, + "step": 66900 + }, + { + "epoch": 2.583497432333295, + "grad_norm": 0.5632963180541992, + "learning_rate": 2.7769411946407197e-05, + "loss": 0.2162, + "step": 66910 + }, + { + "epoch": 2.5838835476273214, + "grad_norm": 1.951395869255066, + "learning_rate": 2.774367092680541e-05, + "loss": 0.2977, + "step": 66920 + }, + { + "epoch": 2.5842696629213484, + "grad_norm": 1.139452338218689, + "learning_rate": 2.7717929907203627e-05, + "loss": 0.1505, + "step": 66930 + }, + { + "epoch": 2.584655778215375, + "grad_norm": 1.4778863191604614, + "learning_rate": 2.769218888760184e-05, + "loss": 0.167, + "step": 66940 + }, + { + "epoch": 2.585041893509402, + "grad_norm": 0.3687165379524231, + "learning_rate": 2.766644786800005e-05, + "loss": 0.0876, + "step": 66950 + }, + { + "epoch": 2.5854280088034285, + "grad_norm": 0.3774222433567047, + "learning_rate": 2.7640706848398267e-05, + "loss": 0.0574, + "step": 66960 + }, + { + "epoch": 2.5858141240974555, + "grad_norm": 1.060309886932373, + "learning_rate": 2.761496582879648e-05, + "loss": 0.1978, + "step": 66970 + }, + { + "epoch": 2.5862002393914825, + "grad_norm": 0.041595011949539185, + "learning_rate": 2.758922480919469e-05, + "loss": 0.1617, + "step": 66980 + }, + { + "epoch": 2.586586354685509, + "grad_norm": 1.0350271463394165, + "learning_rate": 2.756348378959291e-05, + "loss": 0.1443, + "step": 66990 + }, + { + "epoch": 2.586972469979536, + "grad_norm": 3.072813034057617, + "learning_rate": 2.753774276999112e-05, + "loss": 0.1084, + "step": 67000 + }, + { + "epoch": 2.5873585852735626, + "grad_norm": 1.4295400381088257, + "learning_rate": 2.7512001750389334e-05, + "loss": 0.2752, + "step": 67010 + }, + { + "epoch": 2.5877447005675895, + "grad_norm": 0.904931366443634, + "learning_rate": 2.748626073078755e-05, + "loss": 0.1908, + "step": 67020 + }, + { + "epoch": 2.588130815861616, + "grad_norm": 0.12692487239837646, + "learning_rate": 2.746051971118576e-05, + "loss": 0.1956, + "step": 67030 + }, + { + "epoch": 2.588516931155643, + "grad_norm": 3.306731700897217, + "learning_rate": 2.7434778691583974e-05, + "loss": 0.1552, + "step": 67040 + }, + { + "epoch": 
2.58890304644967, + "grad_norm": 0.17633609473705292, + "learning_rate": 2.740903767198219e-05, + "loss": 0.2643, + "step": 67050 + }, + { + "epoch": 2.5892891617436966, + "grad_norm": 1.683794617652893, + "learning_rate": 2.7383296652380404e-05, + "loss": 0.1609, + "step": 67060 + }, + { + "epoch": 2.5896752770377236, + "grad_norm": 0.799902617931366, + "learning_rate": 2.7357555632778613e-05, + "loss": 0.1787, + "step": 67070 + }, + { + "epoch": 2.59006139233175, + "grad_norm": 0.41263818740844727, + "learning_rate": 2.733181461317683e-05, + "loss": 0.144, + "step": 67080 + }, + { + "epoch": 2.590447507625777, + "grad_norm": 0.018304159864783287, + "learning_rate": 2.7306073593575043e-05, + "loss": 0.3317, + "step": 67090 + }, + { + "epoch": 2.5908336229198037, + "grad_norm": 0.5893455743789673, + "learning_rate": 2.7280332573973257e-05, + "loss": 0.1116, + "step": 67100 + }, + { + "epoch": 2.5912197382138307, + "grad_norm": 0.04571494832634926, + "learning_rate": 2.7254591554371473e-05, + "loss": 0.0919, + "step": 67110 + }, + { + "epoch": 2.5916058535078577, + "grad_norm": 1.2573976516723633, + "learning_rate": 2.7228850534769683e-05, + "loss": 0.0927, + "step": 67120 + }, + { + "epoch": 2.591991968801884, + "grad_norm": 2.4016544818878174, + "learning_rate": 2.7203109515167897e-05, + "loss": 0.2308, + "step": 67130 + }, + { + "epoch": 2.592378084095911, + "grad_norm": 0.6153950691223145, + "learning_rate": 2.7177368495566113e-05, + "loss": 0.3398, + "step": 67140 + }, + { + "epoch": 2.5927641993899377, + "grad_norm": 0.638940155506134, + "learning_rate": 2.7151627475964327e-05, + "loss": 0.0721, + "step": 67150 + }, + { + "epoch": 2.5931503146839647, + "grad_norm": 2.8470890522003174, + "learning_rate": 2.7125886456362537e-05, + "loss": 0.3437, + "step": 67160 + }, + { + "epoch": 2.5935364299779913, + "grad_norm": 0.21816271543502808, + "learning_rate": 2.710014543676075e-05, + "loss": 0.2749, + "step": 67170 + }, + { + "epoch": 2.5939225452720183, + "grad_norm": 1.3057670593261719, + "learning_rate": 2.7074404417158967e-05, + "loss": 0.2564, + "step": 67180 + }, + { + "epoch": 2.5943086605660453, + "grad_norm": 3.9401612281799316, + "learning_rate": 2.7048663397557177e-05, + "loss": 0.1827, + "step": 67190 + }, + { + "epoch": 2.594694775860072, + "grad_norm": 0.09398512542247772, + "learning_rate": 2.702292237795539e-05, + "loss": 0.2714, + "step": 67200 + }, + { + "epoch": 2.5950808911540983, + "grad_norm": 1.3454128503799438, + "learning_rate": 2.6997181358353607e-05, + "loss": 0.1409, + "step": 67210 + }, + { + "epoch": 2.5954670064481253, + "grad_norm": 3.312215566635132, + "learning_rate": 2.697144033875182e-05, + "loss": 0.1655, + "step": 67220 + }, + { + "epoch": 2.5958531217421523, + "grad_norm": 0.40052536129951477, + "learning_rate": 2.694569931915003e-05, + "loss": 0.095, + "step": 67230 + }, + { + "epoch": 2.596239237036179, + "grad_norm": 2.1148762702941895, + "learning_rate": 2.6919958299548247e-05, + "loss": 0.1167, + "step": 67240 + }, + { + "epoch": 2.596625352330206, + "grad_norm": 0.539953887462616, + "learning_rate": 2.689421727994646e-05, + "loss": 0.2198, + "step": 67250 + }, + { + "epoch": 2.597011467624233, + "grad_norm": 0.6172623634338379, + "learning_rate": 2.6868476260344673e-05, + "loss": 0.0879, + "step": 67260 + }, + { + "epoch": 2.5973975829182594, + "grad_norm": 0.9759122729301453, + "learning_rate": 2.684273524074289e-05, + "loss": 0.1162, + "step": 67270 + }, + { + "epoch": 2.597783698212286, + "grad_norm": 1.2510347366333008, + "learning_rate": 
2.68169942211411e-05, + "loss": 0.1106, + "step": 67280 + }, + { + "epoch": 2.598169813506313, + "grad_norm": 1.3545809984207153, + "learning_rate": 2.6791253201539313e-05, + "loss": 0.1538, + "step": 67290 + }, + { + "epoch": 2.59855592880034, + "grad_norm": 0.4334702789783478, + "learning_rate": 2.676551218193753e-05, + "loss": 0.1178, + "step": 67300 + }, + { + "epoch": 2.5989420440943665, + "grad_norm": 0.7618227601051331, + "learning_rate": 2.6739771162335743e-05, + "loss": 0.158, + "step": 67310 + }, + { + "epoch": 2.5993281593883935, + "grad_norm": 2.0803728103637695, + "learning_rate": 2.6714030142733953e-05, + "loss": 0.2248, + "step": 67320 + }, + { + "epoch": 2.5997142746824204, + "grad_norm": 0.7592846751213074, + "learning_rate": 2.668828912313217e-05, + "loss": 0.139, + "step": 67330 + }, + { + "epoch": 2.600100389976447, + "grad_norm": 2.20271372795105, + "learning_rate": 2.6662548103530383e-05, + "loss": 0.2735, + "step": 67340 + }, + { + "epoch": 2.6004865052704735, + "grad_norm": 2.25789213180542, + "learning_rate": 2.6636807083928593e-05, + "loss": 0.1816, + "step": 67350 + }, + { + "epoch": 2.6008726205645005, + "grad_norm": 1.8669871091842651, + "learning_rate": 2.6611066064326813e-05, + "loss": 0.2003, + "step": 67360 + }, + { + "epoch": 2.6012587358585275, + "grad_norm": 2.2639665603637695, + "learning_rate": 2.6585325044725023e-05, + "loss": 0.2869, + "step": 67370 + }, + { + "epoch": 2.601644851152554, + "grad_norm": 0.5188022255897522, + "learning_rate": 2.6559584025123236e-05, + "loss": 0.1267, + "step": 67380 + }, + { + "epoch": 2.602030966446581, + "grad_norm": 0.7478063106536865, + "learning_rate": 2.6533843005521453e-05, + "loss": 0.1506, + "step": 67390 + }, + { + "epoch": 2.6024170817406076, + "grad_norm": 0.05027804523706436, + "learning_rate": 2.6508101985919663e-05, + "loss": 0.1469, + "step": 67400 + }, + { + "epoch": 2.6028031970346346, + "grad_norm": 1.4990183115005493, + "learning_rate": 2.6482360966317876e-05, + "loss": 0.0823, + "step": 67410 + }, + { + "epoch": 2.603189312328661, + "grad_norm": 0.23909913003444672, + "learning_rate": 2.645661994671609e-05, + "loss": 0.1971, + "step": 67420 + }, + { + "epoch": 2.603575427622688, + "grad_norm": 0.03295808658003807, + "learning_rate": 2.6430878927114306e-05, + "loss": 0.0741, + "step": 67430 + }, + { + "epoch": 2.603961542916715, + "grad_norm": 1.407315731048584, + "learning_rate": 2.6405137907512516e-05, + "loss": 0.115, + "step": 67440 + }, + { + "epoch": 2.6043476582107417, + "grad_norm": 2.2319045066833496, + "learning_rate": 2.637939688791073e-05, + "loss": 0.2297, + "step": 67450 + }, + { + "epoch": 2.6047337735047686, + "grad_norm": 0.40752896666526794, + "learning_rate": 2.6353655868308946e-05, + "loss": 0.132, + "step": 67460 + }, + { + "epoch": 2.605119888798795, + "grad_norm": 1.6817177534103394, + "learning_rate": 2.632791484870716e-05, + "loss": 0.1508, + "step": 67470 + }, + { + "epoch": 2.605506004092822, + "grad_norm": 1.18791663646698, + "learning_rate": 2.630217382910537e-05, + "loss": 0.1663, + "step": 67480 + }, + { + "epoch": 2.6058921193868487, + "grad_norm": 2.44256329536438, + "learning_rate": 2.6276432809503586e-05, + "loss": 0.1656, + "step": 67490 + }, + { + "epoch": 2.6062782346808757, + "grad_norm": 0.49040651321411133, + "learning_rate": 2.62506917899018e-05, + "loss": 0.227, + "step": 67500 + }, + { + "epoch": 2.6066643499749027, + "grad_norm": 0.6817883849143982, + "learning_rate": 2.622495077030001e-05, + "loss": 0.0833, + "step": 67510 + }, + { + "epoch": 
2.6070504652689293, + "grad_norm": 1.4250826835632324, + "learning_rate": 2.619920975069823e-05, + "loss": 0.0818, + "step": 67520 + }, + { + "epoch": 2.6074365805629562, + "grad_norm": 5.752524375915527, + "learning_rate": 2.617346873109644e-05, + "loss": 0.1824, + "step": 67530 + }, + { + "epoch": 2.607822695856983, + "grad_norm": 1.8009400367736816, + "learning_rate": 2.6147727711494653e-05, + "loss": 0.1534, + "step": 67540 + }, + { + "epoch": 2.6082088111510098, + "grad_norm": 0.7888918519020081, + "learning_rate": 2.612198669189287e-05, + "loss": 0.2632, + "step": 67550 + }, + { + "epoch": 2.6085949264450363, + "grad_norm": 1.1485899686813354, + "learning_rate": 2.609624567229108e-05, + "loss": 0.2014, + "step": 67560 + }, + { + "epoch": 2.6089810417390633, + "grad_norm": 0.017272522673010826, + "learning_rate": 2.6070504652689293e-05, + "loss": 0.1698, + "step": 67570 + }, + { + "epoch": 2.6093671570330903, + "grad_norm": 0.23312939703464508, + "learning_rate": 2.604476363308751e-05, + "loss": 0.11, + "step": 67580 + }, + { + "epoch": 2.609753272327117, + "grad_norm": 1.5409551858901978, + "learning_rate": 2.6019022613485723e-05, + "loss": 0.2031, + "step": 67590 + }, + { + "epoch": 2.610139387621144, + "grad_norm": 2.757416248321533, + "learning_rate": 2.5993281593883933e-05, + "loss": 0.2326, + "step": 67600 + }, + { + "epoch": 2.6105255029151704, + "grad_norm": 0.03714454174041748, + "learning_rate": 2.5967540574282153e-05, + "loss": 0.305, + "step": 67610 + }, + { + "epoch": 2.6109116182091974, + "grad_norm": 1.5993083715438843, + "learning_rate": 2.5941799554680363e-05, + "loss": 0.0492, + "step": 67620 + }, + { + "epoch": 2.611297733503224, + "grad_norm": 3.3023600578308105, + "learning_rate": 2.5916058535078576e-05, + "loss": 0.1256, + "step": 67630 + }, + { + "epoch": 2.611683848797251, + "grad_norm": 2.0448055267333984, + "learning_rate": 2.5890317515476793e-05, + "loss": 0.1496, + "step": 67640 + }, + { + "epoch": 2.612069964091278, + "grad_norm": 2.056248188018799, + "learning_rate": 2.5864576495875002e-05, + "loss": 0.1396, + "step": 67650 + }, + { + "epoch": 2.6124560793853044, + "grad_norm": 0.7189445495605469, + "learning_rate": 2.5838835476273216e-05, + "loss": 0.1375, + "step": 67660 + }, + { + "epoch": 2.612842194679331, + "grad_norm": 0.3849039077758789, + "learning_rate": 2.5813094456671426e-05, + "loss": 0.0882, + "step": 67670 + }, + { + "epoch": 2.613228309973358, + "grad_norm": 1.2116458415985107, + "learning_rate": 2.5787353437069646e-05, + "loss": 0.0892, + "step": 67680 + }, + { + "epoch": 2.613614425267385, + "grad_norm": 0.5601721405982971, + "learning_rate": 2.5761612417467856e-05, + "loss": 0.148, + "step": 67690 + }, + { + "epoch": 2.6140005405614115, + "grad_norm": 1.1883691549301147, + "learning_rate": 2.573587139786607e-05, + "loss": 0.1673, + "step": 67700 + }, + { + "epoch": 2.6143866558554385, + "grad_norm": 0.8299083113670349, + "learning_rate": 2.5710130378264286e-05, + "loss": 0.1871, + "step": 67710 + }, + { + "epoch": 2.6147727711494655, + "grad_norm": 0.6316946744918823, + "learning_rate": 2.5684389358662496e-05, + "loss": 0.0707, + "step": 67720 + }, + { + "epoch": 2.615158886443492, + "grad_norm": 0.5716143846511841, + "learning_rate": 2.565864833906071e-05, + "loss": 0.0993, + "step": 67730 + }, + { + "epoch": 2.6155450017375186, + "grad_norm": 0.6665957570075989, + "learning_rate": 2.5632907319458926e-05, + "loss": 0.1402, + "step": 67740 + }, + { + "epoch": 2.6159311170315456, + "grad_norm": 1.1837033033370972, + "learning_rate": 
2.560716629985714e-05, + "loss": 0.0963, + "step": 67750 + }, + { + "epoch": 2.6163172323255726, + "grad_norm": 0.7204211354255676, + "learning_rate": 2.558142528025535e-05, + "loss": 0.1109, + "step": 67760 + }, + { + "epoch": 2.616703347619599, + "grad_norm": 0.04177774861454964, + "learning_rate": 2.555568426065357e-05, + "loss": 0.206, + "step": 67770 + }, + { + "epoch": 2.617089462913626, + "grad_norm": 0.8528016805648804, + "learning_rate": 2.552994324105178e-05, + "loss": 0.1477, + "step": 67780 + }, + { + "epoch": 2.6174755782076526, + "grad_norm": 1.8284223079681396, + "learning_rate": 2.5504202221449992e-05, + "loss": 0.2925, + "step": 67790 + }, + { + "epoch": 2.6178616935016796, + "grad_norm": 2.03399658203125, + "learning_rate": 2.547846120184821e-05, + "loss": 0.1716, + "step": 67800 + }, + { + "epoch": 2.618247808795706, + "grad_norm": 1.1871380805969238, + "learning_rate": 2.545272018224642e-05, + "loss": 0.1387, + "step": 67810 + }, + { + "epoch": 2.618633924089733, + "grad_norm": 0.3045734465122223, + "learning_rate": 2.5426979162644632e-05, + "loss": 0.1624, + "step": 67820 + }, + { + "epoch": 2.61902003938376, + "grad_norm": 1.4469716548919678, + "learning_rate": 2.540123814304285e-05, + "loss": 0.1534, + "step": 67830 + }, + { + "epoch": 2.6194061546777867, + "grad_norm": 0.7630550861358643, + "learning_rate": 2.5375497123441062e-05, + "loss": 0.1266, + "step": 67840 + }, + { + "epoch": 2.6197922699718137, + "grad_norm": 1.3296400308609009, + "learning_rate": 2.5349756103839272e-05, + "loss": 0.3268, + "step": 67850 + }, + { + "epoch": 2.6201783852658402, + "grad_norm": 0.7620146870613098, + "learning_rate": 2.532401508423749e-05, + "loss": 0.1747, + "step": 67860 + }, + { + "epoch": 2.6205645005598672, + "grad_norm": 1.4850629568099976, + "learning_rate": 2.5298274064635702e-05, + "loss": 0.1165, + "step": 67870 + }, + { + "epoch": 2.6209506158538938, + "grad_norm": 0.30754703283309937, + "learning_rate": 2.5272533045033915e-05, + "loss": 0.1603, + "step": 67880 + }, + { + "epoch": 2.6213367311479208, + "grad_norm": 0.47044405341148376, + "learning_rate": 2.5246792025432132e-05, + "loss": 0.1471, + "step": 67890 + }, + { + "epoch": 2.6217228464419478, + "grad_norm": 1.428301453590393, + "learning_rate": 2.5221051005830342e-05, + "loss": 0.1448, + "step": 67900 + }, + { + "epoch": 2.6221089617359743, + "grad_norm": 0.3132546544075012, + "learning_rate": 2.5195309986228555e-05, + "loss": 0.1899, + "step": 67910 + }, + { + "epoch": 2.6224950770300013, + "grad_norm": 0.36817577481269836, + "learning_rate": 2.5169568966626772e-05, + "loss": 0.2864, + "step": 67920 + }, + { + "epoch": 2.622881192324028, + "grad_norm": 0.8652348518371582, + "learning_rate": 2.5143827947024985e-05, + "loss": 0.0987, + "step": 67930 + }, + { + "epoch": 2.623267307618055, + "grad_norm": 0.5235974788665771, + "learning_rate": 2.5118086927423195e-05, + "loss": 0.3079, + "step": 67940 + }, + { + "epoch": 2.6236534229120814, + "grad_norm": 0.2852037847042084, + "learning_rate": 2.509234590782141e-05, + "loss": 0.1217, + "step": 67950 + }, + { + "epoch": 2.6240395382061084, + "grad_norm": 0.6256119608879089, + "learning_rate": 2.5066604888219625e-05, + "loss": 0.1645, + "step": 67960 + }, + { + "epoch": 2.6244256535001353, + "grad_norm": 1.0917539596557617, + "learning_rate": 2.5040863868617835e-05, + "loss": 0.1589, + "step": 67970 + }, + { + "epoch": 2.624811768794162, + "grad_norm": 1.1311125755310059, + "learning_rate": 2.501512284901605e-05, + "loss": 0.2388, + "step": 67980 + }, + { + "epoch": 
2.625197884088189, + "grad_norm": 0.806238055229187, + "learning_rate": 2.4989381829414265e-05, + "loss": 0.2127, + "step": 67990 + }, + { + "epoch": 2.6255839993822154, + "grad_norm": 1.4400973320007324, + "learning_rate": 2.496364080981248e-05, + "loss": 0.0687, + "step": 68000 + }, + { + "epoch": 2.6259701146762424, + "grad_norm": 0.050153911113739014, + "learning_rate": 2.4937899790210692e-05, + "loss": 0.0564, + "step": 68010 + }, + { + "epoch": 2.626356229970269, + "grad_norm": 1.139260172843933, + "learning_rate": 2.4912158770608905e-05, + "loss": 0.0868, + "step": 68020 + }, + { + "epoch": 2.626742345264296, + "grad_norm": 1.2839637994766235, + "learning_rate": 2.488641775100712e-05, + "loss": 0.1589, + "step": 68030 + }, + { + "epoch": 2.627128460558323, + "grad_norm": 0.19808660447597504, + "learning_rate": 2.4860676731405332e-05, + "loss": 0.2015, + "step": 68040 + }, + { + "epoch": 2.6275145758523495, + "grad_norm": 0.38413748145103455, + "learning_rate": 2.4834935711803545e-05, + "loss": 0.1181, + "step": 68050 + }, + { + "epoch": 2.627900691146376, + "grad_norm": 0.5252083539962769, + "learning_rate": 2.480919469220176e-05, + "loss": 0.2141, + "step": 68060 + }, + { + "epoch": 2.628286806440403, + "grad_norm": 0.3906213641166687, + "learning_rate": 2.4783453672599975e-05, + "loss": 0.3083, + "step": 68070 + }, + { + "epoch": 2.62867292173443, + "grad_norm": 2.1290669441223145, + "learning_rate": 2.4757712652998185e-05, + "loss": 0.2155, + "step": 68080 + }, + { + "epoch": 2.6290590370284566, + "grad_norm": 1.0176451206207275, + "learning_rate": 2.4731971633396402e-05, + "loss": 0.1852, + "step": 68090 + }, + { + "epoch": 2.6294451523224835, + "grad_norm": 2.1066977977752686, + "learning_rate": 2.4706230613794615e-05, + "loss": 0.2177, + "step": 68100 + }, + { + "epoch": 2.6298312676165105, + "grad_norm": 2.267906427383423, + "learning_rate": 2.4680489594192825e-05, + "loss": 0.1359, + "step": 68110 + }, + { + "epoch": 2.630217382910537, + "grad_norm": 1.4150601625442505, + "learning_rate": 2.465474857459104e-05, + "loss": 0.1805, + "step": 68120 + }, + { + "epoch": 2.6306034982045636, + "grad_norm": 0.41347402334213257, + "learning_rate": 2.462900755498925e-05, + "loss": 0.095, + "step": 68130 + }, + { + "epoch": 2.6309896134985906, + "grad_norm": 1.3140255212783813, + "learning_rate": 2.4603266535387468e-05, + "loss": 0.0693, + "step": 68140 + }, + { + "epoch": 2.6313757287926176, + "grad_norm": 1.2731821537017822, + "learning_rate": 2.457752551578568e-05, + "loss": 0.2528, + "step": 68150 + }, + { + "epoch": 2.631761844086644, + "grad_norm": 2.4566001892089844, + "learning_rate": 2.4551784496183895e-05, + "loss": 0.1169, + "step": 68160 + }, + { + "epoch": 2.632147959380671, + "grad_norm": 2.265305280685425, + "learning_rate": 2.4526043476582108e-05, + "loss": 0.154, + "step": 68170 + }, + { + "epoch": 2.632534074674698, + "grad_norm": 0.5196200609207153, + "learning_rate": 2.450030245698032e-05, + "loss": 0.1029, + "step": 68180 + }, + { + "epoch": 2.6329201899687247, + "grad_norm": 2.5574257373809814, + "learning_rate": 2.4474561437378535e-05, + "loss": 0.2491, + "step": 68190 + }, + { + "epoch": 2.6333063052627512, + "grad_norm": 0.40821412205696106, + "learning_rate": 2.4448820417776748e-05, + "loss": 0.1842, + "step": 68200 + }, + { + "epoch": 2.633692420556778, + "grad_norm": 0.9594093561172485, + "learning_rate": 2.4423079398174965e-05, + "loss": 0.2135, + "step": 68210 + }, + { + "epoch": 2.634078535850805, + "grad_norm": 0.3707121014595032, + "learning_rate": 
2.4397338378573175e-05, + "loss": 0.1715, + "step": 68220 + }, + { + "epoch": 2.6344646511448317, + "grad_norm": 0.9233579039573669, + "learning_rate": 2.437159735897139e-05, + "loss": 0.3378, + "step": 68230 + }, + { + "epoch": 2.6348507664388587, + "grad_norm": 1.8800396919250488, + "learning_rate": 2.4345856339369605e-05, + "loss": 0.0902, + "step": 68240 + }, + { + "epoch": 2.6352368817328853, + "grad_norm": 1.0025197267532349, + "learning_rate": 2.4320115319767818e-05, + "loss": 0.2189, + "step": 68250 + }, + { + "epoch": 2.6356229970269123, + "grad_norm": 0.03650035336613655, + "learning_rate": 2.429437430016603e-05, + "loss": 0.2025, + "step": 68260 + }, + { + "epoch": 2.636009112320939, + "grad_norm": 0.20000745356082916, + "learning_rate": 2.4268633280564245e-05, + "loss": 0.1395, + "step": 68270 + }, + { + "epoch": 2.636395227614966, + "grad_norm": 0.7981158494949341, + "learning_rate": 2.4242892260962458e-05, + "loss": 0.0875, + "step": 68280 + }, + { + "epoch": 2.636781342908993, + "grad_norm": 1.6767163276672363, + "learning_rate": 2.421715124136067e-05, + "loss": 0.2558, + "step": 68290 + }, + { + "epoch": 2.6371674582030193, + "grad_norm": 2.023684024810791, + "learning_rate": 2.4191410221758885e-05, + "loss": 0.2657, + "step": 68300 + }, + { + "epoch": 2.6375535734970463, + "grad_norm": 1.0396549701690674, + "learning_rate": 2.4165669202157098e-05, + "loss": 0.1199, + "step": 68310 + }, + { + "epoch": 2.637939688791073, + "grad_norm": 0.8373544216156006, + "learning_rate": 2.4139928182555315e-05, + "loss": 0.1273, + "step": 68320 + }, + { + "epoch": 2.6383258040851, + "grad_norm": 1.0113970041275024, + "learning_rate": 2.4114187162953525e-05, + "loss": 0.1111, + "step": 68330 + }, + { + "epoch": 2.6387119193791264, + "grad_norm": 0.08151128888130188, + "learning_rate": 2.4088446143351738e-05, + "loss": 0.1949, + "step": 68340 + }, + { + "epoch": 2.6390980346731534, + "grad_norm": 1.629394292831421, + "learning_rate": 2.4062705123749955e-05, + "loss": 0.1135, + "step": 68350 + }, + { + "epoch": 2.6394841499671804, + "grad_norm": 0.11504169553518295, + "learning_rate": 2.4036964104148165e-05, + "loss": 0.1591, + "step": 68360 + }, + { + "epoch": 2.639870265261207, + "grad_norm": 1.0481438636779785, + "learning_rate": 2.401122308454638e-05, + "loss": 0.1151, + "step": 68370 + }, + { + "epoch": 2.640256380555234, + "grad_norm": 1.7637771368026733, + "learning_rate": 2.3985482064944595e-05, + "loss": 0.2499, + "step": 68380 + }, + { + "epoch": 2.6406424958492605, + "grad_norm": 0.43667173385620117, + "learning_rate": 2.3959741045342808e-05, + "loss": 0.0512, + "step": 68390 + }, + { + "epoch": 2.6410286111432875, + "grad_norm": 0.9862222075462341, + "learning_rate": 2.393400002574102e-05, + "loss": 0.1, + "step": 68400 + }, + { + "epoch": 2.641414726437314, + "grad_norm": 0.996530294418335, + "learning_rate": 2.3908259006139234e-05, + "loss": 0.1679, + "step": 68410 + }, + { + "epoch": 2.641800841731341, + "grad_norm": 1.451374888420105, + "learning_rate": 2.3882517986537448e-05, + "loss": 0.2103, + "step": 68420 + }, + { + "epoch": 2.642186957025368, + "grad_norm": 0.2730307877063751, + "learning_rate": 2.385677696693566e-05, + "loss": 0.1522, + "step": 68430 + }, + { + "epoch": 2.6425730723193945, + "grad_norm": 0.06040269508957863, + "learning_rate": 2.3831035947333874e-05, + "loss": 0.1278, + "step": 68440 + }, + { + "epoch": 2.6429591876134215, + "grad_norm": 1.716790795326233, + "learning_rate": 2.3805294927732088e-05, + "loss": 0.2005, + "step": 68450 + }, + { + "epoch": 
2.643345302907448, + "grad_norm": 1.4266630411148071, + "learning_rate": 2.3779553908130304e-05, + "loss": 0.084, + "step": 68460 + }, + { + "epoch": 2.643731418201475, + "grad_norm": 0.24637824296951294, + "learning_rate": 2.3753812888528514e-05, + "loss": 0.1616, + "step": 68470 + }, + { + "epoch": 2.6441175334955016, + "grad_norm": 0.1911696344614029, + "learning_rate": 2.372807186892673e-05, + "loss": 0.2609, + "step": 68480 + }, + { + "epoch": 2.6445036487895286, + "grad_norm": 0.5890191793441772, + "learning_rate": 2.3702330849324944e-05, + "loss": 0.0889, + "step": 68490 + }, + { + "epoch": 2.6448897640835556, + "grad_norm": 1.8803762197494507, + "learning_rate": 2.3676589829723154e-05, + "loss": 0.1945, + "step": 68500 + }, + { + "epoch": 2.645275879377582, + "grad_norm": 0.13470906019210815, + "learning_rate": 2.365084881012137e-05, + "loss": 0.2507, + "step": 68510 + }, + { + "epoch": 2.6456619946716087, + "grad_norm": 0.5220600962638855, + "learning_rate": 2.362510779051958e-05, + "loss": 0.1619, + "step": 68520 + }, + { + "epoch": 2.6460481099656357, + "grad_norm": 0.04346944019198418, + "learning_rate": 2.3599366770917798e-05, + "loss": 0.162, + "step": 68530 + }, + { + "epoch": 2.6464342252596627, + "grad_norm": 1.4460773468017578, + "learning_rate": 2.357362575131601e-05, + "loss": 0.2857, + "step": 68540 + }, + { + "epoch": 2.646820340553689, + "grad_norm": 1.9554592370986938, + "learning_rate": 2.3547884731714224e-05, + "loss": 0.1108, + "step": 68550 + }, + { + "epoch": 2.647206455847716, + "grad_norm": 0.596594512462616, + "learning_rate": 2.3522143712112438e-05, + "loss": 0.1234, + "step": 68560 + }, + { + "epoch": 2.647592571141743, + "grad_norm": 0.4433450996875763, + "learning_rate": 2.349640269251065e-05, + "loss": 0.2316, + "step": 68570 + }, + { + "epoch": 2.6479786864357697, + "grad_norm": 0.5461844801902771, + "learning_rate": 2.3470661672908864e-05, + "loss": 0.3235, + "step": 68580 + }, + { + "epoch": 2.6483648017297963, + "grad_norm": 0.3693888187408447, + "learning_rate": 2.3444920653307077e-05, + "loss": 0.2776, + "step": 68590 + }, + { + "epoch": 2.6487509170238233, + "grad_norm": 0.19041050970554352, + "learning_rate": 2.3419179633705294e-05, + "loss": 0.14, + "step": 68600 + }, + { + "epoch": 2.6491370323178502, + "grad_norm": 0.6536568999290466, + "learning_rate": 2.3393438614103504e-05, + "loss": 0.1418, + "step": 68610 + }, + { + "epoch": 2.649523147611877, + "grad_norm": 0.32842710614204407, + "learning_rate": 2.336769759450172e-05, + "loss": 0.1917, + "step": 68620 + }, + { + "epoch": 2.649909262905904, + "grad_norm": 1.8785744905471802, + "learning_rate": 2.3341956574899934e-05, + "loss": 0.0986, + "step": 68630 + }, + { + "epoch": 2.6502953781999308, + "grad_norm": 1.0904650688171387, + "learning_rate": 2.3316215555298147e-05, + "loss": 0.1419, + "step": 68640 + }, + { + "epoch": 2.6506814934939573, + "grad_norm": 0.49260103702545166, + "learning_rate": 2.329047453569636e-05, + "loss": 0.1906, + "step": 68650 + }, + { + "epoch": 2.651067608787984, + "grad_norm": 0.6473127007484436, + "learning_rate": 2.326473351609457e-05, + "loss": 0.1585, + "step": 68660 + }, + { + "epoch": 2.651453724082011, + "grad_norm": 0.6146073937416077, + "learning_rate": 2.3238992496492787e-05, + "loss": 0.1425, + "step": 68670 + }, + { + "epoch": 2.651839839376038, + "grad_norm": 1.9327075481414795, + "learning_rate": 2.3213251476891e-05, + "loss": 0.1849, + "step": 68680 + }, + { + "epoch": 2.6522259546700644, + "grad_norm": 2.2953224182128906, + "learning_rate": 
2.3187510457289214e-05, + "loss": 0.2707, + "step": 68690 + }, + { + "epoch": 2.6526120699640914, + "grad_norm": 0.1470266729593277, + "learning_rate": 2.3161769437687427e-05, + "loss": 0.1072, + "step": 68700 + }, + { + "epoch": 2.652998185258118, + "grad_norm": 0.8499718308448792, + "learning_rate": 2.3136028418085644e-05, + "loss": 0.1817, + "step": 68710 + }, + { + "epoch": 2.653384300552145, + "grad_norm": 0.09872210770845413, + "learning_rate": 2.3110287398483854e-05, + "loss": 0.0622, + "step": 68720 + }, + { + "epoch": 2.6537704158461715, + "grad_norm": 0.6662464737892151, + "learning_rate": 2.3084546378882067e-05, + "loss": 0.1477, + "step": 68730 + }, + { + "epoch": 2.6541565311401984, + "grad_norm": 0.008908030577003956, + "learning_rate": 2.3058805359280284e-05, + "loss": 0.1977, + "step": 68740 + }, + { + "epoch": 2.6545426464342254, + "grad_norm": 0.6658633947372437, + "learning_rate": 2.3033064339678494e-05, + "loss": 0.1416, + "step": 68750 + }, + { + "epoch": 2.654928761728252, + "grad_norm": 2.4253549575805664, + "learning_rate": 2.300732332007671e-05, + "loss": 0.223, + "step": 68760 + }, + { + "epoch": 2.655314877022279, + "grad_norm": 0.08092183619737625, + "learning_rate": 2.2981582300474924e-05, + "loss": 0.1287, + "step": 68770 + }, + { + "epoch": 2.6557009923163055, + "grad_norm": 1.8364213705062866, + "learning_rate": 2.2955841280873137e-05, + "loss": 0.1968, + "step": 68780 + }, + { + "epoch": 2.6560871076103325, + "grad_norm": 0.2436826527118683, + "learning_rate": 2.293010026127135e-05, + "loss": 0.1102, + "step": 68790 + }, + { + "epoch": 2.656473222904359, + "grad_norm": 1.2653074264526367, + "learning_rate": 2.2904359241669564e-05, + "loss": 0.2286, + "step": 68800 + }, + { + "epoch": 2.656859338198386, + "grad_norm": 0.14631232619285583, + "learning_rate": 2.2878618222067777e-05, + "loss": 0.1099, + "step": 68810 + }, + { + "epoch": 2.657245453492413, + "grad_norm": 0.02367425337433815, + "learning_rate": 2.285287720246599e-05, + "loss": 0.1272, + "step": 68820 + }, + { + "epoch": 2.6576315687864396, + "grad_norm": 0.3960202932357788, + "learning_rate": 2.2827136182864204e-05, + "loss": 0.1486, + "step": 68830 + }, + { + "epoch": 2.6580176840804666, + "grad_norm": 0.7019187808036804, + "learning_rate": 2.2801395163262417e-05, + "loss": 0.137, + "step": 68840 + }, + { + "epoch": 2.658403799374493, + "grad_norm": 1.1136587858200073, + "learning_rate": 2.2775654143660634e-05, + "loss": 0.1244, + "step": 68850 + }, + { + "epoch": 2.65878991466852, + "grad_norm": 0.8158296942710876, + "learning_rate": 2.2749913124058844e-05, + "loss": 0.0528, + "step": 68860 + }, + { + "epoch": 2.6591760299625467, + "grad_norm": 2.001655101776123, + "learning_rate": 2.272417210445706e-05, + "loss": 0.0705, + "step": 68870 + }, + { + "epoch": 2.6595621452565736, + "grad_norm": 0.2512793242931366, + "learning_rate": 2.2698431084855274e-05, + "loss": 0.0891, + "step": 68880 + }, + { + "epoch": 2.6599482605506006, + "grad_norm": 2.8973586559295654, + "learning_rate": 2.2672690065253484e-05, + "loss": 0.0921, + "step": 68890 + }, + { + "epoch": 2.660334375844627, + "grad_norm": 2.393480062484741, + "learning_rate": 2.26469490456517e-05, + "loss": 0.2726, + "step": 68900 + }, + { + "epoch": 2.660720491138654, + "grad_norm": 3.534479856491089, + "learning_rate": 2.2621208026049914e-05, + "loss": 0.2297, + "step": 68910 + }, + { + "epoch": 2.6611066064326807, + "grad_norm": 1.4901084899902344, + "learning_rate": 2.2595467006448127e-05, + "loss": 0.1104, + "step": 68920 + }, + { + 
"epoch": 2.6614927217267077, + "grad_norm": 1.3615870475769043, + "learning_rate": 2.256972598684634e-05, + "loss": 0.2267, + "step": 68930 + }, + { + "epoch": 2.6618788370207342, + "grad_norm": 0.26768797636032104, + "learning_rate": 2.2543984967244554e-05, + "loss": 0.0842, + "step": 68940 + }, + { + "epoch": 2.6622649523147612, + "grad_norm": 0.5720809102058411, + "learning_rate": 2.2518243947642767e-05, + "loss": 0.0955, + "step": 68950 + }, + { + "epoch": 2.6626510676087882, + "grad_norm": 0.8448322415351868, + "learning_rate": 2.249250292804098e-05, + "loss": 0.1251, + "step": 68960 + }, + { + "epoch": 2.6630371829028148, + "grad_norm": 2.9490509033203125, + "learning_rate": 2.2466761908439193e-05, + "loss": 0.1593, + "step": 68970 + }, + { + "epoch": 2.6634232981968413, + "grad_norm": 1.1557024717330933, + "learning_rate": 2.2441020888837407e-05, + "loss": 0.1336, + "step": 68980 + }, + { + "epoch": 2.6638094134908683, + "grad_norm": 2.981727361679077, + "learning_rate": 2.2415279869235623e-05, + "loss": 0.1487, + "step": 68990 + }, + { + "epoch": 2.6641955287848953, + "grad_norm": 0.5381894707679749, + "learning_rate": 2.2389538849633833e-05, + "loss": 0.1814, + "step": 69000 + }, + { + "epoch": 2.664581644078922, + "grad_norm": 0.833191990852356, + "learning_rate": 2.236379783003205e-05, + "loss": 0.1515, + "step": 69010 + }, + { + "epoch": 2.664967759372949, + "grad_norm": 0.1587102711200714, + "learning_rate": 2.2338056810430263e-05, + "loss": 0.0568, + "step": 69020 + }, + { + "epoch": 2.665353874666976, + "grad_norm": 1.5014970302581787, + "learning_rate": 2.2312315790828477e-05, + "loss": 0.1992, + "step": 69030 + }, + { + "epoch": 2.6657399899610024, + "grad_norm": 0.060449715703725815, + "learning_rate": 2.228657477122669e-05, + "loss": 0.1523, + "step": 69040 + }, + { + "epoch": 2.666126105255029, + "grad_norm": 1.7199037075042725, + "learning_rate": 2.22608337516249e-05, + "loss": 0.1056, + "step": 69050 + }, + { + "epoch": 2.666512220549056, + "grad_norm": 0.430899441242218, + "learning_rate": 2.2235092732023117e-05, + "loss": 0.1376, + "step": 69060 + }, + { + "epoch": 2.666898335843083, + "grad_norm": 0.16108714044094086, + "learning_rate": 2.220935171242133e-05, + "loss": 0.1109, + "step": 69070 + }, + { + "epoch": 2.6672844511371094, + "grad_norm": 3.1773228645324707, + "learning_rate": 2.2183610692819543e-05, + "loss": 0.3158, + "step": 69080 + }, + { + "epoch": 2.6676705664311364, + "grad_norm": 1.568304419517517, + "learning_rate": 2.2157869673217757e-05, + "loss": 0.2408, + "step": 69090 + }, + { + "epoch": 2.668056681725163, + "grad_norm": 1.8924100399017334, + "learning_rate": 2.2132128653615973e-05, + "loss": 0.2268, + "step": 69100 + }, + { + "epoch": 2.66844279701919, + "grad_norm": 2.1422247886657715, + "learning_rate": 2.2106387634014183e-05, + "loss": 0.1685, + "step": 69110 + }, + { + "epoch": 2.6688289123132165, + "grad_norm": 0.727570652961731, + "learning_rate": 2.2080646614412396e-05, + "loss": 0.3019, + "step": 69120 + }, + { + "epoch": 2.6692150276072435, + "grad_norm": 1.1987897157669067, + "learning_rate": 2.2054905594810613e-05, + "loss": 0.1511, + "step": 69130 + }, + { + "epoch": 2.6696011429012705, + "grad_norm": 2.25412654876709, + "learning_rate": 2.2029164575208823e-05, + "loss": 0.1538, + "step": 69140 + }, + { + "epoch": 2.669987258195297, + "grad_norm": 0.4829877018928528, + "learning_rate": 2.200342355560704e-05, + "loss": 0.2204, + "step": 69150 + }, + { + "epoch": 2.670373373489324, + "grad_norm": 0.8249949812889099, + 
"learning_rate": 2.1977682536005253e-05, + "loss": 0.1574, + "step": 69160 + }, + { + "epoch": 2.6707594887833506, + "grad_norm": 0.47408896684646606, + "learning_rate": 2.1951941516403466e-05, + "loss": 0.131, + "step": 69170 + }, + { + "epoch": 2.6711456040773776, + "grad_norm": 3.192263126373291, + "learning_rate": 2.192620049680168e-05, + "loss": 0.1284, + "step": 69180 + }, + { + "epoch": 2.671531719371404, + "grad_norm": 1.6318609714508057, + "learning_rate": 2.1900459477199893e-05, + "loss": 0.182, + "step": 69190 + }, + { + "epoch": 2.671917834665431, + "grad_norm": 2.031730890274048, + "learning_rate": 2.1874718457598106e-05, + "loss": 0.1774, + "step": 69200 + }, + { + "epoch": 2.672303949959458, + "grad_norm": 0.053225722163915634, + "learning_rate": 2.184897743799632e-05, + "loss": 0.2002, + "step": 69210 + }, + { + "epoch": 2.6726900652534846, + "grad_norm": 1.1087912321090698, + "learning_rate": 2.1823236418394533e-05, + "loss": 0.3255, + "step": 69220 + }, + { + "epoch": 2.6730761805475116, + "grad_norm": 1.7376277446746826, + "learning_rate": 2.1797495398792746e-05, + "loss": 0.1705, + "step": 69230 + }, + { + "epoch": 2.673462295841538, + "grad_norm": 0.7733955383300781, + "learning_rate": 2.1771754379190963e-05, + "loss": 0.2284, + "step": 69240 + }, + { + "epoch": 2.673848411135565, + "grad_norm": 2.198826313018799, + "learning_rate": 2.1746013359589173e-05, + "loss": 0.2463, + "step": 69250 + }, + { + "epoch": 2.6742345264295917, + "grad_norm": 0.9791239500045776, + "learning_rate": 2.172027233998739e-05, + "loss": 0.365, + "step": 69260 + }, + { + "epoch": 2.6746206417236187, + "grad_norm": 1.0145782232284546, + "learning_rate": 2.1694531320385603e-05, + "loss": 0.1168, + "step": 69270 + }, + { + "epoch": 2.6750067570176457, + "grad_norm": 1.506508708000183, + "learning_rate": 2.1668790300783813e-05, + "loss": 0.1254, + "step": 69280 + }, + { + "epoch": 2.675392872311672, + "grad_norm": 0.9859924912452698, + "learning_rate": 2.164304928118203e-05, + "loss": 0.2124, + "step": 69290 + }, + { + "epoch": 2.675778987605699, + "grad_norm": 1.4127247333526611, + "learning_rate": 2.1617308261580243e-05, + "loss": 0.1644, + "step": 69300 + }, + { + "epoch": 2.6761651028997258, + "grad_norm": 0.8753447532653809, + "learning_rate": 2.1591567241978456e-05, + "loss": 0.1948, + "step": 69310 + }, + { + "epoch": 2.6765512181937527, + "grad_norm": 0.18299230933189392, + "learning_rate": 2.156582622237667e-05, + "loss": 0.1042, + "step": 69320 + }, + { + "epoch": 2.6769373334877793, + "grad_norm": 0.9009674191474915, + "learning_rate": 2.1540085202774883e-05, + "loss": 0.0899, + "step": 69330 + }, + { + "epoch": 2.6773234487818063, + "grad_norm": 0.8263937830924988, + "learning_rate": 2.1514344183173096e-05, + "loss": 0.0843, + "step": 69340 + }, + { + "epoch": 2.6777095640758333, + "grad_norm": 0.40176376700401306, + "learning_rate": 2.148860316357131e-05, + "loss": 0.1645, + "step": 69350 + }, + { + "epoch": 2.67809567936986, + "grad_norm": 1.2371177673339844, + "learning_rate": 2.1462862143969523e-05, + "loss": 0.1491, + "step": 69360 + }, + { + "epoch": 2.6784817946638864, + "grad_norm": 0.6874446272850037, + "learning_rate": 2.1437121124367736e-05, + "loss": 0.1887, + "step": 69370 + }, + { + "epoch": 2.6788679099579134, + "grad_norm": 0.08807168155908585, + "learning_rate": 2.1411380104765953e-05, + "loss": 0.1049, + "step": 69380 + }, + { + "epoch": 2.6792540252519403, + "grad_norm": 2.0971579551696777, + "learning_rate": 2.1385639085164163e-05, + "loss": 0.135, + "step": 69390 
+ }, + { + "epoch": 2.679640140545967, + "grad_norm": 0.9297891855239868, + "learning_rate": 2.135989806556238e-05, + "loss": 0.2356, + "step": 69400 + }, + { + "epoch": 2.680026255839994, + "grad_norm": 2.100465774536133, + "learning_rate": 2.1334157045960593e-05, + "loss": 0.225, + "step": 69410 + }, + { + "epoch": 2.680412371134021, + "grad_norm": 0.14785470068454742, + "learning_rate": 2.1308416026358806e-05, + "loss": 0.1806, + "step": 69420 + }, + { + "epoch": 2.6807984864280474, + "grad_norm": 0.03883717209100723, + "learning_rate": 2.128267500675702e-05, + "loss": 0.0575, + "step": 69430 + }, + { + "epoch": 2.681184601722074, + "grad_norm": 0.515643835067749, + "learning_rate": 2.125693398715523e-05, + "loss": 0.1095, + "step": 69440 + }, + { + "epoch": 2.681570717016101, + "grad_norm": 0.21258410811424255, + "learning_rate": 2.1231192967553446e-05, + "loss": 0.1496, + "step": 69450 + }, + { + "epoch": 2.681956832310128, + "grad_norm": 1.14195716381073, + "learning_rate": 2.120545194795166e-05, + "loss": 0.182, + "step": 69460 + }, + { + "epoch": 2.6823429476041545, + "grad_norm": 0.43386051058769226, + "learning_rate": 2.1179710928349873e-05, + "loss": 0.2241, + "step": 69470 + }, + { + "epoch": 2.6827290628981815, + "grad_norm": 0.4654422700405121, + "learning_rate": 2.1153969908748086e-05, + "loss": 0.2264, + "step": 69480 + }, + { + "epoch": 2.6831151781922085, + "grad_norm": 0.8086020350456238, + "learning_rate": 2.1128228889146303e-05, + "loss": 0.1634, + "step": 69490 + }, + { + "epoch": 2.683501293486235, + "grad_norm": 0.03701169416308403, + "learning_rate": 2.1102487869544512e-05, + "loss": 0.1117, + "step": 69500 + }, + { + "epoch": 2.6838874087802616, + "grad_norm": 0.9567661285400391, + "learning_rate": 2.1076746849942726e-05, + "loss": 0.113, + "step": 69510 + }, + { + "epoch": 2.6842735240742885, + "grad_norm": 1.7322033643722534, + "learning_rate": 2.1051005830340943e-05, + "loss": 0.1443, + "step": 69520 + }, + { + "epoch": 2.6846596393683155, + "grad_norm": 1.8574343919754028, + "learning_rate": 2.1025264810739152e-05, + "loss": 0.0919, + "step": 69530 + }, + { + "epoch": 2.685045754662342, + "grad_norm": 0.0813397541642189, + "learning_rate": 2.099952379113737e-05, + "loss": 0.061, + "step": 69540 + }, + { + "epoch": 2.685431869956369, + "grad_norm": 0.09124821424484253, + "learning_rate": 2.0973782771535582e-05, + "loss": 0.0703, + "step": 69550 + }, + { + "epoch": 2.6858179852503956, + "grad_norm": 1.2731401920318604, + "learning_rate": 2.0948041751933796e-05, + "loss": 0.1788, + "step": 69560 + }, + { + "epoch": 2.6862041005444226, + "grad_norm": 0.38222697377204895, + "learning_rate": 2.092230073233201e-05, + "loss": 0.22, + "step": 69570 + }, + { + "epoch": 2.686590215838449, + "grad_norm": 0.7840344905853271, + "learning_rate": 2.0896559712730222e-05, + "loss": 0.0511, + "step": 69580 + }, + { + "epoch": 2.686976331132476, + "grad_norm": 0.5814514756202698, + "learning_rate": 2.0870818693128436e-05, + "loss": 0.1282, + "step": 69590 + }, + { + "epoch": 2.687362446426503, + "grad_norm": 2.049823045730591, + "learning_rate": 2.084507767352665e-05, + "loss": 0.1928, + "step": 69600 + }, + { + "epoch": 2.6877485617205297, + "grad_norm": 1.726441502571106, + "learning_rate": 2.0819336653924862e-05, + "loss": 0.2271, + "step": 69610 + }, + { + "epoch": 2.6881346770145567, + "grad_norm": 0.6086135506629944, + "learning_rate": 2.0793595634323076e-05, + "loss": 0.0826, + "step": 69620 + }, + { + "epoch": 2.688520792308583, + "grad_norm": 0.025057394057512283, + 
"learning_rate": 2.0767854614721292e-05, + "loss": 0.0945, + "step": 69630 + }, + { + "epoch": 2.68890690760261, + "grad_norm": 0.5811958312988281, + "learning_rate": 2.0742113595119502e-05, + "loss": 0.115, + "step": 69640 + }, + { + "epoch": 2.6892930228966367, + "grad_norm": 1.4226329326629639, + "learning_rate": 2.071637257551772e-05, + "loss": 0.1394, + "step": 69650 + }, + { + "epoch": 2.6896791381906637, + "grad_norm": 0.06111827492713928, + "learning_rate": 2.0690631555915932e-05, + "loss": 0.0785, + "step": 69660 + }, + { + "epoch": 2.6900652534846907, + "grad_norm": 1.7739264965057373, + "learning_rate": 2.0664890536314142e-05, + "loss": 0.0966, + "step": 69670 + }, + { + "epoch": 2.6904513687787173, + "grad_norm": 0.12530933320522308, + "learning_rate": 2.063914951671236e-05, + "loss": 0.1142, + "step": 69680 + }, + { + "epoch": 2.6908374840727443, + "grad_norm": 0.44947731494903564, + "learning_rate": 2.0613408497110572e-05, + "loss": 0.1069, + "step": 69690 + }, + { + "epoch": 2.691223599366771, + "grad_norm": 1.1874277591705322, + "learning_rate": 2.0587667477508786e-05, + "loss": 0.2498, + "step": 69700 + }, + { + "epoch": 2.691609714660798, + "grad_norm": 0.2770039141178131, + "learning_rate": 2.0561926457907e-05, + "loss": 0.0918, + "step": 69710 + }, + { + "epoch": 2.6919958299548243, + "grad_norm": 0.6367407441139221, + "learning_rate": 2.0536185438305212e-05, + "loss": 0.1444, + "step": 69720 + }, + { + "epoch": 2.6923819452488513, + "grad_norm": 4.309720993041992, + "learning_rate": 2.0510444418703425e-05, + "loss": 0.1751, + "step": 69730 + }, + { + "epoch": 2.6927680605428783, + "grad_norm": 0.19197171926498413, + "learning_rate": 2.048470339910164e-05, + "loss": 0.3638, + "step": 69740 + }, + { + "epoch": 2.693154175836905, + "grad_norm": 1.1299902200698853, + "learning_rate": 2.0458962379499852e-05, + "loss": 0.2047, + "step": 69750 + }, + { + "epoch": 2.693540291130932, + "grad_norm": 2.2639973163604736, + "learning_rate": 2.0433221359898065e-05, + "loss": 0.1683, + "step": 69760 + }, + { + "epoch": 2.6939264064249584, + "grad_norm": 0.7595259547233582, + "learning_rate": 2.0407480340296282e-05, + "loss": 0.1393, + "step": 69770 + }, + { + "epoch": 2.6943125217189854, + "grad_norm": 0.1113772988319397, + "learning_rate": 2.0381739320694492e-05, + "loss": 0.1775, + "step": 69780 + }, + { + "epoch": 2.694698637013012, + "grad_norm": 0.8597696423530579, + "learning_rate": 2.035599830109271e-05, + "loss": 0.092, + "step": 69790 + }, + { + "epoch": 2.695084752307039, + "grad_norm": 0.9622846841812134, + "learning_rate": 2.0330257281490922e-05, + "loss": 0.1407, + "step": 69800 + }, + { + "epoch": 2.695470867601066, + "grad_norm": 1.840627908706665, + "learning_rate": 2.0304516261889135e-05, + "loss": 0.1769, + "step": 69810 + }, + { + "epoch": 2.6958569828950925, + "grad_norm": 0.8552238941192627, + "learning_rate": 2.027877524228735e-05, + "loss": 0.263, + "step": 69820 + }, + { + "epoch": 2.696243098189119, + "grad_norm": 0.16129668056964874, + "learning_rate": 2.025303422268556e-05, + "loss": 0.188, + "step": 69830 + }, + { + "epoch": 2.696629213483146, + "grad_norm": 0.46955424547195435, + "learning_rate": 2.0227293203083775e-05, + "loss": 0.0623, + "step": 69840 + }, + { + "epoch": 2.697015328777173, + "grad_norm": 1.0460457801818848, + "learning_rate": 2.020155218348199e-05, + "loss": 0.1932, + "step": 69850 + }, + { + "epoch": 2.6974014440711995, + "grad_norm": 4.531238555908203, + "learning_rate": 2.0175811163880202e-05, + "loss": 0.323, + "step": 69860 + }, + 
{ + "epoch": 2.6977875593652265, + "grad_norm": 1.188908338546753, + "learning_rate": 2.0150070144278415e-05, + "loss": 0.1389, + "step": 69870 + }, + { + "epoch": 2.6981736746592535, + "grad_norm": 0.3120017349720001, + "learning_rate": 2.0124329124676632e-05, + "loss": 0.1732, + "step": 69880 + }, + { + "epoch": 2.69855978995328, + "grad_norm": 1.2681177854537964, + "learning_rate": 2.0098588105074842e-05, + "loss": 0.1452, + "step": 69890 + }, + { + "epoch": 2.6989459052473066, + "grad_norm": 0.07245191186666489, + "learning_rate": 2.0072847085473055e-05, + "loss": 0.0982, + "step": 69900 + }, + { + "epoch": 2.6993320205413336, + "grad_norm": 1.7727508544921875, + "learning_rate": 2.0047106065871272e-05, + "loss": 0.0806, + "step": 69910 + }, + { + "epoch": 2.6997181358353606, + "grad_norm": 0.6012092232704163, + "learning_rate": 2.0021365046269482e-05, + "loss": 0.1377, + "step": 69920 + }, + { + "epoch": 2.700104251129387, + "grad_norm": 0.6156259775161743, + "learning_rate": 1.99956240266677e-05, + "loss": 0.1788, + "step": 69930 + }, + { + "epoch": 2.700490366423414, + "grad_norm": 1.6917505264282227, + "learning_rate": 1.9969883007065912e-05, + "loss": 0.1588, + "step": 69940 + }, + { + "epoch": 2.700876481717441, + "grad_norm": 2.0406925678253174, + "learning_rate": 1.9944141987464125e-05, + "loss": 0.1541, + "step": 69950 + }, + { + "epoch": 2.7012625970114676, + "grad_norm": 3.067919969558716, + "learning_rate": 1.991840096786234e-05, + "loss": 0.1052, + "step": 69960 + }, + { + "epoch": 2.701648712305494, + "grad_norm": 0.7679221034049988, + "learning_rate": 1.989265994826055e-05, + "loss": 0.2131, + "step": 69970 + }, + { + "epoch": 2.702034827599521, + "grad_norm": 0.9475175738334656, + "learning_rate": 1.9866918928658765e-05, + "loss": 0.1016, + "step": 69980 + }, + { + "epoch": 2.702420942893548, + "grad_norm": 1.2485641241073608, + "learning_rate": 1.984117790905698e-05, + "loss": 0.0922, + "step": 69990 + }, + { + "epoch": 2.7028070581875747, + "grad_norm": 0.9329742789268494, + "learning_rate": 1.981543688945519e-05, + "loss": 0.3189, + "step": 70000 + }, + { + "epoch": 2.7031931734816017, + "grad_norm": 0.6140137314796448, + "learning_rate": 1.9789695869853405e-05, + "loss": 0.1054, + "step": 70010 + }, + { + "epoch": 2.7035792887756283, + "grad_norm": 0.938637375831604, + "learning_rate": 1.976395485025162e-05, + "loss": 0.1276, + "step": 70020 + }, + { + "epoch": 2.7039654040696552, + "grad_norm": 1.2453144788742065, + "learning_rate": 1.973821383064983e-05, + "loss": 0.2376, + "step": 70030 + }, + { + "epoch": 2.704351519363682, + "grad_norm": 0.7409077286720276, + "learning_rate": 1.9712472811048048e-05, + "loss": 0.1269, + "step": 70040 + }, + { + "epoch": 2.704737634657709, + "grad_norm": 1.8768031597137451, + "learning_rate": 1.968673179144626e-05, + "loss": 0.235, + "step": 70050 + }, + { + "epoch": 2.7051237499517358, + "grad_norm": 0.23621875047683716, + "learning_rate": 1.966099077184447e-05, + "loss": 0.1042, + "step": 70060 + }, + { + "epoch": 2.7055098652457623, + "grad_norm": 0.2492808699607849, + "learning_rate": 1.9635249752242688e-05, + "loss": 0.2042, + "step": 70070 + }, + { + "epoch": 2.7058959805397893, + "grad_norm": 1.5175273418426514, + "learning_rate": 1.96095087326409e-05, + "loss": 0.2068, + "step": 70080 + }, + { + "epoch": 2.706282095833816, + "grad_norm": 0.9314035773277283, + "learning_rate": 1.9583767713039115e-05, + "loss": 0.1587, + "step": 70090 + }, + { + "epoch": 2.706668211127843, + "grad_norm": 1.7147942781448364, + 
"learning_rate": 1.9558026693437328e-05, + "loss": 0.1391, + "step": 70100 + }, + { + "epoch": 2.7070543264218694, + "grad_norm": 0.36276572942733765, + "learning_rate": 1.953228567383554e-05, + "loss": 0.1275, + "step": 70110 + }, + { + "epoch": 2.7074404417158964, + "grad_norm": 0.8741244077682495, + "learning_rate": 1.9506544654233755e-05, + "loss": 0.3746, + "step": 70120 + }, + { + "epoch": 2.7078265570099234, + "grad_norm": 1.3060029745101929, + "learning_rate": 1.9480803634631968e-05, + "loss": 0.1076, + "step": 70130 + }, + { + "epoch": 2.70821267230395, + "grad_norm": 0.10746710747480392, + "learning_rate": 1.945506261503018e-05, + "loss": 0.1494, + "step": 70140 + }, + { + "epoch": 2.708598787597977, + "grad_norm": 1.486136555671692, + "learning_rate": 1.9429321595428395e-05, + "loss": 0.1223, + "step": 70150 + }, + { + "epoch": 2.7089849028920034, + "grad_norm": 1.184791088104248, + "learning_rate": 1.940358057582661e-05, + "loss": 0.1055, + "step": 70160 + }, + { + "epoch": 2.7093710181860304, + "grad_norm": 0.10033337771892548, + "learning_rate": 1.937783955622482e-05, + "loss": 0.083, + "step": 70170 + }, + { + "epoch": 2.709757133480057, + "grad_norm": 1.3200103044509888, + "learning_rate": 1.9352098536623038e-05, + "loss": 0.2181, + "step": 70180 + }, + { + "epoch": 2.710143248774084, + "grad_norm": 0.013393727131187916, + "learning_rate": 1.932635751702125e-05, + "loss": 0.2415, + "step": 70190 + }, + { + "epoch": 2.710529364068111, + "grad_norm": 0.5986078381538391, + "learning_rate": 1.9300616497419465e-05, + "loss": 0.3203, + "step": 70200 + }, + { + "epoch": 2.7109154793621375, + "grad_norm": 0.18333138525485992, + "learning_rate": 1.9274875477817678e-05, + "loss": 0.1043, + "step": 70210 + }, + { + "epoch": 2.7113015946561645, + "grad_norm": 1.0568320751190186, + "learning_rate": 1.924913445821589e-05, + "loss": 0.1491, + "step": 70220 + }, + { + "epoch": 2.711687709950191, + "grad_norm": 4.519015312194824, + "learning_rate": 1.9223393438614105e-05, + "loss": 0.2732, + "step": 70230 + }, + { + "epoch": 2.712073825244218, + "grad_norm": 0.753288209438324, + "learning_rate": 1.9197652419012318e-05, + "loss": 0.1793, + "step": 70240 + }, + { + "epoch": 2.7124599405382446, + "grad_norm": 0.8923632502555847, + "learning_rate": 1.917191139941053e-05, + "loss": 0.3293, + "step": 70250 + }, + { + "epoch": 2.7128460558322716, + "grad_norm": 1.5100219249725342, + "learning_rate": 1.9146170379808744e-05, + "loss": 0.2252, + "step": 70260 + }, + { + "epoch": 2.7132321711262986, + "grad_norm": 3.496548891067505, + "learning_rate": 1.912042936020696e-05, + "loss": 0.3877, + "step": 70270 + }, + { + "epoch": 2.713618286420325, + "grad_norm": 1.0033410787582397, + "learning_rate": 1.909468834060517e-05, + "loss": 0.1854, + "step": 70280 + }, + { + "epoch": 2.7140044017143516, + "grad_norm": 2.1113343238830566, + "learning_rate": 1.9068947321003384e-05, + "loss": 0.2561, + "step": 70290 + }, + { + "epoch": 2.7143905170083786, + "grad_norm": 3.4603283405303955, + "learning_rate": 1.90432063014016e-05, + "loss": 0.2673, + "step": 70300 + }, + { + "epoch": 2.7147766323024056, + "grad_norm": 0.4921800196170807, + "learning_rate": 1.901746528179981e-05, + "loss": 0.1819, + "step": 70310 + }, + { + "epoch": 2.715162747596432, + "grad_norm": 0.12639844417572021, + "learning_rate": 1.8991724262198028e-05, + "loss": 0.1302, + "step": 70320 + }, + { + "epoch": 2.715548862890459, + "grad_norm": 0.988345205783844, + "learning_rate": 1.896598324259624e-05, + "loss": 0.2827, + "step": 70330 + }, + 
{ + "epoch": 2.715934978184486, + "grad_norm": 1.432824730873108, + "learning_rate": 1.8940242222994454e-05, + "loss": 0.1046, + "step": 70340 + }, + { + "epoch": 2.7163210934785127, + "grad_norm": 0.4591884911060333, + "learning_rate": 1.8914501203392668e-05, + "loss": 0.2663, + "step": 70350 + }, + { + "epoch": 2.7167072087725392, + "grad_norm": 0.23119209706783295, + "learning_rate": 1.888876018379088e-05, + "loss": 0.3555, + "step": 70360 + }, + { + "epoch": 2.7170933240665662, + "grad_norm": 1.9221980571746826, + "learning_rate": 1.8863019164189094e-05, + "loss": 0.1279, + "step": 70370 + }, + { + "epoch": 2.717479439360593, + "grad_norm": 0.09880539029836655, + "learning_rate": 1.8837278144587308e-05, + "loss": 0.0905, + "step": 70380 + }, + { + "epoch": 2.7178655546546198, + "grad_norm": 0.16270965337753296, + "learning_rate": 1.881153712498552e-05, + "loss": 0.1619, + "step": 70390 + }, + { + "epoch": 2.7182516699486468, + "grad_norm": 2.006127119064331, + "learning_rate": 1.8785796105383734e-05, + "loss": 0.107, + "step": 70400 + }, + { + "epoch": 2.7186377852426733, + "grad_norm": 3.8377106189727783, + "learning_rate": 1.876005508578195e-05, + "loss": 0.1109, + "step": 70410 + }, + { + "epoch": 2.7190239005367003, + "grad_norm": 0.5417147278785706, + "learning_rate": 1.873431406618016e-05, + "loss": 0.0836, + "step": 70420 + }, + { + "epoch": 2.719410015830727, + "grad_norm": 0.2515392601490021, + "learning_rate": 1.8708573046578378e-05, + "loss": 0.1107, + "step": 70430 + }, + { + "epoch": 2.719796131124754, + "grad_norm": 1.813265323638916, + "learning_rate": 1.868283202697659e-05, + "loss": 0.125, + "step": 70440 + }, + { + "epoch": 2.720182246418781, + "grad_norm": 1.873964548110962, + "learning_rate": 1.86570910073748e-05, + "loss": 0.1649, + "step": 70450 + }, + { + "epoch": 2.7205683617128074, + "grad_norm": 0.23251821100711823, + "learning_rate": 1.8631349987773017e-05, + "loss": 0.1394, + "step": 70460 + }, + { + "epoch": 2.7209544770068343, + "grad_norm": 3.282196521759033, + "learning_rate": 1.860560896817123e-05, + "loss": 0.3037, + "step": 70470 + }, + { + "epoch": 2.721340592300861, + "grad_norm": 1.481994867324829, + "learning_rate": 1.8579867948569444e-05, + "loss": 0.194, + "step": 70480 + }, + { + "epoch": 2.721726707594888, + "grad_norm": 2.3261420726776123, + "learning_rate": 1.8554126928967657e-05, + "loss": 0.2485, + "step": 70490 + }, + { + "epoch": 2.7221128228889144, + "grad_norm": 0.7159029841423035, + "learning_rate": 1.852838590936587e-05, + "loss": 0.0882, + "step": 70500 + }, + { + "epoch": 2.7224989381829414, + "grad_norm": 1.13942289352417, + "learning_rate": 1.8502644889764084e-05, + "loss": 0.2973, + "step": 70510 + }, + { + "epoch": 2.7228850534769684, + "grad_norm": 0.5620355606079102, + "learning_rate": 1.8476903870162297e-05, + "loss": 0.1388, + "step": 70520 + }, + { + "epoch": 2.723271168770995, + "grad_norm": 0.3864080607891083, + "learning_rate": 1.845116285056051e-05, + "loss": 0.3104, + "step": 70530 + }, + { + "epoch": 2.723657284065022, + "grad_norm": 0.19849848747253418, + "learning_rate": 1.8425421830958724e-05, + "loss": 0.1077, + "step": 70540 + }, + { + "epoch": 2.7240433993590485, + "grad_norm": 0.5163066983222961, + "learning_rate": 1.839968081135694e-05, + "loss": 0.1412, + "step": 70550 + }, + { + "epoch": 2.7244295146530755, + "grad_norm": 2.6426632404327393, + "learning_rate": 1.837393979175515e-05, + "loss": 0.1635, + "step": 70560 + }, + { + "epoch": 2.724815629947102, + "grad_norm": 0.09032654017210007, + 
"learning_rate": 1.8348198772153367e-05, + "loss": 0.1136, + "step": 70570 + }, + { + "epoch": 2.725201745241129, + "grad_norm": 0.20428933203220367, + "learning_rate": 1.832245775255158e-05, + "loss": 0.0685, + "step": 70580 + }, + { + "epoch": 2.725587860535156, + "grad_norm": 0.16030457615852356, + "learning_rate": 1.8296716732949794e-05, + "loss": 0.236, + "step": 70590 + }, + { + "epoch": 2.7259739758291825, + "grad_norm": 0.4269642233848572, + "learning_rate": 1.8270975713348007e-05, + "loss": 0.1099, + "step": 70600 + }, + { + "epoch": 2.7263600911232095, + "grad_norm": 0.821434736251831, + "learning_rate": 1.824523469374622e-05, + "loss": 0.0975, + "step": 70610 + }, + { + "epoch": 2.726746206417236, + "grad_norm": 0.6720656156539917, + "learning_rate": 1.8219493674144434e-05, + "loss": 0.286, + "step": 70620 + }, + { + "epoch": 2.727132321711263, + "grad_norm": 1.6225451231002808, + "learning_rate": 1.8193752654542647e-05, + "loss": 0.1615, + "step": 70630 + }, + { + "epoch": 2.7275184370052896, + "grad_norm": 2.2176315784454346, + "learning_rate": 1.816801163494086e-05, + "loss": 0.2519, + "step": 70640 + }, + { + "epoch": 2.7279045522993166, + "grad_norm": 0.7203749418258667, + "learning_rate": 1.8142270615339074e-05, + "loss": 0.0635, + "step": 70650 + }, + { + "epoch": 2.7282906675933436, + "grad_norm": 1.0711476802825928, + "learning_rate": 1.8116529595737287e-05, + "loss": 0.1388, + "step": 70660 + }, + { + "epoch": 2.72867678288737, + "grad_norm": 0.3595193922519684, + "learning_rate": 1.80907885761355e-05, + "loss": 0.3019, + "step": 70670 + }, + { + "epoch": 2.7290628981813967, + "grad_norm": 2.369717597961426, + "learning_rate": 1.8065047556533714e-05, + "loss": 0.202, + "step": 70680 + }, + { + "epoch": 2.7294490134754237, + "grad_norm": 0.9249016046524048, + "learning_rate": 1.803930653693193e-05, + "loss": 0.1571, + "step": 70690 + }, + { + "epoch": 2.7298351287694507, + "grad_norm": 1.1907705068588257, + "learning_rate": 1.801356551733014e-05, + "loss": 0.1, + "step": 70700 + }, + { + "epoch": 2.730221244063477, + "grad_norm": 1.934824824333191, + "learning_rate": 1.7987824497728357e-05, + "loss": 0.121, + "step": 70710 + }, + { + "epoch": 2.730607359357504, + "grad_norm": 0.8659215569496155, + "learning_rate": 1.796208347812657e-05, + "loss": 0.1184, + "step": 70720 + }, + { + "epoch": 2.730993474651531, + "grad_norm": 4.406744956970215, + "learning_rate": 1.7936342458524784e-05, + "loss": 0.1405, + "step": 70730 + }, + { + "epoch": 2.7313795899455577, + "grad_norm": 1.3139662742614746, + "learning_rate": 1.7910601438922997e-05, + "loss": 0.1341, + "step": 70740 + }, + { + "epoch": 2.7317657052395843, + "grad_norm": 1.3049808740615845, + "learning_rate": 1.788486041932121e-05, + "loss": 0.096, + "step": 70750 + }, + { + "epoch": 2.7321518205336113, + "grad_norm": 0.05369478836655617, + "learning_rate": 1.7859119399719424e-05, + "loss": 0.1148, + "step": 70760 + }, + { + "epoch": 2.7325379358276383, + "grad_norm": 0.028254307806491852, + "learning_rate": 1.7833378380117637e-05, + "loss": 0.1491, + "step": 70770 + }, + { + "epoch": 2.732924051121665, + "grad_norm": 0.44366562366485596, + "learning_rate": 1.780763736051585e-05, + "loss": 0.1294, + "step": 70780 + }, + { + "epoch": 2.733310166415692, + "grad_norm": 1.80902099609375, + "learning_rate": 1.7781896340914064e-05, + "loss": 0.4409, + "step": 70790 + }, + { + "epoch": 2.733696281709719, + "grad_norm": 0.48230284452438354, + "learning_rate": 1.775615532131228e-05, + "loss": 0.1516, + "step": 70800 + }, + { + 
"epoch": 2.7340823970037453, + "grad_norm": 1.295810341835022, + "learning_rate": 1.773041430171049e-05, + "loss": 0.1375, + "step": 70810 + }, + { + "epoch": 2.734468512297772, + "grad_norm": 0.05213301628828049, + "learning_rate": 1.7704673282108707e-05, + "loss": 0.1678, + "step": 70820 + }, + { + "epoch": 2.734854627591799, + "grad_norm": 1.5852004289627075, + "learning_rate": 1.767893226250692e-05, + "loss": 0.2333, + "step": 70830 + }, + { + "epoch": 2.735240742885826, + "grad_norm": 0.05190286785364151, + "learning_rate": 1.765319124290513e-05, + "loss": 0.1131, + "step": 70840 + }, + { + "epoch": 2.7356268581798524, + "grad_norm": 0.9715459942817688, + "learning_rate": 1.7627450223303347e-05, + "loss": 0.2, + "step": 70850 + }, + { + "epoch": 2.7360129734738794, + "grad_norm": 1.0015023946762085, + "learning_rate": 1.760170920370156e-05, + "loss": 0.1492, + "step": 70860 + }, + { + "epoch": 2.736399088767906, + "grad_norm": 0.4785858392715454, + "learning_rate": 1.7575968184099773e-05, + "loss": 0.1157, + "step": 70870 + }, + { + "epoch": 2.736785204061933, + "grad_norm": 1.2634512186050415, + "learning_rate": 1.7550227164497987e-05, + "loss": 0.196, + "step": 70880 + }, + { + "epoch": 2.7371713193559595, + "grad_norm": 0.08982031047344208, + "learning_rate": 1.75244861448962e-05, + "loss": 0.1772, + "step": 70890 + }, + { + "epoch": 2.7375574346499865, + "grad_norm": 0.2539536952972412, + "learning_rate": 1.7498745125294413e-05, + "loss": 0.2373, + "step": 70900 + }, + { + "epoch": 2.7379435499440135, + "grad_norm": 1.94331955909729, + "learning_rate": 1.7473004105692627e-05, + "loss": 0.1113, + "step": 70910 + }, + { + "epoch": 2.73832966523804, + "grad_norm": 1.7793807983398438, + "learning_rate": 1.744726308609084e-05, + "loss": 0.1222, + "step": 70920 + }, + { + "epoch": 2.738715780532067, + "grad_norm": 0.9183433055877686, + "learning_rate": 1.7421522066489053e-05, + "loss": 0.0982, + "step": 70930 + }, + { + "epoch": 2.7391018958260935, + "grad_norm": 0.7785767316818237, + "learning_rate": 1.739578104688727e-05, + "loss": 0.2077, + "step": 70940 + }, + { + "epoch": 2.7394880111201205, + "grad_norm": 0.363359272480011, + "learning_rate": 1.737004002728548e-05, + "loss": 0.2365, + "step": 70950 + }, + { + "epoch": 2.739874126414147, + "grad_norm": 0.026698095723986626, + "learning_rate": 1.7344299007683697e-05, + "loss": 0.185, + "step": 70960 + }, + { + "epoch": 2.740260241708174, + "grad_norm": 0.3695981502532959, + "learning_rate": 1.731855798808191e-05, + "loss": 0.1889, + "step": 70970 + }, + { + "epoch": 2.740646357002201, + "grad_norm": 0.43547266721725464, + "learning_rate": 1.7292816968480123e-05, + "loss": 0.1945, + "step": 70980 + }, + { + "epoch": 2.7410324722962276, + "grad_norm": 0.815490186214447, + "learning_rate": 1.7267075948878337e-05, + "loss": 0.1461, + "step": 70990 + }, + { + "epoch": 2.7414185875902546, + "grad_norm": 0.9073535799980164, + "learning_rate": 1.724133492927655e-05, + "loss": 0.3003, + "step": 71000 + }, + { + "epoch": 2.741804702884281, + "grad_norm": 1.8418182134628296, + "learning_rate": 1.7215593909674763e-05, + "loss": 0.1591, + "step": 71010 + }, + { + "epoch": 2.742190818178308, + "grad_norm": 0.6584638953208923, + "learning_rate": 1.7189852890072976e-05, + "loss": 0.0845, + "step": 71020 + }, + { + "epoch": 2.7425769334723347, + "grad_norm": 0.15774297714233398, + "learning_rate": 1.716411187047119e-05, + "loss": 0.185, + "step": 71030 + }, + { + "epoch": 2.7429630487663617, + "grad_norm": 1.1900436878204346, + "learning_rate": 
1.7138370850869403e-05, + "loss": 0.2387, + "step": 71040 + }, + { + "epoch": 2.7433491640603886, + "grad_norm": 1.486275553703308, + "learning_rate": 1.7112629831267616e-05, + "loss": 0.1802, + "step": 71050 + }, + { + "epoch": 2.743735279354415, + "grad_norm": 1.8878792524337769, + "learning_rate": 1.708688881166583e-05, + "loss": 0.1771, + "step": 71060 + }, + { + "epoch": 2.744121394648442, + "grad_norm": 0.4045495390892029, + "learning_rate": 1.7061147792064043e-05, + "loss": 0.0617, + "step": 71070 + }, + { + "epoch": 2.7445075099424687, + "grad_norm": 1.6910227537155151, + "learning_rate": 1.703540677246226e-05, + "loss": 0.2295, + "step": 71080 + }, + { + "epoch": 2.7448936252364957, + "grad_norm": 1.3064563274383545, + "learning_rate": 1.700966575286047e-05, + "loss": 0.189, + "step": 71090 + }, + { + "epoch": 2.7452797405305223, + "grad_norm": 0.26879480481147766, + "learning_rate": 1.6983924733258686e-05, + "loss": 0.2041, + "step": 71100 + }, + { + "epoch": 2.7456658558245493, + "grad_norm": 0.09380711615085602, + "learning_rate": 1.69581837136569e-05, + "loss": 0.2032, + "step": 71110 + }, + { + "epoch": 2.7460519711185762, + "grad_norm": 0.4047906696796417, + "learning_rate": 1.6932442694055113e-05, + "loss": 0.1808, + "step": 71120 + }, + { + "epoch": 2.746438086412603, + "grad_norm": 0.14023207128047943, + "learning_rate": 1.6906701674453326e-05, + "loss": 0.0477, + "step": 71130 + }, + { + "epoch": 2.7468242017066293, + "grad_norm": 0.09656676650047302, + "learning_rate": 1.688096065485154e-05, + "loss": 0.1482, + "step": 71140 + }, + { + "epoch": 2.7472103170006563, + "grad_norm": 0.62394118309021, + "learning_rate": 1.6855219635249753e-05, + "loss": 0.0637, + "step": 71150 + }, + { + "epoch": 2.7475964322946833, + "grad_norm": 0.028015749529004097, + "learning_rate": 1.6829478615647966e-05, + "loss": 0.1396, + "step": 71160 + }, + { + "epoch": 2.74798254758871, + "grad_norm": 0.6252595782279968, + "learning_rate": 1.680373759604618e-05, + "loss": 0.1989, + "step": 71170 + }, + { + "epoch": 2.748368662882737, + "grad_norm": 1.6278966665267944, + "learning_rate": 1.6777996576444393e-05, + "loss": 0.2828, + "step": 71180 + }, + { + "epoch": 2.748754778176764, + "grad_norm": 0.7790352702140808, + "learning_rate": 1.675225555684261e-05, + "loss": 0.1542, + "step": 71190 + }, + { + "epoch": 2.7491408934707904, + "grad_norm": 0.41583356261253357, + "learning_rate": 1.672651453724082e-05, + "loss": 0.1853, + "step": 71200 + }, + { + "epoch": 2.749527008764817, + "grad_norm": 0.22601169347763062, + "learning_rate": 1.6700773517639036e-05, + "loss": 0.1429, + "step": 71210 + }, + { + "epoch": 2.749913124058844, + "grad_norm": 0.7268449068069458, + "learning_rate": 1.667503249803725e-05, + "loss": 0.156, + "step": 71220 + }, + { + "epoch": 2.750299239352871, + "grad_norm": 0.8059683442115784, + "learning_rate": 1.664929147843546e-05, + "loss": 0.1616, + "step": 71230 + }, + { + "epoch": 2.7506853546468975, + "grad_norm": 1.2625138759613037, + "learning_rate": 1.6623550458833676e-05, + "loss": 0.115, + "step": 71240 + }, + { + "epoch": 2.7510714699409244, + "grad_norm": 0.4710187315940857, + "learning_rate": 1.659780943923189e-05, + "loss": 0.1266, + "step": 71250 + }, + { + "epoch": 2.7514575852349514, + "grad_norm": 1.3373891115188599, + "learning_rate": 1.6572068419630103e-05, + "loss": 0.1423, + "step": 71260 + }, + { + "epoch": 2.751843700528978, + "grad_norm": 0.5606533288955688, + "learning_rate": 1.6546327400028316e-05, + "loss": 0.0912, + "step": 71270 + }, + { + "epoch": 
2.7522298158230045, + "grad_norm": 1.2201918363571167, + "learning_rate": 1.652058638042653e-05, + "loss": 0.2536, + "step": 71280 + }, + { + "epoch": 2.7526159311170315, + "grad_norm": 1.017829418182373, + "learning_rate": 1.6494845360824743e-05, + "loss": 0.0917, + "step": 71290 + }, + { + "epoch": 2.7530020464110585, + "grad_norm": 0.6897231340408325, + "learning_rate": 1.6469104341222956e-05, + "loss": 0.1222, + "step": 71300 + }, + { + "epoch": 2.753388161705085, + "grad_norm": 5.349620342254639, + "learning_rate": 1.644336332162117e-05, + "loss": 0.1121, + "step": 71310 + }, + { + "epoch": 2.753774276999112, + "grad_norm": 1.6144895553588867, + "learning_rate": 1.6417622302019383e-05, + "loss": 0.146, + "step": 71320 + }, + { + "epoch": 2.7541603922931386, + "grad_norm": 0.5989459753036499, + "learning_rate": 1.63918812824176e-05, + "loss": 0.0598, + "step": 71330 + }, + { + "epoch": 2.7545465075871656, + "grad_norm": 0.7278910279273987, + "learning_rate": 1.636614026281581e-05, + "loss": 0.1559, + "step": 71340 + }, + { + "epoch": 2.754932622881192, + "grad_norm": 1.0088047981262207, + "learning_rate": 1.6340399243214026e-05, + "loss": 0.0795, + "step": 71350 + }, + { + "epoch": 2.755318738175219, + "grad_norm": 1.3781206607818604, + "learning_rate": 1.631465822361224e-05, + "loss": 0.0628, + "step": 71360 + }, + { + "epoch": 2.755704853469246, + "grad_norm": 1.498246431350708, + "learning_rate": 1.6288917204010453e-05, + "loss": 0.1908, + "step": 71370 + }, + { + "epoch": 2.7560909687632726, + "grad_norm": 0.2840694785118103, + "learning_rate": 1.6263176184408666e-05, + "loss": 0.0357, + "step": 71380 + }, + { + "epoch": 2.7564770840572996, + "grad_norm": 0.6458057761192322, + "learning_rate": 1.623743516480688e-05, + "loss": 0.215, + "step": 71390 + }, + { + "epoch": 2.756863199351326, + "grad_norm": 0.42117947340011597, + "learning_rate": 1.6211694145205092e-05, + "loss": 0.1362, + "step": 71400 + }, + { + "epoch": 2.757249314645353, + "grad_norm": 3.0162267684936523, + "learning_rate": 1.6185953125603306e-05, + "loss": 0.1965, + "step": 71410 + }, + { + "epoch": 2.7576354299393797, + "grad_norm": 0.930474579334259, + "learning_rate": 1.616021210600152e-05, + "loss": 0.2395, + "step": 71420 + }, + { + "epoch": 2.7580215452334067, + "grad_norm": 0.7894459366798401, + "learning_rate": 1.6134471086399732e-05, + "loss": 0.2285, + "step": 71430 + }, + { + "epoch": 2.7584076605274337, + "grad_norm": 3.2045016288757324, + "learning_rate": 1.6108730066797946e-05, + "loss": 0.1819, + "step": 71440 + }, + { + "epoch": 2.7587937758214602, + "grad_norm": 0.7269306778907776, + "learning_rate": 1.608298904719616e-05, + "loss": 0.1594, + "step": 71450 + }, + { + "epoch": 2.7591798911154872, + "grad_norm": 1.333078145980835, + "learning_rate": 1.6057248027594372e-05, + "loss": 0.2131, + "step": 71460 + }, + { + "epoch": 2.7595660064095138, + "grad_norm": 0.6923009753227234, + "learning_rate": 1.603150700799259e-05, + "loss": 0.1174, + "step": 71470 + }, + { + "epoch": 2.7599521217035408, + "grad_norm": 3.510756254196167, + "learning_rate": 1.60057659883908e-05, + "loss": 0.1368, + "step": 71480 + }, + { + "epoch": 2.7603382369975673, + "grad_norm": 0.035045500844717026, + "learning_rate": 1.5980024968789016e-05, + "loss": 0.2163, + "step": 71490 + }, + { + "epoch": 2.7607243522915943, + "grad_norm": 0.9699954390525818, + "learning_rate": 1.595428394918723e-05, + "loss": 0.1279, + "step": 71500 + }, + { + "epoch": 2.7611104675856213, + "grad_norm": 1.3624379634857178, + "learning_rate": 
1.5928542929585442e-05, + "loss": 0.1458, + "step": 71510 + }, + { + "epoch": 2.761496582879648, + "grad_norm": 2.585167646408081, + "learning_rate": 1.5902801909983656e-05, + "loss": 0.2321, + "step": 71520 + }, + { + "epoch": 2.761882698173675, + "grad_norm": 2.018916130065918, + "learning_rate": 1.587706089038187e-05, + "loss": 0.1301, + "step": 71530 + }, + { + "epoch": 2.7622688134677014, + "grad_norm": 0.1349097490310669, + "learning_rate": 1.5851319870780082e-05, + "loss": 0.1449, + "step": 71540 + }, + { + "epoch": 2.7626549287617284, + "grad_norm": 0.651360273361206, + "learning_rate": 1.5825578851178296e-05, + "loss": 0.0588, + "step": 71550 + }, + { + "epoch": 2.763041044055755, + "grad_norm": 1.081132411956787, + "learning_rate": 1.579983783157651e-05, + "loss": 0.2273, + "step": 71560 + }, + { + "epoch": 2.763427159349782, + "grad_norm": 0.9000619649887085, + "learning_rate": 1.5774096811974722e-05, + "loss": 0.126, + "step": 71570 + }, + { + "epoch": 2.763813274643809, + "grad_norm": 1.7903470993041992, + "learning_rate": 1.574835579237294e-05, + "loss": 0.1583, + "step": 71580 + }, + { + "epoch": 2.7641993899378354, + "grad_norm": 0.5552549362182617, + "learning_rate": 1.572261477277115e-05, + "loss": 0.1613, + "step": 71590 + }, + { + "epoch": 2.764585505231862, + "grad_norm": 0.24077007174491882, + "learning_rate": 1.5696873753169365e-05, + "loss": 0.1225, + "step": 71600 + }, + { + "epoch": 2.764971620525889, + "grad_norm": 2.2357699871063232, + "learning_rate": 1.567113273356758e-05, + "loss": 0.1241, + "step": 71610 + }, + { + "epoch": 2.765357735819916, + "grad_norm": 0.49114760756492615, + "learning_rate": 1.564539171396579e-05, + "loss": 0.3685, + "step": 71620 + }, + { + "epoch": 2.7657438511139425, + "grad_norm": 0.5270382761955261, + "learning_rate": 1.5619650694364005e-05, + "loss": 0.231, + "step": 71630 + }, + { + "epoch": 2.7661299664079695, + "grad_norm": 1.494850993156433, + "learning_rate": 1.559390967476222e-05, + "loss": 0.1456, + "step": 71640 + }, + { + "epoch": 2.7665160817019965, + "grad_norm": 0.32450973987579346, + "learning_rate": 1.5568168655160432e-05, + "loss": 0.2541, + "step": 71650 + }, + { + "epoch": 2.766902196996023, + "grad_norm": 0.1868717074394226, + "learning_rate": 1.5542427635558645e-05, + "loss": 0.2315, + "step": 71660 + }, + { + "epoch": 2.7672883122900496, + "grad_norm": 1.8473451137542725, + "learning_rate": 1.551668661595686e-05, + "loss": 0.2918, + "step": 71670 + }, + { + "epoch": 2.7676744275840766, + "grad_norm": 1.959158182144165, + "learning_rate": 1.5490945596355072e-05, + "loss": 0.1368, + "step": 71680 + }, + { + "epoch": 2.7680605428781035, + "grad_norm": 1.8263370990753174, + "learning_rate": 1.5465204576753285e-05, + "loss": 0.2409, + "step": 71690 + }, + { + "epoch": 2.76844665817213, + "grad_norm": 0.5502326488494873, + "learning_rate": 1.54394635571515e-05, + "loss": 0.1233, + "step": 71700 + }, + { + "epoch": 2.768832773466157, + "grad_norm": 1.971820592880249, + "learning_rate": 1.5413722537549712e-05, + "loss": 0.2207, + "step": 71710 + }, + { + "epoch": 2.7692188887601836, + "grad_norm": 1.1263493299484253, + "learning_rate": 1.538798151794793e-05, + "loss": 0.1296, + "step": 71720 + }, + { + "epoch": 2.7696050040542106, + "grad_norm": 0.28595641255378723, + "learning_rate": 1.536224049834614e-05, + "loss": 0.1539, + "step": 71730 + }, + { + "epoch": 2.769991119348237, + "grad_norm": 1.1686561107635498, + "learning_rate": 1.5336499478744355e-05, + "loss": 0.2165, + "step": 71740 + }, + { + "epoch": 
2.770377234642264, + "grad_norm": 0.6559491157531738, + "learning_rate": 1.531075845914257e-05, + "loss": 0.1464, + "step": 71750 + }, + { + "epoch": 2.770763349936291, + "grad_norm": 0.5865970253944397, + "learning_rate": 1.5285017439540782e-05, + "loss": 0.1076, + "step": 71760 + }, + { + "epoch": 2.7711494652303177, + "grad_norm": 1.1443376541137695, + "learning_rate": 1.5259276419938995e-05, + "loss": 0.1497, + "step": 71770 + }, + { + "epoch": 2.7715355805243447, + "grad_norm": 1.307176947593689, + "learning_rate": 1.523353540033721e-05, + "loss": 0.1435, + "step": 71780 + }, + { + "epoch": 2.7719216958183712, + "grad_norm": 0.014405300840735435, + "learning_rate": 1.5207794380735422e-05, + "loss": 0.1249, + "step": 71790 + }, + { + "epoch": 2.772307811112398, + "grad_norm": 1.4182642698287964, + "learning_rate": 1.5182053361133635e-05, + "loss": 0.1405, + "step": 71800 + }, + { + "epoch": 2.7726939264064248, + "grad_norm": 0.27843913435935974, + "learning_rate": 1.5156312341531847e-05, + "loss": 0.2303, + "step": 71810 + }, + { + "epoch": 2.7730800417004517, + "grad_norm": 2.4468517303466797, + "learning_rate": 1.5130571321930062e-05, + "loss": 0.1322, + "step": 71820 + }, + { + "epoch": 2.7734661569944787, + "grad_norm": 1.2800359725952148, + "learning_rate": 1.5104830302328277e-05, + "loss": 0.1375, + "step": 71830 + }, + { + "epoch": 2.7738522722885053, + "grad_norm": 0.3998767137527466, + "learning_rate": 1.5079089282726488e-05, + "loss": 0.1066, + "step": 71840 + }, + { + "epoch": 2.7742383875825323, + "grad_norm": 0.5334371328353882, + "learning_rate": 1.5053348263124703e-05, + "loss": 0.1777, + "step": 71850 + }, + { + "epoch": 2.774624502876559, + "grad_norm": 0.4894556403160095, + "learning_rate": 1.5027607243522918e-05, + "loss": 0.1134, + "step": 71860 + }, + { + "epoch": 2.775010618170586, + "grad_norm": 0.6681411266326904, + "learning_rate": 1.500186622392113e-05, + "loss": 0.1274, + "step": 71870 + }, + { + "epoch": 2.7753967334646124, + "grad_norm": 0.8148763179779053, + "learning_rate": 1.4976125204319343e-05, + "loss": 0.184, + "step": 71880 + }, + { + "epoch": 2.7757828487586393, + "grad_norm": 2.98481822013855, + "learning_rate": 1.4950384184717558e-05, + "loss": 0.1414, + "step": 71890 + }, + { + "epoch": 2.7761689640526663, + "grad_norm": 0.24513117969036102, + "learning_rate": 1.492464316511577e-05, + "loss": 0.1361, + "step": 71900 + }, + { + "epoch": 2.776555079346693, + "grad_norm": 1.320607304573059, + "learning_rate": 1.4898902145513985e-05, + "loss": 0.159, + "step": 71910 + }, + { + "epoch": 2.77694119464072, + "grad_norm": 0.09256679564714432, + "learning_rate": 1.48731611259122e-05, + "loss": 0.1017, + "step": 71920 + }, + { + "epoch": 2.7773273099347464, + "grad_norm": 0.888762891292572, + "learning_rate": 1.4847420106310412e-05, + "loss": 0.1709, + "step": 71930 + }, + { + "epoch": 2.7777134252287734, + "grad_norm": 1.0178054571151733, + "learning_rate": 1.4821679086708627e-05, + "loss": 0.1705, + "step": 71940 + }, + { + "epoch": 2.7780995405228, + "grad_norm": 1.133257508277893, + "learning_rate": 1.4795938067106838e-05, + "loss": 0.1384, + "step": 71950 + }, + { + "epoch": 2.778485655816827, + "grad_norm": 0.41183799505233765, + "learning_rate": 1.4770197047505051e-05, + "loss": 0.1516, + "step": 71960 + }, + { + "epoch": 2.778871771110854, + "grad_norm": 2.172168731689453, + "learning_rate": 1.4744456027903266e-05, + "loss": 0.1806, + "step": 71970 + }, + { + "epoch": 2.7792578864048805, + "grad_norm": 0.4734342098236084, + "learning_rate": 
1.4718715008301478e-05, + "loss": 0.093, + "step": 71980 + }, + { + "epoch": 2.779644001698907, + "grad_norm": 0.38913142681121826, + "learning_rate": 1.4692973988699693e-05, + "loss": 0.1117, + "step": 71990 + }, + { + "epoch": 2.780030116992934, + "grad_norm": 1.2493480443954468, + "learning_rate": 1.4667232969097908e-05, + "loss": 0.2148, + "step": 72000 + }, + { + "epoch": 2.780416232286961, + "grad_norm": 0.6025747060775757, + "learning_rate": 1.464149194949612e-05, + "loss": 0.1705, + "step": 72010 + }, + { + "epoch": 2.7808023475809875, + "grad_norm": 1.4697037935256958, + "learning_rate": 1.4615750929894335e-05, + "loss": 0.1062, + "step": 72020 + }, + { + "epoch": 2.7811884628750145, + "grad_norm": 0.40200480818748474, + "learning_rate": 1.4590009910292548e-05, + "loss": 0.1152, + "step": 72030 + }, + { + "epoch": 2.7815745781690415, + "grad_norm": 0.5441505908966064, + "learning_rate": 1.456426889069076e-05, + "loss": 0.1488, + "step": 72040 + }, + { + "epoch": 2.781960693463068, + "grad_norm": 0.45176851749420166, + "learning_rate": 1.4538527871088975e-05, + "loss": 0.0965, + "step": 72050 + }, + { + "epoch": 2.7823468087570946, + "grad_norm": 0.7421501278877258, + "learning_rate": 1.4512786851487186e-05, + "loss": 0.1045, + "step": 72060 + }, + { + "epoch": 2.7827329240511216, + "grad_norm": 0.9444339871406555, + "learning_rate": 1.4487045831885401e-05, + "loss": 0.164, + "step": 72070 + }, + { + "epoch": 2.7831190393451486, + "grad_norm": 1.1744027137756348, + "learning_rate": 1.4461304812283616e-05, + "loss": 0.1624, + "step": 72080 + }, + { + "epoch": 2.783505154639175, + "grad_norm": 1.2394273281097412, + "learning_rate": 1.4435563792681828e-05, + "loss": 0.2018, + "step": 72090 + }, + { + "epoch": 2.783891269933202, + "grad_norm": 1.3852754831314087, + "learning_rate": 1.4409822773080043e-05, + "loss": 0.3339, + "step": 72100 + }, + { + "epoch": 2.784277385227229, + "grad_norm": 0.030897267162799835, + "learning_rate": 1.4384081753478256e-05, + "loss": 0.1089, + "step": 72110 + }, + { + "epoch": 2.7846635005212557, + "grad_norm": 0.41380831599235535, + "learning_rate": 1.4358340733876468e-05, + "loss": 0.0955, + "step": 72120 + }, + { + "epoch": 2.785049615815282, + "grad_norm": 0.40943947434425354, + "learning_rate": 1.4332599714274683e-05, + "loss": 0.1309, + "step": 72130 + }, + { + "epoch": 2.785435731109309, + "grad_norm": 5.614469528198242, + "learning_rate": 1.4306858694672898e-05, + "loss": 0.2242, + "step": 72140 + }, + { + "epoch": 2.785821846403336, + "grad_norm": 1.1512348651885986, + "learning_rate": 1.428111767507111e-05, + "loss": 0.1386, + "step": 72150 + }, + { + "epoch": 2.7862079616973627, + "grad_norm": 1.467037558555603, + "learning_rate": 1.4255376655469324e-05, + "loss": 0.1531, + "step": 72160 + }, + { + "epoch": 2.7865940769913897, + "grad_norm": 0.7877001762390137, + "learning_rate": 1.422963563586754e-05, + "loss": 0.1488, + "step": 72170 + }, + { + "epoch": 2.7869801922854163, + "grad_norm": 0.7077778577804565, + "learning_rate": 1.4203894616265751e-05, + "loss": 0.1213, + "step": 72180 + }, + { + "epoch": 2.7873663075794433, + "grad_norm": 0.6909589171409607, + "learning_rate": 1.4178153596663964e-05, + "loss": 0.2211, + "step": 72190 + }, + { + "epoch": 2.78775242287347, + "grad_norm": 0.23613958060741425, + "learning_rate": 1.4152412577062176e-05, + "loss": 0.1896, + "step": 72200 + }, + { + "epoch": 2.788138538167497, + "grad_norm": 1.218856930732727, + "learning_rate": 1.4126671557460391e-05, + "loss": 0.0989, + "step": 72210 + }, + { + 
"epoch": 2.788524653461524, + "grad_norm": 1.9225094318389893, + "learning_rate": 1.4100930537858606e-05, + "loss": 0.1355, + "step": 72220 + }, + { + "epoch": 2.7889107687555503, + "grad_norm": 1.3572889566421509, + "learning_rate": 1.4075189518256818e-05, + "loss": 0.2708, + "step": 72230 + }, + { + "epoch": 2.7892968840495773, + "grad_norm": 1.4339534044265747, + "learning_rate": 1.4049448498655033e-05, + "loss": 0.173, + "step": 72240 + }, + { + "epoch": 2.789682999343604, + "grad_norm": 1.6303379535675049, + "learning_rate": 1.4023707479053248e-05, + "loss": 0.1791, + "step": 72250 + }, + { + "epoch": 2.790069114637631, + "grad_norm": 0.25583216547966003, + "learning_rate": 1.399796645945146e-05, + "loss": 0.0694, + "step": 72260 + }, + { + "epoch": 2.7904552299316574, + "grad_norm": 0.29900923371315, + "learning_rate": 1.3972225439849673e-05, + "loss": 0.0942, + "step": 72270 + }, + { + "epoch": 2.7908413452256844, + "grad_norm": 0.4300585687160492, + "learning_rate": 1.3946484420247888e-05, + "loss": 0.1378, + "step": 72280 + }, + { + "epoch": 2.7912274605197114, + "grad_norm": 2.40873384475708, + "learning_rate": 1.39207434006461e-05, + "loss": 0.2227, + "step": 72290 + }, + { + "epoch": 2.791613575813738, + "grad_norm": 1.8558450937271118, + "learning_rate": 1.3895002381044314e-05, + "loss": 0.1489, + "step": 72300 + }, + { + "epoch": 2.791999691107765, + "grad_norm": 0.7937175631523132, + "learning_rate": 1.386926136144253e-05, + "loss": 0.2045, + "step": 72310 + }, + { + "epoch": 2.7923858064017915, + "grad_norm": 1.4006129503250122, + "learning_rate": 1.3843520341840741e-05, + "loss": 0.205, + "step": 72320 + }, + { + "epoch": 2.7927719216958184, + "grad_norm": 0.765356183052063, + "learning_rate": 1.3817779322238956e-05, + "loss": 0.0647, + "step": 72330 + }, + { + "epoch": 2.793158036989845, + "grad_norm": 0.9293017983436584, + "learning_rate": 1.3792038302637167e-05, + "loss": 0.149, + "step": 72340 + }, + { + "epoch": 2.793544152283872, + "grad_norm": 0.1647326946258545, + "learning_rate": 1.376629728303538e-05, + "loss": 0.0957, + "step": 72350 + }, + { + "epoch": 2.793930267577899, + "grad_norm": 1.7377187013626099, + "learning_rate": 1.3740556263433596e-05, + "loss": 0.1945, + "step": 72360 + }, + { + "epoch": 2.7943163828719255, + "grad_norm": 1.4326138496398926, + "learning_rate": 1.3714815243831807e-05, + "loss": 0.1362, + "step": 72370 + }, + { + "epoch": 2.7947024981659525, + "grad_norm": 0.07872216403484344, + "learning_rate": 1.3689074224230022e-05, + "loss": 0.1186, + "step": 72380 + }, + { + "epoch": 2.795088613459979, + "grad_norm": 2.5933990478515625, + "learning_rate": 1.3663333204628237e-05, + "loss": 0.3499, + "step": 72390 + }, + { + "epoch": 2.795474728754006, + "grad_norm": 0.8128255009651184, + "learning_rate": 1.3637592185026449e-05, + "loss": 0.1798, + "step": 72400 + }, + { + "epoch": 2.7958608440480326, + "grad_norm": 1.3631271123886108, + "learning_rate": 1.3611851165424664e-05, + "loss": 0.1602, + "step": 72410 + }, + { + "epoch": 2.7962469593420596, + "grad_norm": 0.2131202220916748, + "learning_rate": 1.3586110145822877e-05, + "loss": 0.2031, + "step": 72420 + }, + { + "epoch": 2.7966330746360866, + "grad_norm": 2.3542559146881104, + "learning_rate": 1.3560369126221089e-05, + "loss": 0.2783, + "step": 72430 + }, + { + "epoch": 2.797019189930113, + "grad_norm": 1.075270175933838, + "learning_rate": 1.3534628106619304e-05, + "loss": 0.2014, + "step": 72440 + }, + { + "epoch": 2.7974053052241397, + "grad_norm": 1.6369863748550415, + 
"learning_rate": 1.3508887087017516e-05, + "loss": 0.1512, + "step": 72450 + }, + { + "epoch": 2.7977914205181666, + "grad_norm": 2.450411081314087, + "learning_rate": 1.348314606741573e-05, + "loss": 0.1899, + "step": 72460 + }, + { + "epoch": 2.7981775358121936, + "grad_norm": 0.5244776010513306, + "learning_rate": 1.3457405047813946e-05, + "loss": 0.1652, + "step": 72470 + }, + { + "epoch": 2.79856365110622, + "grad_norm": 1.206304907798767, + "learning_rate": 1.3431664028212157e-05, + "loss": 0.1695, + "step": 72480 + }, + { + "epoch": 2.798949766400247, + "grad_norm": 1.7752883434295654, + "learning_rate": 1.3405923008610372e-05, + "loss": 0.2092, + "step": 72490 + }, + { + "epoch": 2.799335881694274, + "grad_norm": 2.458888530731201, + "learning_rate": 1.3380181989008586e-05, + "loss": 0.1963, + "step": 72500 + }, + { + "epoch": 2.7997219969883007, + "grad_norm": 0.24970018863677979, + "learning_rate": 1.3354440969406797e-05, + "loss": 0.1438, + "step": 72510 + }, + { + "epoch": 2.8001081122823273, + "grad_norm": 1.6026058197021484, + "learning_rate": 1.3328699949805012e-05, + "loss": 0.1702, + "step": 72520 + }, + { + "epoch": 2.8004942275763542, + "grad_norm": 0.6461538076400757, + "learning_rate": 1.3302958930203227e-05, + "loss": 0.1725, + "step": 72530 + }, + { + "epoch": 2.8008803428703812, + "grad_norm": 0.30602967739105225, + "learning_rate": 1.3277217910601439e-05, + "loss": 0.1018, + "step": 72540 + }, + { + "epoch": 2.801266458164408, + "grad_norm": 0.5502751469612122, + "learning_rate": 1.3251476890999654e-05, + "loss": 0.1793, + "step": 72550 + }, + { + "epoch": 2.8016525734584348, + "grad_norm": 0.2644082307815552, + "learning_rate": 1.3225735871397869e-05, + "loss": 0.0423, + "step": 72560 + }, + { + "epoch": 2.8020386887524618, + "grad_norm": 2.0505154132843018, + "learning_rate": 1.319999485179608e-05, + "loss": 0.2418, + "step": 72570 + }, + { + "epoch": 2.8024248040464883, + "grad_norm": 3.0608208179473877, + "learning_rate": 1.3174253832194294e-05, + "loss": 0.1613, + "step": 72580 + }, + { + "epoch": 2.802810919340515, + "grad_norm": 0.05718081071972847, + "learning_rate": 1.3148512812592505e-05, + "loss": 0.132, + "step": 72590 + }, + { + "epoch": 2.803197034634542, + "grad_norm": 2.007830858230591, + "learning_rate": 1.312277179299072e-05, + "loss": 0.138, + "step": 72600 + }, + { + "epoch": 2.803583149928569, + "grad_norm": 0.4181762933731079, + "learning_rate": 1.3097030773388935e-05, + "loss": 0.0742, + "step": 72610 + }, + { + "epoch": 2.8039692652225954, + "grad_norm": 1.4194786548614502, + "learning_rate": 1.3071289753787147e-05, + "loss": 0.1517, + "step": 72620 + }, + { + "epoch": 2.8043553805166224, + "grad_norm": 0.03250798210501671, + "learning_rate": 1.3045548734185362e-05, + "loss": 0.1, + "step": 72630 + }, + { + "epoch": 2.804741495810649, + "grad_norm": 0.983828067779541, + "learning_rate": 1.3019807714583577e-05, + "loss": 0.1765, + "step": 72640 + }, + { + "epoch": 2.805127611104676, + "grad_norm": 0.4670206904411316, + "learning_rate": 1.2994066694981789e-05, + "loss": 0.0954, + "step": 72650 + }, + { + "epoch": 2.8055137263987024, + "grad_norm": 0.09260097146034241, + "learning_rate": 1.2968325675380002e-05, + "loss": 0.119, + "step": 72660 + }, + { + "epoch": 2.8058998416927294, + "grad_norm": 2.12126088142395, + "learning_rate": 1.2942584655778217e-05, + "loss": 0.1673, + "step": 72670 + }, + { + "epoch": 2.8062859569867564, + "grad_norm": 0.11968256533145905, + "learning_rate": 1.2916843636176429e-05, + "loss": 0.0941, + "step": 72680 + 
}, + { + "epoch": 2.806672072280783, + "grad_norm": 1.229608416557312, + "learning_rate": 1.2891102616574644e-05, + "loss": 0.2319, + "step": 72690 + }, + { + "epoch": 2.80705818757481, + "grad_norm": 0.7404507994651794, + "learning_rate": 1.2865361596972859e-05, + "loss": 0.1878, + "step": 72700 + }, + { + "epoch": 2.8074443028688365, + "grad_norm": 0.15742874145507812, + "learning_rate": 1.283962057737107e-05, + "loss": 0.2041, + "step": 72710 + }, + { + "epoch": 2.8078304181628635, + "grad_norm": 1.860520601272583, + "learning_rate": 1.2813879557769285e-05, + "loss": 0.1575, + "step": 72720 + }, + { + "epoch": 2.80821653345689, + "grad_norm": 2.309978723526001, + "learning_rate": 1.2788138538167497e-05, + "loss": 0.104, + "step": 72730 + }, + { + "epoch": 2.808602648750917, + "grad_norm": 0.4069162905216217, + "learning_rate": 1.276239751856571e-05, + "loss": 0.1167, + "step": 72740 + }, + { + "epoch": 2.808988764044944, + "grad_norm": 0.9853174090385437, + "learning_rate": 1.2736656498963925e-05, + "loss": 0.1826, + "step": 72750 + }, + { + "epoch": 2.8093748793389706, + "grad_norm": 0.29024359583854675, + "learning_rate": 1.2710915479362137e-05, + "loss": 0.1662, + "step": 72760 + }, + { + "epoch": 2.8097609946329976, + "grad_norm": 1.698085069656372, + "learning_rate": 1.2685174459760352e-05, + "loss": 0.2987, + "step": 72770 + }, + { + "epoch": 2.810147109927024, + "grad_norm": 0.6112163066864014, + "learning_rate": 1.2659433440158567e-05, + "loss": 0.1074, + "step": 72780 + }, + { + "epoch": 2.810533225221051, + "grad_norm": 0.6659224033355713, + "learning_rate": 1.2633692420556778e-05, + "loss": 0.1514, + "step": 72790 + }, + { + "epoch": 2.8109193405150776, + "grad_norm": 0.40598243474960327, + "learning_rate": 1.2607951400954993e-05, + "loss": 0.1022, + "step": 72800 + }, + { + "epoch": 2.8113054558091046, + "grad_norm": 0.3016485571861267, + "learning_rate": 1.2582210381353207e-05, + "loss": 0.1226, + "step": 72810 + }, + { + "epoch": 2.8116915711031316, + "grad_norm": 0.8959118127822876, + "learning_rate": 1.2556469361751418e-05, + "loss": 0.1283, + "step": 72820 + }, + { + "epoch": 2.812077686397158, + "grad_norm": 0.1264275461435318, + "learning_rate": 1.2530728342149633e-05, + "loss": 0.1191, + "step": 72830 + }, + { + "epoch": 2.812463801691185, + "grad_norm": 0.06748595088720322, + "learning_rate": 1.2504987322547848e-05, + "loss": 0.1728, + "step": 72840 + }, + { + "epoch": 2.8128499169852117, + "grad_norm": 1.8376634120941162, + "learning_rate": 1.247924630294606e-05, + "loss": 0.1773, + "step": 72850 + }, + { + "epoch": 2.8132360322792387, + "grad_norm": 1.0151314735412598, + "learning_rate": 1.2453505283344275e-05, + "loss": 0.0979, + "step": 72860 + }, + { + "epoch": 2.8136221475732652, + "grad_norm": 0.274681955575943, + "learning_rate": 1.2427764263742488e-05, + "loss": 0.1806, + "step": 72870 + }, + { + "epoch": 2.814008262867292, + "grad_norm": 3.2869608402252197, + "learning_rate": 1.2402023244140702e-05, + "loss": 0.1469, + "step": 72880 + }, + { + "epoch": 2.814394378161319, + "grad_norm": 0.7580883502960205, + "learning_rate": 1.2376282224538915e-05, + "loss": 0.175, + "step": 72890 + }, + { + "epoch": 2.8147804934553458, + "grad_norm": 0.26281633973121643, + "learning_rate": 1.2350541204937128e-05, + "loss": 0.1801, + "step": 72900 + }, + { + "epoch": 2.8151666087493723, + "grad_norm": 0.5014416575431824, + "learning_rate": 1.2324800185335341e-05, + "loss": 0.1159, + "step": 72910 + }, + { + "epoch": 2.8155527240433993, + "grad_norm": 0.5112303495407104, + 
"learning_rate": 1.2299059165733555e-05, + "loss": 0.0988, + "step": 72920 + }, + { + "epoch": 2.8159388393374263, + "grad_norm": 1.928898811340332, + "learning_rate": 1.227331814613177e-05, + "loss": 0.1261, + "step": 72930 + }, + { + "epoch": 2.816324954631453, + "grad_norm": 2.8064088821411133, + "learning_rate": 1.2247577126529983e-05, + "loss": 0.1811, + "step": 72940 + }, + { + "epoch": 2.81671106992548, + "grad_norm": 1.5859709978103638, + "learning_rate": 1.2221836106928196e-05, + "loss": 0.2235, + "step": 72950 + }, + { + "epoch": 2.817097185219507, + "grad_norm": 0.6035090684890747, + "learning_rate": 1.219609508732641e-05, + "loss": 0.0931, + "step": 72960 + }, + { + "epoch": 2.8174833005135334, + "grad_norm": 0.19035843014717102, + "learning_rate": 1.2170354067724623e-05, + "loss": 0.182, + "step": 72970 + }, + { + "epoch": 2.81786941580756, + "grad_norm": 0.2380947321653366, + "learning_rate": 1.2144613048122836e-05, + "loss": 0.0744, + "step": 72980 + }, + { + "epoch": 2.818255531101587, + "grad_norm": 0.5143213868141174, + "learning_rate": 1.211887202852105e-05, + "loss": 0.1252, + "step": 72990 + }, + { + "epoch": 2.818641646395614, + "grad_norm": 1.6957656145095825, + "learning_rate": 1.2093131008919265e-05, + "loss": 0.1701, + "step": 73000 + }, + { + "epoch": 2.8190277616896404, + "grad_norm": 0.31234198808670044, + "learning_rate": 1.2067389989317478e-05, + "loss": 0.0638, + "step": 73010 + }, + { + "epoch": 2.8194138769836674, + "grad_norm": 1.2418551445007324, + "learning_rate": 1.2041648969715691e-05, + "loss": 0.1972, + "step": 73020 + }, + { + "epoch": 2.819799992277694, + "grad_norm": 0.08601387590169907, + "learning_rate": 1.2015907950113905e-05, + "loss": 0.1808, + "step": 73030 + }, + { + "epoch": 2.820186107571721, + "grad_norm": 0.7416751384735107, + "learning_rate": 1.1990166930512118e-05, + "loss": 0.0543, + "step": 73040 + }, + { + "epoch": 2.8205722228657475, + "grad_norm": 0.957653284072876, + "learning_rate": 1.1964425910910331e-05, + "loss": 0.125, + "step": 73050 + }, + { + "epoch": 2.8209583381597745, + "grad_norm": 0.34786149859428406, + "learning_rate": 1.1938684891308545e-05, + "loss": 0.2388, + "step": 73060 + }, + { + "epoch": 2.8213444534538015, + "grad_norm": 1.0569926500320435, + "learning_rate": 1.1912943871706758e-05, + "loss": 0.1199, + "step": 73070 + }, + { + "epoch": 2.821730568747828, + "grad_norm": 1.948347568511963, + "learning_rate": 1.1887202852104973e-05, + "loss": 0.195, + "step": 73080 + }, + { + "epoch": 2.822116684041855, + "grad_norm": 0.18845289945602417, + "learning_rate": 1.1861461832503186e-05, + "loss": 0.295, + "step": 73090 + }, + { + "epoch": 2.8225027993358816, + "grad_norm": 1.7377262115478516, + "learning_rate": 1.18357208129014e-05, + "loss": 0.3067, + "step": 73100 + }, + { + "epoch": 2.8228889146299085, + "grad_norm": 1.232576847076416, + "learning_rate": 1.1809979793299614e-05, + "loss": 0.1849, + "step": 73110 + }, + { + "epoch": 2.823275029923935, + "grad_norm": 1.6318284273147583, + "learning_rate": 1.1784238773697826e-05, + "loss": 0.0626, + "step": 73120 + }, + { + "epoch": 2.823661145217962, + "grad_norm": 0.5187623500823975, + "learning_rate": 1.175849775409604e-05, + "loss": 0.1255, + "step": 73130 + }, + { + "epoch": 2.824047260511989, + "grad_norm": 2.4732282161712646, + "learning_rate": 1.1732756734494253e-05, + "loss": 0.1433, + "step": 73140 + }, + { + "epoch": 2.8244333758060156, + "grad_norm": 2.080509901046753, + "learning_rate": 1.1707015714892468e-05, + "loss": 0.1533, + "step": 73150 + }, + { 
+ "epoch": 2.8248194911000426, + "grad_norm": 0.61173415184021, + "learning_rate": 1.1681274695290681e-05, + "loss": 0.0807, + "step": 73160 + }, + { + "epoch": 2.825205606394069, + "grad_norm": 0.1210317462682724, + "learning_rate": 1.1655533675688894e-05, + "loss": 0.1433, + "step": 73170 + }, + { + "epoch": 2.825591721688096, + "grad_norm": 0.699754536151886, + "learning_rate": 1.162979265608711e-05, + "loss": 0.091, + "step": 73180 + }, + { + "epoch": 2.8259778369821227, + "grad_norm": 1.1935967206954956, + "learning_rate": 1.1604051636485323e-05, + "loss": 0.1567, + "step": 73190 + }, + { + "epoch": 2.8263639522761497, + "grad_norm": 0.14032036066055298, + "learning_rate": 1.1578310616883534e-05, + "loss": 0.1283, + "step": 73200 + }, + { + "epoch": 2.8267500675701767, + "grad_norm": 0.1003938615322113, + "learning_rate": 1.1552569597281748e-05, + "loss": 0.241, + "step": 73210 + }, + { + "epoch": 2.827136182864203, + "grad_norm": 0.08814160525798798, + "learning_rate": 1.1526828577679963e-05, + "loss": 0.1191, + "step": 73220 + }, + { + "epoch": 2.82752229815823, + "grad_norm": 2.224803924560547, + "learning_rate": 1.1501087558078176e-05, + "loss": 0.1395, + "step": 73230 + }, + { + "epoch": 2.8279084134522567, + "grad_norm": 0.7424294948577881, + "learning_rate": 1.147534653847639e-05, + "loss": 0.2086, + "step": 73240 + }, + { + "epoch": 2.8282945287462837, + "grad_norm": 2.4858903884887695, + "learning_rate": 1.1449605518874604e-05, + "loss": 0.3264, + "step": 73250 + }, + { + "epoch": 2.8286806440403103, + "grad_norm": 0.09597936272621155, + "learning_rate": 1.1423864499272818e-05, + "loss": 0.1849, + "step": 73260 + }, + { + "epoch": 2.8290667593343373, + "grad_norm": 0.14344246685504913, + "learning_rate": 1.1398123479671031e-05, + "loss": 0.0898, + "step": 73270 + }, + { + "epoch": 2.8294528746283643, + "grad_norm": 1.6673258543014526, + "learning_rate": 1.1372382460069242e-05, + "loss": 0.1218, + "step": 73280 + }, + { + "epoch": 2.829838989922391, + "grad_norm": 0.7718226313591003, + "learning_rate": 1.1346641440467457e-05, + "loss": 0.246, + "step": 73290 + }, + { + "epoch": 2.8302251052164173, + "grad_norm": 1.073569416999817, + "learning_rate": 1.132090042086567e-05, + "loss": 0.1469, + "step": 73300 + }, + { + "epoch": 2.8306112205104443, + "grad_norm": 0.6750389933586121, + "learning_rate": 1.1295159401263884e-05, + "loss": 0.0572, + "step": 73310 + }, + { + "epoch": 2.8309973358044713, + "grad_norm": 0.3106151819229126, + "learning_rate": 1.1269418381662099e-05, + "loss": 0.1877, + "step": 73320 + }, + { + "epoch": 2.831383451098498, + "grad_norm": 2.519272804260254, + "learning_rate": 1.1243677362060312e-05, + "loss": 0.3173, + "step": 73330 + }, + { + "epoch": 2.831769566392525, + "grad_norm": 0.1696769744157791, + "learning_rate": 1.1217936342458526e-05, + "loss": 0.0998, + "step": 73340 + }, + { + "epoch": 2.832155681686552, + "grad_norm": 1.1009591817855835, + "learning_rate": 1.1192195322856739e-05, + "loss": 0.1534, + "step": 73350 + }, + { + "epoch": 2.8325417969805784, + "grad_norm": 0.5018852949142456, + "learning_rate": 1.1166454303254952e-05, + "loss": 0.1132, + "step": 73360 + }, + { + "epoch": 2.832927912274605, + "grad_norm": 0.75879967212677, + "learning_rate": 1.1140713283653166e-05, + "loss": 0.2494, + "step": 73370 + }, + { + "epoch": 2.833314027568632, + "grad_norm": 0.6770151853561401, + "learning_rate": 1.1114972264051379e-05, + "loss": 0.127, + "step": 73380 + }, + { + "epoch": 2.833700142862659, + "grad_norm": 0.7802016139030457, + 
"learning_rate": 1.1089231244449594e-05, + "loss": 0.0463, + "step": 73390 + }, + { + "epoch": 2.8340862581566855, + "grad_norm": 0.6405506134033203, + "learning_rate": 1.1063490224847807e-05, + "loss": 0.1039, + "step": 73400 + }, + { + "epoch": 2.8344723734507125, + "grad_norm": 1.5618057250976562, + "learning_rate": 1.103774920524602e-05, + "loss": 0.2303, + "step": 73410 + }, + { + "epoch": 2.8348584887447394, + "grad_norm": 0.07669465243816376, + "learning_rate": 1.1012008185644234e-05, + "loss": 0.0619, + "step": 73420 + }, + { + "epoch": 2.835244604038766, + "grad_norm": 1.495162010192871, + "learning_rate": 1.0986267166042447e-05, + "loss": 0.3976, + "step": 73430 + }, + { + "epoch": 2.8356307193327925, + "grad_norm": 1.0150856971740723, + "learning_rate": 1.096052614644066e-05, + "loss": 0.1209, + "step": 73440 + }, + { + "epoch": 2.8360168346268195, + "grad_norm": 0.0810672789812088, + "learning_rate": 1.0934785126838874e-05, + "loss": 0.1443, + "step": 73450 + }, + { + "epoch": 2.8364029499208465, + "grad_norm": 1.9854507446289062, + "learning_rate": 1.0909044107237089e-05, + "loss": 0.0724, + "step": 73460 + }, + { + "epoch": 2.836789065214873, + "grad_norm": 1.1847221851348877, + "learning_rate": 1.0883303087635302e-05, + "loss": 0.1215, + "step": 73470 + }, + { + "epoch": 2.8371751805089, + "grad_norm": 0.1890803575515747, + "learning_rate": 1.0857562068033515e-05, + "loss": 0.0693, + "step": 73480 + }, + { + "epoch": 2.8375612958029266, + "grad_norm": 1.4246773719787598, + "learning_rate": 1.0831821048431729e-05, + "loss": 0.1458, + "step": 73490 + }, + { + "epoch": 2.8379474110969536, + "grad_norm": 0.9759969115257263, + "learning_rate": 1.0806080028829944e-05, + "loss": 0.0894, + "step": 73500 + }, + { + "epoch": 2.83833352639098, + "grad_norm": 0.06872682273387909, + "learning_rate": 1.0780339009228155e-05, + "loss": 0.0911, + "step": 73510 + }, + { + "epoch": 2.838719641685007, + "grad_norm": 0.9503278732299805, + "learning_rate": 1.0754597989626369e-05, + "loss": 0.0367, + "step": 73520 + }, + { + "epoch": 2.839105756979034, + "grad_norm": 0.08986163884401321, + "learning_rate": 1.0728856970024582e-05, + "loss": 0.113, + "step": 73530 + }, + { + "epoch": 2.8394918722730607, + "grad_norm": 0.4085181951522827, + "learning_rate": 1.0703115950422797e-05, + "loss": 0.0858, + "step": 73540 + }, + { + "epoch": 2.8398779875670876, + "grad_norm": 2.045522689819336, + "learning_rate": 1.067737493082101e-05, + "loss": 0.0585, + "step": 73550 + }, + { + "epoch": 2.840264102861114, + "grad_norm": 1.701407551765442, + "learning_rate": 1.0651633911219224e-05, + "loss": 0.2093, + "step": 73560 + }, + { + "epoch": 2.840650218155141, + "grad_norm": 0.5299584865570068, + "learning_rate": 1.0625892891617439e-05, + "loss": 0.1673, + "step": 73570 + }, + { + "epoch": 2.8410363334491677, + "grad_norm": 1.289556622505188, + "learning_rate": 1.0600151872015652e-05, + "loss": 0.0876, + "step": 73580 + }, + { + "epoch": 2.8414224487431947, + "grad_norm": 1.0992724895477295, + "learning_rate": 1.0574410852413864e-05, + "loss": 0.1526, + "step": 73590 + }, + { + "epoch": 2.8418085640372217, + "grad_norm": 1.4800548553466797, + "learning_rate": 1.0548669832812077e-05, + "loss": 0.2792, + "step": 73600 + }, + { + "epoch": 2.8421946793312483, + "grad_norm": 0.7440020442008972, + "learning_rate": 1.0522928813210292e-05, + "loss": 0.0838, + "step": 73610 + }, + { + "epoch": 2.8425807946252752, + "grad_norm": 0.5010614395141602, + "learning_rate": 1.0497187793608505e-05, + "loss": 0.2615, + "step": 73620 
+ }, + { + "epoch": 2.842966909919302, + "grad_norm": 0.6573401093482971, + "learning_rate": 1.0471446774006719e-05, + "loss": 0.1924, + "step": 73630 + }, + { + "epoch": 2.8433530252133288, + "grad_norm": 1.2293144464492798, + "learning_rate": 1.0445705754404934e-05, + "loss": 0.1723, + "step": 73640 + }, + { + "epoch": 2.8437391405073553, + "grad_norm": 1.2916191816329956, + "learning_rate": 1.0419964734803147e-05, + "loss": 0.2163, + "step": 73650 + }, + { + "epoch": 2.8441252558013823, + "grad_norm": 0.48834675550460815, + "learning_rate": 1.039422371520136e-05, + "loss": 0.3001, + "step": 73660 + }, + { + "epoch": 2.8445113710954093, + "grad_norm": 0.4397851228713989, + "learning_rate": 1.0368482695599572e-05, + "loss": 0.1354, + "step": 73670 + }, + { + "epoch": 2.844897486389436, + "grad_norm": 1.6258771419525146, + "learning_rate": 1.0342741675997787e-05, + "loss": 0.1677, + "step": 73680 + }, + { + "epoch": 2.845283601683463, + "grad_norm": 0.5988297462463379, + "learning_rate": 1.0317000656396e-05, + "loss": 0.0911, + "step": 73690 + }, + { + "epoch": 2.8456697169774894, + "grad_norm": 2.7825276851654053, + "learning_rate": 1.0291259636794213e-05, + "loss": 0.2635, + "step": 73700 + }, + { + "epoch": 2.8460558322715164, + "grad_norm": 0.0774473026394844, + "learning_rate": 1.0265518617192428e-05, + "loss": 0.1239, + "step": 73710 + }, + { + "epoch": 2.846441947565543, + "grad_norm": 0.22317875921726227, + "learning_rate": 1.0239777597590642e-05, + "loss": 0.2164, + "step": 73720 + }, + { + "epoch": 2.84682806285957, + "grad_norm": 0.15649321675300598, + "learning_rate": 1.0214036577988855e-05, + "loss": 0.0442, + "step": 73730 + }, + { + "epoch": 2.847214178153597, + "grad_norm": 1.7008354663848877, + "learning_rate": 1.0188295558387068e-05, + "loss": 0.0829, + "step": 73740 + }, + { + "epoch": 2.8476002934476234, + "grad_norm": 0.9527981281280518, + "learning_rate": 1.0162554538785282e-05, + "loss": 0.1655, + "step": 73750 + }, + { + "epoch": 2.84798640874165, + "grad_norm": 0.27073028683662415, + "learning_rate": 1.0136813519183495e-05, + "loss": 0.0909, + "step": 73760 + }, + { + "epoch": 2.848372524035677, + "grad_norm": 0.6584774851799011, + "learning_rate": 1.0111072499581708e-05, + "loss": 0.1365, + "step": 73770 + }, + { + "epoch": 2.848758639329704, + "grad_norm": 0.06433244794607162, + "learning_rate": 1.0085331479979923e-05, + "loss": 0.0936, + "step": 73780 + }, + { + "epoch": 2.8491447546237305, + "grad_norm": 0.31640946865081787, + "learning_rate": 1.0059590460378137e-05, + "loss": 0.053, + "step": 73790 + }, + { + "epoch": 2.8495308699177575, + "grad_norm": 1.378275752067566, + "learning_rate": 1.003384944077635e-05, + "loss": 0.2072, + "step": 73800 + }, + { + "epoch": 2.8499169852117845, + "grad_norm": 0.40181395411491394, + "learning_rate": 1.0008108421174563e-05, + "loss": 0.0458, + "step": 73810 + }, + { + "epoch": 2.850303100505811, + "grad_norm": 0.6035460233688354, + "learning_rate": 9.982367401572777e-06, + "loss": 0.2126, + "step": 73820 + }, + { + "epoch": 2.8506892157998376, + "grad_norm": 1.581748366355896, + "learning_rate": 9.95662638197099e-06, + "loss": 0.2245, + "step": 73830 + }, + { + "epoch": 2.8510753310938646, + "grad_norm": 0.15285348892211914, + "learning_rate": 9.930885362369203e-06, + "loss": 0.2124, + "step": 73840 + }, + { + "epoch": 2.8514614463878916, + "grad_norm": 0.5655555725097656, + "learning_rate": 9.905144342767418e-06, + "loss": 0.215, + "step": 73850 + }, + { + "epoch": 2.851847561681918, + "grad_norm": 0.91652512550354, + 
"learning_rate": 9.879403323165631e-06, + "loss": 0.0512, + "step": 73860 + }, + { + "epoch": 2.852233676975945, + "grad_norm": 0.4889742136001587, + "learning_rate": 9.853662303563845e-06, + "loss": 0.0565, + "step": 73870 + }, + { + "epoch": 2.852619792269972, + "grad_norm": 0.5656816959381104, + "learning_rate": 9.827921283962058e-06, + "loss": 0.1392, + "step": 73880 + }, + { + "epoch": 2.8530059075639986, + "grad_norm": 0.5749841332435608, + "learning_rate": 9.802180264360273e-06, + "loss": 0.1548, + "step": 73890 + }, + { + "epoch": 2.853392022858025, + "grad_norm": 0.40446341037750244, + "learning_rate": 9.776439244758485e-06, + "loss": 0.1078, + "step": 73900 + }, + { + "epoch": 2.853778138152052, + "grad_norm": 0.7668073773384094, + "learning_rate": 9.750698225156698e-06, + "loss": 0.1282, + "step": 73910 + }, + { + "epoch": 2.854164253446079, + "grad_norm": 1.973522424697876, + "learning_rate": 9.724957205554913e-06, + "loss": 0.1002, + "step": 73920 + }, + { + "epoch": 2.8545503687401057, + "grad_norm": 3.882335662841797, + "learning_rate": 9.699216185953126e-06, + "loss": 0.3315, + "step": 73930 + }, + { + "epoch": 2.8549364840341327, + "grad_norm": 3.6236727237701416, + "learning_rate": 9.67347516635134e-06, + "loss": 0.0749, + "step": 73940 + }, + { + "epoch": 2.8553225993281592, + "grad_norm": 3.9697139263153076, + "learning_rate": 9.647734146749553e-06, + "loss": 0.165, + "step": 73950 + }, + { + "epoch": 2.8557087146221862, + "grad_norm": 0.2589983344078064, + "learning_rate": 9.621993127147768e-06, + "loss": 0.0949, + "step": 73960 + }, + { + "epoch": 2.8560948299162128, + "grad_norm": 1.9424326419830322, + "learning_rate": 9.596252107545981e-06, + "loss": 0.194, + "step": 73970 + }, + { + "epoch": 2.8564809452102398, + "grad_norm": 1.8792887926101685, + "learning_rate": 9.570511087944193e-06, + "loss": 0.1259, + "step": 73980 + }, + { + "epoch": 2.8568670605042668, + "grad_norm": 0.29986900091171265, + "learning_rate": 9.544770068342406e-06, + "loss": 0.0672, + "step": 73990 + }, + { + "epoch": 2.8572531757982933, + "grad_norm": 0.5949634909629822, + "learning_rate": 9.519029048740621e-06, + "loss": 0.1991, + "step": 74000 + }, + { + "epoch": 2.8576392910923203, + "grad_norm": 0.7105257511138916, + "learning_rate": 9.493288029138835e-06, + "loss": 0.1142, + "step": 74010 + }, + { + "epoch": 2.858025406386347, + "grad_norm": 0.1612206995487213, + "learning_rate": 9.467547009537048e-06, + "loss": 0.0743, + "step": 74020 + }, + { + "epoch": 2.858411521680374, + "grad_norm": 0.96357262134552, + "learning_rate": 9.441805989935263e-06, + "loss": 0.1514, + "step": 74030 + }, + { + "epoch": 2.8587976369744004, + "grad_norm": 0.10844029486179352, + "learning_rate": 9.416064970333476e-06, + "loss": 0.0921, + "step": 74040 + }, + { + "epoch": 2.8591837522684274, + "grad_norm": 1.9250043630599976, + "learning_rate": 9.39032395073169e-06, + "loss": 0.2404, + "step": 74050 + }, + { + "epoch": 2.8595698675624543, + "grad_norm": 0.9182831048965454, + "learning_rate": 9.364582931129901e-06, + "loss": 0.1312, + "step": 74060 + }, + { + "epoch": 2.859955982856481, + "grad_norm": 0.45169830322265625, + "learning_rate": 9.338841911528116e-06, + "loss": 0.1484, + "step": 74070 + }, + { + "epoch": 2.860342098150508, + "grad_norm": 1.3599480390548706, + "learning_rate": 9.31310089192633e-06, + "loss": 0.1648, + "step": 74080 + }, + { + "epoch": 2.8607282134445344, + "grad_norm": 0.7859013676643372, + "learning_rate": 9.287359872324543e-06, + "loss": 0.1339, + "step": 74090 + }, + { + "epoch": 
2.8611143287385614, + "grad_norm": 0.2625623047351837, + "learning_rate": 9.261618852722758e-06, + "loss": 0.1635, + "step": 74100 + }, + { + "epoch": 2.861500444032588, + "grad_norm": 3.6348588466644287, + "learning_rate": 9.235877833120971e-06, + "loss": 0.1958, + "step": 74110 + }, + { + "epoch": 2.861886559326615, + "grad_norm": 0.2913823127746582, + "learning_rate": 9.210136813519184e-06, + "loss": 0.1228, + "step": 74120 + }, + { + "epoch": 2.862272674620642, + "grad_norm": 0.8785780072212219, + "learning_rate": 9.184395793917398e-06, + "loss": 0.1284, + "step": 74130 + }, + { + "epoch": 2.8626587899146685, + "grad_norm": 0.20822127163410187, + "learning_rate": 9.158654774315611e-06, + "loss": 0.2853, + "step": 74140 + }, + { + "epoch": 2.8630449052086955, + "grad_norm": 0.12501594424247742, + "learning_rate": 9.132913754713824e-06, + "loss": 0.2773, + "step": 74150 + }, + { + "epoch": 2.863431020502722, + "grad_norm": 0.7225301861763, + "learning_rate": 9.107172735112038e-06, + "loss": 0.158, + "step": 74160 + }, + { + "epoch": 2.863817135796749, + "grad_norm": 0.15984690189361572, + "learning_rate": 9.081431715510253e-06, + "loss": 0.2105, + "step": 74170 + }, + { + "epoch": 2.8642032510907756, + "grad_norm": 0.9793509840965271, + "learning_rate": 9.055690695908466e-06, + "loss": 0.231, + "step": 74180 + }, + { + "epoch": 2.8645893663848025, + "grad_norm": 1.3881072998046875, + "learning_rate": 9.02994967630668e-06, + "loss": 0.1013, + "step": 74190 + }, + { + "epoch": 2.8649754816788295, + "grad_norm": 2.9956090450286865, + "learning_rate": 9.004208656704893e-06, + "loss": 0.1328, + "step": 74200 + }, + { + "epoch": 2.865361596972856, + "grad_norm": 1.1672799587249756, + "learning_rate": 8.978467637103106e-06, + "loss": 0.1454, + "step": 74210 + }, + { + "epoch": 2.8657477122668826, + "grad_norm": 0.5341854095458984, + "learning_rate": 8.952726617501319e-06, + "loss": 0.0753, + "step": 74220 + }, + { + "epoch": 2.8661338275609096, + "grad_norm": 0.19304673373699188, + "learning_rate": 8.926985597899532e-06, + "loss": 0.1962, + "step": 74230 + }, + { + "epoch": 2.8665199428549366, + "grad_norm": 0.33846932649612427, + "learning_rate": 8.901244578297747e-06, + "loss": 0.1089, + "step": 74240 + }, + { + "epoch": 2.866906058148963, + "grad_norm": 1.007706642150879, + "learning_rate": 8.87550355869596e-06, + "loss": 0.186, + "step": 74250 + }, + { + "epoch": 2.86729217344299, + "grad_norm": 1.7284183502197266, + "learning_rate": 8.849762539094174e-06, + "loss": 0.2445, + "step": 74260 + }, + { + "epoch": 2.867678288737017, + "grad_norm": 3.7386040687561035, + "learning_rate": 8.824021519492387e-06, + "loss": 0.1044, + "step": 74270 + }, + { + "epoch": 2.8680644040310437, + "grad_norm": 0.5097699761390686, + "learning_rate": 8.7982804998906e-06, + "loss": 0.1357, + "step": 74280 + }, + { + "epoch": 2.8684505193250702, + "grad_norm": 1.620544195175171, + "learning_rate": 8.772539480288814e-06, + "loss": 0.1567, + "step": 74290 + }, + { + "epoch": 2.868836634619097, + "grad_norm": 0.6678496599197388, + "learning_rate": 8.746798460687027e-06, + "loss": 0.1495, + "step": 74300 + }, + { + "epoch": 2.869222749913124, + "grad_norm": 0.5393665432929993, + "learning_rate": 8.721057441085242e-06, + "loss": 0.1611, + "step": 74310 + }, + { + "epoch": 2.8696088652071507, + "grad_norm": 2.98551344871521, + "learning_rate": 8.695316421483456e-06, + "loss": 0.2125, + "step": 74320 + }, + { + "epoch": 2.8699949805011777, + "grad_norm": 0.36715471744537354, + "learning_rate": 8.669575401881669e-06, + 
"loss": 0.0974, + "step": 74330 + }, + { + "epoch": 2.8703810957952043, + "grad_norm": 2.0747640132904053, + "learning_rate": 8.643834382279882e-06, + "loss": 0.1571, + "step": 74340 + }, + { + "epoch": 2.8707672110892313, + "grad_norm": 3.940426826477051, + "learning_rate": 8.618093362678097e-06, + "loss": 0.1706, + "step": 74350 + }, + { + "epoch": 2.871153326383258, + "grad_norm": 0.07064225524663925, + "learning_rate": 8.59235234307631e-06, + "loss": 0.1104, + "step": 74360 + }, + { + "epoch": 2.871539441677285, + "grad_norm": 2.7614259719848633, + "learning_rate": 8.566611323474522e-06, + "loss": 0.2461, + "step": 74370 + }, + { + "epoch": 2.871925556971312, + "grad_norm": 0.1670389324426651, + "learning_rate": 8.540870303872736e-06, + "loss": 0.0632, + "step": 74380 + }, + { + "epoch": 2.8723116722653383, + "grad_norm": 0.16744600236415863, + "learning_rate": 8.51512928427095e-06, + "loss": 0.0861, + "step": 74390 + }, + { + "epoch": 2.8726977875593653, + "grad_norm": 1.4196829795837402, + "learning_rate": 8.489388264669164e-06, + "loss": 0.1127, + "step": 74400 + }, + { + "epoch": 2.873083902853392, + "grad_norm": 0.9786797165870667, + "learning_rate": 8.463647245067377e-06, + "loss": 0.1925, + "step": 74410 + }, + { + "epoch": 2.873470018147419, + "grad_norm": 1.3412476778030396, + "learning_rate": 8.437906225465592e-06, + "loss": 0.2199, + "step": 74420 + }, + { + "epoch": 2.8738561334414454, + "grad_norm": 0.7068845629692078, + "learning_rate": 8.412165205863805e-06, + "loss": 0.1286, + "step": 74430 + }, + { + "epoch": 2.8742422487354724, + "grad_norm": 0.9745468497276306, + "learning_rate": 8.386424186262019e-06, + "loss": 0.2113, + "step": 74440 + }, + { + "epoch": 2.8746283640294994, + "grad_norm": 1.4095795154571533, + "learning_rate": 8.36068316666023e-06, + "loss": 0.1474, + "step": 74450 + }, + { + "epoch": 2.875014479323526, + "grad_norm": 0.5530760884284973, + "learning_rate": 8.334942147058445e-06, + "loss": 0.1351, + "step": 74460 + }, + { + "epoch": 2.875400594617553, + "grad_norm": 1.8106993436813354, + "learning_rate": 8.309201127456659e-06, + "loss": 0.1643, + "step": 74470 + }, + { + "epoch": 2.8757867099115795, + "grad_norm": 0.3154134154319763, + "learning_rate": 8.283460107854872e-06, + "loss": 0.052, + "step": 74480 + }, + { + "epoch": 2.8761728252056065, + "grad_norm": 0.2410293072462082, + "learning_rate": 8.257719088253087e-06, + "loss": 0.0817, + "step": 74490 + }, + { + "epoch": 2.876558940499633, + "grad_norm": 0.7377256155014038, + "learning_rate": 8.2319780686513e-06, + "loss": 0.1561, + "step": 74500 + }, + { + "epoch": 2.87694505579366, + "grad_norm": 3.0138823986053467, + "learning_rate": 8.206237049049514e-06, + "loss": 0.1963, + "step": 74510 + }, + { + "epoch": 2.877331171087687, + "grad_norm": 2.0430660247802734, + "learning_rate": 8.180496029447727e-06, + "loss": 0.0937, + "step": 74520 + }, + { + "epoch": 2.8777172863817135, + "grad_norm": 0.4840744435787201, + "learning_rate": 8.15475500984594e-06, + "loss": 0.1026, + "step": 74530 + }, + { + "epoch": 2.8781034016757405, + "grad_norm": 1.995553731918335, + "learning_rate": 8.129013990244154e-06, + "loss": 0.1112, + "step": 74540 + }, + { + "epoch": 2.878489516969767, + "grad_norm": 2.139451742172241, + "learning_rate": 8.103272970642367e-06, + "loss": 0.2066, + "step": 74550 + }, + { + "epoch": 2.878875632263794, + "grad_norm": 1.6176917552947998, + "learning_rate": 8.077531951040582e-06, + "loss": 0.1588, + "step": 74560 + }, + { + "epoch": 2.8792617475578206, + "grad_norm": 
0.965374231338501, + "learning_rate": 8.051790931438795e-06, + "loss": 0.1274, + "step": 74570 + }, + { + "epoch": 2.8796478628518476, + "grad_norm": 2.842817544937134, + "learning_rate": 8.026049911837009e-06, + "loss": 0.1503, + "step": 74580 + }, + { + "epoch": 2.8800339781458746, + "grad_norm": 0.3894018232822418, + "learning_rate": 8.000308892235222e-06, + "loss": 0.1052, + "step": 74590 + }, + { + "epoch": 2.880420093439901, + "grad_norm": 1.3701835870742798, + "learning_rate": 7.974567872633435e-06, + "loss": 0.2016, + "step": 74600 + }, + { + "epoch": 2.8808062087339277, + "grad_norm": 1.6000498533248901, + "learning_rate": 7.948826853031648e-06, + "loss": 0.1766, + "step": 74610 + }, + { + "epoch": 2.8811923240279547, + "grad_norm": 0.3492911159992218, + "learning_rate": 7.923085833429862e-06, + "loss": 0.2132, + "step": 74620 + }, + { + "epoch": 2.8815784393219817, + "grad_norm": 2.516202926635742, + "learning_rate": 7.897344813828077e-06, + "loss": 0.1888, + "step": 74630 + }, + { + "epoch": 2.881964554616008, + "grad_norm": 1.8804141283035278, + "learning_rate": 7.87160379422629e-06, + "loss": 0.2125, + "step": 74640 + }, + { + "epoch": 2.882350669910035, + "grad_norm": 1.4559141397476196, + "learning_rate": 7.845862774624503e-06, + "loss": 0.2118, + "step": 74650 + }, + { + "epoch": 2.882736785204062, + "grad_norm": 1.0181102752685547, + "learning_rate": 7.820121755022717e-06, + "loss": 0.2107, + "step": 74660 + }, + { + "epoch": 2.8831229004980887, + "grad_norm": 0.8501084446907043, + "learning_rate": 7.79438073542093e-06, + "loss": 0.1405, + "step": 74670 + }, + { + "epoch": 2.8835090157921153, + "grad_norm": 0.6537768244743347, + "learning_rate": 7.768639715819143e-06, + "loss": 0.0951, + "step": 74680 + }, + { + "epoch": 2.8838951310861423, + "grad_norm": 1.6745322942733765, + "learning_rate": 7.742898696217357e-06, + "loss": 0.1451, + "step": 74690 + }, + { + "epoch": 2.8842812463801693, + "grad_norm": 0.046329010277986526, + "learning_rate": 7.717157676615572e-06, + "loss": 0.0637, + "step": 74700 + }, + { + "epoch": 2.884667361674196, + "grad_norm": 1.1959903240203857, + "learning_rate": 7.691416657013785e-06, + "loss": 0.1841, + "step": 74710 + }, + { + "epoch": 2.885053476968223, + "grad_norm": 2.5898563861846924, + "learning_rate": 7.665675637411998e-06, + "loss": 0.1309, + "step": 74720 + }, + { + "epoch": 2.8854395922622498, + "grad_norm": 0.05841336399316788, + "learning_rate": 7.639934617810212e-06, + "loss": 0.0825, + "step": 74730 + }, + { + "epoch": 2.8858257075562763, + "grad_norm": 1.5657339096069336, + "learning_rate": 7.614193598208426e-06, + "loss": 0.0816, + "step": 74740 + }, + { + "epoch": 2.886211822850303, + "grad_norm": 1.2975622415542603, + "learning_rate": 7.588452578606639e-06, + "loss": 0.2084, + "step": 74750 + }, + { + "epoch": 2.88659793814433, + "grad_norm": 0.8901941180229187, + "learning_rate": 7.562711559004852e-06, + "loss": 0.0946, + "step": 74760 + }, + { + "epoch": 2.886984053438357, + "grad_norm": 0.13277596235275269, + "learning_rate": 7.5369705394030665e-06, + "loss": 0.1313, + "step": 74770 + }, + { + "epoch": 2.8873701687323834, + "grad_norm": 0.6199597120285034, + "learning_rate": 7.51122951980128e-06, + "loss": 0.1139, + "step": 74780 + }, + { + "epoch": 2.8877562840264104, + "grad_norm": 0.041490960866212845, + "learning_rate": 7.485488500199493e-06, + "loss": 0.0945, + "step": 74790 + }, + { + "epoch": 2.888142399320437, + "grad_norm": 0.06974820047616959, + "learning_rate": 7.4597474805977065e-06, + "loss": 0.1619, + "step": 
74800 + }, + { + "epoch": 2.888528514614464, + "grad_norm": 0.5965213775634766, + "learning_rate": 7.434006460995921e-06, + "loss": 0.1294, + "step": 74810 + }, + { + "epoch": 2.8889146299084905, + "grad_norm": 1.50202214717865, + "learning_rate": 7.408265441394134e-06, + "loss": 0.2038, + "step": 74820 + }, + { + "epoch": 2.8893007452025175, + "grad_norm": 0.2680859863758087, + "learning_rate": 7.382524421792347e-06, + "loss": 0.1566, + "step": 74830 + }, + { + "epoch": 2.8896868604965444, + "grad_norm": 0.8714569211006165, + "learning_rate": 7.3567834021905606e-06, + "loss": 0.1103, + "step": 74840 + }, + { + "epoch": 2.890072975790571, + "grad_norm": 1.0597981214523315, + "learning_rate": 7.331042382588775e-06, + "loss": 0.1141, + "step": 74850 + }, + { + "epoch": 2.890459091084598, + "grad_norm": 1.1880320310592651, + "learning_rate": 7.305301362986988e-06, + "loss": 0.1203, + "step": 74860 + }, + { + "epoch": 2.8908452063786245, + "grad_norm": 0.8900028467178345, + "learning_rate": 7.279560343385201e-06, + "loss": 0.2258, + "step": 74870 + }, + { + "epoch": 2.8912313216726515, + "grad_norm": 0.6879392266273499, + "learning_rate": 7.253819323783416e-06, + "loss": 0.1561, + "step": 74880 + }, + { + "epoch": 2.891617436966678, + "grad_norm": 0.3370697498321533, + "learning_rate": 7.228078304181629e-06, + "loss": 0.2218, + "step": 74890 + }, + { + "epoch": 2.892003552260705, + "grad_norm": 2.0778462886810303, + "learning_rate": 7.202337284579842e-06, + "loss": 0.2195, + "step": 74900 + }, + { + "epoch": 2.892389667554732, + "grad_norm": 0.09895554929971695, + "learning_rate": 7.1765962649780554e-06, + "loss": 0.148, + "step": 74910 + }, + { + "epoch": 2.8927757828487586, + "grad_norm": 3.897048234939575, + "learning_rate": 7.1508552453762704e-06, + "loss": 0.1535, + "step": 74920 + }, + { + "epoch": 2.8931618981427856, + "grad_norm": 0.4604206383228302, + "learning_rate": 7.125114225774483e-06, + "loss": 0.1037, + "step": 74930 + }, + { + "epoch": 2.893548013436812, + "grad_norm": 1.1230722665786743, + "learning_rate": 7.099373206172696e-06, + "loss": 0.2625, + "step": 74940 + }, + { + "epoch": 2.893934128730839, + "grad_norm": 0.0566195473074913, + "learning_rate": 7.073632186570911e-06, + "loss": 0.1807, + "step": 74950 + }, + { + "epoch": 2.8943202440248657, + "grad_norm": 0.08504597842693329, + "learning_rate": 7.0478911669691245e-06, + "loss": 0.1639, + "step": 74960 + }, + { + "epoch": 2.8947063593188926, + "grad_norm": 1.4992380142211914, + "learning_rate": 7.022150147367337e-06, + "loss": 0.219, + "step": 74970 + }, + { + "epoch": 2.8950924746129196, + "grad_norm": 1.0938425064086914, + "learning_rate": 6.99640912776555e-06, + "loss": 0.1467, + "step": 74980 + }, + { + "epoch": 2.895478589906946, + "grad_norm": 2.4818408489227295, + "learning_rate": 6.970668108163765e-06, + "loss": 0.107, + "step": 74990 + }, + { + "epoch": 2.895864705200973, + "grad_norm": 0.18132860958576202, + "learning_rate": 6.944927088561979e-06, + "loss": 0.0966, + "step": 75000 + }, + { + "epoch": 2.8962508204949997, + "grad_norm": 1.1011055707931519, + "learning_rate": 6.919186068960191e-06, + "loss": 0.177, + "step": 75010 + }, + { + "epoch": 2.8966369357890267, + "grad_norm": 0.2061525285243988, + "learning_rate": 6.893445049358406e-06, + "loss": 0.1528, + "step": 75020 + }, + { + "epoch": 2.8970230510830532, + "grad_norm": 0.651801347732544, + "learning_rate": 6.867704029756619e-06, + "loss": 0.2042, + "step": 75030 + }, + { + "epoch": 2.8974091663770802, + "grad_norm": 0.5397346019744873, + 
"learning_rate": 6.841963010154833e-06, + "loss": 0.2099, + "step": 75040 + }, + { + "epoch": 2.8977952816711072, + "grad_norm": 2.301893949508667, + "learning_rate": 6.816221990553045e-06, + "loss": 0.107, + "step": 75050 + }, + { + "epoch": 2.8981813969651338, + "grad_norm": 0.7261497378349304, + "learning_rate": 6.79048097095126e-06, + "loss": 0.222, + "step": 75060 + }, + { + "epoch": 2.8985675122591603, + "grad_norm": 2.5633339881896973, + "learning_rate": 6.7647399513494735e-06, + "loss": 0.2573, + "step": 75070 + }, + { + "epoch": 2.8989536275531873, + "grad_norm": 1.5711263418197632, + "learning_rate": 6.738998931747687e-06, + "loss": 0.22, + "step": 75080 + }, + { + "epoch": 2.8993397428472143, + "grad_norm": 0.3720754086971283, + "learning_rate": 6.713257912145901e-06, + "loss": 0.2248, + "step": 75090 + }, + { + "epoch": 2.899725858141241, + "grad_norm": 6.319779396057129, + "learning_rate": 6.687516892544114e-06, + "loss": 0.1005, + "step": 75100 + }, + { + "epoch": 2.900111973435268, + "grad_norm": 0.8868811130523682, + "learning_rate": 6.661775872942328e-06, + "loss": 0.2097, + "step": 75110 + }, + { + "epoch": 2.900498088729295, + "grad_norm": 0.3579706847667694, + "learning_rate": 6.636034853340541e-06, + "loss": 0.1209, + "step": 75120 + }, + { + "epoch": 2.9008842040233214, + "grad_norm": 2.9907472133636475, + "learning_rate": 6.610293833738755e-06, + "loss": 0.1789, + "step": 75130 + }, + { + "epoch": 2.901270319317348, + "grad_norm": 0.06285133957862854, + "learning_rate": 6.584552814136968e-06, + "loss": 0.1437, + "step": 75140 + }, + { + "epoch": 2.901656434611375, + "grad_norm": 1.304377555847168, + "learning_rate": 6.558811794535182e-06, + "loss": 0.1315, + "step": 75150 + }, + { + "epoch": 2.902042549905402, + "grad_norm": 0.7678247690200806, + "learning_rate": 6.533070774933396e-06, + "loss": 0.1163, + "step": 75160 + }, + { + "epoch": 2.9024286651994284, + "grad_norm": 0.30619505047798157, + "learning_rate": 6.507329755331609e-06, + "loss": 0.0777, + "step": 75170 + }, + { + "epoch": 2.9028147804934554, + "grad_norm": 2.499606132507324, + "learning_rate": 6.4815887357298225e-06, + "loss": 0.2395, + "step": 75180 + }, + { + "epoch": 2.9032008957874824, + "grad_norm": 0.9318193793296814, + "learning_rate": 6.455847716128036e-06, + "loss": 0.152, + "step": 75190 + }, + { + "epoch": 2.903587011081509, + "grad_norm": 1.8113256692886353, + "learning_rate": 6.43010669652625e-06, + "loss": 0.1505, + "step": 75200 + }, + { + "epoch": 2.9039731263755355, + "grad_norm": 1.9092323780059814, + "learning_rate": 6.404365676924463e-06, + "loss": 0.2304, + "step": 75210 + }, + { + "epoch": 2.9043592416695625, + "grad_norm": 0.5695223212242126, + "learning_rate": 6.3786246573226765e-06, + "loss": 0.1125, + "step": 75220 + }, + { + "epoch": 2.9047453569635895, + "grad_norm": 2.3485186100006104, + "learning_rate": 6.35288363772089e-06, + "loss": 0.2701, + "step": 75230 + }, + { + "epoch": 2.905131472257616, + "grad_norm": 1.5487502813339233, + "learning_rate": 6.327142618119104e-06, + "loss": 0.1471, + "step": 75240 + }, + { + "epoch": 2.905517587551643, + "grad_norm": 2.2842421531677246, + "learning_rate": 6.301401598517317e-06, + "loss": 0.1211, + "step": 75250 + }, + { + "epoch": 2.9059037028456696, + "grad_norm": 0.5499130487442017, + "learning_rate": 6.275660578915531e-06, + "loss": 0.1515, + "step": 75260 + }, + { + "epoch": 2.9062898181396966, + "grad_norm": 0.7656468749046326, + "learning_rate": 6.249919559313745e-06, + "loss": 0.0743, + "step": 75270 + }, + { + "epoch": 
2.906675933433723, + "grad_norm": 1.4212474822998047, + "learning_rate": 6.224178539711958e-06, + "loss": 0.1706, + "step": 75280 + }, + { + "epoch": 2.90706204872775, + "grad_norm": 0.5614858269691467, + "learning_rate": 6.1984375201101714e-06, + "loss": 0.1551, + "step": 75290 + }, + { + "epoch": 2.907448164021777, + "grad_norm": 0.2481578141450882, + "learning_rate": 6.172696500508386e-06, + "loss": 0.1594, + "step": 75300 + }, + { + "epoch": 2.9078342793158036, + "grad_norm": 0.774730920791626, + "learning_rate": 6.1469554809066e-06, + "loss": 0.2387, + "step": 75310 + }, + { + "epoch": 2.9082203946098306, + "grad_norm": 0.12540464103221893, + "learning_rate": 6.121214461304812e-06, + "loss": 0.1436, + "step": 75320 + }, + { + "epoch": 2.908606509903857, + "grad_norm": 0.36908024549484253, + "learning_rate": 6.095473441703026e-06, + "loss": 0.1046, + "step": 75330 + }, + { + "epoch": 2.908992625197884, + "grad_norm": 0.5687906742095947, + "learning_rate": 6.06973242210124e-06, + "loss": 0.1454, + "step": 75340 + }, + { + "epoch": 2.9093787404919107, + "grad_norm": 1.373570442199707, + "learning_rate": 6.043991402499454e-06, + "loss": 0.203, + "step": 75350 + }, + { + "epoch": 2.9097648557859377, + "grad_norm": 0.40615278482437134, + "learning_rate": 6.018250382897666e-06, + "loss": 0.0836, + "step": 75360 + }, + { + "epoch": 2.9101509710799647, + "grad_norm": 1.5747426748275757, + "learning_rate": 5.9925093632958805e-06, + "loss": 0.1206, + "step": 75370 + }, + { + "epoch": 2.9105370863739912, + "grad_norm": 0.9298601746559143, + "learning_rate": 5.966768343694094e-06, + "loss": 0.1219, + "step": 75380 + }, + { + "epoch": 2.910923201668018, + "grad_norm": 0.7346408367156982, + "learning_rate": 5.941027324092308e-06, + "loss": 0.1776, + "step": 75390 + }, + { + "epoch": 2.9113093169620448, + "grad_norm": 0.6600155234336853, + "learning_rate": 5.915286304490521e-06, + "loss": 0.1511, + "step": 75400 + }, + { + "epoch": 2.9116954322560717, + "grad_norm": 0.3379638195037842, + "learning_rate": 5.8895452848887345e-06, + "loss": 0.1602, + "step": 75410 + }, + { + "epoch": 2.9120815475500983, + "grad_norm": 0.7085258960723877, + "learning_rate": 5.863804265286949e-06, + "loss": 0.0688, + "step": 75420 + }, + { + "epoch": 2.9124676628441253, + "grad_norm": 0.379912793636322, + "learning_rate": 5.838063245685162e-06, + "loss": 0.2218, + "step": 75430 + }, + { + "epoch": 2.9128537781381523, + "grad_norm": 1.7112829685211182, + "learning_rate": 5.812322226083375e-06, + "loss": 0.0864, + "step": 75440 + }, + { + "epoch": 2.913239893432179, + "grad_norm": 0.9781870245933533, + "learning_rate": 5.786581206481589e-06, + "loss": 0.1837, + "step": 75450 + }, + { + "epoch": 2.913626008726206, + "grad_norm": 0.6391132473945618, + "learning_rate": 5.760840186879803e-06, + "loss": 0.0902, + "step": 75460 + }, + { + "epoch": 2.9140121240202324, + "grad_norm": 1.400060772895813, + "learning_rate": 5.735099167278016e-06, + "loss": 0.1623, + "step": 75470 + }, + { + "epoch": 2.9143982393142593, + "grad_norm": 1.2771530151367188, + "learning_rate": 5.709358147676229e-06, + "loss": 0.1589, + "step": 75480 + }, + { + "epoch": 2.914784354608286, + "grad_norm": 0.37462353706359863, + "learning_rate": 5.683617128074444e-06, + "loss": 0.1241, + "step": 75490 + }, + { + "epoch": 2.915170469902313, + "grad_norm": 1.5162663459777832, + "learning_rate": 5.657876108472657e-06, + "loss": 0.0891, + "step": 75500 + }, + { + "epoch": 2.91555658519634, + "grad_norm": 2.389887809753418, + "learning_rate": 5.63213508887087e-06, 
+ "loss": 0.1397, + "step": 75510 + }, + { + "epoch": 2.9159427004903664, + "grad_norm": 0.4746516942977905, + "learning_rate": 5.6063940692690835e-06, + "loss": 0.1362, + "step": 75520 + }, + { + "epoch": 2.916328815784393, + "grad_norm": 1.1355524063110352, + "learning_rate": 5.580653049667298e-06, + "loss": 0.1772, + "step": 75530 + }, + { + "epoch": 2.91671493107842, + "grad_norm": 0.18736037611961365, + "learning_rate": 5.554912030065512e-06, + "loss": 0.1449, + "step": 75540 + }, + { + "epoch": 2.917101046372447, + "grad_norm": 2.575685501098633, + "learning_rate": 5.529171010463724e-06, + "loss": 0.1871, + "step": 75550 + }, + { + "epoch": 2.9174871616664735, + "grad_norm": 1.01112699508667, + "learning_rate": 5.5034299908619385e-06, + "loss": 0.1588, + "step": 75560 + }, + { + "epoch": 2.9178732769605005, + "grad_norm": 1.8929286003112793, + "learning_rate": 5.477688971260152e-06, + "loss": 0.1564, + "step": 75570 + }, + { + "epoch": 2.9182593922545275, + "grad_norm": 0.21724069118499756, + "learning_rate": 5.451947951658366e-06, + "loss": 0.2152, + "step": 75580 + }, + { + "epoch": 2.918645507548554, + "grad_norm": 1.2024441957473755, + "learning_rate": 5.426206932056578e-06, + "loss": 0.3032, + "step": 75590 + }, + { + "epoch": 2.9190316228425806, + "grad_norm": 0.14324747025966644, + "learning_rate": 5.4004659124547925e-06, + "loss": 0.0674, + "step": 75600 + }, + { + "epoch": 2.9194177381366075, + "grad_norm": 1.427650809288025, + "learning_rate": 5.374724892853006e-06, + "loss": 0.1086, + "step": 75610 + }, + { + "epoch": 2.9198038534306345, + "grad_norm": 1.2401851415634155, + "learning_rate": 5.34898387325122e-06, + "loss": 0.0944, + "step": 75620 + }, + { + "epoch": 2.920189968724661, + "grad_norm": 1.4218640327453613, + "learning_rate": 5.323242853649433e-06, + "loss": 0.1386, + "step": 75630 + }, + { + "epoch": 2.920576084018688, + "grad_norm": 1.0168864727020264, + "learning_rate": 5.297501834047647e-06, + "loss": 0.1291, + "step": 75640 + }, + { + "epoch": 2.9209621993127146, + "grad_norm": 0.2599659860134125, + "learning_rate": 5.271760814445861e-06, + "loss": 0.1055, + "step": 75650 + }, + { + "epoch": 2.9213483146067416, + "grad_norm": 2.1232173442840576, + "learning_rate": 5.246019794844074e-06, + "loss": 0.1521, + "step": 75660 + }, + { + "epoch": 2.921734429900768, + "grad_norm": 0.6988056302070618, + "learning_rate": 5.220278775242287e-06, + "loss": 0.1584, + "step": 75670 + }, + { + "epoch": 2.922120545194795, + "grad_norm": 1.766686201095581, + "learning_rate": 5.194537755640501e-06, + "loss": 0.1888, + "step": 75680 + }, + { + "epoch": 2.922506660488822, + "grad_norm": 1.17173433303833, + "learning_rate": 5.168796736038715e-06, + "loss": 0.1362, + "step": 75690 + }, + { + "epoch": 2.9228927757828487, + "grad_norm": 0.10190659016370773, + "learning_rate": 5.143055716436929e-06, + "loss": 0.1048, + "step": 75700 + }, + { + "epoch": 2.9232788910768757, + "grad_norm": 0.24241623282432556, + "learning_rate": 5.1173146968351415e-06, + "loss": 0.0925, + "step": 75710 + }, + { + "epoch": 2.923665006370902, + "grad_norm": 0.027136487886309624, + "learning_rate": 5.091573677233356e-06, + "loss": 0.1881, + "step": 75720 + }, + { + "epoch": 2.924051121664929, + "grad_norm": 0.7799992561340332, + "learning_rate": 5.065832657631569e-06, + "loss": 0.1733, + "step": 75730 + }, + { + "epoch": 2.9244372369589557, + "grad_norm": 0.11681391298770905, + "learning_rate": 5.040091638029783e-06, + "loss": 0.0768, + "step": 75740 + }, + { + "epoch": 2.9248233522529827, + "grad_norm": 
3.007784366607666, + "learning_rate": 5.014350618427996e-06, + "loss": 0.2014, + "step": 75750 + }, + { + "epoch": 2.9252094675470097, + "grad_norm": 0.8907320499420166, + "learning_rate": 4.98860959882621e-06, + "loss": 0.3129, + "step": 75760 + }, + { + "epoch": 2.9255955828410363, + "grad_norm": 0.960918128490448, + "learning_rate": 4.962868579224423e-06, + "loss": 0.0904, + "step": 75770 + }, + { + "epoch": 2.9259816981350633, + "grad_norm": 1.1451547145843506, + "learning_rate": 4.937127559622637e-06, + "loss": 0.1966, + "step": 75780 + }, + { + "epoch": 2.92636781342909, + "grad_norm": 0.0598277747631073, + "learning_rate": 4.9113865400208505e-06, + "loss": 0.178, + "step": 75790 + }, + { + "epoch": 2.926753928723117, + "grad_norm": 2.012179374694824, + "learning_rate": 4.885645520419064e-06, + "loss": 0.1208, + "step": 75800 + }, + { + "epoch": 2.9271400440171433, + "grad_norm": 0.29047757387161255, + "learning_rate": 4.859904500817278e-06, + "loss": 0.1803, + "step": 75810 + }, + { + "epoch": 2.9275261593111703, + "grad_norm": 0.23592634499073029, + "learning_rate": 4.834163481215491e-06, + "loss": 0.1307, + "step": 75820 + }, + { + "epoch": 2.9279122746051973, + "grad_norm": 0.6524437069892883, + "learning_rate": 4.808422461613705e-06, + "loss": 0.1102, + "step": 75830 + }, + { + "epoch": 2.928298389899224, + "grad_norm": 4.513589382171631, + "learning_rate": 4.782681442011918e-06, + "loss": 0.3142, + "step": 75840 + }, + { + "epoch": 2.928684505193251, + "grad_norm": 0.2516826093196869, + "learning_rate": 4.756940422410132e-06, + "loss": 0.1691, + "step": 75850 + }, + { + "epoch": 2.9290706204872774, + "grad_norm": 0.05350786820054054, + "learning_rate": 4.731199402808345e-06, + "loss": 0.1919, + "step": 75860 + }, + { + "epoch": 2.9294567357813044, + "grad_norm": 0.23492521047592163, + "learning_rate": 4.705458383206559e-06, + "loss": 0.1481, + "step": 75870 + }, + { + "epoch": 2.929842851075331, + "grad_norm": 1.1959890127182007, + "learning_rate": 4.679717363604773e-06, + "loss": 0.1008, + "step": 75880 + }, + { + "epoch": 2.930228966369358, + "grad_norm": 1.4260644912719727, + "learning_rate": 4.653976344002986e-06, + "loss": 0.0918, + "step": 75890 + }, + { + "epoch": 2.930615081663385, + "grad_norm": 1.236479640007019, + "learning_rate": 4.6282353244011995e-06, + "loss": 0.0863, + "step": 75900 + }, + { + "epoch": 2.9310011969574115, + "grad_norm": 1.4055296182632446, + "learning_rate": 4.602494304799413e-06, + "loss": 0.176, + "step": 75910 + }, + { + "epoch": 2.931387312251438, + "grad_norm": 1.5062698125839233, + "learning_rate": 4.576753285197627e-06, + "loss": 0.1485, + "step": 75920 + }, + { + "epoch": 2.931773427545465, + "grad_norm": 0.586919367313385, + "learning_rate": 4.551012265595841e-06, + "loss": 0.1995, + "step": 75930 + }, + { + "epoch": 2.932159542839492, + "grad_norm": 0.755504310131073, + "learning_rate": 4.525271245994054e-06, + "loss": 0.1654, + "step": 75940 + }, + { + "epoch": 2.9325456581335185, + "grad_norm": 0.12576620280742645, + "learning_rate": 4.499530226392268e-06, + "loss": 0.0948, + "step": 75950 + }, + { + "epoch": 2.9329317734275455, + "grad_norm": 0.9442972540855408, + "learning_rate": 4.473789206790481e-06, + "loss": 0.2244, + "step": 75960 + }, + { + "epoch": 2.9333178887215725, + "grad_norm": 0.026888804510235786, + "learning_rate": 4.448048187188695e-06, + "loss": 0.0444, + "step": 75970 + }, + { + "epoch": 2.933704004015599, + "grad_norm": 0.789533257484436, + "learning_rate": 4.422307167586908e-06, + "loss": 0.1891, + "step": 75980 
+ }, + { + "epoch": 2.9340901193096256, + "grad_norm": 1.3214176893234253, + "learning_rate": 4.396566147985122e-06, + "loss": 0.1072, + "step": 75990 + }, + { + "epoch": 2.9344762346036526, + "grad_norm": 1.0256754159927368, + "learning_rate": 4.370825128383335e-06, + "loss": 0.198, + "step": 76000 + }, + { + "epoch": 2.9348623498976796, + "grad_norm": 0.25208428502082825, + "learning_rate": 4.345084108781549e-06, + "loss": 0.1881, + "step": 76010 + }, + { + "epoch": 2.935248465191706, + "grad_norm": 1.5642906427383423, + "learning_rate": 4.319343089179763e-06, + "loss": 0.1642, + "step": 76020 + }, + { + "epoch": 2.935634580485733, + "grad_norm": 0.7479145526885986, + "learning_rate": 4.293602069577976e-06, + "loss": 0.1752, + "step": 76030 + }, + { + "epoch": 2.93602069577976, + "grad_norm": 0.16477616131305695, + "learning_rate": 4.26786104997619e-06, + "loss": 0.1692, + "step": 76040 + }, + { + "epoch": 2.9364068110737866, + "grad_norm": 1.2980380058288574, + "learning_rate": 4.242120030374403e-06, + "loss": 0.0983, + "step": 76050 + }, + { + "epoch": 2.936792926367813, + "grad_norm": 1.4535443782806396, + "learning_rate": 4.216379010772617e-06, + "loss": 0.1804, + "step": 76060 + }, + { + "epoch": 2.93717904166184, + "grad_norm": 0.6782435774803162, + "learning_rate": 4.19063799117083e-06, + "loss": 0.0837, + "step": 76070 + }, + { + "epoch": 2.937565156955867, + "grad_norm": 0.9832270741462708, + "learning_rate": 4.164896971569044e-06, + "loss": 0.2864, + "step": 76080 + }, + { + "epoch": 2.9379512722498937, + "grad_norm": 1.398342251777649, + "learning_rate": 4.1391559519672575e-06, + "loss": 0.1506, + "step": 76090 + }, + { + "epoch": 2.9383373875439207, + "grad_norm": 1.2533401250839233, + "learning_rate": 4.113414932365471e-06, + "loss": 0.1198, + "step": 76100 + }, + { + "epoch": 2.9387235028379473, + "grad_norm": 3.0472609996795654, + "learning_rate": 4.087673912763685e-06, + "loss": 0.1057, + "step": 76110 + }, + { + "epoch": 2.9391096181319742, + "grad_norm": 0.6309196352958679, + "learning_rate": 4.061932893161898e-06, + "loss": 0.043, + "step": 76120 + }, + { + "epoch": 2.939495733426001, + "grad_norm": 0.6146018505096436, + "learning_rate": 4.036191873560112e-06, + "loss": 0.2575, + "step": 76130 + }, + { + "epoch": 2.939881848720028, + "grad_norm": 0.9969425797462463, + "learning_rate": 4.010450853958325e-06, + "loss": 0.0714, + "step": 76140 + }, + { + "epoch": 2.9402679640140548, + "grad_norm": 1.1078910827636719, + "learning_rate": 3.984709834356539e-06, + "loss": 0.058, + "step": 76150 + }, + { + "epoch": 2.9406540793080813, + "grad_norm": 0.45345064997673035, + "learning_rate": 3.958968814754753e-06, + "loss": 0.2731, + "step": 76160 + }, + { + "epoch": 2.9410401946021083, + "grad_norm": 0.6446991562843323, + "learning_rate": 3.9332277951529665e-06, + "loss": 0.0851, + "step": 76170 + }, + { + "epoch": 2.941426309896135, + "grad_norm": 0.5553757548332214, + "learning_rate": 3.90748677555118e-06, + "loss": 0.1686, + "step": 76180 + }, + { + "epoch": 2.941812425190162, + "grad_norm": 0.46511319279670715, + "learning_rate": 3.881745755949393e-06, + "loss": 0.3051, + "step": 76190 + }, + { + "epoch": 2.9421985404841884, + "grad_norm": 0.7858741283416748, + "learning_rate": 3.856004736347607e-06, + "loss": 0.0879, + "step": 76200 + }, + { + "epoch": 2.9425846557782154, + "grad_norm": 2.4360992908477783, + "learning_rate": 3.830263716745821e-06, + "loss": 0.351, + "step": 76210 + }, + { + "epoch": 2.9429707710722424, + "grad_norm": 0.8845987915992737, + "learning_rate": 
3.804522697144034e-06, + "loss": 0.1261, + "step": 76220 + }, + { + "epoch": 2.943356886366269, + "grad_norm": 0.07230502367019653, + "learning_rate": 3.7787816775422473e-06, + "loss": 0.1706, + "step": 76230 + }, + { + "epoch": 2.943743001660296, + "grad_norm": 1.0335034132003784, + "learning_rate": 3.753040657940461e-06, + "loss": 0.1003, + "step": 76240 + }, + { + "epoch": 2.9441291169543224, + "grad_norm": 0.17918971180915833, + "learning_rate": 3.727299638338675e-06, + "loss": 0.1177, + "step": 76250 + }, + { + "epoch": 2.9445152322483494, + "grad_norm": 0.28648892045021057, + "learning_rate": 3.701558618736888e-06, + "loss": 0.1744, + "step": 76260 + }, + { + "epoch": 2.944901347542376, + "grad_norm": 2.5599138736724854, + "learning_rate": 3.675817599135102e-06, + "loss": 0.15, + "step": 76270 + }, + { + "epoch": 2.945287462836403, + "grad_norm": 0.5117394924163818, + "learning_rate": 3.650076579533315e-06, + "loss": 0.1638, + "step": 76280 + }, + { + "epoch": 2.94567357813043, + "grad_norm": 1.0782241821289062, + "learning_rate": 3.6243355599315292e-06, + "loss": 0.326, + "step": 76290 + }, + { + "epoch": 2.9460596934244565, + "grad_norm": 0.049037184566259384, + "learning_rate": 3.598594540329742e-06, + "loss": 0.098, + "step": 76300 + }, + { + "epoch": 2.9464458087184835, + "grad_norm": 0.6411057710647583, + "learning_rate": 3.5728535207279563e-06, + "loss": 0.1082, + "step": 76310 + }, + { + "epoch": 2.94683192401251, + "grad_norm": 0.4438591003417969, + "learning_rate": 3.54711250112617e-06, + "loss": 0.1103, + "step": 76320 + }, + { + "epoch": 2.947218039306537, + "grad_norm": 0.2660674452781677, + "learning_rate": 3.5213714815243833e-06, + "loss": 0.1961, + "step": 76330 + }, + { + "epoch": 2.9476041546005636, + "grad_norm": 2.3970565795898438, + "learning_rate": 3.495630461922597e-06, + "loss": 0.2066, + "step": 76340 + }, + { + "epoch": 2.9479902698945906, + "grad_norm": 0.5123302936553955, + "learning_rate": 3.4698894423208104e-06, + "loss": 0.2231, + "step": 76350 + }, + { + "epoch": 2.9483763851886176, + "grad_norm": 0.026459665969014168, + "learning_rate": 3.444148422719024e-06, + "loss": 0.1865, + "step": 76360 + }, + { + "epoch": 2.948762500482644, + "grad_norm": 1.8527942895889282, + "learning_rate": 3.4184074031172374e-06, + "loss": 0.3425, + "step": 76370 + }, + { + "epoch": 2.9491486157766706, + "grad_norm": 0.3126128315925598, + "learning_rate": 3.392666383515451e-06, + "loss": 0.1047, + "step": 76380 + }, + { + "epoch": 2.9495347310706976, + "grad_norm": 0.14081552624702454, + "learning_rate": 3.3669253639136653e-06, + "loss": 0.1987, + "step": 76390 + }, + { + "epoch": 2.9499208463647246, + "grad_norm": 0.7128289341926575, + "learning_rate": 3.3411843443118782e-06, + "loss": 0.1398, + "step": 76400 + }, + { + "epoch": 2.950306961658751, + "grad_norm": 1.8297266960144043, + "learning_rate": 3.3154433247100924e-06, + "loss": 0.1874, + "step": 76410 + }, + { + "epoch": 2.950693076952778, + "grad_norm": 2.2352466583251953, + "learning_rate": 3.2897023051083053e-06, + "loss": 0.1561, + "step": 76420 + }, + { + "epoch": 2.951079192246805, + "grad_norm": 0.9707785844802856, + "learning_rate": 3.2639612855065194e-06, + "loss": 0.2287, + "step": 76430 + }, + { + "epoch": 2.9514653075408317, + "grad_norm": 1.408307433128357, + "learning_rate": 3.2382202659047323e-06, + "loss": 0.2413, + "step": 76440 + }, + { + "epoch": 2.9518514228348582, + "grad_norm": 1.706040382385254, + "learning_rate": 3.2124792463029465e-06, + "loss": 0.1153, + "step": 76450 + }, + { + "epoch": 
2.9522375381288852, + "grad_norm": 1.6742459535598755, + "learning_rate": 3.1867382267011594e-06, + "loss": 0.1259, + "step": 76460 + }, + { + "epoch": 2.952623653422912, + "grad_norm": 2.4881958961486816, + "learning_rate": 3.1609972070993735e-06, + "loss": 0.2058, + "step": 76470 + }, + { + "epoch": 2.9530097687169388, + "grad_norm": 0.09501784294843674, + "learning_rate": 3.1352561874975872e-06, + "loss": 0.1849, + "step": 76480 + }, + { + "epoch": 2.9533958840109658, + "grad_norm": 0.6468241214752197, + "learning_rate": 3.1095151678958006e-06, + "loss": 0.3005, + "step": 76490 + }, + { + "epoch": 2.9537819993049927, + "grad_norm": 0.3400740921497345, + "learning_rate": 3.083774148294014e-06, + "loss": 0.1335, + "step": 76500 + }, + { + "epoch": 2.9541681145990193, + "grad_norm": 0.841686487197876, + "learning_rate": 3.0580331286922276e-06, + "loss": 0.1571, + "step": 76510 + }, + { + "epoch": 2.954554229893046, + "grad_norm": 0.3212125301361084, + "learning_rate": 3.0322921090904413e-06, + "loss": 0.1652, + "step": 76520 + }, + { + "epoch": 2.954940345187073, + "grad_norm": 1.0158278942108154, + "learning_rate": 3.0065510894886546e-06, + "loss": 0.255, + "step": 76530 + }, + { + "epoch": 2.9553264604811, + "grad_norm": 2.152977466583252, + "learning_rate": 2.9808100698868684e-06, + "loss": 0.2047, + "step": 76540 + }, + { + "epoch": 2.9557125757751264, + "grad_norm": 1.1242973804473877, + "learning_rate": 2.9550690502850817e-06, + "loss": 0.2116, + "step": 76550 + }, + { + "epoch": 2.9560986910691534, + "grad_norm": 2.271522045135498, + "learning_rate": 2.9293280306832954e-06, + "loss": 0.2411, + "step": 76560 + }, + { + "epoch": 2.95648480636318, + "grad_norm": 2.5018372535705566, + "learning_rate": 2.9035870110815087e-06, + "loss": 0.2311, + "step": 76570 + }, + { + "epoch": 2.956870921657207, + "grad_norm": 2.2592756748199463, + "learning_rate": 2.877845991479723e-06, + "loss": 0.1271, + "step": 76580 + }, + { + "epoch": 2.9572570369512334, + "grad_norm": 1.9872472286224365, + "learning_rate": 2.8521049718779362e-06, + "loss": 0.2476, + "step": 76590 + }, + { + "epoch": 2.9576431522452604, + "grad_norm": 0.44302454590797424, + "learning_rate": 2.82636395227615e-06, + "loss": 0.1617, + "step": 76600 + }, + { + "epoch": 2.9580292675392874, + "grad_norm": 0.10741741210222244, + "learning_rate": 2.8006229326743633e-06, + "loss": 0.1844, + "step": 76610 + }, + { + "epoch": 2.958415382833314, + "grad_norm": 0.7484311461448669, + "learning_rate": 2.774881913072577e-06, + "loss": 0.1211, + "step": 76620 + }, + { + "epoch": 2.958801498127341, + "grad_norm": 1.170832633972168, + "learning_rate": 2.7491408934707903e-06, + "loss": 0.2335, + "step": 76630 + }, + { + "epoch": 2.9591876134213675, + "grad_norm": 1.429801344871521, + "learning_rate": 2.723399873869004e-06, + "loss": 0.1425, + "step": 76640 + }, + { + "epoch": 2.9595737287153945, + "grad_norm": 1.5668152570724487, + "learning_rate": 2.6976588542672174e-06, + "loss": 0.0826, + "step": 76650 + }, + { + "epoch": 2.959959844009421, + "grad_norm": 0.4563259482383728, + "learning_rate": 2.6719178346654315e-06, + "loss": 0.1126, + "step": 76660 + }, + { + "epoch": 2.960345959303448, + "grad_norm": 0.0956023558974266, + "learning_rate": 2.646176815063645e-06, + "loss": 0.0703, + "step": 76670 + }, + { + "epoch": 2.960732074597475, + "grad_norm": 0.34736737608909607, + "learning_rate": 2.6204357954618586e-06, + "loss": 0.2169, + "step": 76680 + }, + { + "epoch": 2.9611181898915016, + "grad_norm": 0.6178987622261047, + "learning_rate": 
2.594694775860072e-06, + "loss": 0.1367, + "step": 76690 + }, + { + "epoch": 2.9615043051855285, + "grad_norm": 0.2700929045677185, + "learning_rate": 2.5689537562582856e-06, + "loss": 0.1022, + "step": 76700 + }, + { + "epoch": 2.961890420479555, + "grad_norm": 0.9162507653236389, + "learning_rate": 2.543212736656499e-06, + "loss": 0.1318, + "step": 76710 + }, + { + "epoch": 2.962276535773582, + "grad_norm": 1.0987880229949951, + "learning_rate": 2.5174717170547126e-06, + "loss": 0.1194, + "step": 76720 + }, + { + "epoch": 2.9626626510676086, + "grad_norm": 1.9178944826126099, + "learning_rate": 2.491730697452926e-06, + "loss": 0.1422, + "step": 76730 + }, + { + "epoch": 2.9630487663616356, + "grad_norm": 0.029073640704154968, + "learning_rate": 2.46598967785114e-06, + "loss": 0.1077, + "step": 76740 + }, + { + "epoch": 2.9634348816556626, + "grad_norm": 0.3460249900817871, + "learning_rate": 2.4402486582493534e-06, + "loss": 0.193, + "step": 76750 + }, + { + "epoch": 2.963820996949689, + "grad_norm": 0.08659185469150543, + "learning_rate": 2.414507638647567e-06, + "loss": 0.1638, + "step": 76760 + }, + { + "epoch": 2.964207112243716, + "grad_norm": 0.3209403455257416, + "learning_rate": 2.3887666190457805e-06, + "loss": 0.1265, + "step": 76770 + }, + { + "epoch": 2.9645932275377427, + "grad_norm": 2.6390767097473145, + "learning_rate": 2.363025599443994e-06, + "loss": 0.2551, + "step": 76780 + }, + { + "epoch": 2.9649793428317697, + "grad_norm": 1.400255799293518, + "learning_rate": 2.3372845798422075e-06, + "loss": 0.1893, + "step": 76790 + }, + { + "epoch": 2.965365458125796, + "grad_norm": 1.8555853366851807, + "learning_rate": 2.3115435602404213e-06, + "loss": 0.155, + "step": 76800 + }, + { + "epoch": 2.965751573419823, + "grad_norm": 1.8044438362121582, + "learning_rate": 2.2858025406386346e-06, + "loss": 0.3129, + "step": 76810 + }, + { + "epoch": 2.96613768871385, + "grad_norm": 2.4024667739868164, + "learning_rate": 2.2600615210368483e-06, + "loss": 0.1965, + "step": 76820 + }, + { + "epoch": 2.9665238040078767, + "grad_norm": 2.0369980335235596, + "learning_rate": 2.234320501435062e-06, + "loss": 0.1335, + "step": 76830 + }, + { + "epoch": 2.9669099193019033, + "grad_norm": 1.8668415546417236, + "learning_rate": 2.2085794818332758e-06, + "loss": 0.0668, + "step": 76840 + }, + { + "epoch": 2.9672960345959303, + "grad_norm": 0.18365631997585297, + "learning_rate": 2.182838462231489e-06, + "loss": 0.0758, + "step": 76850 + }, + { + "epoch": 2.9676821498899573, + "grad_norm": 2.2368826866149902, + "learning_rate": 2.157097442629703e-06, + "loss": 0.1002, + "step": 76860 + }, + { + "epoch": 2.968068265183984, + "grad_norm": 1.877583622932434, + "learning_rate": 2.131356423027916e-06, + "loss": 0.1702, + "step": 76870 + }, + { + "epoch": 2.968454380478011, + "grad_norm": 0.635735273361206, + "learning_rate": 2.10561540342613e-06, + "loss": 0.1851, + "step": 76880 + }, + { + "epoch": 2.968840495772038, + "grad_norm": 2.5163917541503906, + "learning_rate": 2.0798743838243436e-06, + "loss": 0.0917, + "step": 76890 + }, + { + "epoch": 2.9692266110660643, + "grad_norm": 0.0662187710404396, + "learning_rate": 2.054133364222557e-06, + "loss": 0.0544, + "step": 76900 + }, + { + "epoch": 2.969612726360091, + "grad_norm": 0.8235715627670288, + "learning_rate": 2.0283923446207706e-06, + "loss": 0.1892, + "step": 76910 + }, + { + "epoch": 2.969998841654118, + "grad_norm": 0.11744660139083862, + "learning_rate": 2.002651325018984e-06, + "loss": 0.0543, + "step": 76920 + }, + { + "epoch": 
2.970384956948145, + "grad_norm": 2.3874759674072266, + "learning_rate": 1.9769103054171977e-06, + "loss": 0.1903, + "step": 76930 + }, + { + "epoch": 2.9707710722421714, + "grad_norm": 1.0060522556304932, + "learning_rate": 1.951169285815411e-06, + "loss": 0.1119, + "step": 76940 + }, + { + "epoch": 2.9711571875361984, + "grad_norm": 1.8163440227508545, + "learning_rate": 1.9254282662136247e-06, + "loss": 0.1515, + "step": 76950 + }, + { + "epoch": 2.971543302830225, + "grad_norm": 0.9836590886116028, + "learning_rate": 1.8996872466118383e-06, + "loss": 0.1354, + "step": 76960 + }, + { + "epoch": 2.971929418124252, + "grad_norm": 0.4806680679321289, + "learning_rate": 1.8739462270100522e-06, + "loss": 0.1058, + "step": 76970 + }, + { + "epoch": 2.9723155334182785, + "grad_norm": 0.043541885912418365, + "learning_rate": 1.8482052074082657e-06, + "loss": 0.1007, + "step": 76980 + }, + { + "epoch": 2.9727016487123055, + "grad_norm": 0.2944481074810028, + "learning_rate": 1.8224641878064793e-06, + "loss": 0.1376, + "step": 76990 + }, + { + "epoch": 2.9730877640063325, + "grad_norm": 1.73750901222229, + "learning_rate": 1.7967231682046928e-06, + "loss": 0.1875, + "step": 77000 + }, + { + "epoch": 2.973473879300359, + "grad_norm": 0.7777722477912903, + "learning_rate": 1.7709821486029063e-06, + "loss": 0.2761, + "step": 77010 + }, + { + "epoch": 2.973859994594386, + "grad_norm": 1.6995668411254883, + "learning_rate": 1.7452411290011198e-06, + "loss": 0.0507, + "step": 77020 + }, + { + "epoch": 2.9742461098884125, + "grad_norm": 2.498788833618164, + "learning_rate": 1.7195001093993333e-06, + "loss": 0.1609, + "step": 77030 + }, + { + "epoch": 2.9746322251824395, + "grad_norm": 1.8842010498046875, + "learning_rate": 1.6937590897975469e-06, + "loss": 0.1728, + "step": 77040 + }, + { + "epoch": 2.975018340476466, + "grad_norm": 1.1742241382598877, + "learning_rate": 1.6680180701957606e-06, + "loss": 0.1388, + "step": 77050 + }, + { + "epoch": 2.975404455770493, + "grad_norm": 0.616058886051178, + "learning_rate": 1.6422770505939741e-06, + "loss": 0.092, + "step": 77060 + }, + { + "epoch": 2.97579057106452, + "grad_norm": 0.2943461239337921, + "learning_rate": 1.6165360309921877e-06, + "loss": 0.0929, + "step": 77070 + }, + { + "epoch": 2.9761766863585466, + "grad_norm": 0.9972903728485107, + "learning_rate": 1.5907950113904012e-06, + "loss": 0.091, + "step": 77080 + }, + { + "epoch": 2.9765628016525736, + "grad_norm": 0.3846336901187897, + "learning_rate": 1.5650539917886147e-06, + "loss": 0.1561, + "step": 77090 + }, + { + "epoch": 2.9769489169466, + "grad_norm": 2.607909917831421, + "learning_rate": 1.5393129721868282e-06, + "loss": 0.2393, + "step": 77100 + }, + { + "epoch": 2.977335032240627, + "grad_norm": 0.7278031706809998, + "learning_rate": 1.513571952585042e-06, + "loss": 0.1189, + "step": 77110 + }, + { + "epoch": 2.9777211475346537, + "grad_norm": 1.9696396589279175, + "learning_rate": 1.4878309329832555e-06, + "loss": 0.1954, + "step": 77120 + }, + { + "epoch": 2.9781072628286807, + "grad_norm": 1.2762713432312012, + "learning_rate": 1.462089913381469e-06, + "loss": 0.2617, + "step": 77130 + }, + { + "epoch": 2.9784933781227076, + "grad_norm": 1.557726502418518, + "learning_rate": 1.4363488937796827e-06, + "loss": 0.2751, + "step": 77140 + }, + { + "epoch": 2.978879493416734, + "grad_norm": 0.5568321347236633, + "learning_rate": 1.4106078741778963e-06, + "loss": 0.2168, + "step": 77150 + }, + { + "epoch": 2.979265608710761, + "grad_norm": 2.3710832595825195, + "learning_rate": 
1.3848668545761098e-06, + "loss": 0.244, + "step": 77160 + }, + { + "epoch": 2.9796517240047877, + "grad_norm": 1.4126112461090088, + "learning_rate": 1.3591258349743233e-06, + "loss": 0.1546, + "step": 77170 + }, + { + "epoch": 2.9800378392988147, + "grad_norm": 0.4143407940864563, + "learning_rate": 1.333384815372537e-06, + "loss": 0.1715, + "step": 77180 + }, + { + "epoch": 2.9804239545928413, + "grad_norm": 0.4667656719684601, + "learning_rate": 1.3076437957707506e-06, + "loss": 0.144, + "step": 77190 + }, + { + "epoch": 2.9808100698868683, + "grad_norm": 2.8623580932617188, + "learning_rate": 1.281902776168964e-06, + "loss": 0.1881, + "step": 77200 + }, + { + "epoch": 2.9811961851808952, + "grad_norm": 1.9620566368103027, + "learning_rate": 1.2561617565671776e-06, + "loss": 0.196, + "step": 77210 + }, + { + "epoch": 2.981582300474922, + "grad_norm": 0.6524580717086792, + "learning_rate": 1.2304207369653913e-06, + "loss": 0.0423, + "step": 77220 + }, + { + "epoch": 2.9819684157689483, + "grad_norm": 0.09811830520629883, + "learning_rate": 1.2046797173636049e-06, + "loss": 0.0493, + "step": 77230 + }, + { + "epoch": 2.9823545310629753, + "grad_norm": 0.45934444665908813, + "learning_rate": 1.1789386977618184e-06, + "loss": 0.044, + "step": 77240 + }, + { + "epoch": 2.9827406463570023, + "grad_norm": 1.32642662525177, + "learning_rate": 1.153197678160032e-06, + "loss": 0.1132, + "step": 77250 + }, + { + "epoch": 2.983126761651029, + "grad_norm": 0.5210559964179993, + "learning_rate": 1.1274566585582457e-06, + "loss": 0.1504, + "step": 77260 + }, + { + "epoch": 2.983512876945056, + "grad_norm": 0.6723094582557678, + "learning_rate": 1.1017156389564592e-06, + "loss": 0.0915, + "step": 77270 + }, + { + "epoch": 2.983898992239083, + "grad_norm": 1.8885656595230103, + "learning_rate": 1.0759746193546727e-06, + "loss": 0.1754, + "step": 77280 + }, + { + "epoch": 2.9842851075331094, + "grad_norm": 0.7771125435829163, + "learning_rate": 1.0502335997528862e-06, + "loss": 0.1099, + "step": 77290 + }, + { + "epoch": 2.984671222827136, + "grad_norm": 0.3368057310581207, + "learning_rate": 1.0244925801511e-06, + "loss": 0.2531, + "step": 77300 + }, + { + "epoch": 2.985057338121163, + "grad_norm": 1.5150898694992065, + "learning_rate": 9.987515605493135e-07, + "loss": 0.1183, + "step": 77310 + }, + { + "epoch": 2.98544345341519, + "grad_norm": 1.127661943435669, + "learning_rate": 9.73010540947527e-07, + "loss": 0.0798, + "step": 77320 + }, + { + "epoch": 2.9858295687092165, + "grad_norm": 0.38720184564590454, + "learning_rate": 9.472695213457404e-07, + "loss": 0.224, + "step": 77330 + }, + { + "epoch": 2.9862156840032434, + "grad_norm": 2.868779182434082, + "learning_rate": 9.215285017439542e-07, + "loss": 0.1443, + "step": 77340 + }, + { + "epoch": 2.9866017992972704, + "grad_norm": 1.1396832466125488, + "learning_rate": 8.957874821421677e-07, + "loss": 0.1854, + "step": 77350 + }, + { + "epoch": 2.986987914591297, + "grad_norm": 1.5037932395935059, + "learning_rate": 8.700464625403812e-07, + "loss": 0.1981, + "step": 77360 + }, + { + "epoch": 2.9873740298853235, + "grad_norm": 2.0281424522399902, + "learning_rate": 8.443054429385947e-07, + "loss": 0.1497, + "step": 77370 + }, + { + "epoch": 2.9877601451793505, + "grad_norm": 0.9007585048675537, + "learning_rate": 8.185644233368085e-07, + "loss": 0.1785, + "step": 77380 + }, + { + "epoch": 2.9881462604733775, + "grad_norm": 1.5973069667816162, + "learning_rate": 7.92823403735022e-07, + "loss": 0.2503, + "step": 77390 + }, + { + "epoch": 
2.988532375767404, + "grad_norm": 1.3147287368774414, + "learning_rate": 7.670823841332356e-07, + "loss": 0.1826, + "step": 77400 + }, + { + "epoch": 2.988918491061431, + "grad_norm": 1.1539599895477295, + "learning_rate": 7.413413645314491e-07, + "loss": 0.1909, + "step": 77410 + }, + { + "epoch": 2.9893046063554576, + "grad_norm": 1.8404300212860107, + "learning_rate": 7.156003449296628e-07, + "loss": 0.1325, + "step": 77420 + }, + { + "epoch": 2.9896907216494846, + "grad_norm": 0.768785834312439, + "learning_rate": 6.898593253278763e-07, + "loss": 0.2586, + "step": 77430 + }, + { + "epoch": 2.990076836943511, + "grad_norm": 1.2869620323181152, + "learning_rate": 6.641183057260899e-07, + "loss": 0.4009, + "step": 77440 + }, + { + "epoch": 2.990462952237538, + "grad_norm": 0.19744427502155304, + "learning_rate": 6.383772861243034e-07, + "loss": 0.135, + "step": 77450 + }, + { + "epoch": 2.990849067531565, + "grad_norm": 0.26801246404647827, + "learning_rate": 6.126362665225171e-07, + "loss": 0.1728, + "step": 77460 + }, + { + "epoch": 2.9912351828255916, + "grad_norm": 0.010998820886015892, + "learning_rate": 5.868952469207306e-07, + "loss": 0.0807, + "step": 77470 + }, + { + "epoch": 2.9916212981196186, + "grad_norm": 0.11696690320968628, + "learning_rate": 5.611542273189441e-07, + "loss": 0.1431, + "step": 77480 + }, + { + "epoch": 2.992007413413645, + "grad_norm": 0.16014862060546875, + "learning_rate": 5.354132077171576e-07, + "loss": 0.0245, + "step": 77490 + }, + { + "epoch": 2.992393528707672, + "grad_norm": 0.01859739050269127, + "learning_rate": 5.096721881153713e-07, + "loss": 0.0767, + "step": 77500 + } + ], + "logging_steps": 10, + "max_steps": 77697, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 4.71942365184e+16, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/training_args.bin b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..51ec8c074aec62c737bdc86f3c7f1d33bd5739dc Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77500/training_args.bin differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/added_tokens.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/added_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..ea36ca9a30d42cfe00f964ed2b450595386671dc --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/added_tokens.json @@ -0,0 +1,98 @@ +{ + "": 36095, + "": 36085, + "": 36084, + "": 36083, + "": 36082, + "": 36081, + "": 36080, + "": 36079, + "": 36078, + "": 36077, + "": 36076, + "": 36094, + "": 36075, + "": 36074, + "": 36073, + "": 36072, + "": 36071, + "": 36070, + "": 36069, + "": 36068, + "": 36067, + "": 36066, + "": 36093, + "": 36065, + "": 36064, + "": 36063, + "": 36062, + "": 36061, + "": 36060, + "": 36059, + "": 36058, + "": 36057, + "": 36056, + "": 36092, + "": 36055, + "": 36054, + "": 36053, + "": 36052, + "": 36051, + "": 36050, + "": 36049, + "": 36048, + "": 36047, + "": 36046, + "": 36091, + "": 36045, + "": 36044, + "": 36043, + "": 36042, + "": 36041, + "": 36040, + "": 36039, + "": 36038, + "": 36037, + "": 36036, + "": 36090, + "": 36035, + 
"": 36034, + "": 36033, + "": 36032, + "": 36031, + "": 36030, + "": 36029, + "": 36028, + "": 36027, + "": 36026, + "": 36089, + "": 36025, + "": 36024, + "": 36023, + "": 36022, + "": 36021, + "": 36020, + "": 36019, + "": 36018, + "": 36017, + "": 36016, + "": 36088, + "": 36015, + "": 36014, + "": 36013, + "": 36012, + "": 36011, + "": 36010, + "": 36009, + "": 36008, + "": 36007, + "": 36006, + "": 36087, + "": 36005, + "": 36004, + "": 36003, + "": 36002, + "": 36001, + "": 36000, + "": 36086 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/config.json new file mode 100644 index 0000000000000000000000000000000000000000..402574816dbd44d7c00b58882614b0cba909b7da --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/config.json @@ -0,0 +1,31 @@ +{ + "architectures": [ + "T5ForConditionalGeneration" + ], + "classifier_dropout": 0.0, + "d_ff": 3072, + "d_kv": 64, + "d_model": 768, + "decoder_start_token_id": 0, + "dense_act_fn": "relu", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "relu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": false, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "n_positions": 512, + "num_decoder_layers": 12, + "num_heads": 12, + "num_layers": 12, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "torch_dtype": "float32", + "transformers_version": "4.53.2", + "use_cache": true, + "vocab_size": 36096 +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/generation_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..88a411aa38dffcace543088cff3153003de6e01a --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/generation_config.json @@ -0,0 +1,7 @@ +{ + "_from_model_config": true, + "decoder_start_token_id": 0, + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.53.2" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/model.safetensors b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..45232b544712ffe662b14a42ea4506be720389c1 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8487406c1348f4cccfa15647fb90cd1b13ea7dafaae00fe8285b5afeee4d9c26 +size 903834408 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/optimizer.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..ba820d58b93c67ea906c2e8ed252c5a10df63e2e --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d65d43d78914eaaca430bf9e5bc0dd2807b0608e43f77ee779f0329bc7b4a4c +size 1807824651 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/rng_state.pth b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..fad6d17e7d6fefd6f9749cc9920bdbf9e635421b Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/rng_state.pth differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/scheduler.pt b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/scheduler.pt new file mode 100644 index 
0000000000000000000000000000000000000000..c34ac1791753dd6ddd333fa585498f8dabbd1323 Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/scheduler.pt differ diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/special_tokens_map.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..843b3344f47b1783c48b5ac91bb6015ae9d3c4be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/special_tokens_map.json @@ -0,0 +1,121 @@ +{ + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/spiece.model b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/spiece.model new file mode 100644 index 0000000000000000000000000000000000000000..f8bddaf892bdf23d2148f3a3b358f16c5c45c7be --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/spiece.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59986b62f9f0b90edafb9b073ea7b93d21114a5841219a1ea2399ade73f729c6 +size 820370 diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/tokenizer_config.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7da9aea82b39809d9fbe6214e6f4fa2340b695f8 --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/tokenizer_config.json @@ -0,0 +1,905 @@ +{ + "add_prefix_space": true, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "36000": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36001": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36002": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36003": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36004": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36005": { + "content": "", + "lstrip": 
true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36006": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36007": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36008": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36009": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36010": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36011": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36012": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36013": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36014": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36015": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36016": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36017": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36018": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36019": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36020": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36021": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36022": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36023": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36024": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36025": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36026": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36027": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36028": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36029": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36030": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36031": { + "content": "", + "lstrip": true, + 
"normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36032": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36033": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36034": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36035": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36036": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36037": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36038": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36039": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36040": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36041": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36042": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36043": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36044": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36045": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36046": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36047": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36048": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36049": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36050": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36051": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36052": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36053": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36054": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36055": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36056": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36057": { + "content": "", + "lstrip": true, + "normalized": 
false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36058": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36059": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36060": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36061": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36062": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36063": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36064": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36065": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36066": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36067": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36068": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36069": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36070": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36071": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36072": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36073": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36074": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36075": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36076": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36077": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36078": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36079": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36080": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36081": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36082": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36083": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": 
true, + "single_word": false, + "special": true + }, + "36084": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36085": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36086": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36087": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36088": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36089": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36090": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36091": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36092": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36093": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36094": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + }, + "36095": { + "content": "", + "lstrip": true, + "normalized": false, + "rstrip": true, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "" + ], + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_ids": 96, + "extra_special_tokens": {}, + "legacy": true, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "tokenizer_class": "T5Tokenizer", + "unk_token": "" +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/trainer_state.json b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..b69ab670865b003c610571425f82185f3deb766a --- /dev/null +++ b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/trainer_state.json @@ -0,0 +1,54417 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 3.0, + "eval_steps": 500, + "global_step": 77697, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0003861152940267964, + "grad_norm": 8.003422737121582, + "learning_rate": 0.0001999768330823584, + "loss": 5.0107, + "step": 10 + }, + { + "epoch": 0.0007722305880535929, + "grad_norm": 11.965606689453125, + "learning_rate": 0.0001999510920627566, + "loss": 0.9013, + "step": 20 + }, + { + "epoch": 0.0011583458820803893, + 
"grad_norm": 9.783374786376953, + "learning_rate": 0.00019992535104315483, + "loss": 0.5506, + "step": 30 + }, + { + "epoch": 0.0015444611761071857, + "grad_norm": 5.720436096191406, + "learning_rate": 0.00019989961002355304, + "loss": 0.501, + "step": 40 + }, + { + "epoch": 0.001930576470133982, + "grad_norm": 11.18126392364502, + "learning_rate": 0.00019987386900395125, + "loss": 0.8176, + "step": 50 + }, + { + "epoch": 0.0023166917641607786, + "grad_norm": 8.88875675201416, + "learning_rate": 0.00019984812798434947, + "loss": 0.5404, + "step": 60 + }, + { + "epoch": 0.0027028070581875748, + "grad_norm": 2.9886066913604736, + "learning_rate": 0.00019982238696474768, + "loss": 0.5295, + "step": 70 + }, + { + "epoch": 0.0030889223522143714, + "grad_norm": 8.936307907104492, + "learning_rate": 0.0001997966459451459, + "loss": 0.7398, + "step": 80 + }, + { + "epoch": 0.0034750376462411676, + "grad_norm": 11.393534660339355, + "learning_rate": 0.0001997709049255441, + "loss": 0.6333, + "step": 90 + }, + { + "epoch": 0.003861152940267964, + "grad_norm": 8.725994110107422, + "learning_rate": 0.00019974516390594235, + "loss": 0.4689, + "step": 100 + }, + { + "epoch": 0.00424726823429476, + "grad_norm": 20.316652297973633, + "learning_rate": 0.00019971942288634053, + "loss": 0.8522, + "step": 110 + }, + { + "epoch": 0.004633383528321557, + "grad_norm": 4.232663631439209, + "learning_rate": 0.00019969368186673875, + "loss": 0.5041, + "step": 120 + }, + { + "epoch": 0.005019498822348353, + "grad_norm": 2.609255313873291, + "learning_rate": 0.00019966794084713696, + "loss": 0.7439, + "step": 130 + }, + { + "epoch": 0.0054056141163751495, + "grad_norm": 10.063919067382812, + "learning_rate": 0.00019964219982753517, + "loss": 0.6702, + "step": 140 + }, + { + "epoch": 0.005791729410401946, + "grad_norm": 5.799802303314209, + "learning_rate": 0.00019961645880793339, + "loss": 0.7356, + "step": 150 + }, + { + "epoch": 0.006177844704428743, + "grad_norm": 12.664258003234863, + "learning_rate": 0.0001995907177883316, + "loss": 0.4067, + "step": 160 + }, + { + "epoch": 0.006563959998455539, + "grad_norm": 9.42366886138916, + "learning_rate": 0.00019956497676872984, + "loss": 0.4767, + "step": 170 + }, + { + "epoch": 0.006950075292482335, + "grad_norm": 5.382272243499756, + "learning_rate": 0.00019953923574912803, + "loss": 0.536, + "step": 180 + }, + { + "epoch": 0.0073361905865091314, + "grad_norm": 9.797371864318848, + "learning_rate": 0.00019951349472952624, + "loss": 0.4735, + "step": 190 + }, + { + "epoch": 0.007722305880535928, + "grad_norm": 7.965329647064209, + "learning_rate": 0.00019948775370992445, + "loss": 0.3881, + "step": 200 + }, + { + "epoch": 0.008108421174562725, + "grad_norm": 4.075791835784912, + "learning_rate": 0.00019946201269032267, + "loss": 0.5564, + "step": 210 + }, + { + "epoch": 0.00849453646858952, + "grad_norm": 24.367305755615234, + "learning_rate": 0.0001994362716707209, + "loss": 0.9795, + "step": 220 + }, + { + "epoch": 0.008880651762616317, + "grad_norm": 9.627866744995117, + "learning_rate": 0.0001994105306511191, + "loss": 0.4528, + "step": 230 + }, + { + "epoch": 0.009266767056643114, + "grad_norm": 7.469555854797363, + "learning_rate": 0.00019938478963151733, + "loss": 0.447, + "step": 240 + }, + { + "epoch": 0.00965288235066991, + "grad_norm": 7.426730155944824, + "learning_rate": 0.00019935904861191552, + "loss": 0.6026, + "step": 250 + }, + { + "epoch": 0.010038997644696707, + "grad_norm": 6.999317169189453, + "learning_rate": 0.00019933330759231373, + "loss": 
0.4962, + "step": 260 + }, + { + "epoch": 0.010425112938723502, + "grad_norm": 10.492286682128906, + "learning_rate": 0.00019930756657271194, + "loss": 0.7987, + "step": 270 + }, + { + "epoch": 0.010811228232750299, + "grad_norm": 7.079407215118408, + "learning_rate": 0.00019928182555311016, + "loss": 0.4395, + "step": 280 + }, + { + "epoch": 0.011197343526777096, + "grad_norm": 9.610014915466309, + "learning_rate": 0.0001992560845335084, + "loss": 0.748, + "step": 290 + }, + { + "epoch": 0.011583458820803891, + "grad_norm": 5.993048667907715, + "learning_rate": 0.00019923034351390658, + "loss": 0.4328, + "step": 300 + }, + { + "epoch": 0.011969574114830689, + "grad_norm": 7.336791515350342, + "learning_rate": 0.00019920460249430483, + "loss": 0.4104, + "step": 310 + }, + { + "epoch": 0.012355689408857486, + "grad_norm": 7.967221736907959, + "learning_rate": 0.000199178861474703, + "loss": 0.4662, + "step": 320 + }, + { + "epoch": 0.012741804702884281, + "grad_norm": 4.464987754821777, + "learning_rate": 0.00019915312045510125, + "loss": 0.725, + "step": 330 + }, + { + "epoch": 0.013127919996911078, + "grad_norm": 8.669449806213379, + "learning_rate": 0.00019912737943549944, + "loss": 0.4256, + "step": 340 + }, + { + "epoch": 0.013514035290937873, + "grad_norm": 4.114014148712158, + "learning_rate": 0.00019910163841589765, + "loss": 0.4477, + "step": 350 + }, + { + "epoch": 0.01390015058496467, + "grad_norm": 9.254106521606445, + "learning_rate": 0.0001990758973962959, + "loss": 0.514, + "step": 360 + }, + { + "epoch": 0.014286265878991468, + "grad_norm": 0.8039970993995667, + "learning_rate": 0.00019905015637669408, + "loss": 0.5802, + "step": 370 + }, + { + "epoch": 0.014672381173018263, + "grad_norm": 3.9931838512420654, + "learning_rate": 0.00019902441535709232, + "loss": 0.8973, + "step": 380 + }, + { + "epoch": 0.01505849646704506, + "grad_norm": 1.7645355463027954, + "learning_rate": 0.0001989986743374905, + "loss": 0.7108, + "step": 390 + }, + { + "epoch": 0.015444611761071855, + "grad_norm": 6.8542866706848145, + "learning_rate": 0.00019897293331788875, + "loss": 0.5796, + "step": 400 + }, + { + "epoch": 0.015830727055098654, + "grad_norm": 5.278103828430176, + "learning_rate": 0.00019894719229828696, + "loss": 0.3841, + "step": 410 + }, + { + "epoch": 0.01621684234912545, + "grad_norm": 9.00206184387207, + "learning_rate": 0.00019892145127868514, + "loss": 0.5891, + "step": 420 + }, + { + "epoch": 0.016602957643152245, + "grad_norm": 7.684702396392822, + "learning_rate": 0.00019889571025908339, + "loss": 0.4868, + "step": 430 + }, + { + "epoch": 0.01698907293717904, + "grad_norm": 4.198502540588379, + "learning_rate": 0.00019886996923948157, + "loss": 0.571, + "step": 440 + }, + { + "epoch": 0.01737518823120584, + "grad_norm": 7.454501628875732, + "learning_rate": 0.0001988442282198798, + "loss": 0.5133, + "step": 450 + }, + { + "epoch": 0.017761303525232634, + "grad_norm": 13.236722946166992, + "learning_rate": 0.000198818487200278, + "loss": 0.4139, + "step": 460 + }, + { + "epoch": 0.01814741881925943, + "grad_norm": 6.4592390060424805, + "learning_rate": 0.00019879274618067624, + "loss": 0.6078, + "step": 470 + }, + { + "epoch": 0.01853353411328623, + "grad_norm": 11.73417854309082, + "learning_rate": 0.00019876700516107445, + "loss": 0.5472, + "step": 480 + }, + { + "epoch": 0.018919649407313024, + "grad_norm": 2.5162808895111084, + "learning_rate": 0.00019874126414147264, + "loss": 0.6611, + "step": 490 + }, + { + "epoch": 0.01930576470133982, + "grad_norm": 
4.9637837409973145, + "learning_rate": 0.00019871552312187088, + "loss": 0.6472, + "step": 500 + }, + { + "epoch": 0.019691879995366618, + "grad_norm": 11.545489311218262, + "learning_rate": 0.00019868978210226906, + "loss": 0.5304, + "step": 510 + }, + { + "epoch": 0.020077995289393413, + "grad_norm": 5.197858810424805, + "learning_rate": 0.0001986640410826673, + "loss": 0.605, + "step": 520 + }, + { + "epoch": 0.02046411058342021, + "grad_norm": 4.935055255889893, + "learning_rate": 0.0001986383000630655, + "loss": 0.6524, + "step": 530 + }, + { + "epoch": 0.020850225877447004, + "grad_norm": 5.838052749633789, + "learning_rate": 0.00019861255904346373, + "loss": 0.4957, + "step": 540 + }, + { + "epoch": 0.021236341171473803, + "grad_norm": 4.682408809661865, + "learning_rate": 0.00019858681802386194, + "loss": 0.8523, + "step": 550 + }, + { + "epoch": 0.021622456465500598, + "grad_norm": 10.720857620239258, + "learning_rate": 0.00019856107700426013, + "loss": 0.516, + "step": 560 + }, + { + "epoch": 0.022008571759527393, + "grad_norm": 6.515562534332275, + "learning_rate": 0.00019853533598465837, + "loss": 0.6095, + "step": 570 + }, + { + "epoch": 0.022394687053554192, + "grad_norm": 3.204960584640503, + "learning_rate": 0.00019850959496505656, + "loss": 0.6624, + "step": 580 + }, + { + "epoch": 0.022780802347580988, + "grad_norm": 2.305497884750366, + "learning_rate": 0.0001984838539454548, + "loss": 0.5986, + "step": 590 + }, + { + "epoch": 0.023166917641607783, + "grad_norm": 13.07105541229248, + "learning_rate": 0.000198458112925853, + "loss": 0.337, + "step": 600 + }, + { + "epoch": 0.023553032935634582, + "grad_norm": 3.1491329669952393, + "learning_rate": 0.00019843237190625122, + "loss": 0.5466, + "step": 610 + }, + { + "epoch": 0.023939148229661377, + "grad_norm": 1.92014479637146, + "learning_rate": 0.00019840663088664944, + "loss": 0.5525, + "step": 620 + }, + { + "epoch": 0.024325263523688172, + "grad_norm": 2.206550121307373, + "learning_rate": 0.00019838088986704762, + "loss": 0.5069, + "step": 630 + }, + { + "epoch": 0.02471137881771497, + "grad_norm": 2.386288642883301, + "learning_rate": 0.00019835514884744586, + "loss": 0.3749, + "step": 640 + }, + { + "epoch": 0.025097494111741767, + "grad_norm": 7.393959045410156, + "learning_rate": 0.00019832940782784405, + "loss": 0.7144, + "step": 650 + }, + { + "epoch": 0.025483609405768562, + "grad_norm": 5.7293171882629395, + "learning_rate": 0.0001983036668082423, + "loss": 0.5052, + "step": 660 + }, + { + "epoch": 0.025869724699795357, + "grad_norm": 6.440220832824707, + "learning_rate": 0.0001982779257886405, + "loss": 0.4343, + "step": 670 + }, + { + "epoch": 0.026255839993822156, + "grad_norm": 0.8553487658500671, + "learning_rate": 0.00019825218476903872, + "loss": 0.7562, + "step": 680 + }, + { + "epoch": 0.02664195528784895, + "grad_norm": 3.762784719467163, + "learning_rate": 0.00019822644374943693, + "loss": 0.5593, + "step": 690 + }, + { + "epoch": 0.027028070581875747, + "grad_norm": 11.674392700195312, + "learning_rate": 0.00019820070272983512, + "loss": 0.6069, + "step": 700 + }, + { + "epoch": 0.027414185875902546, + "grad_norm": 8.631232261657715, + "learning_rate": 0.00019817496171023336, + "loss": 0.3584, + "step": 710 + }, + { + "epoch": 0.02780030116992934, + "grad_norm": 5.7163920402526855, + "learning_rate": 0.00019814922069063157, + "loss": 0.5563, + "step": 720 + }, + { + "epoch": 0.028186416463956136, + "grad_norm": 8.186172485351562, + "learning_rate": 0.00019812347967102978, + "loss": 0.5289, + 
"step": 730 + }, + { + "epoch": 0.028572531757982935, + "grad_norm": 7.287814140319824, + "learning_rate": 0.000198097738651428, + "loss": 0.4543, + "step": 740 + }, + { + "epoch": 0.02895864705200973, + "grad_norm": 6.621245384216309, + "learning_rate": 0.0001980719976318262, + "loss": 0.3244, + "step": 750 + }, + { + "epoch": 0.029344762346036526, + "grad_norm": 3.5209403038024902, + "learning_rate": 0.00019804625661222442, + "loss": 0.5385, + "step": 760 + }, + { + "epoch": 0.029730877640063325, + "grad_norm": 2.562343120574951, + "learning_rate": 0.00019802051559262264, + "loss": 0.4868, + "step": 770 + }, + { + "epoch": 0.03011699293409012, + "grad_norm": 7.782780647277832, + "learning_rate": 0.00019799477457302085, + "loss": 0.5682, + "step": 780 + }, + { + "epoch": 0.030503108228116915, + "grad_norm": 8.173531532287598, + "learning_rate": 0.00019796903355341906, + "loss": 0.3557, + "step": 790 + }, + { + "epoch": 0.03088922352214371, + "grad_norm": 4.502675533294678, + "learning_rate": 0.00019794329253381728, + "loss": 0.517, + "step": 800 + }, + { + "epoch": 0.031275338816170506, + "grad_norm": 6.314894199371338, + "learning_rate": 0.0001979175515142155, + "loss": 0.6392, + "step": 810 + }, + { + "epoch": 0.03166145411019731, + "grad_norm": 7.054763317108154, + "learning_rate": 0.0001978918104946137, + "loss": 0.5823, + "step": 820 + }, + { + "epoch": 0.032047569404224104, + "grad_norm": 1.7847551107406616, + "learning_rate": 0.00019786606947501192, + "loss": 0.4495, + "step": 830 + }, + { + "epoch": 0.0324336846982509, + "grad_norm": 5.268612861633301, + "learning_rate": 0.00019784032845541013, + "loss": 0.4379, + "step": 840 + }, + { + "epoch": 0.032819799992277694, + "grad_norm": 4.834717273712158, + "learning_rate": 0.00019781458743580834, + "loss": 0.5379, + "step": 850 + }, + { + "epoch": 0.03320591528630449, + "grad_norm": 6.077203273773193, + "learning_rate": 0.00019778884641620656, + "loss": 0.5666, + "step": 860 + }, + { + "epoch": 0.033592030580331285, + "grad_norm": 0.9583851099014282, + "learning_rate": 0.00019776310539660477, + "loss": 0.8146, + "step": 870 + }, + { + "epoch": 0.03397814587435808, + "grad_norm": 0.8457469940185547, + "learning_rate": 0.00019773736437700298, + "loss": 0.3497, + "step": 880 + }, + { + "epoch": 0.03436426116838488, + "grad_norm": 5.251153945922852, + "learning_rate": 0.0001977116233574012, + "loss": 0.6299, + "step": 890 + }, + { + "epoch": 0.03475037646241168, + "grad_norm": 4.057605266571045, + "learning_rate": 0.0001976858823377994, + "loss": 0.5829, + "step": 900 + }, + { + "epoch": 0.03513649175643847, + "grad_norm": 7.625199794769287, + "learning_rate": 0.00019766014131819762, + "loss": 0.452, + "step": 910 + }, + { + "epoch": 0.03552260705046527, + "grad_norm": 4.618866443634033, + "learning_rate": 0.00019763440029859584, + "loss": 0.5693, + "step": 920 + }, + { + "epoch": 0.035908722344492064, + "grad_norm": 8.480955123901367, + "learning_rate": 0.00019760865927899405, + "loss": 0.4894, + "step": 930 + }, + { + "epoch": 0.03629483763851886, + "grad_norm": 6.537581920623779, + "learning_rate": 0.00019758291825939226, + "loss": 0.7396, + "step": 940 + }, + { + "epoch": 0.03668095293254566, + "grad_norm": 8.093205451965332, + "learning_rate": 0.00019755717723979048, + "loss": 0.3702, + "step": 950 + }, + { + "epoch": 0.03706706822657246, + "grad_norm": 2.523141860961914, + "learning_rate": 0.0001975314362201887, + "loss": 0.3746, + "step": 960 + }, + { + "epoch": 0.03745318352059925, + "grad_norm": 1.2707194089889526, + 
"learning_rate": 0.0001975056952005869, + "loss": 0.3211, + "step": 970 + }, + { + "epoch": 0.03783929881462605, + "grad_norm": 2.818798780441284, + "learning_rate": 0.00019747995418098512, + "loss": 0.6594, + "step": 980 + }, + { + "epoch": 0.03822541410865284, + "grad_norm": 7.373154640197754, + "learning_rate": 0.00019745421316138333, + "loss": 0.7825, + "step": 990 + }, + { + "epoch": 0.03861152940267964, + "grad_norm": 2.8925669193267822, + "learning_rate": 0.00019742847214178154, + "loss": 0.4847, + "step": 1000 + }, + { + "epoch": 0.038997644696706434, + "grad_norm": 10.87833023071289, + "learning_rate": 0.00019740273112217976, + "loss": 0.5098, + "step": 1010 + }, + { + "epoch": 0.039383759990733236, + "grad_norm": 3.8262317180633545, + "learning_rate": 0.00019737699010257797, + "loss": 0.5168, + "step": 1020 + }, + { + "epoch": 0.03976987528476003, + "grad_norm": 2.5567004680633545, + "learning_rate": 0.00019735124908297618, + "loss": 0.5597, + "step": 1030 + }, + { + "epoch": 0.04015599057878683, + "grad_norm": 4.385695457458496, + "learning_rate": 0.0001973255080633744, + "loss": 0.3187, + "step": 1040 + }, + { + "epoch": 0.04054210587281362, + "grad_norm": 1.8186907768249512, + "learning_rate": 0.0001972997670437726, + "loss": 0.6274, + "step": 1050 + }, + { + "epoch": 0.04092822116684042, + "grad_norm": 7.446481704711914, + "learning_rate": 0.00019727402602417082, + "loss": 0.4365, + "step": 1060 + }, + { + "epoch": 0.04131433646086721, + "grad_norm": 8.973576545715332, + "learning_rate": 0.00019724828500456904, + "loss": 0.6518, + "step": 1070 + }, + { + "epoch": 0.04170045175489401, + "grad_norm": 4.984101295471191, + "learning_rate": 0.00019722254398496725, + "loss": 0.4694, + "step": 1080 + }, + { + "epoch": 0.04208656704892081, + "grad_norm": 8.2625093460083, + "learning_rate": 0.00019719680296536546, + "loss": 0.5532, + "step": 1090 + }, + { + "epoch": 0.042472682342947606, + "grad_norm": 2.2425265312194824, + "learning_rate": 0.0001971710619457637, + "loss": 0.4011, + "step": 1100 + }, + { + "epoch": 0.0428587976369744, + "grad_norm": 1.4552969932556152, + "learning_rate": 0.0001971453209261619, + "loss": 0.396, + "step": 1110 + }, + { + "epoch": 0.043244912931001196, + "grad_norm": 2.0675857067108154, + "learning_rate": 0.0001971195799065601, + "loss": 0.3727, + "step": 1120 + }, + { + "epoch": 0.04363102822502799, + "grad_norm": 5.24460506439209, + "learning_rate": 0.00019709383888695832, + "loss": 0.5016, + "step": 1130 + }, + { + "epoch": 0.04401714351905479, + "grad_norm": 5.524955749511719, + "learning_rate": 0.00019706809786735653, + "loss": 0.5866, + "step": 1140 + }, + { + "epoch": 0.04440325881308159, + "grad_norm": 4.2749199867248535, + "learning_rate": 0.00019704235684775474, + "loss": 0.4591, + "step": 1150 + }, + { + "epoch": 0.044789374107108385, + "grad_norm": 3.460395336151123, + "learning_rate": 0.00019701661582815296, + "loss": 0.5275, + "step": 1160 + }, + { + "epoch": 0.04517548940113518, + "grad_norm": 1.3979772329330444, + "learning_rate": 0.0001969908748085512, + "loss": 0.3375, + "step": 1170 + }, + { + "epoch": 0.045561604695161975, + "grad_norm": 2.29923939704895, + "learning_rate": 0.00019696513378894938, + "loss": 0.5683, + "step": 1180 + }, + { + "epoch": 0.04594771998918877, + "grad_norm": 3.211496353149414, + "learning_rate": 0.0001969393927693476, + "loss": 0.7122, + "step": 1190 + }, + { + "epoch": 0.046333835283215566, + "grad_norm": 4.18447208404541, + "learning_rate": 0.0001969136517497458, + "loss": 0.5149, + "step": 1200 + }, + { + 
"epoch": 0.04671995057724236, + "grad_norm": 14.650918960571289, + "learning_rate": 0.00019688791073014402, + "loss": 0.6384, + "step": 1210 + }, + { + "epoch": 0.047106065871269164, + "grad_norm": 4.956239700317383, + "learning_rate": 0.00019686216971054226, + "loss": 0.3602, + "step": 1220 + }, + { + "epoch": 0.04749218116529596, + "grad_norm": 4.0486860275268555, + "learning_rate": 0.00019683642869094045, + "loss": 0.7719, + "step": 1230 + }, + { + "epoch": 0.047878296459322754, + "grad_norm": 4.587133407592773, + "learning_rate": 0.0001968106876713387, + "loss": 0.4079, + "step": 1240 + }, + { + "epoch": 0.04826441175334955, + "grad_norm": 0.7830008268356323, + "learning_rate": 0.00019678494665173688, + "loss": 0.5841, + "step": 1250 + }, + { + "epoch": 0.048650527047376345, + "grad_norm": 5.378068447113037, + "learning_rate": 0.0001967592056321351, + "loss": 0.3226, + "step": 1260 + }, + { + "epoch": 0.04903664234140314, + "grad_norm": 4.002605438232422, + "learning_rate": 0.0001967334646125333, + "loss": 0.4411, + "step": 1270 + }, + { + "epoch": 0.04942275763542994, + "grad_norm": 4.695134162902832, + "learning_rate": 0.00019670772359293152, + "loss": 0.415, + "step": 1280 + }, + { + "epoch": 0.04980887292945674, + "grad_norm": 6.046143054962158, + "learning_rate": 0.00019668198257332976, + "loss": 0.463, + "step": 1290 + }, + { + "epoch": 0.05019498822348353, + "grad_norm": 1.8300361633300781, + "learning_rate": 0.00019665624155372794, + "loss": 0.408, + "step": 1300 + }, + { + "epoch": 0.05058110351751033, + "grad_norm": 5.80141544342041, + "learning_rate": 0.00019663050053412618, + "loss": 0.4481, + "step": 1310 + }, + { + "epoch": 0.050967218811537124, + "grad_norm": 4.103593349456787, + "learning_rate": 0.00019660475951452437, + "loss": 0.3054, + "step": 1320 + }, + { + "epoch": 0.05135333410556392, + "grad_norm": 9.129929542541504, + "learning_rate": 0.00019657901849492258, + "loss": 0.5554, + "step": 1330 + }, + { + "epoch": 0.051739449399590715, + "grad_norm": 6.979663372039795, + "learning_rate": 0.0001965532774753208, + "loss": 0.388, + "step": 1340 + }, + { + "epoch": 0.05212556469361752, + "grad_norm": 6.329915523529053, + "learning_rate": 0.000196527536455719, + "loss": 0.8894, + "step": 1350 + }, + { + "epoch": 0.05251167998764431, + "grad_norm": 0.2686227262020111, + "learning_rate": 0.00019650179543611725, + "loss": 0.5032, + "step": 1360 + }, + { + "epoch": 0.05289779528167111, + "grad_norm": 4.818896770477295, + "learning_rate": 0.00019647605441651544, + "loss": 0.3417, + "step": 1370 + }, + { + "epoch": 0.0532839105756979, + "grad_norm": 6.252008438110352, + "learning_rate": 0.00019645031339691368, + "loss": 0.398, + "step": 1380 + }, + { + "epoch": 0.0536700258697247, + "grad_norm": 1.2734620571136475, + "learning_rate": 0.00019642457237731186, + "loss": 0.5369, + "step": 1390 + }, + { + "epoch": 0.054056141163751494, + "grad_norm": 6.511690616607666, + "learning_rate": 0.00019639883135771008, + "loss": 0.4646, + "step": 1400 + }, + { + "epoch": 0.054442256457778296, + "grad_norm": 3.2352371215820312, + "learning_rate": 0.00019637309033810832, + "loss": 0.3805, + "step": 1410 + }, + { + "epoch": 0.05482837175180509, + "grad_norm": 1.0574132204055786, + "learning_rate": 0.0001963473493185065, + "loss": 0.4572, + "step": 1420 + }, + { + "epoch": 0.055214487045831886, + "grad_norm": 2.8525452613830566, + "learning_rate": 0.00019632160829890474, + "loss": 0.4417, + "step": 1430 + }, + { + "epoch": 0.05560060233985868, + "grad_norm": 3.588179111480713, + 
"learning_rate": 0.00019629586727930293, + "loss": 0.6214, + "step": 1440 + }, + { + "epoch": 0.05598671763388548, + "grad_norm": 3.969320058822632, + "learning_rate": 0.00019627012625970117, + "loss": 0.6114, + "step": 1450 + }, + { + "epoch": 0.05637283292791227, + "grad_norm": 3.465053081512451, + "learning_rate": 0.00019624438524009936, + "loss": 0.6066, + "step": 1460 + }, + { + "epoch": 0.05675894822193907, + "grad_norm": 3.5419201850891113, + "learning_rate": 0.00019621864422049757, + "loss": 0.3906, + "step": 1470 + }, + { + "epoch": 0.05714506351596587, + "grad_norm": 3.4580233097076416, + "learning_rate": 0.0001961929032008958, + "loss": 0.9283, + "step": 1480 + }, + { + "epoch": 0.057531178809992665, + "grad_norm": 4.222144603729248, + "learning_rate": 0.000196167162181294, + "loss": 0.4225, + "step": 1490 + }, + { + "epoch": 0.05791729410401946, + "grad_norm": 0.8072681427001953, + "learning_rate": 0.00019614142116169224, + "loss": 0.5012, + "step": 1500 + }, + { + "epoch": 0.058303409398046256, + "grad_norm": 2.827258348464966, + "learning_rate": 0.00019611568014209042, + "loss": 0.4333, + "step": 1510 + }, + { + "epoch": 0.05868952469207305, + "grad_norm": 1.3494776487350464, + "learning_rate": 0.00019608993912248866, + "loss": 0.3019, + "step": 1520 + }, + { + "epoch": 0.05907563998609985, + "grad_norm": 4.3279900550842285, + "learning_rate": 0.00019606419810288688, + "loss": 0.4807, + "step": 1530 + }, + { + "epoch": 0.05946175528012665, + "grad_norm": 3.8996474742889404, + "learning_rate": 0.0001960384570832851, + "loss": 0.4876, + "step": 1540 + }, + { + "epoch": 0.059847870574153444, + "grad_norm": 5.255978584289551, + "learning_rate": 0.0001960127160636833, + "loss": 0.4661, + "step": 1550 + }, + { + "epoch": 0.06023398586818024, + "grad_norm": 5.172120094299316, + "learning_rate": 0.0001959869750440815, + "loss": 0.4885, + "step": 1560 + }, + { + "epoch": 0.060620101162207035, + "grad_norm": 5.385959625244141, + "learning_rate": 0.00019596123402447973, + "loss": 0.2995, + "step": 1570 + }, + { + "epoch": 0.06100621645623383, + "grad_norm": 3.9922871589660645, + "learning_rate": 0.00019593549300487792, + "loss": 0.4568, + "step": 1580 + }, + { + "epoch": 0.061392331750260626, + "grad_norm": 6.048642158508301, + "learning_rate": 0.00019590975198527616, + "loss": 0.4649, + "step": 1590 + }, + { + "epoch": 0.06177844704428742, + "grad_norm": 1.0315563678741455, + "learning_rate": 0.00019588401096567437, + "loss": 0.5175, + "step": 1600 + }, + { + "epoch": 0.06216456233831422, + "grad_norm": 12.403678894042969, + "learning_rate": 0.00019585826994607258, + "loss": 0.523, + "step": 1610 + }, + { + "epoch": 0.06255067763234101, + "grad_norm": 6.127188205718994, + "learning_rate": 0.0001958325289264708, + "loss": 0.5861, + "step": 1620 + }, + { + "epoch": 0.06293679292636781, + "grad_norm": 6.398592948913574, + "learning_rate": 0.00019580678790686898, + "loss": 0.7471, + "step": 1630 + }, + { + "epoch": 0.06332290822039462, + "grad_norm": 4.127200603485107, + "learning_rate": 0.00019578104688726722, + "loss": 0.4921, + "step": 1640 + }, + { + "epoch": 0.06370902351442141, + "grad_norm": 4.601541042327881, + "learning_rate": 0.0001957553058676654, + "loss": 0.3682, + "step": 1650 + }, + { + "epoch": 0.06409513880844821, + "grad_norm": 6.32781457901001, + "learning_rate": 0.00019572956484806365, + "loss": 0.3748, + "step": 1660 + }, + { + "epoch": 0.064481254102475, + "grad_norm": 3.7280173301696777, + "learning_rate": 0.00019570382382846186, + "loss": 0.5912, + "step": 1670 + }, 
+ { + "epoch": 0.0648673693965018, + "grad_norm": 6.7821946144104, + "learning_rate": 0.00019567808280886008, + "loss": 0.4073, + "step": 1680 + }, + { + "epoch": 0.0652534846905286, + "grad_norm": 1.4645791053771973, + "learning_rate": 0.0001956523417892583, + "loss": 0.7164, + "step": 1690 + }, + { + "epoch": 0.06563959998455539, + "grad_norm": 2.367361545562744, + "learning_rate": 0.00019562660076965648, + "loss": 0.3859, + "step": 1700 + }, + { + "epoch": 0.06602571527858218, + "grad_norm": 2.198493480682373, + "learning_rate": 0.00019560085975005472, + "loss": 0.4928, + "step": 1710 + }, + { + "epoch": 0.06641183057260898, + "grad_norm": 1.882567048072815, + "learning_rate": 0.00019557511873045293, + "loss": 0.5861, + "step": 1720 + }, + { + "epoch": 0.06679794586663577, + "grad_norm": 6.324089527130127, + "learning_rate": 0.00019554937771085114, + "loss": 0.6249, + "step": 1730 + }, + { + "epoch": 0.06718406116066257, + "grad_norm": 4.283392906188965, + "learning_rate": 0.00019552363669124936, + "loss": 0.5403, + "step": 1740 + }, + { + "epoch": 0.06757017645468937, + "grad_norm": 4.464428424835205, + "learning_rate": 0.00019549789567164757, + "loss": 0.5815, + "step": 1750 + }, + { + "epoch": 0.06795629174871616, + "grad_norm": 0.32923218607902527, + "learning_rate": 0.00019547215465204578, + "loss": 0.3791, + "step": 1760 + }, + { + "epoch": 0.06834240704274297, + "grad_norm": 5.255763053894043, + "learning_rate": 0.00019544641363244397, + "loss": 0.4252, + "step": 1770 + }, + { + "epoch": 0.06872852233676977, + "grad_norm": 2.1615116596221924, + "learning_rate": 0.0001954206726128422, + "loss": 0.4457, + "step": 1780 + }, + { + "epoch": 0.06911463763079656, + "grad_norm": 2.0209217071533203, + "learning_rate": 0.00019539493159324042, + "loss": 0.4377, + "step": 1790 + }, + { + "epoch": 0.06950075292482336, + "grad_norm": 8.12317943572998, + "learning_rate": 0.00019536919057363864, + "loss": 0.4025, + "step": 1800 + }, + { + "epoch": 0.06988686821885015, + "grad_norm": 1.7034660577774048, + "learning_rate": 0.00019534344955403685, + "loss": 0.2915, + "step": 1810 + }, + { + "epoch": 0.07027298351287695, + "grad_norm": 4.640942096710205, + "learning_rate": 0.00019531770853443506, + "loss": 0.6588, + "step": 1820 + }, + { + "epoch": 0.07065909880690374, + "grad_norm": 4.202883243560791, + "learning_rate": 0.00019529196751483328, + "loss": 0.4442, + "step": 1830 + }, + { + "epoch": 0.07104521410093054, + "grad_norm": 3.26898193359375, + "learning_rate": 0.00019526622649523146, + "loss": 0.5065, + "step": 1840 + }, + { + "epoch": 0.07143132939495733, + "grad_norm": 8.189995765686035, + "learning_rate": 0.0001952404854756297, + "loss": 0.5258, + "step": 1850 + }, + { + "epoch": 0.07181744468898413, + "grad_norm": 3.2618284225463867, + "learning_rate": 0.00019521474445602792, + "loss": 0.5037, + "step": 1860 + }, + { + "epoch": 0.07220355998301092, + "grad_norm": 2.168548583984375, + "learning_rate": 0.00019518900343642613, + "loss": 0.4887, + "step": 1870 + }, + { + "epoch": 0.07258967527703772, + "grad_norm": 2.2029404640197754, + "learning_rate": 0.00019516326241682434, + "loss": 0.4646, + "step": 1880 + }, + { + "epoch": 0.07297579057106451, + "grad_norm": 1.561713695526123, + "learning_rate": 0.00019513752139722256, + "loss": 0.432, + "step": 1890 + }, + { + "epoch": 0.07336190586509132, + "grad_norm": 3.428372621536255, + "learning_rate": 0.00019511178037762077, + "loss": 0.4124, + "step": 1900 + }, + { + "epoch": 0.07374802115911812, + "grad_norm": 5.6706671714782715, + 
"learning_rate": 0.00019508603935801898, + "loss": 0.4431, + "step": 1910 + }, + { + "epoch": 0.07413413645314491, + "grad_norm": 8.872734069824219, + "learning_rate": 0.0001950602983384172, + "loss": 0.7001, + "step": 1920 + }, + { + "epoch": 0.07452025174717171, + "grad_norm": 1.6821974515914917, + "learning_rate": 0.0001950345573188154, + "loss": 0.4204, + "step": 1930 + }, + { + "epoch": 0.0749063670411985, + "grad_norm": 3.117480993270874, + "learning_rate": 0.00019500881629921362, + "loss": 0.3748, + "step": 1940 + }, + { + "epoch": 0.0752924823352253, + "grad_norm": 0.8384984731674194, + "learning_rate": 0.00019498307527961184, + "loss": 0.2636, + "step": 1950 + }, + { + "epoch": 0.0756785976292521, + "grad_norm": 2.8956708908081055, + "learning_rate": 0.00019495733426001005, + "loss": 0.4514, + "step": 1960 + }, + { + "epoch": 0.07606471292327889, + "grad_norm": 11.233087539672852, + "learning_rate": 0.00019493159324040826, + "loss": 0.4002, + "step": 1970 + }, + { + "epoch": 0.07645082821730569, + "grad_norm": 4.066893100738525, + "learning_rate": 0.00019490585222080648, + "loss": 0.4449, + "step": 1980 + }, + { + "epoch": 0.07683694351133248, + "grad_norm": 4.854077339172363, + "learning_rate": 0.0001948801112012047, + "loss": 0.4961, + "step": 1990 + }, + { + "epoch": 0.07722305880535928, + "grad_norm": 2.5722827911376953, + "learning_rate": 0.0001948543701816029, + "loss": 0.3743, + "step": 2000 + }, + { + "epoch": 0.07760917409938607, + "grad_norm": 5.842077255249023, + "learning_rate": 0.00019482862916200112, + "loss": 0.2906, + "step": 2010 + }, + { + "epoch": 0.07799528939341287, + "grad_norm": 6.163092136383057, + "learning_rate": 0.00019480288814239933, + "loss": 0.4374, + "step": 2020 + }, + { + "epoch": 0.07838140468743968, + "grad_norm": 4.589334487915039, + "learning_rate": 0.00019477714712279754, + "loss": 0.484, + "step": 2030 + }, + { + "epoch": 0.07876751998146647, + "grad_norm": 6.951212406158447, + "learning_rate": 0.00019475140610319576, + "loss": 0.5767, + "step": 2040 + }, + { + "epoch": 0.07915363527549327, + "grad_norm": 3.3662521839141846, + "learning_rate": 0.00019472566508359397, + "loss": 0.5566, + "step": 2050 + }, + { + "epoch": 0.07953975056952006, + "grad_norm": 1.6602391004562378, + "learning_rate": 0.00019469992406399218, + "loss": 0.1436, + "step": 2060 + }, + { + "epoch": 0.07992586586354686, + "grad_norm": 6.451857089996338, + "learning_rate": 0.0001946741830443904, + "loss": 0.3778, + "step": 2070 + }, + { + "epoch": 0.08031198115757365, + "grad_norm": 3.7249560356140137, + "learning_rate": 0.0001946484420247886, + "loss": 0.5391, + "step": 2080 + }, + { + "epoch": 0.08069809645160045, + "grad_norm": 4.138098239898682, + "learning_rate": 0.00019462270100518682, + "loss": 0.3598, + "step": 2090 + }, + { + "epoch": 0.08108421174562724, + "grad_norm": 3.224778175354004, + "learning_rate": 0.00019459695998558504, + "loss": 0.2967, + "step": 2100 + }, + { + "epoch": 0.08147032703965404, + "grad_norm": 1.3951358795166016, + "learning_rate": 0.00019457121896598325, + "loss": 0.2698, + "step": 2110 + }, + { + "epoch": 0.08185644233368083, + "grad_norm": 5.956802845001221, + "learning_rate": 0.00019454547794638146, + "loss": 0.451, + "step": 2120 + }, + { + "epoch": 0.08224255762770763, + "grad_norm": 3.456360101699829, + "learning_rate": 0.00019451973692677968, + "loss": 0.3365, + "step": 2130 + }, + { + "epoch": 0.08262867292173443, + "grad_norm": 2.9433653354644775, + "learning_rate": 0.0001944939959071779, + "loss": 0.4424, + "step": 2140 + }, + { 
+ "epoch": 0.08301478821576122, + "grad_norm": 3.136000871658325, + "learning_rate": 0.0001944682548875761, + "loss": 0.6224, + "step": 2150 + }, + { + "epoch": 0.08340090350978802, + "grad_norm": 2.669084310531616, + "learning_rate": 0.00019444251386797432, + "loss": 0.4435, + "step": 2160 + }, + { + "epoch": 0.08378701880381483, + "grad_norm": 4.573731899261475, + "learning_rate": 0.00019441677284837253, + "loss": 0.5555, + "step": 2170 + }, + { + "epoch": 0.08417313409784162, + "grad_norm": 6.354156017303467, + "learning_rate": 0.00019439103182877074, + "loss": 0.4232, + "step": 2180 + }, + { + "epoch": 0.08455924939186842, + "grad_norm": 2.993691921234131, + "learning_rate": 0.00019436529080916895, + "loss": 0.51, + "step": 2190 + }, + { + "epoch": 0.08494536468589521, + "grad_norm": 3.6496782302856445, + "learning_rate": 0.00019433954978956717, + "loss": 0.4031, + "step": 2200 + }, + { + "epoch": 0.085331479979922, + "grad_norm": 1.9039051532745361, + "learning_rate": 0.00019431380876996538, + "loss": 0.4407, + "step": 2210 + }, + { + "epoch": 0.0857175952739488, + "grad_norm": 2.3211915493011475, + "learning_rate": 0.00019428806775036362, + "loss": 0.4057, + "step": 2220 + }, + { + "epoch": 0.0861037105679756, + "grad_norm": 4.883905410766602, + "learning_rate": 0.0001942623267307618, + "loss": 0.6223, + "step": 2230 + }, + { + "epoch": 0.08648982586200239, + "grad_norm": 2.164484977722168, + "learning_rate": 0.00019423658571116005, + "loss": 0.3141, + "step": 2240 + }, + { + "epoch": 0.08687594115602919, + "grad_norm": 2.2078909873962402, + "learning_rate": 0.00019421084469155823, + "loss": 0.3523, + "step": 2250 + }, + { + "epoch": 0.08726205645005598, + "grad_norm": 1.0987967252731323, + "learning_rate": 0.00019418510367195645, + "loss": 0.4013, + "step": 2260 + }, + { + "epoch": 0.08764817174408278, + "grad_norm": 2.418327569961548, + "learning_rate": 0.00019415936265235466, + "loss": 0.581, + "step": 2270 + }, + { + "epoch": 0.08803428703810957, + "grad_norm": 4.029652118682861, + "learning_rate": 0.00019413362163275287, + "loss": 0.5244, + "step": 2280 + }, + { + "epoch": 0.08842040233213637, + "grad_norm": 3.4661777019500732, + "learning_rate": 0.00019410788061315112, + "loss": 0.4531, + "step": 2290 + }, + { + "epoch": 0.08880651762616318, + "grad_norm": 1.0475856065750122, + "learning_rate": 0.0001940821395935493, + "loss": 0.4362, + "step": 2300 + }, + { + "epoch": 0.08919263292018997, + "grad_norm": 4.021854400634766, + "learning_rate": 0.00019405639857394754, + "loss": 0.4532, + "step": 2310 + }, + { + "epoch": 0.08957874821421677, + "grad_norm": 1.836438536643982, + "learning_rate": 0.00019403065755434573, + "loss": 0.6858, + "step": 2320 + }, + { + "epoch": 0.08996486350824356, + "grad_norm": 2.5251567363739014, + "learning_rate": 0.00019400491653474394, + "loss": 0.3619, + "step": 2330 + }, + { + "epoch": 0.09035097880227036, + "grad_norm": 3.067208766937256, + "learning_rate": 0.00019397917551514215, + "loss": 0.7376, + "step": 2340 + }, + { + "epoch": 0.09073709409629716, + "grad_norm": 0.9124518036842346, + "learning_rate": 0.00019395343449554037, + "loss": 0.4193, + "step": 2350 + }, + { + "epoch": 0.09112320939032395, + "grad_norm": 3.8170812129974365, + "learning_rate": 0.0001939276934759386, + "loss": 0.5393, + "step": 2360 + }, + { + "epoch": 0.09150932468435075, + "grad_norm": 8.19250202178955, + "learning_rate": 0.0001939019524563368, + "loss": 0.424, + "step": 2370 + }, + { + "epoch": 0.09189543997837754, + "grad_norm": 4.459112167358398, + "learning_rate": 
0.00019387621143673503, + "loss": 0.3278, + "step": 2380 + }, + { + "epoch": 0.09228155527240434, + "grad_norm": 5.578339576721191, + "learning_rate": 0.00019385047041713322, + "loss": 0.5223, + "step": 2390 + }, + { + "epoch": 0.09266767056643113, + "grad_norm": 1.3707878589630127, + "learning_rate": 0.00019382472939753143, + "loss": 0.3004, + "step": 2400 + }, + { + "epoch": 0.09305378586045793, + "grad_norm": 5.0041184425354, + "learning_rate": 0.00019379898837792967, + "loss": 0.4378, + "step": 2410 + }, + { + "epoch": 0.09343990115448472, + "grad_norm": 5.668384552001953, + "learning_rate": 0.00019377324735832786, + "loss": 0.499, + "step": 2420 + }, + { + "epoch": 0.09382601644851153, + "grad_norm": 5.605838775634766, + "learning_rate": 0.0001937475063387261, + "loss": 0.5642, + "step": 2430 + }, + { + "epoch": 0.09421213174253833, + "grad_norm": 5.055904865264893, + "learning_rate": 0.0001937217653191243, + "loss": 0.6225, + "step": 2440 + }, + { + "epoch": 0.09459824703656512, + "grad_norm": 3.1779348850250244, + "learning_rate": 0.00019369602429952253, + "loss": 0.3673, + "step": 2450 + }, + { + "epoch": 0.09498436233059192, + "grad_norm": 2.540269136428833, + "learning_rate": 0.00019367028327992071, + "loss": 0.3499, + "step": 2460 + }, + { + "epoch": 0.09537047762461871, + "grad_norm": 2.284114122390747, + "learning_rate": 0.00019364454226031893, + "loss": 0.5405, + "step": 2470 + }, + { + "epoch": 0.09575659291864551, + "grad_norm": 6.752682685852051, + "learning_rate": 0.00019361880124071717, + "loss": 0.5594, + "step": 2480 + }, + { + "epoch": 0.0961427082126723, + "grad_norm": 2.5659310817718506, + "learning_rate": 0.00019359306022111535, + "loss": 0.5872, + "step": 2490 + }, + { + "epoch": 0.0965288235066991, + "grad_norm": 4.503110885620117, + "learning_rate": 0.0001935673192015136, + "loss": 0.5171, + "step": 2500 + }, + { + "epoch": 0.0969149388007259, + "grad_norm": 1.7715080976486206, + "learning_rate": 0.00019354157818191178, + "loss": 0.6131, + "step": 2510 + }, + { + "epoch": 0.09730105409475269, + "grad_norm": 4.479000568389893, + "learning_rate": 0.00019351583716231002, + "loss": 0.4396, + "step": 2520 + }, + { + "epoch": 0.09768716938877949, + "grad_norm": 0.9424387812614441, + "learning_rate": 0.00019349009614270823, + "loss": 0.3166, + "step": 2530 + }, + { + "epoch": 0.09807328468280628, + "grad_norm": 3.792689800262451, + "learning_rate": 0.00019346435512310642, + "loss": 0.2591, + "step": 2540 + }, + { + "epoch": 0.09845939997683308, + "grad_norm": 2.9132003784179688, + "learning_rate": 0.00019343861410350466, + "loss": 0.3523, + "step": 2550 + }, + { + "epoch": 0.09884551527085988, + "grad_norm": 1.6421749591827393, + "learning_rate": 0.00019341287308390285, + "loss": 0.2805, + "step": 2560 + }, + { + "epoch": 0.09923163056488668, + "grad_norm": 1.9469813108444214, + "learning_rate": 0.0001933871320643011, + "loss": 0.3929, + "step": 2570 + }, + { + "epoch": 0.09961774585891348, + "grad_norm": 3.081062078475952, + "learning_rate": 0.00019336139104469927, + "loss": 0.569, + "step": 2580 + }, + { + "epoch": 0.10000386115294027, + "grad_norm": 4.728143215179443, + "learning_rate": 0.00019333565002509751, + "loss": 0.3443, + "step": 2590 + }, + { + "epoch": 0.10038997644696707, + "grad_norm": 2.4117422103881836, + "learning_rate": 0.00019330990900549573, + "loss": 0.4492, + "step": 2600 + }, + { + "epoch": 0.10077609174099386, + "grad_norm": 5.794168472290039, + "learning_rate": 0.00019328416798589391, + "loss": 0.5088, + "step": 2610 + }, + { + "epoch": 
0.10116220703502066, + "grad_norm": 1.0662094354629517, + "learning_rate": 0.00019325842696629215, + "loss": 0.4683, + "step": 2620 + }, + { + "epoch": 0.10154832232904745, + "grad_norm": 2.13590931892395, + "learning_rate": 0.00019323268594669034, + "loss": 0.5833, + "step": 2630 + }, + { + "epoch": 0.10193443762307425, + "grad_norm": 7.52834415435791, + "learning_rate": 0.00019320694492708858, + "loss": 0.54, + "step": 2640 + }, + { + "epoch": 0.10232055291710104, + "grad_norm": 6.155847072601318, + "learning_rate": 0.00019318120390748677, + "loss": 0.6809, + "step": 2650 + }, + { + "epoch": 0.10270666821112784, + "grad_norm": 6.527890205383301, + "learning_rate": 0.000193155462887885, + "loss": 0.5239, + "step": 2660 + }, + { + "epoch": 0.10309278350515463, + "grad_norm": 3.3918986320495605, + "learning_rate": 0.00019312972186828322, + "loss": 0.4636, + "step": 2670 + }, + { + "epoch": 0.10347889879918143, + "grad_norm": 2.0933191776275635, + "learning_rate": 0.0001931039808486814, + "loss": 0.5288, + "step": 2680 + }, + { + "epoch": 0.10386501409320824, + "grad_norm": 2.4386465549468994, + "learning_rate": 0.00019307823982907965, + "loss": 0.5496, + "step": 2690 + }, + { + "epoch": 0.10425112938723503, + "grad_norm": 2.885315418243408, + "learning_rate": 0.00019305249880947783, + "loss": 0.5928, + "step": 2700 + }, + { + "epoch": 0.10463724468126183, + "grad_norm": 4.986598968505859, + "learning_rate": 0.00019302675778987607, + "loss": 0.3513, + "step": 2710 + }, + { + "epoch": 0.10502335997528862, + "grad_norm": 2.7999277114868164, + "learning_rate": 0.0001930010167702743, + "loss": 0.4484, + "step": 2720 + }, + { + "epoch": 0.10540947526931542, + "grad_norm": 1.6467676162719727, + "learning_rate": 0.0001929752757506725, + "loss": 0.4729, + "step": 2730 + }, + { + "epoch": 0.10579559056334222, + "grad_norm": 2.168477773666382, + "learning_rate": 0.00019294953473107071, + "loss": 0.3579, + "step": 2740 + }, + { + "epoch": 0.10618170585736901, + "grad_norm": 1.5439807176589966, + "learning_rate": 0.00019292379371146893, + "loss": 0.5715, + "step": 2750 + }, + { + "epoch": 0.1065678211513958, + "grad_norm": 1.2175947427749634, + "learning_rate": 0.00019289805269186714, + "loss": 0.4059, + "step": 2760 + }, + { + "epoch": 0.1069539364454226, + "grad_norm": 2.649782419204712, + "learning_rate": 0.00019287231167226533, + "loss": 0.7864, + "step": 2770 + }, + { + "epoch": 0.1073400517394494, + "grad_norm": 3.698002815246582, + "learning_rate": 0.00019284657065266357, + "loss": 0.5107, + "step": 2780 + }, + { + "epoch": 0.10772616703347619, + "grad_norm": 1.7418729066848755, + "learning_rate": 0.00019282082963306178, + "loss": 0.332, + "step": 2790 + }, + { + "epoch": 0.10811228232750299, + "grad_norm": 2.2264151573181152, + "learning_rate": 0.00019279508861346, + "loss": 0.3944, + "step": 2800 + }, + { + "epoch": 0.10849839762152978, + "grad_norm": 1.3407092094421387, + "learning_rate": 0.0001927693475938582, + "loss": 0.4603, + "step": 2810 + }, + { + "epoch": 0.10888451291555659, + "grad_norm": 0.3719189763069153, + "learning_rate": 0.00019274360657425642, + "loss": 0.496, + "step": 2820 + }, + { + "epoch": 0.10927062820958339, + "grad_norm": 4.728814125061035, + "learning_rate": 0.00019271786555465463, + "loss": 0.4079, + "step": 2830 + }, + { + "epoch": 0.10965674350361018, + "grad_norm": 5.007620334625244, + "learning_rate": 0.00019269212453505285, + "loss": 0.5221, + "step": 2840 + }, + { + "epoch": 0.11004285879763698, + "grad_norm": 2.7476320266723633, + "learning_rate": 
0.00019266638351545106, + "loss": 0.3705, + "step": 2850 + }, + { + "epoch": 0.11042897409166377, + "grad_norm": 2.2960126399993896, + "learning_rate": 0.00019264064249584927, + "loss": 0.6656, + "step": 2860 + }, + { + "epoch": 0.11081508938569057, + "grad_norm": 0.9589812755584717, + "learning_rate": 0.0001926149014762475, + "loss": 0.6977, + "step": 2870 + }, + { + "epoch": 0.11120120467971736, + "grad_norm": 2.274984121322632, + "learning_rate": 0.0001925891604566457, + "loss": 0.4237, + "step": 2880 + }, + { + "epoch": 0.11158731997374416, + "grad_norm": 1.8849111795425415, + "learning_rate": 0.00019256341943704391, + "loss": 0.2357, + "step": 2890 + }, + { + "epoch": 0.11197343526777095, + "grad_norm": 2.7264740467071533, + "learning_rate": 0.00019253767841744213, + "loss": 0.3422, + "step": 2900 + }, + { + "epoch": 0.11235955056179775, + "grad_norm": 3.832827568054199, + "learning_rate": 0.00019251193739784034, + "loss": 0.3861, + "step": 2910 + }, + { + "epoch": 0.11274566585582455, + "grad_norm": 2.3612313270568848, + "learning_rate": 0.00019248619637823855, + "loss": 0.3299, + "step": 2920 + }, + { + "epoch": 0.11313178114985134, + "grad_norm": 2.2509396076202393, + "learning_rate": 0.00019246045535863677, + "loss": 0.6027, + "step": 2930 + }, + { + "epoch": 0.11351789644387814, + "grad_norm": 2.7600464820861816, + "learning_rate": 0.00019243471433903498, + "loss": 0.3864, + "step": 2940 + }, + { + "epoch": 0.11390401173790494, + "grad_norm": 5.956289768218994, + "learning_rate": 0.0001924089733194332, + "loss": 0.4669, + "step": 2950 + }, + { + "epoch": 0.11429012703193174, + "grad_norm": 4.651761531829834, + "learning_rate": 0.0001923832322998314, + "loss": 0.5774, + "step": 2960 + }, + { + "epoch": 0.11467624232595854, + "grad_norm": 1.1770590543746948, + "learning_rate": 0.00019235749128022962, + "loss": 0.3951, + "step": 2970 + }, + { + "epoch": 0.11506235761998533, + "grad_norm": 0.8117956519126892, + "learning_rate": 0.00019233175026062783, + "loss": 0.3919, + "step": 2980 + }, + { + "epoch": 0.11544847291401213, + "grad_norm": 1.318812608718872, + "learning_rate": 0.00019230600924102605, + "loss": 0.2905, + "step": 2990 + }, + { + "epoch": 0.11583458820803892, + "grad_norm": 1.981382966041565, + "learning_rate": 0.00019228026822142426, + "loss": 0.5312, + "step": 3000 + }, + { + "epoch": 0.11622070350206572, + "grad_norm": 1.9083003997802734, + "learning_rate": 0.00019225452720182247, + "loss": 0.3129, + "step": 3010 + }, + { + "epoch": 0.11660681879609251, + "grad_norm": 2.7581653594970703, + "learning_rate": 0.0001922287861822207, + "loss": 0.3954, + "step": 3020 + }, + { + "epoch": 0.11699293409011931, + "grad_norm": 1.065090537071228, + "learning_rate": 0.0001922030451626189, + "loss": 0.2722, + "step": 3030 + }, + { + "epoch": 0.1173790493841461, + "grad_norm": 0.600864827632904, + "learning_rate": 0.0001921773041430171, + "loss": 0.493, + "step": 3040 + }, + { + "epoch": 0.1177651646781729, + "grad_norm": 4.4449052810668945, + "learning_rate": 0.00019215156312341533, + "loss": 0.4982, + "step": 3050 + }, + { + "epoch": 0.1181512799721997, + "grad_norm": 3.34476637840271, + "learning_rate": 0.00019212582210381354, + "loss": 0.3204, + "step": 3060 + }, + { + "epoch": 0.11853739526622649, + "grad_norm": 1.7432445287704468, + "learning_rate": 0.00019210008108421175, + "loss": 0.6601, + "step": 3070 + }, + { + "epoch": 0.1189235105602533, + "grad_norm": 1.908324956893921, + "learning_rate": 0.00019207434006460997, + "loss": 0.5947, + "step": 3080 + }, + { + "epoch": 
0.1193096258542801, + "grad_norm": 5.373056888580322, + "learning_rate": 0.00019204859904500818, + "loss": 0.5169, + "step": 3090 + }, + { + "epoch": 0.11969574114830689, + "grad_norm": 0.861535370349884, + "learning_rate": 0.0001920228580254064, + "loss": 0.3829, + "step": 3100 + }, + { + "epoch": 0.12008185644233368, + "grad_norm": 1.2700462341308594, + "learning_rate": 0.0001919971170058046, + "loss": 0.4475, + "step": 3110 + }, + { + "epoch": 0.12046797173636048, + "grad_norm": 2.9959444999694824, + "learning_rate": 0.00019197137598620282, + "loss": 0.4704, + "step": 3120 + }, + { + "epoch": 0.12085408703038727, + "grad_norm": 0.280109167098999, + "learning_rate": 0.00019194563496660103, + "loss": 0.3732, + "step": 3130 + }, + { + "epoch": 0.12124020232441407, + "grad_norm": 0.9746024012565613, + "learning_rate": 0.00019191989394699925, + "loss": 0.4693, + "step": 3140 + }, + { + "epoch": 0.12162631761844087, + "grad_norm": 1.7267721891403198, + "learning_rate": 0.00019189415292739746, + "loss": 0.4509, + "step": 3150 + }, + { + "epoch": 0.12201243291246766, + "grad_norm": 2.1759033203125, + "learning_rate": 0.00019186841190779567, + "loss": 0.428, + "step": 3160 + }, + { + "epoch": 0.12239854820649446, + "grad_norm": 1.270711064338684, + "learning_rate": 0.0001918426708881939, + "loss": 0.4262, + "step": 3170 + }, + { + "epoch": 0.12278466350052125, + "grad_norm": 3.7549123764038086, + "learning_rate": 0.0001918169298685921, + "loss": 0.4758, + "step": 3180 + }, + { + "epoch": 0.12317077879454805, + "grad_norm": 1.6550017595291138, + "learning_rate": 0.0001917911888489903, + "loss": 0.452, + "step": 3190 + }, + { + "epoch": 0.12355689408857484, + "grad_norm": 3.7151713371276855, + "learning_rate": 0.00019176544782938853, + "loss": 0.4844, + "step": 3200 + }, + { + "epoch": 0.12394300938260165, + "grad_norm": 0.5354440808296204, + "learning_rate": 0.00019173970680978674, + "loss": 0.4432, + "step": 3210 + }, + { + "epoch": 0.12432912467662845, + "grad_norm": 3.2494261264801025, + "learning_rate": 0.00019171396579018498, + "loss": 0.587, + "step": 3220 + }, + { + "epoch": 0.12471523997065524, + "grad_norm": 1.2129877805709839, + "learning_rate": 0.00019168822477058317, + "loss": 0.4662, + "step": 3230 + }, + { + "epoch": 0.12510135526468202, + "grad_norm": 3.723402500152588, + "learning_rate": 0.00019166248375098138, + "loss": 0.5261, + "step": 3240 + }, + { + "epoch": 0.12548747055870882, + "grad_norm": 1.596259593963623, + "learning_rate": 0.0001916367427313796, + "loss": 0.2802, + "step": 3250 + }, + { + "epoch": 0.12587358585273561, + "grad_norm": 5.5710320472717285, + "learning_rate": 0.0001916110017117778, + "loss": 0.5246, + "step": 3260 + }, + { + "epoch": 0.1262597011467624, + "grad_norm": 4.490183353424072, + "learning_rate": 0.00019158526069217602, + "loss": 0.4929, + "step": 3270 + }, + { + "epoch": 0.12664581644078923, + "grad_norm": 2.482572555541992, + "learning_rate": 0.00019155951967257423, + "loss": 0.3677, + "step": 3280 + }, + { + "epoch": 0.12703193173481603, + "grad_norm": 3.348520517349243, + "learning_rate": 0.00019153377865297247, + "loss": 0.6471, + "step": 3290 + }, + { + "epoch": 0.12741804702884282, + "grad_norm": 7.735306262969971, + "learning_rate": 0.00019150803763337066, + "loss": 0.6057, + "step": 3300 + }, + { + "epoch": 0.12780416232286962, + "grad_norm": 2.120649576187134, + "learning_rate": 0.00019148229661376887, + "loss": 0.5408, + "step": 3310 + }, + { + "epoch": 0.12819027761689641, + "grad_norm": 10.259540557861328, + "learning_rate": 
0.00019145655559416709, + "loss": 0.4753, + "step": 3320 + }, + { + "epoch": 0.1285763929109232, + "grad_norm": 4.094576358795166, + "learning_rate": 0.0001914308145745653, + "loss": 0.3832, + "step": 3330 + }, + { + "epoch": 0.12896250820495, + "grad_norm": 3.1248559951782227, + "learning_rate": 0.00019140507355496354, + "loss": 0.5631, + "step": 3340 + }, + { + "epoch": 0.1293486234989768, + "grad_norm": 1.2975168228149414, + "learning_rate": 0.00019137933253536173, + "loss": 0.5158, + "step": 3350 + }, + { + "epoch": 0.1297347387930036, + "grad_norm": 3.2515244483947754, + "learning_rate": 0.00019135359151575997, + "loss": 0.4176, + "step": 3360 + }, + { + "epoch": 0.1301208540870304, + "grad_norm": 2.287757396697998, + "learning_rate": 0.00019132785049615815, + "loss": 0.5316, + "step": 3370 + }, + { + "epoch": 0.1305069693810572, + "grad_norm": 8.668967247009277, + "learning_rate": 0.00019130210947655637, + "loss": 0.6653, + "step": 3380 + }, + { + "epoch": 0.13089308467508398, + "grad_norm": 4.751536846160889, + "learning_rate": 0.00019127636845695458, + "loss": 0.4508, + "step": 3390 + }, + { + "epoch": 0.13127919996911078, + "grad_norm": 3.240792751312256, + "learning_rate": 0.0001912506274373528, + "loss": 0.441, + "step": 3400 + }, + { + "epoch": 0.13166531526313757, + "grad_norm": 2.146261215209961, + "learning_rate": 0.00019122488641775103, + "loss": 0.3394, + "step": 3410 + }, + { + "epoch": 0.13205143055716437, + "grad_norm": 2.259693145751953, + "learning_rate": 0.00019119914539814922, + "loss": 0.4348, + "step": 3420 + }, + { + "epoch": 0.13243754585119116, + "grad_norm": 1.8136098384857178, + "learning_rate": 0.00019117340437854746, + "loss": 0.4441, + "step": 3430 + }, + { + "epoch": 0.13282366114521796, + "grad_norm": 1.7324503660202026, + "learning_rate": 0.00019114766335894565, + "loss": 0.4725, + "step": 3440 + }, + { + "epoch": 0.13320977643924475, + "grad_norm": 4.709383487701416, + "learning_rate": 0.00019112192233934389, + "loss": 0.5383, + "step": 3450 + }, + { + "epoch": 0.13359589173327155, + "grad_norm": 0.3468118906021118, + "learning_rate": 0.00019109618131974207, + "loss": 0.3228, + "step": 3460 + }, + { + "epoch": 0.13398200702729834, + "grad_norm": 6.66448974609375, + "learning_rate": 0.00019107044030014029, + "loss": 0.4128, + "step": 3470 + }, + { + "epoch": 0.13436812232132514, + "grad_norm": 1.6971935033798218, + "learning_rate": 0.00019104469928053853, + "loss": 0.5061, + "step": 3480 + }, + { + "epoch": 0.13475423761535194, + "grad_norm": 0.9180198311805725, + "learning_rate": 0.0001910189582609367, + "loss": 0.4445, + "step": 3490 + }, + { + "epoch": 0.13514035290937873, + "grad_norm": 3.1244235038757324, + "learning_rate": 0.00019099321724133495, + "loss": 0.4099, + "step": 3500 + }, + { + "epoch": 0.13552646820340553, + "grad_norm": 2.7192864418029785, + "learning_rate": 0.00019096747622173314, + "loss": 0.5004, + "step": 3510 + }, + { + "epoch": 0.13591258349743232, + "grad_norm": 2.0905699729919434, + "learning_rate": 0.00019094173520213138, + "loss": 0.4277, + "step": 3520 + }, + { + "epoch": 0.13629869879145912, + "grad_norm": 3.3753092288970947, + "learning_rate": 0.0001909159941825296, + "loss": 0.4129, + "step": 3530 + }, + { + "epoch": 0.13668481408548594, + "grad_norm": 4.199211120605469, + "learning_rate": 0.00019089025316292778, + "loss": 0.5734, + "step": 3540 + }, + { + "epoch": 0.13707092937951273, + "grad_norm": 2.0411245822906494, + "learning_rate": 0.00019086451214332602, + "loss": 0.3018, + "step": 3550 + }, + { + "epoch": 
0.13745704467353953, + "grad_norm": 17.236717224121094, + "learning_rate": 0.0001908387711237242, + "loss": 0.4527, + "step": 3560 + }, + { + "epoch": 0.13784315996756633, + "grad_norm": 1.4575644731521606, + "learning_rate": 0.00019081303010412245, + "loss": 0.3773, + "step": 3570 + }, + { + "epoch": 0.13822927526159312, + "grad_norm": 3.926090717315674, + "learning_rate": 0.00019078728908452063, + "loss": 0.5316, + "step": 3580 + }, + { + "epoch": 0.13861539055561992, + "grad_norm": 3.1841864585876465, + "learning_rate": 0.00019076154806491887, + "loss": 0.3705, + "step": 3590 + }, + { + "epoch": 0.1390015058496467, + "grad_norm": 4.08506441116333, + "learning_rate": 0.00019073580704531709, + "loss": 0.4941, + "step": 3600 + }, + { + "epoch": 0.1393876211436735, + "grad_norm": 3.063154458999634, + "learning_rate": 0.00019071006602571527, + "loss": 0.4435, + "step": 3610 + }, + { + "epoch": 0.1397737364377003, + "grad_norm": 6.122230529785156, + "learning_rate": 0.0001906843250061135, + "loss": 0.5067, + "step": 3620 + }, + { + "epoch": 0.1401598517317271, + "grad_norm": 3.3089540004730225, + "learning_rate": 0.0001906585839865117, + "loss": 0.4329, + "step": 3630 + }, + { + "epoch": 0.1405459670257539, + "grad_norm": 1.7245008945465088, + "learning_rate": 0.00019063284296690994, + "loss": 0.4502, + "step": 3640 + }, + { + "epoch": 0.1409320823197807, + "grad_norm": 1.7759568691253662, + "learning_rate": 0.00019060710194730813, + "loss": 0.2379, + "step": 3650 + }, + { + "epoch": 0.14131819761380748, + "grad_norm": 0.432452529668808, + "learning_rate": 0.00019058136092770637, + "loss": 0.4277, + "step": 3660 + }, + { + "epoch": 0.14170431290783428, + "grad_norm": 3.311952829360962, + "learning_rate": 0.00019055561990810458, + "loss": 0.4558, + "step": 3670 + }, + { + "epoch": 0.14209042820186107, + "grad_norm": 1.9942964315414429, + "learning_rate": 0.00019052987888850277, + "loss": 0.3349, + "step": 3680 + }, + { + "epoch": 0.14247654349588787, + "grad_norm": 6.226424217224121, + "learning_rate": 0.000190504137868901, + "loss": 0.5809, + "step": 3690 + }, + { + "epoch": 0.14286265878991466, + "grad_norm": 6.223634719848633, + "learning_rate": 0.0001904783968492992, + "loss": 0.5788, + "step": 3700 + }, + { + "epoch": 0.14324877408394146, + "grad_norm": 0.7370914220809937, + "learning_rate": 0.00019045265582969743, + "loss": 0.4834, + "step": 3710 + }, + { + "epoch": 0.14363488937796826, + "grad_norm": 1.745880365371704, + "learning_rate": 0.00019042691481009565, + "loss": 0.6995, + "step": 3720 + }, + { + "epoch": 0.14402100467199505, + "grad_norm": 0.8839595913887024, + "learning_rate": 0.00019040117379049386, + "loss": 0.3526, + "step": 3730 + }, + { + "epoch": 0.14440711996602185, + "grad_norm": 1.1224008798599243, + "learning_rate": 0.00019037543277089207, + "loss": 0.3558, + "step": 3740 + }, + { + "epoch": 0.14479323526004864, + "grad_norm": 1.0473041534423828, + "learning_rate": 0.00019034969175129026, + "loss": 0.2465, + "step": 3750 + }, + { + "epoch": 0.14517935055407544, + "grad_norm": 3.83192777633667, + "learning_rate": 0.0001903239507316885, + "loss": 0.4832, + "step": 3760 + }, + { + "epoch": 0.14556546584810223, + "grad_norm": 3.323885440826416, + "learning_rate": 0.00019029820971208669, + "loss": 0.4924, + "step": 3770 + }, + { + "epoch": 0.14595158114212903, + "grad_norm": 3.2334187030792236, + "learning_rate": 0.00019027246869248493, + "loss": 0.5053, + "step": 3780 + }, + { + "epoch": 0.14633769643615582, + "grad_norm": 2.280498743057251, + "learning_rate": 
0.00019024672767288314, + "loss": 0.554, + "step": 3790 + }, + { + "epoch": 0.14672381173018265, + "grad_norm": 4.546648979187012, + "learning_rate": 0.00019022098665328135, + "loss": 0.3999, + "step": 3800 + }, + { + "epoch": 0.14710992702420944, + "grad_norm": 0.6303244829177856, + "learning_rate": 0.00019019524563367957, + "loss": 0.4481, + "step": 3810 + }, + { + "epoch": 0.14749604231823624, + "grad_norm": 2.605196475982666, + "learning_rate": 0.00019016950461407775, + "loss": 0.3561, + "step": 3820 + }, + { + "epoch": 0.14788215761226303, + "grad_norm": 3.0562639236450195, + "learning_rate": 0.000190143763594476, + "loss": 0.5903, + "step": 3830 + }, + { + "epoch": 0.14826827290628983, + "grad_norm": 11.164155006408691, + "learning_rate": 0.0001901180225748742, + "loss": 0.4299, + "step": 3840 + }, + { + "epoch": 0.14865438820031662, + "grad_norm": 4.996811866760254, + "learning_rate": 0.00019009228155527242, + "loss": 0.4423, + "step": 3850 + }, + { + "epoch": 0.14904050349434342, + "grad_norm": 2.627272844314575, + "learning_rate": 0.00019006654053567063, + "loss": 0.4875, + "step": 3860 + }, + { + "epoch": 0.1494266187883702, + "grad_norm": 2.6532809734344482, + "learning_rate": 0.00019004079951606885, + "loss": 0.5221, + "step": 3870 + }, + { + "epoch": 0.149812734082397, + "grad_norm": 5.821976661682129, + "learning_rate": 0.00019001505849646706, + "loss": 0.4793, + "step": 3880 + }, + { + "epoch": 0.1501988493764238, + "grad_norm": 2.888029098510742, + "learning_rate": 0.00018998931747686524, + "loss": 0.5784, + "step": 3890 + }, + { + "epoch": 0.1505849646704506, + "grad_norm": 0.9147624969482422, + "learning_rate": 0.00018996357645726349, + "loss": 0.5533, + "step": 3900 + }, + { + "epoch": 0.1509710799644774, + "grad_norm": 2.6088199615478516, + "learning_rate": 0.0001899378354376617, + "loss": 0.5028, + "step": 3910 + }, + { + "epoch": 0.1513571952585042, + "grad_norm": 3.8208296298980713, + "learning_rate": 0.0001899120944180599, + "loss": 0.4934, + "step": 3920 + }, + { + "epoch": 0.15174331055253099, + "grad_norm": 2.8711328506469727, + "learning_rate": 0.00018988635339845813, + "loss": 0.4417, + "step": 3930 + }, + { + "epoch": 0.15212942584655778, + "grad_norm": 2.922855854034424, + "learning_rate": 0.00018986061237885634, + "loss": 0.5303, + "step": 3940 + }, + { + "epoch": 0.15251554114058458, + "grad_norm": 2.52575945854187, + "learning_rate": 0.00018983487135925455, + "loss": 0.397, + "step": 3950 + }, + { + "epoch": 0.15290165643461137, + "grad_norm": 3.3369996547698975, + "learning_rate": 0.00018980913033965277, + "loss": 0.4172, + "step": 3960 + }, + { + "epoch": 0.15328777172863817, + "grad_norm": 1.7678214311599731, + "learning_rate": 0.00018978338932005098, + "loss": 0.3122, + "step": 3970 + }, + { + "epoch": 0.15367388702266496, + "grad_norm": 3.3293211460113525, + "learning_rate": 0.0001897576483004492, + "loss": 0.6864, + "step": 3980 + }, + { + "epoch": 0.15406000231669176, + "grad_norm": 1.4911530017852783, + "learning_rate": 0.0001897319072808474, + "loss": 0.3888, + "step": 3990 + }, + { + "epoch": 0.15444611761071855, + "grad_norm": 1.4884055852890015, + "learning_rate": 0.00018970616626124562, + "loss": 0.3952, + "step": 4000 + }, + { + "epoch": 0.15483223290474535, + "grad_norm": 1.2745383977890015, + "learning_rate": 0.00018968042524164383, + "loss": 0.3647, + "step": 4010 + }, + { + "epoch": 0.15521834819877214, + "grad_norm": 7.799386024475098, + "learning_rate": 0.00018965468422204205, + "loss": 0.5554, + "step": 4020 + }, + { + "epoch": 
0.15560446349279894, + "grad_norm": 2.4778294563293457, + "learning_rate": 0.00018962894320244026, + "loss": 0.662, + "step": 4030 + }, + { + "epoch": 0.15599057878682573, + "grad_norm": 0.8415629267692566, + "learning_rate": 0.00018960320218283847, + "loss": 0.4317, + "step": 4040 + }, + { + "epoch": 0.15637669408085253, + "grad_norm": 4.507715702056885, + "learning_rate": 0.00018957746116323669, + "loss": 0.4512, + "step": 4050 + }, + { + "epoch": 0.15676280937487935, + "grad_norm": 3.5790421962738037, + "learning_rate": 0.0001895517201436349, + "loss": 0.4022, + "step": 4060 + }, + { + "epoch": 0.15714892466890615, + "grad_norm": 3.7266156673431396, + "learning_rate": 0.0001895259791240331, + "loss": 0.3945, + "step": 4070 + }, + { + "epoch": 0.15753503996293294, + "grad_norm": 7.909580230712891, + "learning_rate": 0.00018950023810443133, + "loss": 0.3726, + "step": 4080 + }, + { + "epoch": 0.15792115525695974, + "grad_norm": 2.2439534664154053, + "learning_rate": 0.00018947449708482954, + "loss": 0.4157, + "step": 4090 + }, + { + "epoch": 0.15830727055098653, + "grad_norm": 1.6076972484588623, + "learning_rate": 0.00018944875606522775, + "loss": 0.2363, + "step": 4100 + }, + { + "epoch": 0.15869338584501333, + "grad_norm": 3.7495157718658447, + "learning_rate": 0.00018942301504562596, + "loss": 0.4908, + "step": 4110 + }, + { + "epoch": 0.15907950113904012, + "grad_norm": 0.2942291796207428, + "learning_rate": 0.00018939727402602418, + "loss": 0.4915, + "step": 4120 + }, + { + "epoch": 0.15946561643306692, + "grad_norm": 1.3951829671859741, + "learning_rate": 0.0001893715330064224, + "loss": 0.4585, + "step": 4130 + }, + { + "epoch": 0.15985173172709372, + "grad_norm": 0.4405671954154968, + "learning_rate": 0.0001893457919868206, + "loss": 0.2839, + "step": 4140 + }, + { + "epoch": 0.1602378470211205, + "grad_norm": 1.0917588472366333, + "learning_rate": 0.00018932005096721882, + "loss": 0.401, + "step": 4150 + }, + { + "epoch": 0.1606239623151473, + "grad_norm": 1.6183397769927979, + "learning_rate": 0.00018929430994761703, + "loss": 0.5555, + "step": 4160 + }, + { + "epoch": 0.1610100776091741, + "grad_norm": 2.0909583568573, + "learning_rate": 0.00018926856892801524, + "loss": 0.52, + "step": 4170 + }, + { + "epoch": 0.1613961929032009, + "grad_norm": 2.901456356048584, + "learning_rate": 0.00018924282790841346, + "loss": 0.603, + "step": 4180 + }, + { + "epoch": 0.1617823081972277, + "grad_norm": 7.230431079864502, + "learning_rate": 0.00018921708688881167, + "loss": 0.6189, + "step": 4190 + }, + { + "epoch": 0.1621684234912545, + "grad_norm": 6.773900508880615, + "learning_rate": 0.00018919134586920988, + "loss": 0.2494, + "step": 4200 + }, + { + "epoch": 0.16255453878528128, + "grad_norm": 0.8557988405227661, + "learning_rate": 0.0001891656048496081, + "loss": 0.2617, + "step": 4210 + }, + { + "epoch": 0.16294065407930808, + "grad_norm": 1.3747268915176392, + "learning_rate": 0.00018913986383000634, + "loss": 0.4189, + "step": 4220 + }, + { + "epoch": 0.16332676937333487, + "grad_norm": 4.072261810302734, + "learning_rate": 0.00018911412281040452, + "loss": 0.5473, + "step": 4230 + }, + { + "epoch": 0.16371288466736167, + "grad_norm": 2.7210185527801514, + "learning_rate": 0.00018908838179080274, + "loss": 0.3501, + "step": 4240 + }, + { + "epoch": 0.16409899996138846, + "grad_norm": 2.276454448699951, + "learning_rate": 0.00018906264077120095, + "loss": 0.3078, + "step": 4250 + }, + { + "epoch": 0.16448511525541526, + "grad_norm": 3.586536169052124, + "learning_rate": 
0.00018903689975159916, + "loss": 0.3856, + "step": 4260 + }, + { + "epoch": 0.16487123054944205, + "grad_norm": 2.199673891067505, + "learning_rate": 0.00018901115873199738, + "loss": 0.3677, + "step": 4270 + }, + { + "epoch": 0.16525734584346885, + "grad_norm": 2.8410561084747314, + "learning_rate": 0.0001889854177123956, + "loss": 0.6101, + "step": 4280 + }, + { + "epoch": 0.16564346113749565, + "grad_norm": 3.9638853073120117, + "learning_rate": 0.00018895967669279383, + "loss": 0.5066, + "step": 4290 + }, + { + "epoch": 0.16602957643152244, + "grad_norm": 1.2070738077163696, + "learning_rate": 0.00018893393567319202, + "loss": 0.385, + "step": 4300 + }, + { + "epoch": 0.16641569172554924, + "grad_norm": 1.0531187057495117, + "learning_rate": 0.00018890819465359023, + "loss": 0.3608, + "step": 4310 + }, + { + "epoch": 0.16680180701957603, + "grad_norm": 1.1998246908187866, + "learning_rate": 0.00018888245363398844, + "loss": 0.4624, + "step": 4320 + }, + { + "epoch": 0.16718792231360285, + "grad_norm": 2.126063346862793, + "learning_rate": 0.00018885671261438666, + "loss": 0.6076, + "step": 4330 + }, + { + "epoch": 0.16757403760762965, + "grad_norm": 1.5854765176773071, + "learning_rate": 0.0001888309715947849, + "loss": 0.4817, + "step": 4340 + }, + { + "epoch": 0.16796015290165645, + "grad_norm": 6.630712509155273, + "learning_rate": 0.00018880523057518308, + "loss": 0.4098, + "step": 4350 + }, + { + "epoch": 0.16834626819568324, + "grad_norm": 2.060789108276367, + "learning_rate": 0.00018877948955558132, + "loss": 0.3523, + "step": 4360 + }, + { + "epoch": 0.16873238348971004, + "grad_norm": 2.2551252841949463, + "learning_rate": 0.0001887537485359795, + "loss": 0.3095, + "step": 4370 + }, + { + "epoch": 0.16911849878373683, + "grad_norm": 3.736640453338623, + "learning_rate": 0.00018872800751637772, + "loss": 0.3812, + "step": 4380 + }, + { + "epoch": 0.16950461407776363, + "grad_norm": 1.9971100091934204, + "learning_rate": 0.00018870226649677594, + "loss": 0.3422, + "step": 4390 + }, + { + "epoch": 0.16989072937179042, + "grad_norm": 3.6577255725860596, + "learning_rate": 0.00018867652547717415, + "loss": 0.7857, + "step": 4400 + }, + { + "epoch": 0.17027684466581722, + "grad_norm": 2.166538715362549, + "learning_rate": 0.0001886507844575724, + "loss": 0.5596, + "step": 4410 + }, + { + "epoch": 0.170662959959844, + "grad_norm": 2.0177736282348633, + "learning_rate": 0.00018862504343797058, + "loss": 0.3197, + "step": 4420 + }, + { + "epoch": 0.1710490752538708, + "grad_norm": 0.29447808861732483, + "learning_rate": 0.00018859930241836882, + "loss": 0.5284, + "step": 4430 + }, + { + "epoch": 0.1714351905478976, + "grad_norm": 2.17985200881958, + "learning_rate": 0.000188573561398767, + "loss": 0.5188, + "step": 4440 + }, + { + "epoch": 0.1718213058419244, + "grad_norm": 2.87449049949646, + "learning_rate": 0.00018854782037916522, + "loss": 0.554, + "step": 4450 + }, + { + "epoch": 0.1722074211359512, + "grad_norm": 1.8865265846252441, + "learning_rate": 0.00018852207935956343, + "loss": 0.4338, + "step": 4460 + }, + { + "epoch": 0.172593536429978, + "grad_norm": 2.042337417602539, + "learning_rate": 0.00018849633833996164, + "loss": 0.3924, + "step": 4470 + }, + { + "epoch": 0.17297965172400478, + "grad_norm": 1.4254354238510132, + "learning_rate": 0.00018847059732035988, + "loss": 0.2607, + "step": 4480 + }, + { + "epoch": 0.17336576701803158, + "grad_norm": 2.611560344696045, + "learning_rate": 0.00018844485630075807, + "loss": 0.4967, + "step": 4490 + }, + { + "epoch": 
0.17375188231205838, + "grad_norm": 1.1008936166763306, + "learning_rate": 0.0001884191152811563, + "loss": 0.4109, + "step": 4500 + }, + { + "epoch": 0.17413799760608517, + "grad_norm": 0.8280178308486938, + "learning_rate": 0.0001883933742615545, + "loss": 0.6632, + "step": 4510 + }, + { + "epoch": 0.17452411290011197, + "grad_norm": 2.226020336151123, + "learning_rate": 0.0001883676332419527, + "loss": 0.4777, + "step": 4520 + }, + { + "epoch": 0.17491022819413876, + "grad_norm": 1.6062042713165283, + "learning_rate": 0.00018834189222235095, + "loss": 0.4671, + "step": 4530 + }, + { + "epoch": 0.17529634348816556, + "grad_norm": 3.9853012561798096, + "learning_rate": 0.00018831615120274914, + "loss": 0.4843, + "step": 4540 + }, + { + "epoch": 0.17568245878219235, + "grad_norm": 0.30268657207489014, + "learning_rate": 0.00018829041018314738, + "loss": 0.3922, + "step": 4550 + }, + { + "epoch": 0.17606857407621915, + "grad_norm": 6.283960342407227, + "learning_rate": 0.00018826466916354556, + "loss": 0.6106, + "step": 4560 + }, + { + "epoch": 0.17645468937024594, + "grad_norm": 1.4164658784866333, + "learning_rate": 0.0001882389281439438, + "loss": 0.3014, + "step": 4570 + }, + { + "epoch": 0.17684080466427274, + "grad_norm": 4.847668170928955, + "learning_rate": 0.000188213187124342, + "loss": 0.5216, + "step": 4580 + }, + { + "epoch": 0.17722691995829956, + "grad_norm": 3.683180332183838, + "learning_rate": 0.0001881874461047402, + "loss": 0.3268, + "step": 4590 + }, + { + "epoch": 0.17761303525232636, + "grad_norm": 1.053144097328186, + "learning_rate": 0.00018816170508513844, + "loss": 0.5229, + "step": 4600 + }, + { + "epoch": 0.17799915054635315, + "grad_norm": 0.29438719153404236, + "learning_rate": 0.00018813596406553663, + "loss": 0.4523, + "step": 4610 + }, + { + "epoch": 0.17838526584037995, + "grad_norm": 1.5682024955749512, + "learning_rate": 0.00018811022304593487, + "loss": 0.4367, + "step": 4620 + }, + { + "epoch": 0.17877138113440674, + "grad_norm": 1.462189793586731, + "learning_rate": 0.00018808448202633306, + "loss": 0.5086, + "step": 4630 + }, + { + "epoch": 0.17915749642843354, + "grad_norm": 0.7927210927009583, + "learning_rate": 0.0001880587410067313, + "loss": 0.4654, + "step": 4640 + }, + { + "epoch": 0.17954361172246033, + "grad_norm": 1.4543548822402954, + "learning_rate": 0.0001880329999871295, + "loss": 0.5005, + "step": 4650 + }, + { + "epoch": 0.17992972701648713, + "grad_norm": 1.5814868211746216, + "learning_rate": 0.00018800725896752772, + "loss": 0.4127, + "step": 4660 + }, + { + "epoch": 0.18031584231051392, + "grad_norm": 1.9244798421859741, + "learning_rate": 0.00018798151794792594, + "loss": 0.3796, + "step": 4670 + }, + { + "epoch": 0.18070195760454072, + "grad_norm": 1.8725996017456055, + "learning_rate": 0.00018795577692832412, + "loss": 0.4112, + "step": 4680 + }, + { + "epoch": 0.18108807289856751, + "grad_norm": 2.8138442039489746, + "learning_rate": 0.00018793003590872236, + "loss": 0.6117, + "step": 4690 + }, + { + "epoch": 0.1814741881925943, + "grad_norm": 3.4465060234069824, + "learning_rate": 0.00018790429488912055, + "loss": 0.4223, + "step": 4700 + }, + { + "epoch": 0.1818603034866211, + "grad_norm": 4.431785583496094, + "learning_rate": 0.0001878785538695188, + "loss": 0.54, + "step": 4710 + }, + { + "epoch": 0.1822464187806479, + "grad_norm": 6.951846599578857, + "learning_rate": 0.000187852812849917, + "loss": 0.3702, + "step": 4720 + }, + { + "epoch": 0.1826325340746747, + "grad_norm": 1.0188024044036865, + "learning_rate": 
0.00018782707183031522, + "loss": 0.2715, + "step": 4730 + }, + { + "epoch": 0.1830186493687015, + "grad_norm": 0.3875834047794342, + "learning_rate": 0.00018780133081071343, + "loss": 0.4208, + "step": 4740 + }, + { + "epoch": 0.1834047646627283, + "grad_norm": 2.7475740909576416, + "learning_rate": 0.00018777558979111162, + "loss": 0.3613, + "step": 4750 + }, + { + "epoch": 0.18379087995675508, + "grad_norm": 2.553227186203003, + "learning_rate": 0.00018774984877150986, + "loss": 0.4781, + "step": 4760 + }, + { + "epoch": 0.18417699525078188, + "grad_norm": 2.005154609680176, + "learning_rate": 0.00018772410775190804, + "loss": 0.3805, + "step": 4770 + }, + { + "epoch": 0.18456311054480867, + "grad_norm": 0.7380127310752869, + "learning_rate": 0.00018769836673230628, + "loss": 0.3679, + "step": 4780 + }, + { + "epoch": 0.18494922583883547, + "grad_norm": 3.6547505855560303, + "learning_rate": 0.0001876726257127045, + "loss": 0.4502, + "step": 4790 + }, + { + "epoch": 0.18533534113286226, + "grad_norm": 2.232980728149414, + "learning_rate": 0.0001876468846931027, + "loss": 0.4628, + "step": 4800 + }, + { + "epoch": 0.18572145642688906, + "grad_norm": 6.521275043487549, + "learning_rate": 0.00018762114367350092, + "loss": 0.4765, + "step": 4810 + }, + { + "epoch": 0.18610757172091585, + "grad_norm": 1.6310979127883911, + "learning_rate": 0.0001875954026538991, + "loss": 0.4039, + "step": 4820 + }, + { + "epoch": 0.18649368701494265, + "grad_norm": 1.1469775438308716, + "learning_rate": 0.00018756966163429735, + "loss": 0.4195, + "step": 4830 + }, + { + "epoch": 0.18687980230896944, + "grad_norm": 0.7688332200050354, + "learning_rate": 0.00018754392061469556, + "loss": 0.264, + "step": 4840 + }, + { + "epoch": 0.18726591760299627, + "grad_norm": 3.3422155380249023, + "learning_rate": 0.00018751817959509378, + "loss": 0.5275, + "step": 4850 + }, + { + "epoch": 0.18765203289702306, + "grad_norm": 1.517876386642456, + "learning_rate": 0.000187492438575492, + "loss": 0.4567, + "step": 4860 + }, + { + "epoch": 0.18803814819104986, + "grad_norm": 1.2196050882339478, + "learning_rate": 0.0001874666975558902, + "loss": 0.4231, + "step": 4870 + }, + { + "epoch": 0.18842426348507665, + "grad_norm": 1.3325402736663818, + "learning_rate": 0.00018744095653628842, + "loss": 0.6325, + "step": 4880 + }, + { + "epoch": 0.18881037877910345, + "grad_norm": 6.098769664764404, + "learning_rate": 0.0001874152155166866, + "loss": 0.576, + "step": 4890 + }, + { + "epoch": 0.18919649407313024, + "grad_norm": 2.602363348007202, + "learning_rate": 0.00018738947449708484, + "loss": 0.3237, + "step": 4900 + }, + { + "epoch": 0.18958260936715704, + "grad_norm": 0.970106303691864, + "learning_rate": 0.00018736373347748306, + "loss": 0.409, + "step": 4910 + }, + { + "epoch": 0.18996872466118384, + "grad_norm": 3.2592012882232666, + "learning_rate": 0.00018733799245788127, + "loss": 0.408, + "step": 4920 + }, + { + "epoch": 0.19035483995521063, + "grad_norm": 0.31132128834724426, + "learning_rate": 0.00018731225143827948, + "loss": 0.2446, + "step": 4930 + }, + { + "epoch": 0.19074095524923743, + "grad_norm": 5.321741104125977, + "learning_rate": 0.0001872865104186777, + "loss": 0.4604, + "step": 4940 + }, + { + "epoch": 0.19112707054326422, + "grad_norm": 1.1165122985839844, + "learning_rate": 0.0001872607693990759, + "loss": 0.3605, + "step": 4950 + }, + { + "epoch": 0.19151318583729102, + "grad_norm": 0.8274110555648804, + "learning_rate": 0.0001872350283794741, + "loss": 0.2669, + "step": 4960 + }, + { + "epoch": 
0.1918993011313178, + "grad_norm": 2.8668346405029297, + "learning_rate": 0.00018720928735987234, + "loss": 0.4055, + "step": 4970 + }, + { + "epoch": 0.1922854164253446, + "grad_norm": 3.411841630935669, + "learning_rate": 0.00018718354634027055, + "loss": 0.5989, + "step": 4980 + }, + { + "epoch": 0.1926715317193714, + "grad_norm": 0.18740829825401306, + "learning_rate": 0.00018715780532066876, + "loss": 0.3805, + "step": 4990 + }, + { + "epoch": 0.1930576470133982, + "grad_norm": 1.0823473930358887, + "learning_rate": 0.00018713206430106698, + "loss": 0.2854, + "step": 5000 + }, + { + "epoch": 0.193443762307425, + "grad_norm": 1.9816405773162842, + "learning_rate": 0.0001871063232814652, + "loss": 0.3771, + "step": 5010 + }, + { + "epoch": 0.1938298776014518, + "grad_norm": 5.267081260681152, + "learning_rate": 0.0001870805822618634, + "loss": 0.3085, + "step": 5020 + }, + { + "epoch": 0.19421599289547858, + "grad_norm": 5.706038475036621, + "learning_rate": 0.00018705484124226162, + "loss": 0.484, + "step": 5030 + }, + { + "epoch": 0.19460210818950538, + "grad_norm": 1.3357723951339722, + "learning_rate": 0.00018702910022265983, + "loss": 0.2161, + "step": 5040 + }, + { + "epoch": 0.19498822348353217, + "grad_norm": 1.0626447200775146, + "learning_rate": 0.00018700335920305804, + "loss": 0.3491, + "step": 5050 + }, + { + "epoch": 0.19537433877755897, + "grad_norm": 2.441228151321411, + "learning_rate": 0.00018697761818345626, + "loss": 0.3975, + "step": 5060 + }, + { + "epoch": 0.19576045407158577, + "grad_norm": 2.6739327907562256, + "learning_rate": 0.00018695187716385447, + "loss": 0.3418, + "step": 5070 + }, + { + "epoch": 0.19614656936561256, + "grad_norm": 2.3216919898986816, + "learning_rate": 0.00018692613614425268, + "loss": 0.6265, + "step": 5080 + }, + { + "epoch": 0.19653268465963936, + "grad_norm": 3.9119021892547607, + "learning_rate": 0.0001869003951246509, + "loss": 0.2982, + "step": 5090 + }, + { + "epoch": 0.19691879995366615, + "grad_norm": 5.744061470031738, + "learning_rate": 0.0001868746541050491, + "loss": 0.4048, + "step": 5100 + }, + { + "epoch": 0.19730491524769297, + "grad_norm": 8.512910842895508, + "learning_rate": 0.00018684891308544732, + "loss": 0.3598, + "step": 5110 + }, + { + "epoch": 0.19769103054171977, + "grad_norm": 1.6382296085357666, + "learning_rate": 0.00018682317206584554, + "loss": 0.2121, + "step": 5120 + }, + { + "epoch": 0.19807714583574657, + "grad_norm": 2.1593070030212402, + "learning_rate": 0.00018679743104624375, + "loss": 0.4914, + "step": 5130 + }, + { + "epoch": 0.19846326112977336, + "grad_norm": 3.067112445831299, + "learning_rate": 0.00018677169002664196, + "loss": 0.4171, + "step": 5140 + }, + { + "epoch": 0.19884937642380016, + "grad_norm": 1.9954415559768677, + "learning_rate": 0.00018674594900704018, + "loss": 0.5161, + "step": 5150 + }, + { + "epoch": 0.19923549171782695, + "grad_norm": 2.793346643447876, + "learning_rate": 0.0001867202079874384, + "loss": 0.4159, + "step": 5160 + }, + { + "epoch": 0.19962160701185375, + "grad_norm": 4.087403774261475, + "learning_rate": 0.0001866944669678366, + "loss": 0.3339, + "step": 5170 + }, + { + "epoch": 0.20000772230588054, + "grad_norm": 2.10153865814209, + "learning_rate": 0.00018666872594823482, + "loss": 0.4352, + "step": 5180 + }, + { + "epoch": 0.20039383759990734, + "grad_norm": 2.947117805480957, + "learning_rate": 0.00018664298492863303, + "loss": 0.305, + "step": 5190 + }, + { + "epoch": 0.20077995289393413, + "grad_norm": 1.2496302127838135, + "learning_rate": 
0.00018661724390903124, + "loss": 0.4578, + "step": 5200 + }, + { + "epoch": 0.20116606818796093, + "grad_norm": 0.5246118903160095, + "learning_rate": 0.00018659150288942946, + "loss": 0.7531, + "step": 5210 + }, + { + "epoch": 0.20155218348198772, + "grad_norm": 4.099668502807617, + "learning_rate": 0.00018656576186982767, + "loss": 0.3809, + "step": 5220 + }, + { + "epoch": 0.20193829877601452, + "grad_norm": 4.237419128417969, + "learning_rate": 0.00018654002085022588, + "loss": 0.3169, + "step": 5230 + }, + { + "epoch": 0.20232441407004131, + "grad_norm": 1.6228466033935547, + "learning_rate": 0.0001865142798306241, + "loss": 0.5832, + "step": 5240 + }, + { + "epoch": 0.2027105293640681, + "grad_norm": 4.567386627197266, + "learning_rate": 0.0001864885388110223, + "loss": 0.2177, + "step": 5250 + }, + { + "epoch": 0.2030966446580949, + "grad_norm": 1.4991040229797363, + "learning_rate": 0.00018646279779142052, + "loss": 0.3851, + "step": 5260 + }, + { + "epoch": 0.2034827599521217, + "grad_norm": 2.127082586288452, + "learning_rate": 0.00018643705677181874, + "loss": 0.4721, + "step": 5270 + }, + { + "epoch": 0.2038688752461485, + "grad_norm": 2.9149303436279297, + "learning_rate": 0.00018641131575221695, + "loss": 0.2556, + "step": 5280 + }, + { + "epoch": 0.2042549905401753, + "grad_norm": 0.06375914812088013, + "learning_rate": 0.00018638557473261516, + "loss": 0.3599, + "step": 5290 + }, + { + "epoch": 0.20464110583420209, + "grad_norm": 3.338331699371338, + "learning_rate": 0.00018635983371301338, + "loss": 0.4062, + "step": 5300 + }, + { + "epoch": 0.20502722112822888, + "grad_norm": 4.006681442260742, + "learning_rate": 0.0001863340926934116, + "loss": 0.4538, + "step": 5310 + }, + { + "epoch": 0.20541333642225568, + "grad_norm": 1.1406009197235107, + "learning_rate": 0.0001863083516738098, + "loss": 0.8432, + "step": 5320 + }, + { + "epoch": 0.20579945171628247, + "grad_norm": 9.281437873840332, + "learning_rate": 0.00018628261065420802, + "loss": 0.4538, + "step": 5330 + }, + { + "epoch": 0.20618556701030927, + "grad_norm": 3.1884214878082275, + "learning_rate": 0.00018625686963460626, + "loss": 0.3361, + "step": 5340 + }, + { + "epoch": 0.20657168230433606, + "grad_norm": 1.4311977624893188, + "learning_rate": 0.00018623112861500444, + "loss": 0.5519, + "step": 5350 + }, + { + "epoch": 0.20695779759836286, + "grad_norm": 3.574361801147461, + "learning_rate": 0.00018620538759540266, + "loss": 0.518, + "step": 5360 + }, + { + "epoch": 0.20734391289238968, + "grad_norm": 3.0186073780059814, + "learning_rate": 0.00018617964657580087, + "loss": 0.4204, + "step": 5370 + }, + { + "epoch": 0.20773002818641648, + "grad_norm": 2.832859754562378, + "learning_rate": 0.00018615390555619908, + "loss": 0.5736, + "step": 5380 + }, + { + "epoch": 0.20811614348044327, + "grad_norm": 2.2258200645446777, + "learning_rate": 0.0001861281645365973, + "loss": 0.8194, + "step": 5390 + }, + { + "epoch": 0.20850225877447007, + "grad_norm": 1.0975148677825928, + "learning_rate": 0.0001861024235169955, + "loss": 0.5235, + "step": 5400 + }, + { + "epoch": 0.20888837406849686, + "grad_norm": 2.597329616546631, + "learning_rate": 0.00018607668249739375, + "loss": 0.2798, + "step": 5410 + }, + { + "epoch": 0.20927448936252366, + "grad_norm": 1.3780876398086548, + "learning_rate": 0.00018605094147779194, + "loss": 0.4046, + "step": 5420 + }, + { + "epoch": 0.20966060465655045, + "grad_norm": 2.409886598587036, + "learning_rate": 0.00018602520045819018, + "loss": 0.3243, + "step": 5430 + }, + { + "epoch": 
0.21004671995057725, + "grad_norm": 1.0368077754974365, + "learning_rate": 0.00018599945943858836, + "loss": 0.4469, + "step": 5440 + }, + { + "epoch": 0.21043283524460404, + "grad_norm": 2.961658000946045, + "learning_rate": 0.00018597371841898658, + "loss": 0.5104, + "step": 5450 + }, + { + "epoch": 0.21081895053863084, + "grad_norm": 1.1599836349487305, + "learning_rate": 0.00018594797739938482, + "loss": 0.3422, + "step": 5460 + }, + { + "epoch": 0.21120506583265763, + "grad_norm": 3.293682336807251, + "learning_rate": 0.000185922236379783, + "loss": 0.3556, + "step": 5470 + }, + { + "epoch": 0.21159118112668443, + "grad_norm": 1.6923863887786865, + "learning_rate": 0.00018589649536018124, + "loss": 0.3084, + "step": 5480 + }, + { + "epoch": 0.21197729642071123, + "grad_norm": 3.7289531230926514, + "learning_rate": 0.00018587075434057943, + "loss": 0.4668, + "step": 5490 + }, + { + "epoch": 0.21236341171473802, + "grad_norm": 1.3744993209838867, + "learning_rate": 0.00018584501332097767, + "loss": 0.2984, + "step": 5500 + }, + { + "epoch": 0.21274952700876482, + "grad_norm": 1.4377775192260742, + "learning_rate": 0.00018581927230137586, + "loss": 0.2622, + "step": 5510 + }, + { + "epoch": 0.2131356423027916, + "grad_norm": 4.957859992980957, + "learning_rate": 0.00018579353128177407, + "loss": 0.5561, + "step": 5520 + }, + { + "epoch": 0.2135217575968184, + "grad_norm": 3.2645647525787354, + "learning_rate": 0.0001857677902621723, + "loss": 0.59, + "step": 5530 + }, + { + "epoch": 0.2139078728908452, + "grad_norm": 1.1365091800689697, + "learning_rate": 0.0001857420492425705, + "loss": 0.443, + "step": 5540 + }, + { + "epoch": 0.214293988184872, + "grad_norm": 3.187476396560669, + "learning_rate": 0.00018571630822296874, + "loss": 0.2612, + "step": 5550 + }, + { + "epoch": 0.2146801034788988, + "grad_norm": 2.6851940155029297, + "learning_rate": 0.00018569056720336692, + "loss": 0.4543, + "step": 5560 + }, + { + "epoch": 0.2150662187729256, + "grad_norm": 2.2613587379455566, + "learning_rate": 0.00018566482618376516, + "loss": 0.3185, + "step": 5570 + }, + { + "epoch": 0.21545233406695238, + "grad_norm": 1.292475700378418, + "learning_rate": 0.00018563908516416335, + "loss": 0.2794, + "step": 5580 + }, + { + "epoch": 0.21583844936097918, + "grad_norm": 2.0878446102142334, + "learning_rate": 0.00018561334414456156, + "loss": 0.3908, + "step": 5590 + }, + { + "epoch": 0.21622456465500597, + "grad_norm": 8.058819770812988, + "learning_rate": 0.0001855876031249598, + "loss": 0.6282, + "step": 5600 + }, + { + "epoch": 0.21661067994903277, + "grad_norm": 1.8231629133224487, + "learning_rate": 0.000185561862105358, + "loss": 0.4973, + "step": 5610 + }, + { + "epoch": 0.21699679524305956, + "grad_norm": 3.947242259979248, + "learning_rate": 0.00018553612108575623, + "loss": 0.4598, + "step": 5620 + }, + { + "epoch": 0.21738291053708636, + "grad_norm": 3.3258073329925537, + "learning_rate": 0.00018551038006615442, + "loss": 0.5266, + "step": 5630 + }, + { + "epoch": 0.21776902583111318, + "grad_norm": 2.301485300064087, + "learning_rate": 0.00018548463904655266, + "loss": 0.4339, + "step": 5640 + }, + { + "epoch": 0.21815514112513998, + "grad_norm": 4.4706878662109375, + "learning_rate": 0.00018545889802695087, + "loss": 0.5233, + "step": 5650 + }, + { + "epoch": 0.21854125641916677, + "grad_norm": 1.1203399896621704, + "learning_rate": 0.00018543315700734906, + "loss": 0.4547, + "step": 5660 + }, + { + "epoch": 0.21892737171319357, + "grad_norm": 0.3744584918022156, + "learning_rate": 
0.0001854074159877473, + "loss": 0.2524, + "step": 5670 + }, + { + "epoch": 0.21931348700722036, + "grad_norm": 2.7888870239257812, + "learning_rate": 0.00018538167496814548, + "loss": 0.411, + "step": 5680 + }, + { + "epoch": 0.21969960230124716, + "grad_norm": 4.9972429275512695, + "learning_rate": 0.00018535593394854372, + "loss": 0.6359, + "step": 5690 + }, + { + "epoch": 0.22008571759527396, + "grad_norm": 1.1321420669555664, + "learning_rate": 0.0001853301929289419, + "loss": 0.4068, + "step": 5700 + }, + { + "epoch": 0.22047183288930075, + "grad_norm": 1.9291785955429077, + "learning_rate": 0.00018530445190934015, + "loss": 0.5428, + "step": 5710 + }, + { + "epoch": 0.22085794818332755, + "grad_norm": 0.8663263916969299, + "learning_rate": 0.00018527871088973836, + "loss": 0.4662, + "step": 5720 + }, + { + "epoch": 0.22124406347735434, + "grad_norm": 3.039782762527466, + "learning_rate": 0.00018525296987013655, + "loss": 0.3045, + "step": 5730 + }, + { + "epoch": 0.22163017877138114, + "grad_norm": 1.3552179336547852, + "learning_rate": 0.0001852272288505348, + "loss": 0.3411, + "step": 5740 + }, + { + "epoch": 0.22201629406540793, + "grad_norm": 1.4136948585510254, + "learning_rate": 0.00018520148783093298, + "loss": 0.5517, + "step": 5750 + }, + { + "epoch": 0.22240240935943473, + "grad_norm": 2.463942766189575, + "learning_rate": 0.00018517574681133122, + "loss": 0.4681, + "step": 5760 + }, + { + "epoch": 0.22278852465346152, + "grad_norm": 0.9063917994499207, + "learning_rate": 0.0001851500057917294, + "loss": 0.4537, + "step": 5770 + }, + { + "epoch": 0.22317463994748832, + "grad_norm": 2.352678060531616, + "learning_rate": 0.00018512426477212764, + "loss": 0.4245, + "step": 5780 + }, + { + "epoch": 0.2235607552415151, + "grad_norm": 2.0424869060516357, + "learning_rate": 0.00018509852375252586, + "loss": 0.2892, + "step": 5790 + }, + { + "epoch": 0.2239468705355419, + "grad_norm": 2.7604904174804688, + "learning_rate": 0.00018507278273292404, + "loss": 0.3606, + "step": 5800 + }, + { + "epoch": 0.2243329858295687, + "grad_norm": 2.827798366546631, + "learning_rate": 0.00018504704171332228, + "loss": 0.3212, + "step": 5810 + }, + { + "epoch": 0.2247191011235955, + "grad_norm": 3.1988680362701416, + "learning_rate": 0.00018502130069372047, + "loss": 0.5649, + "step": 5820 + }, + { + "epoch": 0.2251052164176223, + "grad_norm": 1.8216092586517334, + "learning_rate": 0.0001849955596741187, + "loss": 0.2871, + "step": 5830 + }, + { + "epoch": 0.2254913317116491, + "grad_norm": 2.7595627307891846, + "learning_rate": 0.00018496981865451692, + "loss": 0.665, + "step": 5840 + }, + { + "epoch": 0.22587744700567589, + "grad_norm": 1.2395098209381104, + "learning_rate": 0.00018494407763491514, + "loss": 0.2504, + "step": 5850 + }, + { + "epoch": 0.22626356229970268, + "grad_norm": 0.6991098523139954, + "learning_rate": 0.00018491833661531335, + "loss": 0.2263, + "step": 5860 + }, + { + "epoch": 0.22664967759372948, + "grad_norm": 11.053647994995117, + "learning_rate": 0.00018489259559571156, + "loss": 0.5919, + "step": 5870 + }, + { + "epoch": 0.22703579288775627, + "grad_norm": 2.8663880825042725, + "learning_rate": 0.00018486685457610978, + "loss": 0.3399, + "step": 5880 + }, + { + "epoch": 0.22742190818178307, + "grad_norm": 1.4995262622833252, + "learning_rate": 0.00018484111355650796, + "loss": 0.4474, + "step": 5890 + }, + { + "epoch": 0.2278080234758099, + "grad_norm": 3.275681972503662, + "learning_rate": 0.0001848153725369062, + "loss": 0.4347, + "step": 5900 + }, + { + "epoch": 
0.22819413876983669, + "grad_norm": 14.772253036499023, + "learning_rate": 0.00018478963151730442, + "loss": 0.3705, + "step": 5910 + }, + { + "epoch": 0.22858025406386348, + "grad_norm": 3.184976816177368, + "learning_rate": 0.00018476389049770263, + "loss": 0.3866, + "step": 5920 + }, + { + "epoch": 0.22896636935789028, + "grad_norm": 2.310765504837036, + "learning_rate": 0.00018473814947810084, + "loss": 0.2717, + "step": 5930 + }, + { + "epoch": 0.22935248465191707, + "grad_norm": 2.061189889907837, + "learning_rate": 0.00018471240845849906, + "loss": 0.2054, + "step": 5940 + }, + { + "epoch": 0.22973859994594387, + "grad_norm": 10.815469741821289, + "learning_rate": 0.00018468666743889727, + "loss": 0.5868, + "step": 5950 + }, + { + "epoch": 0.23012471523997066, + "grad_norm": 1.7080497741699219, + "learning_rate": 0.00018466092641929548, + "loss": 0.236, + "step": 5960 + }, + { + "epoch": 0.23051083053399746, + "grad_norm": 7.389080047607422, + "learning_rate": 0.0001846351853996937, + "loss": 0.2752, + "step": 5970 + }, + { + "epoch": 0.23089694582802425, + "grad_norm": 2.9860422611236572, + "learning_rate": 0.0001846094443800919, + "loss": 0.3436, + "step": 5980 + }, + { + "epoch": 0.23128306112205105, + "grad_norm": 13.12328815460205, + "learning_rate": 0.00018458370336049012, + "loss": 0.3952, + "step": 5990 + }, + { + "epoch": 0.23166917641607784, + "grad_norm": 3.7130823135375977, + "learning_rate": 0.00018455796234088834, + "loss": 0.3658, + "step": 6000 + }, + { + "epoch": 0.23205529171010464, + "grad_norm": 1.8329843282699585, + "learning_rate": 0.00018453222132128655, + "loss": 0.4172, + "step": 6010 + }, + { + "epoch": 0.23244140700413143, + "grad_norm": 1.3583799600601196, + "learning_rate": 0.00018450648030168476, + "loss": 0.4005, + "step": 6020 + }, + { + "epoch": 0.23282752229815823, + "grad_norm": 3.1711816787719727, + "learning_rate": 0.00018448073928208297, + "loss": 0.3674, + "step": 6030 + }, + { + "epoch": 0.23321363759218502, + "grad_norm": 1.576937198638916, + "learning_rate": 0.0001844549982624812, + "loss": 0.3444, + "step": 6040 + }, + { + "epoch": 0.23359975288621182, + "grad_norm": 3.922267436981201, + "learning_rate": 0.0001844292572428794, + "loss": 0.5939, + "step": 6050 + }, + { + "epoch": 0.23398586818023862, + "grad_norm": 2.9851067066192627, + "learning_rate": 0.00018440351622327761, + "loss": 0.2387, + "step": 6060 + }, + { + "epoch": 0.2343719834742654, + "grad_norm": 2.1216888427734375, + "learning_rate": 0.00018437777520367583, + "loss": 0.3836, + "step": 6070 + }, + { + "epoch": 0.2347580987682922, + "grad_norm": 2.9788095951080322, + "learning_rate": 0.00018435203418407404, + "loss": 0.474, + "step": 6080 + }, + { + "epoch": 0.235144214062319, + "grad_norm": 1.0204919576644897, + "learning_rate": 0.00018432629316447225, + "loss": 0.2837, + "step": 6090 + }, + { + "epoch": 0.2355303293563458, + "grad_norm": 0.9091696739196777, + "learning_rate": 0.00018430055214487047, + "loss": 0.6203, + "step": 6100 + }, + { + "epoch": 0.2359164446503726, + "grad_norm": 0.25899162888526917, + "learning_rate": 0.00018427481112526868, + "loss": 0.4759, + "step": 6110 + }, + { + "epoch": 0.2363025599443994, + "grad_norm": 1.8625538349151611, + "learning_rate": 0.0001842490701056669, + "loss": 0.2992, + "step": 6120 + }, + { + "epoch": 0.23668867523842618, + "grad_norm": 1.586521863937378, + "learning_rate": 0.0001842233290860651, + "loss": 0.6122, + "step": 6130 + }, + { + "epoch": 0.23707479053245298, + "grad_norm": 2.387650966644287, + "learning_rate": 
0.00018419758806646332, + "loss": 0.3276, + "step": 6140 + }, + { + "epoch": 0.23746090582647977, + "grad_norm": 4.840515613555908, + "learning_rate": 0.00018417184704686153, + "loss": 0.6295, + "step": 6150 + }, + { + "epoch": 0.2378470211205066, + "grad_norm": 1.70024836063385, + "learning_rate": 0.00018414610602725975, + "loss": 0.2047, + "step": 6160 + }, + { + "epoch": 0.2382331364145334, + "grad_norm": 2.791619062423706, + "learning_rate": 0.00018412036500765796, + "loss": 0.4364, + "step": 6170 + }, + { + "epoch": 0.2386192517085602, + "grad_norm": 3.710066318511963, + "learning_rate": 0.00018409462398805617, + "loss": 0.4564, + "step": 6180 + }, + { + "epoch": 0.23900536700258698, + "grad_norm": 2.564347982406616, + "learning_rate": 0.0001840688829684544, + "loss": 0.3156, + "step": 6190 + }, + { + "epoch": 0.23939148229661378, + "grad_norm": 2.3921267986297607, + "learning_rate": 0.0001840431419488526, + "loss": 0.3483, + "step": 6200 + }, + { + "epoch": 0.23977759759064057, + "grad_norm": 1.4785810708999634, + "learning_rate": 0.00018401740092925081, + "loss": 0.4338, + "step": 6210 + }, + { + "epoch": 0.24016371288466737, + "grad_norm": 3.624790906906128, + "learning_rate": 0.00018399165990964903, + "loss": 0.7156, + "step": 6220 + }, + { + "epoch": 0.24054982817869416, + "grad_norm": 3.942161798477173, + "learning_rate": 0.00018396591889004724, + "loss": 0.3932, + "step": 6230 + }, + { + "epoch": 0.24093594347272096, + "grad_norm": 3.2236740589141846, + "learning_rate": 0.00018394017787044545, + "loss": 0.3933, + "step": 6240 + }, + { + "epoch": 0.24132205876674775, + "grad_norm": 2.5040500164031982, + "learning_rate": 0.00018391443685084367, + "loss": 0.5711, + "step": 6250 + }, + { + "epoch": 0.24170817406077455, + "grad_norm": 1.9934203624725342, + "learning_rate": 0.00018388869583124188, + "loss": 0.3074, + "step": 6260 + }, + { + "epoch": 0.24209428935480135, + "grad_norm": 3.702509641647339, + "learning_rate": 0.0001838629548116401, + "loss": 0.3454, + "step": 6270 + }, + { + "epoch": 0.24248040464882814, + "grad_norm": 2.076802968978882, + "learning_rate": 0.0001838372137920383, + "loss": 0.3044, + "step": 6280 + }, + { + "epoch": 0.24286651994285494, + "grad_norm": 5.798679351806641, + "learning_rate": 0.00018381147277243652, + "loss": 0.3396, + "step": 6290 + }, + { + "epoch": 0.24325263523688173, + "grad_norm": 4.698869705200195, + "learning_rate": 0.00018378573175283473, + "loss": 0.3735, + "step": 6300 + }, + { + "epoch": 0.24363875053090853, + "grad_norm": 3.029979705810547, + "learning_rate": 0.00018375999073323295, + "loss": 0.3891, + "step": 6310 + }, + { + "epoch": 0.24402486582493532, + "grad_norm": 2.5507185459136963, + "learning_rate": 0.00018373424971363116, + "loss": 0.4854, + "step": 6320 + }, + { + "epoch": 0.24441098111896212, + "grad_norm": 3.2052571773529053, + "learning_rate": 0.00018370850869402937, + "loss": 0.6789, + "step": 6330 + }, + { + "epoch": 0.2447970964129889, + "grad_norm": 1.9265435934066772, + "learning_rate": 0.00018368276767442761, + "loss": 0.4505, + "step": 6340 + }, + { + "epoch": 0.2451832117070157, + "grad_norm": 0.8391959071159363, + "learning_rate": 0.0001836570266548258, + "loss": 0.3432, + "step": 6350 + }, + { + "epoch": 0.2455693270010425, + "grad_norm": 3.4653851985931396, + "learning_rate": 0.00018363128563522401, + "loss": 0.3571, + "step": 6360 + }, + { + "epoch": 0.2459554422950693, + "grad_norm": 2.3033368587493896, + "learning_rate": 0.00018360554461562223, + "loss": 0.3625, + "step": 6370 + }, + { + "epoch": 
0.2463415575890961, + "grad_norm": 1.659408450126648, + "learning_rate": 0.00018357980359602044, + "loss": 0.5311, + "step": 6380 + }, + { + "epoch": 0.2467276728831229, + "grad_norm": 1.1839714050292969, + "learning_rate": 0.00018355406257641865, + "loss": 0.3905, + "step": 6390 + }, + { + "epoch": 0.24711378817714968, + "grad_norm": 0.49230822920799255, + "learning_rate": 0.00018352832155681687, + "loss": 0.4021, + "step": 6400 + }, + { + "epoch": 0.24749990347117648, + "grad_norm": 4.451594829559326, + "learning_rate": 0.0001835025805372151, + "loss": 0.4504, + "step": 6410 + }, + { + "epoch": 0.2478860187652033, + "grad_norm": 1.0058324337005615, + "learning_rate": 0.0001834768395176133, + "loss": 0.2636, + "step": 6420 + }, + { + "epoch": 0.2482721340592301, + "grad_norm": 2.7853894233703613, + "learning_rate": 0.0001834510984980115, + "loss": 0.47, + "step": 6430 + }, + { + "epoch": 0.2486582493532569, + "grad_norm": 2.730095148086548, + "learning_rate": 0.00018342535747840972, + "loss": 0.3941, + "step": 6440 + }, + { + "epoch": 0.2490443646472837, + "grad_norm": 2.4993178844451904, + "learning_rate": 0.00018339961645880793, + "loss": 0.5777, + "step": 6450 + }, + { + "epoch": 0.24943047994131048, + "grad_norm": 2.361525297164917, + "learning_rate": 0.00018337387543920617, + "loss": 0.3798, + "step": 6460 + }, + { + "epoch": 0.24981659523533728, + "grad_norm": 2.5558526515960693, + "learning_rate": 0.00018334813441960436, + "loss": 0.3113, + "step": 6470 + }, + { + "epoch": 0.25020271052936405, + "grad_norm": 0.8033503890037537, + "learning_rate": 0.0001833223934000026, + "loss": 0.5254, + "step": 6480 + }, + { + "epoch": 0.25058882582339087, + "grad_norm": 2.721090078353882, + "learning_rate": 0.0001832966523804008, + "loss": 0.393, + "step": 6490 + }, + { + "epoch": 0.25097494111741764, + "grad_norm": 1.7147916555404663, + "learning_rate": 0.000183270911360799, + "loss": 0.3225, + "step": 6500 + }, + { + "epoch": 0.25136105641144446, + "grad_norm": 2.388347864151001, + "learning_rate": 0.00018324517034119721, + "loss": 0.3519, + "step": 6510 + }, + { + "epoch": 0.25174717170547123, + "grad_norm": 2.470891237258911, + "learning_rate": 0.00018321942932159543, + "loss": 0.4384, + "step": 6520 + }, + { + "epoch": 0.25213328699949805, + "grad_norm": 1.4743351936340332, + "learning_rate": 0.00018319368830199367, + "loss": 0.2464, + "step": 6530 + }, + { + "epoch": 0.2525194022935248, + "grad_norm": 1.5889122486114502, + "learning_rate": 0.00018316794728239185, + "loss": 0.3149, + "step": 6540 + }, + { + "epoch": 0.25290551758755164, + "grad_norm": 4.900819778442383, + "learning_rate": 0.0001831422062627901, + "loss": 0.3978, + "step": 6550 + }, + { + "epoch": 0.25329163288157847, + "grad_norm": 5.22566556930542, + "learning_rate": 0.00018311646524318828, + "loss": 0.4473, + "step": 6560 + }, + { + "epoch": 0.25367774817560523, + "grad_norm": 4.7480363845825195, + "learning_rate": 0.0001830907242235865, + "loss": 0.3976, + "step": 6570 + }, + { + "epoch": 0.25406386346963206, + "grad_norm": 1.4711374044418335, + "learning_rate": 0.0001830649832039847, + "loss": 0.5183, + "step": 6580 + }, + { + "epoch": 0.2544499787636588, + "grad_norm": 2.237309217453003, + "learning_rate": 0.00018303924218438292, + "loss": 0.2171, + "step": 6590 + }, + { + "epoch": 0.25483609405768565, + "grad_norm": 4.107303619384766, + "learning_rate": 0.00018301350116478116, + "loss": 0.3918, + "step": 6600 + }, + { + "epoch": 0.2552222093517124, + "grad_norm": 4.7285003662109375, + "learning_rate": 
0.00018298776014517935, + "loss": 0.2042, + "step": 6610 + }, + { + "epoch": 0.25560832464573924, + "grad_norm": 2.1333792209625244, + "learning_rate": 0.0001829620191255776, + "loss": 0.3502, + "step": 6620 + }, + { + "epoch": 0.255994439939766, + "grad_norm": 3.062173843383789, + "learning_rate": 0.00018293627810597577, + "loss": 0.3949, + "step": 6630 + }, + { + "epoch": 0.25638055523379283, + "grad_norm": 1.538854956626892, + "learning_rate": 0.00018291053708637401, + "loss": 0.4613, + "step": 6640 + }, + { + "epoch": 0.2567666705278196, + "grad_norm": 2.546586751937866, + "learning_rate": 0.00018288479606677223, + "loss": 0.5868, + "step": 6650 + }, + { + "epoch": 0.2571527858218464, + "grad_norm": 2.7282049655914307, + "learning_rate": 0.00018285905504717041, + "loss": 0.4186, + "step": 6660 + }, + { + "epoch": 0.2575389011158732, + "grad_norm": 3.204634189605713, + "learning_rate": 0.00018283331402756865, + "loss": 0.4072, + "step": 6670 + }, + { + "epoch": 0.2579250164099, + "grad_norm": 2.421846866607666, + "learning_rate": 0.00018280757300796684, + "loss": 0.306, + "step": 6680 + }, + { + "epoch": 0.2583111317039268, + "grad_norm": 4.243416786193848, + "learning_rate": 0.00018278183198836508, + "loss": 0.2631, + "step": 6690 + }, + { + "epoch": 0.2586972469979536, + "grad_norm": 1.0495362281799316, + "learning_rate": 0.00018275609096876327, + "loss": 0.3488, + "step": 6700 + }, + { + "epoch": 0.25908336229198037, + "grad_norm": 1.915279746055603, + "learning_rate": 0.0001827303499491615, + "loss": 0.2589, + "step": 6710 + }, + { + "epoch": 0.2594694775860072, + "grad_norm": 3.724299192428589, + "learning_rate": 0.00018270460892955972, + "loss": 0.5118, + "step": 6720 + }, + { + "epoch": 0.25985559288003396, + "grad_norm": 2.832204580307007, + "learning_rate": 0.0001826788679099579, + "loss": 0.2508, + "step": 6730 + }, + { + "epoch": 0.2602417081740608, + "grad_norm": 1.1942508220672607, + "learning_rate": 0.00018265312689035615, + "loss": 0.4328, + "step": 6740 + }, + { + "epoch": 0.26062782346808755, + "grad_norm": 1.0741711854934692, + "learning_rate": 0.00018262738587075433, + "loss": 0.3514, + "step": 6750 + }, + { + "epoch": 0.2610139387621144, + "grad_norm": 2.9918277263641357, + "learning_rate": 0.00018260164485115257, + "loss": 0.3528, + "step": 6760 + }, + { + "epoch": 0.26140005405614114, + "grad_norm": 1.3773655891418457, + "learning_rate": 0.0001825759038315508, + "loss": 0.365, + "step": 6770 + }, + { + "epoch": 0.26178616935016796, + "grad_norm": 3.5288615226745605, + "learning_rate": 0.000182550162811949, + "loss": 0.3645, + "step": 6780 + }, + { + "epoch": 0.26217228464419473, + "grad_norm": 1.2178785800933838, + "learning_rate": 0.00018252442179234721, + "loss": 0.3742, + "step": 6790 + }, + { + "epoch": 0.26255839993822155, + "grad_norm": 2.7981081008911133, + "learning_rate": 0.0001824986807727454, + "loss": 0.6174, + "step": 6800 + }, + { + "epoch": 0.2629445152322484, + "grad_norm": 1.6766215562820435, + "learning_rate": 0.00018247293975314364, + "loss": 0.3028, + "step": 6810 + }, + { + "epoch": 0.26333063052627514, + "grad_norm": 3.7797629833221436, + "learning_rate": 0.00018244719873354183, + "loss": 0.2633, + "step": 6820 + }, + { + "epoch": 0.26371674582030197, + "grad_norm": 7.794743537902832, + "learning_rate": 0.00018242145771394007, + "loss": 0.3586, + "step": 6830 + }, + { + "epoch": 0.26410286111432874, + "grad_norm": 0.5704814195632935, + "learning_rate": 0.00018239571669433828, + "loss": 0.3506, + "step": 6840 + }, + { + "epoch": 
0.26448897640835556, + "grad_norm": 5.771059513092041, + "learning_rate": 0.0001823699756747365, + "loss": 0.3881, + "step": 6850 + }, + { + "epoch": 0.2648750917023823, + "grad_norm": 2.723592519760132, + "learning_rate": 0.0001823442346551347, + "loss": 0.3955, + "step": 6860 + }, + { + "epoch": 0.26526120699640915, + "grad_norm": 1.5448215007781982, + "learning_rate": 0.0001823184936355329, + "loss": 0.495, + "step": 6870 + }, + { + "epoch": 0.2656473222904359, + "grad_norm": 2.2980363368988037, + "learning_rate": 0.00018229275261593113, + "loss": 0.2695, + "step": 6880 + }, + { + "epoch": 0.26603343758446274, + "grad_norm": 1.959811806678772, + "learning_rate": 0.00018226701159632932, + "loss": 0.383, + "step": 6890 + }, + { + "epoch": 0.2664195528784895, + "grad_norm": 2.1491482257843018, + "learning_rate": 0.00018224127057672756, + "loss": 0.5655, + "step": 6900 + }, + { + "epoch": 0.26680566817251633, + "grad_norm": 6.472841262817383, + "learning_rate": 0.00018221552955712577, + "loss": 0.4757, + "step": 6910 + }, + { + "epoch": 0.2671917834665431, + "grad_norm": 7.878561496734619, + "learning_rate": 0.000182189788537524, + "loss": 0.3944, + "step": 6920 + }, + { + "epoch": 0.2675778987605699, + "grad_norm": 0.052701435983181, + "learning_rate": 0.0001821640475179222, + "loss": 0.382, + "step": 6930 + }, + { + "epoch": 0.2679640140545967, + "grad_norm": 2.294677972793579, + "learning_rate": 0.00018213830649832039, + "loss": 0.2932, + "step": 6940 + }, + { + "epoch": 0.2683501293486235, + "grad_norm": 1.6058757305145264, + "learning_rate": 0.00018211256547871863, + "loss": 0.4438, + "step": 6950 + }, + { + "epoch": 0.2687362446426503, + "grad_norm": 4.003495693206787, + "learning_rate": 0.00018208682445911684, + "loss": 0.5945, + "step": 6960 + }, + { + "epoch": 0.2691223599366771, + "grad_norm": 1.423017144203186, + "learning_rate": 0.00018206108343951505, + "loss": 0.4356, + "step": 6970 + }, + { + "epoch": 0.26950847523070387, + "grad_norm": 2.206341028213501, + "learning_rate": 0.00018203534241991327, + "loss": 0.344, + "step": 6980 + }, + { + "epoch": 0.2698945905247307, + "grad_norm": 0.6644784212112427, + "learning_rate": 0.00018200960140031148, + "loss": 0.4988, + "step": 6990 + }, + { + "epoch": 0.27028070581875746, + "grad_norm": 2.4569833278656006, + "learning_rate": 0.0001819838603807097, + "loss": 0.3689, + "step": 7000 + }, + { + "epoch": 0.2706668211127843, + "grad_norm": 1.554567575454712, + "learning_rate": 0.00018195811936110788, + "loss": 0.4684, + "step": 7010 + }, + { + "epoch": 0.27105293640681105, + "grad_norm": 3.2556328773498535, + "learning_rate": 0.00018193237834150612, + "loss": 0.611, + "step": 7020 + }, + { + "epoch": 0.2714390517008379, + "grad_norm": 2.9123427867889404, + "learning_rate": 0.00018190663732190433, + "loss": 0.4278, + "step": 7030 + }, + { + "epoch": 0.27182516699486464, + "grad_norm": 2.159273862838745, + "learning_rate": 0.00018188089630230255, + "loss": 0.2384, + "step": 7040 + }, + { + "epoch": 0.27221128228889147, + "grad_norm": 3.4977822303771973, + "learning_rate": 0.00018185515528270076, + "loss": 0.5459, + "step": 7050 + }, + { + "epoch": 0.27259739758291823, + "grad_norm": 1.1822031736373901, + "learning_rate": 0.00018182941426309897, + "loss": 0.4364, + "step": 7060 + }, + { + "epoch": 0.27298351287694506, + "grad_norm": 2.4467339515686035, + "learning_rate": 0.00018180367324349719, + "loss": 0.5198, + "step": 7070 + }, + { + "epoch": 0.2733696281709719, + "grad_norm": 1.0406467914581299, + "learning_rate": 
0.0001817779322238954, + "loss": 0.2797, + "step": 7080 + }, + { + "epoch": 0.27375574346499865, + "grad_norm": 1.925830602645874, + "learning_rate": 0.0001817521912042936, + "loss": 0.4898, + "step": 7090 + }, + { + "epoch": 0.27414185875902547, + "grad_norm": 3.0385682582855225, + "learning_rate": 0.00018172645018469183, + "loss": 0.3867, + "step": 7100 + }, + { + "epoch": 0.27452797405305224, + "grad_norm": 1.5285695791244507, + "learning_rate": 0.00018170070916509004, + "loss": 0.4233, + "step": 7110 + }, + { + "epoch": 0.27491408934707906, + "grad_norm": 1.266693115234375, + "learning_rate": 0.00018167496814548825, + "loss": 0.4724, + "step": 7120 + }, + { + "epoch": 0.27530020464110583, + "grad_norm": 3.371323585510254, + "learning_rate": 0.00018164922712588647, + "loss": 0.533, + "step": 7130 + }, + { + "epoch": 0.27568631993513265, + "grad_norm": 2.662691116333008, + "learning_rate": 0.00018162348610628468, + "loss": 0.3134, + "step": 7140 + }, + { + "epoch": 0.2760724352291594, + "grad_norm": 1.8977057933807373, + "learning_rate": 0.0001815977450866829, + "loss": 0.3038, + "step": 7150 + }, + { + "epoch": 0.27645855052318624, + "grad_norm": 3.1027894020080566, + "learning_rate": 0.0001815720040670811, + "loss": 0.5074, + "step": 7160 + }, + { + "epoch": 0.276844665817213, + "grad_norm": 1.2112785577774048, + "learning_rate": 0.00018154626304747932, + "loss": 0.324, + "step": 7170 + }, + { + "epoch": 0.27723078111123983, + "grad_norm": 1.6500996351242065, + "learning_rate": 0.00018152052202787753, + "loss": 0.2856, + "step": 7180 + }, + { + "epoch": 0.2776168964052666, + "grad_norm": 3.215747833251953, + "learning_rate": 0.00018149478100827575, + "loss": 0.4522, + "step": 7190 + }, + { + "epoch": 0.2780030116992934, + "grad_norm": 4.8541059494018555, + "learning_rate": 0.00018146903998867396, + "loss": 0.4106, + "step": 7200 + }, + { + "epoch": 0.2783891269933202, + "grad_norm": 2.3697152137756348, + "learning_rate": 0.00018144329896907217, + "loss": 0.2673, + "step": 7210 + }, + { + "epoch": 0.278775242287347, + "grad_norm": 2.9693639278411865, + "learning_rate": 0.00018141755794947039, + "loss": 0.3949, + "step": 7220 + }, + { + "epoch": 0.2791613575813738, + "grad_norm": 2.691817283630371, + "learning_rate": 0.0001813918169298686, + "loss": 0.3427, + "step": 7230 + }, + { + "epoch": 0.2795474728754006, + "grad_norm": 5.197331428527832, + "learning_rate": 0.0001813660759102668, + "loss": 0.4331, + "step": 7240 + }, + { + "epoch": 0.27993358816942737, + "grad_norm": 1.5799933671951294, + "learning_rate": 0.00018134033489066503, + "loss": 0.3543, + "step": 7250 + }, + { + "epoch": 0.2803197034634542, + "grad_norm": 1.3614271879196167, + "learning_rate": 0.00018131459387106324, + "loss": 0.5289, + "step": 7260 + }, + { + "epoch": 0.28070581875748096, + "grad_norm": 2.2942802906036377, + "learning_rate": 0.00018128885285146145, + "loss": 0.4318, + "step": 7270 + }, + { + "epoch": 0.2810919340515078, + "grad_norm": 1.1805604696273804, + "learning_rate": 0.00018126311183185967, + "loss": 0.4754, + "step": 7280 + }, + { + "epoch": 0.28147804934553455, + "grad_norm": 0.5108867883682251, + "learning_rate": 0.00018123737081225788, + "loss": 0.4517, + "step": 7290 + }, + { + "epoch": 0.2818641646395614, + "grad_norm": 1.1736596822738647, + "learning_rate": 0.0001812116297926561, + "loss": 0.4538, + "step": 7300 + }, + { + "epoch": 0.28225027993358814, + "grad_norm": 5.497414588928223, + "learning_rate": 0.0001811858887730543, + "loss": 0.5116, + "step": 7310 + }, + { + "epoch": 
0.28263639522761497, + "grad_norm": 1.1347368955612183, + "learning_rate": 0.00018116014775345252, + "loss": 0.3848, + "step": 7320 + }, + { + "epoch": 0.28302251052164173, + "grad_norm": 2.740715742111206, + "learning_rate": 0.00018113440673385073, + "loss": 0.3456, + "step": 7330 + }, + { + "epoch": 0.28340862581566856, + "grad_norm": 1.3853389024734497, + "learning_rate": 0.00018110866571424897, + "loss": 0.3398, + "step": 7340 + }, + { + "epoch": 0.2837947411096954, + "grad_norm": 7.493706703186035, + "learning_rate": 0.00018108292469464716, + "loss": 0.2726, + "step": 7350 + }, + { + "epoch": 0.28418085640372215, + "grad_norm": 1.81704843044281, + "learning_rate": 0.00018105718367504537, + "loss": 0.3818, + "step": 7360 + }, + { + "epoch": 0.28456697169774897, + "grad_norm": 2.4877755641937256, + "learning_rate": 0.00018103144265544359, + "loss": 0.3499, + "step": 7370 + }, + { + "epoch": 0.28495308699177574, + "grad_norm": 1.3704471588134766, + "learning_rate": 0.0001810057016358418, + "loss": 0.2346, + "step": 7380 + }, + { + "epoch": 0.28533920228580256, + "grad_norm": 2.664745569229126, + "learning_rate": 0.00018097996061624, + "loss": 0.4041, + "step": 7390 + }, + { + "epoch": 0.28572531757982933, + "grad_norm": 3.6539089679718018, + "learning_rate": 0.00018095421959663823, + "loss": 0.2885, + "step": 7400 + }, + { + "epoch": 0.28611143287385615, + "grad_norm": 0.8653857707977295, + "learning_rate": 0.00018092847857703647, + "loss": 0.3849, + "step": 7410 + }, + { + "epoch": 0.2864975481678829, + "grad_norm": 2.6319446563720703, + "learning_rate": 0.00018090273755743465, + "loss": 0.2728, + "step": 7420 + }, + { + "epoch": 0.28688366346190974, + "grad_norm": 2.3457818031311035, + "learning_rate": 0.00018087699653783287, + "loss": 0.446, + "step": 7430 + }, + { + "epoch": 0.2872697787559365, + "grad_norm": 0.8546158671379089, + "learning_rate": 0.00018085125551823108, + "loss": 0.2898, + "step": 7440 + }, + { + "epoch": 0.28765589404996333, + "grad_norm": 0.45937278866767883, + "learning_rate": 0.0001808255144986293, + "loss": 0.583, + "step": 7450 + }, + { + "epoch": 0.2880420093439901, + "grad_norm": 1.7129520177841187, + "learning_rate": 0.00018079977347902753, + "loss": 0.4908, + "step": 7460 + }, + { + "epoch": 0.2884281246380169, + "grad_norm": 4.106715679168701, + "learning_rate": 0.00018077403245942572, + "loss": 0.3373, + "step": 7470 + }, + { + "epoch": 0.2888142399320437, + "grad_norm": 3.8112800121307373, + "learning_rate": 0.00018074829143982396, + "loss": 0.392, + "step": 7480 + }, + { + "epoch": 0.2892003552260705, + "grad_norm": 0.5382593274116516, + "learning_rate": 0.00018072255042022215, + "loss": 0.2929, + "step": 7490 + }, + { + "epoch": 0.2895864705200973, + "grad_norm": 2.50888991355896, + "learning_rate": 0.00018069680940062036, + "loss": 0.3361, + "step": 7500 + }, + { + "epoch": 0.2899725858141241, + "grad_norm": 3.3544275760650635, + "learning_rate": 0.00018067106838101857, + "loss": 0.388, + "step": 7510 + }, + { + "epoch": 0.2903587011081509, + "grad_norm": 1.192386507987976, + "learning_rate": 0.00018064532736141679, + "loss": 0.4427, + "step": 7520 + }, + { + "epoch": 0.2907448164021777, + "grad_norm": 1.5527079105377197, + "learning_rate": 0.00018061958634181503, + "loss": 0.4023, + "step": 7530 + }, + { + "epoch": 0.29113093169620446, + "grad_norm": 0.67446368932724, + "learning_rate": 0.0001805938453222132, + "loss": 0.4949, + "step": 7540 + }, + { + "epoch": 0.2915170469902313, + "grad_norm": 1.6349838972091675, + "learning_rate": 
0.00018056810430261145, + "loss": 0.3811, + "step": 7550 + }, + { + "epoch": 0.29190316228425806, + "grad_norm": 1.4848904609680176, + "learning_rate": 0.00018054236328300964, + "loss": 0.3851, + "step": 7560 + }, + { + "epoch": 0.2922892775782849, + "grad_norm": 0.9933151006698608, + "learning_rate": 0.00018051662226340785, + "loss": 0.4699, + "step": 7570 + }, + { + "epoch": 0.29267539287231165, + "grad_norm": 1.1026233434677124, + "learning_rate": 0.00018049088124380607, + "loss": 0.3287, + "step": 7580 + }, + { + "epoch": 0.29306150816633847, + "grad_norm": 1.232954740524292, + "learning_rate": 0.00018046514022420428, + "loss": 0.3722, + "step": 7590 + }, + { + "epoch": 0.2934476234603653, + "grad_norm": 3.8303146362304688, + "learning_rate": 0.00018043939920460252, + "loss": 0.2985, + "step": 7600 + }, + { + "epoch": 0.29383373875439206, + "grad_norm": 1.9358845949172974, + "learning_rate": 0.0001804136581850007, + "loss": 0.4361, + "step": 7610 + }, + { + "epoch": 0.2942198540484189, + "grad_norm": 1.8905962705612183, + "learning_rate": 0.00018038791716539895, + "loss": 0.2835, + "step": 7620 + }, + { + "epoch": 0.29460596934244565, + "grad_norm": 1.9965651035308838, + "learning_rate": 0.00018036217614579713, + "loss": 0.5387, + "step": 7630 + }, + { + "epoch": 0.2949920846364725, + "grad_norm": 4.204270839691162, + "learning_rate": 0.00018033643512619535, + "loss": 0.3498, + "step": 7640 + }, + { + "epoch": 0.29537819993049924, + "grad_norm": 1.4732340574264526, + "learning_rate": 0.00018031069410659359, + "loss": 0.315, + "step": 7650 + }, + { + "epoch": 0.29576431522452606, + "grad_norm": 1.0233594179153442, + "learning_rate": 0.00018028495308699177, + "loss": 0.1536, + "step": 7660 + }, + { + "epoch": 0.29615043051855283, + "grad_norm": 3.1531457901000977, + "learning_rate": 0.00018025921206739, + "loss": 0.3793, + "step": 7670 + }, + { + "epoch": 0.29653654581257965, + "grad_norm": 0.8080945014953613, + "learning_rate": 0.0001802334710477882, + "loss": 0.5589, + "step": 7680 + }, + { + "epoch": 0.2969226611066064, + "grad_norm": 3.1202728748321533, + "learning_rate": 0.00018020773002818644, + "loss": 0.4652, + "step": 7690 + }, + { + "epoch": 0.29730877640063325, + "grad_norm": 2.5934784412384033, + "learning_rate": 0.00018018198900858463, + "loss": 0.4921, + "step": 7700 + }, + { + "epoch": 0.29769489169466, + "grad_norm": 2.858642101287842, + "learning_rate": 0.00018015624798898284, + "loss": 0.2732, + "step": 7710 + }, + { + "epoch": 0.29808100698868684, + "grad_norm": 3.621229887008667, + "learning_rate": 0.00018013050696938108, + "loss": 0.5639, + "step": 7720 + }, + { + "epoch": 0.2984671222827136, + "grad_norm": 3.7943220138549805, + "learning_rate": 0.00018010476594977926, + "loss": 0.3177, + "step": 7730 + }, + { + "epoch": 0.2988532375767404, + "grad_norm": 1.6371623277664185, + "learning_rate": 0.0001800790249301775, + "loss": 0.4211, + "step": 7740 + }, + { + "epoch": 0.2992393528707672, + "grad_norm": 1.9557713270187378, + "learning_rate": 0.0001800532839105757, + "loss": 0.4351, + "step": 7750 + }, + { + "epoch": 0.299625468164794, + "grad_norm": 2.684964895248413, + "learning_rate": 0.00018002754289097393, + "loss": 0.39, + "step": 7760 + }, + { + "epoch": 0.3000115834588208, + "grad_norm": 1.7401316165924072, + "learning_rate": 0.00018000180187137215, + "loss": 0.2844, + "step": 7770 + }, + { + "epoch": 0.3003976987528476, + "grad_norm": 0.6305844187736511, + "learning_rate": 0.00017997606085177033, + "loss": 0.2472, + "step": 7780 + }, + { + "epoch": 
0.3007838140468744, + "grad_norm": 2.2880289554595947, + "learning_rate": 0.00017995031983216857, + "loss": 0.3952, + "step": 7790 + }, + { + "epoch": 0.3011699293409012, + "grad_norm": 3.423980951309204, + "learning_rate": 0.00017992457881256676, + "loss": 0.4459, + "step": 7800 + }, + { + "epoch": 0.30155604463492797, + "grad_norm": 0.6920475363731384, + "learning_rate": 0.000179898837792965, + "loss": 0.2909, + "step": 7810 + }, + { + "epoch": 0.3019421599289548, + "grad_norm": 0.8905349373817444, + "learning_rate": 0.00017987309677336318, + "loss": 0.346, + "step": 7820 + }, + { + "epoch": 0.30232827522298156, + "grad_norm": 1.8836702108383179, + "learning_rate": 0.00017984735575376143, + "loss": 0.4038, + "step": 7830 + }, + { + "epoch": 0.3027143905170084, + "grad_norm": 2.6712753772735596, + "learning_rate": 0.00017982161473415964, + "loss": 0.3452, + "step": 7840 + }, + { + "epoch": 0.30310050581103515, + "grad_norm": 2.344122886657715, + "learning_rate": 0.00017979587371455785, + "loss": 0.5091, + "step": 7850 + }, + { + "epoch": 0.30348662110506197, + "grad_norm": 3.734415054321289, + "learning_rate": 0.00017977013269495607, + "loss": 0.3893, + "step": 7860 + }, + { + "epoch": 0.3038727363990888, + "grad_norm": 1.70572829246521, + "learning_rate": 0.00017974439167535425, + "loss": 0.4829, + "step": 7870 + }, + { + "epoch": 0.30425885169311556, + "grad_norm": 1.779189109802246, + "learning_rate": 0.0001797186506557525, + "loss": 0.5361, + "step": 7880 + }, + { + "epoch": 0.3046449669871424, + "grad_norm": 2.888803482055664, + "learning_rate": 0.00017969290963615068, + "loss": 0.4305, + "step": 7890 + }, + { + "epoch": 0.30503108228116915, + "grad_norm": 1.2247655391693115, + "learning_rate": 0.00017966716861654892, + "loss": 0.3817, + "step": 7900 + }, + { + "epoch": 0.305417197575196, + "grad_norm": 2.995152473449707, + "learning_rate": 0.00017964142759694713, + "loss": 0.4669, + "step": 7910 + }, + { + "epoch": 0.30580331286922274, + "grad_norm": 8.049060821533203, + "learning_rate": 0.00017961568657734535, + "loss": 0.6706, + "step": 7920 + }, + { + "epoch": 0.30618942816324957, + "grad_norm": 2.1181435585021973, + "learning_rate": 0.00017958994555774356, + "loss": 0.4353, + "step": 7930 + }, + { + "epoch": 0.30657554345727633, + "grad_norm": 8.394509315490723, + "learning_rate": 0.00017956420453814174, + "loss": 0.3497, + "step": 7940 + }, + { + "epoch": 0.30696165875130316, + "grad_norm": 2.5140750408172607, + "learning_rate": 0.00017953846351853998, + "loss": 0.5774, + "step": 7950 + }, + { + "epoch": 0.3073477740453299, + "grad_norm": 2.720942974090576, + "learning_rate": 0.0001795127224989382, + "loss": 0.4457, + "step": 7960 + }, + { + "epoch": 0.30773388933935675, + "grad_norm": 1.8155667781829834, + "learning_rate": 0.0001794869814793364, + "loss": 0.4155, + "step": 7970 + }, + { + "epoch": 0.3081200046333835, + "grad_norm": 1.9989752769470215, + "learning_rate": 0.00017946124045973462, + "loss": 0.3233, + "step": 7980 + }, + { + "epoch": 0.30850611992741034, + "grad_norm": 0.7483557462692261, + "learning_rate": 0.00017943549944013284, + "loss": 0.2932, + "step": 7990 + }, + { + "epoch": 0.3088922352214371, + "grad_norm": 0.5750642418861389, + "learning_rate": 0.00017940975842053105, + "loss": 0.401, + "step": 8000 + }, + { + "epoch": 0.30927835051546393, + "grad_norm": 1.2084500789642334, + "learning_rate": 0.00017938401740092924, + "loss": 0.3705, + "step": 8010 + }, + { + "epoch": 0.3096644658094907, + "grad_norm": 1.833434820175171, + "learning_rate": 
0.00017935827638132748, + "loss": 0.3507, + "step": 8020 + }, + { + "epoch": 0.3100505811035175, + "grad_norm": 3.147508382797241, + "learning_rate": 0.0001793325353617257, + "loss": 0.3255, + "step": 8030 + }, + { + "epoch": 0.3104366963975443, + "grad_norm": 2.150932788848877, + "learning_rate": 0.0001793067943421239, + "loss": 0.3401, + "step": 8040 + }, + { + "epoch": 0.3108228116915711, + "grad_norm": 3.3340635299682617, + "learning_rate": 0.00017928105332252212, + "loss": 0.3606, + "step": 8050 + }, + { + "epoch": 0.3112089269855979, + "grad_norm": 5.173205375671387, + "learning_rate": 0.00017925531230292033, + "loss": 0.1695, + "step": 8060 + }, + { + "epoch": 0.3115950422796247, + "grad_norm": 1.0863877534866333, + "learning_rate": 0.00017922957128331854, + "loss": 0.3038, + "step": 8070 + }, + { + "epoch": 0.31198115757365147, + "grad_norm": 1.5977118015289307, + "learning_rate": 0.00017920383026371676, + "loss": 0.2291, + "step": 8080 + }, + { + "epoch": 0.3123672728676783, + "grad_norm": 4.040243625640869, + "learning_rate": 0.00017917808924411497, + "loss": 0.8538, + "step": 8090 + }, + { + "epoch": 0.31275338816170506, + "grad_norm": 1.5926854610443115, + "learning_rate": 0.00017915234822451318, + "loss": 0.4733, + "step": 8100 + }, + { + "epoch": 0.3131395034557319, + "grad_norm": 1.0959421396255493, + "learning_rate": 0.0001791266072049114, + "loss": 0.6076, + "step": 8110 + }, + { + "epoch": 0.3135256187497587, + "grad_norm": 2.786085367202759, + "learning_rate": 0.0001791008661853096, + "loss": 0.3229, + "step": 8120 + }, + { + "epoch": 0.3139117340437855, + "grad_norm": 2.2573914527893066, + "learning_rate": 0.00017907512516570782, + "loss": 0.3676, + "step": 8130 + }, + { + "epoch": 0.3142978493378123, + "grad_norm": 2.271852493286133, + "learning_rate": 0.00017904938414610604, + "loss": 0.6275, + "step": 8140 + }, + { + "epoch": 0.31468396463183906, + "grad_norm": 1.9762821197509766, + "learning_rate": 0.00017902364312650425, + "loss": 0.2232, + "step": 8150 + }, + { + "epoch": 0.3150700799258659, + "grad_norm": 2.9960873126983643, + "learning_rate": 0.00017899790210690246, + "loss": 0.4739, + "step": 8160 + }, + { + "epoch": 0.31545619521989265, + "grad_norm": 1.142216682434082, + "learning_rate": 0.00017897216108730068, + "loss": 0.5983, + "step": 8170 + }, + { + "epoch": 0.3158423105139195, + "grad_norm": 1.7127768993377686, + "learning_rate": 0.0001789464200676989, + "loss": 0.4131, + "step": 8180 + }, + { + "epoch": 0.31622842580794625, + "grad_norm": 1.579793095588684, + "learning_rate": 0.0001789206790480971, + "loss": 0.3119, + "step": 8190 + }, + { + "epoch": 0.31661454110197307, + "grad_norm": 0.9647886157035828, + "learning_rate": 0.00017889493802849532, + "loss": 0.398, + "step": 8200 + }, + { + "epoch": 0.31700065639599984, + "grad_norm": 3.435312032699585, + "learning_rate": 0.00017886919700889353, + "loss": 0.405, + "step": 8210 + }, + { + "epoch": 0.31738677169002666, + "grad_norm": 2.1500205993652344, + "learning_rate": 0.00017884345598929174, + "loss": 0.2519, + "step": 8220 + }, + { + "epoch": 0.3177728869840534, + "grad_norm": 1.3107216358184814, + "learning_rate": 0.00017881771496968996, + "loss": 0.2846, + "step": 8230 + }, + { + "epoch": 0.31815900227808025, + "grad_norm": 0.1899029165506363, + "learning_rate": 0.00017879197395008817, + "loss": 0.4597, + "step": 8240 + }, + { + "epoch": 0.318545117572107, + "grad_norm": 2.329299211502075, + "learning_rate": 0.00017876623293048638, + "loss": 0.6523, + "step": 8250 + }, + { + "epoch": 
0.31893123286613384, + "grad_norm": 0.41523978114128113, + "learning_rate": 0.0001787404919108846, + "loss": 0.3266, + "step": 8260 + }, + { + "epoch": 0.3193173481601606, + "grad_norm": 0.7914639711380005, + "learning_rate": 0.0001787147508912828, + "loss": 0.4029, + "step": 8270 + }, + { + "epoch": 0.31970346345418743, + "grad_norm": 0.6159287691116333, + "learning_rate": 0.00017868900987168102, + "loss": 0.4426, + "step": 8280 + }, + { + "epoch": 0.3200895787482142, + "grad_norm": 1.3690640926361084, + "learning_rate": 0.00017866326885207924, + "loss": 0.2974, + "step": 8290 + }, + { + "epoch": 0.320475694042241, + "grad_norm": 0.8592869639396667, + "learning_rate": 0.00017863752783247745, + "loss": 0.232, + "step": 8300 + }, + { + "epoch": 0.3208618093362678, + "grad_norm": 0.43169018626213074, + "learning_rate": 0.00017861178681287566, + "loss": 0.4033, + "step": 8310 + }, + { + "epoch": 0.3212479246302946, + "grad_norm": 0.8405828475952148, + "learning_rate": 0.00017858604579327388, + "loss": 0.3339, + "step": 8320 + }, + { + "epoch": 0.3216340399243214, + "grad_norm": 2.3412604331970215, + "learning_rate": 0.0001785603047736721, + "loss": 0.2781, + "step": 8330 + }, + { + "epoch": 0.3220201552183482, + "grad_norm": 2.412045478820801, + "learning_rate": 0.0001785345637540703, + "loss": 0.4346, + "step": 8340 + }, + { + "epoch": 0.32240627051237497, + "grad_norm": 3.626305341720581, + "learning_rate": 0.00017850882273446852, + "loss": 0.327, + "step": 8350 + }, + { + "epoch": 0.3227923858064018, + "grad_norm": 0.5645825266838074, + "learning_rate": 0.00017848308171486673, + "loss": 0.234, + "step": 8360 + }, + { + "epoch": 0.32317850110042856, + "grad_norm": 4.27307653427124, + "learning_rate": 0.00017845734069526494, + "loss": 0.5493, + "step": 8370 + }, + { + "epoch": 0.3235646163944554, + "grad_norm": 0.4511154890060425, + "learning_rate": 0.00017843159967566316, + "loss": 0.3501, + "step": 8380 + }, + { + "epoch": 0.3239507316884822, + "grad_norm": 0.314996600151062, + "learning_rate": 0.00017840585865606137, + "loss": 0.3544, + "step": 8390 + }, + { + "epoch": 0.324336846982509, + "grad_norm": 1.6546530723571777, + "learning_rate": 0.00017838011763645958, + "loss": 0.2455, + "step": 8400 + }, + { + "epoch": 0.3247229622765358, + "grad_norm": 3.2812252044677734, + "learning_rate": 0.0001783543766168578, + "loss": 0.3333, + "step": 8410 + }, + { + "epoch": 0.32510907757056257, + "grad_norm": 3.5717616081237793, + "learning_rate": 0.000178328635597256, + "loss": 0.4679, + "step": 8420 + }, + { + "epoch": 0.3254951928645894, + "grad_norm": 1.12017023563385, + "learning_rate": 0.00017830289457765422, + "loss": 0.3481, + "step": 8430 + }, + { + "epoch": 0.32588130815861616, + "grad_norm": 1.869462490081787, + "learning_rate": 0.00017827715355805244, + "loss": 0.4566, + "step": 8440 + }, + { + "epoch": 0.326267423452643, + "grad_norm": 1.4613149166107178, + "learning_rate": 0.00017825141253845065, + "loss": 0.5456, + "step": 8450 + }, + { + "epoch": 0.32665353874666975, + "grad_norm": 0.6842670440673828, + "learning_rate": 0.0001782256715188489, + "loss": 0.2776, + "step": 8460 + }, + { + "epoch": 0.32703965404069657, + "grad_norm": 2.9485504627227783, + "learning_rate": 0.00017819993049924708, + "loss": 0.3204, + "step": 8470 + }, + { + "epoch": 0.32742576933472334, + "grad_norm": 3.2084853649139404, + "learning_rate": 0.0001781741894796453, + "loss": 0.3391, + "step": 8480 + }, + { + "epoch": 0.32781188462875016, + "grad_norm": 1.9732774496078491, + "learning_rate": 
0.0001781484484600435, + "loss": 0.3283, + "step": 8490 + }, + { + "epoch": 0.32819799992277693, + "grad_norm": 0.6378610134124756, + "learning_rate": 0.00017812270744044172, + "loss": 0.4519, + "step": 8500 + }, + { + "epoch": 0.32858411521680375, + "grad_norm": 4.108947277069092, + "learning_rate": 0.00017809696642083993, + "loss": 0.4933, + "step": 8510 + }, + { + "epoch": 0.3289702305108305, + "grad_norm": 2.7623212337493896, + "learning_rate": 0.00017807122540123814, + "loss": 0.4197, + "step": 8520 + }, + { + "epoch": 0.32935634580485734, + "grad_norm": 1.8904645442962646, + "learning_rate": 0.00017804548438163638, + "loss": 0.325, + "step": 8530 + }, + { + "epoch": 0.3297424610988841, + "grad_norm": 0.5131659507751465, + "learning_rate": 0.00017801974336203457, + "loss": 0.2403, + "step": 8540 + }, + { + "epoch": 0.33012857639291093, + "grad_norm": 2.965916633605957, + "learning_rate": 0.0001779940023424328, + "loss": 0.4597, + "step": 8550 + }, + { + "epoch": 0.3305146916869377, + "grad_norm": 1.5409698486328125, + "learning_rate": 0.000177968261322831, + "loss": 0.4594, + "step": 8560 + }, + { + "epoch": 0.3309008069809645, + "grad_norm": 1.1746805906295776, + "learning_rate": 0.0001779425203032292, + "loss": 0.4581, + "step": 8570 + }, + { + "epoch": 0.3312869222749913, + "grad_norm": 4.493356227874756, + "learning_rate": 0.00017791677928362745, + "loss": 0.4699, + "step": 8580 + }, + { + "epoch": 0.3316730375690181, + "grad_norm": 3.506526470184326, + "learning_rate": 0.00017789103826402564, + "loss": 0.3974, + "step": 8590 + }, + { + "epoch": 0.3320591528630449, + "grad_norm": 2.3893234729766846, + "learning_rate": 0.00017786529724442388, + "loss": 0.2823, + "step": 8600 + }, + { + "epoch": 0.3324452681570717, + "grad_norm": 1.6228163242340088, + "learning_rate": 0.00017783955622482206, + "loss": 0.4199, + "step": 8610 + }, + { + "epoch": 0.3328313834510985, + "grad_norm": 3.2869131565093994, + "learning_rate": 0.0001778138152052203, + "loss": 0.3173, + "step": 8620 + }, + { + "epoch": 0.3332174987451253, + "grad_norm": 5.547116279602051, + "learning_rate": 0.0001777880741856185, + "loss": 0.4584, + "step": 8630 + }, + { + "epoch": 0.33360361403915206, + "grad_norm": 1.3338594436645508, + "learning_rate": 0.0001777623331660167, + "loss": 0.4235, + "step": 8640 + }, + { + "epoch": 0.3339897293331789, + "grad_norm": 1.9165093898773193, + "learning_rate": 0.00017773659214641494, + "loss": 0.2989, + "step": 8650 + }, + { + "epoch": 0.3343758446272057, + "grad_norm": 1.968935251235962, + "learning_rate": 0.00017771085112681313, + "loss": 0.4194, + "step": 8660 + }, + { + "epoch": 0.3347619599212325, + "grad_norm": 9.66997241973877, + "learning_rate": 0.00017768511010721137, + "loss": 0.5818, + "step": 8670 + }, + { + "epoch": 0.3351480752152593, + "grad_norm": 2.3636281490325928, + "learning_rate": 0.00017765936908760956, + "loss": 0.3317, + "step": 8680 + }, + { + "epoch": 0.33553419050928607, + "grad_norm": 3.3569977283477783, + "learning_rate": 0.0001776336280680078, + "loss": 0.4388, + "step": 8690 + }, + { + "epoch": 0.3359203058033129, + "grad_norm": 1.2452306747436523, + "learning_rate": 0.00017760788704840598, + "loss": 0.1368, + "step": 8700 + }, + { + "epoch": 0.33630642109733966, + "grad_norm": 0.0380173958837986, + "learning_rate": 0.0001775821460288042, + "loss": 0.3264, + "step": 8710 + }, + { + "epoch": 0.3366925363913665, + "grad_norm": 1.5271002054214478, + "learning_rate": 0.00017755640500920244, + "loss": 0.2943, + "step": 8720 + }, + { + "epoch": 
0.33707865168539325, + "grad_norm": 0.9701687693595886, + "learning_rate": 0.00017753066398960062, + "loss": 0.353, + "step": 8730 + }, + { + "epoch": 0.33746476697942007, + "grad_norm": 1.9296154975891113, + "learning_rate": 0.00017750492296999886, + "loss": 0.3776, + "step": 8740 + }, + { + "epoch": 0.33785088227344684, + "grad_norm": 1.2136276960372925, + "learning_rate": 0.00017747918195039705, + "loss": 0.5126, + "step": 8750 + }, + { + "epoch": 0.33823699756747366, + "grad_norm": 1.7323212623596191, + "learning_rate": 0.0001774534409307953, + "loss": 0.3477, + "step": 8760 + }, + { + "epoch": 0.33862311286150043, + "grad_norm": 1.164534091949463, + "learning_rate": 0.0001774276999111935, + "loss": 0.4053, + "step": 8770 + }, + { + "epoch": 0.33900922815552725, + "grad_norm": 0.42989471554756165, + "learning_rate": 0.0001774019588915917, + "loss": 0.3026, + "step": 8780 + }, + { + "epoch": 0.339395343449554, + "grad_norm": 2.357590436935425, + "learning_rate": 0.00017737621787198993, + "loss": 0.3869, + "step": 8790 + }, + { + "epoch": 0.33978145874358084, + "grad_norm": 1.9374550580978394, + "learning_rate": 0.00017735047685238812, + "loss": 0.2975, + "step": 8800 + }, + { + "epoch": 0.3401675740376076, + "grad_norm": 4.8107428550720215, + "learning_rate": 0.00017732473583278636, + "loss": 0.3959, + "step": 8810 + }, + { + "epoch": 0.34055368933163443, + "grad_norm": 1.938700556755066, + "learning_rate": 0.00017729899481318454, + "loss": 0.3726, + "step": 8820 + }, + { + "epoch": 0.3409398046256612, + "grad_norm": 3.147167682647705, + "learning_rate": 0.00017727325379358278, + "loss": 0.1828, + "step": 8830 + }, + { + "epoch": 0.341325919919688, + "grad_norm": 1.8921313285827637, + "learning_rate": 0.000177247512773981, + "loss": 0.2038, + "step": 8840 + }, + { + "epoch": 0.3417120352137148, + "grad_norm": 0.9098349213600159, + "learning_rate": 0.00017722177175437918, + "loss": 0.2853, + "step": 8850 + }, + { + "epoch": 0.3420981505077416, + "grad_norm": 2.4006853103637695, + "learning_rate": 0.00017719603073477742, + "loss": 0.6054, + "step": 8860 + }, + { + "epoch": 0.3424842658017684, + "grad_norm": 1.9303867816925049, + "learning_rate": 0.0001771702897151756, + "loss": 0.3507, + "step": 8870 + }, + { + "epoch": 0.3428703810957952, + "grad_norm": 0.49361029267311096, + "learning_rate": 0.00017714454869557385, + "loss": 0.4661, + "step": 8880 + }, + { + "epoch": 0.343256496389822, + "grad_norm": 2.542618751525879, + "learning_rate": 0.00017711880767597204, + "loss": 0.6924, + "step": 8890 + }, + { + "epoch": 0.3436426116838488, + "grad_norm": 0.5868918895721436, + "learning_rate": 0.00017709306665637028, + "loss": 0.4507, + "step": 8900 + }, + { + "epoch": 0.3440287269778756, + "grad_norm": 2.4685137271881104, + "learning_rate": 0.0001770673256367685, + "loss": 0.4538, + "step": 8910 + }, + { + "epoch": 0.3444148422719024, + "grad_norm": 2.6662702560424805, + "learning_rate": 0.00017704158461716668, + "loss": 0.6181, + "step": 8920 + }, + { + "epoch": 0.3448009575659292, + "grad_norm": 1.705103874206543, + "learning_rate": 0.00017701584359756492, + "loss": 0.481, + "step": 8930 + }, + { + "epoch": 0.345187072859956, + "grad_norm": 2.0710952281951904, + "learning_rate": 0.0001769901025779631, + "loss": 0.4357, + "step": 8940 + }, + { + "epoch": 0.3455731881539828, + "grad_norm": 3.487117290496826, + "learning_rate": 0.00017696436155836134, + "loss": 0.4572, + "step": 8950 + }, + { + "epoch": 0.34595930344800957, + "grad_norm": 3.03472900390625, + "learning_rate": 
0.00017693862053875956, + "loss": 0.4437, + "step": 8960 + }, + { + "epoch": 0.3463454187420364, + "grad_norm": 1.310692548751831, + "learning_rate": 0.00017691287951915777, + "loss": 0.4218, + "step": 8970 + }, + { + "epoch": 0.34673153403606316, + "grad_norm": 4.131219387054443, + "learning_rate": 0.00017688713849955598, + "loss": 0.445, + "step": 8980 + }, + { + "epoch": 0.34711764933009, + "grad_norm": 1.4199285507202148, + "learning_rate": 0.00017686139747995417, + "loss": 0.3093, + "step": 8990 + }, + { + "epoch": 0.34750376462411675, + "grad_norm": 1.9338914155960083, + "learning_rate": 0.0001768356564603524, + "loss": 0.275, + "step": 9000 + }, + { + "epoch": 0.3478898799181436, + "grad_norm": 3.425877332687378, + "learning_rate": 0.0001768099154407506, + "loss": 0.4621, + "step": 9010 + }, + { + "epoch": 0.34827599521217034, + "grad_norm": 3.6350486278533936, + "learning_rate": 0.00017678417442114884, + "loss": 0.3313, + "step": 9020 + }, + { + "epoch": 0.34866211050619716, + "grad_norm": 3.339202880859375, + "learning_rate": 0.00017675843340154705, + "loss": 0.5958, + "step": 9030 + }, + { + "epoch": 0.34904822580022393, + "grad_norm": 4.148682117462158, + "learning_rate": 0.00017673269238194526, + "loss": 0.2384, + "step": 9040 + }, + { + "epoch": 0.34943434109425076, + "grad_norm": 0.9697182178497314, + "learning_rate": 0.00017670695136234348, + "loss": 0.3119, + "step": 9050 + }, + { + "epoch": 0.3498204563882775, + "grad_norm": 0.53201824426651, + "learning_rate": 0.0001766812103427417, + "loss": 0.4339, + "step": 9060 + }, + { + "epoch": 0.35020657168230435, + "grad_norm": 0.9727185368537903, + "learning_rate": 0.0001766554693231399, + "loss": 0.3289, + "step": 9070 + }, + { + "epoch": 0.3505926869763311, + "grad_norm": 4.32904052734375, + "learning_rate": 0.00017662972830353812, + "loss": 0.4673, + "step": 9080 + }, + { + "epoch": 0.35097880227035794, + "grad_norm": 2.511558771133423, + "learning_rate": 0.00017660398728393633, + "loss": 0.2257, + "step": 9090 + }, + { + "epoch": 0.3513649175643847, + "grad_norm": 1.8378714323043823, + "learning_rate": 0.00017657824626433454, + "loss": 0.3977, + "step": 9100 + }, + { + "epoch": 0.3517510328584115, + "grad_norm": 1.3297137022018433, + "learning_rate": 0.00017655250524473276, + "loss": 0.3541, + "step": 9110 + }, + { + "epoch": 0.3521371481524383, + "grad_norm": 3.253089666366577, + "learning_rate": 0.00017652676422513097, + "loss": 0.6326, + "step": 9120 + }, + { + "epoch": 0.3525232634464651, + "grad_norm": 0.9691923260688782, + "learning_rate": 0.00017650102320552918, + "loss": 0.2206, + "step": 9130 + }, + { + "epoch": 0.3529093787404919, + "grad_norm": 1.570204496383667, + "learning_rate": 0.0001764752821859274, + "loss": 0.2769, + "step": 9140 + }, + { + "epoch": 0.3532954940345187, + "grad_norm": 1.9307161569595337, + "learning_rate": 0.0001764495411663256, + "loss": 0.3149, + "step": 9150 + }, + { + "epoch": 0.3536816093285455, + "grad_norm": 2.783297300338745, + "learning_rate": 0.00017642380014672382, + "loss": 0.3912, + "step": 9160 + }, + { + "epoch": 0.3540677246225723, + "grad_norm": 2.193371057510376, + "learning_rate": 0.00017639805912712204, + "loss": 0.3782, + "step": 9170 + }, + { + "epoch": 0.3544538399165991, + "grad_norm": 2.3460335731506348, + "learning_rate": 0.00017637231810752025, + "loss": 0.5051, + "step": 9180 + }, + { + "epoch": 0.3548399552106259, + "grad_norm": 2.4668326377868652, + "learning_rate": 0.00017634657708791846, + "loss": 0.2899, + "step": 9190 + }, + { + "epoch": 0.3552260705046527, 
+ "grad_norm": 2.004683017730713, + "learning_rate": 0.00017632083606831668, + "loss": 0.3137, + "step": 9200 + }, + { + "epoch": 0.3556121857986795, + "grad_norm": 6.333971977233887, + "learning_rate": 0.0001762950950487149, + "loss": 0.5027, + "step": 9210 + }, + { + "epoch": 0.3559983010927063, + "grad_norm": 1.7840352058410645, + "learning_rate": 0.0001762693540291131, + "loss": 0.3988, + "step": 9220 + }, + { + "epoch": 0.35638441638673307, + "grad_norm": 0.9257024526596069, + "learning_rate": 0.00017624361300951132, + "loss": 0.3662, + "step": 9230 + }, + { + "epoch": 0.3567705316807599, + "grad_norm": 2.582887887954712, + "learning_rate": 0.00017621787198990953, + "loss": 0.2863, + "step": 9240 + }, + { + "epoch": 0.35715664697478666, + "grad_norm": 3.119943380355835, + "learning_rate": 0.00017619213097030774, + "loss": 0.4041, + "step": 9250 + }, + { + "epoch": 0.3575427622688135, + "grad_norm": 2.2561371326446533, + "learning_rate": 0.00017616638995070596, + "loss": 0.3969, + "step": 9260 + }, + { + "epoch": 0.35792887756284025, + "grad_norm": 2.104891538619995, + "learning_rate": 0.00017614064893110417, + "loss": 0.3216, + "step": 9270 + }, + { + "epoch": 0.3583149928568671, + "grad_norm": 1.6922805309295654, + "learning_rate": 0.00017611490791150238, + "loss": 0.3828, + "step": 9280 + }, + { + "epoch": 0.35870110815089384, + "grad_norm": 1.0928469896316528, + "learning_rate": 0.0001760891668919006, + "loss": 0.3225, + "step": 9290 + }, + { + "epoch": 0.35908722344492067, + "grad_norm": 2.4089863300323486, + "learning_rate": 0.0001760634258722988, + "loss": 0.4143, + "step": 9300 + }, + { + "epoch": 0.35947333873894743, + "grad_norm": 0.5562119483947754, + "learning_rate": 0.00017603768485269702, + "loss": 0.4597, + "step": 9310 + }, + { + "epoch": 0.35985945403297426, + "grad_norm": 1.3904486894607544, + "learning_rate": 0.00017601194383309524, + "loss": 0.4462, + "step": 9320 + }, + { + "epoch": 0.360245569327001, + "grad_norm": 2.1393306255340576, + "learning_rate": 0.00017598620281349345, + "loss": 0.2613, + "step": 9330 + }, + { + "epoch": 0.36063168462102785, + "grad_norm": 1.3657029867172241, + "learning_rate": 0.00017596046179389166, + "loss": 0.4968, + "step": 9340 + }, + { + "epoch": 0.3610177999150546, + "grad_norm": 2.424880027770996, + "learning_rate": 0.00017593472077428988, + "loss": 0.5982, + "step": 9350 + }, + { + "epoch": 0.36140391520908144, + "grad_norm": 6.178807735443115, + "learning_rate": 0.0001759089797546881, + "loss": 0.5355, + "step": 9360 + }, + { + "epoch": 0.3617900305031082, + "grad_norm": 1.5572419166564941, + "learning_rate": 0.0001758832387350863, + "loss": 0.4435, + "step": 9370 + }, + { + "epoch": 0.36217614579713503, + "grad_norm": 0.46649104356765747, + "learning_rate": 0.00017585749771548452, + "loss": 0.352, + "step": 9380 + }, + { + "epoch": 0.3625622610911618, + "grad_norm": 1.9611142873764038, + "learning_rate": 0.00017583175669588276, + "loss": 0.2684, + "step": 9390 + }, + { + "epoch": 0.3629483763851886, + "grad_norm": 1.7648595571517944, + "learning_rate": 0.00017580601567628094, + "loss": 0.3186, + "step": 9400 + }, + { + "epoch": 0.3633344916792154, + "grad_norm": 1.7970843315124512, + "learning_rate": 0.00017578027465667916, + "loss": 0.5339, + "step": 9410 + }, + { + "epoch": 0.3637206069732422, + "grad_norm": 3.084897994995117, + "learning_rate": 0.00017575453363707737, + "loss": 0.5143, + "step": 9420 + }, + { + "epoch": 0.36410672226726903, + "grad_norm": 1.440626621246338, + "learning_rate": 0.00017572879261747558, + 
"loss": 0.4067, + "step": 9430 + }, + { + "epoch": 0.3644928375612958, + "grad_norm": 0.44918450713157654, + "learning_rate": 0.0001757030515978738, + "loss": 0.2306, + "step": 9440 + }, + { + "epoch": 0.3648789528553226, + "grad_norm": 2.617272138595581, + "learning_rate": 0.000175677310578272, + "loss": 0.3166, + "step": 9450 + }, + { + "epoch": 0.3652650681493494, + "grad_norm": 2.575073719024658, + "learning_rate": 0.00017565156955867025, + "loss": 0.6645, + "step": 9460 + }, + { + "epoch": 0.3656511834433762, + "grad_norm": 0.9430664777755737, + "learning_rate": 0.00017562582853906844, + "loss": 0.2753, + "step": 9470 + }, + { + "epoch": 0.366037298737403, + "grad_norm": 1.9400445222854614, + "learning_rate": 0.00017560008751946665, + "loss": 0.4689, + "step": 9480 + }, + { + "epoch": 0.3664234140314298, + "grad_norm": 4.0443220138549805, + "learning_rate": 0.00017557434649986486, + "loss": 0.5373, + "step": 9490 + }, + { + "epoch": 0.3668095293254566, + "grad_norm": 3.4999184608459473, + "learning_rate": 0.00017554860548026308, + "loss": 0.3412, + "step": 9500 + }, + { + "epoch": 0.3671956446194834, + "grad_norm": 2.2023515701293945, + "learning_rate": 0.0001755228644606613, + "loss": 0.3385, + "step": 9510 + }, + { + "epoch": 0.36758175991351016, + "grad_norm": 1.213641881942749, + "learning_rate": 0.0001754971234410595, + "loss": 0.4785, + "step": 9520 + }, + { + "epoch": 0.367967875207537, + "grad_norm": 0.4178420603275299, + "learning_rate": 0.00017547138242145774, + "loss": 0.2605, + "step": 9530 + }, + { + "epoch": 0.36835399050156376, + "grad_norm": 2.676564931869507, + "learning_rate": 0.00017544564140185593, + "loss": 0.5297, + "step": 9540 + }, + { + "epoch": 0.3687401057955906, + "grad_norm": 0.8604353070259094, + "learning_rate": 0.00017541990038225414, + "loss": 0.3983, + "step": 9550 + }, + { + "epoch": 0.36912622108961735, + "grad_norm": 1.298893690109253, + "learning_rate": 0.00017539415936265236, + "loss": 0.3229, + "step": 9560 + }, + { + "epoch": 0.36951233638364417, + "grad_norm": 4.109025478363037, + "learning_rate": 0.00017536841834305057, + "loss": 0.519, + "step": 9570 + }, + { + "epoch": 0.36989845167767094, + "grad_norm": 3.440915584564209, + "learning_rate": 0.0001753426773234488, + "loss": 0.4061, + "step": 9580 + }, + { + "epoch": 0.37028456697169776, + "grad_norm": 0.1484186202287674, + "learning_rate": 0.000175316936303847, + "loss": 0.3443, + "step": 9590 + }, + { + "epoch": 0.3706706822657245, + "grad_norm": 3.114328145980835, + "learning_rate": 0.00017529119528424524, + "loss": 0.2451, + "step": 9600 + }, + { + "epoch": 0.37105679755975135, + "grad_norm": 1.8218796253204346, + "learning_rate": 0.00017526545426464342, + "loss": 0.2511, + "step": 9610 + }, + { + "epoch": 0.3714429128537781, + "grad_norm": 1.0732795000076294, + "learning_rate": 0.00017523971324504164, + "loss": 0.1581, + "step": 9620 + }, + { + "epoch": 0.37182902814780494, + "grad_norm": 1.0567959547042847, + "learning_rate": 0.00017521397222543985, + "loss": 0.1924, + "step": 9630 + }, + { + "epoch": 0.3722151434418317, + "grad_norm": 0.3467637896537781, + "learning_rate": 0.00017518823120583806, + "loss": 0.3571, + "step": 9640 + }, + { + "epoch": 0.37260125873585853, + "grad_norm": 2.6293838024139404, + "learning_rate": 0.0001751624901862363, + "loss": 0.3282, + "step": 9650 + }, + { + "epoch": 0.3729873740298853, + "grad_norm": 1.159696102142334, + "learning_rate": 0.0001751367491666345, + "loss": 0.2636, + "step": 9660 + }, + { + "epoch": 0.3733734893239121, + "grad_norm": 
0.6884826421737671, + "learning_rate": 0.00017511100814703273, + "loss": 0.2842, + "step": 9670 + }, + { + "epoch": 0.3737596046179389, + "grad_norm": 3.789825201034546, + "learning_rate": 0.00017508526712743091, + "loss": 0.599, + "step": 9680 + }, + { + "epoch": 0.3741457199119657, + "grad_norm": 1.0705493688583374, + "learning_rate": 0.00017505952610782913, + "loss": 0.1746, + "step": 9690 + }, + { + "epoch": 0.37453183520599254, + "grad_norm": 1.8735803365707397, + "learning_rate": 0.00017503378508822734, + "loss": 0.3259, + "step": 9700 + }, + { + "epoch": 0.3749179505000193, + "grad_norm": 1.2987112998962402, + "learning_rate": 0.00017500804406862555, + "loss": 0.5738, + "step": 9710 + }, + { + "epoch": 0.3753040657940461, + "grad_norm": 1.5362507104873657, + "learning_rate": 0.0001749823030490238, + "loss": 0.3815, + "step": 9720 + }, + { + "epoch": 0.3756901810880729, + "grad_norm": 0.1640123724937439, + "learning_rate": 0.00017495656202942198, + "loss": 0.3672, + "step": 9730 + }, + { + "epoch": 0.3760762963820997, + "grad_norm": 0.6714594960212708, + "learning_rate": 0.00017493082100982022, + "loss": 0.2849, + "step": 9740 + }, + { + "epoch": 0.3764624116761265, + "grad_norm": 4.330246448516846, + "learning_rate": 0.0001749050799902184, + "loss": 0.4023, + "step": 9750 + }, + { + "epoch": 0.3768485269701533, + "grad_norm": 0.8616659641265869, + "learning_rate": 0.00017487933897061665, + "loss": 0.4434, + "step": 9760 + }, + { + "epoch": 0.3772346422641801, + "grad_norm": 2.6581578254699707, + "learning_rate": 0.00017485359795101486, + "loss": 0.4854, + "step": 9770 + }, + { + "epoch": 0.3776207575582069, + "grad_norm": 1.8269850015640259, + "learning_rate": 0.00017482785693141305, + "loss": 0.6033, + "step": 9780 + }, + { + "epoch": 0.37800687285223367, + "grad_norm": 2.256073236465454, + "learning_rate": 0.0001748021159118113, + "loss": 0.5317, + "step": 9790 + }, + { + "epoch": 0.3783929881462605, + "grad_norm": 0.8793076872825623, + "learning_rate": 0.00017477637489220947, + "loss": 0.3883, + "step": 9800 + }, + { + "epoch": 0.37877910344028726, + "grad_norm": 1.71831214427948, + "learning_rate": 0.00017475063387260772, + "loss": 0.2473, + "step": 9810 + }, + { + "epoch": 0.3791652187343141, + "grad_norm": 3.4802069664001465, + "learning_rate": 0.0001747248928530059, + "loss": 0.4847, + "step": 9820 + }, + { + "epoch": 0.37955133402834085, + "grad_norm": 5.419053077697754, + "learning_rate": 0.00017469915183340414, + "loss": 0.3668, + "step": 9830 + }, + { + "epoch": 0.37993744932236767, + "grad_norm": 1.567060112953186, + "learning_rate": 0.00017467341081380236, + "loss": 0.3342, + "step": 9840 + }, + { + "epoch": 0.38032356461639444, + "grad_norm": 3.0100274085998535, + "learning_rate": 0.00017464766979420054, + "loss": 0.476, + "step": 9850 + }, + { + "epoch": 0.38070967991042126, + "grad_norm": 0.7659344673156738, + "learning_rate": 0.00017462192877459878, + "loss": 0.2608, + "step": 9860 + }, + { + "epoch": 0.38109579520444803, + "grad_norm": 3.9540984630584717, + "learning_rate": 0.00017459618775499697, + "loss": 0.763, + "step": 9870 + }, + { + "epoch": 0.38148191049847485, + "grad_norm": 0.8768689036369324, + "learning_rate": 0.0001745704467353952, + "loss": 0.3365, + "step": 9880 + }, + { + "epoch": 0.3818680257925016, + "grad_norm": 0.9985928535461426, + "learning_rate": 0.00017454470571579342, + "loss": 0.3116, + "step": 9890 + }, + { + "epoch": 0.38225414108652844, + "grad_norm": 2.0326671600341797, + "learning_rate": 0.00017451896469619163, + "loss": 0.289, + 
"step": 9900 + }, + { + "epoch": 0.3826402563805552, + "grad_norm": 3.2696290016174316, + "learning_rate": 0.00017449322367658985, + "loss": 0.4097, + "step": 9910 + }, + { + "epoch": 0.38302637167458203, + "grad_norm": 3.048860788345337, + "learning_rate": 0.00017446748265698803, + "loss": 0.5181, + "step": 9920 + }, + { + "epoch": 0.3834124869686088, + "grad_norm": 1.7899913787841797, + "learning_rate": 0.00017444174163738627, + "loss": 0.2166, + "step": 9930 + }, + { + "epoch": 0.3837986022626356, + "grad_norm": 3.6762959957122803, + "learning_rate": 0.00017441600061778446, + "loss": 0.4971, + "step": 9940 + }, + { + "epoch": 0.3841847175566624, + "grad_norm": 0.9108519554138184, + "learning_rate": 0.0001743902595981827, + "loss": 0.4974, + "step": 9950 + }, + { + "epoch": 0.3845708328506892, + "grad_norm": 4.062527656555176, + "learning_rate": 0.00017436451857858091, + "loss": 0.4448, + "step": 9960 + }, + { + "epoch": 0.38495694814471604, + "grad_norm": 3.230902671813965, + "learning_rate": 0.00017433877755897913, + "loss": 0.2977, + "step": 9970 + }, + { + "epoch": 0.3853430634387428, + "grad_norm": 3.8190758228302, + "learning_rate": 0.00017431303653937734, + "loss": 0.4887, + "step": 9980 + }, + { + "epoch": 0.38572917873276963, + "grad_norm": 0.9079695343971252, + "learning_rate": 0.00017428729551977553, + "loss": 0.271, + "step": 9990 + }, + { + "epoch": 0.3861152940267964, + "grad_norm": 3.3730807304382324, + "learning_rate": 0.00017426155450017377, + "loss": 0.3782, + "step": 10000 + }, + { + "epoch": 0.3865014093208232, + "grad_norm": 1.07533860206604, + "learning_rate": 0.00017423581348057195, + "loss": 0.3905, + "step": 10010 + }, + { + "epoch": 0.38688752461485, + "grad_norm": 1.3856415748596191, + "learning_rate": 0.0001742100724609702, + "loss": 0.3757, + "step": 10020 + }, + { + "epoch": 0.3872736399088768, + "grad_norm": 5.751671314239502, + "learning_rate": 0.0001741843314413684, + "loss": 0.6657, + "step": 10030 + }, + { + "epoch": 0.3876597552029036, + "grad_norm": 0.6837680339813232, + "learning_rate": 0.00017415859042176662, + "loss": 0.2318, + "step": 10040 + }, + { + "epoch": 0.3880458704969304, + "grad_norm": 2.770787239074707, + "learning_rate": 0.00017413284940216483, + "loss": 0.3706, + "step": 10050 + }, + { + "epoch": 0.38843198579095717, + "grad_norm": 2.3058855533599854, + "learning_rate": 0.00017410710838256302, + "loss": 0.1641, + "step": 10060 + }, + { + "epoch": 0.388818101084984, + "grad_norm": 1.894718885421753, + "learning_rate": 0.00017408136736296126, + "loss": 0.4752, + "step": 10070 + }, + { + "epoch": 0.38920421637901076, + "grad_norm": 1.8346868753433228, + "learning_rate": 0.00017405562634335947, + "loss": 0.5007, + "step": 10080 + }, + { + "epoch": 0.3895903316730376, + "grad_norm": 5.277680397033691, + "learning_rate": 0.0001740298853237577, + "loss": 0.4399, + "step": 10090 + }, + { + "epoch": 0.38997644696706435, + "grad_norm": 1.306093692779541, + "learning_rate": 0.0001740041443041559, + "loss": 0.371, + "step": 10100 + }, + { + "epoch": 0.3903625622610912, + "grad_norm": 3.0306456089019775, + "learning_rate": 0.00017397840328455411, + "loss": 0.2515, + "step": 10110 + }, + { + "epoch": 0.39074867755511794, + "grad_norm": 0.7951543927192688, + "learning_rate": 0.00017395266226495233, + "loss": 0.3775, + "step": 10120 + }, + { + "epoch": 0.39113479284914476, + "grad_norm": 5.185150146484375, + "learning_rate": 0.00017392692124535051, + "loss": 0.3591, + "step": 10130 + }, + { + "epoch": 0.39152090814317153, + "grad_norm": 
1.1718593835830688, + "learning_rate": 0.00017390118022574875, + "loss": 0.5484, + "step": 10140 + }, + { + "epoch": 0.39190702343719835, + "grad_norm": 1.6352128982543945, + "learning_rate": 0.00017387543920614697, + "loss": 0.2817, + "step": 10150 + }, + { + "epoch": 0.3922931387312251, + "grad_norm": 2.4863786697387695, + "learning_rate": 0.00017384969818654518, + "loss": 0.4027, + "step": 10160 + }, + { + "epoch": 0.39267925402525194, + "grad_norm": 2.069805383682251, + "learning_rate": 0.0001738239571669434, + "loss": 0.3559, + "step": 10170 + }, + { + "epoch": 0.3930653693192787, + "grad_norm": 1.671980619430542, + "learning_rate": 0.0001737982161473416, + "loss": 0.4405, + "step": 10180 + }, + { + "epoch": 0.39345148461330554, + "grad_norm": 4.298947334289551, + "learning_rate": 0.00017377247512773982, + "loss": 0.3005, + "step": 10190 + }, + { + "epoch": 0.3938375999073323, + "grad_norm": 0.4142851233482361, + "learning_rate": 0.000173746734108138, + "loss": 0.4248, + "step": 10200 + }, + { + "epoch": 0.3942237152013591, + "grad_norm": 3.5962865352630615, + "learning_rate": 0.00017372099308853625, + "loss": 0.27, + "step": 10210 + }, + { + "epoch": 0.39460983049538595, + "grad_norm": 2.20154070854187, + "learning_rate": 0.00017369525206893446, + "loss": 0.2858, + "step": 10220 + }, + { + "epoch": 0.3949959457894127, + "grad_norm": 0.2400553673505783, + "learning_rate": 0.00017366951104933267, + "loss": 0.2806, + "step": 10230 + }, + { + "epoch": 0.39538206108343954, + "grad_norm": 1.817741870880127, + "learning_rate": 0.0001736437700297309, + "loss": 0.3647, + "step": 10240 + }, + { + "epoch": 0.3957681763774663, + "grad_norm": 4.890044689178467, + "learning_rate": 0.0001736180290101291, + "loss": 0.4435, + "step": 10250 + }, + { + "epoch": 0.39615429167149313, + "grad_norm": 0.3407624065876007, + "learning_rate": 0.00017359228799052731, + "loss": 0.4857, + "step": 10260 + }, + { + "epoch": 0.3965404069655199, + "grad_norm": 2.4883463382720947, + "learning_rate": 0.00017356654697092553, + "loss": 0.2667, + "step": 10270 + }, + { + "epoch": 0.3969265222595467, + "grad_norm": 2.343823194503784, + "learning_rate": 0.00017354080595132374, + "loss": 0.3711, + "step": 10280 + }, + { + "epoch": 0.3973126375535735, + "grad_norm": 0.2056214064359665, + "learning_rate": 0.00017351506493172195, + "loss": 0.2695, + "step": 10290 + }, + { + "epoch": 0.3976987528476003, + "grad_norm": 0.20321065187454224, + "learning_rate": 0.00017348932391212017, + "loss": 0.3079, + "step": 10300 + }, + { + "epoch": 0.3980848681416271, + "grad_norm": 0.7993821501731873, + "learning_rate": 0.00017346358289251838, + "loss": 0.3599, + "step": 10310 + }, + { + "epoch": 0.3984709834356539, + "grad_norm": 2.0987348556518555, + "learning_rate": 0.0001734378418729166, + "loss": 0.3259, + "step": 10320 + }, + { + "epoch": 0.39885709872968067, + "grad_norm": 2.474246025085449, + "learning_rate": 0.0001734121008533148, + "loss": 0.3398, + "step": 10330 + }, + { + "epoch": 0.3992432140237075, + "grad_norm": 2.341064214706421, + "learning_rate": 0.00017338635983371302, + "loss": 0.5264, + "step": 10340 + }, + { + "epoch": 0.39962932931773426, + "grad_norm": 1.587437629699707, + "learning_rate": 0.00017336061881411123, + "loss": 0.4228, + "step": 10350 + }, + { + "epoch": 0.4000154446117611, + "grad_norm": 0.6692029237747192, + "learning_rate": 0.00017333487779450945, + "loss": 0.3576, + "step": 10360 + }, + { + "epoch": 0.40040155990578785, + "grad_norm": 2.088212251663208, + "learning_rate": 0.00017330913677490766, + 
"loss": 0.3096, + "step": 10370 + }, + { + "epoch": 0.4007876751998147, + "grad_norm": 1.5051954984664917, + "learning_rate": 0.00017328339575530587, + "loss": 0.3753, + "step": 10380 + }, + { + "epoch": 0.40117379049384144, + "grad_norm": 2.02595591545105, + "learning_rate": 0.0001732576547357041, + "loss": 0.3339, + "step": 10390 + }, + { + "epoch": 0.40155990578786827, + "grad_norm": 1.3062909841537476, + "learning_rate": 0.0001732319137161023, + "loss": 0.4301, + "step": 10400 + }, + { + "epoch": 0.40194602108189503, + "grad_norm": 2.5890421867370605, + "learning_rate": 0.00017320617269650051, + "loss": 0.3047, + "step": 10410 + }, + { + "epoch": 0.40233213637592186, + "grad_norm": 1.5994844436645508, + "learning_rate": 0.00017318043167689873, + "loss": 0.4158, + "step": 10420 + }, + { + "epoch": 0.4027182516699486, + "grad_norm": 0.5470211505889893, + "learning_rate": 0.00017315469065729694, + "loss": 0.4513, + "step": 10430 + }, + { + "epoch": 0.40310436696397545, + "grad_norm": 2.216935634613037, + "learning_rate": 0.00017312894963769515, + "loss": 0.5123, + "step": 10440 + }, + { + "epoch": 0.4034904822580022, + "grad_norm": 2.354724645614624, + "learning_rate": 0.00017310320861809337, + "loss": 0.2804, + "step": 10450 + }, + { + "epoch": 0.40387659755202904, + "grad_norm": 4.514159202575684, + "learning_rate": 0.00017307746759849158, + "loss": 0.3317, + "step": 10460 + }, + { + "epoch": 0.4042627128460558, + "grad_norm": 0.9874318242073059, + "learning_rate": 0.0001730517265788898, + "loss": 0.1948, + "step": 10470 + }, + { + "epoch": 0.40464882814008263, + "grad_norm": 2.0725696086883545, + "learning_rate": 0.000173025985559288, + "loss": 0.3627, + "step": 10480 + }, + { + "epoch": 0.40503494343410945, + "grad_norm": 2.4061577320098877, + "learning_rate": 0.00017300024453968622, + "loss": 0.3074, + "step": 10490 + }, + { + "epoch": 0.4054210587281362, + "grad_norm": 1.3369660377502441, + "learning_rate": 0.00017297450352008443, + "loss": 0.533, + "step": 10500 + }, + { + "epoch": 0.40580717402216304, + "grad_norm": 1.2730306386947632, + "learning_rate": 0.00017294876250048265, + "loss": 0.4688, + "step": 10510 + }, + { + "epoch": 0.4061932893161898, + "grad_norm": 0.6753021478652954, + "learning_rate": 0.00017292302148088086, + "loss": 0.4427, + "step": 10520 + }, + { + "epoch": 0.40657940461021663, + "grad_norm": 1.7279945611953735, + "learning_rate": 0.0001728972804612791, + "loss": 0.4921, + "step": 10530 + }, + { + "epoch": 0.4069655199042434, + "grad_norm": 0.9288708567619324, + "learning_rate": 0.0001728715394416773, + "loss": 0.363, + "step": 10540 + }, + { + "epoch": 0.4073516351982702, + "grad_norm": 0.5325084924697876, + "learning_rate": 0.0001728457984220755, + "loss": 0.4095, + "step": 10550 + }, + { + "epoch": 0.407737750492297, + "grad_norm": 1.2030489444732666, + "learning_rate": 0.0001728200574024737, + "loss": 0.3499, + "step": 10560 + }, + { + "epoch": 0.4081238657863238, + "grad_norm": 3.8157269954681396, + "learning_rate": 0.00017279431638287193, + "loss": 0.1622, + "step": 10570 + }, + { + "epoch": 0.4085099810803506, + "grad_norm": 0.6373336911201477, + "learning_rate": 0.00017276857536327017, + "loss": 0.4657, + "step": 10580 + }, + { + "epoch": 0.4088960963743774, + "grad_norm": 2.2850074768066406, + "learning_rate": 0.00017274283434366835, + "loss": 0.3585, + "step": 10590 + }, + { + "epoch": 0.40928221166840417, + "grad_norm": 0.8831659555435181, + "learning_rate": 0.0001727170933240666, + "loss": 0.293, + "step": 10600 + }, + { + "epoch": 
0.409668326962431, + "grad_norm": 5.1165995597839355, + "learning_rate": 0.00017269135230446478, + "loss": 0.6539, + "step": 10610 + }, + { + "epoch": 0.41005444225645776, + "grad_norm": 4.901204586029053, + "learning_rate": 0.000172665611284863, + "loss": 0.4628, + "step": 10620 + }, + { + "epoch": 0.4104405575504846, + "grad_norm": 2.1492419242858887, + "learning_rate": 0.0001726398702652612, + "loss": 0.277, + "step": 10630 + }, + { + "epoch": 0.41082667284451135, + "grad_norm": 3.56510853767395, + "learning_rate": 0.00017261412924565942, + "loss": 0.4696, + "step": 10640 + }, + { + "epoch": 0.4112127881385382, + "grad_norm": 2.054769992828369, + "learning_rate": 0.00017258838822605766, + "loss": 0.4093, + "step": 10650 + }, + { + "epoch": 0.41159890343256494, + "grad_norm": 2.133474826812744, + "learning_rate": 0.00017256264720645585, + "loss": 0.3604, + "step": 10660 + }, + { + "epoch": 0.41198501872659177, + "grad_norm": 2.5062367916107178, + "learning_rate": 0.0001725369061868541, + "loss": 0.3916, + "step": 10670 + }, + { + "epoch": 0.41237113402061853, + "grad_norm": 0.431570827960968, + "learning_rate": 0.00017251116516725227, + "loss": 0.4048, + "step": 10680 + }, + { + "epoch": 0.41275724931464536, + "grad_norm": 1.2092580795288086, + "learning_rate": 0.0001724854241476505, + "loss": 0.602, + "step": 10690 + }, + { + "epoch": 0.4131433646086721, + "grad_norm": 2.712398052215576, + "learning_rate": 0.00017245968312804873, + "loss": 0.4172, + "step": 10700 + }, + { + "epoch": 0.41352947990269895, + "grad_norm": 3.914670467376709, + "learning_rate": 0.0001724339421084469, + "loss": 0.3843, + "step": 10710 + }, + { + "epoch": 0.4139155951967257, + "grad_norm": 1.7062132358551025, + "learning_rate": 0.00017240820108884515, + "loss": 0.343, + "step": 10720 + }, + { + "epoch": 0.41430171049075254, + "grad_norm": 0.5837095379829407, + "learning_rate": 0.00017238246006924334, + "loss": 0.3872, + "step": 10730 + }, + { + "epoch": 0.41468782578477936, + "grad_norm": 1.098900556564331, + "learning_rate": 0.00017235671904964158, + "loss": 0.2062, + "step": 10740 + }, + { + "epoch": 0.41507394107880613, + "grad_norm": 1.2533438205718994, + "learning_rate": 0.00017233097803003977, + "loss": 0.141, + "step": 10750 + }, + { + "epoch": 0.41546005637283295, + "grad_norm": 0.8688085079193115, + "learning_rate": 0.00017230523701043798, + "loss": 0.3686, + "step": 10760 + }, + { + "epoch": 0.4158461716668597, + "grad_norm": 1.868402361869812, + "learning_rate": 0.00017227949599083622, + "loss": 0.449, + "step": 10770 + }, + { + "epoch": 0.41623228696088654, + "grad_norm": 0.7168850898742676, + "learning_rate": 0.0001722537549712344, + "loss": 0.2317, + "step": 10780 + }, + { + "epoch": 0.4166184022549133, + "grad_norm": 3.1062309741973877, + "learning_rate": 0.00017222801395163265, + "loss": 0.4655, + "step": 10790 + }, + { + "epoch": 0.41700451754894013, + "grad_norm": 2.7296605110168457, + "learning_rate": 0.00017220227293203083, + "loss": 0.3934, + "step": 10800 + }, + { + "epoch": 0.4173906328429669, + "grad_norm": 2.3148224353790283, + "learning_rate": 0.00017217653191242907, + "loss": 0.2367, + "step": 10810 + }, + { + "epoch": 0.4177767481369937, + "grad_norm": 0.7049677968025208, + "learning_rate": 0.00017215079089282726, + "loss": 0.3157, + "step": 10820 + }, + { + "epoch": 0.4181628634310205, + "grad_norm": 3.3960344791412354, + "learning_rate": 0.00017212504987322547, + "loss": 0.4945, + "step": 10830 + }, + { + "epoch": 0.4185489787250473, + "grad_norm": 2.606316566467285, + 
"learning_rate": 0.0001720993088536237, + "loss": 0.4056, + "step": 10840 + }, + { + "epoch": 0.4189350940190741, + "grad_norm": 1.7469319105148315, + "learning_rate": 0.0001720735678340219, + "loss": 0.4176, + "step": 10850 + }, + { + "epoch": 0.4193212093131009, + "grad_norm": 0.8538552522659302, + "learning_rate": 0.00017204782681442014, + "loss": 0.3025, + "step": 10860 + }, + { + "epoch": 0.4197073246071277, + "grad_norm": 1.9576159715652466, + "learning_rate": 0.00017202208579481833, + "loss": 0.5626, + "step": 10870 + }, + { + "epoch": 0.4200934399011545, + "grad_norm": 0.8435356616973877, + "learning_rate": 0.00017199634477521657, + "loss": 0.2397, + "step": 10880 + }, + { + "epoch": 0.42047955519518126, + "grad_norm": 1.3026552200317383, + "learning_rate": 0.00017197060375561478, + "loss": 0.4793, + "step": 10890 + }, + { + "epoch": 0.4208656704892081, + "grad_norm": 1.8935116529464722, + "learning_rate": 0.00017194486273601297, + "loss": 0.2459, + "step": 10900 + }, + { + "epoch": 0.42125178578323486, + "grad_norm": 0.7297415137290955, + "learning_rate": 0.0001719191217164112, + "loss": 0.4115, + "step": 10910 + }, + { + "epoch": 0.4216379010772617, + "grad_norm": 2.730445146560669, + "learning_rate": 0.0001718933806968094, + "loss": 0.3467, + "step": 10920 + }, + { + "epoch": 0.42202401637128845, + "grad_norm": 1.5462249517440796, + "learning_rate": 0.00017186763967720763, + "loss": 0.2319, + "step": 10930 + }, + { + "epoch": 0.42241013166531527, + "grad_norm": 2.173388957977295, + "learning_rate": 0.00017184189865760582, + "loss": 0.3664, + "step": 10940 + }, + { + "epoch": 0.42279624695934204, + "grad_norm": 0.9086957573890686, + "learning_rate": 0.00017181615763800406, + "loss": 0.3928, + "step": 10950 + }, + { + "epoch": 0.42318236225336886, + "grad_norm": 1.6344754695892334, + "learning_rate": 0.00017179041661840227, + "loss": 0.32, + "step": 10960 + }, + { + "epoch": 0.4235684775473956, + "grad_norm": 3.7620887756347656, + "learning_rate": 0.00017176467559880049, + "loss": 0.3998, + "step": 10970 + }, + { + "epoch": 0.42395459284142245, + "grad_norm": 2.3914058208465576, + "learning_rate": 0.0001717389345791987, + "loss": 0.3003, + "step": 10980 + }, + { + "epoch": 0.4243407081354492, + "grad_norm": 1.1183325052261353, + "learning_rate": 0.00017171319355959689, + "loss": 0.2408, + "step": 10990 + }, + { + "epoch": 0.42472682342947604, + "grad_norm": 1.5570834875106812, + "learning_rate": 0.00017168745253999513, + "loss": 0.2638, + "step": 11000 + }, + { + "epoch": 0.42511293872350286, + "grad_norm": 1.4825866222381592, + "learning_rate": 0.0001716617115203933, + "loss": 0.2774, + "step": 11010 + }, + { + "epoch": 0.42549905401752963, + "grad_norm": 1.5424071550369263, + "learning_rate": 0.00017163597050079155, + "loss": 0.3636, + "step": 11020 + }, + { + "epoch": 0.42588516931155646, + "grad_norm": 3.9182989597320557, + "learning_rate": 0.00017161022948118977, + "loss": 0.5319, + "step": 11030 + }, + { + "epoch": 0.4262712846055832, + "grad_norm": 3.7870359420776367, + "learning_rate": 0.00017158448846158798, + "loss": 0.316, + "step": 11040 + }, + { + "epoch": 0.42665739989961005, + "grad_norm": 3.6943869590759277, + "learning_rate": 0.0001715587474419862, + "loss": 0.4336, + "step": 11050 + }, + { + "epoch": 0.4270435151936368, + "grad_norm": 1.1681898832321167, + "learning_rate": 0.00017153300642238438, + "loss": 0.328, + "step": 11060 + }, + { + "epoch": 0.42742963048766364, + "grad_norm": 3.6428277492523193, + "learning_rate": 0.00017150726540278262, + "loss": 0.2609, 
+ "step": 11070 + }, + { + "epoch": 0.4278157457816904, + "grad_norm": 0.8946434259414673, + "learning_rate": 0.00017148152438318083, + "loss": 0.5445, + "step": 11080 + }, + { + "epoch": 0.4282018610757172, + "grad_norm": 1.8038333654403687, + "learning_rate": 0.00017145578336357905, + "loss": 0.3548, + "step": 11090 + }, + { + "epoch": 0.428587976369744, + "grad_norm": 2.3430778980255127, + "learning_rate": 0.00017143004234397726, + "loss": 0.4831, + "step": 11100 + }, + { + "epoch": 0.4289740916637708, + "grad_norm": 1.3243132829666138, + "learning_rate": 0.00017140430132437547, + "loss": 0.5759, + "step": 11110 + }, + { + "epoch": 0.4293602069577976, + "grad_norm": 1.2575668096542358, + "learning_rate": 0.00017137856030477369, + "loss": 0.3289, + "step": 11120 + }, + { + "epoch": 0.4297463222518244, + "grad_norm": 1.5581884384155273, + "learning_rate": 0.00017135281928517187, + "loss": 0.309, + "step": 11130 + }, + { + "epoch": 0.4301324375458512, + "grad_norm": 2.736063241958618, + "learning_rate": 0.0001713270782655701, + "loss": 0.3964, + "step": 11140 + }, + { + "epoch": 0.430518552839878, + "grad_norm": 1.3930561542510986, + "learning_rate": 0.00017130133724596833, + "loss": 0.2277, + "step": 11150 + }, + { + "epoch": 0.43090466813390477, + "grad_norm": 1.7932826280593872, + "learning_rate": 0.00017127559622636654, + "loss": 0.2907, + "step": 11160 + }, + { + "epoch": 0.4312907834279316, + "grad_norm": 1.7017295360565186, + "learning_rate": 0.00017124985520676475, + "loss": 0.2986, + "step": 11170 + }, + { + "epoch": 0.43167689872195836, + "grad_norm": 1.3543587923049927, + "learning_rate": 0.00017122411418716297, + "loss": 0.4872, + "step": 11180 + }, + { + "epoch": 0.4320630140159852, + "grad_norm": 2.3927829265594482, + "learning_rate": 0.00017119837316756118, + "loss": 0.5281, + "step": 11190 + }, + { + "epoch": 0.43244912931001195, + "grad_norm": 2.916257619857788, + "learning_rate": 0.0001711726321479594, + "loss": 0.5364, + "step": 11200 + }, + { + "epoch": 0.43283524460403877, + "grad_norm": 4.717250823974609, + "learning_rate": 0.0001711468911283576, + "loss": 0.3744, + "step": 11210 + }, + { + "epoch": 0.43322135989806554, + "grad_norm": 1.806577444076538, + "learning_rate": 0.00017112115010875582, + "loss": 0.3377, + "step": 11220 + }, + { + "epoch": 0.43360747519209236, + "grad_norm": 3.186603546142578, + "learning_rate": 0.00017109540908915403, + "loss": 0.3421, + "step": 11230 + }, + { + "epoch": 0.43399359048611913, + "grad_norm": 5.388319969177246, + "learning_rate": 0.00017106966806955225, + "loss": 0.3589, + "step": 11240 + }, + { + "epoch": 0.43437970578014595, + "grad_norm": 1.368312954902649, + "learning_rate": 0.00017104392704995046, + "loss": 0.2677, + "step": 11250 + }, + { + "epoch": 0.4347658210741727, + "grad_norm": 0.9010117053985596, + "learning_rate": 0.00017101818603034867, + "loss": 0.3411, + "step": 11260 + }, + { + "epoch": 0.43515193636819954, + "grad_norm": 0.46370139718055725, + "learning_rate": 0.00017099244501074689, + "loss": 0.3531, + "step": 11270 + }, + { + "epoch": 0.43553805166222637, + "grad_norm": 2.778857469558716, + "learning_rate": 0.0001709667039911451, + "loss": 0.3953, + "step": 11280 + }, + { + "epoch": 0.43592416695625313, + "grad_norm": 0.45829036831855774, + "learning_rate": 0.0001709409629715433, + "loss": 0.3117, + "step": 11290 + }, + { + "epoch": 0.43631028225027996, + "grad_norm": 2.2053589820861816, + "learning_rate": 0.00017091522195194153, + "loss": 0.5104, + "step": 11300 + }, + { + "epoch": 0.4366963975443067, + 
"grad_norm": 5.166933059692383, + "learning_rate": 0.00017088948093233974, + "loss": 0.6913, + "step": 11310 + }, + { + "epoch": 0.43708251283833355, + "grad_norm": 1.5593189001083374, + "learning_rate": 0.00017086373991273795, + "loss": 0.305, + "step": 11320 + }, + { + "epoch": 0.4374686281323603, + "grad_norm": 2.01481556892395, + "learning_rate": 0.00017083799889313617, + "loss": 0.4576, + "step": 11330 + }, + { + "epoch": 0.43785474342638714, + "grad_norm": 2.200463056564331, + "learning_rate": 0.00017081225787353438, + "loss": 0.6491, + "step": 11340 + }, + { + "epoch": 0.4382408587204139, + "grad_norm": 0.8904009461402893, + "learning_rate": 0.0001707865168539326, + "loss": 0.134, + "step": 11350 + }, + { + "epoch": 0.43862697401444073, + "grad_norm": 0.7481307983398438, + "learning_rate": 0.0001707607758343308, + "loss": 0.5552, + "step": 11360 + }, + { + "epoch": 0.4390130893084675, + "grad_norm": 1.0893138647079468, + "learning_rate": 0.00017073503481472902, + "loss": 0.2369, + "step": 11370 + }, + { + "epoch": 0.4393992046024943, + "grad_norm": 3.3567726612091064, + "learning_rate": 0.00017070929379512723, + "loss": 0.1916, + "step": 11380 + }, + { + "epoch": 0.4397853198965211, + "grad_norm": 0.5970168709754944, + "learning_rate": 0.00017068355277552545, + "loss": 0.6096, + "step": 11390 + }, + { + "epoch": 0.4401714351905479, + "grad_norm": 2.880949020385742, + "learning_rate": 0.00017065781175592366, + "loss": 0.3566, + "step": 11400 + }, + { + "epoch": 0.4405575504845747, + "grad_norm": 0.5541375279426575, + "learning_rate": 0.00017063207073632187, + "loss": 0.4147, + "step": 11410 + }, + { + "epoch": 0.4409436657786015, + "grad_norm": 4.519477844238281, + "learning_rate": 0.00017060632971672009, + "loss": 0.4912, + "step": 11420 + }, + { + "epoch": 0.44132978107262827, + "grad_norm": 1.9959009885787964, + "learning_rate": 0.0001705805886971183, + "loss": 0.295, + "step": 11430 + }, + { + "epoch": 0.4417158963666551, + "grad_norm": 3.843033790588379, + "learning_rate": 0.0001705548476775165, + "loss": 0.2451, + "step": 11440 + }, + { + "epoch": 0.44210201166068186, + "grad_norm": 1.0480101108551025, + "learning_rate": 0.00017052910665791473, + "loss": 0.3113, + "step": 11450 + }, + { + "epoch": 0.4424881269547087, + "grad_norm": 2.0960068702697754, + "learning_rate": 0.00017050336563831294, + "loss": 0.5959, + "step": 11460 + }, + { + "epoch": 0.44287424224873545, + "grad_norm": 1.3062267303466797, + "learning_rate": 0.00017047762461871115, + "loss": 0.3812, + "step": 11470 + }, + { + "epoch": 0.4432603575427623, + "grad_norm": 2.2563138008117676, + "learning_rate": 0.00017045188359910937, + "loss": 0.303, + "step": 11480 + }, + { + "epoch": 0.44364647283678904, + "grad_norm": 1.518556833267212, + "learning_rate": 0.00017042614257950758, + "loss": 0.4349, + "step": 11490 + }, + { + "epoch": 0.44403258813081586, + "grad_norm": 5.704294681549072, + "learning_rate": 0.0001704004015599058, + "loss": 0.3162, + "step": 11500 + }, + { + "epoch": 0.44441870342484263, + "grad_norm": 3.3146274089813232, + "learning_rate": 0.000170374660540304, + "loss": 0.644, + "step": 11510 + }, + { + "epoch": 0.44480481871886945, + "grad_norm": 2.285374879837036, + "learning_rate": 0.00017034891952070222, + "loss": 0.3718, + "step": 11520 + }, + { + "epoch": 0.4451909340128963, + "grad_norm": 0.07299748063087463, + "learning_rate": 0.00017032317850110043, + "loss": 0.3093, + "step": 11530 + }, + { + "epoch": 0.44557704930692305, + "grad_norm": 4.159457683563232, + "learning_rate": 
0.00017029743748149865, + "loss": 0.4074, + "step": 11540 + }, + { + "epoch": 0.44596316460094987, + "grad_norm": 2.2241604328155518, + "learning_rate": 0.00017027169646189686, + "loss": 0.2908, + "step": 11550 + }, + { + "epoch": 0.44634927989497664, + "grad_norm": 2.2086968421936035, + "learning_rate": 0.00017024595544229507, + "loss": 0.3352, + "step": 11560 + }, + { + "epoch": 0.44673539518900346, + "grad_norm": 0.2843379080295563, + "learning_rate": 0.00017022021442269329, + "loss": 0.2548, + "step": 11570 + }, + { + "epoch": 0.4471215104830302, + "grad_norm": 0.9805948734283447, + "learning_rate": 0.00017019447340309153, + "loss": 0.4134, + "step": 11580 + }, + { + "epoch": 0.44750762577705705, + "grad_norm": 2.3669955730438232, + "learning_rate": 0.0001701687323834897, + "loss": 0.3803, + "step": 11590 + }, + { + "epoch": 0.4478937410710838, + "grad_norm": 2.4062774181365967, + "learning_rate": 0.00017014299136388792, + "loss": 0.4391, + "step": 11600 + }, + { + "epoch": 0.44827985636511064, + "grad_norm": 0.4205828607082367, + "learning_rate": 0.00017011725034428614, + "loss": 0.2793, + "step": 11610 + }, + { + "epoch": 0.4486659716591374, + "grad_norm": 1.3258132934570312, + "learning_rate": 0.00017009150932468435, + "loss": 0.4219, + "step": 11620 + }, + { + "epoch": 0.44905208695316423, + "grad_norm": 3.250332832336426, + "learning_rate": 0.00017006576830508256, + "loss": 0.3638, + "step": 11630 + }, + { + "epoch": 0.449438202247191, + "grad_norm": 1.9546891450881958, + "learning_rate": 0.00017004002728548078, + "loss": 0.5473, + "step": 11640 + }, + { + "epoch": 0.4498243175412178, + "grad_norm": 0.6501532793045044, + "learning_rate": 0.00017001428626587902, + "loss": 0.197, + "step": 11650 + }, + { + "epoch": 0.4502104328352446, + "grad_norm": 0.7169322967529297, + "learning_rate": 0.0001699885452462772, + "loss": 0.2846, + "step": 11660 + }, + { + "epoch": 0.4505965481292714, + "grad_norm": 1.4476008415222168, + "learning_rate": 0.00016996280422667545, + "loss": 0.3601, + "step": 11670 + }, + { + "epoch": 0.4509826634232982, + "grad_norm": 1.562485933303833, + "learning_rate": 0.00016993706320707363, + "loss": 0.2876, + "step": 11680 + }, + { + "epoch": 0.451368778717325, + "grad_norm": 4.490612983703613, + "learning_rate": 0.00016991132218747184, + "loss": 0.5445, + "step": 11690 + }, + { + "epoch": 0.45175489401135177, + "grad_norm": 0.9045882225036621, + "learning_rate": 0.00016988558116787009, + "loss": 0.149, + "step": 11700 + }, + { + "epoch": 0.4521410093053786, + "grad_norm": 2.2920546531677246, + "learning_rate": 0.00016985984014826827, + "loss": 0.3922, + "step": 11710 + }, + { + "epoch": 0.45252712459940536, + "grad_norm": 4.459114074707031, + "learning_rate": 0.0001698340991286665, + "loss": 0.3386, + "step": 11720 + }, + { + "epoch": 0.4529132398934322, + "grad_norm": 3.0614171028137207, + "learning_rate": 0.0001698083581090647, + "loss": 0.3618, + "step": 11730 + }, + { + "epoch": 0.45329935518745895, + "grad_norm": 1.0597162246704102, + "learning_rate": 0.00016978261708946294, + "loss": 0.2012, + "step": 11740 + }, + { + "epoch": 0.4536854704814858, + "grad_norm": 0.9142243266105652, + "learning_rate": 0.00016975687606986112, + "loss": 0.3716, + "step": 11750 + }, + { + "epoch": 0.45407158577551254, + "grad_norm": 1.7402280569076538, + "learning_rate": 0.00016973113505025934, + "loss": 0.3046, + "step": 11760 + }, + { + "epoch": 0.45445770106953937, + "grad_norm": 0.7389079928398132, + "learning_rate": 0.00016970539403065758, + "loss": 0.287, + "step": 11770 + 
}, + { + "epoch": 0.45484381636356613, + "grad_norm": 2.0596721172332764, + "learning_rate": 0.00016967965301105576, + "loss": 0.5124, + "step": 11780 + }, + { + "epoch": 0.45522993165759296, + "grad_norm": 0.26743578910827637, + "learning_rate": 0.000169653911991454, + "loss": 0.6725, + "step": 11790 + }, + { + "epoch": 0.4556160469516198, + "grad_norm": 2.812441825866699, + "learning_rate": 0.0001696281709718522, + "loss": 0.4545, + "step": 11800 + }, + { + "epoch": 0.45600216224564655, + "grad_norm": 2.9857542514801025, + "learning_rate": 0.00016960242995225043, + "loss": 0.2865, + "step": 11810 + }, + { + "epoch": 0.45638827753967337, + "grad_norm": 0.4560181200504303, + "learning_rate": 0.00016957668893264862, + "loss": 0.1623, + "step": 11820 + }, + { + "epoch": 0.45677439283370014, + "grad_norm": 2.2492222785949707, + "learning_rate": 0.00016955094791304683, + "loss": 0.2813, + "step": 11830 + }, + { + "epoch": 0.45716050812772696, + "grad_norm": 1.0125524997711182, + "learning_rate": 0.00016952520689344507, + "loss": 0.2224, + "step": 11840 + }, + { + "epoch": 0.45754662342175373, + "grad_norm": 0.4849410057067871, + "learning_rate": 0.00016949946587384326, + "loss": 0.4838, + "step": 11850 + }, + { + "epoch": 0.45793273871578055, + "grad_norm": 2.313490390777588, + "learning_rate": 0.0001694737248542415, + "loss": 0.3855, + "step": 11860 + }, + { + "epoch": 0.4583188540098073, + "grad_norm": 2.3526558876037598, + "learning_rate": 0.00016944798383463968, + "loss": 0.3081, + "step": 11870 + }, + { + "epoch": 0.45870496930383414, + "grad_norm": 0.5723626613616943, + "learning_rate": 0.00016942224281503792, + "loss": 0.3126, + "step": 11880 + }, + { + "epoch": 0.4590910845978609, + "grad_norm": 4.130553722381592, + "learning_rate": 0.00016939650179543614, + "loss": 0.2295, + "step": 11890 + }, + { + "epoch": 0.45947719989188773, + "grad_norm": 1.3537687063217163, + "learning_rate": 0.00016937076077583432, + "loss": 0.3488, + "step": 11900 + }, + { + "epoch": 0.4598633151859145, + "grad_norm": 1.843441128730774, + "learning_rate": 0.00016934501975623256, + "loss": 0.6298, + "step": 11910 + }, + { + "epoch": 0.4602494304799413, + "grad_norm": 4.685494422912598, + "learning_rate": 0.00016931927873663075, + "loss": 0.4929, + "step": 11920 + }, + { + "epoch": 0.4606355457739681, + "grad_norm": 1.5470925569534302, + "learning_rate": 0.000169293537717029, + "loss": 0.3214, + "step": 11930 + }, + { + "epoch": 0.4610216610679949, + "grad_norm": 2.0928761959075928, + "learning_rate": 0.00016926779669742718, + "loss": 0.317, + "step": 11940 + }, + { + "epoch": 0.4614077763620217, + "grad_norm": 1.0828526020050049, + "learning_rate": 0.00016924205567782542, + "loss": 0.4204, + "step": 11950 + }, + { + "epoch": 0.4617938916560485, + "grad_norm": 2.2898383140563965, + "learning_rate": 0.00016921631465822363, + "loss": 0.4149, + "step": 11960 + }, + { + "epoch": 0.4621800069500753, + "grad_norm": 1.1590880155563354, + "learning_rate": 0.00016919057363862182, + "loss": 0.2372, + "step": 11970 + }, + { + "epoch": 0.4625661222441021, + "grad_norm": 0.6889861226081848, + "learning_rate": 0.00016916483261902006, + "loss": 0.4905, + "step": 11980 + }, + { + "epoch": 0.46295223753812886, + "grad_norm": 0.7974410653114319, + "learning_rate": 0.00016913909159941824, + "loss": 0.3451, + "step": 11990 + }, + { + "epoch": 0.4633383528321557, + "grad_norm": 3.254754066467285, + "learning_rate": 0.00016911335057981648, + "loss": 0.4018, + "step": 12000 + }, + { + "epoch": 0.46372446812618245, + "grad_norm": 
2.660983085632324, + "learning_rate": 0.0001690876095602147, + "loss": 0.2112, + "step": 12010 + }, + { + "epoch": 0.4641105834202093, + "grad_norm": 1.1055381298065186, + "learning_rate": 0.0001690618685406129, + "loss": 0.2994, + "step": 12020 + }, + { + "epoch": 0.46449669871423604, + "grad_norm": 0.26787269115448, + "learning_rate": 0.00016903612752101112, + "loss": 0.2744, + "step": 12030 + }, + { + "epoch": 0.46488281400826287, + "grad_norm": 0.8832791447639465, + "learning_rate": 0.0001690103865014093, + "loss": 0.4357, + "step": 12040 + }, + { + "epoch": 0.4652689293022897, + "grad_norm": 1.0007046461105347, + "learning_rate": 0.00016898464548180755, + "loss": 0.2873, + "step": 12050 + }, + { + "epoch": 0.46565504459631646, + "grad_norm": 2.0366733074188232, + "learning_rate": 0.00016895890446220574, + "loss": 0.3016, + "step": 12060 + }, + { + "epoch": 0.4660411598903433, + "grad_norm": 2.557812213897705, + "learning_rate": 0.00016893316344260398, + "loss": 0.6375, + "step": 12070 + }, + { + "epoch": 0.46642727518437005, + "grad_norm": 1.9911783933639526, + "learning_rate": 0.0001689074224230022, + "loss": 0.5568, + "step": 12080 + }, + { + "epoch": 0.4668133904783969, + "grad_norm": 0.6905809044837952, + "learning_rate": 0.0001688816814034004, + "loss": 0.3237, + "step": 12090 + }, + { + "epoch": 0.46719950577242364, + "grad_norm": 2.3139610290527344, + "learning_rate": 0.00016885594038379862, + "loss": 0.3656, + "step": 12100 + }, + { + "epoch": 0.46758562106645046, + "grad_norm": 1.580295443534851, + "learning_rate": 0.0001688301993641968, + "loss": 0.3134, + "step": 12110 + }, + { + "epoch": 0.46797173636047723, + "grad_norm": 1.1115995645523071, + "learning_rate": 0.00016880445834459504, + "loss": 0.6167, + "step": 12120 + }, + { + "epoch": 0.46835785165450405, + "grad_norm": 3.0928075313568115, + "learning_rate": 0.00016877871732499323, + "loss": 0.3922, + "step": 12130 + }, + { + "epoch": 0.4687439669485308, + "grad_norm": 2.277163028717041, + "learning_rate": 0.00016875297630539147, + "loss": 0.2395, + "step": 12140 + }, + { + "epoch": 0.46913008224255764, + "grad_norm": 1.6578807830810547, + "learning_rate": 0.00016872723528578968, + "loss": 0.3502, + "step": 12150 + }, + { + "epoch": 0.4695161975365844, + "grad_norm": 2.9669971466064453, + "learning_rate": 0.0001687014942661879, + "loss": 0.3278, + "step": 12160 + }, + { + "epoch": 0.46990231283061124, + "grad_norm": 3.344914674758911, + "learning_rate": 0.0001686757532465861, + "loss": 0.4856, + "step": 12170 + }, + { + "epoch": 0.470288428124638, + "grad_norm": 1.512459635734558, + "learning_rate": 0.00016865001222698432, + "loss": 0.3113, + "step": 12180 + }, + { + "epoch": 0.4706745434186648, + "grad_norm": 1.8372429609298706, + "learning_rate": 0.00016862427120738254, + "loss": 0.3534, + "step": 12190 + }, + { + "epoch": 0.4710606587126916, + "grad_norm": 2.5016984939575195, + "learning_rate": 0.00016859853018778075, + "loss": 0.2129, + "step": 12200 + }, + { + "epoch": 0.4714467740067184, + "grad_norm": 2.083526134490967, + "learning_rate": 0.00016857278916817896, + "loss": 0.3011, + "step": 12210 + }, + { + "epoch": 0.4718328893007452, + "grad_norm": 3.3518013954162598, + "learning_rate": 0.00016854704814857718, + "loss": 0.2652, + "step": 12220 + }, + { + "epoch": 0.472219004594772, + "grad_norm": 0.34668633341789246, + "learning_rate": 0.0001685213071289754, + "loss": 0.3655, + "step": 12230 + }, + { + "epoch": 0.4726051198887988, + "grad_norm": 1.0674203634262085, + "learning_rate": 0.0001684955661093736, + 
"loss": 0.3217, + "step": 12240 + }, + { + "epoch": 0.4729912351828256, + "grad_norm": 2.9859087467193604, + "learning_rate": 0.00016846982508977182, + "loss": 0.447, + "step": 12250 + }, + { + "epoch": 0.47337735047685237, + "grad_norm": 0.33996835350990295, + "learning_rate": 0.00016844408407017003, + "loss": 0.1915, + "step": 12260 + }, + { + "epoch": 0.4737634657708792, + "grad_norm": 0.10328155755996704, + "learning_rate": 0.00016841834305056824, + "loss": 0.2588, + "step": 12270 + }, + { + "epoch": 0.47414958106490596, + "grad_norm": 2.0321199893951416, + "learning_rate": 0.00016839260203096646, + "loss": 0.4788, + "step": 12280 + }, + { + "epoch": 0.4745356963589328, + "grad_norm": 1.5690089464187622, + "learning_rate": 0.00016836686101136467, + "loss": 0.5198, + "step": 12290 + }, + { + "epoch": 0.47492181165295955, + "grad_norm": 1.989465355873108, + "learning_rate": 0.00016834111999176288, + "loss": 0.2554, + "step": 12300 + }, + { + "epoch": 0.47530792694698637, + "grad_norm": 1.574174404144287, + "learning_rate": 0.0001683153789721611, + "loss": 0.3703, + "step": 12310 + }, + { + "epoch": 0.4756940422410132, + "grad_norm": 2.726776599884033, + "learning_rate": 0.0001682896379525593, + "loss": 0.7426, + "step": 12320 + }, + { + "epoch": 0.47608015753503996, + "grad_norm": 0.3101334571838379, + "learning_rate": 0.00016826389693295752, + "loss": 0.1861, + "step": 12330 + }, + { + "epoch": 0.4764662728290668, + "grad_norm": 5.30327033996582, + "learning_rate": 0.00016823815591335574, + "loss": 0.4261, + "step": 12340 + }, + { + "epoch": 0.47685238812309355, + "grad_norm": 0.5553661584854126, + "learning_rate": 0.00016821241489375395, + "loss": 0.7326, + "step": 12350 + }, + { + "epoch": 0.4772385034171204, + "grad_norm": 2.2244138717651367, + "learning_rate": 0.00016818667387415216, + "loss": 0.5406, + "step": 12360 + }, + { + "epoch": 0.47762461871114714, + "grad_norm": 0.5314281582832336, + "learning_rate": 0.00016816093285455038, + "loss": 0.3765, + "step": 12370 + }, + { + "epoch": 0.47801073400517397, + "grad_norm": 3.644477128982544, + "learning_rate": 0.0001681351918349486, + "loss": 0.5519, + "step": 12380 + }, + { + "epoch": 0.47839684929920073, + "grad_norm": 1.0556128025054932, + "learning_rate": 0.0001681094508153468, + "loss": 0.2128, + "step": 12390 + }, + { + "epoch": 0.47878296459322756, + "grad_norm": 2.7807135581970215, + "learning_rate": 0.00016808370979574502, + "loss": 0.1564, + "step": 12400 + }, + { + "epoch": 0.4791690798872543, + "grad_norm": 0.7862847447395325, + "learning_rate": 0.00016805796877614323, + "loss": 0.35, + "step": 12410 + }, + { + "epoch": 0.47955519518128115, + "grad_norm": 2.7445156574249268, + "learning_rate": 0.00016803222775654144, + "loss": 0.4167, + "step": 12420 + }, + { + "epoch": 0.4799413104753079, + "grad_norm": 0.6173526644706726, + "learning_rate": 0.00016800648673693966, + "loss": 0.3669, + "step": 12430 + }, + { + "epoch": 0.48032742576933474, + "grad_norm": 0.3762228488922119, + "learning_rate": 0.00016798074571733787, + "loss": 0.179, + "step": 12440 + }, + { + "epoch": 0.4807135410633615, + "grad_norm": 1.2806499004364014, + "learning_rate": 0.00016795500469773608, + "loss": 0.3926, + "step": 12450 + }, + { + "epoch": 0.48109965635738833, + "grad_norm": 1.0579105615615845, + "learning_rate": 0.0001679292636781343, + "loss": 0.3329, + "step": 12460 + }, + { + "epoch": 0.4814857716514151, + "grad_norm": 1.6502699851989746, + "learning_rate": 0.0001679035226585325, + "loss": 0.3149, + "step": 12470 + }, + { + "epoch": 
0.4818718869454419, + "grad_norm": 0.8876189589500427, + "learning_rate": 0.00016787778163893072, + "loss": 0.3262, + "step": 12480 + }, + { + "epoch": 0.4822580022394687, + "grad_norm": 2.4398770332336426, + "learning_rate": 0.00016785204061932894, + "loss": 0.3834, + "step": 12490 + }, + { + "epoch": 0.4826441175334955, + "grad_norm": 1.275148868560791, + "learning_rate": 0.00016782629959972715, + "loss": 0.3749, + "step": 12500 + }, + { + "epoch": 0.4830302328275223, + "grad_norm": 2.1397783756256104, + "learning_rate": 0.0001678005585801254, + "loss": 0.4726, + "step": 12510 + }, + { + "epoch": 0.4834163481215491, + "grad_norm": 1.6431002616882324, + "learning_rate": 0.00016777481756052358, + "loss": 0.3537, + "step": 12520 + }, + { + "epoch": 0.48380246341557587, + "grad_norm": 1.1193108558654785, + "learning_rate": 0.0001677490765409218, + "loss": 0.3182, + "step": 12530 + }, + { + "epoch": 0.4841885787096027, + "grad_norm": 1.365897297859192, + "learning_rate": 0.00016772333552132, + "loss": 0.3455, + "step": 12540 + }, + { + "epoch": 0.48457469400362946, + "grad_norm": 0.9035172462463379, + "learning_rate": 0.00016769759450171822, + "loss": 0.2268, + "step": 12550 + }, + { + "epoch": 0.4849608092976563, + "grad_norm": 0.15492293238639832, + "learning_rate": 0.00016767185348211643, + "loss": 0.319, + "step": 12560 + }, + { + "epoch": 0.4853469245916831, + "grad_norm": 0.19394727051258087, + "learning_rate": 0.00016764611246251464, + "loss": 0.2073, + "step": 12570 + }, + { + "epoch": 0.48573303988570987, + "grad_norm": 1.4406816959381104, + "learning_rate": 0.00016762037144291288, + "loss": 0.4538, + "step": 12580 + }, + { + "epoch": 0.4861191551797367, + "grad_norm": 2.741548538208008, + "learning_rate": 0.00016759463042331107, + "loss": 0.536, + "step": 12590 + }, + { + "epoch": 0.48650527047376346, + "grad_norm": 0.5203917622566223, + "learning_rate": 0.00016756888940370928, + "loss": 0.2484, + "step": 12600 + }, + { + "epoch": 0.4868913857677903, + "grad_norm": 0.9666195511817932, + "learning_rate": 0.0001675431483841075, + "loss": 0.3834, + "step": 12610 + }, + { + "epoch": 0.48727750106181705, + "grad_norm": 1.7210304737091064, + "learning_rate": 0.0001675174073645057, + "loss": 0.3223, + "step": 12620 + }, + { + "epoch": 0.4876636163558439, + "grad_norm": 0.6125622987747192, + "learning_rate": 0.00016749166634490392, + "loss": 0.394, + "step": 12630 + }, + { + "epoch": 0.48804973164987064, + "grad_norm": 1.9612951278686523, + "learning_rate": 0.00016746592532530214, + "loss": 0.3367, + "step": 12640 + }, + { + "epoch": 0.48843584694389747, + "grad_norm": 1.5395468473434448, + "learning_rate": 0.00016744018430570038, + "loss": 0.5441, + "step": 12650 + }, + { + "epoch": 0.48882196223792423, + "grad_norm": 0.8706358671188354, + "learning_rate": 0.00016741444328609856, + "loss": 0.422, + "step": 12660 + }, + { + "epoch": 0.48920807753195106, + "grad_norm": 4.338102340698242, + "learning_rate": 0.00016738870226649678, + "loss": 0.3932, + "step": 12670 + }, + { + "epoch": 0.4895941928259778, + "grad_norm": 0.9501354694366455, + "learning_rate": 0.000167362961246895, + "loss": 0.563, + "step": 12680 + }, + { + "epoch": 0.48998030812000465, + "grad_norm": 1.3146884441375732, + "learning_rate": 0.0001673372202272932, + "loss": 0.5116, + "step": 12690 + }, + { + "epoch": 0.4903664234140314, + "grad_norm": 2.144622564315796, + "learning_rate": 0.00016731147920769144, + "loss": 0.4792, + "step": 12700 + }, + { + "epoch": 0.49075253870805824, + "grad_norm": 0.9871418476104736, + 
"learning_rate": 0.00016728573818808963, + "loss": 0.3802, + "step": 12710 + }, + { + "epoch": 0.491138654002085, + "grad_norm": 1.710766315460205, + "learning_rate": 0.00016725999716848787, + "loss": 0.2859, + "step": 12720 + }, + { + "epoch": 0.49152476929611183, + "grad_norm": 2.528146505355835, + "learning_rate": 0.00016723425614888606, + "loss": 0.4565, + "step": 12730 + }, + { + "epoch": 0.4919108845901386, + "grad_norm": 0.8471786379814148, + "learning_rate": 0.00016720851512928427, + "loss": 0.3455, + "step": 12740 + }, + { + "epoch": 0.4922969998841654, + "grad_norm": 2.6623692512512207, + "learning_rate": 0.00016718277410968248, + "loss": 0.3687, + "step": 12750 + }, + { + "epoch": 0.4926831151781922, + "grad_norm": 2.9324758052825928, + "learning_rate": 0.0001671570330900807, + "loss": 0.2652, + "step": 12760 + }, + { + "epoch": 0.493069230472219, + "grad_norm": 1.7955294847488403, + "learning_rate": 0.00016713129207047894, + "loss": 0.3156, + "step": 12770 + }, + { + "epoch": 0.4934553457662458, + "grad_norm": 0.9923033118247986, + "learning_rate": 0.00016710555105087712, + "loss": 0.3042, + "step": 12780 + }, + { + "epoch": 0.4938414610602726, + "grad_norm": 0.9309022426605225, + "learning_rate": 0.00016707981003127536, + "loss": 0.2742, + "step": 12790 + }, + { + "epoch": 0.49422757635429937, + "grad_norm": 4.864802360534668, + "learning_rate": 0.00016705406901167355, + "loss": 0.6145, + "step": 12800 + }, + { + "epoch": 0.4946136916483262, + "grad_norm": 2.4508230686187744, + "learning_rate": 0.00016702832799207176, + "loss": 0.4218, + "step": 12810 + }, + { + "epoch": 0.49499980694235296, + "grad_norm": 2.0317444801330566, + "learning_rate": 0.00016700258697246998, + "loss": 0.5682, + "step": 12820 + }, + { + "epoch": 0.4953859222363798, + "grad_norm": 4.89669942855835, + "learning_rate": 0.0001669768459528682, + "loss": 0.5654, + "step": 12830 + }, + { + "epoch": 0.4957720375304066, + "grad_norm": 0.602165162563324, + "learning_rate": 0.00016695110493326643, + "loss": 0.2719, + "step": 12840 + }, + { + "epoch": 0.4961581528244334, + "grad_norm": 1.1574476957321167, + "learning_rate": 0.00016692536391366462, + "loss": 0.3872, + "step": 12850 + }, + { + "epoch": 0.4965442681184602, + "grad_norm": 0.4792019724845886, + "learning_rate": 0.00016689962289406286, + "loss": 0.2671, + "step": 12860 + }, + { + "epoch": 0.49693038341248696, + "grad_norm": 1.4611676931381226, + "learning_rate": 0.00016687388187446104, + "loss": 0.3378, + "step": 12870 + }, + { + "epoch": 0.4973164987065138, + "grad_norm": 1.183975338935852, + "learning_rate": 0.00016684814085485928, + "loss": 0.2645, + "step": 12880 + }, + { + "epoch": 0.49770261400054056, + "grad_norm": 2.1447482109069824, + "learning_rate": 0.0001668223998352575, + "loss": 0.1678, + "step": 12890 + }, + { + "epoch": 0.4980887292945674, + "grad_norm": 1.5829964876174927, + "learning_rate": 0.00016679665881565568, + "loss": 0.4113, + "step": 12900 + }, + { + "epoch": 0.49847484458859415, + "grad_norm": 1.329871654510498, + "learning_rate": 0.00016677091779605392, + "loss": 0.2639, + "step": 12910 + }, + { + "epoch": 0.49886095988262097, + "grad_norm": 4.797327518463135, + "learning_rate": 0.0001667451767764521, + "loss": 0.3396, + "step": 12920 + }, + { + "epoch": 0.49924707517664774, + "grad_norm": 2.5864250659942627, + "learning_rate": 0.00016671943575685035, + "loss": 0.4143, + "step": 12930 + }, + { + "epoch": 0.49963319047067456, + "grad_norm": 0.05073557794094086, + "learning_rate": 0.00016669369473724854, + "loss": 0.231, + 
"step": 12940 + }, + { + "epoch": 0.5000193057647013, + "grad_norm": 0.9530317783355713, + "learning_rate": 0.00016666795371764678, + "loss": 0.3702, + "step": 12950 + }, + { + "epoch": 0.5004054210587281, + "grad_norm": 1.3573989868164062, + "learning_rate": 0.000166642212698045, + "loss": 0.2095, + "step": 12960 + }, + { + "epoch": 0.500791536352755, + "grad_norm": 3.2758514881134033, + "learning_rate": 0.00016661647167844318, + "loss": 0.3901, + "step": 12970 + }, + { + "epoch": 0.5011776516467817, + "grad_norm": 2.359602212905884, + "learning_rate": 0.00016659073065884142, + "loss": 0.6232, + "step": 12980 + }, + { + "epoch": 0.5015637669408085, + "grad_norm": 0.5743809938430786, + "learning_rate": 0.0001665649896392396, + "loss": 0.431, + "step": 12990 + }, + { + "epoch": 0.5019498822348353, + "grad_norm": 2.229215621948242, + "learning_rate": 0.00016653924861963784, + "loss": 0.33, + "step": 13000 + }, + { + "epoch": 0.5023359975288622, + "grad_norm": 3.1233408451080322, + "learning_rate": 0.00016651350760003606, + "loss": 0.3527, + "step": 13010 + }, + { + "epoch": 0.5027221128228889, + "grad_norm": 0.4846508204936981, + "learning_rate": 0.00016648776658043427, + "loss": 0.4514, + "step": 13020 + }, + { + "epoch": 0.5031082281169157, + "grad_norm": 0.807447612285614, + "learning_rate": 0.00016646202556083248, + "loss": 0.3643, + "step": 13030 + }, + { + "epoch": 0.5034943434109425, + "grad_norm": 3.226194143295288, + "learning_rate": 0.00016643628454123067, + "loss": 0.4803, + "step": 13040 + }, + { + "epoch": 0.5038804587049693, + "grad_norm": 6.127805233001709, + "learning_rate": 0.0001664105435216289, + "loss": 0.3501, + "step": 13050 + }, + { + "epoch": 0.5042665739989961, + "grad_norm": 1.3367782831192017, + "learning_rate": 0.0001663848025020271, + "loss": 0.2259, + "step": 13060 + }, + { + "epoch": 0.5046526892930229, + "grad_norm": 1.112602710723877, + "learning_rate": 0.00016635906148242534, + "loss": 0.3868, + "step": 13070 + }, + { + "epoch": 0.5050388045870496, + "grad_norm": 3.188282012939453, + "learning_rate": 0.00016633332046282355, + "loss": 0.2783, + "step": 13080 + }, + { + "epoch": 0.5054249198810765, + "grad_norm": 1.5972063541412354, + "learning_rate": 0.00016630757944322176, + "loss": 0.4661, + "step": 13090 + }, + { + "epoch": 0.5058110351751033, + "grad_norm": 0.8210055232048035, + "learning_rate": 0.00016628183842361998, + "loss": 0.2536, + "step": 13100 + }, + { + "epoch": 0.50619715046913, + "grad_norm": 2.279244899749756, + "learning_rate": 0.00016625609740401816, + "loss": 0.3158, + "step": 13110 + }, + { + "epoch": 0.5065832657631569, + "grad_norm": 2.9017488956451416, + "learning_rate": 0.0001662303563844164, + "loss": 0.4209, + "step": 13120 + }, + { + "epoch": 0.5069693810571837, + "grad_norm": 0.9567920565605164, + "learning_rate": 0.0001662046153648146, + "loss": 0.3869, + "step": 13130 + }, + { + "epoch": 0.5073554963512105, + "grad_norm": 1.3605408668518066, + "learning_rate": 0.00016617887434521283, + "loss": 0.3529, + "step": 13140 + }, + { + "epoch": 0.5077416116452372, + "grad_norm": 0.502921998500824, + "learning_rate": 0.00016615313332561104, + "loss": 0.2584, + "step": 13150 + }, + { + "epoch": 0.5081277269392641, + "grad_norm": 2.9092366695404053, + "learning_rate": 0.00016612739230600926, + "loss": 0.3494, + "step": 13160 + }, + { + "epoch": 0.5085138422332909, + "grad_norm": 0.28300145268440247, + "learning_rate": 0.00016610165128640747, + "loss": 0.3514, + "step": 13170 + }, + { + "epoch": 0.5088999575273176, + "grad_norm": 
1.326134204864502, + "learning_rate": 0.00016607591026680566, + "loss": 0.1593, + "step": 13180 + }, + { + "epoch": 0.5092860728213444, + "grad_norm": 2.1484436988830566, + "learning_rate": 0.0001660501692472039, + "loss": 0.4468, + "step": 13190 + }, + { + "epoch": 0.5096721881153713, + "grad_norm": 1.9255646467208862, + "learning_rate": 0.0001660244282276021, + "loss": 0.3937, + "step": 13200 + }, + { + "epoch": 0.5100583034093981, + "grad_norm": 0.01497764140367508, + "learning_rate": 0.00016599868720800032, + "loss": 0.2795, + "step": 13210 + }, + { + "epoch": 0.5104444187034248, + "grad_norm": 0.992023766040802, + "learning_rate": 0.00016597294618839854, + "loss": 0.2389, + "step": 13220 + }, + { + "epoch": 0.5108305339974516, + "grad_norm": 1.517337441444397, + "learning_rate": 0.00016594720516879675, + "loss": 0.2821, + "step": 13230 + }, + { + "epoch": 0.5112166492914785, + "grad_norm": 2.355637311935425, + "learning_rate": 0.00016592146414919496, + "loss": 0.5727, + "step": 13240 + }, + { + "epoch": 0.5116027645855052, + "grad_norm": 2.450536012649536, + "learning_rate": 0.00016589572312959315, + "loss": 0.4887, + "step": 13250 + }, + { + "epoch": 0.511988879879532, + "grad_norm": 1.2966598272323608, + "learning_rate": 0.0001658699821099914, + "loss": 0.3605, + "step": 13260 + }, + { + "epoch": 0.5123749951735588, + "grad_norm": 1.3981765508651733, + "learning_rate": 0.0001658442410903896, + "loss": 0.5329, + "step": 13270 + }, + { + "epoch": 0.5127611104675857, + "grad_norm": 0.7260739803314209, + "learning_rate": 0.00016581850007078782, + "loss": 0.3447, + "step": 13280 + }, + { + "epoch": 0.5131472257616124, + "grad_norm": 1.1348093748092651, + "learning_rate": 0.00016579275905118603, + "loss": 0.3086, + "step": 13290 + }, + { + "epoch": 0.5135333410556392, + "grad_norm": 1.3015291690826416, + "learning_rate": 0.00016576701803158424, + "loss": 0.3411, + "step": 13300 + }, + { + "epoch": 0.513919456349666, + "grad_norm": 2.352766990661621, + "learning_rate": 0.00016574127701198246, + "loss": 0.1617, + "step": 13310 + }, + { + "epoch": 0.5143055716436928, + "grad_norm": 2.0096113681793213, + "learning_rate": 0.00016571553599238067, + "loss": 0.3168, + "step": 13320 + }, + { + "epoch": 0.5146916869377196, + "grad_norm": 0.8163488507270813, + "learning_rate": 0.00016568979497277888, + "loss": 0.3318, + "step": 13330 + }, + { + "epoch": 0.5150778022317464, + "grad_norm": 0.8218249082565308, + "learning_rate": 0.0001656640539531771, + "loss": 0.2303, + "step": 13340 + }, + { + "epoch": 0.5154639175257731, + "grad_norm": 3.5414462089538574, + "learning_rate": 0.0001656383129335753, + "loss": 0.2295, + "step": 13350 + }, + { + "epoch": 0.5158500328198, + "grad_norm": 2.176178216934204, + "learning_rate": 0.00016561257191397352, + "loss": 0.5767, + "step": 13360 + }, + { + "epoch": 0.5162361481138268, + "grad_norm": 1.4649319648742676, + "learning_rate": 0.00016558683089437174, + "loss": 0.174, + "step": 13370 + }, + { + "epoch": 0.5166222634078536, + "grad_norm": 2.107895851135254, + "learning_rate": 0.00016556108987476995, + "loss": 0.3444, + "step": 13380 + }, + { + "epoch": 0.5170083787018804, + "grad_norm": 1.3116638660430908, + "learning_rate": 0.00016553534885516816, + "loss": 0.2462, + "step": 13390 + }, + { + "epoch": 0.5173944939959072, + "grad_norm": 2.867553949356079, + "learning_rate": 0.00016550960783556638, + "loss": 0.3106, + "step": 13400 + }, + { + "epoch": 0.517780609289934, + "grad_norm": 3.4331533908843994, + "learning_rate": 0.0001654838668159646, + "loss": 0.5359, 
+ "step": 13410 + }, + { + "epoch": 0.5181667245839607, + "grad_norm": 1.8306528329849243, + "learning_rate": 0.0001654581257963628, + "loss": 0.4225, + "step": 13420 + }, + { + "epoch": 0.5185528398779876, + "grad_norm": 1.242026448249817, + "learning_rate": 0.00016543238477676102, + "loss": 0.2679, + "step": 13430 + }, + { + "epoch": 0.5189389551720144, + "grad_norm": 0.5788571834564209, + "learning_rate": 0.00016540664375715923, + "loss": 0.369, + "step": 13440 + }, + { + "epoch": 0.5193250704660411, + "grad_norm": 0.22553350031375885, + "learning_rate": 0.00016538090273755744, + "loss": 0.422, + "step": 13450 + }, + { + "epoch": 0.5197111857600679, + "grad_norm": 1.6932384967803955, + "learning_rate": 0.00016535516171795566, + "loss": 0.3127, + "step": 13460 + }, + { + "epoch": 0.5200973010540948, + "grad_norm": 1.3747683763504028, + "learning_rate": 0.00016532942069835387, + "loss": 0.2751, + "step": 13470 + }, + { + "epoch": 0.5204834163481216, + "grad_norm": 2.0508596897125244, + "learning_rate": 0.00016530367967875208, + "loss": 0.4951, + "step": 13480 + }, + { + "epoch": 0.5208695316421483, + "grad_norm": 1.044503092765808, + "learning_rate": 0.0001652779386591503, + "loss": 0.2329, + "step": 13490 + }, + { + "epoch": 0.5212556469361751, + "grad_norm": 1.6400004625320435, + "learning_rate": 0.0001652521976395485, + "loss": 0.1846, + "step": 13500 + }, + { + "epoch": 0.521641762230202, + "grad_norm": 2.234170913696289, + "learning_rate": 0.00016522645661994672, + "loss": 0.5005, + "step": 13510 + }, + { + "epoch": 0.5220278775242287, + "grad_norm": 1.8167870044708252, + "learning_rate": 0.00016520071560034493, + "loss": 0.4629, + "step": 13520 + }, + { + "epoch": 0.5224139928182555, + "grad_norm": 1.7200794219970703, + "learning_rate": 0.00016517497458074315, + "loss": 0.4546, + "step": 13530 + }, + { + "epoch": 0.5228001081122823, + "grad_norm": 2.845244884490967, + "learning_rate": 0.00016514923356114136, + "loss": 0.2211, + "step": 13540 + }, + { + "epoch": 0.5231862234063092, + "grad_norm": 5.3078389167785645, + "learning_rate": 0.00016512349254153957, + "loss": 0.4903, + "step": 13550 + }, + { + "epoch": 0.5235723387003359, + "grad_norm": 8.109561920166016, + "learning_rate": 0.0001650977515219378, + "loss": 0.3624, + "step": 13560 + }, + { + "epoch": 0.5239584539943627, + "grad_norm": 0.537749171257019, + "learning_rate": 0.000165072010502336, + "loss": 0.2262, + "step": 13570 + }, + { + "epoch": 0.5243445692883895, + "grad_norm": 2.8182802200317383, + "learning_rate": 0.00016504626948273421, + "loss": 0.4173, + "step": 13580 + }, + { + "epoch": 0.5247306845824163, + "grad_norm": 0.6623479127883911, + "learning_rate": 0.00016502052846313243, + "loss": 0.2955, + "step": 13590 + }, + { + "epoch": 0.5251167998764431, + "grad_norm": 3.027707099914551, + "learning_rate": 0.00016499478744353064, + "loss": 0.5272, + "step": 13600 + }, + { + "epoch": 0.5255029151704699, + "grad_norm": 1.6830018758773804, + "learning_rate": 0.00016496904642392885, + "loss": 0.2169, + "step": 13610 + }, + { + "epoch": 0.5258890304644968, + "grad_norm": 3.0182113647460938, + "learning_rate": 0.00016494330540432707, + "loss": 0.5747, + "step": 13620 + }, + { + "epoch": 0.5262751457585235, + "grad_norm": 1.5487585067749023, + "learning_rate": 0.00016491756438472528, + "loss": 0.2585, + "step": 13630 + }, + { + "epoch": 0.5266612610525503, + "grad_norm": 1.2525122165679932, + "learning_rate": 0.0001648918233651235, + "loss": 0.3622, + "step": 13640 + }, + { + "epoch": 0.5270473763465771, + "grad_norm": 
1.9910658597946167, + "learning_rate": 0.00016486608234552174, + "loss": 0.2049, + "step": 13650 + }, + { + "epoch": 0.5274334916406039, + "grad_norm": 0.7254251837730408, + "learning_rate": 0.00016484034132591992, + "loss": 0.3101, + "step": 13660 + }, + { + "epoch": 0.5278196069346307, + "grad_norm": 0.9839001893997192, + "learning_rate": 0.00016481460030631813, + "loss": 0.4723, + "step": 13670 + }, + { + "epoch": 0.5282057222286575, + "grad_norm": 2.176529884338379, + "learning_rate": 0.00016478885928671635, + "loss": 0.3669, + "step": 13680 + }, + { + "epoch": 0.5285918375226842, + "grad_norm": 2.534996509552002, + "learning_rate": 0.00016476311826711456, + "loss": 0.551, + "step": 13690 + }, + { + "epoch": 0.5289779528167111, + "grad_norm": 2.0100669860839844, + "learning_rate": 0.0001647373772475128, + "loss": 0.445, + "step": 13700 + }, + { + "epoch": 0.5293640681107379, + "grad_norm": 0.7759265899658203, + "learning_rate": 0.000164711636227911, + "loss": 0.2839, + "step": 13710 + }, + { + "epoch": 0.5297501834047647, + "grad_norm": 3.398287057876587, + "learning_rate": 0.00016468589520830923, + "loss": 0.3497, + "step": 13720 + }, + { + "epoch": 0.5301362986987914, + "grad_norm": 2.6792221069335938, + "learning_rate": 0.00016466015418870741, + "loss": 0.3435, + "step": 13730 + }, + { + "epoch": 0.5305224139928183, + "grad_norm": 0.7382081747055054, + "learning_rate": 0.00016463441316910563, + "loss": 0.367, + "step": 13740 + }, + { + "epoch": 0.5309085292868451, + "grad_norm": 0.9496407508850098, + "learning_rate": 0.00016460867214950384, + "loss": 0.305, + "step": 13750 + }, + { + "epoch": 0.5312946445808718, + "grad_norm": 1.2950342893600464, + "learning_rate": 0.00016458293112990205, + "loss": 0.2769, + "step": 13760 + }, + { + "epoch": 0.5316807598748986, + "grad_norm": 1.1744359731674194, + "learning_rate": 0.0001645571901103003, + "loss": 0.4776, + "step": 13770 + }, + { + "epoch": 0.5320668751689255, + "grad_norm": 1.1507617235183716, + "learning_rate": 0.00016453144909069848, + "loss": 0.4486, + "step": 13780 + }, + { + "epoch": 0.5324529904629522, + "grad_norm": 3.200432300567627, + "learning_rate": 0.00016450570807109672, + "loss": 0.4144, + "step": 13790 + }, + { + "epoch": 0.532839105756979, + "grad_norm": 0.991581916809082, + "learning_rate": 0.0001644799670514949, + "loss": 0.2314, + "step": 13800 + }, + { + "epoch": 0.5332252210510058, + "grad_norm": 4.800248622894287, + "learning_rate": 0.00016445422603189312, + "loss": 0.4601, + "step": 13810 + }, + { + "epoch": 0.5336113363450327, + "grad_norm": 1.2141329050064087, + "learning_rate": 0.00016442848501229136, + "loss": 0.257, + "step": 13820 + }, + { + "epoch": 0.5339974516390594, + "grad_norm": 0.8803738951683044, + "learning_rate": 0.00016440274399268955, + "loss": 0.4645, + "step": 13830 + }, + { + "epoch": 0.5343835669330862, + "grad_norm": 1.2020646333694458, + "learning_rate": 0.0001643770029730878, + "loss": 0.3751, + "step": 13840 + }, + { + "epoch": 0.534769682227113, + "grad_norm": 0.9887505173683167, + "learning_rate": 0.00016435126195348597, + "loss": 0.2266, + "step": 13850 + }, + { + "epoch": 0.5351557975211398, + "grad_norm": 0.38067731261253357, + "learning_rate": 0.00016432552093388421, + "loss": 0.3482, + "step": 13860 + }, + { + "epoch": 0.5355419128151666, + "grad_norm": 5.429462909698486, + "learning_rate": 0.0001642997799142824, + "loss": 0.4055, + "step": 13870 + }, + { + "epoch": 0.5359280281091934, + "grad_norm": 2.197861909866333, + "learning_rate": 0.00016427403889468061, + "loss": 
0.1701, + "step": 13880 + }, + { + "epoch": 0.5363141434032203, + "grad_norm": 1.467132568359375, + "learning_rate": 0.00016424829787507885, + "loss": 0.3536, + "step": 13890 + }, + { + "epoch": 0.536700258697247, + "grad_norm": 2.0702550411224365, + "learning_rate": 0.00016422255685547704, + "loss": 0.5644, + "step": 13900 + }, + { + "epoch": 0.5370863739912738, + "grad_norm": 1.4855132102966309, + "learning_rate": 0.00016419681583587528, + "loss": 0.4068, + "step": 13910 + }, + { + "epoch": 0.5374724892853006, + "grad_norm": 0.6586676239967346, + "learning_rate": 0.00016417107481627347, + "loss": 0.3709, + "step": 13920 + }, + { + "epoch": 0.5378586045793274, + "grad_norm": 0.026774466037750244, + "learning_rate": 0.0001641453337966717, + "loss": 0.2635, + "step": 13930 + }, + { + "epoch": 0.5382447198733542, + "grad_norm": 0.9848103523254395, + "learning_rate": 0.0001641195927770699, + "loss": 0.249, + "step": 13940 + }, + { + "epoch": 0.538630835167381, + "grad_norm": 3.77512526512146, + "learning_rate": 0.0001640938517574681, + "loss": 0.5356, + "step": 13950 + }, + { + "epoch": 0.5390169504614077, + "grad_norm": 5.543573379516602, + "learning_rate": 0.00016406811073786635, + "loss": 0.6471, + "step": 13960 + }, + { + "epoch": 0.5394030657554346, + "grad_norm": 0.8161652684211731, + "learning_rate": 0.00016404236971826453, + "loss": 0.5691, + "step": 13970 + }, + { + "epoch": 0.5397891810494614, + "grad_norm": 1.5539859533309937, + "learning_rate": 0.00016401662869866277, + "loss": 0.4406, + "step": 13980 + }, + { + "epoch": 0.5401752963434882, + "grad_norm": 1.3035658597946167, + "learning_rate": 0.00016399088767906096, + "loss": 0.3086, + "step": 13990 + }, + { + "epoch": 0.5405614116375149, + "grad_norm": 0.9168418645858765, + "learning_rate": 0.0001639651466594592, + "loss": 0.1581, + "step": 14000 + }, + { + "epoch": 0.5409475269315418, + "grad_norm": 1.0382287502288818, + "learning_rate": 0.00016393940563985741, + "loss": 0.4723, + "step": 14010 + }, + { + "epoch": 0.5413336422255686, + "grad_norm": 2.896981716156006, + "learning_rate": 0.0001639136646202556, + "loss": 0.2999, + "step": 14020 + }, + { + "epoch": 0.5417197575195953, + "grad_norm": 0.7354179620742798, + "learning_rate": 0.00016388792360065384, + "loss": 0.4853, + "step": 14030 + }, + { + "epoch": 0.5421058728136221, + "grad_norm": 3.221067190170288, + "learning_rate": 0.00016386218258105203, + "loss": 0.3622, + "step": 14040 + }, + { + "epoch": 0.542491988107649, + "grad_norm": 6.591146469116211, + "learning_rate": 0.00016383644156145027, + "loss": 0.5803, + "step": 14050 + }, + { + "epoch": 0.5428781034016757, + "grad_norm": 3.1521377563476562, + "learning_rate": 0.00016381070054184845, + "loss": 0.3267, + "step": 14060 + }, + { + "epoch": 0.5432642186957025, + "grad_norm": 1.7890762090682983, + "learning_rate": 0.0001637849595222467, + "loss": 0.4584, + "step": 14070 + }, + { + "epoch": 0.5436503339897293, + "grad_norm": 1.6599558591842651, + "learning_rate": 0.0001637592185026449, + "loss": 0.298, + "step": 14080 + }, + { + "epoch": 0.5440364492837562, + "grad_norm": 3.521927833557129, + "learning_rate": 0.00016373347748304312, + "loss": 0.3743, + "step": 14090 + }, + { + "epoch": 0.5444225645777829, + "grad_norm": 3.8942599296569824, + "learning_rate": 0.00016370773646344133, + "loss": 0.3254, + "step": 14100 + }, + { + "epoch": 0.5448086798718097, + "grad_norm": 2.8547496795654297, + "learning_rate": 0.00016368199544383952, + "loss": 0.4073, + "step": 14110 + }, + { + "epoch": 0.5451947951658365, + 
"grad_norm": 1.0060430765151978, + "learning_rate": 0.00016365625442423776, + "loss": 0.1631, + "step": 14120 + }, + { + "epoch": 0.5455809104598633, + "grad_norm": 2.2001001834869385, + "learning_rate": 0.00016363051340463595, + "loss": 0.2854, + "step": 14130 + }, + { + "epoch": 0.5459670257538901, + "grad_norm": 1.3699944019317627, + "learning_rate": 0.0001636047723850342, + "loss": 0.456, + "step": 14140 + }, + { + "epoch": 0.5463531410479169, + "grad_norm": 2.1481733322143555, + "learning_rate": 0.0001635790313654324, + "loss": 0.4085, + "step": 14150 + }, + { + "epoch": 0.5467392563419438, + "grad_norm": 0.40439683198928833, + "learning_rate": 0.00016355329034583061, + "loss": 0.3932, + "step": 14160 + }, + { + "epoch": 0.5471253716359705, + "grad_norm": 2.2773404121398926, + "learning_rate": 0.00016352754932622883, + "loss": 0.2926, + "step": 14170 + }, + { + "epoch": 0.5475114869299973, + "grad_norm": 2.2974839210510254, + "learning_rate": 0.000163501808306627, + "loss": 0.2419, + "step": 14180 + }, + { + "epoch": 0.5478976022240241, + "grad_norm": 1.0429989099502563, + "learning_rate": 0.00016347606728702525, + "loss": 0.2559, + "step": 14190 + }, + { + "epoch": 0.5482837175180509, + "grad_norm": 0.8988879919052124, + "learning_rate": 0.00016345032626742347, + "loss": 0.1677, + "step": 14200 + }, + { + "epoch": 0.5486698328120777, + "grad_norm": 1.2740018367767334, + "learning_rate": 0.00016342458524782168, + "loss": 0.2452, + "step": 14210 + }, + { + "epoch": 0.5490559481061045, + "grad_norm": 1.789467692375183, + "learning_rate": 0.0001633988442282199, + "loss": 0.2272, + "step": 14220 + }, + { + "epoch": 0.5494420634001312, + "grad_norm": 3.2136781215667725, + "learning_rate": 0.0001633731032086181, + "loss": 0.4026, + "step": 14230 + }, + { + "epoch": 0.5498281786941581, + "grad_norm": 2.4747092723846436, + "learning_rate": 0.00016334736218901632, + "loss": 0.2371, + "step": 14240 + }, + { + "epoch": 0.5502142939881849, + "grad_norm": 1.5639567375183105, + "learning_rate": 0.0001633216211694145, + "loss": 0.2801, + "step": 14250 + }, + { + "epoch": 0.5506004092822117, + "grad_norm": 3.9598312377929688, + "learning_rate": 0.00016329588014981275, + "loss": 0.2583, + "step": 14260 + }, + { + "epoch": 0.5509865245762384, + "grad_norm": 1.5873563289642334, + "learning_rate": 0.00016327013913021096, + "loss": 0.2729, + "step": 14270 + }, + { + "epoch": 0.5513726398702653, + "grad_norm": 2.2313668727874756, + "learning_rate": 0.00016324439811060917, + "loss": 0.191, + "step": 14280 + }, + { + "epoch": 0.5517587551642921, + "grad_norm": 1.6087117195129395, + "learning_rate": 0.0001632186570910074, + "loss": 0.2698, + "step": 14290 + }, + { + "epoch": 0.5521448704583188, + "grad_norm": 6.5459675788879395, + "learning_rate": 0.0001631929160714056, + "loss": 0.3632, + "step": 14300 + }, + { + "epoch": 0.5525309857523456, + "grad_norm": 1.2121779918670654, + "learning_rate": 0.00016316717505180381, + "loss": 0.4541, + "step": 14310 + }, + { + "epoch": 0.5529171010463725, + "grad_norm": 2.7277257442474365, + "learning_rate": 0.00016314143403220203, + "loss": 0.1489, + "step": 14320 + }, + { + "epoch": 0.5533032163403993, + "grad_norm": 2.2566685676574707, + "learning_rate": 0.00016311569301260024, + "loss": 0.1838, + "step": 14330 + }, + { + "epoch": 0.553689331634426, + "grad_norm": 0.44783294200897217, + "learning_rate": 0.00016308995199299845, + "loss": 0.4745, + "step": 14340 + }, + { + "epoch": 0.5540754469284528, + "grad_norm": 1.0200363397598267, + "learning_rate": 
0.00016306421097339667, + "loss": 0.1251, + "step": 14350 + }, + { + "epoch": 0.5544615622224797, + "grad_norm": 1.1761879920959473, + "learning_rate": 0.00016303846995379488, + "loss": 0.6837, + "step": 14360 + }, + { + "epoch": 0.5548476775165064, + "grad_norm": 1.8275704383850098, + "learning_rate": 0.0001630127289341931, + "loss": 0.3968, + "step": 14370 + }, + { + "epoch": 0.5552337928105332, + "grad_norm": 0.7219232320785522, + "learning_rate": 0.0001629869879145913, + "loss": 0.3278, + "step": 14380 + }, + { + "epoch": 0.55561990810456, + "grad_norm": 1.9161540269851685, + "learning_rate": 0.00016296124689498952, + "loss": 0.5071, + "step": 14390 + }, + { + "epoch": 0.5560060233985868, + "grad_norm": 2.4773502349853516, + "learning_rate": 0.00016293550587538773, + "loss": 0.3268, + "step": 14400 + }, + { + "epoch": 0.5563921386926136, + "grad_norm": 1.526877760887146, + "learning_rate": 0.00016290976485578595, + "loss": 0.284, + "step": 14410 + }, + { + "epoch": 0.5567782539866404, + "grad_norm": 2.082036018371582, + "learning_rate": 0.00016288402383618416, + "loss": 0.4194, + "step": 14420 + }, + { + "epoch": 0.5571643692806673, + "grad_norm": 4.1033477783203125, + "learning_rate": 0.00016285828281658237, + "loss": 0.329, + "step": 14430 + }, + { + "epoch": 0.557550484574694, + "grad_norm": 3.344879388809204, + "learning_rate": 0.0001628325417969806, + "loss": 0.3599, + "step": 14440 + }, + { + "epoch": 0.5579365998687208, + "grad_norm": 2.6200602054595947, + "learning_rate": 0.0001628068007773788, + "loss": 0.5152, + "step": 14450 + }, + { + "epoch": 0.5583227151627476, + "grad_norm": 1.05362868309021, + "learning_rate": 0.000162781059757777, + "loss": 0.4454, + "step": 14460 + }, + { + "epoch": 0.5587088304567744, + "grad_norm": 2.557406187057495, + "learning_rate": 0.00016275531873817523, + "loss": 0.3779, + "step": 14470 + }, + { + "epoch": 0.5590949457508012, + "grad_norm": 0.8478209376335144, + "learning_rate": 0.00016272957771857344, + "loss": 0.4289, + "step": 14480 + }, + { + "epoch": 0.559481061044828, + "grad_norm": 3.543574094772339, + "learning_rate": 0.00016270383669897165, + "loss": 0.37, + "step": 14490 + }, + { + "epoch": 0.5598671763388547, + "grad_norm": 0.21068768203258514, + "learning_rate": 0.00016267809567936987, + "loss": 0.2602, + "step": 14500 + }, + { + "epoch": 0.5602532916328816, + "grad_norm": 1.1703628301620483, + "learning_rate": 0.00016265235465976808, + "loss": 0.3684, + "step": 14510 + }, + { + "epoch": 0.5606394069269084, + "grad_norm": 1.4498575925827026, + "learning_rate": 0.0001626266136401663, + "loss": 0.4089, + "step": 14520 + }, + { + "epoch": 0.5610255222209352, + "grad_norm": 1.617297887802124, + "learning_rate": 0.0001626008726205645, + "loss": 0.2759, + "step": 14530 + }, + { + "epoch": 0.5614116375149619, + "grad_norm": 0.8708978891372681, + "learning_rate": 0.00016257513160096272, + "loss": 0.2523, + "step": 14540 + }, + { + "epoch": 0.5617977528089888, + "grad_norm": 1.098026156425476, + "learning_rate": 0.00016254939058136093, + "loss": 0.3507, + "step": 14550 + }, + { + "epoch": 0.5621838681030156, + "grad_norm": 3.0867936611175537, + "learning_rate": 0.00016252364956175915, + "loss": 0.3563, + "step": 14560 + }, + { + "epoch": 0.5625699833970423, + "grad_norm": 1.4829964637756348, + "learning_rate": 0.00016249790854215736, + "loss": 0.3586, + "step": 14570 + }, + { + "epoch": 0.5629560986910691, + "grad_norm": 4.029405117034912, + "learning_rate": 0.00016247216752255557, + "loss": 0.5198, + "step": 14580 + }, + { + "epoch": 
0.563342213985096, + "grad_norm": 2.5473573207855225, + "learning_rate": 0.00016244642650295379, + "loss": 0.3818, + "step": 14590 + }, + { + "epoch": 0.5637283292791228, + "grad_norm": 1.387529730796814, + "learning_rate": 0.000162420685483352, + "loss": 0.3453, + "step": 14600 + }, + { + "epoch": 0.5641144445731495, + "grad_norm": 3.2525246143341064, + "learning_rate": 0.0001623949444637502, + "loss": 0.6096, + "step": 14610 + }, + { + "epoch": 0.5645005598671763, + "grad_norm": 1.23868989944458, + "learning_rate": 0.00016236920344414843, + "loss": 0.2785, + "step": 14620 + }, + { + "epoch": 0.5648866751612032, + "grad_norm": 1.763416051864624, + "learning_rate": 0.00016234346242454667, + "loss": 0.2313, + "step": 14630 + }, + { + "epoch": 0.5652727904552299, + "grad_norm": 2.6030027866363525, + "learning_rate": 0.00016231772140494485, + "loss": 0.5002, + "step": 14640 + }, + { + "epoch": 0.5656589057492567, + "grad_norm": 4.345195770263672, + "learning_rate": 0.00016229198038534307, + "loss": 0.3433, + "step": 14650 + }, + { + "epoch": 0.5660450210432835, + "grad_norm": 1.8660558462142944, + "learning_rate": 0.00016226623936574128, + "loss": 0.325, + "step": 14660 + }, + { + "epoch": 0.5664311363373103, + "grad_norm": 2.504354953765869, + "learning_rate": 0.0001622404983461395, + "loss": 0.3013, + "step": 14670 + }, + { + "epoch": 0.5668172516313371, + "grad_norm": 1.715135097503662, + "learning_rate": 0.0001622147573265377, + "loss": 0.2396, + "step": 14680 + }, + { + "epoch": 0.5672033669253639, + "grad_norm": 0.5195931792259216, + "learning_rate": 0.00016218901630693592, + "loss": 0.4992, + "step": 14690 + }, + { + "epoch": 0.5675894822193908, + "grad_norm": 1.076095461845398, + "learning_rate": 0.00016216327528733416, + "loss": 0.4488, + "step": 14700 + }, + { + "epoch": 0.5679755975134175, + "grad_norm": 0.42350637912750244, + "learning_rate": 0.00016213753426773235, + "loss": 0.2798, + "step": 14710 + }, + { + "epoch": 0.5683617128074443, + "grad_norm": 2.8514647483825684, + "learning_rate": 0.00016211179324813056, + "loss": 0.3108, + "step": 14720 + }, + { + "epoch": 0.5687478281014711, + "grad_norm": 1.4496532678604126, + "learning_rate": 0.00016208605222852877, + "loss": 0.4197, + "step": 14730 + }, + { + "epoch": 0.5691339433954979, + "grad_norm": 2.513998031616211, + "learning_rate": 0.00016206031120892699, + "loss": 0.4931, + "step": 14740 + }, + { + "epoch": 0.5695200586895247, + "grad_norm": 1.5905802249908447, + "learning_rate": 0.0001620345701893252, + "loss": 0.2175, + "step": 14750 + }, + { + "epoch": 0.5699061739835515, + "grad_norm": 0.4657856523990631, + "learning_rate": 0.0001620088291697234, + "loss": 0.4753, + "step": 14760 + }, + { + "epoch": 0.5702922892775782, + "grad_norm": 1.4188594818115234, + "learning_rate": 0.00016198308815012165, + "loss": 0.4849, + "step": 14770 + }, + { + "epoch": 0.5706784045716051, + "grad_norm": 0.6742203235626221, + "learning_rate": 0.00016195734713051984, + "loss": 0.3451, + "step": 14780 + }, + { + "epoch": 0.5710645198656319, + "grad_norm": 1.521262526512146, + "learning_rate": 0.00016193160611091805, + "loss": 0.6253, + "step": 14790 + }, + { + "epoch": 0.5714506351596587, + "grad_norm": 0.8657771348953247, + "learning_rate": 0.00016190586509131627, + "loss": 0.3664, + "step": 14800 + }, + { + "epoch": 0.5718367504536854, + "grad_norm": 1.0131505727767944, + "learning_rate": 0.00016188012407171448, + "loss": 0.3395, + "step": 14810 + }, + { + "epoch": 0.5722228657477123, + "grad_norm": 0.8506319522857666, + "learning_rate": 
0.00016185438305211272, + "loss": 0.2769, + "step": 14820 + }, + { + "epoch": 0.5726089810417391, + "grad_norm": 3.1080141067504883, + "learning_rate": 0.0001618286420325109, + "loss": 0.3185, + "step": 14830 + }, + { + "epoch": 0.5729950963357658, + "grad_norm": 0.8805003762245178, + "learning_rate": 0.00016180290101290915, + "loss": 0.3089, + "step": 14840 + }, + { + "epoch": 0.5733812116297926, + "grad_norm": 3.9470136165618896, + "learning_rate": 0.00016177715999330733, + "loss": 0.4552, + "step": 14850 + }, + { + "epoch": 0.5737673269238195, + "grad_norm": 1.10677969455719, + "learning_rate": 0.00016175141897370557, + "loss": 0.4624, + "step": 14860 + }, + { + "epoch": 0.5741534422178463, + "grad_norm": 1.185539960861206, + "learning_rate": 0.00016172567795410376, + "loss": 0.3144, + "step": 14870 + }, + { + "epoch": 0.574539557511873, + "grad_norm": 2.484386920928955, + "learning_rate": 0.00016169993693450197, + "loss": 0.3409, + "step": 14880 + }, + { + "epoch": 0.5749256728058998, + "grad_norm": 2.1621437072753906, + "learning_rate": 0.0001616741959149002, + "loss": 0.4421, + "step": 14890 + }, + { + "epoch": 0.5753117880999267, + "grad_norm": 1.3359025716781616, + "learning_rate": 0.0001616484548952984, + "loss": 0.2826, + "step": 14900 + }, + { + "epoch": 0.5756979033939534, + "grad_norm": 2.828157901763916, + "learning_rate": 0.00016162271387569664, + "loss": 0.4615, + "step": 14910 + }, + { + "epoch": 0.5760840186879802, + "grad_norm": 2.0543019771575928, + "learning_rate": 0.00016159697285609483, + "loss": 0.4307, + "step": 14920 + }, + { + "epoch": 0.5764701339820071, + "grad_norm": 0.3085225522518158, + "learning_rate": 0.00016157123183649307, + "loss": 0.317, + "step": 14930 + }, + { + "epoch": 0.5768562492760339, + "grad_norm": 1.459349274635315, + "learning_rate": 0.00016154549081689125, + "loss": 0.3928, + "step": 14940 + }, + { + "epoch": 0.5772423645700606, + "grad_norm": 0.6684612035751343, + "learning_rate": 0.00016151974979728947, + "loss": 0.4129, + "step": 14950 + }, + { + "epoch": 0.5776284798640874, + "grad_norm": 2.428311586380005, + "learning_rate": 0.0001614940087776877, + "loss": 0.4163, + "step": 14960 + }, + { + "epoch": 0.5780145951581143, + "grad_norm": 1.8885403871536255, + "learning_rate": 0.0001614682677580859, + "loss": 0.4311, + "step": 14970 + }, + { + "epoch": 0.578400710452141, + "grad_norm": 3.9598031044006348, + "learning_rate": 0.00016144252673848413, + "loss": 0.3103, + "step": 14980 + }, + { + "epoch": 0.5787868257461678, + "grad_norm": 1.872383713722229, + "learning_rate": 0.00016141678571888232, + "loss": 0.3592, + "step": 14990 + }, + { + "epoch": 0.5791729410401946, + "grad_norm": 1.023526668548584, + "learning_rate": 0.00016139104469928056, + "loss": 0.4185, + "step": 15000 + }, + { + "epoch": 0.5795590563342214, + "grad_norm": 1.5721429586410522, + "learning_rate": 0.00016136530367967877, + "loss": 0.4017, + "step": 15010 + }, + { + "epoch": 0.5799451716282482, + "grad_norm": 3.502350091934204, + "learning_rate": 0.00016133956266007696, + "loss": 0.397, + "step": 15020 + }, + { + "epoch": 0.580331286922275, + "grad_norm": 2.415985345840454, + "learning_rate": 0.0001613138216404752, + "loss": 0.4041, + "step": 15030 + }, + { + "epoch": 0.5807174022163017, + "grad_norm": 0.5441868901252747, + "learning_rate": 0.00016128808062087339, + "loss": 0.2395, + "step": 15040 + }, + { + "epoch": 0.5811035175103286, + "grad_norm": 2.453216552734375, + "learning_rate": 0.00016126233960127163, + "loss": 0.1586, + "step": 15050 + }, + { + "epoch": 
0.5814896328043554, + "grad_norm": 3.108646869659424, + "learning_rate": 0.0001612365985816698, + "loss": 0.3996, + "step": 15060 + }, + { + "epoch": 0.5818757480983822, + "grad_norm": 0.7707905173301697, + "learning_rate": 0.00016121085756206805, + "loss": 0.1756, + "step": 15070 + }, + { + "epoch": 0.5822618633924089, + "grad_norm": 0.42857447266578674, + "learning_rate": 0.00016118511654246627, + "loss": 0.258, + "step": 15080 + }, + { + "epoch": 0.5826479786864358, + "grad_norm": 0.7148373126983643, + "learning_rate": 0.00016115937552286445, + "loss": 0.3755, + "step": 15090 + }, + { + "epoch": 0.5830340939804626, + "grad_norm": 0.04789021611213684, + "learning_rate": 0.0001611336345032627, + "loss": 0.2087, + "step": 15100 + }, + { + "epoch": 0.5834202092744893, + "grad_norm": 5.012516975402832, + "learning_rate": 0.00016110789348366088, + "loss": 0.5406, + "step": 15110 + }, + { + "epoch": 0.5838063245685161, + "grad_norm": 1.4139299392700195, + "learning_rate": 0.00016108215246405912, + "loss": 0.407, + "step": 15120 + }, + { + "epoch": 0.584192439862543, + "grad_norm": 0.8637074828147888, + "learning_rate": 0.00016105641144445733, + "loss": 0.2987, + "step": 15130 + }, + { + "epoch": 0.5845785551565698, + "grad_norm": 0.9556403160095215, + "learning_rate": 0.00016103067042485555, + "loss": 0.4319, + "step": 15140 + }, + { + "epoch": 0.5849646704505965, + "grad_norm": 2.071455955505371, + "learning_rate": 0.00016100492940525376, + "loss": 0.4824, + "step": 15150 + }, + { + "epoch": 0.5853507857446233, + "grad_norm": 3.8130764961242676, + "learning_rate": 0.00016097918838565195, + "loss": 0.4749, + "step": 15160 + }, + { + "epoch": 0.5857369010386502, + "grad_norm": 1.290738582611084, + "learning_rate": 0.00016095344736605019, + "loss": 0.4486, + "step": 15170 + }, + { + "epoch": 0.5861230163326769, + "grad_norm": 0.9964671730995178, + "learning_rate": 0.00016092770634644837, + "loss": 0.1514, + "step": 15180 + }, + { + "epoch": 0.5865091316267037, + "grad_norm": 0.5267524123191833, + "learning_rate": 0.0001609019653268466, + "loss": 0.2298, + "step": 15190 + }, + { + "epoch": 0.5868952469207306, + "grad_norm": 2.028960704803467, + "learning_rate": 0.00016087622430724483, + "loss": 0.2925, + "step": 15200 + }, + { + "epoch": 0.5872813622147574, + "grad_norm": 0.8423904776573181, + "learning_rate": 0.00016085048328764304, + "loss": 0.4221, + "step": 15210 + }, + { + "epoch": 0.5876674775087841, + "grad_norm": 1.9663047790527344, + "learning_rate": 0.00016082474226804125, + "loss": 0.3595, + "step": 15220 + }, + { + "epoch": 0.5880535928028109, + "grad_norm": 1.2221906185150146, + "learning_rate": 0.00016079900124843944, + "loss": 0.3174, + "step": 15230 + }, + { + "epoch": 0.5884397080968378, + "grad_norm": 2.138437032699585, + "learning_rate": 0.00016077326022883768, + "loss": 0.2993, + "step": 15240 + }, + { + "epoch": 0.5888258233908645, + "grad_norm": 1.8036224842071533, + "learning_rate": 0.00016074751920923586, + "loss": 0.3897, + "step": 15250 + }, + { + "epoch": 0.5892119386848913, + "grad_norm": 2.3102879524230957, + "learning_rate": 0.0001607217781896341, + "loss": 0.3713, + "step": 15260 + }, + { + "epoch": 0.5895980539789181, + "grad_norm": 1.40048348903656, + "learning_rate": 0.00016069603717003232, + "loss": 0.2701, + "step": 15270 + }, + { + "epoch": 0.589984169272945, + "grad_norm": 1.0581787824630737, + "learning_rate": 0.00016067029615043053, + "loss": 0.2529, + "step": 15280 + }, + { + "epoch": 0.5903702845669717, + "grad_norm": 0.668211042881012, + "learning_rate": 
0.00016064455513082875, + "loss": 0.221, + "step": 15290 + }, + { + "epoch": 0.5907563998609985, + "grad_norm": 0.7950372099876404, + "learning_rate": 0.00016061881411122696, + "loss": 0.2405, + "step": 15300 + }, + { + "epoch": 0.5911425151550252, + "grad_norm": 1.8531723022460938, + "learning_rate": 0.00016059307309162517, + "loss": 0.3423, + "step": 15310 + }, + { + "epoch": 0.5915286304490521, + "grad_norm": 0.2071121335029602, + "learning_rate": 0.00016056733207202339, + "loss": 0.2923, + "step": 15320 + }, + { + "epoch": 0.5919147457430789, + "grad_norm": 2.4298369884490967, + "learning_rate": 0.0001605415910524216, + "loss": 0.531, + "step": 15330 + }, + { + "epoch": 0.5923008610371057, + "grad_norm": 3.2297933101654053, + "learning_rate": 0.0001605158500328198, + "loss": 0.2563, + "step": 15340 + }, + { + "epoch": 0.5926869763311324, + "grad_norm": 1.533225178718567, + "learning_rate": 0.00016049010901321803, + "loss": 0.2712, + "step": 15350 + }, + { + "epoch": 0.5930730916251593, + "grad_norm": 3.6168954372406006, + "learning_rate": 0.00016046436799361624, + "loss": 0.6428, + "step": 15360 + }, + { + "epoch": 0.5934592069191861, + "grad_norm": 0.8912101984024048, + "learning_rate": 0.00016043862697401445, + "loss": 0.2882, + "step": 15370 + }, + { + "epoch": 0.5938453222132128, + "grad_norm": 0.6782923936843872, + "learning_rate": 0.00016041288595441267, + "loss": 0.3451, + "step": 15380 + }, + { + "epoch": 0.5942314375072396, + "grad_norm": 2.7575254440307617, + "learning_rate": 0.00016038714493481088, + "loss": 0.272, + "step": 15390 + }, + { + "epoch": 0.5946175528012665, + "grad_norm": 1.8348017930984497, + "learning_rate": 0.0001603614039152091, + "loss": 0.286, + "step": 15400 + }, + { + "epoch": 0.5950036680952933, + "grad_norm": 3.1459157466888428, + "learning_rate": 0.0001603356628956073, + "loss": 0.2986, + "step": 15410 + }, + { + "epoch": 0.59538978338932, + "grad_norm": 2.0769810676574707, + "learning_rate": 0.00016030992187600552, + "loss": 0.5512, + "step": 15420 + }, + { + "epoch": 0.5957758986833468, + "grad_norm": 0.5503840446472168, + "learning_rate": 0.00016028418085640373, + "loss": 0.4117, + "step": 15430 + }, + { + "epoch": 0.5961620139773737, + "grad_norm": 1.9759409427642822, + "learning_rate": 0.00016025843983680194, + "loss": 0.3619, + "step": 15440 + }, + { + "epoch": 0.5965481292714004, + "grad_norm": 2.2451424598693848, + "learning_rate": 0.00016023269881720016, + "loss": 0.2798, + "step": 15450 + }, + { + "epoch": 0.5969342445654272, + "grad_norm": 1.8537431955337524, + "learning_rate": 0.00016020695779759837, + "loss": 0.3739, + "step": 15460 + }, + { + "epoch": 0.5973203598594541, + "grad_norm": 1.7667044401168823, + "learning_rate": 0.00016018121677799658, + "loss": 0.3694, + "step": 15470 + }, + { + "epoch": 0.5977064751534809, + "grad_norm": 0.8955661654472351, + "learning_rate": 0.0001601554757583948, + "loss": 0.2036, + "step": 15480 + }, + { + "epoch": 0.5980925904475076, + "grad_norm": 0.9526143074035645, + "learning_rate": 0.000160129734738793, + "loss": 0.3728, + "step": 15490 + }, + { + "epoch": 0.5984787057415344, + "grad_norm": 0.5816594958305359, + "learning_rate": 0.00016010399371919122, + "loss": 0.3263, + "step": 15500 + }, + { + "epoch": 0.5988648210355613, + "grad_norm": 0.6841669678688049, + "learning_rate": 0.00016007825269958944, + "loss": 0.3252, + "step": 15510 + }, + { + "epoch": 0.599250936329588, + "grad_norm": 0.9375134706497192, + "learning_rate": 0.00016005251167998765, + "loss": 0.2207, + "step": 15520 + }, + { + 
"epoch": 0.5996370516236148, + "grad_norm": 0.39535248279571533, + "learning_rate": 0.00016002677066038586, + "loss": 0.3566, + "step": 15530 + }, + { + "epoch": 0.6000231669176416, + "grad_norm": 0.5440202951431274, + "learning_rate": 0.00016000102964078408, + "loss": 0.2886, + "step": 15540 + }, + { + "epoch": 0.6004092822116684, + "grad_norm": 0.45111024379730225, + "learning_rate": 0.0001599752886211823, + "loss": 0.2697, + "step": 15550 + }, + { + "epoch": 0.6007953975056952, + "grad_norm": 2.372063398361206, + "learning_rate": 0.0001599495476015805, + "loss": 0.4716, + "step": 15560 + }, + { + "epoch": 0.601181512799722, + "grad_norm": 0.5841318368911743, + "learning_rate": 0.00015992380658197872, + "loss": 0.5611, + "step": 15570 + }, + { + "epoch": 0.6015676280937488, + "grad_norm": 0.323010116815567, + "learning_rate": 0.00015989806556237693, + "loss": 0.2733, + "step": 15580 + }, + { + "epoch": 0.6019537433877756, + "grad_norm": 1.4498323202133179, + "learning_rate": 0.00015987232454277514, + "loss": 0.4063, + "step": 15590 + }, + { + "epoch": 0.6023398586818024, + "grad_norm": 0.47180086374282837, + "learning_rate": 0.00015984658352317336, + "loss": 0.3322, + "step": 15600 + }, + { + "epoch": 0.6027259739758292, + "grad_norm": 1.2303547859191895, + "learning_rate": 0.00015982084250357157, + "loss": 0.2985, + "step": 15610 + }, + { + "epoch": 0.6031120892698559, + "grad_norm": 2.5056209564208984, + "learning_rate": 0.00015979510148396978, + "loss": 0.4903, + "step": 15620 + }, + { + "epoch": 0.6034982045638828, + "grad_norm": 1.344814419746399, + "learning_rate": 0.00015976936046436802, + "loss": 0.3806, + "step": 15630 + }, + { + "epoch": 0.6038843198579096, + "grad_norm": 3.2931411266326904, + "learning_rate": 0.0001597436194447662, + "loss": 0.3291, + "step": 15640 + }, + { + "epoch": 0.6042704351519363, + "grad_norm": 0.3108818829059601, + "learning_rate": 0.00015971787842516442, + "loss": 0.2866, + "step": 15650 + }, + { + "epoch": 0.6046565504459631, + "grad_norm": 2.6437489986419678, + "learning_rate": 0.00015969213740556264, + "loss": 0.2853, + "step": 15660 + }, + { + "epoch": 0.60504266573999, + "grad_norm": 0.6996239423751831, + "learning_rate": 0.00015966639638596085, + "loss": 0.2947, + "step": 15670 + }, + { + "epoch": 0.6054287810340168, + "grad_norm": 0.9377492070198059, + "learning_rate": 0.00015964065536635906, + "loss": 0.5502, + "step": 15680 + }, + { + "epoch": 0.6058148963280435, + "grad_norm": 0.3067781627178192, + "learning_rate": 0.00015961491434675728, + "loss": 0.2916, + "step": 15690 + }, + { + "epoch": 0.6062010116220703, + "grad_norm": 1.6191383600234985, + "learning_rate": 0.00015958917332715552, + "loss": 0.2536, + "step": 15700 + }, + { + "epoch": 0.6065871269160972, + "grad_norm": 0.5139639973640442, + "learning_rate": 0.0001595634323075537, + "loss": 0.2809, + "step": 15710 + }, + { + "epoch": 0.6069732422101239, + "grad_norm": 1.6476198434829712, + "learning_rate": 0.00015953769128795192, + "loss": 0.2859, + "step": 15720 + }, + { + "epoch": 0.6073593575041507, + "grad_norm": 3.895970106124878, + "learning_rate": 0.00015951195026835013, + "loss": 0.5254, + "step": 15730 + }, + { + "epoch": 0.6077454727981776, + "grad_norm": 1.1022089719772339, + "learning_rate": 0.00015948620924874834, + "loss": 0.3254, + "step": 15740 + }, + { + "epoch": 0.6081315880922044, + "grad_norm": 1.3811163902282715, + "learning_rate": 0.00015946046822914656, + "loss": 0.3259, + "step": 15750 + }, + { + "epoch": 0.6085177033862311, + "grad_norm": 1.0810881853103638, + 
"learning_rate": 0.00015943472720954477, + "loss": 0.3689, + "step": 15760 + }, + { + "epoch": 0.6089038186802579, + "grad_norm": 2.1972954273223877, + "learning_rate": 0.000159408986189943, + "loss": 0.3255, + "step": 15770 + }, + { + "epoch": 0.6092899339742848, + "grad_norm": 3.945215940475464, + "learning_rate": 0.0001593832451703412, + "loss": 0.4317, + "step": 15780 + }, + { + "epoch": 0.6096760492683115, + "grad_norm": 0.5246737599372864, + "learning_rate": 0.0001593575041507394, + "loss": 0.5895, + "step": 15790 + }, + { + "epoch": 0.6100621645623383, + "grad_norm": 0.0722908228635788, + "learning_rate": 0.00015933176313113762, + "loss": 0.2078, + "step": 15800 + }, + { + "epoch": 0.6104482798563651, + "grad_norm": 1.888529658317566, + "learning_rate": 0.00015930602211153584, + "loss": 0.3629, + "step": 15810 + }, + { + "epoch": 0.610834395150392, + "grad_norm": 0.7190912365913391, + "learning_rate": 0.00015928028109193408, + "loss": 0.4715, + "step": 15820 + }, + { + "epoch": 0.6112205104444187, + "grad_norm": 1.1624583005905151, + "learning_rate": 0.00015925454007233226, + "loss": 0.3678, + "step": 15830 + }, + { + "epoch": 0.6116066257384455, + "grad_norm": 1.4570399522781372, + "learning_rate": 0.0001592287990527305, + "loss": 0.3008, + "step": 15840 + }, + { + "epoch": 0.6119927410324723, + "grad_norm": 0.45386505126953125, + "learning_rate": 0.0001592030580331287, + "loss": 0.2919, + "step": 15850 + }, + { + "epoch": 0.6123788563264991, + "grad_norm": 1.9031087160110474, + "learning_rate": 0.0001591773170135269, + "loss": 0.4336, + "step": 15860 + }, + { + "epoch": 0.6127649716205259, + "grad_norm": 0.9156181216239929, + "learning_rate": 0.00015915157599392512, + "loss": 0.2451, + "step": 15870 + }, + { + "epoch": 0.6131510869145527, + "grad_norm": 1.8031158447265625, + "learning_rate": 0.00015912583497432333, + "loss": 0.259, + "step": 15880 + }, + { + "epoch": 0.6135372022085794, + "grad_norm": 1.4521692991256714, + "learning_rate": 0.00015910009395472157, + "loss": 0.279, + "step": 15890 + }, + { + "epoch": 0.6139233175026063, + "grad_norm": 1.5064165592193604, + "learning_rate": 0.00015907435293511976, + "loss": 0.34, + "step": 15900 + }, + { + "epoch": 0.6143094327966331, + "grad_norm": 0.588637113571167, + "learning_rate": 0.000159048611915518, + "loss": 0.4886, + "step": 15910 + }, + { + "epoch": 0.6146955480906598, + "grad_norm": 0.3159797191619873, + "learning_rate": 0.00015902287089591618, + "loss": 0.3265, + "step": 15920 + }, + { + "epoch": 0.6150816633846866, + "grad_norm": 3.3988165855407715, + "learning_rate": 0.0001589971298763144, + "loss": 0.4917, + "step": 15930 + }, + { + "epoch": 0.6154677786787135, + "grad_norm": 0.5139709711074829, + "learning_rate": 0.00015897138885671264, + "loss": 0.2175, + "step": 15940 + }, + { + "epoch": 0.6158538939727403, + "grad_norm": 3.6877944469451904, + "learning_rate": 0.00015894564783711082, + "loss": 0.4674, + "step": 15950 + }, + { + "epoch": 0.616240009266767, + "grad_norm": 1.6468040943145752, + "learning_rate": 0.00015891990681750906, + "loss": 0.4375, + "step": 15960 + }, + { + "epoch": 0.6166261245607938, + "grad_norm": 0.47542962431907654, + "learning_rate": 0.00015889416579790725, + "loss": 0.3593, + "step": 15970 + }, + { + "epoch": 0.6170122398548207, + "grad_norm": 2.210597038269043, + "learning_rate": 0.0001588684247783055, + "loss": 0.3356, + "step": 15980 + }, + { + "epoch": 0.6173983551488474, + "grad_norm": 2.0030908584594727, + "learning_rate": 0.00015884268375870368, + "loss": 0.3367, + "step": 15990 + 
}, + { + "epoch": 0.6177844704428742, + "grad_norm": 3.438887119293213, + "learning_rate": 0.0001588169427391019, + "loss": 0.4583, + "step": 16000 + }, + { + "epoch": 0.6181705857369011, + "grad_norm": 2.88147234916687, + "learning_rate": 0.00015879120171950013, + "loss": 0.3837, + "step": 16010 + }, + { + "epoch": 0.6185567010309279, + "grad_norm": 0.9327366948127747, + "learning_rate": 0.00015876546069989832, + "loss": 0.236, + "step": 16020 + }, + { + "epoch": 0.6189428163249546, + "grad_norm": 2.205355405807495, + "learning_rate": 0.00015873971968029656, + "loss": 0.2957, + "step": 16030 + }, + { + "epoch": 0.6193289316189814, + "grad_norm": 2.3861300945281982, + "learning_rate": 0.00015871397866069474, + "loss": 0.427, + "step": 16040 + }, + { + "epoch": 0.6197150469130083, + "grad_norm": 0.3276061415672302, + "learning_rate": 0.00015868823764109298, + "loss": 0.2301, + "step": 16050 + }, + { + "epoch": 0.620101162207035, + "grad_norm": 0.6325292587280273, + "learning_rate": 0.00015866249662149117, + "loss": 0.3915, + "step": 16060 + }, + { + "epoch": 0.6204872775010618, + "grad_norm": 1.1546003818511963, + "learning_rate": 0.0001586367556018894, + "loss": 0.4081, + "step": 16070 + }, + { + "epoch": 0.6208733927950886, + "grad_norm": 1.7260868549346924, + "learning_rate": 0.00015861101458228762, + "loss": 0.4642, + "step": 16080 + }, + { + "epoch": 0.6212595080891155, + "grad_norm": 0.30561816692352295, + "learning_rate": 0.0001585852735626858, + "loss": 0.2065, + "step": 16090 + }, + { + "epoch": 0.6216456233831422, + "grad_norm": 2.440007448196411, + "learning_rate": 0.00015855953254308405, + "loss": 0.3851, + "step": 16100 + }, + { + "epoch": 0.622031738677169, + "grad_norm": 4.06764554977417, + "learning_rate": 0.00015853379152348224, + "loss": 0.3796, + "step": 16110 + }, + { + "epoch": 0.6224178539711958, + "grad_norm": 4.970936298370361, + "learning_rate": 0.00015850805050388048, + "loss": 0.3393, + "step": 16120 + }, + { + "epoch": 0.6228039692652226, + "grad_norm": 2.0721356868743896, + "learning_rate": 0.0001584823094842787, + "loss": 0.2991, + "step": 16130 + }, + { + "epoch": 0.6231900845592494, + "grad_norm": 1.8130602836608887, + "learning_rate": 0.0001584565684646769, + "loss": 0.3132, + "step": 16140 + }, + { + "epoch": 0.6235761998532762, + "grad_norm": 1.1093038320541382, + "learning_rate": 0.00015843082744507512, + "loss": 0.3144, + "step": 16150 + }, + { + "epoch": 0.6239623151473029, + "grad_norm": 2.227937936782837, + "learning_rate": 0.0001584050864254733, + "loss": 0.4955, + "step": 16160 + }, + { + "epoch": 0.6243484304413298, + "grad_norm": 1.2541782855987549, + "learning_rate": 0.00015837934540587154, + "loss": 0.225, + "step": 16170 + }, + { + "epoch": 0.6247345457353566, + "grad_norm": 1.8113441467285156, + "learning_rate": 0.00015835360438626973, + "loss": 0.5393, + "step": 16180 + }, + { + "epoch": 0.6251206610293834, + "grad_norm": 2.198061466217041, + "learning_rate": 0.00015832786336666797, + "loss": 0.2225, + "step": 16190 + }, + { + "epoch": 0.6255067763234101, + "grad_norm": 2.4241714477539062, + "learning_rate": 0.00015830212234706618, + "loss": 0.3294, + "step": 16200 + }, + { + "epoch": 0.625892891617437, + "grad_norm": 3.0632903575897217, + "learning_rate": 0.0001582763813274644, + "loss": 0.3776, + "step": 16210 + }, + { + "epoch": 0.6262790069114638, + "grad_norm": 1.9433149099349976, + "learning_rate": 0.0001582506403078626, + "loss": 0.2964, + "step": 16220 + }, + { + "epoch": 0.6266651222054905, + "grad_norm": 1.6328935623168945, + 
"learning_rate": 0.0001582248992882608, + "loss": 0.2169, + "step": 16230 + }, + { + "epoch": 0.6270512374995174, + "grad_norm": 0.926477313041687, + "learning_rate": 0.00015819915826865904, + "loss": 0.271, + "step": 16240 + }, + { + "epoch": 0.6274373527935442, + "grad_norm": 3.4526686668395996, + "learning_rate": 0.00015817341724905722, + "loss": 0.1836, + "step": 16250 + }, + { + "epoch": 0.627823468087571, + "grad_norm": 0.8149943947792053, + "learning_rate": 0.00015814767622945546, + "loss": 0.2494, + "step": 16260 + }, + { + "epoch": 0.6282095833815977, + "grad_norm": 0.8609616756439209, + "learning_rate": 0.00015812193520985368, + "loss": 0.279, + "step": 16270 + }, + { + "epoch": 0.6285956986756246, + "grad_norm": 0.9432594180107117, + "learning_rate": 0.0001580961941902519, + "loss": 0.3485, + "step": 16280 + }, + { + "epoch": 0.6289818139696514, + "grad_norm": 2.8844246864318848, + "learning_rate": 0.0001580704531706501, + "loss": 0.6163, + "step": 16290 + }, + { + "epoch": 0.6293679292636781, + "grad_norm": 3.4325804710388184, + "learning_rate": 0.0001580447121510483, + "loss": 0.3392, + "step": 16300 + }, + { + "epoch": 0.6297540445577049, + "grad_norm": 0.5320155620574951, + "learning_rate": 0.00015801897113144653, + "loss": 0.4664, + "step": 16310 + }, + { + "epoch": 0.6301401598517318, + "grad_norm": 1.075914740562439, + "learning_rate": 0.00015799323011184474, + "loss": 0.3023, + "step": 16320 + }, + { + "epoch": 0.6305262751457585, + "grad_norm": 1.3115136623382568, + "learning_rate": 0.00015796748909224296, + "loss": 0.1741, + "step": 16330 + }, + { + "epoch": 0.6309123904397853, + "grad_norm": 0.18413056433200836, + "learning_rate": 0.00015794174807264117, + "loss": 0.1966, + "step": 16340 + }, + { + "epoch": 0.6312985057338121, + "grad_norm": 1.8707069158554077, + "learning_rate": 0.00015791600705303938, + "loss": 0.3175, + "step": 16350 + }, + { + "epoch": 0.631684621027839, + "grad_norm": 2.2436699867248535, + "learning_rate": 0.0001578902660334376, + "loss": 0.2619, + "step": 16360 + }, + { + "epoch": 0.6320707363218657, + "grad_norm": 2.6100945472717285, + "learning_rate": 0.00015786452501383578, + "loss": 0.2874, + "step": 16370 + }, + { + "epoch": 0.6324568516158925, + "grad_norm": 1.455538034439087, + "learning_rate": 0.00015783878399423402, + "loss": 0.3956, + "step": 16380 + }, + { + "epoch": 0.6328429669099193, + "grad_norm": 1.1950361728668213, + "learning_rate": 0.00015781304297463224, + "loss": 0.3406, + "step": 16390 + }, + { + "epoch": 0.6332290822039461, + "grad_norm": 0.6905789971351624, + "learning_rate": 0.00015778730195503045, + "loss": 0.2788, + "step": 16400 + }, + { + "epoch": 0.6336151974979729, + "grad_norm": 1.8803350925445557, + "learning_rate": 0.00015776156093542866, + "loss": 0.5509, + "step": 16410 + }, + { + "epoch": 0.6340013127919997, + "grad_norm": 4.088913440704346, + "learning_rate": 0.00015773581991582688, + "loss": 0.5238, + "step": 16420 + }, + { + "epoch": 0.6343874280860264, + "grad_norm": 2.9464988708496094, + "learning_rate": 0.0001577100788962251, + "loss": 0.4721, + "step": 16430 + }, + { + "epoch": 0.6347735433800533, + "grad_norm": 2.005481719970703, + "learning_rate": 0.0001576843378766233, + "loss": 0.323, + "step": 16440 + }, + { + "epoch": 0.6351596586740801, + "grad_norm": 0.1693512350320816, + "learning_rate": 0.00015765859685702152, + "loss": 0.3459, + "step": 16450 + }, + { + "epoch": 0.6355457739681069, + "grad_norm": 1.6552183628082275, + "learning_rate": 0.00015763285583741973, + "loss": 0.4299, + "step": 16460 
+ }, + { + "epoch": 0.6359318892621336, + "grad_norm": 0.8498923182487488, + "learning_rate": 0.00015760711481781794, + "loss": 0.3665, + "step": 16470 + }, + { + "epoch": 0.6363180045561605, + "grad_norm": 1.098840594291687, + "learning_rate": 0.00015758137379821616, + "loss": 0.318, + "step": 16480 + }, + { + "epoch": 0.6367041198501873, + "grad_norm": 2.69606876373291, + "learning_rate": 0.00015755563277861437, + "loss": 0.3566, + "step": 16490 + }, + { + "epoch": 0.637090235144214, + "grad_norm": 1.4099249839782715, + "learning_rate": 0.00015752989175901258, + "loss": 0.2658, + "step": 16500 + }, + { + "epoch": 0.6374763504382409, + "grad_norm": 0.10336513817310333, + "learning_rate": 0.0001575041507394108, + "loss": 0.4608, + "step": 16510 + }, + { + "epoch": 0.6378624657322677, + "grad_norm": 2.224609136581421, + "learning_rate": 0.000157478409719809, + "loss": 0.2875, + "step": 16520 + }, + { + "epoch": 0.6382485810262944, + "grad_norm": 1.3182893991470337, + "learning_rate": 0.00015745266870020722, + "loss": 0.2972, + "step": 16530 + }, + { + "epoch": 0.6386346963203212, + "grad_norm": 0.8028392195701599, + "learning_rate": 0.00015742692768060544, + "loss": 0.3553, + "step": 16540 + }, + { + "epoch": 0.6390208116143481, + "grad_norm": 2.6714046001434326, + "learning_rate": 0.00015740118666100365, + "loss": 0.3965, + "step": 16550 + }, + { + "epoch": 0.6394069269083749, + "grad_norm": 0.6173273921012878, + "learning_rate": 0.00015737544564140186, + "loss": 0.4278, + "step": 16560 + }, + { + "epoch": 0.6397930422024016, + "grad_norm": 0.9547831416130066, + "learning_rate": 0.00015734970462180008, + "loss": 0.3854, + "step": 16570 + }, + { + "epoch": 0.6401791574964284, + "grad_norm": 1.1336010694503784, + "learning_rate": 0.0001573239636021983, + "loss": 0.3505, + "step": 16580 + }, + { + "epoch": 0.6405652727904553, + "grad_norm": 4.911902904510498, + "learning_rate": 0.0001572982225825965, + "loss": 0.6624, + "step": 16590 + }, + { + "epoch": 0.640951388084482, + "grad_norm": 2.472303628921509, + "learning_rate": 0.00015727248156299472, + "loss": 0.4479, + "step": 16600 + }, + { + "epoch": 0.6413375033785088, + "grad_norm": 1.7077019214630127, + "learning_rate": 0.00015724674054339293, + "loss": 0.3985, + "step": 16610 + }, + { + "epoch": 0.6417236186725356, + "grad_norm": 4.096541404724121, + "learning_rate": 0.00015722099952379114, + "loss": 0.4763, + "step": 16620 + }, + { + "epoch": 0.6421097339665625, + "grad_norm": 1.5275769233703613, + "learning_rate": 0.00015719525850418936, + "loss": 0.3792, + "step": 16630 + }, + { + "epoch": 0.6424958492605892, + "grad_norm": 0.33548179268836975, + "learning_rate": 0.00015716951748458757, + "loss": 0.3276, + "step": 16640 + }, + { + "epoch": 0.642881964554616, + "grad_norm": 4.142831325531006, + "learning_rate": 0.00015714377646498578, + "loss": 0.4906, + "step": 16650 + }, + { + "epoch": 0.6432680798486428, + "grad_norm": 1.5129270553588867, + "learning_rate": 0.000157118035445384, + "loss": 0.3077, + "step": 16660 + }, + { + "epoch": 0.6436541951426696, + "grad_norm": 2.0287647247314453, + "learning_rate": 0.0001570922944257822, + "loss": 0.43, + "step": 16670 + }, + { + "epoch": 0.6440403104366964, + "grad_norm": 2.1278579235076904, + "learning_rate": 0.00015706655340618042, + "loss": 0.3822, + "step": 16680 + }, + { + "epoch": 0.6444264257307232, + "grad_norm": 0.7621383666992188, + "learning_rate": 0.00015704081238657864, + "loss": 0.2556, + "step": 16690 + }, + { + "epoch": 0.6448125410247499, + "grad_norm": 1.888422966003418, + 
"learning_rate": 0.00015701507136697685, + "loss": 0.4687, + "step": 16700 + }, + { + "epoch": 0.6451986563187768, + "grad_norm": 2.09405517578125, + "learning_rate": 0.00015698933034737506, + "loss": 0.4447, + "step": 16710 + }, + { + "epoch": 0.6455847716128036, + "grad_norm": 0.8639073967933655, + "learning_rate": 0.00015696358932777328, + "loss": 0.224, + "step": 16720 + }, + { + "epoch": 0.6459708869068304, + "grad_norm": 1.1472411155700684, + "learning_rate": 0.0001569378483081715, + "loss": 0.315, + "step": 16730 + }, + { + "epoch": 0.6463570022008571, + "grad_norm": 2.4987192153930664, + "learning_rate": 0.0001569121072885697, + "loss": 0.3707, + "step": 16740 + }, + { + "epoch": 0.646743117494884, + "grad_norm": 1.661458134651184, + "learning_rate": 0.00015688636626896792, + "loss": 0.4027, + "step": 16750 + }, + { + "epoch": 0.6471292327889108, + "grad_norm": 1.5494720935821533, + "learning_rate": 0.00015686062524936613, + "loss": 0.3737, + "step": 16760 + }, + { + "epoch": 0.6475153480829375, + "grad_norm": 0.3376433253288269, + "learning_rate": 0.00015683488422976437, + "loss": 0.3468, + "step": 16770 + }, + { + "epoch": 0.6479014633769644, + "grad_norm": 0.6496528387069702, + "learning_rate": 0.00015680914321016256, + "loss": 0.2857, + "step": 16780 + }, + { + "epoch": 0.6482875786709912, + "grad_norm": 1.957741618156433, + "learning_rate": 0.00015678340219056077, + "loss": 0.5089, + "step": 16790 + }, + { + "epoch": 0.648673693965018, + "grad_norm": 3.980466842651367, + "learning_rate": 0.00015675766117095898, + "loss": 0.3558, + "step": 16800 + }, + { + "epoch": 0.6490598092590447, + "grad_norm": 3.2516696453094482, + "learning_rate": 0.0001567319201513572, + "loss": 0.6214, + "step": 16810 + }, + { + "epoch": 0.6494459245530716, + "grad_norm": 0.6847260594367981, + "learning_rate": 0.00015670617913175544, + "loss": 0.3681, + "step": 16820 + }, + { + "epoch": 0.6498320398470984, + "grad_norm": 3.0918118953704834, + "learning_rate": 0.00015668043811215362, + "loss": 0.3608, + "step": 16830 + }, + { + "epoch": 0.6502181551411251, + "grad_norm": 1.1295204162597656, + "learning_rate": 0.00015665469709255186, + "loss": 0.3189, + "step": 16840 + }, + { + "epoch": 0.6506042704351519, + "grad_norm": 1.803222417831421, + "learning_rate": 0.00015662895607295005, + "loss": 0.3138, + "step": 16850 + }, + { + "epoch": 0.6509903857291788, + "grad_norm": 3.157122850418091, + "learning_rate": 0.00015660321505334826, + "loss": 0.4342, + "step": 16860 + }, + { + "epoch": 0.6513765010232055, + "grad_norm": 2.6584184169769287, + "learning_rate": 0.00015657747403374648, + "loss": 0.211, + "step": 16870 + }, + { + "epoch": 0.6517626163172323, + "grad_norm": 0.763903021812439, + "learning_rate": 0.0001565517330141447, + "loss": 0.2873, + "step": 16880 + }, + { + "epoch": 0.6521487316112591, + "grad_norm": 2.5033602714538574, + "learning_rate": 0.00015652599199454293, + "loss": 0.4475, + "step": 16890 + }, + { + "epoch": 0.652534846905286, + "grad_norm": 2.088690757751465, + "learning_rate": 0.00015650025097494112, + "loss": 0.3022, + "step": 16900 + }, + { + "epoch": 0.6529209621993127, + "grad_norm": 1.596064567565918, + "learning_rate": 0.00015647450995533936, + "loss": 0.3771, + "step": 16910 + }, + { + "epoch": 0.6533070774933395, + "grad_norm": 1.2658660411834717, + "learning_rate": 0.00015644876893573754, + "loss": 0.4793, + "step": 16920 + }, + { + "epoch": 0.6536931927873663, + "grad_norm": 1.5343844890594482, + "learning_rate": 0.00015642302791613576, + "loss": 0.5026, + "step": 16930 + 
}, + { + "epoch": 0.6540793080813931, + "grad_norm": 0.4736674129962921, + "learning_rate": 0.000156397286896534, + "loss": 0.2269, + "step": 16940 + }, + { + "epoch": 0.6544654233754199, + "grad_norm": 0.05510171130299568, + "learning_rate": 0.00015637154587693218, + "loss": 0.2398, + "step": 16950 + }, + { + "epoch": 0.6548515386694467, + "grad_norm": 0.641941249370575, + "learning_rate": 0.00015634580485733042, + "loss": 0.3862, + "step": 16960 + }, + { + "epoch": 0.6552376539634734, + "grad_norm": 1.5418890714645386, + "learning_rate": 0.0001563200638377286, + "loss": 0.2688, + "step": 16970 + }, + { + "epoch": 0.6556237692575003, + "grad_norm": 3.46284818649292, + "learning_rate": 0.00015629432281812685, + "loss": 0.5199, + "step": 16980 + }, + { + "epoch": 0.6560098845515271, + "grad_norm": 0.3225530683994293, + "learning_rate": 0.00015626858179852504, + "loss": 0.5035, + "step": 16990 + }, + { + "epoch": 0.6563959998455539, + "grad_norm": 0.9385218620300293, + "learning_rate": 0.00015624284077892325, + "loss": 0.2618, + "step": 17000 + }, + { + "epoch": 0.6567821151395806, + "grad_norm": 0.8849124312400818, + "learning_rate": 0.0001562170997593215, + "loss": 0.3711, + "step": 17010 + }, + { + "epoch": 0.6571682304336075, + "grad_norm": 2.2706375122070312, + "learning_rate": 0.00015619135873971968, + "loss": 0.2666, + "step": 17020 + }, + { + "epoch": 0.6575543457276343, + "grad_norm": 2.1923744678497314, + "learning_rate": 0.00015616561772011792, + "loss": 0.2038, + "step": 17030 + }, + { + "epoch": 0.657940461021661, + "grad_norm": 1.2356051206588745, + "learning_rate": 0.0001561398767005161, + "loss": 0.4103, + "step": 17040 + }, + { + "epoch": 0.6583265763156879, + "grad_norm": 1.583095669746399, + "learning_rate": 0.00015611413568091434, + "loss": 0.4164, + "step": 17050 + }, + { + "epoch": 0.6587126916097147, + "grad_norm": 1.0823155641555786, + "learning_rate": 0.00015608839466131253, + "loss": 0.3898, + "step": 17060 + }, + { + "epoch": 0.6590988069037415, + "grad_norm": 1.9568531513214111, + "learning_rate": 0.00015606265364171074, + "loss": 0.3553, + "step": 17070 + }, + { + "epoch": 0.6594849221977682, + "grad_norm": 3.576362371444702, + "learning_rate": 0.00015603691262210898, + "loss": 0.2693, + "step": 17080 + }, + { + "epoch": 0.6598710374917951, + "grad_norm": 0.2432270646095276, + "learning_rate": 0.00015601117160250717, + "loss": 0.2993, + "step": 17090 + }, + { + "epoch": 0.6602571527858219, + "grad_norm": 1.5935213565826416, + "learning_rate": 0.0001559854305829054, + "loss": 0.3295, + "step": 17100 + }, + { + "epoch": 0.6606432680798486, + "grad_norm": 0.09780561178922653, + "learning_rate": 0.0001559596895633036, + "loss": 0.2891, + "step": 17110 + }, + { + "epoch": 0.6610293833738754, + "grad_norm": 0.5332283973693848, + "learning_rate": 0.00015593394854370184, + "loss": 0.372, + "step": 17120 + }, + { + "epoch": 0.6614154986679023, + "grad_norm": 1.1921123266220093, + "learning_rate": 0.00015590820752410005, + "loss": 0.3155, + "step": 17130 + }, + { + "epoch": 0.661801613961929, + "grad_norm": 0.35267120599746704, + "learning_rate": 0.00015588246650449823, + "loss": 0.3795, + "step": 17140 + }, + { + "epoch": 0.6621877292559558, + "grad_norm": 0.4876207411289215, + "learning_rate": 0.00015585672548489648, + "loss": 0.2717, + "step": 17150 + }, + { + "epoch": 0.6625738445499826, + "grad_norm": 0.9866208434104919, + "learning_rate": 0.00015583098446529466, + "loss": 0.4121, + "step": 17160 + }, + { + "epoch": 0.6629599598440095, + "grad_norm": 
3.0264835357666016, + "learning_rate": 0.0001558052434456929, + "loss": 0.5356, + "step": 17170 + }, + { + "epoch": 0.6633460751380362, + "grad_norm": 2.4786953926086426, + "learning_rate": 0.0001557795024260911, + "loss": 0.2086, + "step": 17180 + }, + { + "epoch": 0.663732190432063, + "grad_norm": 2.3706555366516113, + "learning_rate": 0.00015575376140648933, + "loss": 0.5224, + "step": 17190 + }, + { + "epoch": 0.6641183057260898, + "grad_norm": 2.6375296115875244, + "learning_rate": 0.00015572802038688754, + "loss": 0.3625, + "step": 17200 + }, + { + "epoch": 0.6645044210201166, + "grad_norm": 0.5592703819274902, + "learning_rate": 0.00015570227936728573, + "loss": 0.3831, + "step": 17210 + }, + { + "epoch": 0.6648905363141434, + "grad_norm": 2.309683322906494, + "learning_rate": 0.00015567653834768397, + "loss": 0.4039, + "step": 17220 + }, + { + "epoch": 0.6652766516081702, + "grad_norm": 2.3134100437164307, + "learning_rate": 0.00015565079732808215, + "loss": 0.1904, + "step": 17230 + }, + { + "epoch": 0.665662766902197, + "grad_norm": 2.232910633087158, + "learning_rate": 0.0001556250563084804, + "loss": 0.34, + "step": 17240 + }, + { + "epoch": 0.6660488821962238, + "grad_norm": 0.4798373579978943, + "learning_rate": 0.0001555993152888786, + "loss": 0.3143, + "step": 17250 + }, + { + "epoch": 0.6664349974902506, + "grad_norm": 2.071753740310669, + "learning_rate": 0.00015557357426927682, + "loss": 0.2645, + "step": 17260 + }, + { + "epoch": 0.6668211127842774, + "grad_norm": 2.1930956840515137, + "learning_rate": 0.00015554783324967504, + "loss": 0.4144, + "step": 17270 + }, + { + "epoch": 0.6672072280783041, + "grad_norm": 1.7874137163162231, + "learning_rate": 0.00015552209223007325, + "loss": 0.273, + "step": 17280 + }, + { + "epoch": 0.667593343372331, + "grad_norm": 1.264596939086914, + "learning_rate": 0.00015549635121047146, + "loss": 0.4186, + "step": 17290 + }, + { + "epoch": 0.6679794586663578, + "grad_norm": 0.5612212419509888, + "learning_rate": 0.00015547061019086965, + "loss": 0.2802, + "step": 17300 + }, + { + "epoch": 0.6683655739603845, + "grad_norm": 1.3782585859298706, + "learning_rate": 0.0001554448691712679, + "loss": 0.3712, + "step": 17310 + }, + { + "epoch": 0.6687516892544114, + "grad_norm": 1.5178605318069458, + "learning_rate": 0.0001554191281516661, + "loss": 0.1694, + "step": 17320 + }, + { + "epoch": 0.6691378045484382, + "grad_norm": 2.1221604347229004, + "learning_rate": 0.00015539338713206432, + "loss": 0.4418, + "step": 17330 + }, + { + "epoch": 0.669523919842465, + "grad_norm": 1.570734977722168, + "learning_rate": 0.00015536764611246253, + "loss": 0.4037, + "step": 17340 + }, + { + "epoch": 0.6699100351364917, + "grad_norm": 0.6928157806396484, + "learning_rate": 0.00015534190509286074, + "loss": 0.5293, + "step": 17350 + }, + { + "epoch": 0.6702961504305186, + "grad_norm": 0.8526401519775391, + "learning_rate": 0.00015531616407325895, + "loss": 0.348, + "step": 17360 + }, + { + "epoch": 0.6706822657245454, + "grad_norm": 1.7482202053070068, + "learning_rate": 0.00015529042305365714, + "loss": 0.352, + "step": 17370 + }, + { + "epoch": 0.6710683810185721, + "grad_norm": 1.724870204925537, + "learning_rate": 0.00015526468203405538, + "loss": 0.3589, + "step": 17380 + }, + { + "epoch": 0.6714544963125989, + "grad_norm": 3.125180721282959, + "learning_rate": 0.0001552389410144536, + "loss": 0.3063, + "step": 17390 + }, + { + "epoch": 0.6718406116066258, + "grad_norm": 2.0817360877990723, + "learning_rate": 0.0001552131999948518, + "loss": 0.2217, + 
"step": 17400 + }, + { + "epoch": 0.6722267269006525, + "grad_norm": 0.12367000430822372, + "learning_rate": 0.00015518745897525002, + "loss": 0.1691, + "step": 17410 + }, + { + "epoch": 0.6726128421946793, + "grad_norm": 0.23093344271183014, + "learning_rate": 0.00015516171795564823, + "loss": 0.2496, + "step": 17420 + }, + { + "epoch": 0.6729989574887061, + "grad_norm": 3.1588997840881348, + "learning_rate": 0.00015513597693604645, + "loss": 0.2868, + "step": 17430 + }, + { + "epoch": 0.673385072782733, + "grad_norm": 1.471999168395996, + "learning_rate": 0.00015511023591644466, + "loss": 0.2785, + "step": 17440 + }, + { + "epoch": 0.6737711880767597, + "grad_norm": 0.4500691294670105, + "learning_rate": 0.00015508449489684287, + "loss": 0.3218, + "step": 17450 + }, + { + "epoch": 0.6741573033707865, + "grad_norm": 2.65533709526062, + "learning_rate": 0.0001550587538772411, + "loss": 0.3194, + "step": 17460 + }, + { + "epoch": 0.6745434186648133, + "grad_norm": 0.45398348569869995, + "learning_rate": 0.0001550330128576393, + "loss": 0.199, + "step": 17470 + }, + { + "epoch": 0.6749295339588401, + "grad_norm": 0.21518200635910034, + "learning_rate": 0.00015500727183803751, + "loss": 0.3043, + "step": 17480 + }, + { + "epoch": 0.6753156492528669, + "grad_norm": 0.13117246329784393, + "learning_rate": 0.00015498153081843573, + "loss": 0.1872, + "step": 17490 + }, + { + "epoch": 0.6757017645468937, + "grad_norm": 0.4857695996761322, + "learning_rate": 0.00015495578979883394, + "loss": 0.5992, + "step": 17500 + }, + { + "epoch": 0.6760878798409204, + "grad_norm": 2.4992752075195312, + "learning_rate": 0.00015493004877923215, + "loss": 0.5057, + "step": 17510 + }, + { + "epoch": 0.6764739951349473, + "grad_norm": 1.9614732265472412, + "learning_rate": 0.00015490430775963037, + "loss": 0.3169, + "step": 17520 + }, + { + "epoch": 0.6768601104289741, + "grad_norm": 0.14168275892734528, + "learning_rate": 0.00015487856674002858, + "loss": 0.271, + "step": 17530 + }, + { + "epoch": 0.6772462257230009, + "grad_norm": 4.064804553985596, + "learning_rate": 0.0001548528257204268, + "loss": 0.3316, + "step": 17540 + }, + { + "epoch": 0.6776323410170277, + "grad_norm": 3.2959964275360107, + "learning_rate": 0.000154827084700825, + "loss": 0.5148, + "step": 17550 + }, + { + "epoch": 0.6780184563110545, + "grad_norm": 0.6234021186828613, + "learning_rate": 0.00015480134368122322, + "loss": 0.362, + "step": 17560 + }, + { + "epoch": 0.6784045716050813, + "grad_norm": 0.642573356628418, + "learning_rate": 0.00015477560266162143, + "loss": 0.2982, + "step": 17570 + }, + { + "epoch": 0.678790686899108, + "grad_norm": 1.098667025566101, + "learning_rate": 0.00015474986164201965, + "loss": 0.1875, + "step": 17580 + }, + { + "epoch": 0.6791768021931349, + "grad_norm": 2.378192186355591, + "learning_rate": 0.00015472412062241786, + "loss": 0.2533, + "step": 17590 + }, + { + "epoch": 0.6795629174871617, + "grad_norm": 1.1783161163330078, + "learning_rate": 0.00015469837960281607, + "loss": 0.402, + "step": 17600 + }, + { + "epoch": 0.6799490327811885, + "grad_norm": 0.1861846148967743, + "learning_rate": 0.0001546726385832143, + "loss": 0.2481, + "step": 17610 + }, + { + "epoch": 0.6803351480752152, + "grad_norm": 0.5785403847694397, + "learning_rate": 0.0001546468975636125, + "loss": 0.375, + "step": 17620 + }, + { + "epoch": 0.6807212633692421, + "grad_norm": 1.9201544523239136, + "learning_rate": 0.00015462115654401071, + "loss": 0.2757, + "step": 17630 + }, + { + "epoch": 0.6811073786632689, + "grad_norm": 
2.461735963821411, + "learning_rate": 0.00015459541552440893, + "loss": 0.2193, + "step": 17640 + }, + { + "epoch": 0.6814934939572956, + "grad_norm": 2.007638454437256, + "learning_rate": 0.00015456967450480714, + "loss": 0.3561, + "step": 17650 + }, + { + "epoch": 0.6818796092513224, + "grad_norm": 1.3581938743591309, + "learning_rate": 0.00015454393348520535, + "loss": 0.3736, + "step": 17660 + }, + { + "epoch": 0.6822657245453493, + "grad_norm": 0.5637246966362, + "learning_rate": 0.00015451819246560357, + "loss": 0.3116, + "step": 17670 + }, + { + "epoch": 0.682651839839376, + "grad_norm": 1.409740924835205, + "learning_rate": 0.00015449245144600178, + "loss": 0.3281, + "step": 17680 + }, + { + "epoch": 0.6830379551334028, + "grad_norm": 3.2064149379730225, + "learning_rate": 0.0001544667104264, + "loss": 0.4427, + "step": 17690 + }, + { + "epoch": 0.6834240704274296, + "grad_norm": 0.9369992613792419, + "learning_rate": 0.0001544409694067982, + "loss": 0.3424, + "step": 17700 + }, + { + "epoch": 0.6838101857214565, + "grad_norm": 2.4149889945983887, + "learning_rate": 0.00015441522838719642, + "loss": 0.4074, + "step": 17710 + }, + { + "epoch": 0.6841963010154832, + "grad_norm": 0.688360869884491, + "learning_rate": 0.00015438948736759463, + "loss": 0.2205, + "step": 17720 + }, + { + "epoch": 0.68458241630951, + "grad_norm": 2.1444098949432373, + "learning_rate": 0.00015436374634799285, + "loss": 0.5942, + "step": 17730 + }, + { + "epoch": 0.6849685316035368, + "grad_norm": 1.8053444623947144, + "learning_rate": 0.00015433800532839106, + "loss": 0.1665, + "step": 17740 + }, + { + "epoch": 0.6853546468975636, + "grad_norm": 3.5637879371643066, + "learning_rate": 0.0001543122643087893, + "loss": 0.4972, + "step": 17750 + }, + { + "epoch": 0.6857407621915904, + "grad_norm": 1.846845030784607, + "learning_rate": 0.0001542865232891875, + "loss": 0.4105, + "step": 17760 + }, + { + "epoch": 0.6861268774856172, + "grad_norm": 2.2459189891815186, + "learning_rate": 0.0001542607822695857, + "loss": 0.253, + "step": 17770 + }, + { + "epoch": 0.686512992779644, + "grad_norm": 2.3160414695739746, + "learning_rate": 0.00015423504124998391, + "loss": 0.1905, + "step": 17780 + }, + { + "epoch": 0.6868991080736708, + "grad_norm": 1.2804152965545654, + "learning_rate": 0.00015420930023038213, + "loss": 0.2283, + "step": 17790 + }, + { + "epoch": 0.6872852233676976, + "grad_norm": 1.7174758911132812, + "learning_rate": 0.00015418355921078034, + "loss": 0.49, + "step": 17800 + }, + { + "epoch": 0.6876713386617244, + "grad_norm": 3.057098627090454, + "learning_rate": 0.00015415781819117855, + "loss": 0.4398, + "step": 17810 + }, + { + "epoch": 0.6880574539557512, + "grad_norm": 0.9112808704376221, + "learning_rate": 0.0001541320771715768, + "loss": 0.203, + "step": 17820 + }, + { + "epoch": 0.688443569249778, + "grad_norm": 2.899599313735962, + "learning_rate": 0.00015410633615197498, + "loss": 0.4212, + "step": 17830 + }, + { + "epoch": 0.6888296845438048, + "grad_norm": 1.8084157705307007, + "learning_rate": 0.0001540805951323732, + "loss": 0.4948, + "step": 17840 + }, + { + "epoch": 0.6892157998378315, + "grad_norm": 1.2151083946228027, + "learning_rate": 0.0001540548541127714, + "loss": 0.4254, + "step": 17850 + }, + { + "epoch": 0.6896019151318584, + "grad_norm": 1.20271897315979, + "learning_rate": 0.00015402911309316962, + "loss": 0.1753, + "step": 17860 + }, + { + "epoch": 0.6899880304258852, + "grad_norm": 0.7688419222831726, + "learning_rate": 0.00015400337207356783, + "loss": 0.4066, + "step": 
17870 + }, + { + "epoch": 0.690374145719912, + "grad_norm": 0.8648087978363037, + "learning_rate": 0.00015397763105396605, + "loss": 0.5405, + "step": 17880 + }, + { + "epoch": 0.6907602610139387, + "grad_norm": 1.5501036643981934, + "learning_rate": 0.0001539518900343643, + "loss": 0.3761, + "step": 17890 + }, + { + "epoch": 0.6911463763079656, + "grad_norm": 0.5476267337799072, + "learning_rate": 0.00015392614901476247, + "loss": 0.2256, + "step": 17900 + }, + { + "epoch": 0.6915324916019924, + "grad_norm": 2.0248584747314453, + "learning_rate": 0.0001539004079951607, + "loss": 0.5449, + "step": 17910 + }, + { + "epoch": 0.6919186068960191, + "grad_norm": 1.688596248626709, + "learning_rate": 0.0001538746669755589, + "loss": 0.262, + "step": 17920 + }, + { + "epoch": 0.6923047221900459, + "grad_norm": 0.42646175622940063, + "learning_rate": 0.00015384892595595711, + "loss": 0.3714, + "step": 17930 + }, + { + "epoch": 0.6926908374840728, + "grad_norm": 0.9620506167411804, + "learning_rate": 0.00015382318493635535, + "loss": 0.272, + "step": 17940 + }, + { + "epoch": 0.6930769527780996, + "grad_norm": 1.6859287023544312, + "learning_rate": 0.00015379744391675354, + "loss": 0.3123, + "step": 17950 + }, + { + "epoch": 0.6934630680721263, + "grad_norm": 0.6281775832176208, + "learning_rate": 0.00015377170289715178, + "loss": 0.2808, + "step": 17960 + }, + { + "epoch": 0.6938491833661531, + "grad_norm": 3.756242036819458, + "learning_rate": 0.00015374596187754997, + "loss": 0.4971, + "step": 17970 + }, + { + "epoch": 0.69423529866018, + "grad_norm": 0.6022955775260925, + "learning_rate": 0.0001537202208579482, + "loss": 0.2918, + "step": 17980 + }, + { + "epoch": 0.6946214139542067, + "grad_norm": 0.6843704581260681, + "learning_rate": 0.0001536944798383464, + "loss": 0.476, + "step": 17990 + }, + { + "epoch": 0.6950075292482335, + "grad_norm": 1.0234850645065308, + "learning_rate": 0.0001536687388187446, + "loss": 0.2101, + "step": 18000 + }, + { + "epoch": 0.6953936445422603, + "grad_norm": 1.0228936672210693, + "learning_rate": 0.00015364299779914285, + "loss": 0.3958, + "step": 18010 + }, + { + "epoch": 0.6957797598362871, + "grad_norm": 1.1152328252792358, + "learning_rate": 0.00015361725677954103, + "loss": 0.2967, + "step": 18020 + }, + { + "epoch": 0.6961658751303139, + "grad_norm": 1.7190260887145996, + "learning_rate": 0.00015359151575993927, + "loss": 0.5281, + "step": 18030 + }, + { + "epoch": 0.6965519904243407, + "grad_norm": 0.6654171943664551, + "learning_rate": 0.00015356577474033746, + "loss": 0.3467, + "step": 18040 + }, + { + "epoch": 0.6969381057183675, + "grad_norm": 0.3305549621582031, + "learning_rate": 0.0001535400337207357, + "loss": 0.3576, + "step": 18050 + }, + { + "epoch": 0.6973242210123943, + "grad_norm": 0.3116997480392456, + "learning_rate": 0.0001535142927011339, + "loss": 0.5372, + "step": 18060 + }, + { + "epoch": 0.6977103363064211, + "grad_norm": 0.4224954843521118, + "learning_rate": 0.0001534885516815321, + "loss": 0.3866, + "step": 18070 + }, + { + "epoch": 0.6980964516004479, + "grad_norm": 4.249162197113037, + "learning_rate": 0.00015346281066193034, + "loss": 0.4236, + "step": 18080 + }, + { + "epoch": 0.6984825668944747, + "grad_norm": 1.109113335609436, + "learning_rate": 0.00015343706964232853, + "loss": 0.2882, + "step": 18090 + }, + { + "epoch": 0.6988686821885015, + "grad_norm": 1.3546028137207031, + "learning_rate": 0.00015341132862272677, + "loss": 0.3432, + "step": 18100 + }, + { + "epoch": 0.6992547974825283, + "grad_norm": 
2.943016290664673, + "learning_rate": 0.00015338558760312495, + "loss": 0.3287, + "step": 18110 + }, + { + "epoch": 0.699640912776555, + "grad_norm": 1.1259021759033203, + "learning_rate": 0.0001533598465835232, + "loss": 0.4613, + "step": 18120 + }, + { + "epoch": 0.7000270280705819, + "grad_norm": 1.4867910146713257, + "learning_rate": 0.0001533341055639214, + "loss": 0.4309, + "step": 18130 + }, + { + "epoch": 0.7004131433646087, + "grad_norm": 2.6913414001464844, + "learning_rate": 0.0001533083645443196, + "loss": 0.2154, + "step": 18140 + }, + { + "epoch": 0.7007992586586355, + "grad_norm": 1.495466947555542, + "learning_rate": 0.00015328262352471783, + "loss": 0.3207, + "step": 18150 + }, + { + "epoch": 0.7011853739526622, + "grad_norm": 1.023193120956421, + "learning_rate": 0.00015325688250511602, + "loss": 0.2067, + "step": 18160 + }, + { + "epoch": 0.7015714892466891, + "grad_norm": 1.603235125541687, + "learning_rate": 0.00015323114148551426, + "loss": 0.4577, + "step": 18170 + }, + { + "epoch": 0.7019576045407159, + "grad_norm": 0.5976241230964661, + "learning_rate": 0.00015320540046591245, + "loss": 0.2282, + "step": 18180 + }, + { + "epoch": 0.7023437198347426, + "grad_norm": 2.561659574508667, + "learning_rate": 0.0001531796594463107, + "loss": 0.4045, + "step": 18190 + }, + { + "epoch": 0.7027298351287694, + "grad_norm": 1.3893495798110962, + "learning_rate": 0.0001531539184267089, + "loss": 0.2419, + "step": 18200 + }, + { + "epoch": 0.7031159504227963, + "grad_norm": 0.7786352038383484, + "learning_rate": 0.00015312817740710709, + "loss": 0.1653, + "step": 18210 + }, + { + "epoch": 0.703502065716823, + "grad_norm": 0.6525956988334656, + "learning_rate": 0.00015310243638750533, + "loss": 0.5418, + "step": 18220 + }, + { + "epoch": 0.7038881810108498, + "grad_norm": 0.38933584094047546, + "learning_rate": 0.0001530766953679035, + "loss": 0.2952, + "step": 18230 + }, + { + "epoch": 0.7042742963048766, + "grad_norm": 2.0752692222595215, + "learning_rate": 0.00015305095434830175, + "loss": 0.211, + "step": 18240 + }, + { + "epoch": 0.7046604115989035, + "grad_norm": 0.9095730781555176, + "learning_rate": 0.00015302521332869997, + "loss": 0.2723, + "step": 18250 + }, + { + "epoch": 0.7050465268929302, + "grad_norm": 1.6840119361877441, + "learning_rate": 0.00015299947230909818, + "loss": 0.3362, + "step": 18260 + }, + { + "epoch": 0.705432642186957, + "grad_norm": 2.0353269577026367, + "learning_rate": 0.0001529737312894964, + "loss": 0.2407, + "step": 18270 + }, + { + "epoch": 0.7058187574809838, + "grad_norm": 3.0865590572357178, + "learning_rate": 0.00015294799026989458, + "loss": 0.3426, + "step": 18280 + }, + { + "epoch": 0.7062048727750107, + "grad_norm": 1.6488090753555298, + "learning_rate": 0.00015292224925029282, + "loss": 0.4275, + "step": 18290 + }, + { + "epoch": 0.7065909880690374, + "grad_norm": 0.5494143962860107, + "learning_rate": 0.000152896508230691, + "loss": 0.412, + "step": 18300 + }, + { + "epoch": 0.7069771033630642, + "grad_norm": 3.111301898956299, + "learning_rate": 0.00015287076721108925, + "loss": 0.4615, + "step": 18310 + }, + { + "epoch": 0.707363218657091, + "grad_norm": 1.74229097366333, + "learning_rate": 0.00015284502619148746, + "loss": 0.3194, + "step": 18320 + }, + { + "epoch": 0.7077493339511178, + "grad_norm": 1.8455474376678467, + "learning_rate": 0.00015281928517188567, + "loss": 0.2817, + "step": 18330 + }, + { + "epoch": 0.7081354492451446, + "grad_norm": 1.778723120689392, + "learning_rate": 0.0001527935441522839, + "loss": 0.441, + 
"step": 18340 + }, + { + "epoch": 0.7085215645391714, + "grad_norm": 0.7885593771934509, + "learning_rate": 0.00015276780313268207, + "loss": 0.3121, + "step": 18350 + }, + { + "epoch": 0.7089076798331982, + "grad_norm": 1.9262609481811523, + "learning_rate": 0.0001527420621130803, + "loss": 0.3271, + "step": 18360 + }, + { + "epoch": 0.709293795127225, + "grad_norm": 0.10278096795082092, + "learning_rate": 0.0001527163210934785, + "loss": 0.2602, + "step": 18370 + }, + { + "epoch": 0.7096799104212518, + "grad_norm": 1.2394765615463257, + "learning_rate": 0.00015269058007387674, + "loss": 0.2641, + "step": 18380 + }, + { + "epoch": 0.7100660257152785, + "grad_norm": 2.0335285663604736, + "learning_rate": 0.00015266483905427495, + "loss": 0.1926, + "step": 18390 + }, + { + "epoch": 0.7104521410093054, + "grad_norm": 7.205105781555176, + "learning_rate": 0.00015263909803467317, + "loss": 0.4772, + "step": 18400 + }, + { + "epoch": 0.7108382563033322, + "grad_norm": 1.1549599170684814, + "learning_rate": 0.00015261335701507138, + "loss": 0.2775, + "step": 18410 + }, + { + "epoch": 0.711224371597359, + "grad_norm": 2.8363780975341797, + "learning_rate": 0.00015258761599546957, + "loss": 0.3533, + "step": 18420 + }, + { + "epoch": 0.7116104868913857, + "grad_norm": 0.2606666684150696, + "learning_rate": 0.0001525618749758678, + "loss": 0.1421, + "step": 18430 + }, + { + "epoch": 0.7119966021854126, + "grad_norm": 1.2934225797653198, + "learning_rate": 0.00015253613395626602, + "loss": 0.3066, + "step": 18440 + }, + { + "epoch": 0.7123827174794394, + "grad_norm": 3.8246026039123535, + "learning_rate": 0.00015251039293666423, + "loss": 0.534, + "step": 18450 + }, + { + "epoch": 0.7127688327734661, + "grad_norm": 2.2535433769226074, + "learning_rate": 0.00015248465191706245, + "loss": 0.4795, + "step": 18460 + }, + { + "epoch": 0.7131549480674929, + "grad_norm": 1.6749187707901, + "learning_rate": 0.00015245891089746066, + "loss": 0.4091, + "step": 18470 + }, + { + "epoch": 0.7135410633615198, + "grad_norm": 1.1795039176940918, + "learning_rate": 0.00015243316987785887, + "loss": 0.2612, + "step": 18480 + }, + { + "epoch": 0.7139271786555466, + "grad_norm": 1.9280221462249756, + "learning_rate": 0.00015240742885825709, + "loss": 0.2068, + "step": 18490 + }, + { + "epoch": 0.7143132939495733, + "grad_norm": 1.1188548803329468, + "learning_rate": 0.0001523816878386553, + "loss": 0.3193, + "step": 18500 + }, + { + "epoch": 0.7146994092436001, + "grad_norm": 0.2429720014333725, + "learning_rate": 0.0001523559468190535, + "loss": 0.3129, + "step": 18510 + }, + { + "epoch": 0.715085524537627, + "grad_norm": 4.09410285949707, + "learning_rate": 0.00015233020579945173, + "loss": 0.2437, + "step": 18520 + }, + { + "epoch": 0.7154716398316537, + "grad_norm": 2.3252813816070557, + "learning_rate": 0.00015230446477984994, + "loss": 0.4649, + "step": 18530 + }, + { + "epoch": 0.7158577551256805, + "grad_norm": 0.5725727677345276, + "learning_rate": 0.00015227872376024815, + "loss": 0.3291, + "step": 18540 + }, + { + "epoch": 0.7162438704197073, + "grad_norm": 0.9253637194633484, + "learning_rate": 0.00015225298274064637, + "loss": 0.3486, + "step": 18550 + }, + { + "epoch": 0.7166299857137342, + "grad_norm": 2.3353309631347656, + "learning_rate": 0.00015222724172104458, + "loss": 0.253, + "step": 18560 + }, + { + "epoch": 0.7170161010077609, + "grad_norm": 0.7312389016151428, + "learning_rate": 0.0001522015007014428, + "loss": 0.2817, + "step": 18570 + }, + { + "epoch": 0.7174022163017877, + "grad_norm": 
0.6564128994941711, + "learning_rate": 0.000152175759681841, + "loss": 0.2896, + "step": 18580 + }, + { + "epoch": 0.7177883315958145, + "grad_norm": 3.4619979858398438, + "learning_rate": 0.00015215001866223922, + "loss": 0.5028, + "step": 18590 + }, + { + "epoch": 0.7181744468898413, + "grad_norm": 6.910060882568359, + "learning_rate": 0.00015212427764263743, + "loss": 0.2467, + "step": 18600 + }, + { + "epoch": 0.7185605621838681, + "grad_norm": 2.022186279296875, + "learning_rate": 0.00015209853662303565, + "loss": 0.3406, + "step": 18610 + }, + { + "epoch": 0.7189466774778949, + "grad_norm": 1.2240760326385498, + "learning_rate": 0.00015207279560343386, + "loss": 0.3391, + "step": 18620 + }, + { + "epoch": 0.7193327927719217, + "grad_norm": 0.7356148958206177, + "learning_rate": 0.00015204705458383207, + "loss": 0.2679, + "step": 18630 + }, + { + "epoch": 0.7197189080659485, + "grad_norm": 0.963387131690979, + "learning_rate": 0.00015202131356423029, + "loss": 0.4088, + "step": 18640 + }, + { + "epoch": 0.7201050233599753, + "grad_norm": 3.0437800884246826, + "learning_rate": 0.0001519955725446285, + "loss": 0.2591, + "step": 18650 + }, + { + "epoch": 0.720491138654002, + "grad_norm": 2.5874569416046143, + "learning_rate": 0.0001519698315250267, + "loss": 0.2377, + "step": 18660 + }, + { + "epoch": 0.7208772539480289, + "grad_norm": 2.3215808868408203, + "learning_rate": 0.00015194409050542493, + "loss": 0.5335, + "step": 18670 + }, + { + "epoch": 0.7212633692420557, + "grad_norm": 1.9501638412475586, + "learning_rate": 0.00015191834948582314, + "loss": 0.4657, + "step": 18680 + }, + { + "epoch": 0.7216494845360825, + "grad_norm": 1.8396021127700806, + "learning_rate": 0.00015189260846622135, + "loss": 0.2719, + "step": 18690 + }, + { + "epoch": 0.7220355998301092, + "grad_norm": 0.9337745904922485, + "learning_rate": 0.00015186686744661957, + "loss": 0.3672, + "step": 18700 + }, + { + "epoch": 0.7224217151241361, + "grad_norm": 1.892098069190979, + "learning_rate": 0.00015184112642701778, + "loss": 0.6085, + "step": 18710 + }, + { + "epoch": 0.7228078304181629, + "grad_norm": 1.051630973815918, + "learning_rate": 0.000151815385407416, + "loss": 0.2422, + "step": 18720 + }, + { + "epoch": 0.7231939457121896, + "grad_norm": 0.8714147210121155, + "learning_rate": 0.0001517896443878142, + "loss": 0.4046, + "step": 18730 + }, + { + "epoch": 0.7235800610062164, + "grad_norm": 0.5002617835998535, + "learning_rate": 0.00015176390336821242, + "loss": 0.3708, + "step": 18740 + }, + { + "epoch": 0.7239661763002433, + "grad_norm": 1.3960262537002563, + "learning_rate": 0.00015173816234861066, + "loss": 0.3206, + "step": 18750 + }, + { + "epoch": 0.7243522915942701, + "grad_norm": 0.7899012565612793, + "learning_rate": 0.00015171242132900885, + "loss": 0.2875, + "step": 18760 + }, + { + "epoch": 0.7247384068882968, + "grad_norm": 0.7216291427612305, + "learning_rate": 0.00015168668030940706, + "loss": 0.3885, + "step": 18770 + }, + { + "epoch": 0.7251245221823236, + "grad_norm": 0.36028966307640076, + "learning_rate": 0.00015166093928980527, + "loss": 0.3542, + "step": 18780 + }, + { + "epoch": 0.7255106374763505, + "grad_norm": 1.378724455833435, + "learning_rate": 0.00015163519827020349, + "loss": 0.2149, + "step": 18790 + }, + { + "epoch": 0.7258967527703772, + "grad_norm": 1.544819712638855, + "learning_rate": 0.0001516094572506017, + "loss": 0.278, + "step": 18800 + }, + { + "epoch": 0.726282868064404, + "grad_norm": 0.5819025039672852, + "learning_rate": 0.0001515837162309999, + "loss": 
0.2161, + "step": 18810 + }, + { + "epoch": 0.7266689833584308, + "grad_norm": 0.8350955843925476, + "learning_rate": 0.00015155797521139815, + "loss": 0.4134, + "step": 18820 + }, + { + "epoch": 0.7270550986524577, + "grad_norm": 2.4110195636749268, + "learning_rate": 0.00015153223419179634, + "loss": 0.3928, + "step": 18830 + }, + { + "epoch": 0.7274412139464844, + "grad_norm": 0.7913835048675537, + "learning_rate": 0.00015150649317219455, + "loss": 0.2336, + "step": 18840 + }, + { + "epoch": 0.7278273292405112, + "grad_norm": 2.13431978225708, + "learning_rate": 0.00015148075215259277, + "loss": 0.4758, + "step": 18850 + }, + { + "epoch": 0.7282134445345381, + "grad_norm": 1.2756295204162598, + "learning_rate": 0.00015145501113299098, + "loss": 0.3139, + "step": 18860 + }, + { + "epoch": 0.7285995598285648, + "grad_norm": 0.3661370277404785, + "learning_rate": 0.0001514292701133892, + "loss": 0.3913, + "step": 18870 + }, + { + "epoch": 0.7289856751225916, + "grad_norm": 1.2238267660140991, + "learning_rate": 0.0001514035290937874, + "loss": 0.1893, + "step": 18880 + }, + { + "epoch": 0.7293717904166184, + "grad_norm": 2.8876595497131348, + "learning_rate": 0.00015137778807418565, + "loss": 0.2567, + "step": 18890 + }, + { + "epoch": 0.7297579057106452, + "grad_norm": 1.248967170715332, + "learning_rate": 0.00015135204705458383, + "loss": 0.2004, + "step": 18900 + }, + { + "epoch": 0.730144021004672, + "grad_norm": 0.9446873068809509, + "learning_rate": 0.00015132630603498205, + "loss": 0.386, + "step": 18910 + }, + { + "epoch": 0.7305301362986988, + "grad_norm": 4.592974662780762, + "learning_rate": 0.00015130056501538026, + "loss": 0.4549, + "step": 18920 + }, + { + "epoch": 0.7309162515927256, + "grad_norm": 0.8000105619430542, + "learning_rate": 0.00015127482399577847, + "loss": 0.2795, + "step": 18930 + }, + { + "epoch": 0.7313023668867524, + "grad_norm": 0.5600059628486633, + "learning_rate": 0.0001512490829761767, + "loss": 0.3682, + "step": 18940 + }, + { + "epoch": 0.7316884821807792, + "grad_norm": 0.4072086811065674, + "learning_rate": 0.0001512233419565749, + "loss": 0.3186, + "step": 18950 + }, + { + "epoch": 0.732074597474806, + "grad_norm": 3.387422561645508, + "learning_rate": 0.00015119760093697314, + "loss": 0.3195, + "step": 18960 + }, + { + "epoch": 0.7324607127688327, + "grad_norm": 0.5224191546440125, + "learning_rate": 0.00015117185991737133, + "loss": 0.1792, + "step": 18970 + }, + { + "epoch": 0.7328468280628596, + "grad_norm": 0.5431543588638306, + "learning_rate": 0.00015114611889776954, + "loss": 0.2775, + "step": 18980 + }, + { + "epoch": 0.7332329433568864, + "grad_norm": 0.13088488578796387, + "learning_rate": 0.00015112037787816775, + "loss": 0.3511, + "step": 18990 + }, + { + "epoch": 0.7336190586509131, + "grad_norm": 0.6414417624473572, + "learning_rate": 0.00015109463685856597, + "loss": 0.4042, + "step": 19000 + }, + { + "epoch": 0.7340051739449399, + "grad_norm": 0.2358855903148651, + "learning_rate": 0.0001510688958389642, + "loss": 0.2567, + "step": 19010 + }, + { + "epoch": 0.7343912892389668, + "grad_norm": 1.343703031539917, + "learning_rate": 0.0001510431548193624, + "loss": 0.3658, + "step": 19020 + }, + { + "epoch": 0.7347774045329936, + "grad_norm": 2.5982301235198975, + "learning_rate": 0.00015101741379976063, + "loss": 0.3664, + "step": 19030 + }, + { + "epoch": 0.7351635198270203, + "grad_norm": 1.3639850616455078, + "learning_rate": 0.00015099167278015882, + "loss": 0.1809, + "step": 19040 + }, + { + "epoch": 0.7355496351210471, + 
"grad_norm": 1.322572946548462, + "learning_rate": 0.00015096593176055703, + "loss": 0.213, + "step": 19050 + }, + { + "epoch": 0.735935750415074, + "grad_norm": 0.6858059763908386, + "learning_rate": 0.00015094019074095527, + "loss": 0.1494, + "step": 19060 + }, + { + "epoch": 0.7363218657091007, + "grad_norm": 3.9815866947174072, + "learning_rate": 0.00015091444972135346, + "loss": 0.4879, + "step": 19070 + }, + { + "epoch": 0.7367079810031275, + "grad_norm": 0.5155348777770996, + "learning_rate": 0.0001508887087017517, + "loss": 0.1951, + "step": 19080 + }, + { + "epoch": 0.7370940962971543, + "grad_norm": 1.1120082139968872, + "learning_rate": 0.00015086296768214988, + "loss": 0.3156, + "step": 19090 + }, + { + "epoch": 0.7374802115911812, + "grad_norm": 2.1396732330322266, + "learning_rate": 0.00015083722666254813, + "loss": 0.3528, + "step": 19100 + }, + { + "epoch": 0.7378663268852079, + "grad_norm": 1.8543074131011963, + "learning_rate": 0.0001508114856429463, + "loss": 0.2778, + "step": 19110 + }, + { + "epoch": 0.7382524421792347, + "grad_norm": 0.2955397963523865, + "learning_rate": 0.00015078574462334452, + "loss": 0.3267, + "step": 19120 + }, + { + "epoch": 0.7386385574732616, + "grad_norm": 0.6277685165405273, + "learning_rate": 0.00015076000360374277, + "loss": 0.3316, + "step": 19130 + }, + { + "epoch": 0.7390246727672883, + "grad_norm": 1.4339113235473633, + "learning_rate": 0.00015073426258414095, + "loss": 0.3227, + "step": 19140 + }, + { + "epoch": 0.7394107880613151, + "grad_norm": 2.74206805229187, + "learning_rate": 0.0001507085215645392, + "loss": 0.2086, + "step": 19150 + }, + { + "epoch": 0.7397969033553419, + "grad_norm": 1.0108954906463623, + "learning_rate": 0.00015068278054493738, + "loss": 0.2697, + "step": 19160 + }, + { + "epoch": 0.7401830186493688, + "grad_norm": 1.726650357246399, + "learning_rate": 0.00015065703952533562, + "loss": 0.1312, + "step": 19170 + }, + { + "epoch": 0.7405691339433955, + "grad_norm": 1.8226735591888428, + "learning_rate": 0.0001506312985057338, + "loss": 0.4488, + "step": 19180 + }, + { + "epoch": 0.7409552492374223, + "grad_norm": 1.4812517166137695, + "learning_rate": 0.00015060555748613205, + "loss": 0.472, + "step": 19190 + }, + { + "epoch": 0.741341364531449, + "grad_norm": 1.184036374092102, + "learning_rate": 0.00015057981646653026, + "loss": 0.1342, + "step": 19200 + }, + { + "epoch": 0.7417274798254759, + "grad_norm": 2.925368309020996, + "learning_rate": 0.00015055407544692844, + "loss": 0.4114, + "step": 19210 + }, + { + "epoch": 0.7421135951195027, + "grad_norm": 1.4891862869262695, + "learning_rate": 0.00015052833442732669, + "loss": 0.3454, + "step": 19220 + }, + { + "epoch": 0.7424997104135295, + "grad_norm": 2.9221529960632324, + "learning_rate": 0.00015050259340772487, + "loss": 0.4622, + "step": 19230 + }, + { + "epoch": 0.7428858257075562, + "grad_norm": 1.3214635848999023, + "learning_rate": 0.0001504768523881231, + "loss": 0.2783, + "step": 19240 + }, + { + "epoch": 0.7432719410015831, + "grad_norm": 1.1919734477996826, + "learning_rate": 0.00015045111136852133, + "loss": 0.3667, + "step": 19250 + }, + { + "epoch": 0.7436580562956099, + "grad_norm": 1.7075424194335938, + "learning_rate": 0.00015042537034891954, + "loss": 0.1867, + "step": 19260 + }, + { + "epoch": 0.7440441715896366, + "grad_norm": 0.6810876727104187, + "learning_rate": 0.00015039962932931775, + "loss": 0.2231, + "step": 19270 + }, + { + "epoch": 0.7444302868836634, + "grad_norm": 1.3421598672866821, + "learning_rate": 
0.00015037388830971594, + "loss": 0.2138, + "step": 19280 + }, + { + "epoch": 0.7448164021776903, + "grad_norm": 1.5983080863952637, + "learning_rate": 0.00015034814729011418, + "loss": 0.2799, + "step": 19290 + }, + { + "epoch": 0.7452025174717171, + "grad_norm": 2.0319182872772217, + "learning_rate": 0.00015032240627051236, + "loss": 0.213, + "step": 19300 + }, + { + "epoch": 0.7455886327657438, + "grad_norm": 0.6433222889900208, + "learning_rate": 0.0001502966652509106, + "loss": 0.3604, + "step": 19310 + }, + { + "epoch": 0.7459747480597706, + "grad_norm": 1.1373825073242188, + "learning_rate": 0.00015027092423130882, + "loss": 0.3007, + "step": 19320 + }, + { + "epoch": 0.7463608633537975, + "grad_norm": 1.7230875492095947, + "learning_rate": 0.00015024518321170703, + "loss": 0.2471, + "step": 19330 + }, + { + "epoch": 0.7467469786478242, + "grad_norm": 1.73224937915802, + "learning_rate": 0.00015021944219210524, + "loss": 0.4877, + "step": 19340 + }, + { + "epoch": 0.747133093941851, + "grad_norm": 0.9023095369338989, + "learning_rate": 0.00015019370117250343, + "loss": 0.352, + "step": 19350 + }, + { + "epoch": 0.7475192092358778, + "grad_norm": 1.898983120918274, + "learning_rate": 0.00015016796015290167, + "loss": 0.3981, + "step": 19360 + }, + { + "epoch": 0.7479053245299047, + "grad_norm": 1.8544923067092896, + "learning_rate": 0.00015014221913329986, + "loss": 0.3015, + "step": 19370 + }, + { + "epoch": 0.7482914398239314, + "grad_norm": 1.1380795240402222, + "learning_rate": 0.0001501164781136981, + "loss": 0.3173, + "step": 19380 + }, + { + "epoch": 0.7486775551179582, + "grad_norm": 0.5071800351142883, + "learning_rate": 0.0001500907370940963, + "loss": 0.46, + "step": 19390 + }, + { + "epoch": 0.7490636704119851, + "grad_norm": 0.30739355087280273, + "learning_rate": 0.00015006499607449452, + "loss": 0.4942, + "step": 19400 + }, + { + "epoch": 0.7494497857060118, + "grad_norm": 1.1223585605621338, + "learning_rate": 0.00015003925505489274, + "loss": 0.3118, + "step": 19410 + }, + { + "epoch": 0.7498359010000386, + "grad_norm": 1.019545555114746, + "learning_rate": 0.00015001351403529092, + "loss": 0.3115, + "step": 19420 + }, + { + "epoch": 0.7502220162940654, + "grad_norm": 0.4567502439022064, + "learning_rate": 0.00014998777301568916, + "loss": 0.2932, + "step": 19430 + }, + { + "epoch": 0.7506081315880923, + "grad_norm": 1.669258952140808, + "learning_rate": 0.00014996203199608738, + "loss": 0.2889, + "step": 19440 + }, + { + "epoch": 0.750994246882119, + "grad_norm": 0.2787584364414215, + "learning_rate": 0.0001499362909764856, + "loss": 0.3482, + "step": 19450 + }, + { + "epoch": 0.7513803621761458, + "grad_norm": 1.6648303270339966, + "learning_rate": 0.0001499105499568838, + "loss": 0.3136, + "step": 19460 + }, + { + "epoch": 0.7517664774701726, + "grad_norm": 2.6357266902923584, + "learning_rate": 0.00014988480893728202, + "loss": 0.4643, + "step": 19470 + }, + { + "epoch": 0.7521525927641994, + "grad_norm": 1.8017394542694092, + "learning_rate": 0.00014985906791768023, + "loss": 0.469, + "step": 19480 + }, + { + "epoch": 0.7525387080582262, + "grad_norm": 1.8467847108840942, + "learning_rate": 0.00014983332689807842, + "loss": 0.1495, + "step": 19490 + }, + { + "epoch": 0.752924823352253, + "grad_norm": 2.9446980953216553, + "learning_rate": 0.00014980758587847666, + "loss": 0.3162, + "step": 19500 + }, + { + "epoch": 0.7533109386462797, + "grad_norm": 1.4076721668243408, + "learning_rate": 0.00014978184485887487, + "loss": 0.2356, + "step": 19510 + }, + { + 
"epoch": 0.7536970539403066, + "grad_norm": 1.2669463157653809, + "learning_rate": 0.00014975610383927308, + "loss": 0.3056, + "step": 19520 + }, + { + "epoch": 0.7540831692343334, + "grad_norm": 1.1390401124954224, + "learning_rate": 0.0001497303628196713, + "loss": 0.2785, + "step": 19530 + }, + { + "epoch": 0.7544692845283602, + "grad_norm": 3.095099925994873, + "learning_rate": 0.0001497046218000695, + "loss": 0.489, + "step": 19540 + }, + { + "epoch": 0.7548553998223869, + "grad_norm": 1.5737907886505127, + "learning_rate": 0.00014967888078046772, + "loss": 0.3981, + "step": 19550 + }, + { + "epoch": 0.7552415151164138, + "grad_norm": 1.4953045845031738, + "learning_rate": 0.00014965313976086594, + "loss": 0.3153, + "step": 19560 + }, + { + "epoch": 0.7556276304104406, + "grad_norm": 0.7709154486656189, + "learning_rate": 0.00014962739874126415, + "loss": 0.2904, + "step": 19570 + }, + { + "epoch": 0.7560137457044673, + "grad_norm": 2.118950366973877, + "learning_rate": 0.00014960165772166236, + "loss": 0.3736, + "step": 19580 + }, + { + "epoch": 0.7563998609984941, + "grad_norm": 2.9463138580322266, + "learning_rate": 0.00014957591670206058, + "loss": 0.3186, + "step": 19590 + }, + { + "epoch": 0.756785976292521, + "grad_norm": 1.3784689903259277, + "learning_rate": 0.0001495501756824588, + "loss": 0.3589, + "step": 19600 + }, + { + "epoch": 0.7571720915865477, + "grad_norm": 2.35467267036438, + "learning_rate": 0.000149524434662857, + "loss": 0.1282, + "step": 19610 + }, + { + "epoch": 0.7575582068805745, + "grad_norm": 0.7167999148368835, + "learning_rate": 0.00014949869364325522, + "loss": 0.3775, + "step": 19620 + }, + { + "epoch": 0.7579443221746013, + "grad_norm": 1.242785096168518, + "learning_rate": 0.00014947295262365343, + "loss": 0.3814, + "step": 19630 + }, + { + "epoch": 0.7583304374686282, + "grad_norm": 2.7875797748565674, + "learning_rate": 0.00014944721160405164, + "loss": 0.4764, + "step": 19640 + }, + { + "epoch": 0.7587165527626549, + "grad_norm": 1.2169462442398071, + "learning_rate": 0.00014942147058444986, + "loss": 0.3132, + "step": 19650 + }, + { + "epoch": 0.7591026680566817, + "grad_norm": 3.146204948425293, + "learning_rate": 0.00014939572956484807, + "loss": 0.248, + "step": 19660 + }, + { + "epoch": 0.7594887833507086, + "grad_norm": 2.2177276611328125, + "learning_rate": 0.00014936998854524628, + "loss": 0.3755, + "step": 19670 + }, + { + "epoch": 0.7598748986447353, + "grad_norm": 0.8063843250274658, + "learning_rate": 0.0001493442475256445, + "loss": 0.6341, + "step": 19680 + }, + { + "epoch": 0.7602610139387621, + "grad_norm": 0.18064215779304504, + "learning_rate": 0.0001493185065060427, + "loss": 0.3565, + "step": 19690 + }, + { + "epoch": 0.7606471292327889, + "grad_norm": 1.4401954412460327, + "learning_rate": 0.00014929276548644092, + "loss": 0.1873, + "step": 19700 + }, + { + "epoch": 0.7610332445268158, + "grad_norm": 0.7269515991210938, + "learning_rate": 0.00014926702446683914, + "loss": 0.2765, + "step": 19710 + }, + { + "epoch": 0.7614193598208425, + "grad_norm": 1.2779995203018188, + "learning_rate": 0.00014924128344723735, + "loss": 0.3633, + "step": 19720 + }, + { + "epoch": 0.7618054751148693, + "grad_norm": 1.3330426216125488, + "learning_rate": 0.00014921554242763556, + "loss": 0.3736, + "step": 19730 + }, + { + "epoch": 0.7621915904088961, + "grad_norm": 1.4269347190856934, + "learning_rate": 0.00014918980140803378, + "loss": 0.2572, + "step": 19740 + }, + { + "epoch": 0.7625777057029229, + "grad_norm": 0.9075976610183716, + 
"learning_rate": 0.000149164060388432, + "loss": 0.3609, + "step": 19750 + }, + { + "epoch": 0.7629638209969497, + "grad_norm": 1.7566559314727783, + "learning_rate": 0.0001491383193688302, + "loss": 0.3134, + "step": 19760 + }, + { + "epoch": 0.7633499362909765, + "grad_norm": 2.4620914459228516, + "learning_rate": 0.00014911257834922842, + "loss": 0.3559, + "step": 19770 + }, + { + "epoch": 0.7637360515850032, + "grad_norm": 0.9474597573280334, + "learning_rate": 0.00014908683732962663, + "loss": 0.3821, + "step": 19780 + }, + { + "epoch": 0.7641221668790301, + "grad_norm": 0.6298363208770752, + "learning_rate": 0.00014906109631002484, + "loss": 0.2801, + "step": 19790 + }, + { + "epoch": 0.7645082821730569, + "grad_norm": 0.213288351893425, + "learning_rate": 0.00014903535529042306, + "loss": 0.368, + "step": 19800 + }, + { + "epoch": 0.7648943974670837, + "grad_norm": 1.4412375688552856, + "learning_rate": 0.00014900961427082127, + "loss": 0.3972, + "step": 19810 + }, + { + "epoch": 0.7652805127611104, + "grad_norm": 1.6808812618255615, + "learning_rate": 0.00014898387325121948, + "loss": 0.5302, + "step": 19820 + }, + { + "epoch": 0.7656666280551373, + "grad_norm": 0.922726571559906, + "learning_rate": 0.0001489581322316177, + "loss": 0.3299, + "step": 19830 + }, + { + "epoch": 0.7660527433491641, + "grad_norm": 0.5058152079582214, + "learning_rate": 0.0001489323912120159, + "loss": 0.4485, + "step": 19840 + }, + { + "epoch": 0.7664388586431908, + "grad_norm": 1.3025776147842407, + "learning_rate": 0.00014890665019241412, + "loss": 0.266, + "step": 19850 + }, + { + "epoch": 0.7668249739372176, + "grad_norm": 2.0282516479492188, + "learning_rate": 0.00014888090917281234, + "loss": 0.7752, + "step": 19860 + }, + { + "epoch": 0.7672110892312445, + "grad_norm": 0.6557582020759583, + "learning_rate": 0.00014885516815321058, + "loss": 0.256, + "step": 19870 + }, + { + "epoch": 0.7675972045252712, + "grad_norm": 0.13688494265079498, + "learning_rate": 0.00014882942713360876, + "loss": 0.263, + "step": 19880 + }, + { + "epoch": 0.767983319819298, + "grad_norm": 0.04091642051935196, + "learning_rate": 0.00014880368611400698, + "loss": 0.276, + "step": 19890 + }, + { + "epoch": 0.7683694351133248, + "grad_norm": 3.054969072341919, + "learning_rate": 0.0001487779450944052, + "loss": 0.5198, + "step": 19900 + }, + { + "epoch": 0.7687555504073517, + "grad_norm": 0.5707372426986694, + "learning_rate": 0.0001487522040748034, + "loss": 0.2715, + "step": 19910 + }, + { + "epoch": 0.7691416657013784, + "grad_norm": 0.477830708026886, + "learning_rate": 0.00014872646305520162, + "loss": 0.2517, + "step": 19920 + }, + { + "epoch": 0.7695277809954052, + "grad_norm": 1.0550785064697266, + "learning_rate": 0.00014870072203559983, + "loss": 0.2552, + "step": 19930 + }, + { + "epoch": 0.7699138962894321, + "grad_norm": 1.9678715467453003, + "learning_rate": 0.00014867498101599807, + "loss": 0.4487, + "step": 19940 + }, + { + "epoch": 0.7703000115834588, + "grad_norm": 0.4506283104419708, + "learning_rate": 0.00014864923999639626, + "loss": 0.2632, + "step": 19950 + }, + { + "epoch": 0.7706861268774856, + "grad_norm": 1.8445035219192505, + "learning_rate": 0.0001486234989767945, + "loss": 0.3315, + "step": 19960 + }, + { + "epoch": 0.7710722421715124, + "grad_norm": 2.6394076347351074, + "learning_rate": 0.00014859775795719268, + "loss": 0.3216, + "step": 19970 + }, + { + "epoch": 0.7714583574655393, + "grad_norm": 0.6717782020568848, + "learning_rate": 0.0001485720169375909, + "loss": 0.3531, + "step": 19980 
+ }, + { + "epoch": 0.771844472759566, + "grad_norm": 0.5959204435348511, + "learning_rate": 0.0001485462759179891, + "loss": 0.2095, + "step": 19990 + }, + { + "epoch": 0.7722305880535928, + "grad_norm": 1.874375581741333, + "learning_rate": 0.00014852053489838732, + "loss": 0.4845, + "step": 20000 + }, + { + "epoch": 0.7726167033476196, + "grad_norm": 2.6810474395751953, + "learning_rate": 0.00014849479387878556, + "loss": 0.471, + "step": 20010 + }, + { + "epoch": 0.7730028186416464, + "grad_norm": 0.5498594045639038, + "learning_rate": 0.00014846905285918375, + "loss": 0.2265, + "step": 20020 + }, + { + "epoch": 0.7733889339356732, + "grad_norm": 0.7658601999282837, + "learning_rate": 0.000148443311839582, + "loss": 0.4459, + "step": 20030 + }, + { + "epoch": 0.7737750492297, + "grad_norm": 0.5597706437110901, + "learning_rate": 0.00014841757081998018, + "loss": 0.4317, + "step": 20040 + }, + { + "epoch": 0.7741611645237267, + "grad_norm": 0.740342915058136, + "learning_rate": 0.0001483918298003784, + "loss": 0.4158, + "step": 20050 + }, + { + "epoch": 0.7745472798177536, + "grad_norm": 0.6069484949111938, + "learning_rate": 0.00014836608878077663, + "loss": 0.2363, + "step": 20060 + }, + { + "epoch": 0.7749333951117804, + "grad_norm": 0.9331381916999817, + "learning_rate": 0.00014834034776117482, + "loss": 0.2063, + "step": 20070 + }, + { + "epoch": 0.7753195104058072, + "grad_norm": 1.298399806022644, + "learning_rate": 0.00014831460674157306, + "loss": 0.319, + "step": 20080 + }, + { + "epoch": 0.7757056256998339, + "grad_norm": 1.887229323387146, + "learning_rate": 0.00014828886572197124, + "loss": 0.3805, + "step": 20090 + }, + { + "epoch": 0.7760917409938608, + "grad_norm": 0.5734463930130005, + "learning_rate": 0.00014826312470236948, + "loss": 0.392, + "step": 20100 + }, + { + "epoch": 0.7764778562878876, + "grad_norm": 1.10410475730896, + "learning_rate": 0.00014823738368276767, + "loss": 0.3222, + "step": 20110 + }, + { + "epoch": 0.7768639715819143, + "grad_norm": 3.1354923248291016, + "learning_rate": 0.00014821164266316588, + "loss": 0.3434, + "step": 20120 + }, + { + "epoch": 0.7772500868759411, + "grad_norm": 1.9921913146972656, + "learning_rate": 0.00014818590164356412, + "loss": 0.222, + "step": 20130 + }, + { + "epoch": 0.777636202169968, + "grad_norm": 0.38615912199020386, + "learning_rate": 0.0001481601606239623, + "loss": 0.1859, + "step": 20140 + }, + { + "epoch": 0.7780223174639948, + "grad_norm": 0.919252336025238, + "learning_rate": 0.00014813441960436055, + "loss": 0.3889, + "step": 20150 + }, + { + "epoch": 0.7784084327580215, + "grad_norm": 0.9474624991416931, + "learning_rate": 0.00014810867858475874, + "loss": 0.3086, + "step": 20160 + }, + { + "epoch": 0.7787945480520484, + "grad_norm": 1.1992007493972778, + "learning_rate": 0.00014808293756515698, + "loss": 0.3949, + "step": 20170 + }, + { + "epoch": 0.7791806633460752, + "grad_norm": 2.2405364513397217, + "learning_rate": 0.00014805719654555516, + "loss": 0.4963, + "step": 20180 + }, + { + "epoch": 0.7795667786401019, + "grad_norm": 0.9163286685943604, + "learning_rate": 0.00014803145552595338, + "loss": 0.4022, + "step": 20190 + }, + { + "epoch": 0.7799528939341287, + "grad_norm": 0.995309591293335, + "learning_rate": 0.00014800571450635162, + "loss": 0.244, + "step": 20200 + }, + { + "epoch": 0.7803390092281556, + "grad_norm": 3.0966508388519287, + "learning_rate": 0.0001479799734867498, + "loss": 0.4165, + "step": 20210 + }, + { + "epoch": 0.7807251245221823, + "grad_norm": 1.7679264545440674, + 
"learning_rate": 0.00014795423246714804, + "loss": 0.3264, + "step": 20220 + }, + { + "epoch": 0.7811112398162091, + "grad_norm": 3.405724048614502, + "learning_rate": 0.00014792849144754623, + "loss": 0.5292, + "step": 20230 + }, + { + "epoch": 0.7814973551102359, + "grad_norm": 0.5335774421691895, + "learning_rate": 0.00014790275042794447, + "loss": 0.2901, + "step": 20240 + }, + { + "epoch": 0.7818834704042628, + "grad_norm": 2.332176446914673, + "learning_rate": 0.00014787700940834268, + "loss": 0.3116, + "step": 20250 + }, + { + "epoch": 0.7822695856982895, + "grad_norm": 1.226844072341919, + "learning_rate": 0.00014785126838874087, + "loss": 0.332, + "step": 20260 + }, + { + "epoch": 0.7826557009923163, + "grad_norm": 0.8310544490814209, + "learning_rate": 0.0001478255273691391, + "loss": 0.3929, + "step": 20270 + }, + { + "epoch": 0.7830418162863431, + "grad_norm": 4.065925121307373, + "learning_rate": 0.0001477997863495373, + "loss": 0.3044, + "step": 20280 + }, + { + "epoch": 0.7834279315803699, + "grad_norm": 1.3661054372787476, + "learning_rate": 0.00014777404532993554, + "loss": 0.2396, + "step": 20290 + }, + { + "epoch": 0.7838140468743967, + "grad_norm": 2.047938346862793, + "learning_rate": 0.00014774830431033372, + "loss": 0.2954, + "step": 20300 + }, + { + "epoch": 0.7842001621684235, + "grad_norm": 2.099759817123413, + "learning_rate": 0.00014772256329073196, + "loss": 0.1987, + "step": 20310 + }, + { + "epoch": 0.7845862774624502, + "grad_norm": 4.439823627471924, + "learning_rate": 0.00014769682227113018, + "loss": 0.2449, + "step": 20320 + }, + { + "epoch": 0.7849723927564771, + "grad_norm": 1.894250750541687, + "learning_rate": 0.00014767108125152836, + "loss": 0.3335, + "step": 20330 + }, + { + "epoch": 0.7853585080505039, + "grad_norm": 2.2916924953460693, + "learning_rate": 0.0001476453402319266, + "loss": 0.3828, + "step": 20340 + }, + { + "epoch": 0.7857446233445307, + "grad_norm": 3.7847397327423096, + "learning_rate": 0.0001476195992123248, + "loss": 0.2142, + "step": 20350 + }, + { + "epoch": 0.7861307386385574, + "grad_norm": 2.7507593631744385, + "learning_rate": 0.00014759385819272303, + "loss": 0.39, + "step": 20360 + }, + { + "epoch": 0.7865168539325843, + "grad_norm": 2.500195264816284, + "learning_rate": 0.00014756811717312124, + "loss": 0.4163, + "step": 20370 + }, + { + "epoch": 0.7869029692266111, + "grad_norm": 1.3699760437011719, + "learning_rate": 0.00014754237615351946, + "loss": 0.2407, + "step": 20380 + }, + { + "epoch": 0.7872890845206378, + "grad_norm": 0.9736176133155823, + "learning_rate": 0.00014751663513391767, + "loss": 0.3644, + "step": 20390 + }, + { + "epoch": 0.7876751998146646, + "grad_norm": 0.9103217720985413, + "learning_rate": 0.00014749089411431588, + "loss": 0.1304, + "step": 20400 + }, + { + "epoch": 0.7880613151086915, + "grad_norm": 0.7736025452613831, + "learning_rate": 0.0001474651530947141, + "loss": 0.3848, + "step": 20410 + }, + { + "epoch": 0.7884474304027183, + "grad_norm": 1.6167576313018799, + "learning_rate": 0.00014743941207511228, + "loss": 0.3469, + "step": 20420 + }, + { + "epoch": 0.788833545696745, + "grad_norm": 0.1359117031097412, + "learning_rate": 0.00014741367105551052, + "loss": 0.28, + "step": 20430 + }, + { + "epoch": 0.7892196609907719, + "grad_norm": 0.20857305824756622, + "learning_rate": 0.00014738793003590874, + "loss": 0.2406, + "step": 20440 + }, + { + "epoch": 0.7896057762847987, + "grad_norm": 0.9618992805480957, + "learning_rate": 0.00014736218901630695, + "loss": 0.243, + "step": 20450 + 
}, + { + "epoch": 0.7899918915788254, + "grad_norm": 0.4663112163543701, + "learning_rate": 0.00014733644799670516, + "loss": 0.214, + "step": 20460 + }, + { + "epoch": 0.7903780068728522, + "grad_norm": 1.3922615051269531, + "learning_rate": 0.00014731070697710338, + "loss": 0.4601, + "step": 20470 + }, + { + "epoch": 0.7907641221668791, + "grad_norm": 1.5706521272659302, + "learning_rate": 0.0001472849659575016, + "loss": 0.2187, + "step": 20480 + }, + { + "epoch": 0.7911502374609058, + "grad_norm": 1.9315848350524902, + "learning_rate": 0.00014725922493789978, + "loss": 0.2691, + "step": 20490 + }, + { + "epoch": 0.7915363527549326, + "grad_norm": 2.6131045818328857, + "learning_rate": 0.00014723348391829802, + "loss": 0.335, + "step": 20500 + }, + { + "epoch": 0.7919224680489594, + "grad_norm": 2.6561343669891357, + "learning_rate": 0.00014720774289869623, + "loss": 0.2166, + "step": 20510 + }, + { + "epoch": 0.7923085833429863, + "grad_norm": 1.247574806213379, + "learning_rate": 0.00014718200187909444, + "loss": 0.4276, + "step": 20520 + }, + { + "epoch": 0.792694698637013, + "grad_norm": 0.7353020310401917, + "learning_rate": 0.00014715626085949266, + "loss": 0.3442, + "step": 20530 + }, + { + "epoch": 0.7930808139310398, + "grad_norm": 0.1100919172167778, + "learning_rate": 0.00014713051983989087, + "loss": 0.21, + "step": 20540 + }, + { + "epoch": 0.7934669292250666, + "grad_norm": 0.6608699560165405, + "learning_rate": 0.00014710477882028908, + "loss": 0.2593, + "step": 20550 + }, + { + "epoch": 0.7938530445190934, + "grad_norm": 1.0959700345993042, + "learning_rate": 0.0001470790378006873, + "loss": 0.1864, + "step": 20560 + }, + { + "epoch": 0.7942391598131202, + "grad_norm": 1.469408392906189, + "learning_rate": 0.0001470532967810855, + "loss": 0.251, + "step": 20570 + }, + { + "epoch": 0.794625275107147, + "grad_norm": 1.4239304065704346, + "learning_rate": 0.00014702755576148372, + "loss": 0.1782, + "step": 20580 + }, + { + "epoch": 0.7950113904011737, + "grad_norm": 0.5389681458473206, + "learning_rate": 0.00014700181474188194, + "loss": 0.3228, + "step": 20590 + }, + { + "epoch": 0.7953975056952006, + "grad_norm": 0.4744633436203003, + "learning_rate": 0.00014697607372228015, + "loss": 0.5448, + "step": 20600 + }, + { + "epoch": 0.7957836209892274, + "grad_norm": 1.1155211925506592, + "learning_rate": 0.00014695033270267836, + "loss": 0.3341, + "step": 20610 + }, + { + "epoch": 0.7961697362832542, + "grad_norm": 1.4301745891571045, + "learning_rate": 0.00014692459168307658, + "loss": 0.2376, + "step": 20620 + }, + { + "epoch": 0.7965558515772809, + "grad_norm": 2.0889878273010254, + "learning_rate": 0.0001468988506634748, + "loss": 0.252, + "step": 20630 + }, + { + "epoch": 0.7969419668713078, + "grad_norm": 1.420873761177063, + "learning_rate": 0.000146873109643873, + "loss": 0.4033, + "step": 20640 + }, + { + "epoch": 0.7973280821653346, + "grad_norm": 1.9909567832946777, + "learning_rate": 0.00014684736862427122, + "loss": 0.5206, + "step": 20650 + }, + { + "epoch": 0.7977141974593613, + "grad_norm": 1.3584216833114624, + "learning_rate": 0.00014682162760466943, + "loss": 0.2643, + "step": 20660 + }, + { + "epoch": 0.7981003127533881, + "grad_norm": 1.5488578081130981, + "learning_rate": 0.00014679588658506764, + "loss": 0.4098, + "step": 20670 + }, + { + "epoch": 0.798486428047415, + "grad_norm": 2.0128777027130127, + "learning_rate": 0.00014677014556546586, + "loss": 0.2498, + "step": 20680 + }, + { + "epoch": 0.7988725433414418, + "grad_norm": 0.6591671705245972, + 
"learning_rate": 0.00014674440454586407, + "loss": 0.3131, + "step": 20690 + }, + { + "epoch": 0.7992586586354685, + "grad_norm": 0.8321843147277832, + "learning_rate": 0.00014671866352626228, + "loss": 0.3849, + "step": 20700 + }, + { + "epoch": 0.7996447739294954, + "grad_norm": 1.4672768115997314, + "learning_rate": 0.0001466929225066605, + "loss": 0.3056, + "step": 20710 + }, + { + "epoch": 0.8000308892235222, + "grad_norm": 1.5972867012023926, + "learning_rate": 0.0001466671814870587, + "loss": 0.2699, + "step": 20720 + }, + { + "epoch": 0.8004170045175489, + "grad_norm": 0.685972273349762, + "learning_rate": 0.00014664144046745692, + "loss": 0.2598, + "step": 20730 + }, + { + "epoch": 0.8008031198115757, + "grad_norm": 1.2639611959457397, + "learning_rate": 0.00014661569944785514, + "loss": 0.2158, + "step": 20740 + }, + { + "epoch": 0.8011892351056026, + "grad_norm": 2.0752620697021484, + "learning_rate": 0.00014658995842825335, + "loss": 0.2782, + "step": 20750 + }, + { + "epoch": 0.8015753503996293, + "grad_norm": 4.334362983703613, + "learning_rate": 0.00014656421740865156, + "loss": 0.5083, + "step": 20760 + }, + { + "epoch": 0.8019614656936561, + "grad_norm": 2.233436107635498, + "learning_rate": 0.00014653847638904978, + "loss": 0.4378, + "step": 20770 + }, + { + "epoch": 0.8023475809876829, + "grad_norm": 0.5392621755599976, + "learning_rate": 0.000146512735369448, + "loss": 0.4467, + "step": 20780 + }, + { + "epoch": 0.8027336962817098, + "grad_norm": 0.3768058121204376, + "learning_rate": 0.0001464869943498462, + "loss": 0.3542, + "step": 20790 + }, + { + "epoch": 0.8031198115757365, + "grad_norm": 0.5712292194366455, + "learning_rate": 0.00014646125333024442, + "loss": 0.281, + "step": 20800 + }, + { + "epoch": 0.8035059268697633, + "grad_norm": 0.8674315214157104, + "learning_rate": 0.00014643551231064263, + "loss": 0.2945, + "step": 20810 + }, + { + "epoch": 0.8038920421637901, + "grad_norm": 1.6206015348434448, + "learning_rate": 0.00014640977129104084, + "loss": 0.2627, + "step": 20820 + }, + { + "epoch": 0.804278157457817, + "grad_norm": 1.8807138204574585, + "learning_rate": 0.00014638403027143906, + "loss": 0.3768, + "step": 20830 + }, + { + "epoch": 0.8046642727518437, + "grad_norm": 0.9721212387084961, + "learning_rate": 0.00014635828925183727, + "loss": 0.4511, + "step": 20840 + }, + { + "epoch": 0.8050503880458705, + "grad_norm": 0.569038987159729, + "learning_rate": 0.00014633254823223548, + "loss": 0.2235, + "step": 20850 + }, + { + "epoch": 0.8054365033398972, + "grad_norm": 0.5981199741363525, + "learning_rate": 0.0001463068072126337, + "loss": 0.1413, + "step": 20860 + }, + { + "epoch": 0.8058226186339241, + "grad_norm": 3.696936845779419, + "learning_rate": 0.00014628106619303194, + "loss": 0.3779, + "step": 20870 + }, + { + "epoch": 0.8062087339279509, + "grad_norm": 1.5039314031600952, + "learning_rate": 0.00014625532517343012, + "loss": 0.2117, + "step": 20880 + }, + { + "epoch": 0.8065948492219777, + "grad_norm": 1.7800476551055908, + "learning_rate": 0.00014622958415382834, + "loss": 0.2343, + "step": 20890 + }, + { + "epoch": 0.8069809645160044, + "grad_norm": 0.18587611615657806, + "learning_rate": 0.00014620384313422655, + "loss": 0.2229, + "step": 20900 + }, + { + "epoch": 0.8073670798100313, + "grad_norm": 3.5351600646972656, + "learning_rate": 0.00014617810211462476, + "loss": 0.3632, + "step": 20910 + }, + { + "epoch": 0.8077531951040581, + "grad_norm": 3.9036381244659424, + "learning_rate": 0.00014615236109502298, + "loss": 0.2941, + "step": 
20920 + }, + { + "epoch": 0.8081393103980848, + "grad_norm": 1.9554537534713745, + "learning_rate": 0.0001461266200754212, + "loss": 0.2685, + "step": 20930 + }, + { + "epoch": 0.8085254256921116, + "grad_norm": 1.0424940586090088, + "learning_rate": 0.00014610087905581943, + "loss": 0.2617, + "step": 20940 + }, + { + "epoch": 0.8089115409861385, + "grad_norm": 6.593061923980713, + "learning_rate": 0.00014607513803621762, + "loss": 0.5034, + "step": 20950 + }, + { + "epoch": 0.8092976562801653, + "grad_norm": 11.373255729675293, + "learning_rate": 0.00014604939701661583, + "loss": 0.2694, + "step": 20960 + }, + { + "epoch": 0.809683771574192, + "grad_norm": 1.4843833446502686, + "learning_rate": 0.00014602365599701404, + "loss": 0.4526, + "step": 20970 + }, + { + "epoch": 0.8100698868682189, + "grad_norm": 3.6086366176605225, + "learning_rate": 0.00014599791497741225, + "loss": 0.4536, + "step": 20980 + }, + { + "epoch": 0.8104560021622457, + "grad_norm": 0.6381124258041382, + "learning_rate": 0.00014597217395781047, + "loss": 0.2918, + "step": 20990 + }, + { + "epoch": 0.8108421174562724, + "grad_norm": 1.9507087469100952, + "learning_rate": 0.00014594643293820868, + "loss": 0.1625, + "step": 21000 + }, + { + "epoch": 0.8112282327502992, + "grad_norm": 0.954914391040802, + "learning_rate": 0.00014592069191860692, + "loss": 0.4204, + "step": 21010 + }, + { + "epoch": 0.8116143480443261, + "grad_norm": 2.633601427078247, + "learning_rate": 0.0001458949508990051, + "loss": 0.4017, + "step": 21020 + }, + { + "epoch": 0.8120004633383529, + "grad_norm": 0.7492280602455139, + "learning_rate": 0.00014586920987940332, + "loss": 0.3209, + "step": 21030 + }, + { + "epoch": 0.8123865786323796, + "grad_norm": 1.1670303344726562, + "learning_rate": 0.00014584346885980153, + "loss": 0.3361, + "step": 21040 + }, + { + "epoch": 0.8127726939264064, + "grad_norm": 0.8462283611297607, + "learning_rate": 0.00014581772784019975, + "loss": 0.3156, + "step": 21050 + }, + { + "epoch": 0.8131588092204333, + "grad_norm": 2.151671886444092, + "learning_rate": 0.000145791986820598, + "loss": 0.2081, + "step": 21060 + }, + { + "epoch": 0.81354492451446, + "grad_norm": 1.0742170810699463, + "learning_rate": 0.00014576624580099617, + "loss": 0.2247, + "step": 21070 + }, + { + "epoch": 0.8139310398084868, + "grad_norm": 1.2256931066513062, + "learning_rate": 0.00014574050478139442, + "loss": 0.3542, + "step": 21080 + }, + { + "epoch": 0.8143171551025136, + "grad_norm": 3.740055561065674, + "learning_rate": 0.0001457147637617926, + "loss": 0.1856, + "step": 21090 + }, + { + "epoch": 0.8147032703965404, + "grad_norm": 0.03290783613920212, + "learning_rate": 0.00014568902274219081, + "loss": 0.2616, + "step": 21100 + }, + { + "epoch": 0.8150893856905672, + "grad_norm": 0.13995541632175446, + "learning_rate": 0.00014566328172258903, + "loss": 0.2107, + "step": 21110 + }, + { + "epoch": 0.815475500984594, + "grad_norm": 0.44371533393859863, + "learning_rate": 0.00014563754070298724, + "loss": 0.3091, + "step": 21120 + }, + { + "epoch": 0.8158616162786207, + "grad_norm": 2.7269155979156494, + "learning_rate": 0.00014561179968338548, + "loss": 0.5674, + "step": 21130 + }, + { + "epoch": 0.8162477315726476, + "grad_norm": 0.7148515582084656, + "learning_rate": 0.00014558605866378367, + "loss": 0.4721, + "step": 21140 + }, + { + "epoch": 0.8166338468666744, + "grad_norm": 1.0939961671829224, + "learning_rate": 0.0001455603176441819, + "loss": 0.2666, + "step": 21150 + }, + { + "epoch": 0.8170199621607012, + "grad_norm": 
1.7923939228057861, + "learning_rate": 0.0001455345766245801, + "loss": 0.2634, + "step": 21160 + }, + { + "epoch": 0.8174060774547279, + "grad_norm": 0.725130021572113, + "learning_rate": 0.00014550883560497834, + "loss": 0.1968, + "step": 21170 + }, + { + "epoch": 0.8177921927487548, + "grad_norm": 0.443892240524292, + "learning_rate": 0.00014548309458537655, + "loss": 0.556, + "step": 21180 + }, + { + "epoch": 0.8181783080427816, + "grad_norm": 1.3551362752914429, + "learning_rate": 0.00014545735356577473, + "loss": 0.4115, + "step": 21190 + }, + { + "epoch": 0.8185644233368083, + "grad_norm": 0.6360037922859192, + "learning_rate": 0.00014543161254617297, + "loss": 0.3176, + "step": 21200 + }, + { + "epoch": 0.8189505386308351, + "grad_norm": 2.634549140930176, + "learning_rate": 0.00014540587152657116, + "loss": 0.3662, + "step": 21210 + }, + { + "epoch": 0.819336653924862, + "grad_norm": 3.267479181289673, + "learning_rate": 0.0001453801305069694, + "loss": 0.2925, + "step": 21220 + }, + { + "epoch": 0.8197227692188888, + "grad_norm": 1.3607991933822632, + "learning_rate": 0.0001453543894873676, + "loss": 0.1795, + "step": 21230 + }, + { + "epoch": 0.8201088845129155, + "grad_norm": 0.6499636769294739, + "learning_rate": 0.00014532864846776583, + "loss": 0.4451, + "step": 21240 + }, + { + "epoch": 0.8204949998069424, + "grad_norm": 0.7486141920089722, + "learning_rate": 0.00014530290744816404, + "loss": 0.3982, + "step": 21250 + }, + { + "epoch": 0.8208811151009692, + "grad_norm": 0.6481244564056396, + "learning_rate": 0.00014527716642856223, + "loss": 0.3358, + "step": 21260 + }, + { + "epoch": 0.8212672303949959, + "grad_norm": 1.0736982822418213, + "learning_rate": 0.00014525142540896047, + "loss": 0.5264, + "step": 21270 + }, + { + "epoch": 0.8216533456890227, + "grad_norm": 2.0467801094055176, + "learning_rate": 0.00014522568438935865, + "loss": 0.3645, + "step": 21280 + }, + { + "epoch": 0.8220394609830496, + "grad_norm": 2.671499013900757, + "learning_rate": 0.0001451999433697569, + "loss": 0.4779, + "step": 21290 + }, + { + "epoch": 0.8224255762770764, + "grad_norm": 1.4449695348739624, + "learning_rate": 0.00014517420235015508, + "loss": 0.3555, + "step": 21300 + }, + { + "epoch": 0.8228116915711031, + "grad_norm": 1.7484570741653442, + "learning_rate": 0.00014514846133055332, + "loss": 0.2921, + "step": 21310 + }, + { + "epoch": 0.8231978068651299, + "grad_norm": 0.9985783100128174, + "learning_rate": 0.00014512272031095153, + "loss": 0.1861, + "step": 21320 + }, + { + "epoch": 0.8235839221591568, + "grad_norm": 2.0824766159057617, + "learning_rate": 0.00014509697929134972, + "loss": 0.3582, + "step": 21330 + }, + { + "epoch": 0.8239700374531835, + "grad_norm": 0.8448216915130615, + "learning_rate": 0.00014507123827174796, + "loss": 0.3674, + "step": 21340 + }, + { + "epoch": 0.8243561527472103, + "grad_norm": 2.027111053466797, + "learning_rate": 0.00014504549725214615, + "loss": 0.3297, + "step": 21350 + }, + { + "epoch": 0.8247422680412371, + "grad_norm": 1.560604214668274, + "learning_rate": 0.0001450197562325444, + "loss": 0.3303, + "step": 21360 + }, + { + "epoch": 0.825128383335264, + "grad_norm": 2.179563045501709, + "learning_rate": 0.0001449940152129426, + "loss": 0.1704, + "step": 21370 + }, + { + "epoch": 0.8255144986292907, + "grad_norm": 1.6268993616104126, + "learning_rate": 0.00014496827419334081, + "loss": 0.3316, + "step": 21380 + }, + { + "epoch": 0.8259006139233175, + "grad_norm": 0.8986232280731201, + "learning_rate": 0.00014494253317373903, + "loss": 
0.3361, + "step": 21390 + }, + { + "epoch": 0.8262867292173443, + "grad_norm": 0.8017566204071045, + "learning_rate": 0.00014491679215413721, + "loss": 0.3992, + "step": 21400 + }, + { + "epoch": 0.8266728445113711, + "grad_norm": 0.879162073135376, + "learning_rate": 0.00014489105113453545, + "loss": 0.3747, + "step": 21410 + }, + { + "epoch": 0.8270589598053979, + "grad_norm": 1.948309302330017, + "learning_rate": 0.00014486531011493364, + "loss": 0.2323, + "step": 21420 + }, + { + "epoch": 0.8274450750994247, + "grad_norm": 1.34186851978302, + "learning_rate": 0.00014483956909533188, + "loss": 0.3856, + "step": 21430 + }, + { + "epoch": 0.8278311903934514, + "grad_norm": 1.3884105682373047, + "learning_rate": 0.0001448138280757301, + "loss": 0.3044, + "step": 21440 + }, + { + "epoch": 0.8282173056874783, + "grad_norm": 1.3283358812332153, + "learning_rate": 0.0001447880870561283, + "loss": 0.3624, + "step": 21450 + }, + { + "epoch": 0.8286034209815051, + "grad_norm": 13.829493522644043, + "learning_rate": 0.00014476234603652652, + "loss": 0.2217, + "step": 21460 + }, + { + "epoch": 0.8289895362755318, + "grad_norm": 3.4602255821228027, + "learning_rate": 0.0001447366050169247, + "loss": 0.4964, + "step": 21470 + }, + { + "epoch": 0.8293756515695587, + "grad_norm": 0.42417749762535095, + "learning_rate": 0.00014471086399732295, + "loss": 0.2262, + "step": 21480 + }, + { + "epoch": 0.8297617668635855, + "grad_norm": 3.1674726009368896, + "learning_rate": 0.00014468512297772113, + "loss": 0.3327, + "step": 21490 + }, + { + "epoch": 0.8301478821576123, + "grad_norm": 0.7226410508155823, + "learning_rate": 0.00014465938195811937, + "loss": 0.3152, + "step": 21500 + }, + { + "epoch": 0.830533997451639, + "grad_norm": 0.7477544546127319, + "learning_rate": 0.0001446336409385176, + "loss": 0.3676, + "step": 21510 + }, + { + "epoch": 0.8309201127456659, + "grad_norm": 1.6237748861312866, + "learning_rate": 0.0001446078999189158, + "loss": 0.336, + "step": 21520 + }, + { + "epoch": 0.8313062280396927, + "grad_norm": 2.8118655681610107, + "learning_rate": 0.00014458215889931401, + "loss": 0.3513, + "step": 21530 + }, + { + "epoch": 0.8316923433337194, + "grad_norm": 2.6571335792541504, + "learning_rate": 0.0001445564178797122, + "loss": 0.4008, + "step": 21540 + }, + { + "epoch": 0.8320784586277462, + "grad_norm": 0.7042214870452881, + "learning_rate": 0.00014453067686011044, + "loss": 0.3433, + "step": 21550 + }, + { + "epoch": 0.8324645739217731, + "grad_norm": 0.1551884561777115, + "learning_rate": 0.00014450493584050865, + "loss": 0.1748, + "step": 21560 + }, + { + "epoch": 0.8328506892157999, + "grad_norm": 1.2595586776733398, + "learning_rate": 0.00014447919482090687, + "loss": 0.2567, + "step": 21570 + }, + { + "epoch": 0.8332368045098266, + "grad_norm": 3.800837516784668, + "learning_rate": 0.00014445345380130508, + "loss": 0.31, + "step": 21580 + }, + { + "epoch": 0.8336229198038534, + "grad_norm": 2.2269585132598877, + "learning_rate": 0.0001444277127817033, + "loss": 0.5146, + "step": 21590 + }, + { + "epoch": 0.8340090350978803, + "grad_norm": 0.5263709425926208, + "learning_rate": 0.0001444019717621015, + "loss": 0.2369, + "step": 21600 + }, + { + "epoch": 0.834395150391907, + "grad_norm": 0.27564361691474915, + "learning_rate": 0.00014437623074249972, + "loss": 0.308, + "step": 21610 + }, + { + "epoch": 0.8347812656859338, + "grad_norm": 4.639162540435791, + "learning_rate": 0.00014435048972289793, + "loss": 0.3806, + "step": 21620 + }, + { + "epoch": 0.8351673809799606, + 
"grad_norm": 0.9607310891151428, + "learning_rate": 0.00014432474870329615, + "loss": 0.2245, + "step": 21630 + }, + { + "epoch": 0.8355534962739875, + "grad_norm": 4.01082706451416, + "learning_rate": 0.00014429900768369436, + "loss": 0.3934, + "step": 21640 + }, + { + "epoch": 0.8359396115680142, + "grad_norm": 0.9401382803916931, + "learning_rate": 0.00014427326666409257, + "loss": 0.4726, + "step": 21650 + }, + { + "epoch": 0.836325726862041, + "grad_norm": 2.1189887523651123, + "learning_rate": 0.0001442475256444908, + "loss": 0.5434, + "step": 21660 + }, + { + "epoch": 0.8367118421560678, + "grad_norm": 2.370849132537842, + "learning_rate": 0.000144221784624889, + "loss": 0.3885, + "step": 21670 + }, + { + "epoch": 0.8370979574500946, + "grad_norm": 0.595461368560791, + "learning_rate": 0.00014419604360528721, + "loss": 0.3811, + "step": 21680 + }, + { + "epoch": 0.8374840727441214, + "grad_norm": 0.9013121128082275, + "learning_rate": 0.00014417030258568543, + "loss": 0.2406, + "step": 21690 + }, + { + "epoch": 0.8378701880381482, + "grad_norm": 1.3803203105926514, + "learning_rate": 0.00014414456156608364, + "loss": 0.2197, + "step": 21700 + }, + { + "epoch": 0.8382563033321749, + "grad_norm": 1.6163750886917114, + "learning_rate": 0.00014411882054648185, + "loss": 0.2622, + "step": 21710 + }, + { + "epoch": 0.8386424186262018, + "grad_norm": 3.604384660720825, + "learning_rate": 0.00014409307952688007, + "loss": 0.3688, + "step": 21720 + }, + { + "epoch": 0.8390285339202286, + "grad_norm": 1.4415024518966675, + "learning_rate": 0.00014406733850727828, + "loss": 0.2375, + "step": 21730 + }, + { + "epoch": 0.8394146492142553, + "grad_norm": 1.4819844961166382, + "learning_rate": 0.0001440415974876765, + "loss": 0.4065, + "step": 21740 + }, + { + "epoch": 0.8398007645082822, + "grad_norm": 1.3991562128067017, + "learning_rate": 0.0001440158564680747, + "loss": 0.2942, + "step": 21750 + }, + { + "epoch": 0.840186879802309, + "grad_norm": 2.022538185119629, + "learning_rate": 0.00014399011544847292, + "loss": 0.2699, + "step": 21760 + }, + { + "epoch": 0.8405729950963358, + "grad_norm": 2.418179512023926, + "learning_rate": 0.00014396437442887113, + "loss": 0.3481, + "step": 21770 + }, + { + "epoch": 0.8409591103903625, + "grad_norm": 0.930482029914856, + "learning_rate": 0.00014393863340926935, + "loss": 0.3257, + "step": 21780 + }, + { + "epoch": 0.8413452256843894, + "grad_norm": 3.616676092147827, + "learning_rate": 0.00014391289238966756, + "loss": 0.3844, + "step": 21790 + }, + { + "epoch": 0.8417313409784162, + "grad_norm": 1.7993167638778687, + "learning_rate": 0.00014388715137006577, + "loss": 0.4569, + "step": 21800 + }, + { + "epoch": 0.8421174562724429, + "grad_norm": 1.9243824481964111, + "learning_rate": 0.000143861410350464, + "loss": 0.282, + "step": 21810 + }, + { + "epoch": 0.8425035715664697, + "grad_norm": 1.6578466892242432, + "learning_rate": 0.0001438356693308622, + "loss": 0.314, + "step": 21820 + }, + { + "epoch": 0.8428896868604966, + "grad_norm": 1.4833110570907593, + "learning_rate": 0.00014380992831126041, + "loss": 0.2698, + "step": 21830 + }, + { + "epoch": 0.8432758021545234, + "grad_norm": 1.9081813097000122, + "learning_rate": 0.00014378418729165863, + "loss": 0.2392, + "step": 21840 + }, + { + "epoch": 0.8436619174485501, + "grad_norm": 1.8436548709869385, + "learning_rate": 0.00014375844627205684, + "loss": 0.1906, + "step": 21850 + }, + { + "epoch": 0.8440480327425769, + "grad_norm": 4.679655075073242, + "learning_rate": 0.00014373270525245505, + 
"loss": 0.6446, + "step": 21860 + }, + { + "epoch": 0.8444341480366038, + "grad_norm": 1.8216800689697266, + "learning_rate": 0.0001437069642328533, + "loss": 0.3272, + "step": 21870 + }, + { + "epoch": 0.8448202633306305, + "grad_norm": 1.0107386112213135, + "learning_rate": 0.00014368122321325148, + "loss": 0.3003, + "step": 21880 + }, + { + "epoch": 0.8452063786246573, + "grad_norm": 0.9573041796684265, + "learning_rate": 0.0001436554821936497, + "loss": 0.1757, + "step": 21890 + }, + { + "epoch": 0.8455924939186841, + "grad_norm": 0.9367936253547668, + "learning_rate": 0.0001436297411740479, + "loss": 0.2166, + "step": 21900 + }, + { + "epoch": 0.845978609212711, + "grad_norm": 3.1247951984405518, + "learning_rate": 0.00014360400015444612, + "loss": 0.3488, + "step": 21910 + }, + { + "epoch": 0.8463647245067377, + "grad_norm": 3.9438281059265137, + "learning_rate": 0.00014357825913484433, + "loss": 0.4498, + "step": 21920 + }, + { + "epoch": 0.8467508398007645, + "grad_norm": 0.909572958946228, + "learning_rate": 0.00014355251811524255, + "loss": 0.2698, + "step": 21930 + }, + { + "epoch": 0.8471369550947913, + "grad_norm": 2.6619715690612793, + "learning_rate": 0.0001435267770956408, + "loss": 0.4204, + "step": 21940 + }, + { + "epoch": 0.8475230703888181, + "grad_norm": 0.6143421530723572, + "learning_rate": 0.00014350103607603897, + "loss": 0.3573, + "step": 21950 + }, + { + "epoch": 0.8479091856828449, + "grad_norm": 0.3222682476043701, + "learning_rate": 0.0001434752950564372, + "loss": 0.2172, + "step": 21960 + }, + { + "epoch": 0.8482953009768717, + "grad_norm": 1.772538185119629, + "learning_rate": 0.0001434495540368354, + "loss": 0.4203, + "step": 21970 + }, + { + "epoch": 0.8486814162708984, + "grad_norm": 1.6327133178710938, + "learning_rate": 0.0001434238130172336, + "loss": 0.2153, + "step": 21980 + }, + { + "epoch": 0.8490675315649253, + "grad_norm": 1.0445518493652344, + "learning_rate": 0.00014339807199763183, + "loss": 0.3392, + "step": 21990 + }, + { + "epoch": 0.8494536468589521, + "grad_norm": 3.6096575260162354, + "learning_rate": 0.00014337233097803004, + "loss": 0.2691, + "step": 22000 + }, + { + "epoch": 0.8498397621529789, + "grad_norm": 1.4343204498291016, + "learning_rate": 0.00014334658995842828, + "loss": 0.3118, + "step": 22010 + }, + { + "epoch": 0.8502258774470057, + "grad_norm": 1.0348806381225586, + "learning_rate": 0.00014332084893882647, + "loss": 0.2886, + "step": 22020 + }, + { + "epoch": 0.8506119927410325, + "grad_norm": 0.5164201855659485, + "learning_rate": 0.00014329510791922468, + "loss": 0.2943, + "step": 22030 + }, + { + "epoch": 0.8509981080350593, + "grad_norm": 1.8109897375106812, + "learning_rate": 0.0001432693668996229, + "loss": 0.4021, + "step": 22040 + }, + { + "epoch": 0.851384223329086, + "grad_norm": 2.7065579891204834, + "learning_rate": 0.0001432436258800211, + "loss": 0.371, + "step": 22050 + }, + { + "epoch": 0.8517703386231129, + "grad_norm": 2.3028764724731445, + "learning_rate": 0.00014321788486041935, + "loss": 0.5026, + "step": 22060 + }, + { + "epoch": 0.8521564539171397, + "grad_norm": 1.3945609331130981, + "learning_rate": 0.00014319214384081753, + "loss": 0.4444, + "step": 22070 + }, + { + "epoch": 0.8525425692111664, + "grad_norm": 2.407951593399048, + "learning_rate": 0.00014316640282121577, + "loss": 0.4465, + "step": 22080 + }, + { + "epoch": 0.8529286845051932, + "grad_norm": 4.120944976806641, + "learning_rate": 0.00014314066180161396, + "loss": 0.3142, + "step": 22090 + }, + { + "epoch": 0.8533147997992201, + 
"grad_norm": 1.8841919898986816, + "learning_rate": 0.00014311492078201217, + "loss": 0.3609, + "step": 22100 + }, + { + "epoch": 0.8537009150932469, + "grad_norm": 5.1519951820373535, + "learning_rate": 0.00014308917976241039, + "loss": 0.3062, + "step": 22110 + }, + { + "epoch": 0.8540870303872736, + "grad_norm": 2.7280924320220947, + "learning_rate": 0.0001430634387428086, + "loss": 0.3678, + "step": 22120 + }, + { + "epoch": 0.8544731456813004, + "grad_norm": 0.23237809538841248, + "learning_rate": 0.00014303769772320684, + "loss": 0.2979, + "step": 22130 + }, + { + "epoch": 0.8548592609753273, + "grad_norm": 1.0587934255599976, + "learning_rate": 0.00014301195670360503, + "loss": 0.5672, + "step": 22140 + }, + { + "epoch": 0.855245376269354, + "grad_norm": 1.854447603225708, + "learning_rate": 0.00014298621568400327, + "loss": 0.3657, + "step": 22150 + }, + { + "epoch": 0.8556314915633808, + "grad_norm": 0.9766449332237244, + "learning_rate": 0.00014296047466440145, + "loss": 0.3219, + "step": 22160 + }, + { + "epoch": 0.8560176068574076, + "grad_norm": 1.7281047105789185, + "learning_rate": 0.00014293473364479967, + "loss": 0.3485, + "step": 22170 + }, + { + "epoch": 0.8564037221514345, + "grad_norm": 1.8366886377334595, + "learning_rate": 0.0001429089926251979, + "loss": 0.2929, + "step": 22180 + }, + { + "epoch": 0.8567898374454612, + "grad_norm": 0.8708136677742004, + "learning_rate": 0.0001428832516055961, + "loss": 0.3633, + "step": 22190 + }, + { + "epoch": 0.857175952739488, + "grad_norm": 1.5010342597961426, + "learning_rate": 0.00014285751058599433, + "loss": 0.3646, + "step": 22200 + }, + { + "epoch": 0.8575620680335148, + "grad_norm": 1.7438324689865112, + "learning_rate": 0.00014283176956639252, + "loss": 0.3093, + "step": 22210 + }, + { + "epoch": 0.8579481833275416, + "grad_norm": 1.5954341888427734, + "learning_rate": 0.00014280602854679076, + "loss": 0.2435, + "step": 22220 + }, + { + "epoch": 0.8583342986215684, + "grad_norm": 2.5279555320739746, + "learning_rate": 0.00014278028752718895, + "loss": 0.2526, + "step": 22230 + }, + { + "epoch": 0.8587204139155952, + "grad_norm": 3.4773006439208984, + "learning_rate": 0.00014275454650758716, + "loss": 0.3763, + "step": 22240 + }, + { + "epoch": 0.8591065292096219, + "grad_norm": 0.25110548734664917, + "learning_rate": 0.0001427288054879854, + "loss": 0.2265, + "step": 22250 + }, + { + "epoch": 0.8594926445036488, + "grad_norm": 2.3060946464538574, + "learning_rate": 0.00014270306446838359, + "loss": 0.3756, + "step": 22260 + }, + { + "epoch": 0.8598787597976756, + "grad_norm": 2.206308364868164, + "learning_rate": 0.00014267732344878183, + "loss": 0.295, + "step": 22270 + }, + { + "epoch": 0.8602648750917024, + "grad_norm": 1.1059858798980713, + "learning_rate": 0.00014265158242918, + "loss": 0.2382, + "step": 22280 + }, + { + "epoch": 0.8606509903857292, + "grad_norm": 0.045407798141241074, + "learning_rate": 0.00014262584140957825, + "loss": 0.1725, + "step": 22290 + }, + { + "epoch": 0.861037105679756, + "grad_norm": 0.2532581686973572, + "learning_rate": 0.00014260010038997644, + "loss": 0.3089, + "step": 22300 + }, + { + "epoch": 0.8614232209737828, + "grad_norm": 0.8851459622383118, + "learning_rate": 0.00014257435937037468, + "loss": 0.2721, + "step": 22310 + }, + { + "epoch": 0.8618093362678095, + "grad_norm": 2.9988598823547363, + "learning_rate": 0.0001425486183507729, + "loss": 0.3854, + "step": 22320 + }, + { + "epoch": 0.8621954515618364, + "grad_norm": 1.888629674911499, + "learning_rate": 
0.00014252287733117108, + "loss": 0.4472, + "step": 22330 + }, + { + "epoch": 0.8625815668558632, + "grad_norm": 0.9517232179641724, + "learning_rate": 0.00014249713631156932, + "loss": 0.3094, + "step": 22340 + }, + { + "epoch": 0.86296768214989, + "grad_norm": 1.4752097129821777, + "learning_rate": 0.0001424713952919675, + "loss": 0.2649, + "step": 22350 + }, + { + "epoch": 0.8633537974439167, + "grad_norm": 1.642285704612732, + "learning_rate": 0.00014244565427236575, + "loss": 0.4418, + "step": 22360 + }, + { + "epoch": 0.8637399127379436, + "grad_norm": 2.2177469730377197, + "learning_rate": 0.00014241991325276396, + "loss": 0.6865, + "step": 22370 + }, + { + "epoch": 0.8641260280319704, + "grad_norm": 0.9089158773422241, + "learning_rate": 0.00014239417223316217, + "loss": 0.1842, + "step": 22380 + }, + { + "epoch": 0.8645121433259971, + "grad_norm": 1.108091115951538, + "learning_rate": 0.00014236843121356039, + "loss": 0.3181, + "step": 22390 + }, + { + "epoch": 0.8648982586200239, + "grad_norm": 3.1317670345306396, + "learning_rate": 0.00014234269019395857, + "loss": 0.4798, + "step": 22400 + }, + { + "epoch": 0.8652843739140508, + "grad_norm": 1.0352108478546143, + "learning_rate": 0.0001423169491743568, + "loss": 0.3473, + "step": 22410 + }, + { + "epoch": 0.8656704892080775, + "grad_norm": 0.48546215891838074, + "learning_rate": 0.000142291208154755, + "loss": 0.2731, + "step": 22420 + }, + { + "epoch": 0.8660566045021043, + "grad_norm": 1.1608140468597412, + "learning_rate": 0.00014226546713515324, + "loss": 0.3202, + "step": 22430 + }, + { + "epoch": 0.8664427197961311, + "grad_norm": 0.19237665832042694, + "learning_rate": 0.00014223972611555145, + "loss": 0.4985, + "step": 22440 + }, + { + "epoch": 0.866828835090158, + "grad_norm": 0.12056539207696915, + "learning_rate": 0.00014221398509594967, + "loss": 0.2071, + "step": 22450 + }, + { + "epoch": 0.8672149503841847, + "grad_norm": 1.416548252105713, + "learning_rate": 0.00014218824407634788, + "loss": 0.2572, + "step": 22460 + }, + { + "epoch": 0.8676010656782115, + "grad_norm": 0.816148042678833, + "learning_rate": 0.00014216250305674607, + "loss": 0.2368, + "step": 22470 + }, + { + "epoch": 0.8679871809722383, + "grad_norm": 3.2394118309020996, + "learning_rate": 0.0001421367620371443, + "loss": 0.3768, + "step": 22480 + }, + { + "epoch": 0.8683732962662651, + "grad_norm": 0.7187336087226868, + "learning_rate": 0.00014211102101754252, + "loss": 0.3297, + "step": 22490 + }, + { + "epoch": 0.8687594115602919, + "grad_norm": 0.5154927372932434, + "learning_rate": 0.00014208527999794073, + "loss": 0.3301, + "step": 22500 + }, + { + "epoch": 0.8691455268543187, + "grad_norm": 1.0461368560791016, + "learning_rate": 0.00014205953897833895, + "loss": 0.335, + "step": 22510 + }, + { + "epoch": 0.8695316421483454, + "grad_norm": 0.9720495343208313, + "learning_rate": 0.00014203379795873716, + "loss": 0.2405, + "step": 22520 + }, + { + "epoch": 0.8699177574423723, + "grad_norm": 2.147216558456421, + "learning_rate": 0.00014200805693913537, + "loss": 0.3291, + "step": 22530 + }, + { + "epoch": 0.8703038727363991, + "grad_norm": 1.162614345550537, + "learning_rate": 0.00014198231591953356, + "loss": 0.2705, + "step": 22540 + }, + { + "epoch": 0.8706899880304259, + "grad_norm": 0.5020268559455872, + "learning_rate": 0.0001419565748999318, + "loss": 0.4403, + "step": 22550 + }, + { + "epoch": 0.8710761033244527, + "grad_norm": 3.198425531387329, + "learning_rate": 0.00014193083388033, + "loss": 0.2244, + "step": 22560 + }, + { + 
"epoch": 0.8714622186184795, + "grad_norm": 2.4578161239624023, + "learning_rate": 0.00014190509286072823, + "loss": 0.3993, + "step": 22570 + }, + { + "epoch": 0.8718483339125063, + "grad_norm": 1.4559924602508545, + "learning_rate": 0.00014187935184112644, + "loss": 0.2631, + "step": 22580 + }, + { + "epoch": 0.872234449206533, + "grad_norm": 0.6834856271743774, + "learning_rate": 0.00014185361082152465, + "loss": 0.3183, + "step": 22590 + }, + { + "epoch": 0.8726205645005599, + "grad_norm": 3.9718177318573, + "learning_rate": 0.00014182786980192287, + "loss": 0.439, + "step": 22600 + }, + { + "epoch": 0.8730066797945867, + "grad_norm": 1.7797685861587524, + "learning_rate": 0.00014180212878232105, + "loss": 0.2938, + "step": 22610 + }, + { + "epoch": 0.8733927950886134, + "grad_norm": 2.137479543685913, + "learning_rate": 0.0001417763877627193, + "loss": 0.4733, + "step": 22620 + }, + { + "epoch": 0.8737789103826402, + "grad_norm": 0.7577596306800842, + "learning_rate": 0.0001417506467431175, + "loss": 0.3599, + "step": 22630 + }, + { + "epoch": 0.8741650256766671, + "grad_norm": 0.8944536447525024, + "learning_rate": 0.00014172490572351572, + "loss": 0.3393, + "step": 22640 + }, + { + "epoch": 0.8745511409706939, + "grad_norm": 1.59170663356781, + "learning_rate": 0.00014169916470391393, + "loss": 0.3578, + "step": 22650 + }, + { + "epoch": 0.8749372562647206, + "grad_norm": 0.9852517247200012, + "learning_rate": 0.00014167342368431215, + "loss": 0.4203, + "step": 22660 + }, + { + "epoch": 0.8753233715587474, + "grad_norm": 1.0319880247116089, + "learning_rate": 0.00014164768266471036, + "loss": 0.3332, + "step": 22670 + }, + { + "epoch": 0.8757094868527743, + "grad_norm": 3.713357925415039, + "learning_rate": 0.00014162194164510857, + "loss": 0.412, + "step": 22680 + }, + { + "epoch": 0.876095602146801, + "grad_norm": 1.3197567462921143, + "learning_rate": 0.00014159620062550679, + "loss": 0.286, + "step": 22690 + }, + { + "epoch": 0.8764817174408278, + "grad_norm": 3.604928493499756, + "learning_rate": 0.000141570459605905, + "loss": 0.4206, + "step": 22700 + }, + { + "epoch": 0.8768678327348546, + "grad_norm": 3.1074795722961426, + "learning_rate": 0.0001415447185863032, + "loss": 0.2261, + "step": 22710 + }, + { + "epoch": 0.8772539480288815, + "grad_norm": 2.855581760406494, + "learning_rate": 0.00014151897756670143, + "loss": 0.6825, + "step": 22720 + }, + { + "epoch": 0.8776400633229082, + "grad_norm": 3.5756995677948, + "learning_rate": 0.00014149323654709964, + "loss": 0.4283, + "step": 22730 + }, + { + "epoch": 0.878026178616935, + "grad_norm": 1.4255709648132324, + "learning_rate": 0.00014146749552749785, + "loss": 0.3243, + "step": 22740 + }, + { + "epoch": 0.8784122939109618, + "grad_norm": 0.3095746338367462, + "learning_rate": 0.00014144175450789607, + "loss": 0.1052, + "step": 22750 + }, + { + "epoch": 0.8787984092049886, + "grad_norm": 1.4129611253738403, + "learning_rate": 0.00014141601348829428, + "loss": 0.2746, + "step": 22760 + }, + { + "epoch": 0.8791845244990154, + "grad_norm": 0.6448315382003784, + "learning_rate": 0.0001413902724686925, + "loss": 0.3312, + "step": 22770 + }, + { + "epoch": 0.8795706397930422, + "grad_norm": 1.6328849792480469, + "learning_rate": 0.0001413645314490907, + "loss": 0.2346, + "step": 22780 + }, + { + "epoch": 0.879956755087069, + "grad_norm": 1.0974128246307373, + "learning_rate": 0.00014133879042948892, + "loss": 0.2446, + "step": 22790 + }, + { + "epoch": 0.8803428703810958, + "grad_norm": 2.3657541275024414, + "learning_rate": 
0.00014131304940988713, + "loss": 0.3439, + "step": 22800 + }, + { + "epoch": 0.8807289856751226, + "grad_norm": 0.8959445953369141, + "learning_rate": 0.00014128730839028535, + "loss": 0.2896, + "step": 22810 + }, + { + "epoch": 0.8811151009691494, + "grad_norm": 1.5202107429504395, + "learning_rate": 0.00014126156737068356, + "loss": 0.1951, + "step": 22820 + }, + { + "epoch": 0.8815012162631762, + "grad_norm": 1.3710687160491943, + "learning_rate": 0.00014123582635108177, + "loss": 0.3193, + "step": 22830 + }, + { + "epoch": 0.881887331557203, + "grad_norm": 2.18868088722229, + "learning_rate": 0.00014121008533147999, + "loss": 0.3535, + "step": 22840 + }, + { + "epoch": 0.8822734468512298, + "grad_norm": 0.8251023888587952, + "learning_rate": 0.0001411843443118782, + "loss": 0.2536, + "step": 22850 + }, + { + "epoch": 0.8826595621452565, + "grad_norm": 1.0674525499343872, + "learning_rate": 0.0001411586032922764, + "loss": 0.3482, + "step": 22860 + }, + { + "epoch": 0.8830456774392834, + "grad_norm": 2.1199145317077637, + "learning_rate": 0.00014113286227267463, + "loss": 0.201, + "step": 22870 + }, + { + "epoch": 0.8834317927333102, + "grad_norm": 1.1958723068237305, + "learning_rate": 0.00014110712125307284, + "loss": 0.2813, + "step": 22880 + }, + { + "epoch": 0.883817908027337, + "grad_norm": 1.7805982828140259, + "learning_rate": 0.00014108138023347105, + "loss": 0.5317, + "step": 22890 + }, + { + "epoch": 0.8842040233213637, + "grad_norm": 1.9648222923278809, + "learning_rate": 0.00014105563921386926, + "loss": 0.3022, + "step": 22900 + }, + { + "epoch": 0.8845901386153906, + "grad_norm": 0.9053369164466858, + "learning_rate": 0.00014102989819426748, + "loss": 0.2886, + "step": 22910 + }, + { + "epoch": 0.8849762539094174, + "grad_norm": 1.251861333847046, + "learning_rate": 0.0001410041571746657, + "loss": 0.2632, + "step": 22920 + }, + { + "epoch": 0.8853623692034441, + "grad_norm": 3.8411691188812256, + "learning_rate": 0.0001409784161550639, + "loss": 0.3056, + "step": 22930 + }, + { + "epoch": 0.8857484844974709, + "grad_norm": 0.5969072580337524, + "learning_rate": 0.00014095267513546212, + "loss": 0.3806, + "step": 22940 + }, + { + "epoch": 0.8861345997914978, + "grad_norm": 4.688140392303467, + "learning_rate": 0.00014092693411586033, + "loss": 0.3318, + "step": 22950 + }, + { + "epoch": 0.8865207150855245, + "grad_norm": 4.2694993019104, + "learning_rate": 0.00014090119309625854, + "loss": 0.34, + "step": 22960 + }, + { + "epoch": 0.8869068303795513, + "grad_norm": 2.4169955253601074, + "learning_rate": 0.00014087545207665676, + "loss": 0.466, + "step": 22970 + }, + { + "epoch": 0.8872929456735781, + "grad_norm": 0.044384077191352844, + "learning_rate": 0.00014084971105705497, + "loss": 0.3196, + "step": 22980 + }, + { + "epoch": 0.887679060967605, + "grad_norm": 0.6931707262992859, + "learning_rate": 0.0001408239700374532, + "loss": 0.2166, + "step": 22990 + }, + { + "epoch": 0.8880651762616317, + "grad_norm": 3.195596933364868, + "learning_rate": 0.0001407982290178514, + "loss": 0.404, + "step": 23000 + }, + { + "epoch": 0.8884512915556585, + "grad_norm": 2.055058002471924, + "learning_rate": 0.0001407724879982496, + "loss": 0.3685, + "step": 23010 + }, + { + "epoch": 0.8888374068496853, + "grad_norm": 0.7302665114402771, + "learning_rate": 0.00014074674697864782, + "loss": 0.2247, + "step": 23020 + }, + { + "epoch": 0.8892235221437121, + "grad_norm": 1.1183364391326904, + "learning_rate": 0.00014072100595904604, + "loss": 0.3699, + "step": 23030 + }, + { + "epoch": 
0.8896096374377389, + "grad_norm": 1.040553092956543, + "learning_rate": 0.00014069526493944425, + "loss": 0.1791, + "step": 23040 + }, + { + "epoch": 0.8899957527317657, + "grad_norm": 1.5321402549743652, + "learning_rate": 0.00014066952391984246, + "loss": 0.2533, + "step": 23050 + }, + { + "epoch": 0.8903818680257926, + "grad_norm": 0.298433780670166, + "learning_rate": 0.0001406437829002407, + "loss": 0.2291, + "step": 23060 + }, + { + "epoch": 0.8907679833198193, + "grad_norm": 2.563689947128296, + "learning_rate": 0.0001406180418806389, + "loss": 0.4175, + "step": 23070 + }, + { + "epoch": 0.8911540986138461, + "grad_norm": 3.0614495277404785, + "learning_rate": 0.00014059230086103713, + "loss": 0.2875, + "step": 23080 + }, + { + "epoch": 0.8915402139078729, + "grad_norm": 0.4387970268726349, + "learning_rate": 0.00014056655984143532, + "loss": 0.2982, + "step": 23090 + }, + { + "epoch": 0.8919263292018997, + "grad_norm": 2.5590367317199707, + "learning_rate": 0.00014054081882183353, + "loss": 0.2858, + "step": 23100 + }, + { + "epoch": 0.8923124444959265, + "grad_norm": 0.6369298696517944, + "learning_rate": 0.00014051507780223174, + "loss": 0.2889, + "step": 23110 + }, + { + "epoch": 0.8926985597899533, + "grad_norm": 0.5210187435150146, + "learning_rate": 0.00014048933678262996, + "loss": 0.2442, + "step": 23120 + }, + { + "epoch": 0.89308467508398, + "grad_norm": 3.5073516368865967, + "learning_rate": 0.0001404635957630282, + "loss": 0.1733, + "step": 23130 + }, + { + "epoch": 0.8934707903780069, + "grad_norm": 2.1915247440338135, + "learning_rate": 0.00014043785474342638, + "loss": 0.4282, + "step": 23140 + }, + { + "epoch": 0.8938569056720337, + "grad_norm": 0.8182128071784973, + "learning_rate": 0.00014041211372382462, + "loss": 0.5501, + "step": 23150 + }, + { + "epoch": 0.8942430209660605, + "grad_norm": 0.4261817932128906, + "learning_rate": 0.0001403863727042228, + "loss": 0.2192, + "step": 23160 + }, + { + "epoch": 0.8946291362600872, + "grad_norm": 1.244523525238037, + "learning_rate": 0.00014036063168462102, + "loss": 0.2909, + "step": 23170 + }, + { + "epoch": 0.8950152515541141, + "grad_norm": 1.1659152507781982, + "learning_rate": 0.00014033489066501926, + "loss": 0.3119, + "step": 23180 + }, + { + "epoch": 0.8954013668481409, + "grad_norm": 0.4217310845851898, + "learning_rate": 0.00014030914964541745, + "loss": 0.2454, + "step": 23190 + }, + { + "epoch": 0.8957874821421676, + "grad_norm": 1.5685316324234009, + "learning_rate": 0.0001402834086258157, + "loss": 0.3609, + "step": 23200 + }, + { + "epoch": 0.8961735974361944, + "grad_norm": 2.6524040699005127, + "learning_rate": 0.00014025766760621388, + "loss": 0.2508, + "step": 23210 + }, + { + "epoch": 0.8965597127302213, + "grad_norm": 2.4932234287261963, + "learning_rate": 0.00014023192658661212, + "loss": 0.4047, + "step": 23220 + }, + { + "epoch": 0.896945828024248, + "grad_norm": 0.5093832015991211, + "learning_rate": 0.0001402061855670103, + "loss": 0.1639, + "step": 23230 + }, + { + "epoch": 0.8973319433182748, + "grad_norm": 1.1632994413375854, + "learning_rate": 0.00014018044454740852, + "loss": 0.2457, + "step": 23240 + }, + { + "epoch": 0.8977180586123016, + "grad_norm": 2.181727647781372, + "learning_rate": 0.00014015470352780676, + "loss": 0.3784, + "step": 23250 + }, + { + "epoch": 0.8981041739063285, + "grad_norm": 3.8110599517822266, + "learning_rate": 0.00014012896250820494, + "loss": 0.282, + "step": 23260 + }, + { + "epoch": 0.8984902892003552, + "grad_norm": 2.8994619846343994, + "learning_rate": 
0.00014010322148860318, + "loss": 0.3801, + "step": 23270 + }, + { + "epoch": 0.898876404494382, + "grad_norm": 1.2624458074569702, + "learning_rate": 0.00014007748046900137, + "loss": 0.3718, + "step": 23280 + }, + { + "epoch": 0.8992625197884088, + "grad_norm": 1.5995053052902222, + "learning_rate": 0.0001400517394493996, + "loss": 0.1828, + "step": 23290 + }, + { + "epoch": 0.8996486350824356, + "grad_norm": 2.251941680908203, + "learning_rate": 0.0001400259984297978, + "loss": 0.4434, + "step": 23300 + }, + { + "epoch": 0.9000347503764624, + "grad_norm": 1.1319392919540405, + "learning_rate": 0.000140000257410196, + "loss": 0.2479, + "step": 23310 + }, + { + "epoch": 0.9004208656704892, + "grad_norm": 2.075227737426758, + "learning_rate": 0.00013997451639059425, + "loss": 0.3079, + "step": 23320 + }, + { + "epoch": 0.9008069809645161, + "grad_norm": 0.6504748463630676, + "learning_rate": 0.00013994877537099244, + "loss": 0.2899, + "step": 23330 + }, + { + "epoch": 0.9011930962585428, + "grad_norm": 2.1623177528381348, + "learning_rate": 0.00013992303435139068, + "loss": 0.2651, + "step": 23340 + }, + { + "epoch": 0.9015792115525696, + "grad_norm": 2.159290313720703, + "learning_rate": 0.00013989729333178886, + "loss": 0.2975, + "step": 23350 + }, + { + "epoch": 0.9019653268465964, + "grad_norm": 0.7650458216667175, + "learning_rate": 0.0001398715523121871, + "loss": 0.2699, + "step": 23360 + }, + { + "epoch": 0.9023514421406232, + "grad_norm": 4.838365077972412, + "learning_rate": 0.00013984581129258532, + "loss": 0.4633, + "step": 23370 + }, + { + "epoch": 0.90273755743465, + "grad_norm": 4.598055362701416, + "learning_rate": 0.0001398200702729835, + "loss": 0.5255, + "step": 23380 + }, + { + "epoch": 0.9031236727286768, + "grad_norm": 0.9883280396461487, + "learning_rate": 0.00013979432925338174, + "loss": 0.5096, + "step": 23390 + }, + { + "epoch": 0.9035097880227035, + "grad_norm": 2.1574087142944336, + "learning_rate": 0.00013976858823377993, + "loss": 0.2999, + "step": 23400 + }, + { + "epoch": 0.9038959033167304, + "grad_norm": 1.7071588039398193, + "learning_rate": 0.00013974284721417817, + "loss": 0.4066, + "step": 23410 + }, + { + "epoch": 0.9042820186107572, + "grad_norm": 1.5206272602081299, + "learning_rate": 0.00013971710619457636, + "loss": 0.2841, + "step": 23420 + }, + { + "epoch": 0.904668133904784, + "grad_norm": 0.8649633526802063, + "learning_rate": 0.0001396913651749746, + "loss": 0.2263, + "step": 23430 + }, + { + "epoch": 0.9050542491988107, + "grad_norm": 0.35130754113197327, + "learning_rate": 0.0001396656241553728, + "loss": 0.3575, + "step": 23440 + }, + { + "epoch": 0.9054403644928376, + "grad_norm": 0.6659330725669861, + "learning_rate": 0.000139639883135771, + "loss": 0.2895, + "step": 23450 + }, + { + "epoch": 0.9058264797868644, + "grad_norm": 1.1387370824813843, + "learning_rate": 0.00013961414211616924, + "loss": 0.3574, + "step": 23460 + }, + { + "epoch": 0.9062125950808911, + "grad_norm": 1.8786828517913818, + "learning_rate": 0.00013958840109656742, + "loss": 0.5127, + "step": 23470 + }, + { + "epoch": 0.9065987103749179, + "grad_norm": 1.1299179792404175, + "learning_rate": 0.00013956266007696566, + "loss": 0.2215, + "step": 23480 + }, + { + "epoch": 0.9069848256689448, + "grad_norm": 1.1256846189498901, + "learning_rate": 0.00013953691905736388, + "loss": 0.302, + "step": 23490 + }, + { + "epoch": 0.9073709409629716, + "grad_norm": 0.8697860836982727, + "learning_rate": 0.0001395111780377621, + "loss": 0.2846, + "step": 23500 + }, + { + "epoch": 
0.9077570562569983, + "grad_norm": 1.4939324855804443, + "learning_rate": 0.0001394854370181603, + "loss": 0.2376, + "step": 23510 + }, + { + "epoch": 0.9081431715510251, + "grad_norm": 0.5483170747756958, + "learning_rate": 0.00013945969599855852, + "loss": 0.2546, + "step": 23520 + }, + { + "epoch": 0.908529286845052, + "grad_norm": 1.0681931972503662, + "learning_rate": 0.00013943395497895673, + "loss": 0.2236, + "step": 23530 + }, + { + "epoch": 0.9089154021390787, + "grad_norm": 1.9246234893798828, + "learning_rate": 0.00013940821395935492, + "loss": 0.3332, + "step": 23540 + }, + { + "epoch": 0.9093015174331055, + "grad_norm": 6.114970684051514, + "learning_rate": 0.00013938247293975316, + "loss": 0.3265, + "step": 23550 + }, + { + "epoch": 0.9096876327271323, + "grad_norm": 2.369112968444824, + "learning_rate": 0.00013935673192015137, + "loss": 0.3105, + "step": 23560 + }, + { + "epoch": 0.9100737480211591, + "grad_norm": 4.402872562408447, + "learning_rate": 0.00013933099090054958, + "loss": 0.3496, + "step": 23570 + }, + { + "epoch": 0.9104598633151859, + "grad_norm": 0.6064890027046204, + "learning_rate": 0.0001393052498809478, + "loss": 0.1322, + "step": 23580 + }, + { + "epoch": 0.9108459786092127, + "grad_norm": 0.41702714562416077, + "learning_rate": 0.000139279508861346, + "loss": 0.0886, + "step": 23590 + }, + { + "epoch": 0.9112320939032396, + "grad_norm": 1.1597472429275513, + "learning_rate": 0.00013925376784174422, + "loss": 0.1967, + "step": 23600 + }, + { + "epoch": 0.9116182091972663, + "grad_norm": 1.1049001216888428, + "learning_rate": 0.0001392280268221424, + "loss": 0.2802, + "step": 23610 + }, + { + "epoch": 0.9120043244912931, + "grad_norm": 0.7986807227134705, + "learning_rate": 0.00013920228580254065, + "loss": 0.1872, + "step": 23620 + }, + { + "epoch": 0.9123904397853199, + "grad_norm": 0.548693060874939, + "learning_rate": 0.00013917654478293886, + "loss": 0.3561, + "step": 23630 + }, + { + "epoch": 0.9127765550793467, + "grad_norm": 1.5944240093231201, + "learning_rate": 0.00013915080376333708, + "loss": 0.4619, + "step": 23640 + }, + { + "epoch": 0.9131626703733735, + "grad_norm": 1.9891632795333862, + "learning_rate": 0.0001391250627437353, + "loss": 0.3883, + "step": 23650 + }, + { + "epoch": 0.9135487856674003, + "grad_norm": 0.4564145803451538, + "learning_rate": 0.0001390993217241335, + "loss": 0.233, + "step": 23660 + }, + { + "epoch": 0.913934900961427, + "grad_norm": 1.1683684587478638, + "learning_rate": 0.00013907358070453172, + "loss": 0.4692, + "step": 23670 + }, + { + "epoch": 0.9143210162554539, + "grad_norm": 5.883500099182129, + "learning_rate": 0.00013904783968492993, + "loss": 0.2134, + "step": 23680 + }, + { + "epoch": 0.9147071315494807, + "grad_norm": 0.7426010370254517, + "learning_rate": 0.00013902209866532814, + "loss": 0.3608, + "step": 23690 + }, + { + "epoch": 0.9150932468435075, + "grad_norm": 1.5476068258285522, + "learning_rate": 0.00013899635764572636, + "loss": 0.2194, + "step": 23700 + }, + { + "epoch": 0.9154793621375342, + "grad_norm": 1.5702605247497559, + "learning_rate": 0.00013897061662612457, + "loss": 0.1731, + "step": 23710 + }, + { + "epoch": 0.9158654774315611, + "grad_norm": 2.336073637008667, + "learning_rate": 0.00013894487560652278, + "loss": 0.2703, + "step": 23720 + }, + { + "epoch": 0.9162515927255879, + "grad_norm": 0.4154629111289978, + "learning_rate": 0.000138919134586921, + "loss": 0.2601, + "step": 23730 + }, + { + "epoch": 0.9166377080196146, + "grad_norm": 1.9994091987609863, + "learning_rate": 
0.0001388933935673192, + "loss": 0.4536, + "step": 23740 + }, + { + "epoch": 0.9170238233136414, + "grad_norm": 0.4610597491264343, + "learning_rate": 0.00013886765254771742, + "loss": 0.2726, + "step": 23750 + }, + { + "epoch": 0.9174099386076683, + "grad_norm": 2.19671893119812, + "learning_rate": 0.00013884191152811564, + "loss": 0.3418, + "step": 23760 + }, + { + "epoch": 0.917796053901695, + "grad_norm": 0.619023323059082, + "learning_rate": 0.00013881617050851385, + "loss": 0.2761, + "step": 23770 + }, + { + "epoch": 0.9181821691957218, + "grad_norm": 1.667083978652954, + "learning_rate": 0.00013879042948891206, + "loss": 0.2341, + "step": 23780 + }, + { + "epoch": 0.9185682844897486, + "grad_norm": 0.349020391702652, + "learning_rate": 0.00013876468846931028, + "loss": 0.244, + "step": 23790 + }, + { + "epoch": 0.9189543997837755, + "grad_norm": 3.2495415210723877, + "learning_rate": 0.0001387389474497085, + "loss": 0.2454, + "step": 23800 + }, + { + "epoch": 0.9193405150778022, + "grad_norm": 0.7900146842002869, + "learning_rate": 0.0001387132064301067, + "loss": 0.3209, + "step": 23810 + }, + { + "epoch": 0.919726630371829, + "grad_norm": 1.2435237169265747, + "learning_rate": 0.00013868746541050492, + "loss": 0.3719, + "step": 23820 + }, + { + "epoch": 0.9201127456658558, + "grad_norm": 0.7372536659240723, + "learning_rate": 0.00013866172439090313, + "loss": 0.2984, + "step": 23830 + }, + { + "epoch": 0.9204988609598826, + "grad_norm": 2.814180374145508, + "learning_rate": 0.00013863598337130134, + "loss": 0.5081, + "step": 23840 + }, + { + "epoch": 0.9208849762539094, + "grad_norm": 3.5411558151245117, + "learning_rate": 0.00013861024235169956, + "loss": 0.4475, + "step": 23850 + }, + { + "epoch": 0.9212710915479362, + "grad_norm": 0.41628485918045044, + "learning_rate": 0.00013858450133209777, + "loss": 0.1679, + "step": 23860 + }, + { + "epoch": 0.9216572068419631, + "grad_norm": 0.7951272130012512, + "learning_rate": 0.00013855876031249598, + "loss": 0.4346, + "step": 23870 + }, + { + "epoch": 0.9220433221359898, + "grad_norm": 0.6857497692108154, + "learning_rate": 0.0001385330192928942, + "loss": 0.2262, + "step": 23880 + }, + { + "epoch": 0.9224294374300166, + "grad_norm": 2.732487678527832, + "learning_rate": 0.0001385072782732924, + "loss": 0.3198, + "step": 23890 + }, + { + "epoch": 0.9228155527240434, + "grad_norm": 0.18741728365421295, + "learning_rate": 0.00013848153725369062, + "loss": 0.2409, + "step": 23900 + }, + { + "epoch": 0.9232016680180702, + "grad_norm": 0.46343281865119934, + "learning_rate": 0.00013845579623408884, + "loss": 0.2379, + "step": 23910 + }, + { + "epoch": 0.923587783312097, + "grad_norm": 1.7090940475463867, + "learning_rate": 0.00013843005521448705, + "loss": 0.2274, + "step": 23920 + }, + { + "epoch": 0.9239738986061238, + "grad_norm": 0.2000303715467453, + "learning_rate": 0.00013840431419488526, + "loss": 0.5171, + "step": 23930 + }, + { + "epoch": 0.9243600139001505, + "grad_norm": 1.6152868270874023, + "learning_rate": 0.00013837857317528348, + "loss": 0.1348, + "step": 23940 + }, + { + "epoch": 0.9247461291941774, + "grad_norm": 1.5346245765686035, + "learning_rate": 0.0001383528321556817, + "loss": 0.3708, + "step": 23950 + }, + { + "epoch": 0.9251322444882042, + "grad_norm": 2.1073787212371826, + "learning_rate": 0.0001383270911360799, + "loss": 0.3694, + "step": 23960 + }, + { + "epoch": 0.925518359782231, + "grad_norm": 12.8298921585083, + "learning_rate": 0.00013830135011647812, + "loss": 0.269, + "step": 23970 + }, + { + "epoch": 
0.9259044750762577, + "grad_norm": 0.43689021468162537, + "learning_rate": 0.00013827560909687633, + "loss": 0.3099, + "step": 23980 + }, + { + "epoch": 0.9262905903702846, + "grad_norm": 2.084096908569336, + "learning_rate": 0.00013824986807727457, + "loss": 0.4423, + "step": 23990 + }, + { + "epoch": 0.9266767056643114, + "grad_norm": 0.9367966651916504, + "learning_rate": 0.00013822412705767276, + "loss": 0.2202, + "step": 24000 + }, + { + "epoch": 0.9270628209583381, + "grad_norm": 0.14286178350448608, + "learning_rate": 0.00013819838603807097, + "loss": 0.2302, + "step": 24010 + }, + { + "epoch": 0.9274489362523649, + "grad_norm": 0.7110779285430908, + "learning_rate": 0.00013817264501846918, + "loss": 0.3598, + "step": 24020 + }, + { + "epoch": 0.9278350515463918, + "grad_norm": 2.352980136871338, + "learning_rate": 0.0001381469039988674, + "loss": 0.4493, + "step": 24030 + }, + { + "epoch": 0.9282211668404186, + "grad_norm": 2.2235450744628906, + "learning_rate": 0.0001381211629792656, + "loss": 0.547, + "step": 24040 + }, + { + "epoch": 0.9286072821344453, + "grad_norm": 2.4419260025024414, + "learning_rate": 0.00013809542195966382, + "loss": 0.4612, + "step": 24050 + }, + { + "epoch": 0.9289933974284721, + "grad_norm": 1.3784935474395752, + "learning_rate": 0.00013806968094006206, + "loss": 0.3996, + "step": 24060 + }, + { + "epoch": 0.929379512722499, + "grad_norm": 1.019810676574707, + "learning_rate": 0.00013804393992046025, + "loss": 0.5654, + "step": 24070 + }, + { + "epoch": 0.9297656280165257, + "grad_norm": 2.399096965789795, + "learning_rate": 0.00013801819890085846, + "loss": 0.3335, + "step": 24080 + }, + { + "epoch": 0.9301517433105525, + "grad_norm": 0.28834161162376404, + "learning_rate": 0.00013799245788125668, + "loss": 0.4946, + "step": 24090 + }, + { + "epoch": 0.9305378586045794, + "grad_norm": 0.10426662117242813, + "learning_rate": 0.0001379667168616549, + "loss": 0.3275, + "step": 24100 + }, + { + "epoch": 0.9309239738986061, + "grad_norm": 2.7809340953826904, + "learning_rate": 0.0001379409758420531, + "loss": 0.3549, + "step": 24110 + }, + { + "epoch": 0.9313100891926329, + "grad_norm": 1.8708065748214722, + "learning_rate": 0.00013791523482245132, + "loss": 0.1429, + "step": 24120 + }, + { + "epoch": 0.9316962044866597, + "grad_norm": 1.9992274045944214, + "learning_rate": 0.00013788949380284956, + "loss": 0.2818, + "step": 24130 + }, + { + "epoch": 0.9320823197806866, + "grad_norm": 1.3665111064910889, + "learning_rate": 0.00013786375278324774, + "loss": 0.3565, + "step": 24140 + }, + { + "epoch": 0.9324684350747133, + "grad_norm": 3.106234550476074, + "learning_rate": 0.00013783801176364596, + "loss": 0.3706, + "step": 24150 + }, + { + "epoch": 0.9328545503687401, + "grad_norm": 1.2186559438705444, + "learning_rate": 0.00013781227074404417, + "loss": 0.1616, + "step": 24160 + }, + { + "epoch": 0.9332406656627669, + "grad_norm": 0.41551148891448975, + "learning_rate": 0.00013778652972444238, + "loss": 0.3073, + "step": 24170 + }, + { + "epoch": 0.9336267809567937, + "grad_norm": 2.015069007873535, + "learning_rate": 0.00013776078870484062, + "loss": 0.3317, + "step": 24180 + }, + { + "epoch": 0.9340128962508205, + "grad_norm": 1.9990328550338745, + "learning_rate": 0.0001377350476852388, + "loss": 0.4937, + "step": 24190 + }, + { + "epoch": 0.9343990115448473, + "grad_norm": 2.351898670196533, + "learning_rate": 0.00013770930666563705, + "loss": 0.3994, + "step": 24200 + }, + { + "epoch": 0.934785126838874, + "grad_norm": 1.4670008420944214, + 
"learning_rate": 0.00013768356564603524, + "loss": 0.2905, + "step": 24210 + }, + { + "epoch": 0.9351712421329009, + "grad_norm": 0.9890618920326233, + "learning_rate": 0.00013765782462643345, + "loss": 0.2512, + "step": 24220 + }, + { + "epoch": 0.9355573574269277, + "grad_norm": 0.3020402491092682, + "learning_rate": 0.00013763208360683166, + "loss": 0.3701, + "step": 24230 + }, + { + "epoch": 0.9359434727209545, + "grad_norm": 0.42218661308288574, + "learning_rate": 0.00013760634258722988, + "loss": 0.3395, + "step": 24240 + }, + { + "epoch": 0.9363295880149812, + "grad_norm": 1.5767306089401245, + "learning_rate": 0.00013758060156762812, + "loss": 0.3941, + "step": 24250 + }, + { + "epoch": 0.9367157033090081, + "grad_norm": 0.5416197180747986, + "learning_rate": 0.0001375548605480263, + "loss": 0.2915, + "step": 24260 + }, + { + "epoch": 0.9371018186030349, + "grad_norm": 1.175347924232483, + "learning_rate": 0.00013752911952842454, + "loss": 0.1284, + "step": 24270 + }, + { + "epoch": 0.9374879338970616, + "grad_norm": 0.8719255924224854, + "learning_rate": 0.00013750337850882273, + "loss": 0.3144, + "step": 24280 + }, + { + "epoch": 0.9378740491910884, + "grad_norm": 0.2711631655693054, + "learning_rate": 0.00013747763748922097, + "loss": 0.1561, + "step": 24290 + }, + { + "epoch": 0.9382601644851153, + "grad_norm": 3.2228004932403564, + "learning_rate": 0.00013745189646961918, + "loss": 0.5494, + "step": 24300 + }, + { + "epoch": 0.9386462797791421, + "grad_norm": 0.05647150054574013, + "learning_rate": 0.00013742615545001737, + "loss": 0.3571, + "step": 24310 + }, + { + "epoch": 0.9390323950731688, + "grad_norm": 0.8459005951881409, + "learning_rate": 0.0001374004144304156, + "loss": 0.1948, + "step": 24320 + }, + { + "epoch": 0.9394185103671956, + "grad_norm": 0.7044252157211304, + "learning_rate": 0.0001373746734108138, + "loss": 0.2511, + "step": 24330 + }, + { + "epoch": 0.9398046256612225, + "grad_norm": 0.979590654373169, + "learning_rate": 0.00013734893239121204, + "loss": 0.2158, + "step": 24340 + }, + { + "epoch": 0.9401907409552492, + "grad_norm": 1.0585628747940063, + "learning_rate": 0.00013732319137161022, + "loss": 0.1339, + "step": 24350 + }, + { + "epoch": 0.940576856249276, + "grad_norm": 0.6700488328933716, + "learning_rate": 0.00013729745035200846, + "loss": 0.2056, + "step": 24360 + }, + { + "epoch": 0.9409629715433029, + "grad_norm": 1.6656709909439087, + "learning_rate": 0.00013727170933240668, + "loss": 0.2957, + "step": 24370 + }, + { + "epoch": 0.9413490868373297, + "grad_norm": 2.963427782058716, + "learning_rate": 0.00013724596831280486, + "loss": 0.2673, + "step": 24380 + }, + { + "epoch": 0.9417352021313564, + "grad_norm": 0.8103615641593933, + "learning_rate": 0.0001372202272932031, + "loss": 0.372, + "step": 24390 + }, + { + "epoch": 0.9421213174253832, + "grad_norm": 2.3593873977661133, + "learning_rate": 0.0001371944862736013, + "loss": 0.431, + "step": 24400 + }, + { + "epoch": 0.9425074327194101, + "grad_norm": 2.8472931385040283, + "learning_rate": 0.00013716874525399953, + "loss": 0.4584, + "step": 24410 + }, + { + "epoch": 0.9428935480134368, + "grad_norm": 1.2765402793884277, + "learning_rate": 0.00013714300423439772, + "loss": 0.2543, + "step": 24420 + }, + { + "epoch": 0.9432796633074636, + "grad_norm": 1.4226797819137573, + "learning_rate": 0.00013711726321479596, + "loss": 0.2914, + "step": 24430 + }, + { + "epoch": 0.9436657786014904, + "grad_norm": 6.906572341918945, + "learning_rate": 0.00013709152219519417, + "loss": 0.4415, + "step": 
24440 + }, + { + "epoch": 0.9440518938955172, + "grad_norm": 1.8387972116470337, + "learning_rate": 0.00013706578117559236, + "loss": 0.3018, + "step": 24450 + }, + { + "epoch": 0.944438009189544, + "grad_norm": 0.7259104251861572, + "learning_rate": 0.0001370400401559906, + "loss": 0.2356, + "step": 24460 + }, + { + "epoch": 0.9448241244835708, + "grad_norm": 0.6452949643135071, + "learning_rate": 0.00013701429913638878, + "loss": 0.2382, + "step": 24470 + }, + { + "epoch": 0.9452102397775975, + "grad_norm": 4.259208679199219, + "learning_rate": 0.00013698855811678702, + "loss": 0.4736, + "step": 24480 + }, + { + "epoch": 0.9455963550716244, + "grad_norm": 2.7305455207824707, + "learning_rate": 0.00013696281709718524, + "loss": 0.5034, + "step": 24490 + }, + { + "epoch": 0.9459824703656512, + "grad_norm": 0.6123724579811096, + "learning_rate": 0.00013693707607758345, + "loss": 0.3638, + "step": 24500 + }, + { + "epoch": 0.946368585659678, + "grad_norm": 0.9821889400482178, + "learning_rate": 0.00013691133505798166, + "loss": 0.3081, + "step": 24510 + }, + { + "epoch": 0.9467547009537047, + "grad_norm": 2.0870277881622314, + "learning_rate": 0.00013688559403837985, + "loss": 0.3116, + "step": 24520 + }, + { + "epoch": 0.9471408162477316, + "grad_norm": 2.495162010192871, + "learning_rate": 0.0001368598530187781, + "loss": 0.3879, + "step": 24530 + }, + { + "epoch": 0.9475269315417584, + "grad_norm": 1.4834142923355103, + "learning_rate": 0.00013683411199917628, + "loss": 0.1975, + "step": 24540 + }, + { + "epoch": 0.9479130468357851, + "grad_norm": 0.10831606388092041, + "learning_rate": 0.00013680837097957452, + "loss": 0.3504, + "step": 24550 + }, + { + "epoch": 0.9482991621298119, + "grad_norm": 1.6975635290145874, + "learning_rate": 0.00013678262995997273, + "loss": 0.3305, + "step": 24560 + }, + { + "epoch": 0.9486852774238388, + "grad_norm": 1.982422947883606, + "learning_rate": 0.00013675688894037094, + "loss": 0.3693, + "step": 24570 + }, + { + "epoch": 0.9490713927178656, + "grad_norm": 0.3487630784511566, + "learning_rate": 0.00013673114792076916, + "loss": 0.1912, + "step": 24580 + }, + { + "epoch": 0.9494575080118923, + "grad_norm": 0.4546245336532593, + "learning_rate": 0.00013670540690116734, + "loss": 0.2519, + "step": 24590 + }, + { + "epoch": 0.9498436233059191, + "grad_norm": 0.42953622341156006, + "learning_rate": 0.00013667966588156558, + "loss": 0.1967, + "step": 24600 + }, + { + "epoch": 0.950229738599946, + "grad_norm": 0.8197507858276367, + "learning_rate": 0.00013665392486196377, + "loss": 0.3057, + "step": 24610 + }, + { + "epoch": 0.9506158538939727, + "grad_norm": 0.8627083897590637, + "learning_rate": 0.000136628183842362, + "loss": 0.1382, + "step": 24620 + }, + { + "epoch": 0.9510019691879995, + "grad_norm": 1.0003200769424438, + "learning_rate": 0.00013660244282276022, + "loss": 0.1608, + "step": 24630 + }, + { + "epoch": 0.9513880844820264, + "grad_norm": 0.4473998546600342, + "learning_rate": 0.00013657670180315844, + "loss": 0.2226, + "step": 24640 + }, + { + "epoch": 0.9517741997760532, + "grad_norm": 1.9413338899612427, + "learning_rate": 0.00013655096078355665, + "loss": 0.2323, + "step": 24650 + }, + { + "epoch": 0.9521603150700799, + "grad_norm": 1.3250267505645752, + "learning_rate": 0.00013652521976395483, + "loss": 0.1617, + "step": 24660 + }, + { + "epoch": 0.9525464303641067, + "grad_norm": 1.756535530090332, + "learning_rate": 0.00013649947874435308, + "loss": 0.2727, + "step": 24670 + }, + { + "epoch": 0.9529325456581336, + "grad_norm": 
1.1905356645584106, + "learning_rate": 0.0001364737377247513, + "loss": 0.3169, + "step": 24680 + }, + { + "epoch": 0.9533186609521603, + "grad_norm": 0.8116361498832703, + "learning_rate": 0.0001364479967051495, + "loss": 0.3999, + "step": 24690 + }, + { + "epoch": 0.9537047762461871, + "grad_norm": 0.8906353712081909, + "learning_rate": 0.00013642225568554772, + "loss": 0.3499, + "step": 24700 + }, + { + "epoch": 0.9540908915402139, + "grad_norm": 0.7422589659690857, + "learning_rate": 0.00013639651466594593, + "loss": 0.3544, + "step": 24710 + }, + { + "epoch": 0.9544770068342407, + "grad_norm": 0.21925519406795502, + "learning_rate": 0.00013637077364634414, + "loss": 0.1957, + "step": 24720 + }, + { + "epoch": 0.9548631221282675, + "grad_norm": 3.2993857860565186, + "learning_rate": 0.00013634503262674236, + "loss": 0.2625, + "step": 24730 + }, + { + "epoch": 0.9552492374222943, + "grad_norm": 1.4352943897247314, + "learning_rate": 0.00013631929160714057, + "loss": 0.1655, + "step": 24740 + }, + { + "epoch": 0.955635352716321, + "grad_norm": 1.4417182207107544, + "learning_rate": 0.00013629355058753878, + "loss": 0.2442, + "step": 24750 + }, + { + "epoch": 0.9560214680103479, + "grad_norm": 0.5024278163909912, + "learning_rate": 0.000136267809567937, + "loss": 0.2035, + "step": 24760 + }, + { + "epoch": 0.9564075833043747, + "grad_norm": 1.553472638130188, + "learning_rate": 0.0001362420685483352, + "loss": 0.22, + "step": 24770 + }, + { + "epoch": 0.9567936985984015, + "grad_norm": 1.655155897140503, + "learning_rate": 0.00013621632752873342, + "loss": 0.4051, + "step": 24780 + }, + { + "epoch": 0.9571798138924282, + "grad_norm": 0.4435586631298065, + "learning_rate": 0.00013619058650913164, + "loss": 0.3032, + "step": 24790 + }, + { + "epoch": 0.9575659291864551, + "grad_norm": 0.38589465618133545, + "learning_rate": 0.00013616484548952985, + "loss": 0.2062, + "step": 24800 + }, + { + "epoch": 0.9579520444804819, + "grad_norm": 0.6986583471298218, + "learning_rate": 0.00013613910446992806, + "loss": 0.6371, + "step": 24810 + }, + { + "epoch": 0.9583381597745086, + "grad_norm": 1.69257652759552, + "learning_rate": 0.00013611336345032627, + "loss": 0.2425, + "step": 24820 + }, + { + "epoch": 0.9587242750685354, + "grad_norm": 0.6136781573295593, + "learning_rate": 0.0001360876224307245, + "loss": 0.313, + "step": 24830 + }, + { + "epoch": 0.9591103903625623, + "grad_norm": 0.9019533395767212, + "learning_rate": 0.0001360618814111227, + "loss": 0.3723, + "step": 24840 + }, + { + "epoch": 0.9594965056565891, + "grad_norm": 4.601387977600098, + "learning_rate": 0.00013603614039152091, + "loss": 0.4228, + "step": 24850 + }, + { + "epoch": 0.9598826209506158, + "grad_norm": 0.35048994421958923, + "learning_rate": 0.00013601039937191913, + "loss": 0.2423, + "step": 24860 + }, + { + "epoch": 0.9602687362446426, + "grad_norm": 1.7177300453186035, + "learning_rate": 0.00013598465835231734, + "loss": 0.3995, + "step": 24870 + }, + { + "epoch": 0.9606548515386695, + "grad_norm": 1.3341178894042969, + "learning_rate": 0.00013595891733271555, + "loss": 0.3422, + "step": 24880 + }, + { + "epoch": 0.9610409668326962, + "grad_norm": 1.1859056949615479, + "learning_rate": 0.00013593317631311377, + "loss": 0.4204, + "step": 24890 + }, + { + "epoch": 0.961427082126723, + "grad_norm": 1.9831022024154663, + "learning_rate": 0.00013590743529351198, + "loss": 0.3623, + "step": 24900 + }, + { + "epoch": 0.9618131974207499, + "grad_norm": 0.8704162240028381, + "learning_rate": 0.0001358816942739102, + "loss": 
0.2664, + "step": 24910 + }, + { + "epoch": 0.9621993127147767, + "grad_norm": 2.2464160919189453, + "learning_rate": 0.0001358559532543084, + "loss": 0.3628, + "step": 24920 + }, + { + "epoch": 0.9625854280088034, + "grad_norm": 2.5081570148468018, + "learning_rate": 0.00013583021223470662, + "loss": 0.314, + "step": 24930 + }, + { + "epoch": 0.9629715433028302, + "grad_norm": 4.44802713394165, + "learning_rate": 0.00013580447121510483, + "loss": 0.4593, + "step": 24940 + }, + { + "epoch": 0.9633576585968571, + "grad_norm": 2.0449907779693604, + "learning_rate": 0.00013577873019550305, + "loss": 0.3941, + "step": 24950 + }, + { + "epoch": 0.9637437738908838, + "grad_norm": 2.090315818786621, + "learning_rate": 0.00013575298917590126, + "loss": 0.3713, + "step": 24960 + }, + { + "epoch": 0.9641298891849106, + "grad_norm": 1.0789872407913208, + "learning_rate": 0.00013572724815629947, + "loss": 0.2411, + "step": 24970 + }, + { + "epoch": 0.9645160044789374, + "grad_norm": 2.0463109016418457, + "learning_rate": 0.0001357015071366977, + "loss": 0.2935, + "step": 24980 + }, + { + "epoch": 0.9649021197729643, + "grad_norm": 1.331299901008606, + "learning_rate": 0.00013567576611709593, + "loss": 0.2162, + "step": 24990 + }, + { + "epoch": 0.965288235066991, + "grad_norm": 3.3949038982391357, + "learning_rate": 0.00013565002509749411, + "loss": 0.4593, + "step": 25000 + }, + { + "epoch": 0.9656743503610178, + "grad_norm": 2.4510934352874756, + "learning_rate": 0.00013562428407789233, + "loss": 0.2476, + "step": 25010 + }, + { + "epoch": 0.9660604656550446, + "grad_norm": 1.1333917379379272, + "learning_rate": 0.00013559854305829054, + "loss": 0.3035, + "step": 25020 + }, + { + "epoch": 0.9664465809490714, + "grad_norm": 2.5525829792022705, + "learning_rate": 0.00013557280203868875, + "loss": 0.4498, + "step": 25030 + }, + { + "epoch": 0.9668326962430982, + "grad_norm": 1.4862838983535767, + "learning_rate": 0.00013554706101908697, + "loss": 0.3109, + "step": 25040 + }, + { + "epoch": 0.967218811537125, + "grad_norm": 1.0053727626800537, + "learning_rate": 0.00013552131999948518, + "loss": 0.3029, + "step": 25050 + }, + { + "epoch": 0.9676049268311517, + "grad_norm": 1.4033957719802856, + "learning_rate": 0.00013549557897988342, + "loss": 0.3096, + "step": 25060 + }, + { + "epoch": 0.9679910421251786, + "grad_norm": 2.1944756507873535, + "learning_rate": 0.0001354698379602816, + "loss": 0.3428, + "step": 25070 + }, + { + "epoch": 0.9683771574192054, + "grad_norm": 3.330143928527832, + "learning_rate": 0.00013544409694067982, + "loss": 0.4026, + "step": 25080 + }, + { + "epoch": 0.9687632727132321, + "grad_norm": 1.9337730407714844, + "learning_rate": 0.00013541835592107803, + "loss": 0.5343, + "step": 25090 + }, + { + "epoch": 0.9691493880072589, + "grad_norm": 4.203855514526367, + "learning_rate": 0.00013539261490147625, + "loss": 0.4461, + "step": 25100 + }, + { + "epoch": 0.9695355033012858, + "grad_norm": 0.6582885980606079, + "learning_rate": 0.0001353668738818745, + "loss": 0.2719, + "step": 25110 + }, + { + "epoch": 0.9699216185953126, + "grad_norm": 0.8788600564002991, + "learning_rate": 0.00013534113286227267, + "loss": 0.4335, + "step": 25120 + }, + { + "epoch": 0.9703077338893393, + "grad_norm": 1.3793160915374756, + "learning_rate": 0.00013531539184267091, + "loss": 0.3126, + "step": 25130 + }, + { + "epoch": 0.9706938491833662, + "grad_norm": 3.5996806621551514, + "learning_rate": 0.0001352896508230691, + "loss": 0.5171, + "step": 25140 + }, + { + "epoch": 0.971079964477393, + 
"grad_norm": 1.6220872402191162, + "learning_rate": 0.00013526390980346731, + "loss": 0.3992, + "step": 25150 + }, + { + "epoch": 0.9714660797714197, + "grad_norm": 1.8351634740829468, + "learning_rate": 0.00013523816878386553, + "loss": 0.3159, + "step": 25160 + }, + { + "epoch": 0.9718521950654465, + "grad_norm": 0.6400974988937378, + "learning_rate": 0.00013521242776426374, + "loss": 0.3187, + "step": 25170 + }, + { + "epoch": 0.9722383103594734, + "grad_norm": 1.3507485389709473, + "learning_rate": 0.00013518668674466198, + "loss": 0.3626, + "step": 25180 + }, + { + "epoch": 0.9726244256535002, + "grad_norm": 1.4778717756271362, + "learning_rate": 0.00013516094572506017, + "loss": 0.3467, + "step": 25190 + }, + { + "epoch": 0.9730105409475269, + "grad_norm": 0.4346179664134979, + "learning_rate": 0.0001351352047054584, + "loss": 0.1995, + "step": 25200 + }, + { + "epoch": 0.9733966562415537, + "grad_norm": 2.8404130935668945, + "learning_rate": 0.0001351094636858566, + "loss": 0.4765, + "step": 25210 + }, + { + "epoch": 0.9737827715355806, + "grad_norm": 0.044492240995168686, + "learning_rate": 0.0001350837226662548, + "loss": 0.3047, + "step": 25220 + }, + { + "epoch": 0.9741688868296073, + "grad_norm": 2.258355140686035, + "learning_rate": 0.00013505798164665302, + "loss": 0.234, + "step": 25230 + }, + { + "epoch": 0.9745550021236341, + "grad_norm": 0.23794110119342804, + "learning_rate": 0.00013503224062705123, + "loss": 0.5178, + "step": 25240 + }, + { + "epoch": 0.9749411174176609, + "grad_norm": 0.5849624872207642, + "learning_rate": 0.00013500649960744947, + "loss": 0.3419, + "step": 25250 + }, + { + "epoch": 0.9753272327116878, + "grad_norm": 1.535228967666626, + "learning_rate": 0.00013498075858784766, + "loss": 0.275, + "step": 25260 + }, + { + "epoch": 0.9757133480057145, + "grad_norm": 4.943759441375732, + "learning_rate": 0.0001349550175682459, + "loss": 0.241, + "step": 25270 + }, + { + "epoch": 0.9760994632997413, + "grad_norm": 1.3046916723251343, + "learning_rate": 0.0001349292765486441, + "loss": 0.2453, + "step": 25280 + }, + { + "epoch": 0.976485578593768, + "grad_norm": 0.20991156995296478, + "learning_rate": 0.0001349035355290423, + "loss": 0.392, + "step": 25290 + }, + { + "epoch": 0.9768716938877949, + "grad_norm": 1.2106267213821411, + "learning_rate": 0.00013487779450944054, + "loss": 0.284, + "step": 25300 + }, + { + "epoch": 0.9772578091818217, + "grad_norm": 0.28197771310806274, + "learning_rate": 0.00013485205348983873, + "loss": 0.2596, + "step": 25310 + }, + { + "epoch": 0.9776439244758485, + "grad_norm": 0.8538393378257751, + "learning_rate": 0.00013482631247023697, + "loss": 0.2519, + "step": 25320 + }, + { + "epoch": 0.9780300397698752, + "grad_norm": 1.9520586729049683, + "learning_rate": 0.00013480057145063515, + "loss": 0.2581, + "step": 25330 + }, + { + "epoch": 0.9784161550639021, + "grad_norm": 0.7613987326622009, + "learning_rate": 0.0001347748304310334, + "loss": 0.1549, + "step": 25340 + }, + { + "epoch": 0.9788022703579289, + "grad_norm": 0.2045626938343048, + "learning_rate": 0.00013474908941143158, + "loss": 0.2587, + "step": 25350 + }, + { + "epoch": 0.9791883856519557, + "grad_norm": 1.634488821029663, + "learning_rate": 0.0001347233483918298, + "loss": 0.2266, + "step": 25360 + }, + { + "epoch": 0.9795745009459824, + "grad_norm": 0.6475266814231873, + "learning_rate": 0.00013469760737222803, + "loss": 0.2548, + "step": 25370 + }, + { + "epoch": 0.9799606162400093, + "grad_norm": 0.25982800126075745, + "learning_rate": 
0.00013467186635262622, + "loss": 0.3398, + "step": 25380 + }, + { + "epoch": 0.9803467315340361, + "grad_norm": 2.3229706287384033, + "learning_rate": 0.00013464612533302446, + "loss": 0.3468, + "step": 25390 + }, + { + "epoch": 0.9807328468280628, + "grad_norm": 0.6465128660202026, + "learning_rate": 0.00013462038431342265, + "loss": 0.2716, + "step": 25400 + }, + { + "epoch": 0.9811189621220897, + "grad_norm": 1.413368821144104, + "learning_rate": 0.0001345946432938209, + "loss": 0.2508, + "step": 25410 + }, + { + "epoch": 0.9815050774161165, + "grad_norm": 0.33577996492385864, + "learning_rate": 0.00013456890227421907, + "loss": 0.3323, + "step": 25420 + }, + { + "epoch": 0.9818911927101432, + "grad_norm": 0.7601230144500732, + "learning_rate": 0.0001345431612546173, + "loss": 0.2682, + "step": 25430 + }, + { + "epoch": 0.98227730800417, + "grad_norm": 4.296112060546875, + "learning_rate": 0.00013451742023501553, + "loss": 0.2886, + "step": 25440 + }, + { + "epoch": 0.9826634232981969, + "grad_norm": 1.2557302713394165, + "learning_rate": 0.00013449167921541371, + "loss": 0.2863, + "step": 25450 + }, + { + "epoch": 0.9830495385922237, + "grad_norm": 0.6168705821037292, + "learning_rate": 0.00013446593819581195, + "loss": 0.195, + "step": 25460 + }, + { + "epoch": 0.9834356538862504, + "grad_norm": 1.9064022302627563, + "learning_rate": 0.00013444019717621014, + "loss": 0.2898, + "step": 25470 + }, + { + "epoch": 0.9838217691802772, + "grad_norm": 0.3549353778362274, + "learning_rate": 0.00013441445615660838, + "loss": 0.1761, + "step": 25480 + }, + { + "epoch": 0.9842078844743041, + "grad_norm": 0.6308786869049072, + "learning_rate": 0.0001343887151370066, + "loss": 0.2169, + "step": 25490 + }, + { + "epoch": 0.9845939997683308, + "grad_norm": 0.9449920058250427, + "learning_rate": 0.0001343629741174048, + "loss": 0.2952, + "step": 25500 + }, + { + "epoch": 0.9849801150623576, + "grad_norm": 1.6993355751037598, + "learning_rate": 0.00013433723309780302, + "loss": 0.3745, + "step": 25510 + }, + { + "epoch": 0.9853662303563844, + "grad_norm": 0.8093920350074768, + "learning_rate": 0.0001343114920782012, + "loss": 0.1725, + "step": 25520 + }, + { + "epoch": 0.9857523456504113, + "grad_norm": 1.4968618154525757, + "learning_rate": 0.00013428575105859945, + "loss": 0.2843, + "step": 25530 + }, + { + "epoch": 0.986138460944438, + "grad_norm": 0.37341028451919556, + "learning_rate": 0.00013426001003899763, + "loss": 0.2462, + "step": 25540 + }, + { + "epoch": 0.9865245762384648, + "grad_norm": 2.0109541416168213, + "learning_rate": 0.00013423426901939587, + "loss": 0.4046, + "step": 25550 + }, + { + "epoch": 0.9869106915324916, + "grad_norm": 2.540151357650757, + "learning_rate": 0.0001342085279997941, + "loss": 0.4331, + "step": 25560 + }, + { + "epoch": 0.9872968068265184, + "grad_norm": 0.9178367257118225, + "learning_rate": 0.0001341827869801923, + "loss": 0.3849, + "step": 25570 + }, + { + "epoch": 0.9876829221205452, + "grad_norm": 1.4229514598846436, + "learning_rate": 0.00013415704596059051, + "loss": 0.326, + "step": 25580 + }, + { + "epoch": 0.988069037414572, + "grad_norm": 0.7699927091598511, + "learning_rate": 0.0001341313049409887, + "loss": 0.237, + "step": 25590 + }, + { + "epoch": 0.9884551527085987, + "grad_norm": 0.3460877239704132, + "learning_rate": 0.00013410556392138694, + "loss": 0.2679, + "step": 25600 + }, + { + "epoch": 0.9888412680026256, + "grad_norm": 0.25363796949386597, + "learning_rate": 0.00013407982290178515, + "loss": 0.1746, + "step": 25610 + }, + { + 
"epoch": 0.9892273832966524, + "grad_norm": 1.5607961416244507, + "learning_rate": 0.00013405408188218337, + "loss": 0.6328, + "step": 25620 + }, + { + "epoch": 0.9896134985906792, + "grad_norm": 2.3751626014709473, + "learning_rate": 0.00013402834086258158, + "loss": 0.3989, + "step": 25630 + }, + { + "epoch": 0.9899996138847059, + "grad_norm": 0.20423173904418945, + "learning_rate": 0.0001340025998429798, + "loss": 0.2836, + "step": 25640 + }, + { + "epoch": 0.9903857291787328, + "grad_norm": 1.4207524061203003, + "learning_rate": 0.000133976858823378, + "loss": 0.348, + "step": 25650 + }, + { + "epoch": 0.9907718444727596, + "grad_norm": 0.12217597663402557, + "learning_rate": 0.0001339511178037762, + "loss": 0.3515, + "step": 25660 + }, + { + "epoch": 0.9911579597667863, + "grad_norm": 0.9259626269340515, + "learning_rate": 0.00013392537678417443, + "loss": 0.3039, + "step": 25670 + }, + { + "epoch": 0.9915440750608132, + "grad_norm": 0.5700181722640991, + "learning_rate": 0.00013389963576457265, + "loss": 0.3266, + "step": 25680 + }, + { + "epoch": 0.99193019035484, + "grad_norm": 1.9731560945510864, + "learning_rate": 0.00013387389474497086, + "loss": 0.2534, + "step": 25690 + }, + { + "epoch": 0.9923163056488667, + "grad_norm": 1.3714967966079712, + "learning_rate": 0.00013384815372536907, + "loss": 0.298, + "step": 25700 + }, + { + "epoch": 0.9927024209428935, + "grad_norm": 1.2377171516418457, + "learning_rate": 0.0001338224127057673, + "loss": 0.2948, + "step": 25710 + }, + { + "epoch": 0.9930885362369204, + "grad_norm": 0.4711095690727234, + "learning_rate": 0.0001337966716861655, + "loss": 0.2295, + "step": 25720 + }, + { + "epoch": 0.9934746515309472, + "grad_norm": 0.46715909242630005, + "learning_rate": 0.00013377093066656369, + "loss": 0.3331, + "step": 25730 + }, + { + "epoch": 0.9938607668249739, + "grad_norm": 1.1080710887908936, + "learning_rate": 0.00013374518964696193, + "loss": 0.4324, + "step": 25740 + }, + { + "epoch": 0.9942468821190007, + "grad_norm": 2.3581650257110596, + "learning_rate": 0.00013371944862736014, + "loss": 0.372, + "step": 25750 + }, + { + "epoch": 0.9946329974130276, + "grad_norm": 2.448678970336914, + "learning_rate": 0.00013369370760775835, + "loss": 0.3396, + "step": 25760 + }, + { + "epoch": 0.9950191127070543, + "grad_norm": 1.4270198345184326, + "learning_rate": 0.00013366796658815657, + "loss": 0.4511, + "step": 25770 + }, + { + "epoch": 0.9954052280010811, + "grad_norm": 0.6360304951667786, + "learning_rate": 0.00013364222556855478, + "loss": 0.2427, + "step": 25780 + }, + { + "epoch": 0.9957913432951079, + "grad_norm": 2.1653332710266113, + "learning_rate": 0.000133616484548953, + "loss": 0.3057, + "step": 25790 + }, + { + "epoch": 0.9961774585891348, + "grad_norm": 2.952923536300659, + "learning_rate": 0.0001335907435293512, + "loss": 0.3076, + "step": 25800 + }, + { + "epoch": 0.9965635738831615, + "grad_norm": 0.5913527607917786, + "learning_rate": 0.00013356500250974942, + "loss": 0.399, + "step": 25810 + }, + { + "epoch": 0.9969496891771883, + "grad_norm": 0.4653400480747223, + "learning_rate": 0.00013353926149014763, + "loss": 0.2452, + "step": 25820 + }, + { + "epoch": 0.9973358044712151, + "grad_norm": 6.321722984313965, + "learning_rate": 0.00013351352047054585, + "loss": 0.3644, + "step": 25830 + }, + { + "epoch": 0.9977219197652419, + "grad_norm": 1.9944865703582764, + "learning_rate": 0.00013348777945094406, + "loss": 0.2915, + "step": 25840 + }, + { + "epoch": 0.9981080350592687, + "grad_norm": 1.1486843824386597, + 
"learning_rate": 0.00013346203843134227, + "loss": 0.3787, + "step": 25850 + }, + { + "epoch": 0.9984941503532955, + "grad_norm": 0.11532440781593323, + "learning_rate": 0.0001334362974117405, + "loss": 0.3053, + "step": 25860 + }, + { + "epoch": 0.9988802656473222, + "grad_norm": 1.3178479671478271, + "learning_rate": 0.0001334105563921387, + "loss": 0.2917, + "step": 25870 + }, + { + "epoch": 0.9992663809413491, + "grad_norm": 0.3511134088039398, + "learning_rate": 0.0001333848153725369, + "loss": 0.2657, + "step": 25880 + }, + { + "epoch": 0.9996524962353759, + "grad_norm": 1.2761729955673218, + "learning_rate": 0.00013335907435293513, + "loss": 0.3913, + "step": 25890 + }, + { + "epoch": 1.0000386115294027, + "grad_norm": 2.540947437286377, + "learning_rate": 0.00013333333333333334, + "loss": 0.3016, + "step": 25900 + }, + { + "epoch": 1.0004247268234294, + "grad_norm": 0.5865538120269775, + "learning_rate": 0.00013330759231373155, + "loss": 0.4184, + "step": 25910 + }, + { + "epoch": 1.0008108421174562, + "grad_norm": 2.7305166721343994, + "learning_rate": 0.00013328185129412977, + "loss": 0.3529, + "step": 25920 + }, + { + "epoch": 1.0011969574114832, + "grad_norm": 1.4203829765319824, + "learning_rate": 0.00013325611027452798, + "loss": 0.3345, + "step": 25930 + }, + { + "epoch": 1.00158307270551, + "grad_norm": 0.7687380313873291, + "learning_rate": 0.0001332303692549262, + "loss": 0.2622, + "step": 25940 + }, + { + "epoch": 1.0019691879995367, + "grad_norm": 0.4958217144012451, + "learning_rate": 0.0001332046282353244, + "loss": 0.3935, + "step": 25950 + }, + { + "epoch": 1.0023553032935635, + "grad_norm": 0.27102500200271606, + "learning_rate": 0.00013317888721572262, + "loss": 0.2642, + "step": 25960 + }, + { + "epoch": 1.0027414185875902, + "grad_norm": 0.6760912537574768, + "learning_rate": 0.00013315314619612083, + "loss": 0.1937, + "step": 25970 + }, + { + "epoch": 1.003127533881617, + "grad_norm": 2.5647270679473877, + "learning_rate": 0.00013312740517651905, + "loss": 0.3344, + "step": 25980 + }, + { + "epoch": 1.0035136491756438, + "grad_norm": 0.4810403883457184, + "learning_rate": 0.00013310166415691726, + "loss": 0.1783, + "step": 25990 + }, + { + "epoch": 1.0038997644696706, + "grad_norm": 2.5404248237609863, + "learning_rate": 0.00013307592313731547, + "loss": 0.3979, + "step": 26000 + }, + { + "epoch": 1.0042858797636975, + "grad_norm": 1.1135408878326416, + "learning_rate": 0.00013305018211771369, + "loss": 0.3681, + "step": 26010 + }, + { + "epoch": 1.0046719950577243, + "grad_norm": 1.2810723781585693, + "learning_rate": 0.0001330244410981119, + "loss": 0.183, + "step": 26020 + }, + { + "epoch": 1.005058110351751, + "grad_norm": 3.3486454486846924, + "learning_rate": 0.0001329987000785101, + "loss": 0.2489, + "step": 26030 + }, + { + "epoch": 1.0054442256457778, + "grad_norm": 0.7915325164794922, + "learning_rate": 0.00013297295905890833, + "loss": 0.3577, + "step": 26040 + }, + { + "epoch": 1.0058303409398046, + "grad_norm": 0.969727098941803, + "learning_rate": 0.00013294721803930654, + "loss": 0.2243, + "step": 26050 + }, + { + "epoch": 1.0062164562338314, + "grad_norm": 1.8932983875274658, + "learning_rate": 0.00013292147701970475, + "loss": 0.245, + "step": 26060 + }, + { + "epoch": 1.0066025715278581, + "grad_norm": 1.2421804666519165, + "learning_rate": 0.00013289573600010297, + "loss": 0.2087, + "step": 26070 + }, + { + "epoch": 1.006988686821885, + "grad_norm": 1.316405177116394, + "learning_rate": 0.00013286999498050118, + "loss": 0.2864, + "step": 
26080 + }, + { + "epoch": 1.007374802115912, + "grad_norm": 1.9196691513061523, + "learning_rate": 0.0001328442539608994, + "loss": 0.2098, + "step": 26090 + }, + { + "epoch": 1.0077609174099387, + "grad_norm": 0.7522671222686768, + "learning_rate": 0.0001328185129412976, + "loss": 0.3092, + "step": 26100 + }, + { + "epoch": 1.0081470327039654, + "grad_norm": 0.322963684797287, + "learning_rate": 0.00013279277192169585, + "loss": 0.2452, + "step": 26110 + }, + { + "epoch": 1.0085331479979922, + "grad_norm": 2.243734121322632, + "learning_rate": 0.00013276703090209403, + "loss": 0.4387, + "step": 26120 + }, + { + "epoch": 1.008919263292019, + "grad_norm": 1.0588726997375488, + "learning_rate": 0.00013274128988249225, + "loss": 0.3326, + "step": 26130 + }, + { + "epoch": 1.0093053785860457, + "grad_norm": 0.7018478512763977, + "learning_rate": 0.00013271554886289046, + "loss": 0.4248, + "step": 26140 + }, + { + "epoch": 1.0096914938800725, + "grad_norm": 3.692521810531616, + "learning_rate": 0.00013268980784328867, + "loss": 0.2452, + "step": 26150 + }, + { + "epoch": 1.0100776091740993, + "grad_norm": 2.21842885017395, + "learning_rate": 0.00013266406682368689, + "loss": 0.3315, + "step": 26160 + }, + { + "epoch": 1.0104637244681263, + "grad_norm": 1.2321841716766357, + "learning_rate": 0.0001326383258040851, + "loss": 0.1042, + "step": 26170 + }, + { + "epoch": 1.010849839762153, + "grad_norm": 1.6151124238967896, + "learning_rate": 0.00013261258478448334, + "loss": 0.1934, + "step": 26180 + }, + { + "epoch": 1.0112359550561798, + "grad_norm": 0.0949881374835968, + "learning_rate": 0.00013258684376488153, + "loss": 0.1628, + "step": 26190 + }, + { + "epoch": 1.0116220703502066, + "grad_norm": 1.7337597608566284, + "learning_rate": 0.00013256110274527977, + "loss": 0.3275, + "step": 26200 + }, + { + "epoch": 1.0120081856442333, + "grad_norm": 2.1338372230529785, + "learning_rate": 0.00013253536172567795, + "loss": 0.3677, + "step": 26210 + }, + { + "epoch": 1.01239430093826, + "grad_norm": 1.803187370300293, + "learning_rate": 0.00013250962070607617, + "loss": 0.2886, + "step": 26220 + }, + { + "epoch": 1.0127804162322869, + "grad_norm": 2.022825002670288, + "learning_rate": 0.00013248387968647438, + "loss": 0.2618, + "step": 26230 + }, + { + "epoch": 1.0131665315263139, + "grad_norm": 0.48369279503822327, + "learning_rate": 0.0001324581386668726, + "loss": 0.4638, + "step": 26240 + }, + { + "epoch": 1.0135526468203406, + "grad_norm": 3.2530572414398193, + "learning_rate": 0.00013243239764727083, + "loss": 0.3824, + "step": 26250 + }, + { + "epoch": 1.0139387621143674, + "grad_norm": 1.0877609252929688, + "learning_rate": 0.00013240665662766902, + "loss": 0.1732, + "step": 26260 + }, + { + "epoch": 1.0143248774083942, + "grad_norm": 1.5585906505584717, + "learning_rate": 0.00013238091560806726, + "loss": 0.2044, + "step": 26270 + }, + { + "epoch": 1.014710992702421, + "grad_norm": 1.5326491594314575, + "learning_rate": 0.00013235517458846545, + "loss": 0.1759, + "step": 26280 + }, + { + "epoch": 1.0150971079964477, + "grad_norm": 4.376593589782715, + "learning_rate": 0.00013232943356886366, + "loss": 0.6032, + "step": 26290 + }, + { + "epoch": 1.0154832232904745, + "grad_norm": 0.8953253030776978, + "learning_rate": 0.0001323036925492619, + "loss": 0.4057, + "step": 26300 + }, + { + "epoch": 1.0158693385845012, + "grad_norm": 1.271932601928711, + "learning_rate": 0.00013227795152966009, + "loss": 0.1802, + "step": 26310 + }, + { + "epoch": 1.0162554538785282, + "grad_norm": 
2.311713457107544, + "learning_rate": 0.00013225221051005833, + "loss": 0.3368, + "step": 26320 + }, + { + "epoch": 1.016641569172555, + "grad_norm": 1.386100172996521, + "learning_rate": 0.0001322264694904565, + "loss": 0.3101, + "step": 26330 + }, + { + "epoch": 1.0170276844665818, + "grad_norm": 1.371382236480713, + "learning_rate": 0.00013220072847085475, + "loss": 0.3804, + "step": 26340 + }, + { + "epoch": 1.0174137997606085, + "grad_norm": 0.7098391652107239, + "learning_rate": 0.00013217498745125294, + "loss": 0.2513, + "step": 26350 + }, + { + "epoch": 1.0177999150546353, + "grad_norm": 3.5874531269073486, + "learning_rate": 0.00013214924643165115, + "loss": 0.2467, + "step": 26360 + }, + { + "epoch": 1.018186030348662, + "grad_norm": 1.9853413105010986, + "learning_rate": 0.0001321235054120494, + "loss": 0.2255, + "step": 26370 + }, + { + "epoch": 1.0185721456426888, + "grad_norm": 0.5550156831741333, + "learning_rate": 0.00013209776439244758, + "loss": 0.1558, + "step": 26380 + }, + { + "epoch": 1.0189582609367156, + "grad_norm": 1.92972731590271, + "learning_rate": 0.00013207202337284582, + "loss": 0.4843, + "step": 26390 + }, + { + "epoch": 1.0193443762307426, + "grad_norm": 2.331674814224243, + "learning_rate": 0.000132046282353244, + "loss": 0.2652, + "step": 26400 + }, + { + "epoch": 1.0197304915247694, + "grad_norm": 0.809916615486145, + "learning_rate": 0.00013202054133364225, + "loss": 0.1824, + "step": 26410 + }, + { + "epoch": 1.0201166068187961, + "grad_norm": 2.6432926654815674, + "learning_rate": 0.00013199480031404046, + "loss": 0.2712, + "step": 26420 + }, + { + "epoch": 1.020502722112823, + "grad_norm": 1.6016955375671387, + "learning_rate": 0.00013196905929443865, + "loss": 0.1629, + "step": 26430 + }, + { + "epoch": 1.0208888374068497, + "grad_norm": 0.22881706058979034, + "learning_rate": 0.00013194331827483689, + "loss": 0.2977, + "step": 26440 + }, + { + "epoch": 1.0212749527008764, + "grad_norm": 1.3551994562149048, + "learning_rate": 0.00013191757725523507, + "loss": 0.1207, + "step": 26450 + }, + { + "epoch": 1.0216610679949032, + "grad_norm": 0.19062986969947815, + "learning_rate": 0.0001318918362356333, + "loss": 0.2201, + "step": 26460 + }, + { + "epoch": 1.0220471832889302, + "grad_norm": 2.014047145843506, + "learning_rate": 0.0001318660952160315, + "loss": 0.4991, + "step": 26470 + }, + { + "epoch": 1.022433298582957, + "grad_norm": 0.39503228664398193, + "learning_rate": 0.00013184035419642974, + "loss": 0.3509, + "step": 26480 + }, + { + "epoch": 1.0228194138769837, + "grad_norm": 4.807271957397461, + "learning_rate": 0.00013181461317682795, + "loss": 0.3324, + "step": 26490 + }, + { + "epoch": 1.0232055291710105, + "grad_norm": 1.4785593748092651, + "learning_rate": 0.00013178887215722614, + "loss": 0.269, + "step": 26500 + }, + { + "epoch": 1.0235916444650373, + "grad_norm": 1.851137399673462, + "learning_rate": 0.00013176313113762438, + "loss": 0.2739, + "step": 26510 + }, + { + "epoch": 1.023977759759064, + "grad_norm": 0.6200979351997375, + "learning_rate": 0.00013173739011802257, + "loss": 0.3135, + "step": 26520 + }, + { + "epoch": 1.0243638750530908, + "grad_norm": 2.411592960357666, + "learning_rate": 0.0001317116490984208, + "loss": 0.1675, + "step": 26530 + }, + { + "epoch": 1.0247499903471176, + "grad_norm": 0.6965230703353882, + "learning_rate": 0.000131685908078819, + "loss": 0.2762, + "step": 26540 + }, + { + "epoch": 1.0251361056411445, + "grad_norm": 1.3620637655258179, + "learning_rate": 0.00013166016705921723, + "loss": 0.319, + 
"step": 26550 + }, + { + "epoch": 1.0255222209351713, + "grad_norm": 1.4016404151916504, + "learning_rate": 0.00013163442603961545, + "loss": 0.1871, + "step": 26560 + }, + { + "epoch": 1.025908336229198, + "grad_norm": 1.234718680381775, + "learning_rate": 0.00013160868502001363, + "loss": 0.1301, + "step": 26570 + }, + { + "epoch": 1.0262944515232248, + "grad_norm": 1.4657102823257446, + "learning_rate": 0.00013158294400041187, + "loss": 0.3814, + "step": 26580 + }, + { + "epoch": 1.0266805668172516, + "grad_norm": 1.606948733329773, + "learning_rate": 0.00013155720298081006, + "loss": 0.2226, + "step": 26590 + }, + { + "epoch": 1.0270666821112784, + "grad_norm": 0.4280283749103546, + "learning_rate": 0.0001315314619612083, + "loss": 0.1506, + "step": 26600 + }, + { + "epoch": 1.0274527974053052, + "grad_norm": 0.141262486577034, + "learning_rate": 0.0001315057209416065, + "loss": 0.1476, + "step": 26610 + }, + { + "epoch": 1.027838912699332, + "grad_norm": 2.7536983489990234, + "learning_rate": 0.00013147997992200473, + "loss": 0.2558, + "step": 26620 + }, + { + "epoch": 1.028225027993359, + "grad_norm": 1.052965760231018, + "learning_rate": 0.00013145423890240294, + "loss": 0.3989, + "step": 26630 + }, + { + "epoch": 1.0286111432873857, + "grad_norm": 0.3157159388065338, + "learning_rate": 0.00013142849788280112, + "loss": 0.2656, + "step": 26640 + }, + { + "epoch": 1.0289972585814124, + "grad_norm": 2.080801248550415, + "learning_rate": 0.00013140275686319937, + "loss": 0.3183, + "step": 26650 + }, + { + "epoch": 1.0293833738754392, + "grad_norm": 1.255540370941162, + "learning_rate": 0.00013137701584359755, + "loss": 0.4555, + "step": 26660 + }, + { + "epoch": 1.029769489169466, + "grad_norm": 1.0504742860794067, + "learning_rate": 0.0001313512748239958, + "loss": 0.17, + "step": 26670 + }, + { + "epoch": 1.0301556044634927, + "grad_norm": 1.8295503854751587, + "learning_rate": 0.000131325533804394, + "loss": 0.2202, + "step": 26680 + }, + { + "epoch": 1.0305417197575195, + "grad_norm": 0.9758415222167969, + "learning_rate": 0.00013129979278479222, + "loss": 0.1187, + "step": 26690 + }, + { + "epoch": 1.0309278350515463, + "grad_norm": 0.6076366901397705, + "learning_rate": 0.00013127405176519043, + "loss": 0.0668, + "step": 26700 + }, + { + "epoch": 1.0313139503455733, + "grad_norm": 0.7663784027099609, + "learning_rate": 0.00013124831074558865, + "loss": 0.2115, + "step": 26710 + }, + { + "epoch": 1.0317000656396, + "grad_norm": 1.814332365989685, + "learning_rate": 0.00013122256972598686, + "loss": 0.0901, + "step": 26720 + }, + { + "epoch": 1.0320861809336268, + "grad_norm": 2.80830454826355, + "learning_rate": 0.00013119682870638504, + "loss": 0.213, + "step": 26730 + }, + { + "epoch": 1.0324722962276536, + "grad_norm": 1.324601411819458, + "learning_rate": 0.00013117108768678328, + "loss": 0.6125, + "step": 26740 + }, + { + "epoch": 1.0328584115216803, + "grad_norm": 1.3301643133163452, + "learning_rate": 0.0001311453466671815, + "loss": 0.1986, + "step": 26750 + }, + { + "epoch": 1.033244526815707, + "grad_norm": 5.361929893493652, + "learning_rate": 0.0001311196056475797, + "loss": 0.5023, + "step": 26760 + }, + { + "epoch": 1.0336306421097339, + "grad_norm": 0.7855739593505859, + "learning_rate": 0.00013109386462797792, + "loss": 0.2549, + "step": 26770 + }, + { + "epoch": 1.0340167574037609, + "grad_norm": 0.05219104886054993, + "learning_rate": 0.00013106812360837614, + "loss": 0.2942, + "step": 26780 + }, + { + "epoch": 1.0344028726977876, + "grad_norm": 
0.7680227160453796, + "learning_rate": 0.00013104238258877435, + "loss": 0.2909, + "step": 26790 + }, + { + "epoch": 1.0347889879918144, + "grad_norm": 0.559930682182312, + "learning_rate": 0.00013101664156917256, + "loss": 0.2876, + "step": 26800 + }, + { + "epoch": 1.0351751032858412, + "grad_norm": 0.8780495524406433, + "learning_rate": 0.00013099090054957078, + "loss": 0.2837, + "step": 26810 + }, + { + "epoch": 1.035561218579868, + "grad_norm": 2.0212693214416504, + "learning_rate": 0.000130965159529969, + "loss": 0.3008, + "step": 26820 + }, + { + "epoch": 1.0359473338738947, + "grad_norm": 2.2967641353607178, + "learning_rate": 0.0001309394185103672, + "loss": 0.2797, + "step": 26830 + }, + { + "epoch": 1.0363334491679215, + "grad_norm": 0.33352091908454895, + "learning_rate": 0.00013091367749076542, + "loss": 0.4785, + "step": 26840 + }, + { + "epoch": 1.0367195644619482, + "grad_norm": 1.5413645505905151, + "learning_rate": 0.00013088793647116363, + "loss": 0.3248, + "step": 26850 + }, + { + "epoch": 1.0371056797559752, + "grad_norm": 0.36845988035202026, + "learning_rate": 0.00013086219545156184, + "loss": 0.1723, + "step": 26860 + }, + { + "epoch": 1.037491795050002, + "grad_norm": 1.196103811264038, + "learning_rate": 0.00013083645443196006, + "loss": 0.3623, + "step": 26870 + }, + { + "epoch": 1.0378779103440288, + "grad_norm": 2.4172215461730957, + "learning_rate": 0.00013081071341235827, + "loss": 0.3428, + "step": 26880 + }, + { + "epoch": 1.0382640256380555, + "grad_norm": 1.585368275642395, + "learning_rate": 0.00013078497239275648, + "loss": 0.2791, + "step": 26890 + }, + { + "epoch": 1.0386501409320823, + "grad_norm": 2.8708138465881348, + "learning_rate": 0.0001307592313731547, + "loss": 0.3386, + "step": 26900 + }, + { + "epoch": 1.039036256226109, + "grad_norm": 1.1256765127182007, + "learning_rate": 0.0001307334903535529, + "loss": 0.2992, + "step": 26910 + }, + { + "epoch": 1.0394223715201358, + "grad_norm": 0.9846695065498352, + "learning_rate": 0.00013070774933395112, + "loss": 0.2659, + "step": 26920 + }, + { + "epoch": 1.0398084868141626, + "grad_norm": 1.5781525373458862, + "learning_rate": 0.00013068200831434934, + "loss": 0.2825, + "step": 26930 + }, + { + "epoch": 1.0401946021081896, + "grad_norm": 0.6615175008773804, + "learning_rate": 0.00013065626729474755, + "loss": 0.3608, + "step": 26940 + }, + { + "epoch": 1.0405807174022164, + "grad_norm": 1.2707170248031616, + "learning_rate": 0.00013063052627514576, + "loss": 0.3183, + "step": 26950 + }, + { + "epoch": 1.0409668326962431, + "grad_norm": 2.6396381855010986, + "learning_rate": 0.00013060478525554398, + "loss": 0.284, + "step": 26960 + }, + { + "epoch": 1.04135294799027, + "grad_norm": 1.5508041381835938, + "learning_rate": 0.0001305790442359422, + "loss": 0.2861, + "step": 26970 + }, + { + "epoch": 1.0417390632842967, + "grad_norm": 0.32720935344696045, + "learning_rate": 0.0001305533032163404, + "loss": 0.2784, + "step": 26980 + }, + { + "epoch": 1.0421251785783234, + "grad_norm": 2.3695240020751953, + "learning_rate": 0.00013052756219673862, + "loss": 0.3058, + "step": 26990 + }, + { + "epoch": 1.0425112938723502, + "grad_norm": 1.5404917001724243, + "learning_rate": 0.00013050182117713683, + "loss": 0.3636, + "step": 27000 + }, + { + "epoch": 1.0428974091663772, + "grad_norm": 1.3176417350769043, + "learning_rate": 0.00013047608015753504, + "loss": 0.2995, + "step": 27010 + }, + { + "epoch": 1.043283524460404, + "grad_norm": 1.5777894258499146, + "learning_rate": 0.00013045033913793326, + "loss": 
0.3244, + "step": 27020 + }, + { + "epoch": 1.0436696397544307, + "grad_norm": 2.209652900695801, + "learning_rate": 0.00013042459811833147, + "loss": 0.2716, + "step": 27030 + }, + { + "epoch": 1.0440557550484575, + "grad_norm": 1.7991529703140259, + "learning_rate": 0.00013039885709872968, + "loss": 0.2022, + "step": 27040 + }, + { + "epoch": 1.0444418703424843, + "grad_norm": 0.3741607069969177, + "learning_rate": 0.0001303731160791279, + "loss": 0.3808, + "step": 27050 + }, + { + "epoch": 1.044827985636511, + "grad_norm": 0.023122821003198624, + "learning_rate": 0.0001303473750595261, + "loss": 0.3209, + "step": 27060 + }, + { + "epoch": 1.0452141009305378, + "grad_norm": 2.0373151302337646, + "learning_rate": 0.00013032163403992432, + "loss": 0.3105, + "step": 27070 + }, + { + "epoch": 1.0456002162245646, + "grad_norm": 0.5242247581481934, + "learning_rate": 0.00013029589302032254, + "loss": 0.2589, + "step": 27080 + }, + { + "epoch": 1.0459863315185915, + "grad_norm": 0.8598466515541077, + "learning_rate": 0.00013027015200072075, + "loss": 0.3025, + "step": 27090 + }, + { + "epoch": 1.0463724468126183, + "grad_norm": 0.19895502924919128, + "learning_rate": 0.00013024441098111896, + "loss": 0.3278, + "step": 27100 + }, + { + "epoch": 1.046758562106645, + "grad_norm": 1.9250338077545166, + "learning_rate": 0.0001302186699615172, + "loss": 0.3272, + "step": 27110 + }, + { + "epoch": 1.0471446774006719, + "grad_norm": 0.6927512288093567, + "learning_rate": 0.0001301929289419154, + "loss": 0.3256, + "step": 27120 + }, + { + "epoch": 1.0475307926946986, + "grad_norm": 0.5463778376579285, + "learning_rate": 0.0001301671879223136, + "loss": 0.2726, + "step": 27130 + }, + { + "epoch": 1.0479169079887254, + "grad_norm": 0.24690699577331543, + "learning_rate": 0.00013014144690271182, + "loss": 0.1247, + "step": 27140 + }, + { + "epoch": 1.0483030232827522, + "grad_norm": 7.875885009765625, + "learning_rate": 0.00013011570588311003, + "loss": 0.24, + "step": 27150 + }, + { + "epoch": 1.048689138576779, + "grad_norm": 2.0821642875671387, + "learning_rate": 0.00013008996486350824, + "loss": 0.2695, + "step": 27160 + }, + { + "epoch": 1.049075253870806, + "grad_norm": 1.0324410200119019, + "learning_rate": 0.00013006422384390646, + "loss": 0.3156, + "step": 27170 + }, + { + "epoch": 1.0494613691648327, + "grad_norm": 1.3675347566604614, + "learning_rate": 0.0001300384828243047, + "loss": 0.2366, + "step": 27180 + }, + { + "epoch": 1.0498474844588594, + "grad_norm": 0.4514729082584381, + "learning_rate": 0.00013001274180470288, + "loss": 0.1541, + "step": 27190 + }, + { + "epoch": 1.0502335997528862, + "grad_norm": 1.0098782777786255, + "learning_rate": 0.0001299870007851011, + "loss": 0.3059, + "step": 27200 + }, + { + "epoch": 1.050619715046913, + "grad_norm": 0.07944436371326447, + "learning_rate": 0.0001299612597654993, + "loss": 0.1882, + "step": 27210 + }, + { + "epoch": 1.0510058303409398, + "grad_norm": 0.8160178661346436, + "learning_rate": 0.00012993551874589752, + "loss": 0.2657, + "step": 27220 + }, + { + "epoch": 1.0513919456349665, + "grad_norm": 3.881469964981079, + "learning_rate": 0.00012990977772629574, + "loss": 0.4586, + "step": 27230 + }, + { + "epoch": 1.0517780609289935, + "grad_norm": 1.7851450443267822, + "learning_rate": 0.00012988403670669395, + "loss": 0.2945, + "step": 27240 + }, + { + "epoch": 1.0521641762230203, + "grad_norm": 4.176274299621582, + "learning_rate": 0.0001298582956870922, + "loss": 0.3819, + "step": 27250 + }, + { + "epoch": 1.052550291517047, + 
"grad_norm": 1.43479585647583, + "learning_rate": 0.00012983255466749038, + "loss": 0.3651, + "step": 27260 + }, + { + "epoch": 1.0529364068110738, + "grad_norm": 0.4839598536491394, + "learning_rate": 0.0001298068136478886, + "loss": 0.186, + "step": 27270 + }, + { + "epoch": 1.0533225221051006, + "grad_norm": 0.8487644791603088, + "learning_rate": 0.0001297810726282868, + "loss": 0.0851, + "step": 27280 + }, + { + "epoch": 1.0537086373991273, + "grad_norm": 3.5912392139434814, + "learning_rate": 0.00012975533160868502, + "loss": 0.3962, + "step": 27290 + }, + { + "epoch": 1.0540947526931541, + "grad_norm": 2.4018168449401855, + "learning_rate": 0.00012972959058908326, + "loss": 0.4, + "step": 27300 + }, + { + "epoch": 1.0544808679871809, + "grad_norm": 3.8987746238708496, + "learning_rate": 0.00012970384956948144, + "loss": 0.3081, + "step": 27310 + }, + { + "epoch": 1.0548669832812079, + "grad_norm": 0.4471427798271179, + "learning_rate": 0.00012967810854987968, + "loss": 0.2227, + "step": 27320 + }, + { + "epoch": 1.0552530985752346, + "grad_norm": 0.6207703351974487, + "learning_rate": 0.00012965236753027787, + "loss": 0.2066, + "step": 27330 + }, + { + "epoch": 1.0556392138692614, + "grad_norm": 1.4381637573242188, + "learning_rate": 0.00012962662651067608, + "loss": 0.2598, + "step": 27340 + }, + { + "epoch": 1.0560253291632882, + "grad_norm": 1.6057437658309937, + "learning_rate": 0.0001296008854910743, + "loss": 0.4003, + "step": 27350 + }, + { + "epoch": 1.056411444457315, + "grad_norm": 1.583121418952942, + "learning_rate": 0.0001295751444714725, + "loss": 0.2231, + "step": 27360 + }, + { + "epoch": 1.0567975597513417, + "grad_norm": 0.9260556101799011, + "learning_rate": 0.00012954940345187075, + "loss": 0.3066, + "step": 27370 + }, + { + "epoch": 1.0571836750453685, + "grad_norm": 0.530800461769104, + "learning_rate": 0.00012952366243226894, + "loss": 0.2063, + "step": 27380 + }, + { + "epoch": 1.0575697903393952, + "grad_norm": 0.3256929814815521, + "learning_rate": 0.00012949792141266718, + "loss": 0.2279, + "step": 27390 + }, + { + "epoch": 1.0579559056334222, + "grad_norm": 1.2628639936447144, + "learning_rate": 0.00012947218039306536, + "loss": 0.179, + "step": 27400 + }, + { + "epoch": 1.058342020927449, + "grad_norm": 1.168662190437317, + "learning_rate": 0.0001294464393734636, + "loss": 0.2862, + "step": 27410 + }, + { + "epoch": 1.0587281362214758, + "grad_norm": 3.8283517360687256, + "learning_rate": 0.00012942069835386182, + "loss": 0.3933, + "step": 27420 + }, + { + "epoch": 1.0591142515155025, + "grad_norm": 0.10550712049007416, + "learning_rate": 0.00012939495733426, + "loss": 0.2015, + "step": 27430 + }, + { + "epoch": 1.0595003668095293, + "grad_norm": 0.7177254557609558, + "learning_rate": 0.00012936921631465824, + "loss": 0.2126, + "step": 27440 + }, + { + "epoch": 1.059886482103556, + "grad_norm": 3.3265602588653564, + "learning_rate": 0.00012934347529505643, + "loss": 0.3734, + "step": 27450 + }, + { + "epoch": 1.0602725973975828, + "grad_norm": 0.9886051416397095, + "learning_rate": 0.00012931773427545467, + "loss": 0.2643, + "step": 27460 + }, + { + "epoch": 1.0606587126916098, + "grad_norm": 0.8335347175598145, + "learning_rate": 0.00012929199325585286, + "loss": 0.2044, + "step": 27470 + }, + { + "epoch": 1.0610448279856366, + "grad_norm": 1.16574227809906, + "learning_rate": 0.0001292662522362511, + "loss": 0.2316, + "step": 27480 + }, + { + "epoch": 1.0614309432796634, + "grad_norm": 0.1635606586933136, + "learning_rate": 0.0001292405112166493, + 
"loss": 0.2477, + "step": 27490 + }, + { + "epoch": 1.0618170585736901, + "grad_norm": 0.6800632476806641, + "learning_rate": 0.0001292147701970475, + "loss": 0.2831, + "step": 27500 + }, + { + "epoch": 1.062203173867717, + "grad_norm": 0.5231989622116089, + "learning_rate": 0.00012918902917744574, + "loss": 0.1621, + "step": 27510 + }, + { + "epoch": 1.0625892891617437, + "grad_norm": 6.668003559112549, + "learning_rate": 0.00012916328815784392, + "loss": 0.2736, + "step": 27520 + }, + { + "epoch": 1.0629754044557704, + "grad_norm": 0.2980963885784149, + "learning_rate": 0.00012913754713824216, + "loss": 0.2563, + "step": 27530 + }, + { + "epoch": 1.0633615197497972, + "grad_norm": 0.8144646883010864, + "learning_rate": 0.00012911180611864035, + "loss": 0.3821, + "step": 27540 + }, + { + "epoch": 1.063747635043824, + "grad_norm": 0.9781578183174133, + "learning_rate": 0.0001290860650990386, + "loss": 0.3653, + "step": 27550 + }, + { + "epoch": 1.064133750337851, + "grad_norm": 1.5652499198913574, + "learning_rate": 0.0001290603240794368, + "loss": 0.325, + "step": 27560 + }, + { + "epoch": 1.0645198656318777, + "grad_norm": 2.707165002822876, + "learning_rate": 0.000129034583059835, + "loss": 0.3129, + "step": 27570 + }, + { + "epoch": 1.0649059809259045, + "grad_norm": 0.35952532291412354, + "learning_rate": 0.00012900884204023323, + "loss": 0.2093, + "step": 27580 + }, + { + "epoch": 1.0652920962199313, + "grad_norm": 1.2863729000091553, + "learning_rate": 0.00012898310102063142, + "loss": 0.1299, + "step": 27590 + }, + { + "epoch": 1.065678211513958, + "grad_norm": 0.8279618620872498, + "learning_rate": 0.00012895736000102966, + "loss": 0.2072, + "step": 27600 + }, + { + "epoch": 1.0660643268079848, + "grad_norm": 0.4006168246269226, + "learning_rate": 0.00012893161898142787, + "loss": 0.3613, + "step": 27610 + }, + { + "epoch": 1.0664504421020116, + "grad_norm": 1.2714260816574097, + "learning_rate": 0.00012890587796182608, + "loss": 0.2013, + "step": 27620 + }, + { + "epoch": 1.0668365573960386, + "grad_norm": 1.9552396535873413, + "learning_rate": 0.0001288801369422243, + "loss": 0.2837, + "step": 27630 + }, + { + "epoch": 1.0672226726900653, + "grad_norm": 2.179871082305908, + "learning_rate": 0.00012885439592262248, + "loss": 0.1521, + "step": 27640 + }, + { + "epoch": 1.067608787984092, + "grad_norm": 1.1471878290176392, + "learning_rate": 0.00012882865490302072, + "loss": 0.2532, + "step": 27650 + }, + { + "epoch": 1.0679949032781189, + "grad_norm": 3.0032637119293213, + "learning_rate": 0.0001288029138834189, + "loss": 0.467, + "step": 27660 + }, + { + "epoch": 1.0683810185721456, + "grad_norm": 2.480180025100708, + "learning_rate": 0.00012877717286381715, + "loss": 0.3601, + "step": 27670 + }, + { + "epoch": 1.0687671338661724, + "grad_norm": 1.86027991771698, + "learning_rate": 0.00012875143184421536, + "loss": 0.3689, + "step": 27680 + }, + { + "epoch": 1.0691532491601992, + "grad_norm": 0.38396087288856506, + "learning_rate": 0.00012872569082461358, + "loss": 0.1401, + "step": 27690 + }, + { + "epoch": 1.0695393644542261, + "grad_norm": 0.8882033824920654, + "learning_rate": 0.0001286999498050118, + "loss": 0.6004, + "step": 27700 + }, + { + "epoch": 1.069925479748253, + "grad_norm": 1.2111278772354126, + "learning_rate": 0.00012867420878540998, + "loss": 0.2371, + "step": 27710 + }, + { + "epoch": 1.0703115950422797, + "grad_norm": 1.7789413928985596, + "learning_rate": 0.00012864846776580822, + "loss": 0.3142, + "step": 27720 + }, + { + "epoch": 1.0706977103363065, + 
"grad_norm": 1.6160372495651245, + "learning_rate": 0.00012862272674620643, + "loss": 0.3444, + "step": 27730 + }, + { + "epoch": 1.0710838256303332, + "grad_norm": 3.336289167404175, + "learning_rate": 0.00012859698572660464, + "loss": 0.1214, + "step": 27740 + }, + { + "epoch": 1.07146994092436, + "grad_norm": 4.104520320892334, + "learning_rate": 0.00012857124470700286, + "loss": 0.1752, + "step": 27750 + }, + { + "epoch": 1.0718560562183868, + "grad_norm": 0.4429762363433838, + "learning_rate": 0.00012854550368740107, + "loss": 0.5001, + "step": 27760 + }, + { + "epoch": 1.0722421715124135, + "grad_norm": 0.557033121585846, + "learning_rate": 0.00012851976266779928, + "loss": 0.3267, + "step": 27770 + }, + { + "epoch": 1.0726282868064403, + "grad_norm": 1.6847301721572876, + "learning_rate": 0.00012849402164819747, + "loss": 0.2305, + "step": 27780 + }, + { + "epoch": 1.0730144021004673, + "grad_norm": 1.0504320859909058, + "learning_rate": 0.0001284682806285957, + "loss": 0.3443, + "step": 27790 + }, + { + "epoch": 1.073400517394494, + "grad_norm": 2.728804349899292, + "learning_rate": 0.00012844253960899392, + "loss": 0.4083, + "step": 27800 + }, + { + "epoch": 1.0737866326885208, + "grad_norm": 0.6164497137069702, + "learning_rate": 0.00012841679858939214, + "loss": 0.2049, + "step": 27810 + }, + { + "epoch": 1.0741727479825476, + "grad_norm": 2.1831917762756348, + "learning_rate": 0.00012839105756979035, + "loss": 0.1674, + "step": 27820 + }, + { + "epoch": 1.0745588632765743, + "grad_norm": 0.5254467129707336, + "learning_rate": 0.00012836531655018856, + "loss": 0.3456, + "step": 27830 + }, + { + "epoch": 1.0749449785706011, + "grad_norm": 2.920846700668335, + "learning_rate": 0.00012833957553058678, + "loss": 0.4071, + "step": 27840 + }, + { + "epoch": 1.0753310938646279, + "grad_norm": 0.6006580591201782, + "learning_rate": 0.00012831383451098496, + "loss": 0.16, + "step": 27850 + }, + { + "epoch": 1.0757172091586549, + "grad_norm": 1.7163684368133545, + "learning_rate": 0.0001282880934913832, + "loss": 0.1821, + "step": 27860 + }, + { + "epoch": 1.0761033244526816, + "grad_norm": 1.8286449909210205, + "learning_rate": 0.00012826235247178142, + "loss": 0.3079, + "step": 27870 + }, + { + "epoch": 1.0764894397467084, + "grad_norm": 2.5178558826446533, + "learning_rate": 0.00012823661145217963, + "loss": 0.2158, + "step": 27880 + }, + { + "epoch": 1.0768755550407352, + "grad_norm": 0.8985245227813721, + "learning_rate": 0.00012821087043257784, + "loss": 0.198, + "step": 27890 + }, + { + "epoch": 1.077261670334762, + "grad_norm": 0.11768722534179688, + "learning_rate": 0.00012818512941297606, + "loss": 0.1661, + "step": 27900 + }, + { + "epoch": 1.0776477856287887, + "grad_norm": 1.0070226192474365, + "learning_rate": 0.00012815938839337427, + "loss": 0.2843, + "step": 27910 + }, + { + "epoch": 1.0780339009228155, + "grad_norm": 1.6243773698806763, + "learning_rate": 0.00012813364737377248, + "loss": 0.3449, + "step": 27920 + }, + { + "epoch": 1.0784200162168422, + "grad_norm": 2.8661181926727295, + "learning_rate": 0.0001281079063541707, + "loss": 0.4921, + "step": 27930 + }, + { + "epoch": 1.0788061315108692, + "grad_norm": 1.5015594959259033, + "learning_rate": 0.0001280821653345689, + "loss": 0.3347, + "step": 27940 + }, + { + "epoch": 1.079192246804896, + "grad_norm": 1.7244246006011963, + "learning_rate": 0.00012805642431496712, + "loss": 0.3342, + "step": 27950 + }, + { + "epoch": 1.0795783620989228, + "grad_norm": 0.17871785163879395, + "learning_rate": 
0.00012803068329536534, + "loss": 0.051, + "step": 27960 + }, + { + "epoch": 1.0799644773929495, + "grad_norm": 1.093429684638977, + "learning_rate": 0.00012800494227576355, + "loss": 0.2461, + "step": 27970 + }, + { + "epoch": 1.0803505926869763, + "grad_norm": 1.0775126218795776, + "learning_rate": 0.00012797920125616176, + "loss": 0.293, + "step": 27980 + }, + { + "epoch": 1.080736707981003, + "grad_norm": 2.0808680057525635, + "learning_rate": 0.00012795346023655998, + "loss": 0.2125, + "step": 27990 + }, + { + "epoch": 1.0811228232750298, + "grad_norm": 3.172473907470703, + "learning_rate": 0.0001279277192169582, + "loss": 0.3289, + "step": 28000 + }, + { + "epoch": 1.0815089385690566, + "grad_norm": 0.8227205872535706, + "learning_rate": 0.0001279019781973564, + "loss": 0.4457, + "step": 28010 + }, + { + "epoch": 1.0818950538630836, + "grad_norm": 0.4987971782684326, + "learning_rate": 0.00012787623717775462, + "loss": 0.2397, + "step": 28020 + }, + { + "epoch": 1.0822811691571104, + "grad_norm": 0.6923367381095886, + "learning_rate": 0.00012785049615815283, + "loss": 0.1833, + "step": 28030 + }, + { + "epoch": 1.0826672844511371, + "grad_norm": 0.3719552457332611, + "learning_rate": 0.00012782475513855104, + "loss": 0.4045, + "step": 28040 + }, + { + "epoch": 1.083053399745164, + "grad_norm": 0.798744261264801, + "learning_rate": 0.00012779901411894926, + "loss": 0.2218, + "step": 28050 + }, + { + "epoch": 1.0834395150391907, + "grad_norm": 0.6289515495300293, + "learning_rate": 0.00012777327309934747, + "loss": 0.2423, + "step": 28060 + }, + { + "epoch": 1.0838256303332174, + "grad_norm": 0.6853532195091248, + "learning_rate": 0.00012774753207974568, + "loss": 0.2759, + "step": 28070 + }, + { + "epoch": 1.0842117456272442, + "grad_norm": 1.3380333185195923, + "learning_rate": 0.0001277217910601439, + "loss": 0.2457, + "step": 28080 + }, + { + "epoch": 1.0845978609212712, + "grad_norm": 1.4076060056686401, + "learning_rate": 0.0001276960500405421, + "loss": 0.3065, + "step": 28090 + }, + { + "epoch": 1.084983976215298, + "grad_norm": 1.790323257446289, + "learning_rate": 0.00012767030902094032, + "loss": 0.182, + "step": 28100 + }, + { + "epoch": 1.0853700915093247, + "grad_norm": 1.9291974306106567, + "learning_rate": 0.00012764456800133854, + "loss": 0.3798, + "step": 28110 + }, + { + "epoch": 1.0857562068033515, + "grad_norm": 1.36685049533844, + "learning_rate": 0.00012761882698173675, + "loss": 0.1893, + "step": 28120 + }, + { + "epoch": 1.0861423220973783, + "grad_norm": 2.52441668510437, + "learning_rate": 0.00012759308596213496, + "loss": 0.2389, + "step": 28130 + }, + { + "epoch": 1.086528437391405, + "grad_norm": 0.9578754901885986, + "learning_rate": 0.00012756734494253318, + "loss": 0.3133, + "step": 28140 + }, + { + "epoch": 1.0869145526854318, + "grad_norm": 0.17957572638988495, + "learning_rate": 0.0001275416039229314, + "loss": 0.1708, + "step": 28150 + }, + { + "epoch": 1.0873006679794586, + "grad_norm": 1.7213740348815918, + "learning_rate": 0.0001275158629033296, + "loss": 0.3831, + "step": 28160 + }, + { + "epoch": 1.0876867832734856, + "grad_norm": 1.2460767030715942, + "learning_rate": 0.00012749012188372782, + "loss": 0.266, + "step": 28170 + }, + { + "epoch": 1.0880728985675123, + "grad_norm": 0.33691835403442383, + "learning_rate": 0.00012746438086412606, + "loss": 0.216, + "step": 28180 + }, + { + "epoch": 1.088459013861539, + "grad_norm": 0.23887981474399567, + "learning_rate": 0.00012743863984452424, + "loss": 0.2331, + "step": 28190 + }, + { + "epoch": 
1.0888451291555659, + "grad_norm": 4.753499984741211, + "learning_rate": 0.00012741289882492246, + "loss": 0.1845, + "step": 28200 + }, + { + "epoch": 1.0892312444495926, + "grad_norm": 1.055097222328186, + "learning_rate": 0.00012738715780532067, + "loss": 0.2978, + "step": 28210 + }, + { + "epoch": 1.0896173597436194, + "grad_norm": 2.3080852031707764, + "learning_rate": 0.00012736141678571888, + "loss": 0.1838, + "step": 28220 + }, + { + "epoch": 1.0900034750376462, + "grad_norm": 0.3733162581920624, + "learning_rate": 0.00012733567576611712, + "loss": 0.1941, + "step": 28230 + }, + { + "epoch": 1.090389590331673, + "grad_norm": 2.247748613357544, + "learning_rate": 0.0001273099347465153, + "loss": 0.2594, + "step": 28240 + }, + { + "epoch": 1.0907757056257, + "grad_norm": 1.644177794456482, + "learning_rate": 0.00012728419372691355, + "loss": 0.392, + "step": 28250 + }, + { + "epoch": 1.0911618209197267, + "grad_norm": 2.3522965908050537, + "learning_rate": 0.00012725845270731174, + "loss": 0.2198, + "step": 28260 + }, + { + "epoch": 1.0915479362137535, + "grad_norm": 0.2335210144519806, + "learning_rate": 0.00012723271168770995, + "loss": 0.3363, + "step": 28270 + }, + { + "epoch": 1.0919340515077802, + "grad_norm": 0.476607084274292, + "learning_rate": 0.00012720697066810816, + "loss": 0.1803, + "step": 28280 + }, + { + "epoch": 1.092320166801807, + "grad_norm": 2.1482882499694824, + "learning_rate": 0.00012718122964850638, + "loss": 0.2567, + "step": 28290 + }, + { + "epoch": 1.0927062820958338, + "grad_norm": 1.6457593441009521, + "learning_rate": 0.00012715548862890462, + "loss": 0.196, + "step": 28300 + }, + { + "epoch": 1.0930923973898605, + "grad_norm": 2.0742087364196777, + "learning_rate": 0.0001271297476093028, + "loss": 0.3955, + "step": 28310 + }, + { + "epoch": 1.0934785126838875, + "grad_norm": 0.1588711142539978, + "learning_rate": 0.00012710400658970104, + "loss": 0.2113, + "step": 28320 + }, + { + "epoch": 1.0938646279779143, + "grad_norm": 4.303687572479248, + "learning_rate": 0.00012707826557009923, + "loss": 0.2941, + "step": 28330 + }, + { + "epoch": 1.094250743271941, + "grad_norm": 2.0096209049224854, + "learning_rate": 0.00012705252455049744, + "loss": 0.3092, + "step": 28340 + }, + { + "epoch": 1.0946368585659678, + "grad_norm": 0.49071142077445984, + "learning_rate": 0.00012702678353089566, + "loss": 0.2387, + "step": 28350 + }, + { + "epoch": 1.0950229738599946, + "grad_norm": 0.9084739089012146, + "learning_rate": 0.00012700104251129387, + "loss": 0.2199, + "step": 28360 + }, + { + "epoch": 1.0954090891540214, + "grad_norm": 2.076706647872925, + "learning_rate": 0.0001269753014916921, + "loss": 0.3652, + "step": 28370 + }, + { + "epoch": 1.0957952044480481, + "grad_norm": 0.13036206364631653, + "learning_rate": 0.0001269495604720903, + "loss": 0.2514, + "step": 28380 + }, + { + "epoch": 1.0961813197420749, + "grad_norm": 0.146321102976799, + "learning_rate": 0.00012692381945248854, + "loss": 0.2721, + "step": 28390 + }, + { + "epoch": 1.0965674350361019, + "grad_norm": 0.8172006607055664, + "learning_rate": 0.00012689807843288672, + "loss": 0.16, + "step": 28400 + }, + { + "epoch": 1.0969535503301286, + "grad_norm": 1.099068522453308, + "learning_rate": 0.00012687233741328494, + "loss": 0.2489, + "step": 28410 + }, + { + "epoch": 1.0973396656241554, + "grad_norm": 0.6757088899612427, + "learning_rate": 0.00012684659639368318, + "loss": 0.1449, + "step": 28420 + }, + { + "epoch": 1.0977257809181822, + "grad_norm": 0.11124458909034729, + "learning_rate": 
0.00012682085537408136, + "loss": 0.2202, + "step": 28430 + }, + { + "epoch": 1.098111896212209, + "grad_norm": 2.357466220855713, + "learning_rate": 0.0001267951143544796, + "loss": 0.2669, + "step": 28440 + }, + { + "epoch": 1.0984980115062357, + "grad_norm": 4.569977760314941, + "learning_rate": 0.0001267693733348778, + "loss": 0.3054, + "step": 28450 + }, + { + "epoch": 1.0988841268002625, + "grad_norm": 1.150667667388916, + "learning_rate": 0.00012674363231527603, + "loss": 0.1519, + "step": 28460 + }, + { + "epoch": 1.0992702420942893, + "grad_norm": 2.016101360321045, + "learning_rate": 0.00012671789129567421, + "loss": 0.2307, + "step": 28470 + }, + { + "epoch": 1.0996563573883162, + "grad_norm": 1.2213127613067627, + "learning_rate": 0.00012669215027607243, + "loss": 0.2847, + "step": 28480 + }, + { + "epoch": 1.100042472682343, + "grad_norm": 2.8080902099609375, + "learning_rate": 0.00012666640925647067, + "loss": 0.2295, + "step": 28490 + }, + { + "epoch": 1.1004285879763698, + "grad_norm": 1.4878045320510864, + "learning_rate": 0.00012664066823686885, + "loss": 0.1497, + "step": 28500 + }, + { + "epoch": 1.1008147032703965, + "grad_norm": 0.7453703880310059, + "learning_rate": 0.0001266149272172671, + "loss": 0.2052, + "step": 28510 + }, + { + "epoch": 1.1012008185644233, + "grad_norm": 0.2775499224662781, + "learning_rate": 0.00012658918619766528, + "loss": 0.1576, + "step": 28520 + }, + { + "epoch": 1.10158693385845, + "grad_norm": 1.0527644157409668, + "learning_rate": 0.00012656344517806352, + "loss": 0.2957, + "step": 28530 + }, + { + "epoch": 1.1019730491524768, + "grad_norm": 0.6511454582214355, + "learning_rate": 0.0001265377041584617, + "loss": 0.2336, + "step": 28540 + }, + { + "epoch": 1.1023591644465038, + "grad_norm": 0.32867324352264404, + "learning_rate": 0.00012651196313885992, + "loss": 0.1745, + "step": 28550 + }, + { + "epoch": 1.1027452797405306, + "grad_norm": 2.4408578872680664, + "learning_rate": 0.00012648622211925816, + "loss": 0.2195, + "step": 28560 + }, + { + "epoch": 1.1031313950345574, + "grad_norm": 1.4876518249511719, + "learning_rate": 0.00012646048109965635, + "loss": 0.5123, + "step": 28570 + }, + { + "epoch": 1.1035175103285841, + "grad_norm": 1.9403778314590454, + "learning_rate": 0.0001264347400800546, + "loss": 0.2783, + "step": 28580 + }, + { + "epoch": 1.103903625622611, + "grad_norm": 1.215280532836914, + "learning_rate": 0.00012640899906045277, + "loss": 0.2661, + "step": 28590 + }, + { + "epoch": 1.1042897409166377, + "grad_norm": 0.7179967164993286, + "learning_rate": 0.00012638325804085102, + "loss": 0.1756, + "step": 28600 + }, + { + "epoch": 1.1046758562106644, + "grad_norm": 1.9051718711853027, + "learning_rate": 0.00012635751702124923, + "loss": 0.2724, + "step": 28610 + }, + { + "epoch": 1.1050619715046912, + "grad_norm": 1.5659642219543457, + "learning_rate": 0.00012633177600164744, + "loss": 0.3891, + "step": 28620 + }, + { + "epoch": 1.1054480867987182, + "grad_norm": 0.9210501313209534, + "learning_rate": 0.00012630603498204566, + "loss": 0.1369, + "step": 28630 + }, + { + "epoch": 1.105834202092745, + "grad_norm": 0.5428475141525269, + "learning_rate": 0.00012628029396244384, + "loss": 0.1173, + "step": 28640 + }, + { + "epoch": 1.1062203173867717, + "grad_norm": 1.7399749755859375, + "learning_rate": 0.00012625455294284208, + "loss": 0.3005, + "step": 28650 + }, + { + "epoch": 1.1066064326807985, + "grad_norm": 0.09703828394412994, + "learning_rate": 0.00012622881192324027, + "loss": 0.3346, + "step": 28660 + }, + { + 
"epoch": 1.1069925479748253, + "grad_norm": 0.5834600329399109, + "learning_rate": 0.0001262030709036385, + "loss": 0.2689, + "step": 28670 + }, + { + "epoch": 1.107378663268852, + "grad_norm": 1.0677303075790405, + "learning_rate": 0.00012617732988403672, + "loss": 0.5609, + "step": 28680 + }, + { + "epoch": 1.1077647785628788, + "grad_norm": 1.6462419033050537, + "learning_rate": 0.00012615158886443493, + "loss": 0.1991, + "step": 28690 + }, + { + "epoch": 1.1081508938569056, + "grad_norm": 0.933779776096344, + "learning_rate": 0.00012612584784483315, + "loss": 0.2757, + "step": 28700 + }, + { + "epoch": 1.1085370091509326, + "grad_norm": 1.3413206338882446, + "learning_rate": 0.00012610010682523133, + "loss": 0.1798, + "step": 28710 + }, + { + "epoch": 1.1089231244449593, + "grad_norm": 4.479143142700195, + "learning_rate": 0.00012607436580562957, + "loss": 0.1777, + "step": 28720 + }, + { + "epoch": 1.109309239738986, + "grad_norm": 1.5768260955810547, + "learning_rate": 0.0001260486247860278, + "loss": 0.1967, + "step": 28730 + }, + { + "epoch": 1.1096953550330129, + "grad_norm": 2.1206741333007812, + "learning_rate": 0.000126022883766426, + "loss": 0.2399, + "step": 28740 + }, + { + "epoch": 1.1100814703270396, + "grad_norm": 1.4531667232513428, + "learning_rate": 0.00012599714274682421, + "loss": 0.3464, + "step": 28750 + }, + { + "epoch": 1.1104675856210664, + "grad_norm": 1.7988258600234985, + "learning_rate": 0.00012597140172722243, + "loss": 0.285, + "step": 28760 + }, + { + "epoch": 1.1108537009150932, + "grad_norm": 1.094808578491211, + "learning_rate": 0.00012594566070762064, + "loss": 0.2194, + "step": 28770 + }, + { + "epoch": 1.1112398162091202, + "grad_norm": 1.3884358406066895, + "learning_rate": 0.00012591991968801883, + "loss": 0.4267, + "step": 28780 + }, + { + "epoch": 1.111625931503147, + "grad_norm": 2.743480920791626, + "learning_rate": 0.00012589417866841707, + "loss": 0.3333, + "step": 28790 + }, + { + "epoch": 1.1120120467971737, + "grad_norm": 1.0373203754425049, + "learning_rate": 0.00012586843764881528, + "loss": 0.3941, + "step": 28800 + }, + { + "epoch": 1.1123981620912005, + "grad_norm": 2.018101692199707, + "learning_rate": 0.0001258426966292135, + "loss": 0.2928, + "step": 28810 + }, + { + "epoch": 1.1127842773852272, + "grad_norm": 2.567119836807251, + "learning_rate": 0.0001258169556096117, + "loss": 0.3597, + "step": 28820 + }, + { + "epoch": 1.113170392679254, + "grad_norm": 1.1235183477401733, + "learning_rate": 0.00012579121459000992, + "loss": 0.1807, + "step": 28830 + }, + { + "epoch": 1.1135565079732808, + "grad_norm": 1.3740451335906982, + "learning_rate": 0.00012576547357040813, + "loss": 0.2425, + "step": 28840 + }, + { + "epoch": 1.1139426232673075, + "grad_norm": 1.3751258850097656, + "learning_rate": 0.00012573973255080632, + "loss": 0.2116, + "step": 28850 + }, + { + "epoch": 1.1143287385613343, + "grad_norm": 0.9605401158332825, + "learning_rate": 0.00012571399153120456, + "loss": 0.2144, + "step": 28860 + }, + { + "epoch": 1.1147148538553613, + "grad_norm": 0.5127251148223877, + "learning_rate": 0.00012568825051160277, + "loss": 0.1902, + "step": 28870 + }, + { + "epoch": 1.115100969149388, + "grad_norm": 2.6720705032348633, + "learning_rate": 0.000125662509492001, + "loss": 0.3286, + "step": 28880 + }, + { + "epoch": 1.1154870844434148, + "grad_norm": 3.7196574211120605, + "learning_rate": 0.0001256367684723992, + "loss": 0.211, + "step": 28890 + }, + { + "epoch": 1.1158731997374416, + "grad_norm": 0.18796740472316742, + 
"learning_rate": 0.00012561102745279741, + "loss": 0.3351, + "step": 28900 + }, + { + "epoch": 1.1162593150314684, + "grad_norm": 1.3164410591125488, + "learning_rate": 0.00012558528643319563, + "loss": 0.1771, + "step": 28910 + }, + { + "epoch": 1.1166454303254951, + "grad_norm": 0.9552701115608215, + "learning_rate": 0.00012555954541359384, + "loss": 0.3302, + "step": 28920 + }, + { + "epoch": 1.117031545619522, + "grad_norm": 1.0072277784347534, + "learning_rate": 0.00012553380439399205, + "loss": 0.2936, + "step": 28930 + }, + { + "epoch": 1.1174176609135489, + "grad_norm": 0.24307872354984283, + "learning_rate": 0.00012550806337439027, + "loss": 0.2034, + "step": 28940 + }, + { + "epoch": 1.1178037762075757, + "grad_norm": 2.985166311264038, + "learning_rate": 0.00012548232235478848, + "loss": 0.2628, + "step": 28950 + }, + { + "epoch": 1.1181898915016024, + "grad_norm": 0.46555295586586, + "learning_rate": 0.0001254565813351867, + "loss": 0.1904, + "step": 28960 + }, + { + "epoch": 1.1185760067956292, + "grad_norm": 0.7813409566879272, + "learning_rate": 0.0001254308403155849, + "loss": 0.33, + "step": 28970 + }, + { + "epoch": 1.118962122089656, + "grad_norm": 2.4459455013275146, + "learning_rate": 0.00012540509929598312, + "loss": 0.3183, + "step": 28980 + }, + { + "epoch": 1.1193482373836827, + "grad_norm": 0.5164415240287781, + "learning_rate": 0.00012537935827638133, + "loss": 0.3959, + "step": 28990 + }, + { + "epoch": 1.1197343526777095, + "grad_norm": 0.3853105306625366, + "learning_rate": 0.00012535361725677955, + "loss": 0.1426, + "step": 29000 + }, + { + "epoch": 1.1201204679717365, + "grad_norm": 0.5817530751228333, + "learning_rate": 0.00012532787623717776, + "loss": 0.2218, + "step": 29010 + }, + { + "epoch": 1.1205065832657632, + "grad_norm": 1.264248251914978, + "learning_rate": 0.00012530213521757597, + "loss": 0.3002, + "step": 29020 + }, + { + "epoch": 1.12089269855979, + "grad_norm": 1.999251127243042, + "learning_rate": 0.0001252763941979742, + "loss": 0.3573, + "step": 29030 + }, + { + "epoch": 1.1212788138538168, + "grad_norm": 1.760797381401062, + "learning_rate": 0.0001252506531783724, + "loss": 0.3829, + "step": 29040 + }, + { + "epoch": 1.1216649291478435, + "grad_norm": 1.4757565259933472, + "learning_rate": 0.00012522491215877061, + "loss": 0.3407, + "step": 29050 + }, + { + "epoch": 1.1220510444418703, + "grad_norm": 0.08838029205799103, + "learning_rate": 0.00012519917113916883, + "loss": 0.1899, + "step": 29060 + }, + { + "epoch": 1.122437159735897, + "grad_norm": 0.6416037678718567, + "learning_rate": 0.00012517343011956704, + "loss": 0.277, + "step": 29070 + }, + { + "epoch": 1.1228232750299239, + "grad_norm": 2.9282822608947754, + "learning_rate": 0.00012514768909996525, + "loss": 0.3001, + "step": 29080 + }, + { + "epoch": 1.1232093903239506, + "grad_norm": 2.5984582901000977, + "learning_rate": 0.00012512194808036347, + "loss": 0.2326, + "step": 29090 + }, + { + "epoch": 1.1235955056179776, + "grad_norm": 1.0622142553329468, + "learning_rate": 0.00012509620706076168, + "loss": 0.1494, + "step": 29100 + }, + { + "epoch": 1.1239816209120044, + "grad_norm": 1.5386018753051758, + "learning_rate": 0.0001250704660411599, + "loss": 0.3392, + "step": 29110 + }, + { + "epoch": 1.1243677362060311, + "grad_norm": 0.8901385068893433, + "learning_rate": 0.0001250447250215581, + "loss": 0.2651, + "step": 29120 + }, + { + "epoch": 1.124753851500058, + "grad_norm": 2.0237483978271484, + "learning_rate": 0.00012501898400195632, + "loss": 0.3764, + "step": 29130 + 
}, + { + "epoch": 1.1251399667940847, + "grad_norm": 1.8989384174346924, + "learning_rate": 0.00012499324298235453, + "loss": 0.2713, + "step": 29140 + }, + { + "epoch": 1.1255260820881114, + "grad_norm": 2.704643487930298, + "learning_rate": 0.00012496750196275275, + "loss": 0.1362, + "step": 29150 + }, + { + "epoch": 1.1259121973821382, + "grad_norm": 1.2598273754119873, + "learning_rate": 0.00012494176094315096, + "loss": 0.331, + "step": 29160 + }, + { + "epoch": 1.1262983126761652, + "grad_norm": 2.2073826789855957, + "learning_rate": 0.00012491601992354917, + "loss": 0.209, + "step": 29170 + }, + { + "epoch": 1.126684427970192, + "grad_norm": 0.8338522911071777, + "learning_rate": 0.0001248902789039474, + "loss": 0.2583, + "step": 29180 + }, + { + "epoch": 1.1270705432642187, + "grad_norm": 0.49807825684547424, + "learning_rate": 0.0001248645378843456, + "loss": 0.2702, + "step": 29190 + }, + { + "epoch": 1.1274566585582455, + "grad_norm": 2.2561802864074707, + "learning_rate": 0.00012483879686474381, + "loss": 0.2667, + "step": 29200 + }, + { + "epoch": 1.1278427738522723, + "grad_norm": 0.6450731754302979, + "learning_rate": 0.00012481305584514203, + "loss": 0.2592, + "step": 29210 + }, + { + "epoch": 1.128228889146299, + "grad_norm": 1.214436650276184, + "learning_rate": 0.00012478731482554024, + "loss": 0.2285, + "step": 29220 + }, + { + "epoch": 1.1286150044403258, + "grad_norm": 1.2677173614501953, + "learning_rate": 0.00012476157380593848, + "loss": 0.1732, + "step": 29230 + }, + { + "epoch": 1.1290011197343528, + "grad_norm": 2.034266471862793, + "learning_rate": 0.00012473583278633667, + "loss": 0.3679, + "step": 29240 + }, + { + "epoch": 1.1293872350283796, + "grad_norm": 2.005913496017456, + "learning_rate": 0.00012471009176673488, + "loss": 0.2703, + "step": 29250 + }, + { + "epoch": 1.1297733503224063, + "grad_norm": 1.818928599357605, + "learning_rate": 0.0001246843507471331, + "loss": 0.3355, + "step": 29260 + }, + { + "epoch": 1.130159465616433, + "grad_norm": 0.5393241047859192, + "learning_rate": 0.0001246586097275313, + "loss": 0.1043, + "step": 29270 + }, + { + "epoch": 1.1305455809104599, + "grad_norm": 0.5508402585983276, + "learning_rate": 0.00012463286870792952, + "loss": 0.2538, + "step": 29280 + }, + { + "epoch": 1.1309316962044866, + "grad_norm": 1.1734035015106201, + "learning_rate": 0.00012460712768832773, + "loss": 0.3082, + "step": 29290 + }, + { + "epoch": 1.1313178114985134, + "grad_norm": 0.977611243724823, + "learning_rate": 0.00012458138666872597, + "loss": 0.1962, + "step": 29300 + }, + { + "epoch": 1.1317039267925402, + "grad_norm": 0.9720492362976074, + "learning_rate": 0.00012455564564912416, + "loss": 0.3462, + "step": 29310 + }, + { + "epoch": 1.132090042086567, + "grad_norm": 1.20888352394104, + "learning_rate": 0.00012452990462952237, + "loss": 0.1229, + "step": 29320 + }, + { + "epoch": 1.132476157380594, + "grad_norm": 0.7969954609870911, + "learning_rate": 0.0001245041636099206, + "loss": 0.4213, + "step": 29330 + }, + { + "epoch": 1.1328622726746207, + "grad_norm": 0.07595942914485931, + "learning_rate": 0.0001244784225903188, + "loss": 0.2148, + "step": 29340 + }, + { + "epoch": 1.1332483879686475, + "grad_norm": 0.15456156432628632, + "learning_rate": 0.000124452681570717, + "loss": 0.4713, + "step": 29350 + }, + { + "epoch": 1.1336345032626742, + "grad_norm": 1.232366681098938, + "learning_rate": 0.00012442694055111523, + "loss": 0.2446, + "step": 29360 + }, + { + "epoch": 1.134020618556701, + "grad_norm": 2.3669209480285645, + 
"learning_rate": 0.00012440119953151347, + "loss": 0.3025, + "step": 29370 + }, + { + "epoch": 1.1344067338507278, + "grad_norm": 4.639179229736328, + "learning_rate": 0.00012437545851191165, + "loss": 0.337, + "step": 29380 + }, + { + "epoch": 1.1347928491447545, + "grad_norm": 0.700533926486969, + "learning_rate": 0.0001243497174923099, + "loss": 0.1747, + "step": 29390 + }, + { + "epoch": 1.1351789644387815, + "grad_norm": 0.5738794803619385, + "learning_rate": 0.00012432397647270808, + "loss": 0.357, + "step": 29400 + }, + { + "epoch": 1.1355650797328083, + "grad_norm": 2.620095729827881, + "learning_rate": 0.0001242982354531063, + "loss": 0.2885, + "step": 29410 + }, + { + "epoch": 1.135951195026835, + "grad_norm": 1.5040203332901, + "learning_rate": 0.00012427249443350453, + "loss": 0.2481, + "step": 29420 + }, + { + "epoch": 1.1363373103208618, + "grad_norm": 0.7409051060676575, + "learning_rate": 0.00012424675341390272, + "loss": 0.3365, + "step": 29430 + }, + { + "epoch": 1.1367234256148886, + "grad_norm": 0.6730226874351501, + "learning_rate": 0.00012422101239430096, + "loss": 0.1508, + "step": 29440 + }, + { + "epoch": 1.1371095409089154, + "grad_norm": 2.1389102935791016, + "learning_rate": 0.00012419527137469915, + "loss": 0.3832, + "step": 29450 + }, + { + "epoch": 1.1374956562029421, + "grad_norm": 0.5423761606216431, + "learning_rate": 0.0001241695303550974, + "loss": 0.2942, + "step": 29460 + }, + { + "epoch": 1.1378817714969691, + "grad_norm": 2.6076724529266357, + "learning_rate": 0.00012414378933549557, + "loss": 0.2291, + "step": 29470 + }, + { + "epoch": 1.1382678867909959, + "grad_norm": 1.0197224617004395, + "learning_rate": 0.0001241180483158938, + "loss": 0.2309, + "step": 29480 + }, + { + "epoch": 1.1386540020850227, + "grad_norm": 1.4430413246154785, + "learning_rate": 0.00012409230729629203, + "loss": 0.258, + "step": 29490 + }, + { + "epoch": 1.1390401173790494, + "grad_norm": 1.43483304977417, + "learning_rate": 0.0001240665662766902, + "loss": 0.1931, + "step": 29500 + }, + { + "epoch": 1.1394262326730762, + "grad_norm": 0.7181301116943359, + "learning_rate": 0.00012404082525708845, + "loss": 0.2675, + "step": 29510 + }, + { + "epoch": 1.139812347967103, + "grad_norm": 2.2020421028137207, + "learning_rate": 0.00012401508423748664, + "loss": 0.5094, + "step": 29520 + }, + { + "epoch": 1.1401984632611297, + "grad_norm": 0.35750746726989746, + "learning_rate": 0.00012398934321788488, + "loss": 0.3825, + "step": 29530 + }, + { + "epoch": 1.1405845785551565, + "grad_norm": 2.1792123317718506, + "learning_rate": 0.0001239636021982831, + "loss": 0.4259, + "step": 29540 + }, + { + "epoch": 1.1409706938491833, + "grad_norm": 1.2699453830718994, + "learning_rate": 0.00012393786117868128, + "loss": 0.2524, + "step": 29550 + }, + { + "epoch": 1.1413568091432102, + "grad_norm": 4.232237339019775, + "learning_rate": 0.00012391212015907952, + "loss": 0.4191, + "step": 29560 + }, + { + "epoch": 1.141742924437237, + "grad_norm": 1.5009098052978516, + "learning_rate": 0.0001238863791394777, + "loss": 0.5748, + "step": 29570 + }, + { + "epoch": 1.1421290397312638, + "grad_norm": 0.8117336630821228, + "learning_rate": 0.00012386063811987595, + "loss": 0.2309, + "step": 29580 + }, + { + "epoch": 1.1425151550252906, + "grad_norm": 0.6417378187179565, + "learning_rate": 0.00012383489710027413, + "loss": 0.3011, + "step": 29590 + }, + { + "epoch": 1.1429012703193173, + "grad_norm": 0.19958554208278656, + "learning_rate": 0.00012380915608067237, + "loss": 0.4943, + "step": 29600 + 
}, + { + "epoch": 1.143287385613344, + "grad_norm": 1.7980111837387085, + "learning_rate": 0.0001237834150610706, + "loss": 0.2498, + "step": 29610 + }, + { + "epoch": 1.1436735009073709, + "grad_norm": 0.7506774663925171, + "learning_rate": 0.00012375767404146877, + "loss": 0.1622, + "step": 29620 + }, + { + "epoch": 1.1440596162013978, + "grad_norm": 0.4346953332424164, + "learning_rate": 0.000123731933021867, + "loss": 0.2608, + "step": 29630 + }, + { + "epoch": 1.1444457314954246, + "grad_norm": 1.4491907358169556, + "learning_rate": 0.0001237061920022652, + "loss": 0.4674, + "step": 29640 + }, + { + "epoch": 1.1448318467894514, + "grad_norm": 0.26328304409980774, + "learning_rate": 0.00012368045098266344, + "loss": 0.4287, + "step": 29650 + }, + { + "epoch": 1.1452179620834781, + "grad_norm": 0.6601302027702332, + "learning_rate": 0.00012365470996306163, + "loss": 0.3235, + "step": 29660 + }, + { + "epoch": 1.145604077377505, + "grad_norm": 3.9714503288269043, + "learning_rate": 0.00012362896894345987, + "loss": 0.2969, + "step": 29670 + }, + { + "epoch": 1.1459901926715317, + "grad_norm": 2.6341910362243652, + "learning_rate": 0.00012360322792385808, + "loss": 0.2771, + "step": 29680 + }, + { + "epoch": 1.1463763079655584, + "grad_norm": 0.04610513150691986, + "learning_rate": 0.00012357748690425627, + "loss": 0.2286, + "step": 29690 + }, + { + "epoch": 1.1467624232595852, + "grad_norm": 0.18892113864421844, + "learning_rate": 0.0001235517458846545, + "loss": 0.1821, + "step": 29700 + }, + { + "epoch": 1.147148538553612, + "grad_norm": 2.186973810195923, + "learning_rate": 0.0001235260048650527, + "loss": 0.2582, + "step": 29710 + }, + { + "epoch": 1.147534653847639, + "grad_norm": 0.23074299097061157, + "learning_rate": 0.00012350026384545093, + "loss": 0.3134, + "step": 29720 + }, + { + "epoch": 1.1479207691416657, + "grad_norm": 4.021410942077637, + "learning_rate": 0.00012347452282584915, + "loss": 0.3161, + "step": 29730 + }, + { + "epoch": 1.1483068844356925, + "grad_norm": 1.0262142419815063, + "learning_rate": 0.00012344878180624736, + "loss": 0.2034, + "step": 29740 + }, + { + "epoch": 1.1486929997297193, + "grad_norm": 1.4075149297714233, + "learning_rate": 0.00012342304078664557, + "loss": 0.2799, + "step": 29750 + }, + { + "epoch": 1.149079115023746, + "grad_norm": 0.9609633684158325, + "learning_rate": 0.00012339729976704376, + "loss": 0.1358, + "step": 29760 + }, + { + "epoch": 1.1494652303177728, + "grad_norm": 2.0923006534576416, + "learning_rate": 0.000123371558747442, + "loss": 0.224, + "step": 29770 + }, + { + "epoch": 1.1498513456117996, + "grad_norm": 0.4723201096057892, + "learning_rate": 0.00012334581772784019, + "loss": 0.1231, + "step": 29780 + }, + { + "epoch": 1.1502374609058266, + "grad_norm": 0.2070016860961914, + "learning_rate": 0.00012332007670823843, + "loss": 0.2998, + "step": 29790 + }, + { + "epoch": 1.1506235761998533, + "grad_norm": 0.3871285021305084, + "learning_rate": 0.00012329433568863664, + "loss": 0.2776, + "step": 29800 + }, + { + "epoch": 1.15100969149388, + "grad_norm": 0.6765030026435852, + "learning_rate": 0.00012326859466903485, + "loss": 0.6047, + "step": 29810 + }, + { + "epoch": 1.1513958067879069, + "grad_norm": 0.22907795011997223, + "learning_rate": 0.00012324285364943307, + "loss": 0.1573, + "step": 29820 + }, + { + "epoch": 1.1517819220819336, + "grad_norm": 0.3838706612586975, + "learning_rate": 0.00012321711262983128, + "loss": 0.1877, + "step": 29830 + }, + { + "epoch": 1.1521680373759604, + "grad_norm": 
1.6967968940734863, + "learning_rate": 0.0001231913716102295, + "loss": 0.2314, + "step": 29840 + }, + { + "epoch": 1.1525541526699872, + "grad_norm": 0.6754477024078369, + "learning_rate": 0.00012316563059062768, + "loss": 0.1997, + "step": 29850 + }, + { + "epoch": 1.1529402679640142, + "grad_norm": 0.8980739712715149, + "learning_rate": 0.00012313988957102592, + "loss": 0.2729, + "step": 29860 + }, + { + "epoch": 1.153326383258041, + "grad_norm": 2.7968523502349854, + "learning_rate": 0.00012311414855142413, + "loss": 0.2805, + "step": 29870 + }, + { + "epoch": 1.1537124985520677, + "grad_norm": 1.5867468118667603, + "learning_rate": 0.00012308840753182235, + "loss": 0.3489, + "step": 29880 + }, + { + "epoch": 1.1540986138460945, + "grad_norm": 3.077193021774292, + "learning_rate": 0.00012306266651222056, + "loss": 0.3397, + "step": 29890 + }, + { + "epoch": 1.1544847291401212, + "grad_norm": 0.49896860122680664, + "learning_rate": 0.00012303692549261877, + "loss": 0.1215, + "step": 29900 + }, + { + "epoch": 1.154870844434148, + "grad_norm": 0.9598873257637024, + "learning_rate": 0.00012301118447301699, + "loss": 0.1618, + "step": 29910 + }, + { + "epoch": 1.1552569597281748, + "grad_norm": 1.2659916877746582, + "learning_rate": 0.0001229854434534152, + "loss": 0.1623, + "step": 29920 + }, + { + "epoch": 1.1556430750222015, + "grad_norm": 1.4009841680526733, + "learning_rate": 0.0001229597024338134, + "loss": 0.2205, + "step": 29930 + }, + { + "epoch": 1.1560291903162283, + "grad_norm": 0.5847800970077515, + "learning_rate": 0.00012293396141421163, + "loss": 0.3173, + "step": 29940 + }, + { + "epoch": 1.1564153056102553, + "grad_norm": 1.2930784225463867, + "learning_rate": 0.00012290822039460984, + "loss": 0.3052, + "step": 29950 + }, + { + "epoch": 1.156801420904282, + "grad_norm": 0.7307919859886169, + "learning_rate": 0.00012288247937500805, + "loss": 0.2293, + "step": 29960 + }, + { + "epoch": 1.1571875361983088, + "grad_norm": 0.2120385468006134, + "learning_rate": 0.00012285673835540627, + "loss": 0.2006, + "step": 29970 + }, + { + "epoch": 1.1575736514923356, + "grad_norm": 1.0932674407958984, + "learning_rate": 0.00012283099733580448, + "loss": 0.1716, + "step": 29980 + }, + { + "epoch": 1.1579597667863624, + "grad_norm": 0.7607210874557495, + "learning_rate": 0.0001228052563162027, + "loss": 0.346, + "step": 29990 + }, + { + "epoch": 1.1583458820803891, + "grad_norm": 4.455061435699463, + "learning_rate": 0.0001227795152966009, + "loss": 0.2588, + "step": 30000 + }, + { + "epoch": 1.158731997374416, + "grad_norm": 2.0812416076660156, + "learning_rate": 0.00012275377427699912, + "loss": 0.2958, + "step": 30010 + }, + { + "epoch": 1.159118112668443, + "grad_norm": 1.8883334398269653, + "learning_rate": 0.00012272803325739733, + "loss": 0.1687, + "step": 30020 + }, + { + "epoch": 1.1595042279624697, + "grad_norm": 1.3935341835021973, + "learning_rate": 0.00012270229223779555, + "loss": 0.2276, + "step": 30030 + }, + { + "epoch": 1.1598903432564964, + "grad_norm": 2.5193631649017334, + "learning_rate": 0.00012267655121819376, + "loss": 0.3598, + "step": 30040 + }, + { + "epoch": 1.1602764585505232, + "grad_norm": 1.9172290563583374, + "learning_rate": 0.00012265081019859197, + "loss": 0.3294, + "step": 30050 + }, + { + "epoch": 1.16066257384455, + "grad_norm": 1.2220836877822876, + "learning_rate": 0.00012262506917899019, + "loss": 0.2053, + "step": 30060 + }, + { + "epoch": 1.1610486891385767, + "grad_norm": 1.9804691076278687, + "learning_rate": 0.0001225993281593884, + "loss": 
0.2091, + "step": 30070 + }, + { + "epoch": 1.1614348044326035, + "grad_norm": 1.2194398641586304, + "learning_rate": 0.0001225735871397866, + "loss": 0.2535, + "step": 30080 + }, + { + "epoch": 1.1618209197266305, + "grad_norm": 0.7828000783920288, + "learning_rate": 0.00012254784612018483, + "loss": 0.436, + "step": 30090 + }, + { + "epoch": 1.1622070350206573, + "grad_norm": 0.26130637526512146, + "learning_rate": 0.00012252210510058304, + "loss": 0.3351, + "step": 30100 + }, + { + "epoch": 1.162593150314684, + "grad_norm": 0.9175068736076355, + "learning_rate": 0.00012249636408098125, + "loss": 0.0682, + "step": 30110 + }, + { + "epoch": 1.1629792656087108, + "grad_norm": 0.35828933119773865, + "learning_rate": 0.00012247062306137947, + "loss": 0.3081, + "step": 30120 + }, + { + "epoch": 1.1633653809027376, + "grad_norm": 1.2477439641952515, + "learning_rate": 0.00012244488204177768, + "loss": 0.2522, + "step": 30130 + }, + { + "epoch": 1.1637514961967643, + "grad_norm": 1.0479830503463745, + "learning_rate": 0.0001224191410221759, + "loss": 0.3183, + "step": 30140 + }, + { + "epoch": 1.164137611490791, + "grad_norm": 0.9295257329940796, + "learning_rate": 0.0001223934000025741, + "loss": 0.149, + "step": 30150 + }, + { + "epoch": 1.1645237267848179, + "grad_norm": 1.4081065654754639, + "learning_rate": 0.00012236765898297232, + "loss": 0.2403, + "step": 30160 + }, + { + "epoch": 1.1649098420788446, + "grad_norm": 0.6170324087142944, + "learning_rate": 0.00012234191796337053, + "loss": 0.3526, + "step": 30170 + }, + { + "epoch": 1.1652959573728716, + "grad_norm": 3.095670461654663, + "learning_rate": 0.00012231617694376875, + "loss": 0.2873, + "step": 30180 + }, + { + "epoch": 1.1656820726668984, + "grad_norm": 1.453447937965393, + "learning_rate": 0.00012229043592416696, + "loss": 0.2753, + "step": 30190 + }, + { + "epoch": 1.1660681879609252, + "grad_norm": 1.008033275604248, + "learning_rate": 0.00012226469490456517, + "loss": 0.2926, + "step": 30200 + }, + { + "epoch": 1.166454303254952, + "grad_norm": 2.122175693511963, + "learning_rate": 0.00012223895388496339, + "loss": 0.2432, + "step": 30210 + }, + { + "epoch": 1.1668404185489787, + "grad_norm": 1.4835058450698853, + "learning_rate": 0.0001222132128653616, + "loss": 0.2891, + "step": 30220 + }, + { + "epoch": 1.1672265338430055, + "grad_norm": 1.7386225461959839, + "learning_rate": 0.00012218747184575984, + "loss": 0.2371, + "step": 30230 + }, + { + "epoch": 1.1676126491370322, + "grad_norm": 2.1769731044769287, + "learning_rate": 0.00012216173082615803, + "loss": 0.2798, + "step": 30240 + }, + { + "epoch": 1.1679987644310592, + "grad_norm": 3.3941900730133057, + "learning_rate": 0.00012213598980655624, + "loss": 0.4021, + "step": 30250 + }, + { + "epoch": 1.168384879725086, + "grad_norm": 1.099238395690918, + "learning_rate": 0.00012211024878695445, + "loss": 0.2908, + "step": 30260 + }, + { + "epoch": 1.1687709950191127, + "grad_norm": 0.6718109250068665, + "learning_rate": 0.00012208450776735267, + "loss": 0.1791, + "step": 30270 + }, + { + "epoch": 1.1691571103131395, + "grad_norm": 0.25414201617240906, + "learning_rate": 0.00012205876674775088, + "loss": 0.1322, + "step": 30280 + }, + { + "epoch": 1.1695432256071663, + "grad_norm": 2.1115262508392334, + "learning_rate": 0.00012203302572814909, + "loss": 0.3538, + "step": 30290 + }, + { + "epoch": 1.169929340901193, + "grad_norm": 2.3652501106262207, + "learning_rate": 0.00012200728470854732, + "loss": 0.3229, + "step": 30300 + }, + { + "epoch": 1.1703154561952198, + 
"grad_norm": 1.4749270677566528, + "learning_rate": 0.00012198154368894552, + "loss": 0.1251, + "step": 30310 + }, + { + "epoch": 1.1707015714892468, + "grad_norm": 0.6566292643547058, + "learning_rate": 0.00012195580266934375, + "loss": 0.2527, + "step": 30320 + }, + { + "epoch": 1.1710876867832736, + "grad_norm": 1.9602152109146118, + "learning_rate": 0.00012193006164974195, + "loss": 0.1851, + "step": 30330 + }, + { + "epoch": 1.1714738020773003, + "grad_norm": 1.6631299257278442, + "learning_rate": 0.00012190432063014017, + "loss": 0.4715, + "step": 30340 + }, + { + "epoch": 1.171859917371327, + "grad_norm": 1.1554430723190308, + "learning_rate": 0.00012187857961053839, + "loss": 0.361, + "step": 30350 + }, + { + "epoch": 1.1722460326653539, + "grad_norm": 2.5738513469696045, + "learning_rate": 0.00012185283859093659, + "loss": 0.3661, + "step": 30360 + }, + { + "epoch": 1.1726321479593806, + "grad_norm": 0.5713154077529907, + "learning_rate": 0.00012182709757133481, + "loss": 0.2468, + "step": 30370 + }, + { + "epoch": 1.1730182632534074, + "grad_norm": 0.7371454834938049, + "learning_rate": 0.00012180135655173301, + "loss": 0.2949, + "step": 30380 + }, + { + "epoch": 1.1734043785474342, + "grad_norm": 2.5442118644714355, + "learning_rate": 0.00012177561553213124, + "loss": 0.2305, + "step": 30390 + }, + { + "epoch": 1.173790493841461, + "grad_norm": 1.684951663017273, + "learning_rate": 0.00012174987451252944, + "loss": 0.2535, + "step": 30400 + }, + { + "epoch": 1.174176609135488, + "grad_norm": 1.6874382495880127, + "learning_rate": 0.00012172413349292767, + "loss": 0.4069, + "step": 30410 + }, + { + "epoch": 1.1745627244295147, + "grad_norm": 0.46226370334625244, + "learning_rate": 0.00012169839247332588, + "loss": 0.221, + "step": 30420 + }, + { + "epoch": 1.1749488397235415, + "grad_norm": 3.5687646865844727, + "learning_rate": 0.00012167265145372408, + "loss": 0.2749, + "step": 30430 + }, + { + "epoch": 1.1753349550175682, + "grad_norm": 2.9427647590637207, + "learning_rate": 0.0001216469104341223, + "loss": 0.2621, + "step": 30440 + }, + { + "epoch": 1.175721070311595, + "grad_norm": 1.3569320440292358, + "learning_rate": 0.0001216211694145205, + "loss": 0.1711, + "step": 30450 + }, + { + "epoch": 1.1761071856056218, + "grad_norm": 0.5906672477722168, + "learning_rate": 0.00012159542839491873, + "loss": 0.3111, + "step": 30460 + }, + { + "epoch": 1.1764933008996485, + "grad_norm": 1.7809525728225708, + "learning_rate": 0.00012156968737531693, + "loss": 0.198, + "step": 30470 + }, + { + "epoch": 1.1768794161936755, + "grad_norm": 1.5865052938461304, + "learning_rate": 0.00012154394635571516, + "loss": 0.3589, + "step": 30480 + }, + { + "epoch": 1.1772655314877023, + "grad_norm": 0.4852294623851776, + "learning_rate": 0.00012151820533611337, + "loss": 0.203, + "step": 30490 + }, + { + "epoch": 1.177651646781729, + "grad_norm": 4.437458515167236, + "learning_rate": 0.00012149246431651158, + "loss": 0.2886, + "step": 30500 + }, + { + "epoch": 1.1780377620757558, + "grad_norm": 2.204751491546631, + "learning_rate": 0.0001214667232969098, + "loss": 0.313, + "step": 30510 + }, + { + "epoch": 1.1784238773697826, + "grad_norm": 0.9356504678726196, + "learning_rate": 0.000121440982277308, + "loss": 0.2813, + "step": 30520 + }, + { + "epoch": 1.1788099926638094, + "grad_norm": 0.06744952499866486, + "learning_rate": 0.00012141524125770622, + "loss": 0.2009, + "step": 30530 + }, + { + "epoch": 1.1791961079578361, + "grad_norm": 0.49779242277145386, + "learning_rate": 
0.00012138950023810445, + "loss": 0.1822, + "step": 30540 + }, + { + "epoch": 1.1795822232518631, + "grad_norm": 1.1115593910217285, + "learning_rate": 0.00012136375921850265, + "loss": 0.4164, + "step": 30550 + }, + { + "epoch": 1.17996833854589, + "grad_norm": 0.2939944267272949, + "learning_rate": 0.00012133801819890086, + "loss": 0.1356, + "step": 30560 + }, + { + "epoch": 1.1803544538399167, + "grad_norm": 0.15905381739139557, + "learning_rate": 0.00012131227717929908, + "loss": 0.3479, + "step": 30570 + }, + { + "epoch": 1.1807405691339434, + "grad_norm": 3.146277666091919, + "learning_rate": 0.00012128653615969729, + "loss": 0.2722, + "step": 30580 + }, + { + "epoch": 1.1811266844279702, + "grad_norm": 0.60884690284729, + "learning_rate": 0.00012126079514009549, + "loss": 0.1778, + "step": 30590 + }, + { + "epoch": 1.181512799721997, + "grad_norm": 0.053204573690891266, + "learning_rate": 0.00012123505412049372, + "loss": 0.1397, + "step": 30600 + }, + { + "epoch": 1.1818989150160237, + "grad_norm": 1.2908227443695068, + "learning_rate": 0.00012120931310089194, + "loss": 0.1552, + "step": 30610 + }, + { + "epoch": 1.1822850303100505, + "grad_norm": 1.489575743675232, + "learning_rate": 0.00012118357208129014, + "loss": 0.2965, + "step": 30620 + }, + { + "epoch": 1.1826711456040773, + "grad_norm": 2.4103262424468994, + "learning_rate": 0.00012115783106168837, + "loss": 0.2947, + "step": 30630 + }, + { + "epoch": 1.1830572608981043, + "grad_norm": 0.3685878813266754, + "learning_rate": 0.00012113209004208657, + "loss": 0.2144, + "step": 30640 + }, + { + "epoch": 1.183443376192131, + "grad_norm": 0.29953858256340027, + "learning_rate": 0.00012110634902248478, + "loss": 0.2281, + "step": 30650 + }, + { + "epoch": 1.1838294914861578, + "grad_norm": 0.4792311191558838, + "learning_rate": 0.00012108060800288298, + "loss": 0.2727, + "step": 30660 + }, + { + "epoch": 1.1842156067801846, + "grad_norm": 0.17143972218036652, + "learning_rate": 0.00012105486698328121, + "loss": 0.2096, + "step": 30670 + }, + { + "epoch": 1.1846017220742113, + "grad_norm": 0.43678683042526245, + "learning_rate": 0.00012102912596367944, + "loss": 0.3818, + "step": 30680 + }, + { + "epoch": 1.184987837368238, + "grad_norm": 2.0908610820770264, + "learning_rate": 0.00012100338494407764, + "loss": 0.2496, + "step": 30690 + }, + { + "epoch": 1.1853739526622649, + "grad_norm": 1.5331153869628906, + "learning_rate": 0.00012097764392447586, + "loss": 0.2772, + "step": 30700 + }, + { + "epoch": 1.1857600679562919, + "grad_norm": 0.6948639154434204, + "learning_rate": 0.00012095190290487406, + "loss": 0.3896, + "step": 30710 + }, + { + "epoch": 1.1861461832503186, + "grad_norm": 2.3802030086517334, + "learning_rate": 0.00012092616188527228, + "loss": 0.1998, + "step": 30720 + }, + { + "epoch": 1.1865322985443454, + "grad_norm": 2.1358511447906494, + "learning_rate": 0.0001209004208656705, + "loss": 0.2865, + "step": 30730 + }, + { + "epoch": 1.1869184138383722, + "grad_norm": 0.8761110305786133, + "learning_rate": 0.0001208746798460687, + "loss": 0.2574, + "step": 30740 + }, + { + "epoch": 1.187304529132399, + "grad_norm": 0.6293401718139648, + "learning_rate": 0.00012084893882646693, + "loss": 0.4006, + "step": 30750 + }, + { + "epoch": 1.1876906444264257, + "grad_norm": 2.4534378051757812, + "learning_rate": 0.00012082319780686513, + "loss": 0.2281, + "step": 30760 + }, + { + "epoch": 1.1880767597204525, + "grad_norm": 0.38392508029937744, + "learning_rate": 0.00012079745678726336, + "loss": 0.3055, + "step": 30770 + }, 
+ { + "epoch": 1.1884628750144794, + "grad_norm": 1.278108835220337, + "learning_rate": 0.00012077171576766156, + "loss": 0.2376, + "step": 30780 + }, + { + "epoch": 1.1888489903085062, + "grad_norm": 0.2575186789035797, + "learning_rate": 0.00012074597474805977, + "loss": 0.256, + "step": 30790 + }, + { + "epoch": 1.189235105602533, + "grad_norm": 4.043684959411621, + "learning_rate": 0.000120720233728458, + "loss": 0.3601, + "step": 30800 + }, + { + "epoch": 1.1896212208965598, + "grad_norm": 0.2561960220336914, + "learning_rate": 0.0001206944927088562, + "loss": 0.2698, + "step": 30810 + }, + { + "epoch": 1.1900073361905865, + "grad_norm": 0.3477579355239868, + "learning_rate": 0.00012066875168925442, + "loss": 0.3442, + "step": 30820 + }, + { + "epoch": 1.1903934514846133, + "grad_norm": 1.0357879400253296, + "learning_rate": 0.00012064301066965262, + "loss": 0.2561, + "step": 30830 + }, + { + "epoch": 1.19077956677864, + "grad_norm": 0.14653460681438446, + "learning_rate": 0.00012061726965005085, + "loss": 0.3072, + "step": 30840 + }, + { + "epoch": 1.1911656820726668, + "grad_norm": 1.2516767978668213, + "learning_rate": 0.00012059152863044906, + "loss": 0.4203, + "step": 30850 + }, + { + "epoch": 1.1915517973666936, + "grad_norm": 0.41542065143585205, + "learning_rate": 0.00012056578761084726, + "loss": 0.2018, + "step": 30860 + }, + { + "epoch": 1.1919379126607206, + "grad_norm": 1.7187318801879883, + "learning_rate": 0.00012054004659124549, + "loss": 0.097, + "step": 30870 + }, + { + "epoch": 1.1923240279547473, + "grad_norm": 0.3913117051124573, + "learning_rate": 0.00012051430557164369, + "loss": 0.1655, + "step": 30880 + }, + { + "epoch": 1.1927101432487741, + "grad_norm": 3.708519697189331, + "learning_rate": 0.00012048856455204192, + "loss": 0.2315, + "step": 30890 + }, + { + "epoch": 1.1930962585428009, + "grad_norm": 1.506726861000061, + "learning_rate": 0.00012046282353244012, + "loss": 0.1712, + "step": 30900 + }, + { + "epoch": 1.1934823738368276, + "grad_norm": 1.5069276094436646, + "learning_rate": 0.00012043708251283834, + "loss": 0.399, + "step": 30910 + }, + { + "epoch": 1.1938684891308544, + "grad_norm": 0.09606973081827164, + "learning_rate": 0.00012041134149323656, + "loss": 0.1226, + "step": 30920 + }, + { + "epoch": 1.1942546044248812, + "grad_norm": 1.7927659749984741, + "learning_rate": 0.00012038560047363476, + "loss": 0.0975, + "step": 30930 + }, + { + "epoch": 1.1946407197189082, + "grad_norm": 2.2988364696502686, + "learning_rate": 0.00012035985945403298, + "loss": 0.2742, + "step": 30940 + }, + { + "epoch": 1.195026835012935, + "grad_norm": 0.4412599205970764, + "learning_rate": 0.00012033411843443118, + "loss": 0.2008, + "step": 30950 + }, + { + "epoch": 1.1954129503069617, + "grad_norm": 3.227698564529419, + "learning_rate": 0.00012030837741482941, + "loss": 0.2797, + "step": 30960 + }, + { + "epoch": 1.1957990656009885, + "grad_norm": 1.3904643058776855, + "learning_rate": 0.00012028263639522761, + "loss": 0.1636, + "step": 30970 + }, + { + "epoch": 1.1961851808950152, + "grad_norm": 1.6495708227157593, + "learning_rate": 0.00012025689537562584, + "loss": 0.2768, + "step": 30980 + }, + { + "epoch": 1.196571296189042, + "grad_norm": 0.3142000436782837, + "learning_rate": 0.00012023115435602405, + "loss": 0.1797, + "step": 30990 + }, + { + "epoch": 1.1969574114830688, + "grad_norm": 1.562090516090393, + "learning_rate": 0.00012020541333642225, + "loss": 0.3175, + "step": 31000 + }, + { + "epoch": 1.1973435267770955, + "grad_norm": 1.3837881088256836, + 
"learning_rate": 0.00012017967231682048, + "loss": 0.3137, + "step": 31010 + }, + { + "epoch": 1.1977296420711223, + "grad_norm": 1.1495468616485596, + "learning_rate": 0.00012015393129721868, + "loss": 0.232, + "step": 31020 + }, + { + "epoch": 1.1981157573651493, + "grad_norm": 1.562514305114746, + "learning_rate": 0.0001201281902776169, + "loss": 0.3455, + "step": 31030 + }, + { + "epoch": 1.198501872659176, + "grad_norm": 1.626610517501831, + "learning_rate": 0.00012010244925801513, + "loss": 0.2382, + "step": 31040 + }, + { + "epoch": 1.1988879879532028, + "grad_norm": 2.5863585472106934, + "learning_rate": 0.00012007670823841333, + "loss": 0.288, + "step": 31050 + }, + { + "epoch": 1.1992741032472296, + "grad_norm": 0.9751160740852356, + "learning_rate": 0.00012005096721881154, + "loss": 0.3236, + "step": 31060 + }, + { + "epoch": 1.1996602185412564, + "grad_norm": 1.7428686618804932, + "learning_rate": 0.00012002522619920974, + "loss": 0.3707, + "step": 31070 + }, + { + "epoch": 1.2000463338352831, + "grad_norm": 1.0963342189788818, + "learning_rate": 0.00011999948517960797, + "loss": 0.2991, + "step": 31080 + }, + { + "epoch": 1.20043244912931, + "grad_norm": 2.0942695140838623, + "learning_rate": 0.00011997374416000617, + "loss": 0.2611, + "step": 31090 + }, + { + "epoch": 1.200818564423337, + "grad_norm": 0.11989244073629379, + "learning_rate": 0.0001199480031404044, + "loss": 0.2071, + "step": 31100 + }, + { + "epoch": 1.2012046797173637, + "grad_norm": 0.44892773032188416, + "learning_rate": 0.00011992226212080262, + "loss": 0.2105, + "step": 31110 + }, + { + "epoch": 1.2015907950113904, + "grad_norm": 0.0861232578754425, + "learning_rate": 0.00011989652110120082, + "loss": 0.1906, + "step": 31120 + }, + { + "epoch": 1.2019769103054172, + "grad_norm": 1.9162683486938477, + "learning_rate": 0.00011987078008159904, + "loss": 0.1929, + "step": 31130 + }, + { + "epoch": 1.202363025599444, + "grad_norm": 1.3750224113464355, + "learning_rate": 0.00011984503906199725, + "loss": 0.3072, + "step": 31140 + }, + { + "epoch": 1.2027491408934707, + "grad_norm": 0.49282577633857727, + "learning_rate": 0.00011981929804239546, + "loss": 0.3082, + "step": 31150 + }, + { + "epoch": 1.2031352561874975, + "grad_norm": 1.0578473806381226, + "learning_rate": 0.00011979355702279366, + "loss": 0.2498, + "step": 31160 + }, + { + "epoch": 1.2035213714815245, + "grad_norm": 0.6070756316184998, + "learning_rate": 0.00011976781600319189, + "loss": 0.2324, + "step": 31170 + }, + { + "epoch": 1.2039074867755513, + "grad_norm": 3.2918875217437744, + "learning_rate": 0.00011974207498359012, + "loss": 0.4218, + "step": 31180 + }, + { + "epoch": 1.204293602069578, + "grad_norm": 0.3284684419631958, + "learning_rate": 0.00011971633396398832, + "loss": 0.2713, + "step": 31190 + }, + { + "epoch": 1.2046797173636048, + "grad_norm": 0.8733110427856445, + "learning_rate": 0.00011969059294438653, + "loss": 0.328, + "step": 31200 + }, + { + "epoch": 1.2050658326576316, + "grad_norm": 1.787016749382019, + "learning_rate": 0.00011966485192478474, + "loss": 0.2115, + "step": 31210 + }, + { + "epoch": 1.2054519479516583, + "grad_norm": 0.7539357542991638, + "learning_rate": 0.00011963911090518296, + "loss": 0.3585, + "step": 31220 + }, + { + "epoch": 1.205838063245685, + "grad_norm": 0.32271450757980347, + "learning_rate": 0.00011961336988558118, + "loss": 0.2472, + "step": 31230 + }, + { + "epoch": 1.2062241785397119, + "grad_norm": 0.85898756980896, + "learning_rate": 0.00011958762886597938, + "loss": 0.2367, + "step": 
31240 + }, + { + "epoch": 1.2066102938337386, + "grad_norm": 0.6451173424720764, + "learning_rate": 0.00011956188784637761, + "loss": 0.2309, + "step": 31250 + }, + { + "epoch": 1.2069964091277656, + "grad_norm": 2.1043314933776855, + "learning_rate": 0.00011953614682677581, + "loss": 0.316, + "step": 31260 + }, + { + "epoch": 1.2073825244217924, + "grad_norm": 2.9022254943847656, + "learning_rate": 0.00011951040580717404, + "loss": 0.3161, + "step": 31270 + }, + { + "epoch": 1.2077686397158192, + "grad_norm": 0.7839256525039673, + "learning_rate": 0.00011948466478757224, + "loss": 0.4014, + "step": 31280 + }, + { + "epoch": 1.208154755009846, + "grad_norm": 1.024190902709961, + "learning_rate": 0.00011945892376797045, + "loss": 0.2333, + "step": 31290 + }, + { + "epoch": 1.2085408703038727, + "grad_norm": 1.433605670928955, + "learning_rate": 0.00011943318274836868, + "loss": 0.2014, + "step": 31300 + }, + { + "epoch": 1.2089269855978995, + "grad_norm": 0.7208861112594604, + "learning_rate": 0.00011940744172876688, + "loss": 0.1786, + "step": 31310 + }, + { + "epoch": 1.2093131008919262, + "grad_norm": 2.5464839935302734, + "learning_rate": 0.0001193817007091651, + "loss": 0.3646, + "step": 31320 + }, + { + "epoch": 1.2096992161859532, + "grad_norm": 1.6425135135650635, + "learning_rate": 0.0001193559596895633, + "loss": 0.2603, + "step": 31330 + }, + { + "epoch": 1.21008533147998, + "grad_norm": 1.7573461532592773, + "learning_rate": 0.00011933021866996153, + "loss": 0.315, + "step": 31340 + }, + { + "epoch": 1.2104714467740068, + "grad_norm": 1.884445071220398, + "learning_rate": 0.00011930447765035974, + "loss": 0.2326, + "step": 31350 + }, + { + "epoch": 1.2108575620680335, + "grad_norm": 1.2781214714050293, + "learning_rate": 0.00011927873663075794, + "loss": 0.2248, + "step": 31360 + }, + { + "epoch": 1.2112436773620603, + "grad_norm": 0.5210689902305603, + "learning_rate": 0.00011925299561115617, + "loss": 0.2517, + "step": 31370 + }, + { + "epoch": 1.211629792656087, + "grad_norm": 0.4265996515750885, + "learning_rate": 0.00011922725459155437, + "loss": 0.257, + "step": 31380 + }, + { + "epoch": 1.2120159079501138, + "grad_norm": 0.7833511233329773, + "learning_rate": 0.0001192015135719526, + "loss": 0.3117, + "step": 31390 + }, + { + "epoch": 1.2124020232441408, + "grad_norm": 5.922067165374756, + "learning_rate": 0.0001191757725523508, + "loss": 0.2949, + "step": 31400 + }, + { + "epoch": 1.2127881385381676, + "grad_norm": 3.2208797931671143, + "learning_rate": 0.00011915003153274902, + "loss": 0.3616, + "step": 31410 + }, + { + "epoch": 1.2131742538321943, + "grad_norm": 1.6283681392669678, + "learning_rate": 0.00011912429051314724, + "loss": 0.1951, + "step": 31420 + }, + { + "epoch": 1.2135603691262211, + "grad_norm": 1.2450298070907593, + "learning_rate": 0.00011909854949354544, + "loss": 0.2313, + "step": 31430 + }, + { + "epoch": 1.2139464844202479, + "grad_norm": 0.8394368290901184, + "learning_rate": 0.00011907280847394366, + "loss": 0.2993, + "step": 31440 + }, + { + "epoch": 1.2143325997142747, + "grad_norm": 0.6099762320518494, + "learning_rate": 0.00011904706745434186, + "loss": 0.1527, + "step": 31450 + }, + { + "epoch": 1.2147187150083014, + "grad_norm": 0.7743226885795593, + "learning_rate": 0.00011902132643474009, + "loss": 0.2608, + "step": 31460 + }, + { + "epoch": 1.2151048303023282, + "grad_norm": 3.343604326248169, + "learning_rate": 0.00011899558541513829, + "loss": 0.3031, + "step": 31470 + }, + { + "epoch": 1.215490945596355, + "grad_norm": 
1.016830563545227, + "learning_rate": 0.00011896984439553652, + "loss": 0.2146, + "step": 31480 + }, + { + "epoch": 1.215877060890382, + "grad_norm": 1.0264688730239868, + "learning_rate": 0.00011894410337593473, + "loss": 0.2905, + "step": 31490 + }, + { + "epoch": 1.2162631761844087, + "grad_norm": 1.5718705654144287, + "learning_rate": 0.00011891836235633293, + "loss": 0.2928, + "step": 31500 + }, + { + "epoch": 1.2166492914784355, + "grad_norm": 0.9886181950569153, + "learning_rate": 0.00011889262133673116, + "loss": 0.2129, + "step": 31510 + }, + { + "epoch": 1.2170354067724622, + "grad_norm": 0.6496497392654419, + "learning_rate": 0.00011886688031712936, + "loss": 0.2388, + "step": 31520 + }, + { + "epoch": 1.217421522066489, + "grad_norm": 2.0419809818267822, + "learning_rate": 0.00011884113929752758, + "loss": 0.1835, + "step": 31530 + }, + { + "epoch": 1.2178076373605158, + "grad_norm": 0.41370299458503723, + "learning_rate": 0.00011881539827792581, + "loss": 0.3099, + "step": 31540 + }, + { + "epoch": 1.2181937526545425, + "grad_norm": 0.6608594655990601, + "learning_rate": 0.00011878965725832401, + "loss": 0.4235, + "step": 31550 + }, + { + "epoch": 1.2185798679485695, + "grad_norm": 0.4206163287162781, + "learning_rate": 0.00011876391623872222, + "loss": 0.2384, + "step": 31560 + }, + { + "epoch": 1.2189659832425963, + "grad_norm": 0.2976207137107849, + "learning_rate": 0.00011873817521912042, + "loss": 0.4792, + "step": 31570 + }, + { + "epoch": 1.219352098536623, + "grad_norm": 2.237607002258301, + "learning_rate": 0.00011871243419951865, + "loss": 0.4694, + "step": 31580 + }, + { + "epoch": 1.2197382138306498, + "grad_norm": 2.1085827350616455, + "learning_rate": 0.00011868669317991685, + "loss": 0.3475, + "step": 31590 + }, + { + "epoch": 1.2201243291246766, + "grad_norm": 1.5770317316055298, + "learning_rate": 0.00011866095216031508, + "loss": 0.3676, + "step": 31600 + }, + { + "epoch": 1.2205104444187034, + "grad_norm": 1.7337925434112549, + "learning_rate": 0.0001186352111407133, + "loss": 0.2663, + "step": 31610 + }, + { + "epoch": 1.2208965597127301, + "grad_norm": 0.23846319317817688, + "learning_rate": 0.0001186094701211115, + "loss": 0.1916, + "step": 31620 + }, + { + "epoch": 1.2212826750067571, + "grad_norm": 1.1316229104995728, + "learning_rate": 0.00011858372910150972, + "loss": 0.1924, + "step": 31630 + }, + { + "epoch": 1.221668790300784, + "grad_norm": 0.7642451524734497, + "learning_rate": 0.00011855798808190792, + "loss": 0.3786, + "step": 31640 + }, + { + "epoch": 1.2220549055948107, + "grad_norm": 1.975182056427002, + "learning_rate": 0.00011853224706230614, + "loss": 0.2646, + "step": 31650 + }, + { + "epoch": 1.2224410208888374, + "grad_norm": 1.5172406435012817, + "learning_rate": 0.00011850650604270437, + "loss": 0.2824, + "step": 31660 + }, + { + "epoch": 1.2228271361828642, + "grad_norm": 1.5113869905471802, + "learning_rate": 0.00011848076502310257, + "loss": 0.2959, + "step": 31670 + }, + { + "epoch": 1.223213251476891, + "grad_norm": 2.380364418029785, + "learning_rate": 0.0001184550240035008, + "loss": 0.2425, + "step": 31680 + }, + { + "epoch": 1.2235993667709177, + "grad_norm": 0.11588902771472931, + "learning_rate": 0.000118429282983899, + "loss": 0.1665, + "step": 31690 + }, + { + "epoch": 1.2239854820649445, + "grad_norm": 2.045466184616089, + "learning_rate": 0.00011840354196429721, + "loss": 0.2478, + "step": 31700 + }, + { + "epoch": 1.2243715973589713, + "grad_norm": 4.201779365539551, + "learning_rate": 0.00011837780094469542, + 
"loss": 0.3295, + "step": 31710 + }, + { + "epoch": 1.2247577126529983, + "grad_norm": 1.1080553531646729, + "learning_rate": 0.00011835205992509364, + "loss": 0.2102, + "step": 31720 + }, + { + "epoch": 1.225143827947025, + "grad_norm": 0.3263179659843445, + "learning_rate": 0.00011832631890549186, + "loss": 0.2822, + "step": 31730 + }, + { + "epoch": 1.2255299432410518, + "grad_norm": 0.7899855971336365, + "learning_rate": 0.00011830057788589006, + "loss": 0.1462, + "step": 31740 + }, + { + "epoch": 1.2259160585350786, + "grad_norm": 1.330854058265686, + "learning_rate": 0.00011827483686628829, + "loss": 0.3883, + "step": 31750 + }, + { + "epoch": 1.2263021738291053, + "grad_norm": 0.40446966886520386, + "learning_rate": 0.00011824909584668649, + "loss": 0.1592, + "step": 31760 + }, + { + "epoch": 1.226688289123132, + "grad_norm": 0.49187901616096497, + "learning_rate": 0.0001182233548270847, + "loss": 0.0698, + "step": 31770 + }, + { + "epoch": 1.2270744044171589, + "grad_norm": 4.227303504943848, + "learning_rate": 0.00011819761380748292, + "loss": 0.3945, + "step": 31780 + }, + { + "epoch": 1.2274605197111859, + "grad_norm": 2.520578622817993, + "learning_rate": 0.00011817187278788113, + "loss": 0.4552, + "step": 31790 + }, + { + "epoch": 1.2278466350052126, + "grad_norm": 0.9486772418022156, + "learning_rate": 0.00011814613176827936, + "loss": 0.232, + "step": 31800 + }, + { + "epoch": 1.2282327502992394, + "grad_norm": 0.8336694836616516, + "learning_rate": 0.00011812039074867756, + "loss": 0.183, + "step": 31810 + }, + { + "epoch": 1.2286188655932662, + "grad_norm": 1.576710820198059, + "learning_rate": 0.00011809464972907578, + "loss": 0.4116, + "step": 31820 + }, + { + "epoch": 1.229004980887293, + "grad_norm": 0.2388358861207962, + "learning_rate": 0.00011806890870947398, + "loss": 0.3509, + "step": 31830 + }, + { + "epoch": 1.2293910961813197, + "grad_norm": 1.054880142211914, + "learning_rate": 0.00011804316768987221, + "loss": 0.1669, + "step": 31840 + }, + { + "epoch": 1.2297772114753465, + "grad_norm": 0.7959414720535278, + "learning_rate": 0.00011801742667027042, + "loss": 0.2073, + "step": 31850 + }, + { + "epoch": 1.2301633267693735, + "grad_norm": 2.480940103530884, + "learning_rate": 0.00011799168565066862, + "loss": 0.3589, + "step": 31860 + }, + { + "epoch": 1.2305494420634002, + "grad_norm": 0.9381177425384521, + "learning_rate": 0.00011796594463106685, + "loss": 0.2576, + "step": 31870 + }, + { + "epoch": 1.230935557357427, + "grad_norm": 9.548282623291016, + "learning_rate": 0.00011794020361146505, + "loss": 0.1755, + "step": 31880 + }, + { + "epoch": 1.2313216726514538, + "grad_norm": 0.4922927916049957, + "learning_rate": 0.00011791446259186328, + "loss": 0.2173, + "step": 31890 + }, + { + "epoch": 1.2317077879454805, + "grad_norm": 1.2846627235412598, + "learning_rate": 0.00011788872157226148, + "loss": 0.2039, + "step": 31900 + }, + { + "epoch": 1.2320939032395073, + "grad_norm": 0.9321876764297485, + "learning_rate": 0.0001178629805526597, + "loss": 0.2418, + "step": 31910 + }, + { + "epoch": 1.232480018533534, + "grad_norm": 1.373787760734558, + "learning_rate": 0.00011783723953305792, + "loss": 0.3065, + "step": 31920 + }, + { + "epoch": 1.2328661338275608, + "grad_norm": 1.915208339691162, + "learning_rate": 0.00011781149851345612, + "loss": 0.292, + "step": 31930 + }, + { + "epoch": 1.2332522491215876, + "grad_norm": 1.533010482788086, + "learning_rate": 0.00011778575749385434, + "loss": 0.2234, + "step": 31940 + }, + { + "epoch": 1.2336383644156146, + 
"grad_norm": 0.5740505456924438, + "learning_rate": 0.00011776001647425254, + "loss": 0.2513, + "step": 31950 + }, + { + "epoch": 1.2340244797096414, + "grad_norm": 3.164320230484009, + "learning_rate": 0.00011773427545465077, + "loss": 0.4233, + "step": 31960 + }, + { + "epoch": 1.2344105950036681, + "grad_norm": 0.8309218287467957, + "learning_rate": 0.00011770853443504897, + "loss": 0.4629, + "step": 31970 + }, + { + "epoch": 1.2347967102976949, + "grad_norm": 2.0942423343658447, + "learning_rate": 0.0001176827934154472, + "loss": 0.3381, + "step": 31980 + }, + { + "epoch": 1.2351828255917217, + "grad_norm": 0.4961155354976654, + "learning_rate": 0.00011765705239584541, + "loss": 0.1265, + "step": 31990 + }, + { + "epoch": 1.2355689408857484, + "grad_norm": 1.6777870655059814, + "learning_rate": 0.00011763131137624361, + "loss": 0.2503, + "step": 32000 + }, + { + "epoch": 1.2359550561797752, + "grad_norm": 0.6016990542411804, + "learning_rate": 0.00011760557035664184, + "loss": 0.3944, + "step": 32010 + }, + { + "epoch": 1.2363411714738022, + "grad_norm": 1.9123533964157104, + "learning_rate": 0.00011757982933704004, + "loss": 0.463, + "step": 32020 + }, + { + "epoch": 1.236727286767829, + "grad_norm": 1.5328630208969116, + "learning_rate": 0.00011755408831743826, + "loss": 0.3368, + "step": 32030 + }, + { + "epoch": 1.2371134020618557, + "grad_norm": 1.0984220504760742, + "learning_rate": 0.00011752834729783649, + "loss": 0.1987, + "step": 32040 + }, + { + "epoch": 1.2374995173558825, + "grad_norm": 3.1570327281951904, + "learning_rate": 0.00011750260627823469, + "loss": 0.454, + "step": 32050 + }, + { + "epoch": 1.2378856326499093, + "grad_norm": 3.7589874267578125, + "learning_rate": 0.0001174768652586329, + "loss": 0.2953, + "step": 32060 + }, + { + "epoch": 1.238271747943936, + "grad_norm": 1.3289847373962402, + "learning_rate": 0.0001174511242390311, + "loss": 0.3058, + "step": 32070 + }, + { + "epoch": 1.2386578632379628, + "grad_norm": 0.9872431755065918, + "learning_rate": 0.00011742538321942933, + "loss": 0.1312, + "step": 32080 + }, + { + "epoch": 1.2390439785319898, + "grad_norm": 1.799133062362671, + "learning_rate": 0.00011739964219982753, + "loss": 0.3271, + "step": 32090 + }, + { + "epoch": 1.2394300938260165, + "grad_norm": 0.13501858711242676, + "learning_rate": 0.00011737390118022576, + "loss": 0.4462, + "step": 32100 + }, + { + "epoch": 1.2398162091200433, + "grad_norm": 1.53009033203125, + "learning_rate": 0.00011734816016062398, + "loss": 0.1311, + "step": 32110 + }, + { + "epoch": 1.24020232441407, + "grad_norm": 0.5737781524658203, + "learning_rate": 0.00011732241914102218, + "loss": 0.1705, + "step": 32120 + }, + { + "epoch": 1.2405884397080968, + "grad_norm": 2.095947027206421, + "learning_rate": 0.0001172966781214204, + "loss": 0.2195, + "step": 32130 + }, + { + "epoch": 1.2409745550021236, + "grad_norm": 2.2158639430999756, + "learning_rate": 0.0001172709371018186, + "loss": 0.2964, + "step": 32140 + }, + { + "epoch": 1.2413606702961504, + "grad_norm": 0.3154670000076294, + "learning_rate": 0.00011724519608221682, + "loss": 0.245, + "step": 32150 + }, + { + "epoch": 1.2417467855901771, + "grad_norm": 1.4467942714691162, + "learning_rate": 0.00011721945506261505, + "loss": 0.3469, + "step": 32160 + }, + { + "epoch": 1.242132900884204, + "grad_norm": 0.24252896010875702, + "learning_rate": 0.00011719371404301325, + "loss": 0.2217, + "step": 32170 + }, + { + "epoch": 1.242519016178231, + "grad_norm": 2.4256465435028076, + "learning_rate": 0.00011716797302341148, 
+ "loss": 0.4707, + "step": 32180 + }, + { + "epoch": 1.2429051314722577, + "grad_norm": 0.5917278528213501, + "learning_rate": 0.00011714223200380968, + "loss": 0.2284, + "step": 32190 + }, + { + "epoch": 1.2432912467662844, + "grad_norm": 1.2977256774902344, + "learning_rate": 0.00011711649098420789, + "loss": 0.3612, + "step": 32200 + }, + { + "epoch": 1.2436773620603112, + "grad_norm": 1.296558141708374, + "learning_rate": 0.00011709074996460609, + "loss": 0.2231, + "step": 32210 + }, + { + "epoch": 1.244063477354338, + "grad_norm": 1.6559596061706543, + "learning_rate": 0.00011706500894500432, + "loss": 0.2636, + "step": 32220 + }, + { + "epoch": 1.2444495926483647, + "grad_norm": 1.6342560052871704, + "learning_rate": 0.00011703926792540254, + "loss": 0.1276, + "step": 32230 + }, + { + "epoch": 1.2448357079423915, + "grad_norm": 1.1173146963119507, + "learning_rate": 0.00011701352690580074, + "loss": 0.1719, + "step": 32240 + }, + { + "epoch": 1.2452218232364185, + "grad_norm": 0.29269275069236755, + "learning_rate": 0.00011698778588619897, + "loss": 0.3323, + "step": 32250 + }, + { + "epoch": 1.2456079385304453, + "grad_norm": 2.518568515777588, + "learning_rate": 0.00011696204486659717, + "loss": 0.3302, + "step": 32260 + }, + { + "epoch": 1.245994053824472, + "grad_norm": 2.535940647125244, + "learning_rate": 0.00011693630384699538, + "loss": 0.1488, + "step": 32270 + }, + { + "epoch": 1.2463801691184988, + "grad_norm": 0.384199321269989, + "learning_rate": 0.00011691056282739358, + "loss": 0.0957, + "step": 32280 + }, + { + "epoch": 1.2467662844125256, + "grad_norm": 3.7460570335388184, + "learning_rate": 0.00011688482180779181, + "loss": 0.311, + "step": 32290 + }, + { + "epoch": 1.2471523997065523, + "grad_norm": 1.285152792930603, + "learning_rate": 0.00011685908078819004, + "loss": 0.1872, + "step": 32300 + }, + { + "epoch": 1.247538515000579, + "grad_norm": 0.6118050217628479, + "learning_rate": 0.00011683333976858823, + "loss": 0.1635, + "step": 32310 + }, + { + "epoch": 1.2479246302946059, + "grad_norm": 2.9890856742858887, + "learning_rate": 0.00011680759874898646, + "loss": 0.2773, + "step": 32320 + }, + { + "epoch": 1.2483107455886326, + "grad_norm": 0.15415504574775696, + "learning_rate": 0.00011678185772938466, + "loss": 0.3014, + "step": 32330 + }, + { + "epoch": 1.2486968608826596, + "grad_norm": 1.2717432975769043, + "learning_rate": 0.00011675611670978287, + "loss": 0.2442, + "step": 32340 + }, + { + "epoch": 1.2490829761766864, + "grad_norm": 0.19885335862636566, + "learning_rate": 0.0001167303756901811, + "loss": 0.1894, + "step": 32350 + }, + { + "epoch": 1.2494690914707132, + "grad_norm": 1.425176978111267, + "learning_rate": 0.0001167046346705793, + "loss": 0.3008, + "step": 32360 + }, + { + "epoch": 1.24985520676474, + "grad_norm": 1.9033544063568115, + "learning_rate": 0.00011667889365097753, + "loss": 0.3072, + "step": 32370 + }, + { + "epoch": 1.2502413220587667, + "grad_norm": 0.6677396297454834, + "learning_rate": 0.00011665315263137573, + "loss": 0.2378, + "step": 32380 + }, + { + "epoch": 1.2506274373527935, + "grad_norm": 0.4491410255432129, + "learning_rate": 0.00011662741161177396, + "loss": 0.1071, + "step": 32390 + }, + { + "epoch": 1.2510135526468202, + "grad_norm": 2.7697031497955322, + "learning_rate": 0.00011660167059217215, + "loss": 0.3276, + "step": 32400 + }, + { + "epoch": 1.2513996679408472, + "grad_norm": 1.4374775886535645, + "learning_rate": 0.00011657592957257037, + "loss": 0.2459, + "step": 32410 + }, + { + "epoch": 
1.251785783234874, + "grad_norm": 1.5245740413665771, + "learning_rate": 0.0001165501885529686, + "loss": 0.3841, + "step": 32420 + }, + { + "epoch": 1.2521718985289008, + "grad_norm": 0.5069687366485596, + "learning_rate": 0.0001165244475333668, + "loss": 0.1425, + "step": 32430 + }, + { + "epoch": 1.2525580138229275, + "grad_norm": 0.6181765198707581, + "learning_rate": 0.00011649870651376502, + "loss": 0.4184, + "step": 32440 + }, + { + "epoch": 1.2529441291169543, + "grad_norm": 2.007375717163086, + "learning_rate": 0.00011647296549416322, + "loss": 0.1754, + "step": 32450 + }, + { + "epoch": 1.253330244410981, + "grad_norm": 0.27261993288993835, + "learning_rate": 0.00011644722447456145, + "loss": 0.3608, + "step": 32460 + }, + { + "epoch": 1.2537163597050078, + "grad_norm": 0.3452116549015045, + "learning_rate": 0.00011642148345495965, + "loss": 0.1913, + "step": 32470 + }, + { + "epoch": 1.2541024749990348, + "grad_norm": 2.553738594055176, + "learning_rate": 0.00011639574243535787, + "loss": 0.2221, + "step": 32480 + }, + { + "epoch": 1.2544885902930616, + "grad_norm": 0.795706033706665, + "learning_rate": 0.00011637000141575609, + "loss": 0.2348, + "step": 32490 + }, + { + "epoch": 1.2548747055870884, + "grad_norm": 0.519059956073761, + "learning_rate": 0.00011634426039615429, + "loss": 0.2874, + "step": 32500 + }, + { + "epoch": 1.2552608208811151, + "grad_norm": 2.7050392627716064, + "learning_rate": 0.00011631851937655251, + "loss": 0.3821, + "step": 32510 + }, + { + "epoch": 1.255646936175142, + "grad_norm": 2.09036922454834, + "learning_rate": 0.00011629277835695071, + "loss": 0.2562, + "step": 32520 + }, + { + "epoch": 1.2560330514691687, + "grad_norm": 1.156534194946289, + "learning_rate": 0.00011626703733734894, + "loss": 0.1373, + "step": 32530 + }, + { + "epoch": 1.2564191667631954, + "grad_norm": 2.5214719772338867, + "learning_rate": 0.00011624129631774715, + "loss": 0.2955, + "step": 32540 + }, + { + "epoch": 1.2568052820572224, + "grad_norm": 2.71368408203125, + "learning_rate": 0.00011621555529814537, + "loss": 0.2832, + "step": 32550 + }, + { + "epoch": 1.257191397351249, + "grad_norm": 1.488390564918518, + "learning_rate": 0.00011618981427854358, + "loss": 0.2369, + "step": 32560 + }, + { + "epoch": 1.257577512645276, + "grad_norm": 1.7705353498458862, + "learning_rate": 0.00011616407325894178, + "loss": 0.4351, + "step": 32570 + }, + { + "epoch": 1.2579636279393027, + "grad_norm": 4.3224406242370605, + "learning_rate": 0.00011613833223934001, + "loss": 0.3365, + "step": 32580 + }, + { + "epoch": 1.2583497432333295, + "grad_norm": 0.9157351851463318, + "learning_rate": 0.00011611259121973821, + "loss": 0.2882, + "step": 32590 + }, + { + "epoch": 1.2587358585273563, + "grad_norm": 2.9030823707580566, + "learning_rate": 0.00011608685020013643, + "loss": 0.343, + "step": 32600 + }, + { + "epoch": 1.259121973821383, + "grad_norm": 0.807424783706665, + "learning_rate": 0.00011606110918053466, + "loss": 0.2413, + "step": 32610 + }, + { + "epoch": 1.2595080891154098, + "grad_norm": 1.8564451932907104, + "learning_rate": 0.00011603536816093286, + "loss": 0.5403, + "step": 32620 + }, + { + "epoch": 1.2598942044094366, + "grad_norm": 0.8169263005256653, + "learning_rate": 0.00011600962714133107, + "loss": 0.3522, + "step": 32630 + }, + { + "epoch": 1.2602803197034635, + "grad_norm": 0.9638017416000366, + "learning_rate": 0.00011598388612172927, + "loss": 0.3295, + "step": 32640 + }, + { + "epoch": 1.2606664349974903, + "grad_norm": 1.8613898754119873, + "learning_rate": 
0.0001159581451021275, + "loss": 0.3107, + "step": 32650 + }, + { + "epoch": 1.261052550291517, + "grad_norm": 2.582638740539551, + "learning_rate": 0.00011593240408252573, + "loss": 0.2767, + "step": 32660 + }, + { + "epoch": 1.2614386655855439, + "grad_norm": 1.8227603435516357, + "learning_rate": 0.00011590666306292393, + "loss": 0.2324, + "step": 32670 + }, + { + "epoch": 1.2618247808795706, + "grad_norm": 0.3827721178531647, + "learning_rate": 0.00011588092204332215, + "loss": 0.4926, + "step": 32680 + }, + { + "epoch": 1.2622108961735974, + "grad_norm": 1.5523405075073242, + "learning_rate": 0.00011585518102372035, + "loss": 0.4475, + "step": 32690 + }, + { + "epoch": 1.2625970114676242, + "grad_norm": 0.25254619121551514, + "learning_rate": 0.00011582944000411857, + "loss": 0.2333, + "step": 32700 + }, + { + "epoch": 1.2629831267616511, + "grad_norm": 0.5530809164047241, + "learning_rate": 0.00011580369898451677, + "loss": 0.2781, + "step": 32710 + }, + { + "epoch": 1.2633692420556777, + "grad_norm": 0.3202857971191406, + "learning_rate": 0.000115777957964915, + "loss": 0.2596, + "step": 32720 + }, + { + "epoch": 1.2637553573497047, + "grad_norm": 0.25663653016090393, + "learning_rate": 0.00011575221694531322, + "loss": 0.2096, + "step": 32730 + }, + { + "epoch": 1.2641414726437314, + "grad_norm": 2.423585891723633, + "learning_rate": 0.00011572647592571142, + "loss": 0.3064, + "step": 32740 + }, + { + "epoch": 1.2645275879377582, + "grad_norm": 0.2529740631580353, + "learning_rate": 0.00011570073490610965, + "loss": 0.1426, + "step": 32750 + }, + { + "epoch": 1.264913703231785, + "grad_norm": 0.5238136053085327, + "learning_rate": 0.00011567499388650785, + "loss": 0.1598, + "step": 32760 + }, + { + "epoch": 1.2652998185258117, + "grad_norm": 0.4495049715042114, + "learning_rate": 0.00011564925286690606, + "loss": 0.1712, + "step": 32770 + }, + { + "epoch": 1.2656859338198387, + "grad_norm": 0.3863711953163147, + "learning_rate": 0.00011562351184730426, + "loss": 0.1015, + "step": 32780 + }, + { + "epoch": 1.2660720491138653, + "grad_norm": 1.3139948844909668, + "learning_rate": 0.00011559777082770249, + "loss": 0.3982, + "step": 32790 + }, + { + "epoch": 1.2664581644078923, + "grad_norm": 0.40500277280807495, + "learning_rate": 0.00011557202980810071, + "loss": 0.4211, + "step": 32800 + }, + { + "epoch": 1.266844279701919, + "grad_norm": 0.9749487042427063, + "learning_rate": 0.00011554628878849891, + "loss": 0.2773, + "step": 32810 + }, + { + "epoch": 1.2672303949959458, + "grad_norm": 1.1194144487380981, + "learning_rate": 0.00011552054776889714, + "loss": 0.2771, + "step": 32820 + }, + { + "epoch": 1.2676165102899726, + "grad_norm": 1.4098531007766724, + "learning_rate": 0.00011549480674929534, + "loss": 0.2044, + "step": 32830 + }, + { + "epoch": 1.2680026255839993, + "grad_norm": 1.3184692859649658, + "learning_rate": 0.00011546906572969355, + "loss": 0.2633, + "step": 32840 + }, + { + "epoch": 1.268388740878026, + "grad_norm": 0.9776495099067688, + "learning_rate": 0.00011544332471009178, + "loss": 0.3212, + "step": 32850 + }, + { + "epoch": 1.2687748561720529, + "grad_norm": 1.1577869653701782, + "learning_rate": 0.00011541758369048998, + "loss": 0.4271, + "step": 32860 + }, + { + "epoch": 1.2691609714660799, + "grad_norm": 2.2742063999176025, + "learning_rate": 0.00011539184267088821, + "loss": 0.2044, + "step": 32870 + }, + { + "epoch": 1.2695470867601066, + "grad_norm": 1.2156758308410645, + "learning_rate": 0.00011536610165128641, + "loss": 0.2394, + "step": 32880 + }, + 
{ + "epoch": 1.2699332020541334, + "grad_norm": 1.9160290956497192, + "learning_rate": 0.00011534036063168463, + "loss": 0.3733, + "step": 32890 + }, + { + "epoch": 1.2703193173481602, + "grad_norm": 1.3484338521957397, + "learning_rate": 0.00011531461961208283, + "loss": 0.3946, + "step": 32900 + }, + { + "epoch": 1.270705432642187, + "grad_norm": 1.0480244159698486, + "learning_rate": 0.00011528887859248105, + "loss": 0.166, + "step": 32910 + }, + { + "epoch": 1.2710915479362137, + "grad_norm": 1.1980483531951904, + "learning_rate": 0.00011526313757287927, + "loss": 0.3134, + "step": 32920 + }, + { + "epoch": 1.2714776632302405, + "grad_norm": 1.2777379751205444, + "learning_rate": 0.00011523739655327747, + "loss": 0.2615, + "step": 32930 + }, + { + "epoch": 1.2718637785242675, + "grad_norm": 1.559792399406433, + "learning_rate": 0.0001152116555336757, + "loss": 0.2996, + "step": 32940 + }, + { + "epoch": 1.272249893818294, + "grad_norm": 1.9589471817016602, + "learning_rate": 0.0001151859145140739, + "loss": 0.2683, + "step": 32950 + }, + { + "epoch": 1.272636009112321, + "grad_norm": 0.23336051404476166, + "learning_rate": 0.00011516017349447213, + "loss": 0.3306, + "step": 32960 + }, + { + "epoch": 1.2730221244063478, + "grad_norm": 1.5898360013961792, + "learning_rate": 0.00011513443247487034, + "loss": 0.2891, + "step": 32970 + }, + { + "epoch": 1.2734082397003745, + "grad_norm": 0.5046207904815674, + "learning_rate": 0.00011510869145526854, + "loss": 0.1843, + "step": 32980 + }, + { + "epoch": 1.2737943549944013, + "grad_norm": 0.16588473320007324, + "learning_rate": 0.00011508295043566677, + "loss": 0.1065, + "step": 32990 + }, + { + "epoch": 1.274180470288428, + "grad_norm": 1.5499800443649292, + "learning_rate": 0.00011505720941606497, + "loss": 0.2795, + "step": 33000 + }, + { + "epoch": 1.274566585582455, + "grad_norm": 0.5019121766090393, + "learning_rate": 0.0001150314683964632, + "loss": 0.1857, + "step": 33010 + }, + { + "epoch": 1.2749527008764816, + "grad_norm": 0.258952260017395, + "learning_rate": 0.0001150057273768614, + "loss": 0.11, + "step": 33020 + }, + { + "epoch": 1.2753388161705086, + "grad_norm": 0.8540348410606384, + "learning_rate": 0.00011497998635725962, + "loss": 0.1852, + "step": 33030 + }, + { + "epoch": 1.2757249314645354, + "grad_norm": 0.08689398318529129, + "learning_rate": 0.00011495424533765783, + "loss": 0.2225, + "step": 33040 + }, + { + "epoch": 1.2761110467585621, + "grad_norm": 0.42253822088241577, + "learning_rate": 0.00011492850431805605, + "loss": 0.0751, + "step": 33050 + }, + { + "epoch": 1.276497162052589, + "grad_norm": 1.2964017391204834, + "learning_rate": 0.00011490276329845426, + "loss": 0.2384, + "step": 33060 + }, + { + "epoch": 1.2768832773466157, + "grad_norm": 0.5337836146354675, + "learning_rate": 0.00011487702227885246, + "loss": 0.1415, + "step": 33070 + }, + { + "epoch": 1.2772693926406424, + "grad_norm": 2.7771682739257812, + "learning_rate": 0.00011485128125925069, + "loss": 0.378, + "step": 33080 + }, + { + "epoch": 1.2776555079346692, + "grad_norm": 1.5107232332229614, + "learning_rate": 0.00011482554023964889, + "loss": 0.2482, + "step": 33090 + }, + { + "epoch": 1.2780416232286962, + "grad_norm": 0.6885499358177185, + "learning_rate": 0.00011479979922004711, + "loss": 0.2122, + "step": 33100 + }, + { + "epoch": 1.278427738522723, + "grad_norm": 0.9016557335853577, + "learning_rate": 0.00011477405820044533, + "loss": 0.2841, + "step": 33110 + }, + { + "epoch": 1.2788138538167497, + "grad_norm": 1.9532525539398193, + 
"learning_rate": 0.00011474831718084354, + "loss": 0.2281, + "step": 33120 + }, + { + "epoch": 1.2791999691107765, + "grad_norm": 2.1078782081604004, + "learning_rate": 0.00011472257616124175, + "loss": 0.2836, + "step": 33130 + }, + { + "epoch": 1.2795860844048033, + "grad_norm": 0.19830390810966492, + "learning_rate": 0.00011469683514163995, + "loss": 0.388, + "step": 33140 + }, + { + "epoch": 1.27997219969883, + "grad_norm": 0.17538850009441376, + "learning_rate": 0.00011467109412203818, + "loss": 0.3274, + "step": 33150 + }, + { + "epoch": 1.2803583149928568, + "grad_norm": 0.7402139902114868, + "learning_rate": 0.00011464535310243641, + "loss": 0.1979, + "step": 33160 + }, + { + "epoch": 1.2807444302868838, + "grad_norm": 0.2097146362066269, + "learning_rate": 0.00011461961208283461, + "loss": 0.2464, + "step": 33170 + }, + { + "epoch": 1.2811305455809103, + "grad_norm": 1.2441083192825317, + "learning_rate": 0.00011459387106323283, + "loss": 0.266, + "step": 33180 + }, + { + "epoch": 1.2815166608749373, + "grad_norm": 2.518852710723877, + "learning_rate": 0.00011456813004363103, + "loss": 0.253, + "step": 33190 + }, + { + "epoch": 1.281902776168964, + "grad_norm": 0.8078998327255249, + "learning_rate": 0.00011454238902402925, + "loss": 0.2361, + "step": 33200 + }, + { + "epoch": 1.2822888914629909, + "grad_norm": 1.2297371625900269, + "learning_rate": 0.00011451664800442745, + "loss": 0.1974, + "step": 33210 + }, + { + "epoch": 1.2826750067570176, + "grad_norm": 0.4303855895996094, + "learning_rate": 0.00011449090698482567, + "loss": 0.3563, + "step": 33220 + }, + { + "epoch": 1.2830611220510444, + "grad_norm": 1.3215210437774658, + "learning_rate": 0.0001144651659652239, + "loss": 0.2818, + "step": 33230 + }, + { + "epoch": 1.2834472373450712, + "grad_norm": 1.546265959739685, + "learning_rate": 0.0001144394249456221, + "loss": 0.5778, + "step": 33240 + }, + { + "epoch": 1.283833352639098, + "grad_norm": 0.8895953297615051, + "learning_rate": 0.00011441368392602033, + "loss": 0.2485, + "step": 33250 + }, + { + "epoch": 1.284219467933125, + "grad_norm": 0.7534870505332947, + "learning_rate": 0.00011438794290641853, + "loss": 0.2825, + "step": 33260 + }, + { + "epoch": 1.2846055832271517, + "grad_norm": 0.052820973098278046, + "learning_rate": 0.00011436220188681674, + "loss": 0.2191, + "step": 33270 + }, + { + "epoch": 1.2849916985211784, + "grad_norm": 0.9264475107192993, + "learning_rate": 0.00011433646086721494, + "loss": 0.181, + "step": 33280 + }, + { + "epoch": 1.2853778138152052, + "grad_norm": 0.2128441333770752, + "learning_rate": 0.00011431071984761317, + "loss": 0.1819, + "step": 33290 + }, + { + "epoch": 1.285763929109232, + "grad_norm": 0.5400950312614441, + "learning_rate": 0.0001142849788280114, + "loss": 0.4316, + "step": 33300 + }, + { + "epoch": 1.2861500444032588, + "grad_norm": 0.8033271431922913, + "learning_rate": 0.00011425923780840959, + "loss": 0.2146, + "step": 33310 + }, + { + "epoch": 1.2865361596972855, + "grad_norm": 2.012575149536133, + "learning_rate": 0.00011423349678880782, + "loss": 0.4335, + "step": 33320 + }, + { + "epoch": 1.2869222749913125, + "grad_norm": 0.7352376580238342, + "learning_rate": 0.00011420775576920602, + "loss": 0.2124, + "step": 33330 + }, + { + "epoch": 1.2873083902853393, + "grad_norm": 0.769036591053009, + "learning_rate": 0.00011418201474960423, + "loss": 0.3602, + "step": 33340 + }, + { + "epoch": 1.287694505579366, + "grad_norm": 0.250592976808548, + "learning_rate": 0.00011415627373000246, + "loss": 0.1692, + "step": 
33350 + }, + { + "epoch": 1.2880806208733928, + "grad_norm": 2.43820858001709, + "learning_rate": 0.00011413053271040066, + "loss": 0.2777, + "step": 33360 + }, + { + "epoch": 1.2884667361674196, + "grad_norm": 1.3179954290390015, + "learning_rate": 0.00011410479169079889, + "loss": 0.1794, + "step": 33370 + }, + { + "epoch": 1.2888528514614463, + "grad_norm": 1.0040466785430908, + "learning_rate": 0.00011407905067119709, + "loss": 0.3037, + "step": 33380 + }, + { + "epoch": 1.2892389667554731, + "grad_norm": 5.296288013458252, + "learning_rate": 0.00011405330965159531, + "loss": 0.2904, + "step": 33390 + }, + { + "epoch": 1.2896250820495, + "grad_norm": 0.4267273247241974, + "learning_rate": 0.00011402756863199351, + "loss": 0.2263, + "step": 33400 + }, + { + "epoch": 1.2900111973435266, + "grad_norm": 0.8817713260650635, + "learning_rate": 0.00011400182761239173, + "loss": 0.2715, + "step": 33410 + }, + { + "epoch": 1.2903973126375536, + "grad_norm": 2.7891275882720947, + "learning_rate": 0.00011397608659278995, + "loss": 0.2781, + "step": 33420 + }, + { + "epoch": 1.2907834279315804, + "grad_norm": 0.3013952374458313, + "learning_rate": 0.00011395034557318815, + "loss": 0.2563, + "step": 33430 + }, + { + "epoch": 1.2911695432256072, + "grad_norm": 1.766413927078247, + "learning_rate": 0.00011392460455358638, + "loss": 0.1854, + "step": 33440 + }, + { + "epoch": 1.291555658519634, + "grad_norm": 0.25331103801727295, + "learning_rate": 0.00011389886353398458, + "loss": 0.1206, + "step": 33450 + }, + { + "epoch": 1.2919417738136607, + "grad_norm": 0.036400288343429565, + "learning_rate": 0.0001138731225143828, + "loss": 0.4707, + "step": 33460 + }, + { + "epoch": 1.2923278891076875, + "grad_norm": 1.5620888471603394, + "learning_rate": 0.00011384738149478102, + "loss": 0.3312, + "step": 33470 + }, + { + "epoch": 1.2927140044017142, + "grad_norm": 0.6670392155647278, + "learning_rate": 0.00011382164047517922, + "loss": 0.2341, + "step": 33480 + }, + { + "epoch": 1.2931001196957412, + "grad_norm": 2.3108737468719482, + "learning_rate": 0.00011379589945557745, + "loss": 0.3843, + "step": 33490 + }, + { + "epoch": 1.293486234989768, + "grad_norm": 0.8025147318840027, + "learning_rate": 0.00011377015843597565, + "loss": 0.1982, + "step": 33500 + }, + { + "epoch": 1.2938723502837948, + "grad_norm": 1.7835719585418701, + "learning_rate": 0.00011374441741637387, + "loss": 0.3285, + "step": 33510 + }, + { + "epoch": 1.2942584655778215, + "grad_norm": 2.041508913040161, + "learning_rate": 0.00011371867639677207, + "loss": 0.2044, + "step": 33520 + }, + { + "epoch": 1.2946445808718483, + "grad_norm": 1.103378415107727, + "learning_rate": 0.0001136929353771703, + "loss": 0.1682, + "step": 33530 + }, + { + "epoch": 1.295030696165875, + "grad_norm": 0.057376351207494736, + "learning_rate": 0.00011366719435756851, + "loss": 0.1642, + "step": 33540 + }, + { + "epoch": 1.2954168114599018, + "grad_norm": 0.6539410948753357, + "learning_rate": 0.00011364145333796671, + "loss": 0.1549, + "step": 33550 + }, + { + "epoch": 1.2958029267539288, + "grad_norm": 1.250543236732483, + "learning_rate": 0.00011361571231836494, + "loss": 0.3764, + "step": 33560 + }, + { + "epoch": 1.2961890420479556, + "grad_norm": 0.23697887361049652, + "learning_rate": 0.00011358997129876314, + "loss": 0.3999, + "step": 33570 + }, + { + "epoch": 1.2965751573419824, + "grad_norm": 0.9318505525588989, + "learning_rate": 0.00011356423027916137, + "loss": 0.4156, + "step": 33580 + }, + { + "epoch": 1.2969612726360091, + "grad_norm": 
1.3910777568817139, + "learning_rate": 0.00011353848925955957, + "loss": 0.3455, + "step": 33590 + }, + { + "epoch": 1.297347387930036, + "grad_norm": 1.6764451265335083, + "learning_rate": 0.00011351274823995779, + "loss": 0.1884, + "step": 33600 + }, + { + "epoch": 1.2977335032240627, + "grad_norm": 0.9300051927566528, + "learning_rate": 0.000113487007220356, + "loss": 0.119, + "step": 33610 + }, + { + "epoch": 1.2981196185180894, + "grad_norm": 2.447462558746338, + "learning_rate": 0.00011346126620075422, + "loss": 0.4403, + "step": 33620 + }, + { + "epoch": 1.2985057338121164, + "grad_norm": 1.216407060623169, + "learning_rate": 0.00011343552518115243, + "loss": 0.2415, + "step": 33630 + }, + { + "epoch": 1.298891849106143, + "grad_norm": 2.968648910522461, + "learning_rate": 0.00011340978416155063, + "loss": 0.2899, + "step": 33640 + }, + { + "epoch": 1.29927796440017, + "grad_norm": 0.6649970412254333, + "learning_rate": 0.00011338404314194886, + "loss": 0.3809, + "step": 33650 + }, + { + "epoch": 1.2996640796941967, + "grad_norm": 1.7277917861938477, + "learning_rate": 0.00011335830212234709, + "loss": 0.3308, + "step": 33660 + }, + { + "epoch": 1.3000501949882235, + "grad_norm": 1.3269709348678589, + "learning_rate": 0.00011333256110274529, + "loss": 0.3682, + "step": 33670 + }, + { + "epoch": 1.3004363102822503, + "grad_norm": 0.20609407126903534, + "learning_rate": 0.0001133068200831435, + "loss": 0.1379, + "step": 33680 + }, + { + "epoch": 1.300822425576277, + "grad_norm": 0.6592215299606323, + "learning_rate": 0.00011328107906354171, + "loss": 0.2746, + "step": 33690 + }, + { + "epoch": 1.3012085408703038, + "grad_norm": 1.903635859489441, + "learning_rate": 0.00011325533804393993, + "loss": 0.4729, + "step": 33700 + }, + { + "epoch": 1.3015946561643306, + "grad_norm": 0.8432504534721375, + "learning_rate": 0.00011322959702433813, + "loss": 0.2835, + "step": 33710 + }, + { + "epoch": 1.3019807714583576, + "grad_norm": 0.9862542152404785, + "learning_rate": 0.00011320385600473635, + "loss": 0.1823, + "step": 33720 + }, + { + "epoch": 1.3023668867523843, + "grad_norm": 3.845738649368286, + "learning_rate": 0.00011317811498513458, + "loss": 0.2542, + "step": 33730 + }, + { + "epoch": 1.302753002046411, + "grad_norm": 0.6317747235298157, + "learning_rate": 0.00011315237396553278, + "loss": 0.22, + "step": 33740 + }, + { + "epoch": 1.3031391173404379, + "grad_norm": 2.5221354961395264, + "learning_rate": 0.000113126632945931, + "loss": 0.2253, + "step": 33750 + }, + { + "epoch": 1.3035252326344646, + "grad_norm": 1.3326247930526733, + "learning_rate": 0.0001131008919263292, + "loss": 0.2021, + "step": 33760 + }, + { + "epoch": 1.3039113479284914, + "grad_norm": 1.148047685623169, + "learning_rate": 0.00011307515090672742, + "loss": 0.3987, + "step": 33770 + }, + { + "epoch": 1.3042974632225182, + "grad_norm": 0.19721268117427826, + "learning_rate": 0.00011304940988712562, + "loss": 0.2642, + "step": 33780 + }, + { + "epoch": 1.3046835785165452, + "grad_norm": 1.4060617685317993, + "learning_rate": 0.00011302366886752385, + "loss": 0.2736, + "step": 33790 + }, + { + "epoch": 1.305069693810572, + "grad_norm": 1.0736548900604248, + "learning_rate": 0.00011299792784792207, + "loss": 0.2448, + "step": 33800 + }, + { + "epoch": 1.3054558091045987, + "grad_norm": 4.352476119995117, + "learning_rate": 0.00011297218682832027, + "loss": 0.383, + "step": 33810 + }, + { + "epoch": 1.3058419243986255, + "grad_norm": 0.2249228060245514, + "learning_rate": 0.0001129464458087185, + "loss": 0.14, + 
"step": 33820 + }, + { + "epoch": 1.3062280396926522, + "grad_norm": 0.4820781946182251, + "learning_rate": 0.0001129207047891167, + "loss": 0.248, + "step": 33830 + }, + { + "epoch": 1.306614154986679, + "grad_norm": 2.2983391284942627, + "learning_rate": 0.00011289496376951491, + "loss": 0.2608, + "step": 33840 + }, + { + "epoch": 1.3070002702807058, + "grad_norm": 1.3315671682357788, + "learning_rate": 0.00011286922274991314, + "loss": 0.1727, + "step": 33850 + }, + { + "epoch": 1.3073863855747327, + "grad_norm": 2.060299873352051, + "learning_rate": 0.00011284348173031134, + "loss": 0.3097, + "step": 33860 + }, + { + "epoch": 1.3077725008687593, + "grad_norm": 2.096285581588745, + "learning_rate": 0.00011281774071070957, + "loss": 0.2305, + "step": 33870 + }, + { + "epoch": 1.3081586161627863, + "grad_norm": 0.4997636675834656, + "learning_rate": 0.00011279199969110777, + "loss": 0.1993, + "step": 33880 + }, + { + "epoch": 1.308544731456813, + "grad_norm": 0.58636474609375, + "learning_rate": 0.00011276625867150599, + "loss": 0.1873, + "step": 33890 + }, + { + "epoch": 1.3089308467508398, + "grad_norm": 0.9128592610359192, + "learning_rate": 0.00011274051765190419, + "loss": 0.1885, + "step": 33900 + }, + { + "epoch": 1.3093169620448666, + "grad_norm": 2.228043794631958, + "learning_rate": 0.0001127147766323024, + "loss": 0.3649, + "step": 33910 + }, + { + "epoch": 1.3097030773388934, + "grad_norm": 1.069002389907837, + "learning_rate": 0.00011268903561270063, + "loss": 0.5454, + "step": 33920 + }, + { + "epoch": 1.3100891926329201, + "grad_norm": 0.6207597851753235, + "learning_rate": 0.00011266329459309883, + "loss": 0.2329, + "step": 33930 + }, + { + "epoch": 1.3104753079269469, + "grad_norm": 1.262247920036316, + "learning_rate": 0.00011263755357349706, + "loss": 0.3437, + "step": 33940 + }, + { + "epoch": 1.3108614232209739, + "grad_norm": 1.7429994344711304, + "learning_rate": 0.00011261181255389526, + "loss": 0.228, + "step": 33950 + }, + { + "epoch": 1.3112475385150006, + "grad_norm": 0.646900475025177, + "learning_rate": 0.00011258607153429349, + "loss": 0.3739, + "step": 33960 + }, + { + "epoch": 1.3116336538090274, + "grad_norm": 1.8228782415390015, + "learning_rate": 0.0001125603305146917, + "loss": 0.2325, + "step": 33970 + }, + { + "epoch": 1.3120197691030542, + "grad_norm": 3.539228916168213, + "learning_rate": 0.0001125345894950899, + "loss": 0.194, + "step": 33980 + }, + { + "epoch": 1.312405884397081, + "grad_norm": 1.2801135778427124, + "learning_rate": 0.00011250884847548813, + "loss": 0.3069, + "step": 33990 + }, + { + "epoch": 1.3127919996911077, + "grad_norm": 3.6265695095062256, + "learning_rate": 0.00011248310745588633, + "loss": 0.3113, + "step": 34000 + }, + { + "epoch": 1.3131781149851345, + "grad_norm": 0.07370063662528992, + "learning_rate": 0.00011245736643628455, + "loss": 0.1449, + "step": 34010 + }, + { + "epoch": 1.3135642302791615, + "grad_norm": 1.0295637845993042, + "learning_rate": 0.00011243162541668275, + "loss": 0.22, + "step": 34020 + }, + { + "epoch": 1.313950345573188, + "grad_norm": 0.8803662061691284, + "learning_rate": 0.00011240588439708098, + "loss": 0.1368, + "step": 34030 + }, + { + "epoch": 1.314336460867215, + "grad_norm": 1.6597707271575928, + "learning_rate": 0.00011238014337747919, + "loss": 0.3038, + "step": 34040 + }, + { + "epoch": 1.3147225761612418, + "grad_norm": 2.115492343902588, + "learning_rate": 0.00011235440235787739, + "loss": 0.1754, + "step": 34050 + }, + { + "epoch": 1.3151086914552685, + "grad_norm": 
0.8143919706344604, + "learning_rate": 0.00011232866133827562, + "loss": 0.3764, + "step": 34060 + }, + { + "epoch": 1.3154948067492953, + "grad_norm": 0.14369767904281616, + "learning_rate": 0.00011230292031867382, + "loss": 0.1742, + "step": 34070 + }, + { + "epoch": 1.315880922043322, + "grad_norm": 1.0129845142364502, + "learning_rate": 0.00011227717929907205, + "loss": 0.1458, + "step": 34080 + }, + { + "epoch": 1.316267037337349, + "grad_norm": 2.7300291061401367, + "learning_rate": 0.00011225143827947025, + "loss": 0.3939, + "step": 34090 + }, + { + "epoch": 1.3166531526313756, + "grad_norm": 0.20205609500408173, + "learning_rate": 0.00011222569725986847, + "loss": 0.29, + "step": 34100 + }, + { + "epoch": 1.3170392679254026, + "grad_norm": 1.8928464651107788, + "learning_rate": 0.00011219995624026669, + "loss": 0.1742, + "step": 34110 + }, + { + "epoch": 1.3174253832194294, + "grad_norm": 0.2639687955379486, + "learning_rate": 0.00011217421522066488, + "loss": 0.1745, + "step": 34120 + }, + { + "epoch": 1.3178114985134561, + "grad_norm": 0.5906389355659485, + "learning_rate": 0.00011214847420106311, + "loss": 0.2134, + "step": 34130 + }, + { + "epoch": 1.318197613807483, + "grad_norm": 0.9190629720687866, + "learning_rate": 0.00011212273318146131, + "loss": 0.2547, + "step": 34140 + }, + { + "epoch": 1.3185837291015097, + "grad_norm": 0.5749151110649109, + "learning_rate": 0.00011209699216185954, + "loss": 0.1688, + "step": 34150 + }, + { + "epoch": 1.3189698443955364, + "grad_norm": 0.82295823097229, + "learning_rate": 0.00011207125114225777, + "loss": 0.2884, + "step": 34160 + }, + { + "epoch": 1.3193559596895632, + "grad_norm": 0.07816460728645325, + "learning_rate": 0.00011204551012265597, + "loss": 0.2418, + "step": 34170 + }, + { + "epoch": 1.3197420749835902, + "grad_norm": 0.6417407393455505, + "learning_rate": 0.00011201976910305418, + "loss": 0.2557, + "step": 34180 + }, + { + "epoch": 1.320128190277617, + "grad_norm": 6.093267440795898, + "learning_rate": 0.00011199402808345238, + "loss": 0.3088, + "step": 34190 + }, + { + "epoch": 1.3205143055716437, + "grad_norm": 1.8861887454986572, + "learning_rate": 0.0001119682870638506, + "loss": 0.2204, + "step": 34200 + }, + { + "epoch": 1.3209004208656705, + "grad_norm": 2.3272714614868164, + "learning_rate": 0.0001119425460442488, + "loss": 0.2236, + "step": 34210 + }, + { + "epoch": 1.3212865361596973, + "grad_norm": 0.9608810544013977, + "learning_rate": 0.00011191680502464703, + "loss": 0.1897, + "step": 34220 + }, + { + "epoch": 1.321672651453724, + "grad_norm": 1.2157350778579712, + "learning_rate": 0.00011189106400504526, + "loss": 0.1526, + "step": 34230 + }, + { + "epoch": 1.3220587667477508, + "grad_norm": 1.6684671640396118, + "learning_rate": 0.00011186532298544346, + "loss": 0.3394, + "step": 34240 + }, + { + "epoch": 1.3224448820417778, + "grad_norm": 2.0432374477386475, + "learning_rate": 0.00011183958196584167, + "loss": 0.2183, + "step": 34250 + }, + { + "epoch": 1.3228309973358043, + "grad_norm": 0.9436892867088318, + "learning_rate": 0.00011181384094623988, + "loss": 0.2947, + "step": 34260 + }, + { + "epoch": 1.3232171126298313, + "grad_norm": 0.23260092735290527, + "learning_rate": 0.0001117880999266381, + "loss": 0.114, + "step": 34270 + }, + { + "epoch": 1.323603227923858, + "grad_norm": 1.2291594743728638, + "learning_rate": 0.0001117623589070363, + "loss": 0.3145, + "step": 34280 + }, + { + "epoch": 1.3239893432178849, + "grad_norm": 0.41411107778549194, + "learning_rate": 0.00011173661788743452, + 
"loss": 0.2937, + "step": 34290 + }, + { + "epoch": 1.3243754585119116, + "grad_norm": 2.354405164718628, + "learning_rate": 0.00011171087686783275, + "loss": 0.3933, + "step": 34300 + }, + { + "epoch": 1.3247615738059384, + "grad_norm": 2.6997978687286377, + "learning_rate": 0.00011168513584823095, + "loss": 0.1494, + "step": 34310 + }, + { + "epoch": 1.3251476890999654, + "grad_norm": 2.8430919647216797, + "learning_rate": 0.00011165939482862916, + "loss": 0.2869, + "step": 34320 + }, + { + "epoch": 1.325533804393992, + "grad_norm": 1.1737356185913086, + "learning_rate": 0.00011163365380902738, + "loss": 0.2792, + "step": 34330 + }, + { + "epoch": 1.325919919688019, + "grad_norm": 4.123973846435547, + "learning_rate": 0.00011160791278942559, + "loss": 0.5211, + "step": 34340 + }, + { + "epoch": 1.3263060349820457, + "grad_norm": 0.8862038850784302, + "learning_rate": 0.00011158217176982382, + "loss": 0.2976, + "step": 34350 + }, + { + "epoch": 1.3266921502760725, + "grad_norm": 1.8690590858459473, + "learning_rate": 0.00011155643075022202, + "loss": 0.2485, + "step": 34360 + }, + { + "epoch": 1.3270782655700992, + "grad_norm": 2.885589599609375, + "learning_rate": 0.00011153068973062024, + "loss": 0.3084, + "step": 34370 + }, + { + "epoch": 1.327464380864126, + "grad_norm": 0.9898788928985596, + "learning_rate": 0.00011150494871101844, + "loss": 0.261, + "step": 34380 + }, + { + "epoch": 1.3278504961581528, + "grad_norm": 0.6879653930664062, + "learning_rate": 0.00011147920769141667, + "loss": 0.2082, + "step": 34390 + }, + { + "epoch": 1.3282366114521795, + "grad_norm": 1.2619003057479858, + "learning_rate": 0.00011145346667181487, + "loss": 0.2402, + "step": 34400 + }, + { + "epoch": 1.3286227267462065, + "grad_norm": 1.1212007999420166, + "learning_rate": 0.00011142772565221308, + "loss": 0.3525, + "step": 34410 + }, + { + "epoch": 1.3290088420402333, + "grad_norm": 1.8431956768035889, + "learning_rate": 0.00011140198463261131, + "loss": 0.2212, + "step": 34420 + }, + { + "epoch": 1.32939495733426, + "grad_norm": 0.6185423731803894, + "learning_rate": 0.00011137624361300951, + "loss": 0.2455, + "step": 34430 + }, + { + "epoch": 1.3297810726282868, + "grad_norm": 2.3791301250457764, + "learning_rate": 0.00011135050259340774, + "loss": 0.1763, + "step": 34440 + }, + { + "epoch": 1.3301671879223136, + "grad_norm": 0.4928603768348694, + "learning_rate": 0.00011132476157380594, + "loss": 0.2381, + "step": 34450 + }, + { + "epoch": 1.3305533032163404, + "grad_norm": 1.5636029243469238, + "learning_rate": 0.00011129902055420416, + "loss": 0.1368, + "step": 34460 + }, + { + "epoch": 1.3309394185103671, + "grad_norm": 0.9425283074378967, + "learning_rate": 0.00011127327953460238, + "loss": 0.2825, + "step": 34470 + }, + { + "epoch": 1.3313255338043941, + "grad_norm": 1.2257115840911865, + "learning_rate": 0.00011124753851500058, + "loss": 0.2547, + "step": 34480 + }, + { + "epoch": 1.3317116490984207, + "grad_norm": 0.9416170716285706, + "learning_rate": 0.0001112217974953988, + "loss": 0.2766, + "step": 34490 + }, + { + "epoch": 1.3320977643924476, + "grad_norm": 0.5123847126960754, + "learning_rate": 0.000111196056475797, + "loss": 0.4733, + "step": 34500 + }, + { + "epoch": 1.3324838796864744, + "grad_norm": 1.5581384897232056, + "learning_rate": 0.00011117031545619523, + "loss": 0.1597, + "step": 34510 + }, + { + "epoch": 1.3328699949805012, + "grad_norm": 2.377333879470825, + "learning_rate": 0.00011114457443659343, + "loss": 0.209, + "step": 34520 + }, + { + "epoch": 1.333256110274528, + 
"grad_norm": 1.7840913534164429, + "learning_rate": 0.00011111883341699166, + "loss": 0.1759, + "step": 34530 + }, + { + "epoch": 1.3336422255685547, + "grad_norm": 1.1825993061065674, + "learning_rate": 0.00011109309239738987, + "loss": 0.2464, + "step": 34540 + }, + { + "epoch": 1.3340283408625815, + "grad_norm": 1.8859659433364868, + "learning_rate": 0.00011106735137778807, + "loss": 0.3539, + "step": 34550 + }, + { + "epoch": 1.3344144561566083, + "grad_norm": 1.9698175191879272, + "learning_rate": 0.0001110416103581863, + "loss": 0.3301, + "step": 34560 + }, + { + "epoch": 1.3348005714506352, + "grad_norm": 0.7649385333061218, + "learning_rate": 0.0001110158693385845, + "loss": 0.232, + "step": 34570 + }, + { + "epoch": 1.335186686744662, + "grad_norm": 0.56386399269104, + "learning_rate": 0.00011099012831898272, + "loss": 0.3425, + "step": 34580 + }, + { + "epoch": 1.3355728020386888, + "grad_norm": 2.956003189086914, + "learning_rate": 0.00011096438729938092, + "loss": 0.1518, + "step": 34590 + }, + { + "epoch": 1.3359589173327155, + "grad_norm": 2.612029552459717, + "learning_rate": 0.00011093864627977915, + "loss": 0.2765, + "step": 34600 + }, + { + "epoch": 1.3363450326267423, + "grad_norm": 0.9674397706985474, + "learning_rate": 0.00011091290526017736, + "loss": 0.303, + "step": 34610 + }, + { + "epoch": 1.336731147920769, + "grad_norm": 0.9578921794891357, + "learning_rate": 0.00011088716424057556, + "loss": 0.1405, + "step": 34620 + }, + { + "epoch": 1.3371172632147958, + "grad_norm": 2.168065071105957, + "learning_rate": 0.00011086142322097379, + "loss": 0.1914, + "step": 34630 + }, + { + "epoch": 1.3375033785088228, + "grad_norm": 1.3166526556015015, + "learning_rate": 0.00011083568220137199, + "loss": 0.4134, + "step": 34640 + }, + { + "epoch": 1.3378894938028496, + "grad_norm": 0.9082283973693848, + "learning_rate": 0.00011080994118177022, + "loss": 0.2693, + "step": 34650 + }, + { + "epoch": 1.3382756090968764, + "grad_norm": 2.203007698059082, + "learning_rate": 0.00011078420016216844, + "loss": 0.1847, + "step": 34660 + }, + { + "epoch": 1.3386617243909031, + "grad_norm": 0.8101674914360046, + "learning_rate": 0.00011075845914256664, + "loss": 0.3111, + "step": 34670 + }, + { + "epoch": 1.33904783968493, + "grad_norm": 1.9545695781707764, + "learning_rate": 0.00011073271812296486, + "loss": 0.3761, + "step": 34680 + }, + { + "epoch": 1.3394339549789567, + "grad_norm": 1.547581672668457, + "learning_rate": 0.00011070697710336306, + "loss": 0.2374, + "step": 34690 + }, + { + "epoch": 1.3398200702729834, + "grad_norm": 3.3519034385681152, + "learning_rate": 0.00011068123608376128, + "loss": 0.1957, + "step": 34700 + }, + { + "epoch": 1.3402061855670104, + "grad_norm": 1.5508599281311035, + "learning_rate": 0.00011065549506415948, + "loss": 0.4171, + "step": 34710 + }, + { + "epoch": 1.340592300861037, + "grad_norm": 1.8547546863555908, + "learning_rate": 0.00011062975404455771, + "loss": 0.1872, + "step": 34720 + }, + { + "epoch": 1.340978416155064, + "grad_norm": 1.4600756168365479, + "learning_rate": 0.00011060401302495594, + "loss": 0.3515, + "step": 34730 + }, + { + "epoch": 1.3413645314490907, + "grad_norm": 0.05774044618010521, + "learning_rate": 0.00011057827200535414, + "loss": 0.1604, + "step": 34740 + }, + { + "epoch": 1.3417506467431175, + "grad_norm": 2.8793342113494873, + "learning_rate": 0.00011055253098575235, + "loss": 0.3095, + "step": 34750 + }, + { + "epoch": 1.3421367620371443, + "grad_norm": 2.241042375564575, + "learning_rate": 0.00011052678996615055, 
+ "loss": 0.2511, + "step": 34760 + }, + { + "epoch": 1.342522877331171, + "grad_norm": 1.9320632219314575, + "learning_rate": 0.00011050104894654878, + "loss": 0.4493, + "step": 34770 + }, + { + "epoch": 1.3429089926251978, + "grad_norm": 1.6483882665634155, + "learning_rate": 0.000110475307926947, + "loss": 0.217, + "step": 34780 + }, + { + "epoch": 1.3432951079192246, + "grad_norm": 0.9635765552520752, + "learning_rate": 0.0001104495669073452, + "loss": 0.5458, + "step": 34790 + }, + { + "epoch": 1.3436812232132516, + "grad_norm": 1.2436567544937134, + "learning_rate": 0.00011042382588774343, + "loss": 0.2857, + "step": 34800 + }, + { + "epoch": 1.3440673385072783, + "grad_norm": 2.8082425594329834, + "learning_rate": 0.00011039808486814163, + "loss": 0.3439, + "step": 34810 + }, + { + "epoch": 1.344453453801305, + "grad_norm": 1.0430901050567627, + "learning_rate": 0.00011037234384853984, + "loss": 0.1404, + "step": 34820 + }, + { + "epoch": 1.3448395690953319, + "grad_norm": 1.7387149333953857, + "learning_rate": 0.00011034660282893806, + "loss": 0.395, + "step": 34830 + }, + { + "epoch": 1.3452256843893586, + "grad_norm": 1.2713748216629028, + "learning_rate": 0.00011032086180933627, + "loss": 0.29, + "step": 34840 + }, + { + "epoch": 1.3456117996833854, + "grad_norm": 0.26068204641342163, + "learning_rate": 0.0001102951207897345, + "loss": 0.1814, + "step": 34850 + }, + { + "epoch": 1.3459979149774122, + "grad_norm": 2.4163243770599365, + "learning_rate": 0.0001102693797701327, + "loss": 0.2445, + "step": 34860 + }, + { + "epoch": 1.3463840302714392, + "grad_norm": 2.2439687252044678, + "learning_rate": 0.00011024363875053092, + "loss": 0.3338, + "step": 34870 + }, + { + "epoch": 1.346770145565466, + "grad_norm": 0.2822403609752655, + "learning_rate": 0.00011021789773092912, + "loss": 0.1648, + "step": 34880 + }, + { + "epoch": 1.3471562608594927, + "grad_norm": 0.07319017499685287, + "learning_rate": 0.00011019215671132734, + "loss": 0.107, + "step": 34890 + }, + { + "epoch": 1.3475423761535195, + "grad_norm": 0.9809044003486633, + "learning_rate": 0.00011016641569172555, + "loss": 0.256, + "step": 34900 + }, + { + "epoch": 1.3479284914475462, + "grad_norm": 0.5016226768493652, + "learning_rate": 0.00011014067467212376, + "loss": 0.3229, + "step": 34910 + }, + { + "epoch": 1.348314606741573, + "grad_norm": 1.3026005029678345, + "learning_rate": 0.00011011493365252199, + "loss": 0.2145, + "step": 34920 + }, + { + "epoch": 1.3487007220355998, + "grad_norm": 1.0752215385437012, + "learning_rate": 0.00011008919263292019, + "loss": 0.2355, + "step": 34930 + }, + { + "epoch": 1.3490868373296268, + "grad_norm": 2.2703003883361816, + "learning_rate": 0.00011006345161331842, + "loss": 0.2079, + "step": 34940 + }, + { + "epoch": 1.3494729526236533, + "grad_norm": 1.1323810815811157, + "learning_rate": 0.00011003771059371662, + "loss": 0.1015, + "step": 34950 + }, + { + "epoch": 1.3498590679176803, + "grad_norm": 0.10813555121421814, + "learning_rate": 0.00011001196957411484, + "loss": 0.4214, + "step": 34960 + }, + { + "epoch": 1.350245183211707, + "grad_norm": 0.07815568149089813, + "learning_rate": 0.00010998622855451306, + "loss": 0.1211, + "step": 34970 + }, + { + "epoch": 1.3506312985057338, + "grad_norm": 0.6748234629631042, + "learning_rate": 0.00010996048753491126, + "loss": 0.3508, + "step": 34980 + }, + { + "epoch": 1.3510174137997606, + "grad_norm": 1.8556997776031494, + "learning_rate": 0.00010993474651530948, + "loss": 0.2268, + "step": 34990 + }, + { + "epoch": 
1.3514035290937874, + "grad_norm": 0.8696061372756958, + "learning_rate": 0.00010990900549570768, + "loss": 0.4321, + "step": 35000 + }, + { + "epoch": 1.3517896443878141, + "grad_norm": 0.42442765831947327, + "learning_rate": 0.00010988326447610591, + "loss": 0.1944, + "step": 35010 + }, + { + "epoch": 1.352175759681841, + "grad_norm": 1.0474554300308228, + "learning_rate": 0.00010985752345650411, + "loss": 0.1342, + "step": 35020 + }, + { + "epoch": 1.3525618749758679, + "grad_norm": 0.607037365436554, + "learning_rate": 0.00010983178243690234, + "loss": 0.2965, + "step": 35030 + }, + { + "epoch": 1.3529479902698947, + "grad_norm": 1.8160990476608276, + "learning_rate": 0.00010980604141730055, + "loss": 0.3192, + "step": 35040 + }, + { + "epoch": 1.3533341055639214, + "grad_norm": 2.0026509761810303, + "learning_rate": 0.00010978030039769875, + "loss": 0.3054, + "step": 35050 + }, + { + "epoch": 1.3537202208579482, + "grad_norm": 0.9203600883483887, + "learning_rate": 0.00010975455937809698, + "loss": 0.253, + "step": 35060 + }, + { + "epoch": 1.354106336151975, + "grad_norm": 0.33198195695877075, + "learning_rate": 0.00010972881835849518, + "loss": 0.3885, + "step": 35070 + }, + { + "epoch": 1.3544924514460017, + "grad_norm": 0.3201223611831665, + "learning_rate": 0.0001097030773388934, + "loss": 0.3029, + "step": 35080 + }, + { + "epoch": 1.3548785667400285, + "grad_norm": 1.2589943408966064, + "learning_rate": 0.0001096773363192916, + "loss": 0.4243, + "step": 35090 + }, + { + "epoch": 1.3552646820340555, + "grad_norm": 1.5106219053268433, + "learning_rate": 0.00010965159529968983, + "loss": 0.2585, + "step": 35100 + }, + { + "epoch": 1.3556507973280822, + "grad_norm": 1.429799199104309, + "learning_rate": 0.00010962585428008804, + "loss": 0.1961, + "step": 35110 + }, + { + "epoch": 1.356036912622109, + "grad_norm": 2.1211297512054443, + "learning_rate": 0.00010960011326048624, + "loss": 0.4057, + "step": 35120 + }, + { + "epoch": 1.3564230279161358, + "grad_norm": 2.5154731273651123, + "learning_rate": 0.00010957437224088447, + "loss": 0.3787, + "step": 35130 + }, + { + "epoch": 1.3568091432101625, + "grad_norm": 0.4914834201335907, + "learning_rate": 0.00010954863122128267, + "loss": 0.234, + "step": 35140 + }, + { + "epoch": 1.3571952585041893, + "grad_norm": 0.26685893535614014, + "learning_rate": 0.0001095228902016809, + "loss": 0.2841, + "step": 35150 + }, + { + "epoch": 1.357581373798216, + "grad_norm": 0.15462155640125275, + "learning_rate": 0.00010949714918207912, + "loss": 0.2269, + "step": 35160 + }, + { + "epoch": 1.357967489092243, + "grad_norm": 1.3887063264846802, + "learning_rate": 0.00010947140816247732, + "loss": 0.3455, + "step": 35170 + }, + { + "epoch": 1.3583536043862696, + "grad_norm": 0.786374032497406, + "learning_rate": 0.00010944566714287554, + "loss": 0.2897, + "step": 35180 + }, + { + "epoch": 1.3587397196802966, + "grad_norm": 1.100475549697876, + "learning_rate": 0.00010941992612327374, + "loss": 0.2892, + "step": 35190 + }, + { + "epoch": 1.3591258349743234, + "grad_norm": 0.7676102519035339, + "learning_rate": 0.00010939418510367196, + "loss": 0.1942, + "step": 35200 + }, + { + "epoch": 1.3595119502683501, + "grad_norm": 0.33462053537368774, + "learning_rate": 0.00010936844408407016, + "loss": 0.2872, + "step": 35210 + }, + { + "epoch": 1.359898065562377, + "grad_norm": 0.9294387698173523, + "learning_rate": 0.00010934270306446839, + "loss": 0.2617, + "step": 35220 + }, + { + "epoch": 1.3602841808564037, + "grad_norm": 0.3169979453086853, + 
"learning_rate": 0.00010931696204486662, + "loss": 0.2942, + "step": 35230 + }, + { + "epoch": 1.3606702961504304, + "grad_norm": 2.1339616775512695, + "learning_rate": 0.00010929122102526482, + "loss": 0.4448, + "step": 35240 + }, + { + "epoch": 1.3610564114444572, + "grad_norm": 0.9430062770843506, + "learning_rate": 0.00010926548000566303, + "loss": 0.2051, + "step": 35250 + }, + { + "epoch": 1.3614425267384842, + "grad_norm": 3.1187360286712646, + "learning_rate": 0.00010923973898606123, + "loss": 0.2274, + "step": 35260 + }, + { + "epoch": 1.361828642032511, + "grad_norm": 1.4727579355239868, + "learning_rate": 0.00010921399796645946, + "loss": 0.3757, + "step": 35270 + }, + { + "epoch": 1.3622147573265377, + "grad_norm": 2.157560348510742, + "learning_rate": 0.00010918825694685768, + "loss": 0.3096, + "step": 35280 + }, + { + "epoch": 1.3626008726205645, + "grad_norm": 0.33457377552986145, + "learning_rate": 0.00010916251592725588, + "loss": 0.1489, + "step": 35290 + }, + { + "epoch": 1.3629869879145913, + "grad_norm": 0.9005904197692871, + "learning_rate": 0.00010913677490765411, + "loss": 0.1826, + "step": 35300 + }, + { + "epoch": 1.363373103208618, + "grad_norm": 2.1222829818725586, + "learning_rate": 0.00010911103388805231, + "loss": 0.1965, + "step": 35310 + }, + { + "epoch": 1.3637592185026448, + "grad_norm": 1.3881357908248901, + "learning_rate": 0.00010908529286845052, + "loss": 0.1791, + "step": 35320 + }, + { + "epoch": 1.3641453337966718, + "grad_norm": 1.7574503421783447, + "learning_rate": 0.00010905955184884872, + "loss": 0.3316, + "step": 35330 + }, + { + "epoch": 1.3645314490906983, + "grad_norm": 0.1967727392911911, + "learning_rate": 0.00010903381082924695, + "loss": 0.2331, + "step": 35340 + }, + { + "epoch": 1.3649175643847253, + "grad_norm": 0.8974360823631287, + "learning_rate": 0.00010900806980964518, + "loss": 0.2589, + "step": 35350 + }, + { + "epoch": 1.365303679678752, + "grad_norm": 2.0996744632720947, + "learning_rate": 0.00010898232879004338, + "loss": 0.3663, + "step": 35360 + }, + { + "epoch": 1.3656897949727789, + "grad_norm": 0.5678316354751587, + "learning_rate": 0.0001089565877704416, + "loss": 0.1729, + "step": 35370 + }, + { + "epoch": 1.3660759102668056, + "grad_norm": 2.3381874561309814, + "learning_rate": 0.0001089308467508398, + "loss": 0.1615, + "step": 35380 + }, + { + "epoch": 1.3664620255608324, + "grad_norm": 1.0276836156845093, + "learning_rate": 0.00010890510573123802, + "loss": 0.3359, + "step": 35390 + }, + { + "epoch": 1.3668481408548594, + "grad_norm": 2.4374940395355225, + "learning_rate": 0.00010887936471163622, + "loss": 0.2435, + "step": 35400 + }, + { + "epoch": 1.367234256148886, + "grad_norm": 0.45221665501594543, + "learning_rate": 0.00010885362369203444, + "loss": 0.2555, + "step": 35410 + }, + { + "epoch": 1.367620371442913, + "grad_norm": 2.608090400695801, + "learning_rate": 0.00010882788267243267, + "loss": 0.2465, + "step": 35420 + }, + { + "epoch": 1.3680064867369397, + "grad_norm": 1.4186642169952393, + "learning_rate": 0.00010880214165283087, + "loss": 0.1674, + "step": 35430 + }, + { + "epoch": 1.3683926020309665, + "grad_norm": 0.659479022026062, + "learning_rate": 0.0001087764006332291, + "loss": 0.2926, + "step": 35440 + }, + { + "epoch": 1.3687787173249932, + "grad_norm": 0.9219567179679871, + "learning_rate": 0.0001087506596136273, + "loss": 0.2001, + "step": 35450 + }, + { + "epoch": 1.36916483261902, + "grad_norm": 0.8070804476737976, + "learning_rate": 0.00010872491859402551, + "loss": 0.2178, + "step": 
35460 + }, + { + "epoch": 1.3695509479130468, + "grad_norm": 2.9981069564819336, + "learning_rate": 0.00010869917757442374, + "loss": 0.3079, + "step": 35470 + }, + { + "epoch": 1.3699370632070735, + "grad_norm": 0.7891242504119873, + "learning_rate": 0.00010867343655482194, + "loss": 0.2765, + "step": 35480 + }, + { + "epoch": 1.3703231785011005, + "grad_norm": 1.448637843132019, + "learning_rate": 0.00010864769553522016, + "loss": 0.3521, + "step": 35490 + }, + { + "epoch": 1.3707092937951273, + "grad_norm": 0.07628043740987778, + "learning_rate": 0.00010862195451561836, + "loss": 0.2083, + "step": 35500 + }, + { + "epoch": 1.371095409089154, + "grad_norm": 0.7549735307693481, + "learning_rate": 0.00010859621349601659, + "loss": 0.2536, + "step": 35510 + }, + { + "epoch": 1.3714815243831808, + "grad_norm": 1.3548041582107544, + "learning_rate": 0.00010857047247641479, + "loss": 0.251, + "step": 35520 + }, + { + "epoch": 1.3718676396772076, + "grad_norm": 0.530010998249054, + "learning_rate": 0.000108544731456813, + "loss": 0.1917, + "step": 35530 + }, + { + "epoch": 1.3722537549712344, + "grad_norm": 0.4148992896080017, + "learning_rate": 0.00010851899043721123, + "loss": 0.335, + "step": 35540 + }, + { + "epoch": 1.3726398702652611, + "grad_norm": 1.5118776559829712, + "learning_rate": 0.00010849324941760943, + "loss": 0.2159, + "step": 35550 + }, + { + "epoch": 1.3730259855592881, + "grad_norm": 1.036889910697937, + "learning_rate": 0.00010846750839800766, + "loss": 0.2975, + "step": 35560 + }, + { + "epoch": 1.3734121008533147, + "grad_norm": 1.724263072013855, + "learning_rate": 0.00010844176737840586, + "loss": 0.1476, + "step": 35570 + }, + { + "epoch": 1.3737982161473417, + "grad_norm": 1.599007487297058, + "learning_rate": 0.00010841602635880408, + "loss": 0.2539, + "step": 35580 + }, + { + "epoch": 1.3741843314413684, + "grad_norm": 2.9119279384613037, + "learning_rate": 0.00010839028533920228, + "loss": 0.2688, + "step": 35590 + }, + { + "epoch": 1.3745704467353952, + "grad_norm": 1.8647874593734741, + "learning_rate": 0.00010836454431960051, + "loss": 0.4158, + "step": 35600 + }, + { + "epoch": 1.374956562029422, + "grad_norm": 3.925290822982788, + "learning_rate": 0.00010833880329999872, + "loss": 0.3333, + "step": 35610 + }, + { + "epoch": 1.3753426773234487, + "grad_norm": 0.7124634385108948, + "learning_rate": 0.00010831306228039692, + "loss": 0.1069, + "step": 35620 + }, + { + "epoch": 1.3757287926174757, + "grad_norm": 1.303579330444336, + "learning_rate": 0.00010828732126079515, + "loss": 0.2898, + "step": 35630 + }, + { + "epoch": 1.3761149079115023, + "grad_norm": 3.921804189682007, + "learning_rate": 0.00010826158024119335, + "loss": 0.4212, + "step": 35640 + }, + { + "epoch": 1.3765010232055293, + "grad_norm": 1.3194564580917358, + "learning_rate": 0.00010823583922159158, + "loss": 0.2771, + "step": 35650 + }, + { + "epoch": 1.376887138499556, + "grad_norm": 1.4237637519836426, + "learning_rate": 0.00010821009820198979, + "loss": 0.2463, + "step": 35660 + }, + { + "epoch": 1.3772732537935828, + "grad_norm": 1.8165888786315918, + "learning_rate": 0.000108184357182388, + "loss": 0.291, + "step": 35670 + }, + { + "epoch": 1.3776593690876096, + "grad_norm": 1.1056426763534546, + "learning_rate": 0.00010815861616278622, + "loss": 0.2525, + "step": 35680 + }, + { + "epoch": 1.3780454843816363, + "grad_norm": 1.483189582824707, + "learning_rate": 0.00010813287514318442, + "loss": 0.1569, + "step": 35690 + }, + { + "epoch": 1.378431599675663, + "grad_norm": 
1.0666841268539429, + "learning_rate": 0.00010810713412358264, + "loss": 0.235, + "step": 35700 + }, + { + "epoch": 1.3788177149696899, + "grad_norm": 1.0299845933914185, + "learning_rate": 0.00010808139310398084, + "loss": 0.3892, + "step": 35710 + }, + { + "epoch": 1.3792038302637168, + "grad_norm": 2.3474409580230713, + "learning_rate": 0.00010805565208437907, + "loss": 0.3417, + "step": 35720 + }, + { + "epoch": 1.3795899455577436, + "grad_norm": 1.7456315755844116, + "learning_rate": 0.0001080299110647773, + "loss": 0.2538, + "step": 35730 + }, + { + "epoch": 1.3799760608517704, + "grad_norm": 2.866103410720825, + "learning_rate": 0.0001080041700451755, + "loss": 0.1619, + "step": 35740 + }, + { + "epoch": 1.3803621761457971, + "grad_norm": 0.29136407375335693, + "learning_rate": 0.00010797842902557371, + "loss": 0.2692, + "step": 35750 + }, + { + "epoch": 1.380748291439824, + "grad_norm": 0.8046161532402039, + "learning_rate": 0.00010795268800597191, + "loss": 0.1575, + "step": 35760 + }, + { + "epoch": 1.3811344067338507, + "grad_norm": 0.6451787352561951, + "learning_rate": 0.00010792694698637014, + "loss": 0.4914, + "step": 35770 + }, + { + "epoch": 1.3815205220278775, + "grad_norm": 0.7289161086082458, + "learning_rate": 0.00010790120596676836, + "loss": 0.1895, + "step": 35780 + }, + { + "epoch": 1.3819066373219044, + "grad_norm": 0.8300430178642273, + "learning_rate": 0.00010787546494716656, + "loss": 0.3663, + "step": 35790 + }, + { + "epoch": 1.382292752615931, + "grad_norm": 0.17713364958763123, + "learning_rate": 0.00010784972392756479, + "loss": 0.3189, + "step": 35800 + }, + { + "epoch": 1.382678867909958, + "grad_norm": 0.903222918510437, + "learning_rate": 0.00010782398290796299, + "loss": 0.1577, + "step": 35810 + }, + { + "epoch": 1.3830649832039847, + "grad_norm": 0.08617932349443436, + "learning_rate": 0.0001077982418883612, + "loss": 0.2872, + "step": 35820 + }, + { + "epoch": 1.3834510984980115, + "grad_norm": 1.9590895175933838, + "learning_rate": 0.0001077725008687594, + "loss": 0.2907, + "step": 35830 + }, + { + "epoch": 1.3838372137920383, + "grad_norm": 1.2515161037445068, + "learning_rate": 0.00010774675984915763, + "loss": 0.177, + "step": 35840 + }, + { + "epoch": 1.384223329086065, + "grad_norm": 1.6171292066574097, + "learning_rate": 0.00010772101882955586, + "loss": 0.2321, + "step": 35850 + }, + { + "epoch": 1.3846094443800918, + "grad_norm": 0.13681405782699585, + "learning_rate": 0.00010769527780995406, + "loss": 0.247, + "step": 35860 + }, + { + "epoch": 1.3849955596741186, + "grad_norm": 1.1949968338012695, + "learning_rate": 0.00010766953679035228, + "loss": 0.2978, + "step": 35870 + }, + { + "epoch": 1.3853816749681456, + "grad_norm": 0.17001692950725555, + "learning_rate": 0.00010764379577075048, + "loss": 0.3241, + "step": 35880 + }, + { + "epoch": 1.3857677902621723, + "grad_norm": 0.8227952122688293, + "learning_rate": 0.0001076180547511487, + "loss": 0.3499, + "step": 35890 + }, + { + "epoch": 1.386153905556199, + "grad_norm": 1.4185482263565063, + "learning_rate": 0.0001075923137315469, + "loss": 0.3109, + "step": 35900 + }, + { + "epoch": 1.3865400208502259, + "grad_norm": 0.9533351063728333, + "learning_rate": 0.00010756657271194512, + "loss": 0.24, + "step": 35910 + }, + { + "epoch": 1.3869261361442526, + "grad_norm": 1.077789306640625, + "learning_rate": 0.00010754083169234335, + "loss": 0.2662, + "step": 35920 + }, + { + "epoch": 1.3873122514382794, + "grad_norm": 1.3528363704681396, + "learning_rate": 0.00010751509067274155, + "loss": 
0.1623, + "step": 35930 + }, + { + "epoch": 1.3876983667323062, + "grad_norm": 0.25122806429862976, + "learning_rate": 0.00010748934965313978, + "loss": 0.1639, + "step": 35940 + }, + { + "epoch": 1.3880844820263332, + "grad_norm": 0.9446159601211548, + "learning_rate": 0.00010746360863353798, + "loss": 0.2035, + "step": 35950 + }, + { + "epoch": 1.38847059732036, + "grad_norm": 1.2258719205856323, + "learning_rate": 0.00010743786761393619, + "loss": 0.4247, + "step": 35960 + }, + { + "epoch": 1.3888567126143867, + "grad_norm": 3.117729663848877, + "learning_rate": 0.00010741212659433442, + "loss": 0.2569, + "step": 35970 + }, + { + "epoch": 1.3892428279084135, + "grad_norm": 0.6333123445510864, + "learning_rate": 0.00010738638557473262, + "loss": 0.1843, + "step": 35980 + }, + { + "epoch": 1.3896289432024402, + "grad_norm": 1.49360191822052, + "learning_rate": 0.00010736064455513084, + "loss": 0.2318, + "step": 35990 + }, + { + "epoch": 1.390015058496467, + "grad_norm": 3.9082753658294678, + "learning_rate": 0.00010733490353552904, + "loss": 0.2037, + "step": 36000 + }, + { + "epoch": 1.3904011737904938, + "grad_norm": 0.5687323808670044, + "learning_rate": 0.00010730916251592727, + "loss": 0.0871, + "step": 36010 + }, + { + "epoch": 1.3907872890845208, + "grad_norm": 0.3480868339538574, + "learning_rate": 0.00010728342149632547, + "loss": 0.4009, + "step": 36020 + }, + { + "epoch": 1.3911734043785473, + "grad_norm": 1.325042486190796, + "learning_rate": 0.00010725768047672368, + "loss": 0.5143, + "step": 36030 + }, + { + "epoch": 1.3915595196725743, + "grad_norm": 2.114786386489868, + "learning_rate": 0.00010723193945712191, + "loss": 0.2944, + "step": 36040 + }, + { + "epoch": 1.391945634966601, + "grad_norm": 1.716272234916687, + "learning_rate": 0.00010720619843752011, + "loss": 0.3793, + "step": 36050 + }, + { + "epoch": 1.3923317502606278, + "grad_norm": 2.057535171508789, + "learning_rate": 0.00010718045741791834, + "loss": 0.199, + "step": 36060 + }, + { + "epoch": 1.3927178655546546, + "grad_norm": 2.371248483657837, + "learning_rate": 0.00010715471639831653, + "loss": 0.3276, + "step": 36070 + }, + { + "epoch": 1.3931039808486814, + "grad_norm": 0.1784186065196991, + "learning_rate": 0.00010712897537871476, + "loss": 0.0922, + "step": 36080 + }, + { + "epoch": 1.3934900961427081, + "grad_norm": 0.7923040390014648, + "learning_rate": 0.00010710323435911298, + "loss": 0.1599, + "step": 36090 + }, + { + "epoch": 1.393876211436735, + "grad_norm": 1.9013831615447998, + "learning_rate": 0.00010707749333951117, + "loss": 0.4997, + "step": 36100 + }, + { + "epoch": 1.394262326730762, + "grad_norm": 1.8659415245056152, + "learning_rate": 0.0001070517523199094, + "loss": 0.1851, + "step": 36110 + }, + { + "epoch": 1.3946484420247887, + "grad_norm": 2.5775375366210938, + "learning_rate": 0.0001070260113003076, + "loss": 0.2384, + "step": 36120 + }, + { + "epoch": 1.3950345573188154, + "grad_norm": 0.21943879127502441, + "learning_rate": 0.00010700027028070583, + "loss": 0.3996, + "step": 36130 + }, + { + "epoch": 1.3954206726128422, + "grad_norm": 1.1734743118286133, + "learning_rate": 0.00010697452926110403, + "loss": 0.1737, + "step": 36140 + }, + { + "epoch": 1.395806787906869, + "grad_norm": 0.697695791721344, + "learning_rate": 0.00010694878824150225, + "loss": 0.2778, + "step": 36150 + }, + { + "epoch": 1.3961929032008957, + "grad_norm": 3.2881579399108887, + "learning_rate": 0.00010692304722190047, + "loss": 0.3198, + "step": 36160 + }, + { + "epoch": 1.3965790184949225, + 
"grad_norm": 0.1592467725276947, + "learning_rate": 0.00010689730620229868, + "loss": 0.2591, + "step": 36170 + }, + { + "epoch": 1.3969651337889495, + "grad_norm": 0.4579029083251953, + "learning_rate": 0.0001068715651826969, + "loss": 0.2984, + "step": 36180 + }, + { + "epoch": 1.3973512490829763, + "grad_norm": 0.4286015033721924, + "learning_rate": 0.0001068458241630951, + "loss": 0.4098, + "step": 36190 + }, + { + "epoch": 1.397737364377003, + "grad_norm": 1.7824127674102783, + "learning_rate": 0.00010682008314349332, + "loss": 0.2446, + "step": 36200 + }, + { + "epoch": 1.3981234796710298, + "grad_norm": 0.8584449887275696, + "learning_rate": 0.00010679434212389152, + "loss": 0.1415, + "step": 36210 + }, + { + "epoch": 1.3985095949650566, + "grad_norm": 1.1699339151382446, + "learning_rate": 0.00010676860110428975, + "loss": 0.1706, + "step": 36220 + }, + { + "epoch": 1.3988957102590833, + "grad_norm": 2.615877389907837, + "learning_rate": 0.00010674286008468796, + "loss": 0.2605, + "step": 36230 + }, + { + "epoch": 1.39928182555311, + "grad_norm": 2.182037591934204, + "learning_rate": 0.00010671711906508617, + "loss": 0.2067, + "step": 36240 + }, + { + "epoch": 1.399667940847137, + "grad_norm": 2.183263063430786, + "learning_rate": 0.00010669137804548439, + "loss": 0.4704, + "step": 36250 + }, + { + "epoch": 1.4000540561411636, + "grad_norm": 3.505791187286377, + "learning_rate": 0.00010666563702588259, + "loss": 0.3385, + "step": 36260 + }, + { + "epoch": 1.4004401714351906, + "grad_norm": 1.2262030839920044, + "learning_rate": 0.00010663989600628081, + "loss": 0.3999, + "step": 36270 + }, + { + "epoch": 1.4008262867292174, + "grad_norm": 2.4024577140808105, + "learning_rate": 0.00010661415498667904, + "loss": 0.1984, + "step": 36280 + }, + { + "epoch": 1.4012124020232442, + "grad_norm": 0.4166090786457062, + "learning_rate": 0.00010658841396707724, + "loss": 0.2448, + "step": 36290 + }, + { + "epoch": 1.401598517317271, + "grad_norm": 0.422590047121048, + "learning_rate": 0.00010656267294747547, + "loss": 0.2464, + "step": 36300 + }, + { + "epoch": 1.4019846326112977, + "grad_norm": 2.287503480911255, + "learning_rate": 0.00010653693192787367, + "loss": 0.1621, + "step": 36310 + }, + { + "epoch": 1.4023707479053245, + "grad_norm": 0.8126110434532166, + "learning_rate": 0.00010651119090827188, + "loss": 0.2097, + "step": 36320 + }, + { + "epoch": 1.4027568631993512, + "grad_norm": 0.683016836643219, + "learning_rate": 0.00010648544988867008, + "loss": 0.1512, + "step": 36330 + }, + { + "epoch": 1.4031429784933782, + "grad_norm": 1.5477893352508545, + "learning_rate": 0.00010645970886906831, + "loss": 0.2169, + "step": 36340 + }, + { + "epoch": 1.403529093787405, + "grad_norm": 2.183166265487671, + "learning_rate": 0.00010643396784946653, + "loss": 0.4307, + "step": 36350 + }, + { + "epoch": 1.4039152090814317, + "grad_norm": 1.5782747268676758, + "learning_rate": 0.00010640822682986473, + "loss": 0.291, + "step": 36360 + }, + { + "epoch": 1.4043013243754585, + "grad_norm": 1.2823392152786255, + "learning_rate": 0.00010638248581026296, + "loss": 0.315, + "step": 36370 + }, + { + "epoch": 1.4046874396694853, + "grad_norm": 2.0126500129699707, + "learning_rate": 0.00010635674479066116, + "loss": 0.2565, + "step": 36380 + }, + { + "epoch": 1.405073554963512, + "grad_norm": 2.490217447280884, + "learning_rate": 0.00010633100377105937, + "loss": 0.2665, + "step": 36390 + }, + { + "epoch": 1.4054596702575388, + "grad_norm": 1.2054855823516846, + "learning_rate": 0.00010630526275145757, + 
"loss": 0.5455, + "step": 36400 + }, + { + "epoch": 1.4058457855515658, + "grad_norm": 0.7968757748603821, + "learning_rate": 0.0001062795217318558, + "loss": 0.333, + "step": 36410 + }, + { + "epoch": 1.4062319008455926, + "grad_norm": 1.1027718782424927, + "learning_rate": 0.00010625378071225403, + "loss": 0.2019, + "step": 36420 + }, + { + "epoch": 1.4066180161396193, + "grad_norm": 1.987302541732788, + "learning_rate": 0.00010622803969265223, + "loss": 0.3159, + "step": 36430 + }, + { + "epoch": 1.407004131433646, + "grad_norm": 1.5426512956619263, + "learning_rate": 0.00010620229867305045, + "loss": 0.3759, + "step": 36440 + }, + { + "epoch": 1.4073902467276729, + "grad_norm": 0.5228156447410583, + "learning_rate": 0.00010617655765344865, + "loss": 0.2465, + "step": 36450 + }, + { + "epoch": 1.4077763620216996, + "grad_norm": 0.46890121698379517, + "learning_rate": 0.00010615081663384687, + "loss": 0.302, + "step": 36460 + }, + { + "epoch": 1.4081624773157264, + "grad_norm": 2.1506495475769043, + "learning_rate": 0.0001061250756142451, + "loss": 0.2569, + "step": 36470 + }, + { + "epoch": 1.4085485926097534, + "grad_norm": 2.307468891143799, + "learning_rate": 0.0001060993345946433, + "loss": 0.2009, + "step": 36480 + }, + { + "epoch": 1.40893470790378, + "grad_norm": 0.07033026963472366, + "learning_rate": 0.00010607359357504152, + "loss": 0.1728, + "step": 36490 + }, + { + "epoch": 1.409320823197807, + "grad_norm": 0.3262972831726074, + "learning_rate": 0.00010604785255543972, + "loss": 0.2905, + "step": 36500 + }, + { + "epoch": 1.4097069384918337, + "grad_norm": 0.755646824836731, + "learning_rate": 0.00010602211153583795, + "loss": 0.3287, + "step": 36510 + }, + { + "epoch": 1.4100930537858605, + "grad_norm": 1.1180161237716675, + "learning_rate": 0.00010599637051623615, + "loss": 0.2743, + "step": 36520 + }, + { + "epoch": 1.4104791690798872, + "grad_norm": 1.2358392477035522, + "learning_rate": 0.00010597062949663436, + "loss": 0.2672, + "step": 36530 + }, + { + "epoch": 1.410865284373914, + "grad_norm": 1.4188988208770752, + "learning_rate": 0.00010594488847703259, + "loss": 0.2552, + "step": 36540 + }, + { + "epoch": 1.4112513996679408, + "grad_norm": 1.4727978706359863, + "learning_rate": 0.00010591914745743079, + "loss": 0.2256, + "step": 36550 + }, + { + "epoch": 1.4116375149619675, + "grad_norm": 0.08973213285207748, + "learning_rate": 0.00010589340643782901, + "loss": 0.224, + "step": 36560 + }, + { + "epoch": 1.4120236302559945, + "grad_norm": 0.9915102124214172, + "learning_rate": 0.00010586766541822721, + "loss": 0.195, + "step": 36570 + }, + { + "epoch": 1.4124097455500213, + "grad_norm": 0.8524800539016724, + "learning_rate": 0.00010584192439862544, + "loss": 0.1492, + "step": 36580 + }, + { + "epoch": 1.412795860844048, + "grad_norm": 1.8414466381072998, + "learning_rate": 0.00010581618337902365, + "loss": 0.3412, + "step": 36590 + }, + { + "epoch": 1.4131819761380748, + "grad_norm": 2.596547842025757, + "learning_rate": 0.00010579044235942185, + "loss": 0.3469, + "step": 36600 + }, + { + "epoch": 1.4135680914321016, + "grad_norm": 0.5367813110351562, + "learning_rate": 0.00010576470133982008, + "loss": 0.2274, + "step": 36610 + }, + { + "epoch": 1.4139542067261284, + "grad_norm": 2.70858097076416, + "learning_rate": 0.00010573896032021828, + "loss": 0.171, + "step": 36620 + }, + { + "epoch": 1.4143403220201551, + "grad_norm": 1.7077667713165283, + "learning_rate": 0.00010571321930061651, + "loss": 0.2638, + "step": 36630 + }, + { + "epoch": 1.4147264373141821, + 
"grad_norm": 0.9189953804016113, + "learning_rate": 0.00010568747828101471, + "loss": 0.2283, + "step": 36640 + }, + { + "epoch": 1.4151125526082087, + "grad_norm": 2.0240087509155273, + "learning_rate": 0.00010566173726141293, + "loss": 0.3166, + "step": 36650 + }, + { + "epoch": 1.4154986679022357, + "grad_norm": 3.3304507732391357, + "learning_rate": 0.00010563599624181115, + "loss": 0.3046, + "step": 36660 + }, + { + "epoch": 1.4158847831962624, + "grad_norm": 1.955929160118103, + "learning_rate": 0.00010561025522220935, + "loss": 0.2653, + "step": 36670 + }, + { + "epoch": 1.4162708984902892, + "grad_norm": 2.2465381622314453, + "learning_rate": 0.00010558451420260757, + "loss": 0.3119, + "step": 36680 + }, + { + "epoch": 1.416657013784316, + "grad_norm": 2.108614921569824, + "learning_rate": 0.00010555877318300577, + "loss": 0.2872, + "step": 36690 + }, + { + "epoch": 1.4170431290783427, + "grad_norm": 1.35493004322052, + "learning_rate": 0.000105533032163404, + "loss": 0.1491, + "step": 36700 + }, + { + "epoch": 1.4174292443723697, + "grad_norm": 0.9102393984794617, + "learning_rate": 0.0001055072911438022, + "loss": 0.1565, + "step": 36710 + }, + { + "epoch": 1.4178153596663963, + "grad_norm": 1.7878345251083374, + "learning_rate": 0.00010548155012420043, + "loss": 0.4864, + "step": 36720 + }, + { + "epoch": 1.4182014749604233, + "grad_norm": 1.4333637952804565, + "learning_rate": 0.00010545580910459864, + "loss": 0.3246, + "step": 36730 + }, + { + "epoch": 1.41858759025445, + "grad_norm": 0.0966360941529274, + "learning_rate": 0.00010543006808499684, + "loss": 0.1758, + "step": 36740 + }, + { + "epoch": 1.4189737055484768, + "grad_norm": 0.30043545365333557, + "learning_rate": 0.00010540432706539507, + "loss": 0.1811, + "step": 36750 + }, + { + "epoch": 1.4193598208425036, + "grad_norm": 1.7705951929092407, + "learning_rate": 0.00010537858604579327, + "loss": 0.1824, + "step": 36760 + }, + { + "epoch": 1.4197459361365303, + "grad_norm": 1.2025195360183716, + "learning_rate": 0.0001053528450261915, + "loss": 0.1463, + "step": 36770 + }, + { + "epoch": 1.420132051430557, + "grad_norm": 0.3154304325580597, + "learning_rate": 0.00010532710400658972, + "loss": 0.1817, + "step": 36780 + }, + { + "epoch": 1.4205181667245839, + "grad_norm": 3.392331838607788, + "learning_rate": 0.00010530136298698792, + "loss": 0.2938, + "step": 36790 + }, + { + "epoch": 1.4209042820186109, + "grad_norm": 0.7256132364273071, + "learning_rate": 0.00010527562196738613, + "loss": 0.28, + "step": 36800 + }, + { + "epoch": 1.4212903973126376, + "grad_norm": 1.9007991552352905, + "learning_rate": 0.00010524988094778435, + "loss": 0.2306, + "step": 36810 + }, + { + "epoch": 1.4216765126066644, + "grad_norm": 3.21189546585083, + "learning_rate": 0.00010522413992818256, + "loss": 0.2945, + "step": 36820 + }, + { + "epoch": 1.4220626279006912, + "grad_norm": 0.20476600527763367, + "learning_rate": 0.00010519839890858076, + "loss": 0.3129, + "step": 36830 + }, + { + "epoch": 1.422448743194718, + "grad_norm": 1.4225107431411743, + "learning_rate": 0.00010517265788897899, + "loss": 0.2262, + "step": 36840 + }, + { + "epoch": 1.4228348584887447, + "grad_norm": 1.203728437423706, + "learning_rate": 0.00010514691686937721, + "loss": 0.176, + "step": 36850 + }, + { + "epoch": 1.4232209737827715, + "grad_norm": 0.3001759648323059, + "learning_rate": 0.00010512117584977541, + "loss": 0.218, + "step": 36860 + }, + { + "epoch": 1.4236070890767984, + "grad_norm": 2.3143389225006104, + "learning_rate": 0.00010509543483017363, + 
"loss": 0.1562, + "step": 36870 + }, + { + "epoch": 1.423993204370825, + "grad_norm": 0.5087364912033081, + "learning_rate": 0.00010506969381057184, + "loss": 0.1207, + "step": 36880 + }, + { + "epoch": 1.424379319664852, + "grad_norm": 1.6521960496902466, + "learning_rate": 0.00010504395279097005, + "loss": 0.4756, + "step": 36890 + }, + { + "epoch": 1.4247654349588788, + "grad_norm": 0.09236706793308258, + "learning_rate": 0.00010501821177136825, + "loss": 0.1755, + "step": 36900 + }, + { + "epoch": 1.4251515502529055, + "grad_norm": 0.2143094390630722, + "learning_rate": 0.00010499247075176648, + "loss": 0.2126, + "step": 36910 + }, + { + "epoch": 1.4255376655469323, + "grad_norm": 1.210170865058899, + "learning_rate": 0.00010496672973216471, + "loss": 0.1391, + "step": 36920 + }, + { + "epoch": 1.425923780840959, + "grad_norm": 0.6248244047164917, + "learning_rate": 0.00010494098871256291, + "loss": 0.2898, + "step": 36930 + }, + { + "epoch": 1.426309896134986, + "grad_norm": 2.1100337505340576, + "learning_rate": 0.00010491524769296113, + "loss": 0.1198, + "step": 36940 + }, + { + "epoch": 1.4266960114290126, + "grad_norm": 2.5673348903656006, + "learning_rate": 0.00010488950667335933, + "loss": 0.2284, + "step": 36950 + }, + { + "epoch": 1.4270821267230396, + "grad_norm": 1.868195652961731, + "learning_rate": 0.00010486376565375755, + "loss": 0.3738, + "step": 36960 + }, + { + "epoch": 1.4274682420170663, + "grad_norm": 1.0951671600341797, + "learning_rate": 0.00010483802463415577, + "loss": 0.1424, + "step": 36970 + }, + { + "epoch": 1.4278543573110931, + "grad_norm": 0.4791143536567688, + "learning_rate": 0.00010481228361455397, + "loss": 0.1445, + "step": 36980 + }, + { + "epoch": 1.4282404726051199, + "grad_norm": 3.2477540969848633, + "learning_rate": 0.0001047865425949522, + "loss": 0.2676, + "step": 36990 + }, + { + "epoch": 1.4286265878991466, + "grad_norm": 0.8082342147827148, + "learning_rate": 0.0001047608015753504, + "loss": 0.2295, + "step": 37000 + }, + { + "epoch": 1.4290127031931734, + "grad_norm": 2.3048954010009766, + "learning_rate": 0.00010473506055574863, + "loss": 0.1996, + "step": 37010 + }, + { + "epoch": 1.4293988184872002, + "grad_norm": 0.42648783326148987, + "learning_rate": 0.00010470931953614683, + "loss": 0.2549, + "step": 37020 + }, + { + "epoch": 1.4297849337812272, + "grad_norm": 1.8794362545013428, + "learning_rate": 0.00010468357851654504, + "loss": 0.1929, + "step": 37030 + }, + { + "epoch": 1.430171049075254, + "grad_norm": 0.9679039120674133, + "learning_rate": 0.00010465783749694327, + "loss": 0.1544, + "step": 37040 + }, + { + "epoch": 1.4305571643692807, + "grad_norm": 0.7789367437362671, + "learning_rate": 0.00010463209647734147, + "loss": 0.2251, + "step": 37050 + }, + { + "epoch": 1.4309432796633075, + "grad_norm": 1.1705437898635864, + "learning_rate": 0.0001046063554577397, + "loss": 0.1567, + "step": 37060 + }, + { + "epoch": 1.4313293949573342, + "grad_norm": 0.13087430596351624, + "learning_rate": 0.00010458061443813789, + "loss": 0.3237, + "step": 37070 + }, + { + "epoch": 1.431715510251361, + "grad_norm": 1.0658761262893677, + "learning_rate": 0.00010455487341853612, + "loss": 0.24, + "step": 37080 + }, + { + "epoch": 1.4321016255453878, + "grad_norm": 1.686922311782837, + "learning_rate": 0.00010452913239893433, + "loss": 0.1911, + "step": 37090 + }, + { + "epoch": 1.4324877408394148, + "grad_norm": 2.010221481323242, + "learning_rate": 0.00010450339137933253, + "loss": 0.4569, + "step": 37100 + }, + { + "epoch": 
1.4328738561334413, + "grad_norm": 0.8007562160491943, + "learning_rate": 0.00010447765035973076, + "loss": 0.1198, + "step": 37110 + }, + { + "epoch": 1.4332599714274683, + "grad_norm": 0.5455211997032166, + "learning_rate": 0.00010445190934012896, + "loss": 0.2179, + "step": 37120 + }, + { + "epoch": 1.433646086721495, + "grad_norm": 1.4133542776107788, + "learning_rate": 0.00010442616832052719, + "loss": 0.45, + "step": 37130 + }, + { + "epoch": 1.4340322020155218, + "grad_norm": 1.6218222379684448, + "learning_rate": 0.00010440042730092539, + "loss": 0.19, + "step": 37140 + }, + { + "epoch": 1.4344183173095486, + "grad_norm": 0.6425970196723938, + "learning_rate": 0.00010437468628132361, + "loss": 0.2345, + "step": 37150 + }, + { + "epoch": 1.4348044326035754, + "grad_norm": 0.4344918131828308, + "learning_rate": 0.00010434894526172183, + "loss": 0.2532, + "step": 37160 + }, + { + "epoch": 1.4351905478976021, + "grad_norm": 0.6628998517990112, + "learning_rate": 0.00010432320424212003, + "loss": 0.2281, + "step": 37170 + }, + { + "epoch": 1.435576663191629, + "grad_norm": 0.8695842027664185, + "learning_rate": 0.00010429746322251825, + "loss": 0.2796, + "step": 37180 + }, + { + "epoch": 1.435962778485656, + "grad_norm": 0.16481854021549225, + "learning_rate": 0.00010427172220291645, + "loss": 0.2333, + "step": 37190 + }, + { + "epoch": 1.4363488937796827, + "grad_norm": 0.7194454073905945, + "learning_rate": 0.00010424598118331468, + "loss": 0.1413, + "step": 37200 + }, + { + "epoch": 1.4367350090737094, + "grad_norm": 4.845508575439453, + "learning_rate": 0.00010422024016371288, + "loss": 0.3944, + "step": 37210 + }, + { + "epoch": 1.4371211243677362, + "grad_norm": 2.6386618614196777, + "learning_rate": 0.0001041944991441111, + "loss": 0.3037, + "step": 37220 + }, + { + "epoch": 1.437507239661763, + "grad_norm": 0.4089922308921814, + "learning_rate": 0.00010416875812450932, + "loss": 0.2985, + "step": 37230 + }, + { + "epoch": 1.4378933549557897, + "grad_norm": 1.456944465637207, + "learning_rate": 0.00010414301710490752, + "loss": 0.3031, + "step": 37240 + }, + { + "epoch": 1.4382794702498165, + "grad_norm": 1.301829218864441, + "learning_rate": 0.00010411727608530575, + "loss": 0.2578, + "step": 37250 + }, + { + "epoch": 1.4386655855438435, + "grad_norm": 1.2072703838348389, + "learning_rate": 0.00010409153506570395, + "loss": 0.391, + "step": 37260 + }, + { + "epoch": 1.4390517008378703, + "grad_norm": 0.5538531541824341, + "learning_rate": 0.00010406579404610217, + "loss": 0.1867, + "step": 37270 + }, + { + "epoch": 1.439437816131897, + "grad_norm": 1.0898678302764893, + "learning_rate": 0.0001040400530265004, + "loss": 0.2112, + "step": 37280 + }, + { + "epoch": 1.4398239314259238, + "grad_norm": 1.5769239664077759, + "learning_rate": 0.0001040143120068986, + "loss": 0.3121, + "step": 37290 + }, + { + "epoch": 1.4402100467199506, + "grad_norm": 0.3964422345161438, + "learning_rate": 0.00010398857098729681, + "loss": 0.196, + "step": 37300 + }, + { + "epoch": 1.4405961620139773, + "grad_norm": 1.0268182754516602, + "learning_rate": 0.00010396282996769501, + "loss": 0.1785, + "step": 37310 + }, + { + "epoch": 1.440982277308004, + "grad_norm": 1.750826358795166, + "learning_rate": 0.00010393708894809324, + "loss": 0.3654, + "step": 37320 + }, + { + "epoch": 1.441368392602031, + "grad_norm": 1.1231745481491089, + "learning_rate": 0.00010391134792849144, + "loss": 0.2594, + "step": 37330 + }, + { + "epoch": 1.4417545078960576, + "grad_norm": 0.2897786498069763, + "learning_rate": 
0.00010388560690888967, + "loss": 0.3483, + "step": 37340 + }, + { + "epoch": 1.4421406231900846, + "grad_norm": 0.07170752435922623, + "learning_rate": 0.00010385986588928789, + "loss": 0.1462, + "step": 37350 + }, + { + "epoch": 1.4425267384841114, + "grad_norm": 3.326099395751953, + "learning_rate": 0.00010383412486968609, + "loss": 0.3785, + "step": 37360 + }, + { + "epoch": 1.4429128537781382, + "grad_norm": 0.5063263773918152, + "learning_rate": 0.0001038083838500843, + "loss": 0.2974, + "step": 37370 + }, + { + "epoch": 1.443298969072165, + "grad_norm": 1.160088062286377, + "learning_rate": 0.00010378264283048252, + "loss": 0.3003, + "step": 37380 + }, + { + "epoch": 1.4436850843661917, + "grad_norm": 0.7043284177780151, + "learning_rate": 0.00010375690181088073, + "loss": 0.3193, + "step": 37390 + }, + { + "epoch": 1.4440711996602185, + "grad_norm": 0.2916620373725891, + "learning_rate": 0.00010373116079127896, + "loss": 0.235, + "step": 37400 + }, + { + "epoch": 1.4444573149542452, + "grad_norm": 2.1940219402313232, + "learning_rate": 0.00010370541977167716, + "loss": 0.2657, + "step": 37410 + }, + { + "epoch": 1.4448434302482722, + "grad_norm": 1.849794626235962, + "learning_rate": 0.00010367967875207539, + "loss": 0.3121, + "step": 37420 + }, + { + "epoch": 1.445229545542299, + "grad_norm": 1.1139589548110962, + "learning_rate": 0.00010365393773247359, + "loss": 0.2179, + "step": 37430 + }, + { + "epoch": 1.4456156608363258, + "grad_norm": 0.917142927646637, + "learning_rate": 0.0001036281967128718, + "loss": 0.2406, + "step": 37440 + }, + { + "epoch": 1.4460017761303525, + "grad_norm": 1.6973673105239868, + "learning_rate": 0.00010360245569327001, + "loss": 0.3236, + "step": 37450 + }, + { + "epoch": 1.4463878914243793, + "grad_norm": 0.3979933559894562, + "learning_rate": 0.00010357671467366823, + "loss": 0.1477, + "step": 37460 + }, + { + "epoch": 1.446774006718406, + "grad_norm": 0.8938451409339905, + "learning_rate": 0.00010355097365406645, + "loss": 0.116, + "step": 37470 + }, + { + "epoch": 1.4471601220124328, + "grad_norm": 1.068787693977356, + "learning_rate": 0.00010352523263446465, + "loss": 0.2342, + "step": 37480 + }, + { + "epoch": 1.4475462373064598, + "grad_norm": 1.1985591650009155, + "learning_rate": 0.00010349949161486288, + "loss": 0.2423, + "step": 37490 + }, + { + "epoch": 1.4479323526004866, + "grad_norm": 1.0383973121643066, + "learning_rate": 0.00010347375059526108, + "loss": 0.3059, + "step": 37500 + }, + { + "epoch": 1.4483184678945134, + "grad_norm": 0.26652297377586365, + "learning_rate": 0.0001034480095756593, + "loss": 0.262, + "step": 37510 + }, + { + "epoch": 1.4487045831885401, + "grad_norm": 1.8498083353042603, + "learning_rate": 0.0001034222685560575, + "loss": 0.2122, + "step": 37520 + }, + { + "epoch": 1.4490906984825669, + "grad_norm": 1.2896068096160889, + "learning_rate": 0.00010339652753645572, + "loss": 0.2939, + "step": 37530 + }, + { + "epoch": 1.4494768137765937, + "grad_norm": 1.423343300819397, + "learning_rate": 0.00010337078651685395, + "loss": 0.3375, + "step": 37540 + }, + { + "epoch": 1.4498629290706204, + "grad_norm": 1.8248246908187866, + "learning_rate": 0.00010334504549725215, + "loss": 0.3534, + "step": 37550 + }, + { + "epoch": 1.4502490443646474, + "grad_norm": 2.3713393211364746, + "learning_rate": 0.00010331930447765037, + "loss": 0.2795, + "step": 37560 + }, + { + "epoch": 1.450635159658674, + "grad_norm": 2.1431849002838135, + "learning_rate": 0.00010329356345804857, + "loss": 0.2503, + "step": 37570 + }, + { + 
"epoch": 1.451021274952701, + "grad_norm": 1.6521297693252563, + "learning_rate": 0.0001032678224384468, + "loss": 0.1789, + "step": 37580 + }, + { + "epoch": 1.4514073902467277, + "grad_norm": 1.5589754581451416, + "learning_rate": 0.00010324208141884501, + "loss": 0.3201, + "step": 37590 + }, + { + "epoch": 1.4517935055407545, + "grad_norm": 0.4339803159236908, + "learning_rate": 0.00010321634039924321, + "loss": 0.2964, + "step": 37600 + }, + { + "epoch": 1.4521796208347812, + "grad_norm": 0.30054792761802673, + "learning_rate": 0.00010319059937964144, + "loss": 0.3477, + "step": 37610 + }, + { + "epoch": 1.452565736128808, + "grad_norm": 2.2864038944244385, + "learning_rate": 0.00010316485836003964, + "loss": 0.2849, + "step": 37620 + }, + { + "epoch": 1.4529518514228348, + "grad_norm": 1.8392651081085205, + "learning_rate": 0.00010313911734043787, + "loss": 0.2089, + "step": 37630 + }, + { + "epoch": 1.4533379667168616, + "grad_norm": 1.0444347858428955, + "learning_rate": 0.00010311337632083607, + "loss": 0.2306, + "step": 37640 + }, + { + "epoch": 1.4537240820108885, + "grad_norm": 1.265647053718567, + "learning_rate": 0.00010308763530123429, + "loss": 0.2192, + "step": 37650 + }, + { + "epoch": 1.4541101973049153, + "grad_norm": 1.0464913845062256, + "learning_rate": 0.0001030618942816325, + "loss": 0.2857, + "step": 37660 + }, + { + "epoch": 1.454496312598942, + "grad_norm": 0.5931240320205688, + "learning_rate": 0.0001030361532620307, + "loss": 0.1751, + "step": 37670 + }, + { + "epoch": 1.4548824278929688, + "grad_norm": 2.0155346393585205, + "learning_rate": 0.00010301041224242893, + "loss": 0.4124, + "step": 37680 + }, + { + "epoch": 1.4552685431869956, + "grad_norm": 2.843345880508423, + "learning_rate": 0.00010298467122282713, + "loss": 0.2119, + "step": 37690 + }, + { + "epoch": 1.4556546584810224, + "grad_norm": 1.977612018585205, + "learning_rate": 0.00010295893020322536, + "loss": 0.271, + "step": 37700 + }, + { + "epoch": 1.4560407737750491, + "grad_norm": 1.2862237691879272, + "learning_rate": 0.00010293318918362356, + "loss": 0.4147, + "step": 37710 + }, + { + "epoch": 1.4564268890690761, + "grad_norm": 0.8235340118408203, + "learning_rate": 0.00010290744816402179, + "loss": 0.1918, + "step": 37720 + }, + { + "epoch": 1.456813004363103, + "grad_norm": 1.0351753234863281, + "learning_rate": 0.00010288170714442, + "loss": 0.2343, + "step": 37730 + }, + { + "epoch": 1.4571991196571297, + "grad_norm": 0.2567160725593567, + "learning_rate": 0.0001028559661248182, + "loss": 0.2664, + "step": 37740 + }, + { + "epoch": 1.4575852349511564, + "grad_norm": 2.7498669624328613, + "learning_rate": 0.00010283022510521643, + "loss": 0.2839, + "step": 37750 + }, + { + "epoch": 1.4579713502451832, + "grad_norm": 0.7125422358512878, + "learning_rate": 0.00010280448408561463, + "loss": 0.2411, + "step": 37760 + }, + { + "epoch": 1.45835746553921, + "grad_norm": 0.720761239528656, + "learning_rate": 0.00010277874306601285, + "loss": 0.315, + "step": 37770 + }, + { + "epoch": 1.4587435808332367, + "grad_norm": 3.188563346862793, + "learning_rate": 0.00010275300204641108, + "loss": 0.3945, + "step": 37780 + }, + { + "epoch": 1.4591296961272637, + "grad_norm": 1.23422110080719, + "learning_rate": 0.00010272726102680928, + "loss": 0.1828, + "step": 37790 + }, + { + "epoch": 1.4595158114212903, + "grad_norm": 1.1572456359863281, + "learning_rate": 0.00010270152000720749, + "loss": 0.1687, + "step": 37800 + }, + { + "epoch": 1.4599019267153173, + "grad_norm": 0.6565262079238892, + 
"learning_rate": 0.00010267577898760569, + "loss": 0.3145, + "step": 37810 + }, + { + "epoch": 1.460288042009344, + "grad_norm": 0.6239646673202515, + "learning_rate": 0.00010265003796800392, + "loss": 0.2381, + "step": 37820 + }, + { + "epoch": 1.4606741573033708, + "grad_norm": 0.7578912377357483, + "learning_rate": 0.00010262429694840212, + "loss": 0.2019, + "step": 37830 + }, + { + "epoch": 1.4610602725973976, + "grad_norm": 1.0978549718856812, + "learning_rate": 0.00010259855592880035, + "loss": 0.2477, + "step": 37840 + }, + { + "epoch": 1.4614463878914243, + "grad_norm": 0.3615519404411316, + "learning_rate": 0.00010257281490919857, + "loss": 0.4241, + "step": 37850 + }, + { + "epoch": 1.461832503185451, + "grad_norm": 0.10355047881603241, + "learning_rate": 0.00010254707388959677, + "loss": 0.1181, + "step": 37860 + }, + { + "epoch": 1.4622186184794779, + "grad_norm": 0.23957425355911255, + "learning_rate": 0.00010252133286999499, + "loss": 0.1675, + "step": 37870 + }, + { + "epoch": 1.4626047337735049, + "grad_norm": 1.7581062316894531, + "learning_rate": 0.00010249559185039318, + "loss": 0.241, + "step": 37880 + }, + { + "epoch": 1.4629908490675316, + "grad_norm": 1.660989761352539, + "learning_rate": 0.00010246985083079141, + "loss": 0.2099, + "step": 37890 + }, + { + "epoch": 1.4633769643615584, + "grad_norm": 1.1359142065048218, + "learning_rate": 0.00010244410981118964, + "loss": 0.3686, + "step": 37900 + }, + { + "epoch": 1.4637630796555852, + "grad_norm": 0.9489149451255798, + "learning_rate": 0.00010241836879158784, + "loss": 0.3616, + "step": 37910 + }, + { + "epoch": 1.464149194949612, + "grad_norm": 1.5209956169128418, + "learning_rate": 0.00010239262777198607, + "loss": 0.3159, + "step": 37920 + }, + { + "epoch": 1.4645353102436387, + "grad_norm": 0.848943293094635, + "learning_rate": 0.00010236688675238427, + "loss": 0.3228, + "step": 37930 + }, + { + "epoch": 1.4649214255376655, + "grad_norm": 1.1476777791976929, + "learning_rate": 0.00010234114573278248, + "loss": 0.1873, + "step": 37940 + }, + { + "epoch": 1.4653075408316925, + "grad_norm": 1.4093862771987915, + "learning_rate": 0.00010231540471318068, + "loss": 0.5554, + "step": 37950 + }, + { + "epoch": 1.465693656125719, + "grad_norm": 0.17349161207675934, + "learning_rate": 0.0001022896636935789, + "loss": 0.2992, + "step": 37960 + }, + { + "epoch": 1.466079771419746, + "grad_norm": 0.28885993361473083, + "learning_rate": 0.00010226392267397713, + "loss": 0.2618, + "step": 37970 + }, + { + "epoch": 1.4664658867137728, + "grad_norm": 1.1087830066680908, + "learning_rate": 0.00010223818165437533, + "loss": 0.2647, + "step": 37980 + }, + { + "epoch": 1.4668520020077995, + "grad_norm": 0.08338876068592072, + "learning_rate": 0.00010221244063477356, + "loss": 0.3057, + "step": 37990 + }, + { + "epoch": 1.4672381173018263, + "grad_norm": 2.159362316131592, + "learning_rate": 0.00010218669961517176, + "loss": 0.1499, + "step": 38000 + }, + { + "epoch": 1.467624232595853, + "grad_norm": 0.8207988142967224, + "learning_rate": 0.00010216095859556997, + "loss": 0.1482, + "step": 38010 + }, + { + "epoch": 1.46801034788988, + "grad_norm": 0.5458611845970154, + "learning_rate": 0.00010213521757596818, + "loss": 0.2794, + "step": 38020 + }, + { + "epoch": 1.4683964631839066, + "grad_norm": 1.6955047845840454, + "learning_rate": 0.0001021094765563664, + "loss": 0.2627, + "step": 38030 + }, + { + "epoch": 1.4687825784779336, + "grad_norm": 0.9796440601348877, + "learning_rate": 0.00010208373553676463, + "loss": 0.1924, + 
"step": 38040 + }, + { + "epoch": 1.4691686937719604, + "grad_norm": 0.9906508326530457, + "learning_rate": 0.00010205799451716282, + "loss": 0.1597, + "step": 38050 + }, + { + "epoch": 1.4695548090659871, + "grad_norm": 1.8590656518936157, + "learning_rate": 0.00010203225349756105, + "loss": 0.2874, + "step": 38060 + }, + { + "epoch": 1.469940924360014, + "grad_norm": 1.6403672695159912, + "learning_rate": 0.00010200651247795925, + "loss": 0.2926, + "step": 38070 + }, + { + "epoch": 1.4703270396540407, + "grad_norm": 0.4410895109176636, + "learning_rate": 0.00010198077145835746, + "loss": 0.1368, + "step": 38080 + }, + { + "epoch": 1.4707131549480674, + "grad_norm": 2.4955286979675293, + "learning_rate": 0.00010195503043875569, + "loss": 0.3195, + "step": 38090 + }, + { + "epoch": 1.4710992702420942, + "grad_norm": 1.1799029111862183, + "learning_rate": 0.00010192928941915389, + "loss": 0.1867, + "step": 38100 + }, + { + "epoch": 1.4714853855361212, + "grad_norm": 0.7959389090538025, + "learning_rate": 0.00010190354839955212, + "loss": 0.2988, + "step": 38110 + }, + { + "epoch": 1.471871500830148, + "grad_norm": 2.7750720977783203, + "learning_rate": 0.00010187780737995032, + "loss": 0.1886, + "step": 38120 + }, + { + "epoch": 1.4722576161241747, + "grad_norm": 1.5834373235702515, + "learning_rate": 0.00010185206636034854, + "loss": 0.3542, + "step": 38130 + }, + { + "epoch": 1.4726437314182015, + "grad_norm": 1.9757747650146484, + "learning_rate": 0.00010182632534074674, + "loss": 0.302, + "step": 38140 + }, + { + "epoch": 1.4730298467122283, + "grad_norm": 1.1752204895019531, + "learning_rate": 0.00010180058432114497, + "loss": 0.3535, + "step": 38150 + }, + { + "epoch": 1.473415962006255, + "grad_norm": 0.3877789378166199, + "learning_rate": 0.00010177484330154318, + "loss": 0.2508, + "step": 38160 + }, + { + "epoch": 1.4738020773002818, + "grad_norm": 0.13749545812606812, + "learning_rate": 0.00010174910228194138, + "loss": 0.2141, + "step": 38170 + }, + { + "epoch": 1.4741881925943088, + "grad_norm": 1.3663641214370728, + "learning_rate": 0.00010172336126233961, + "loss": 0.3231, + "step": 38180 + }, + { + "epoch": 1.4745743078883353, + "grad_norm": 1.6267393827438354, + "learning_rate": 0.00010169762024273781, + "loss": 0.3233, + "step": 38190 + }, + { + "epoch": 1.4749604231823623, + "grad_norm": 0.2993789315223694, + "learning_rate": 0.00010167187922313604, + "loss": 0.28, + "step": 38200 + }, + { + "epoch": 1.475346538476389, + "grad_norm": 0.16693222522735596, + "learning_rate": 0.00010164613820353424, + "loss": 0.188, + "step": 38210 + }, + { + "epoch": 1.4757326537704158, + "grad_norm": 0.6939979791641235, + "learning_rate": 0.00010162039718393246, + "loss": 0.263, + "step": 38220 + }, + { + "epoch": 1.4761187690644426, + "grad_norm": 0.37910985946655273, + "learning_rate": 0.00010159465616433068, + "loss": 0.1963, + "step": 38230 + }, + { + "epoch": 1.4765048843584694, + "grad_norm": 1.782188892364502, + "learning_rate": 0.00010156891514472888, + "loss": 0.3814, + "step": 38240 + }, + { + "epoch": 1.4768909996524964, + "grad_norm": 1.159278392791748, + "learning_rate": 0.0001015431741251271, + "loss": 0.2043, + "step": 38250 + }, + { + "epoch": 1.477277114946523, + "grad_norm": 1.09486985206604, + "learning_rate": 0.0001015174331055253, + "loss": 0.2128, + "step": 38260 + }, + { + "epoch": 1.47766323024055, + "grad_norm": 0.36655205488204956, + "learning_rate": 0.00010149169208592353, + "loss": 0.298, + "step": 38270 + }, + { + "epoch": 1.4780493455345767, + "grad_norm": 
0.8908851742744446, + "learning_rate": 0.00010146595106632176, + "loss": 0.3707, + "step": 38280 + }, + { + "epoch": 1.4784354608286034, + "grad_norm": 0.251338928937912, + "learning_rate": 0.00010144021004671996, + "loss": 0.2495, + "step": 38290 + }, + { + "epoch": 1.4788215761226302, + "grad_norm": 1.0613712072372437, + "learning_rate": 0.00010141446902711817, + "loss": 0.2112, + "step": 38300 + }, + { + "epoch": 1.479207691416657, + "grad_norm": 1.459799885749817, + "learning_rate": 0.00010138872800751637, + "loss": 0.2595, + "step": 38310 + }, + { + "epoch": 1.4795938067106837, + "grad_norm": 2.6898603439331055, + "learning_rate": 0.0001013629869879146, + "loss": 0.2758, + "step": 38320 + }, + { + "epoch": 1.4799799220047105, + "grad_norm": 0.19628773629665375, + "learning_rate": 0.0001013372459683128, + "loss": 0.1843, + "step": 38330 + }, + { + "epoch": 1.4803660372987375, + "grad_norm": 2.0871078968048096, + "learning_rate": 0.00010131150494871102, + "loss": 0.1661, + "step": 38340 + }, + { + "epoch": 1.4807521525927643, + "grad_norm": 0.7689336538314819, + "learning_rate": 0.00010128576392910925, + "loss": 0.157, + "step": 38350 + }, + { + "epoch": 1.481138267886791, + "grad_norm": 1.4471644163131714, + "learning_rate": 0.00010126002290950745, + "loss": 0.2159, + "step": 38360 + }, + { + "epoch": 1.4815243831808178, + "grad_norm": 2.198559522628784, + "learning_rate": 0.00010123428188990566, + "loss": 0.5609, + "step": 38370 + }, + { + "epoch": 1.4819104984748446, + "grad_norm": 0.16012130677700043, + "learning_rate": 0.00010120854087030386, + "loss": 0.1979, + "step": 38380 + }, + { + "epoch": 1.4822966137688713, + "grad_norm": 1.0222225189208984, + "learning_rate": 0.00010118279985070209, + "loss": 0.1547, + "step": 38390 + }, + { + "epoch": 1.482682729062898, + "grad_norm": 2.7192416191101074, + "learning_rate": 0.00010115705883110032, + "loss": 0.4942, + "step": 38400 + }, + { + "epoch": 1.483068844356925, + "grad_norm": 1.887128472328186, + "learning_rate": 0.00010113131781149852, + "loss": 0.177, + "step": 38410 + }, + { + "epoch": 1.4834549596509516, + "grad_norm": 2.7628560066223145, + "learning_rate": 0.00010110557679189674, + "loss": 0.2931, + "step": 38420 + }, + { + "epoch": 1.4838410749449786, + "grad_norm": 0.3852572739124298, + "learning_rate": 0.00010107983577229494, + "loss": 0.3392, + "step": 38430 + }, + { + "epoch": 1.4842271902390054, + "grad_norm": 1.047448992729187, + "learning_rate": 0.00010105409475269316, + "loss": 0.3741, + "step": 38440 + }, + { + "epoch": 1.4846133055330322, + "grad_norm": 1.4930602312088013, + "learning_rate": 0.00010102835373309136, + "loss": 0.2564, + "step": 38450 + }, + { + "epoch": 1.484999420827059, + "grad_norm": 1.3012608289718628, + "learning_rate": 0.00010100261271348958, + "loss": 0.3376, + "step": 38460 + }, + { + "epoch": 1.4853855361210857, + "grad_norm": 2.163942337036133, + "learning_rate": 0.00010097687169388781, + "loss": 0.3548, + "step": 38470 + }, + { + "epoch": 1.4857716514151125, + "grad_norm": 1.864189624786377, + "learning_rate": 0.00010095113067428601, + "loss": 0.165, + "step": 38480 + }, + { + "epoch": 1.4861577667091392, + "grad_norm": 0.5661312341690063, + "learning_rate": 0.00010092538965468424, + "loss": 0.1764, + "step": 38490 + }, + { + "epoch": 1.4865438820031662, + "grad_norm": 0.13517481088638306, + "learning_rate": 0.00010089964863508244, + "loss": 0.5223, + "step": 38500 + }, + { + "epoch": 1.486929997297193, + "grad_norm": 0.665143609046936, + "learning_rate": 0.00010087390761548065, + "loss": 
0.1943, + "step": 38510 + }, + { + "epoch": 1.4873161125912198, + "grad_norm": 1.2759610414505005, + "learning_rate": 0.00010084816659587885, + "loss": 0.3023, + "step": 38520 + }, + { + "epoch": 1.4877022278852465, + "grad_norm": 1.3209573030471802, + "learning_rate": 0.00010082242557627708, + "loss": 0.1101, + "step": 38530 + }, + { + "epoch": 1.4880883431792733, + "grad_norm": 1.2501552104949951, + "learning_rate": 0.0001007966845566753, + "loss": 0.3931, + "step": 38540 + }, + { + "epoch": 1.4884744584733, + "grad_norm": 0.6862074732780457, + "learning_rate": 0.0001007709435370735, + "loss": 0.4093, + "step": 38550 + }, + { + "epoch": 1.4888605737673268, + "grad_norm": 1.90501070022583, + "learning_rate": 0.00010074520251747173, + "loss": 0.2553, + "step": 38560 + }, + { + "epoch": 1.4892466890613538, + "grad_norm": 1.6547000408172607, + "learning_rate": 0.00010071946149786993, + "loss": 0.1558, + "step": 38570 + }, + { + "epoch": 1.4896328043553806, + "grad_norm": 0.35097751021385193, + "learning_rate": 0.00010069372047826814, + "loss": 0.2253, + "step": 38580 + }, + { + "epoch": 1.4900189196494074, + "grad_norm": 0.15141837298870087, + "learning_rate": 0.00010066797945866637, + "loss": 0.2124, + "step": 38590 + }, + { + "epoch": 1.4904050349434341, + "grad_norm": 1.7070786952972412, + "learning_rate": 0.00010064223843906457, + "loss": 0.3181, + "step": 38600 + }, + { + "epoch": 1.490791150237461, + "grad_norm": 0.5400305390357971, + "learning_rate": 0.0001006164974194628, + "loss": 0.3203, + "step": 38610 + }, + { + "epoch": 1.4911772655314877, + "grad_norm": 1.6475050449371338, + "learning_rate": 0.000100590756399861, + "loss": 0.2965, + "step": 38620 + }, + { + "epoch": 1.4915633808255144, + "grad_norm": 0.21372176706790924, + "learning_rate": 0.00010056501538025922, + "loss": 0.1456, + "step": 38630 + }, + { + "epoch": 1.4919494961195414, + "grad_norm": 0.24179309606552124, + "learning_rate": 0.00010053927436065742, + "loss": 0.2077, + "step": 38640 + }, + { + "epoch": 1.492335611413568, + "grad_norm": 1.2079945802688599, + "learning_rate": 0.00010051353334105564, + "loss": 0.1554, + "step": 38650 + }, + { + "epoch": 1.492721726707595, + "grad_norm": 1.8915836811065674, + "learning_rate": 0.00010048779232145386, + "loss": 0.3815, + "step": 38660 + }, + { + "epoch": 1.4931078420016217, + "grad_norm": 1.8128750324249268, + "learning_rate": 0.00010046205130185206, + "loss": 0.1944, + "step": 38670 + }, + { + "epoch": 1.4934939572956485, + "grad_norm": 2.0955018997192383, + "learning_rate": 0.00010043631028225029, + "loss": 0.2468, + "step": 38680 + }, + { + "epoch": 1.4938800725896753, + "grad_norm": 3.0578064918518066, + "learning_rate": 0.00010041056926264849, + "loss": 0.2186, + "step": 38690 + }, + { + "epoch": 1.494266187883702, + "grad_norm": 0.7817699909210205, + "learning_rate": 0.00010038482824304672, + "loss": 0.1154, + "step": 38700 + }, + { + "epoch": 1.4946523031777288, + "grad_norm": 6.556485176086426, + "learning_rate": 0.00010035908722344493, + "loss": 0.4752, + "step": 38710 + }, + { + "epoch": 1.4950384184717556, + "grad_norm": 2.1970055103302, + "learning_rate": 0.00010033334620384314, + "loss": 0.3234, + "step": 38720 + }, + { + "epoch": 1.4954245337657825, + "grad_norm": 1.8929531574249268, + "learning_rate": 0.00010030760518424136, + "loss": 0.1605, + "step": 38730 + }, + { + "epoch": 1.4958106490598093, + "grad_norm": 0.3992670178413391, + "learning_rate": 0.00010028186416463956, + "loss": 0.2417, + "step": 38740 + }, + { + "epoch": 1.496196764353836, + 
"grad_norm": 1.7508872747421265, + "learning_rate": 0.00010025612314503778, + "loss": 0.329, + "step": 38750 + }, + { + "epoch": 1.4965828796478629, + "grad_norm": 0.24950659275054932, + "learning_rate": 0.00010023038212543598, + "loss": 0.1159, + "step": 38760 + }, + { + "epoch": 1.4969689949418896, + "grad_norm": 1.5187748670578003, + "learning_rate": 0.00010020464110583421, + "loss": 0.1827, + "step": 38770 + }, + { + "epoch": 1.4973551102359164, + "grad_norm": 1.1223959922790527, + "learning_rate": 0.00010017890008623242, + "loss": 0.1935, + "step": 38780 + }, + { + "epoch": 1.4977412255299432, + "grad_norm": 0.9355156421661377, + "learning_rate": 0.00010015315906663064, + "loss": 0.2225, + "step": 38790 + }, + { + "epoch": 1.4981273408239701, + "grad_norm": 0.4834296405315399, + "learning_rate": 0.00010012741804702885, + "loss": 0.3077, + "step": 38800 + }, + { + "epoch": 1.498513456117997, + "grad_norm": 1.0983386039733887, + "learning_rate": 0.00010010167702742705, + "loss": 0.183, + "step": 38810 + }, + { + "epoch": 1.4988995714120237, + "grad_norm": 0.8350847959518433, + "learning_rate": 0.00010007593600782528, + "loss": 0.3972, + "step": 38820 + }, + { + "epoch": 1.4992856867060504, + "grad_norm": 0.8200152516365051, + "learning_rate": 0.00010005019498822348, + "loss": 0.2043, + "step": 38830 + }, + { + "epoch": 1.4996718020000772, + "grad_norm": 0.9136185050010681, + "learning_rate": 0.0001000244539686217, + "loss": 0.1727, + "step": 38840 + }, + { + "epoch": 1.500057917294104, + "grad_norm": 0.8466988205909729, + "learning_rate": 9.999871294901992e-05, + "loss": 0.2119, + "step": 38850 + }, + { + "epoch": 1.5004440325881307, + "grad_norm": 0.4185144305229187, + "learning_rate": 9.997297192941813e-05, + "loss": 0.4046, + "step": 38860 + }, + { + "epoch": 1.5008301478821577, + "grad_norm": 2.232264518737793, + "learning_rate": 9.994723090981634e-05, + "loss": 0.304, + "step": 38870 + }, + { + "epoch": 1.5012162631761843, + "grad_norm": 0.13785889744758606, + "learning_rate": 9.992148989021456e-05, + "loss": 0.1045, + "step": 38880 + }, + { + "epoch": 1.5016023784702113, + "grad_norm": 1.6270711421966553, + "learning_rate": 9.989574887061277e-05, + "loss": 0.2791, + "step": 38890 + }, + { + "epoch": 1.501988493764238, + "grad_norm": 0.08486157655715942, + "learning_rate": 9.987000785101098e-05, + "loss": 0.178, + "step": 38900 + }, + { + "epoch": 1.5023746090582648, + "grad_norm": 1.862197995185852, + "learning_rate": 9.98442668314092e-05, + "loss": 0.2461, + "step": 38910 + }, + { + "epoch": 1.5027607243522916, + "grad_norm": 2.752070903778076, + "learning_rate": 9.981852581180741e-05, + "loss": 0.4299, + "step": 38920 + }, + { + "epoch": 1.5031468396463183, + "grad_norm": 2.08542537689209, + "learning_rate": 9.979278479220562e-05, + "loss": 0.2898, + "step": 38930 + }, + { + "epoch": 1.5035329549403453, + "grad_norm": 0.8629382848739624, + "learning_rate": 9.976704377260384e-05, + "loss": 0.226, + "step": 38940 + }, + { + "epoch": 1.5039190702343719, + "grad_norm": 0.5178211331367493, + "learning_rate": 9.974130275300205e-05, + "loss": 0.2444, + "step": 38950 + }, + { + "epoch": 1.5043051855283989, + "grad_norm": 0.25908491015434265, + "learning_rate": 9.971556173340026e-05, + "loss": 0.1643, + "step": 38960 + }, + { + "epoch": 1.5046913008224254, + "grad_norm": 1.1818209886550903, + "learning_rate": 9.968982071379848e-05, + "loss": 0.3187, + "step": 38970 + }, + { + "epoch": 1.5050774161164524, + "grad_norm": 0.13186976313591003, + "learning_rate": 9.966407969419669e-05, + 
"loss": 0.1982, + "step": 38980 + }, + { + "epoch": 1.5054635314104792, + "grad_norm": 0.18049825727939606, + "learning_rate": 9.963833867459492e-05, + "loss": 0.1288, + "step": 38990 + }, + { + "epoch": 1.505849646704506, + "grad_norm": 0.30261853337287903, + "learning_rate": 9.961259765499312e-05, + "loss": 0.1704, + "step": 39000 + }, + { + "epoch": 1.5062357619985327, + "grad_norm": 2.1437973976135254, + "learning_rate": 9.958685663539133e-05, + "loss": 0.1272, + "step": 39010 + }, + { + "epoch": 1.5066218772925595, + "grad_norm": 2.2844271659851074, + "learning_rate": 9.956111561578954e-05, + "loss": 0.1314, + "step": 39020 + }, + { + "epoch": 1.5070079925865865, + "grad_norm": 1.5845297574996948, + "learning_rate": 9.953537459618776e-05, + "loss": 0.2023, + "step": 39030 + }, + { + "epoch": 1.507394107880613, + "grad_norm": 0.7256748676300049, + "learning_rate": 9.950963357658597e-05, + "loss": 0.4165, + "step": 39040 + }, + { + "epoch": 1.50778022317464, + "grad_norm": 1.7597005367279053, + "learning_rate": 9.948389255698418e-05, + "loss": 0.4209, + "step": 39050 + }, + { + "epoch": 1.5081663384686668, + "grad_norm": 1.5487171411514282, + "learning_rate": 9.945815153738241e-05, + "loss": 0.1918, + "step": 39060 + }, + { + "epoch": 1.5085524537626935, + "grad_norm": 0.1656871736049652, + "learning_rate": 9.943241051778062e-05, + "loss": 0.3513, + "step": 39070 + }, + { + "epoch": 1.5089385690567203, + "grad_norm": 0.1451992392539978, + "learning_rate": 9.940666949817882e-05, + "loss": 0.1922, + "step": 39080 + }, + { + "epoch": 1.509324684350747, + "grad_norm": 1.1572967767715454, + "learning_rate": 9.938092847857704e-05, + "loss": 0.4263, + "step": 39090 + }, + { + "epoch": 1.509710799644774, + "grad_norm": 0.8721522092819214, + "learning_rate": 9.935518745897525e-05, + "loss": 0.3499, + "step": 39100 + }, + { + "epoch": 1.5100969149388006, + "grad_norm": 2.8486688137054443, + "learning_rate": 9.932944643937346e-05, + "loss": 0.3567, + "step": 39110 + }, + { + "epoch": 1.5104830302328276, + "grad_norm": 1.1864535808563232, + "learning_rate": 9.930370541977169e-05, + "loss": 0.2546, + "step": 39120 + }, + { + "epoch": 1.5108691455268544, + "grad_norm": 0.4636247158050537, + "learning_rate": 9.92779644001699e-05, + "loss": 0.2761, + "step": 39130 + }, + { + "epoch": 1.5112552608208811, + "grad_norm": 0.6326389908790588, + "learning_rate": 9.925222338056812e-05, + "loss": 0.252, + "step": 39140 + }, + { + "epoch": 1.511641376114908, + "grad_norm": 2.1110761165618896, + "learning_rate": 9.922648236096632e-05, + "loss": 0.5427, + "step": 39150 + }, + { + "epoch": 1.5120274914089347, + "grad_norm": 1.0927456617355347, + "learning_rate": 9.920074134136453e-05, + "loss": 0.3336, + "step": 39160 + }, + { + "epoch": 1.5124136067029617, + "grad_norm": 3.6224989891052246, + "learning_rate": 9.917500032176274e-05, + "loss": 0.3689, + "step": 39170 + }, + { + "epoch": 1.5127997219969882, + "grad_norm": 2.422492265701294, + "learning_rate": 9.914925930216097e-05, + "loss": 0.3924, + "step": 39180 + }, + { + "epoch": 1.5131858372910152, + "grad_norm": 0.22901678085327148, + "learning_rate": 9.912351828255918e-05, + "loss": 0.3082, + "step": 39190 + }, + { + "epoch": 1.5135719525850417, + "grad_norm": 0.581598162651062, + "learning_rate": 9.90977772629574e-05, + "loss": 0.3007, + "step": 39200 + }, + { + "epoch": 1.5139580678790687, + "grad_norm": 0.7196664214134216, + "learning_rate": 9.907203624335561e-05, + "loss": 0.2425, + "step": 39210 + }, + { + "epoch": 1.5143441831730955, + "grad_norm": 
2.5246760845184326, + "learning_rate": 9.904629522375381e-05, + "loss": 0.3085, + "step": 39220 + }, + { + "epoch": 1.5147302984671223, + "grad_norm": 1.8397210836410522, + "learning_rate": 9.902055420415202e-05, + "loss": 0.2388, + "step": 39230 + }, + { + "epoch": 1.515116413761149, + "grad_norm": 0.5940410494804382, + "learning_rate": 9.899481318455025e-05, + "loss": 0.1184, + "step": 39240 + }, + { + "epoch": 1.5155025290551758, + "grad_norm": 1.5379250049591064, + "learning_rate": 9.896907216494846e-05, + "loss": 0.3253, + "step": 39250 + }, + { + "epoch": 1.5158886443492028, + "grad_norm": 0.8007088303565979, + "learning_rate": 9.894333114534668e-05, + "loss": 0.3057, + "step": 39260 + }, + { + "epoch": 1.5162747596432293, + "grad_norm": 0.9321600198745728, + "learning_rate": 9.891759012574489e-05, + "loss": 0.3249, + "step": 39270 + }, + { + "epoch": 1.5166608749372563, + "grad_norm": 1.5519977807998657, + "learning_rate": 9.88918491061431e-05, + "loss": 0.3368, + "step": 39280 + }, + { + "epoch": 1.517046990231283, + "grad_norm": 0.37695613503456116, + "learning_rate": 9.88661080865413e-05, + "loss": 0.1305, + "step": 39290 + }, + { + "epoch": 1.5174331055253099, + "grad_norm": 1.9956984519958496, + "learning_rate": 9.884036706693952e-05, + "loss": 0.4854, + "step": 39300 + }, + { + "epoch": 1.5178192208193366, + "grad_norm": 1.6110823154449463, + "learning_rate": 9.881462604733774e-05, + "loss": 0.2504, + "step": 39310 + }, + { + "epoch": 1.5182053361133634, + "grad_norm": 0.41702982783317566, + "learning_rate": 9.878888502773596e-05, + "loss": 0.1532, + "step": 39320 + }, + { + "epoch": 1.5185914514073904, + "grad_norm": 2.3595950603485107, + "learning_rate": 9.876314400813417e-05, + "loss": 0.309, + "step": 39330 + }, + { + "epoch": 1.518977566701417, + "grad_norm": 1.1045889854431152, + "learning_rate": 9.873740298853238e-05, + "loss": 0.2858, + "step": 39340 + }, + { + "epoch": 1.519363681995444, + "grad_norm": 1.4641762971878052, + "learning_rate": 9.87116619689306e-05, + "loss": 0.3159, + "step": 39350 + }, + { + "epoch": 1.5197497972894707, + "grad_norm": 1.0977380275726318, + "learning_rate": 9.868592094932881e-05, + "loss": 0.229, + "step": 39360 + }, + { + "epoch": 1.5201359125834975, + "grad_norm": 0.5620018839836121, + "learning_rate": 9.866017992972702e-05, + "loss": 0.2642, + "step": 39370 + }, + { + "epoch": 1.5205220278775242, + "grad_norm": 0.36996108293533325, + "learning_rate": 9.863443891012524e-05, + "loss": 0.2314, + "step": 39380 + }, + { + "epoch": 1.520908143171551, + "grad_norm": 0.9804339408874512, + "learning_rate": 9.860869789052345e-05, + "loss": 0.2399, + "step": 39390 + }, + { + "epoch": 1.521294258465578, + "grad_norm": 0.4157778024673462, + "learning_rate": 9.858295687092166e-05, + "loss": 0.3006, + "step": 39400 + }, + { + "epoch": 1.5216803737596045, + "grad_norm": 0.5548539161682129, + "learning_rate": 9.855721585131988e-05, + "loss": 0.1816, + "step": 39410 + }, + { + "epoch": 1.5220664890536315, + "grad_norm": 0.9476989507675171, + "learning_rate": 9.853147483171809e-05, + "loss": 0.3943, + "step": 39420 + }, + { + "epoch": 1.522452604347658, + "grad_norm": 0.5183500647544861, + "learning_rate": 9.85057338121163e-05, + "loss": 0.1629, + "step": 39430 + }, + { + "epoch": 1.522838719641685, + "grad_norm": 1.4146567583084106, + "learning_rate": 9.847999279251452e-05, + "loss": 0.3829, + "step": 39440 + }, + { + "epoch": 1.5232248349357118, + "grad_norm": 2.4880552291870117, + "learning_rate": 9.845425177291273e-05, + "loss": 0.3052, + "step": 
39450 + }, + { + "epoch": 1.5236109502297386, + "grad_norm": 0.43657195568084717, + "learning_rate": 9.842851075331094e-05, + "loss": 0.1691, + "step": 39460 + }, + { + "epoch": 1.5239970655237653, + "grad_norm": 0.13798825442790985, + "learning_rate": 9.840276973370916e-05, + "loss": 0.3217, + "step": 39470 + }, + { + "epoch": 1.5243831808177921, + "grad_norm": 0.8712138533592224, + "learning_rate": 9.837702871410737e-05, + "loss": 0.2702, + "step": 39480 + }, + { + "epoch": 1.524769296111819, + "grad_norm": 1.155957579612732, + "learning_rate": 9.83512876945056e-05, + "loss": 0.2693, + "step": 39490 + }, + { + "epoch": 1.5251554114058457, + "grad_norm": 1.194615364074707, + "learning_rate": 9.83255466749038e-05, + "loss": 0.1541, + "step": 39500 + }, + { + "epoch": 1.5255415266998726, + "grad_norm": 1.8287533521652222, + "learning_rate": 9.829980565530201e-05, + "loss": 0.222, + "step": 39510 + }, + { + "epoch": 1.5259276419938994, + "grad_norm": 0.5741322636604309, + "learning_rate": 9.827406463570022e-05, + "loss": 0.2348, + "step": 39520 + }, + { + "epoch": 1.5263137572879262, + "grad_norm": 0.21659214794635773, + "learning_rate": 9.824832361609844e-05, + "loss": 0.2698, + "step": 39530 + }, + { + "epoch": 1.526699872581953, + "grad_norm": 0.953101396560669, + "learning_rate": 9.822258259649665e-05, + "loss": 0.1222, + "step": 39540 + }, + { + "epoch": 1.5270859878759797, + "grad_norm": 2.764655113220215, + "learning_rate": 9.819684157689486e-05, + "loss": 0.4735, + "step": 39550 + }, + { + "epoch": 1.5274721031700067, + "grad_norm": 0.8794540166854858, + "learning_rate": 9.817110055729309e-05, + "loss": 0.1588, + "step": 39560 + }, + { + "epoch": 1.5278582184640332, + "grad_norm": 1.0971317291259766, + "learning_rate": 9.814535953769129e-05, + "loss": 0.302, + "step": 39570 + }, + { + "epoch": 1.5282443337580602, + "grad_norm": 0.6677056550979614, + "learning_rate": 9.81196185180895e-05, + "loss": 0.3318, + "step": 39580 + }, + { + "epoch": 1.528630449052087, + "grad_norm": 1.276684045791626, + "learning_rate": 9.809387749848772e-05, + "loss": 0.2329, + "step": 39590 + }, + { + "epoch": 1.5290165643461138, + "grad_norm": 0.9192230701446533, + "learning_rate": 9.806813647888593e-05, + "loss": 0.3657, + "step": 39600 + }, + { + "epoch": 1.5294026796401405, + "grad_norm": 1.15361750125885, + "learning_rate": 9.804239545928414e-05, + "loss": 0.2462, + "step": 39610 + }, + { + "epoch": 1.5297887949341673, + "grad_norm": 1.0608477592468262, + "learning_rate": 9.801665443968237e-05, + "loss": 0.1601, + "step": 39620 + }, + { + "epoch": 1.5301749102281943, + "grad_norm": 0.07210031896829605, + "learning_rate": 9.799091342008058e-05, + "loss": 0.2454, + "step": 39630 + }, + { + "epoch": 1.5305610255222208, + "grad_norm": 0.9941250681877136, + "learning_rate": 9.796517240047878e-05, + "loss": 0.2783, + "step": 39640 + }, + { + "epoch": 1.5309471408162478, + "grad_norm": 1.3414831161499023, + "learning_rate": 9.7939431380877e-05, + "loss": 0.2342, + "step": 39650 + }, + { + "epoch": 1.5313332561102744, + "grad_norm": 1.5854885578155518, + "learning_rate": 9.791369036127521e-05, + "loss": 0.3444, + "step": 39660 + }, + { + "epoch": 1.5317193714043014, + "grad_norm": 1.068955659866333, + "learning_rate": 9.788794934167342e-05, + "loss": 0.2587, + "step": 39670 + }, + { + "epoch": 1.5321054866983281, + "grad_norm": 4.630382537841797, + "learning_rate": 9.786220832207165e-05, + "loss": 0.2821, + "step": 39680 + }, + { + "epoch": 1.532491601992355, + "grad_norm": 1.2920769453048706, + 
"learning_rate": 9.783646730246986e-05, + "loss": 0.2169, + "step": 39690 + }, + { + "epoch": 1.5328777172863817, + "grad_norm": 1.6936739683151245, + "learning_rate": 9.781072628286808e-05, + "loss": 0.2538, + "step": 39700 + }, + { + "epoch": 1.5332638325804084, + "grad_norm": 4.246237277984619, + "learning_rate": 9.778498526326629e-05, + "loss": 0.305, + "step": 39710 + }, + { + "epoch": 1.5336499478744354, + "grad_norm": 2.638601779937744, + "learning_rate": 9.775924424366449e-05, + "loss": 0.2168, + "step": 39720 + }, + { + "epoch": 1.534036063168462, + "grad_norm": 1.4180443286895752, + "learning_rate": 9.77335032240627e-05, + "loss": 0.2139, + "step": 39730 + }, + { + "epoch": 1.534422178462489, + "grad_norm": 1.251378059387207, + "learning_rate": 9.770776220446093e-05, + "loss": 0.2714, + "step": 39740 + }, + { + "epoch": 1.5348082937565157, + "grad_norm": 0.5204187035560608, + "learning_rate": 9.768202118485914e-05, + "loss": 0.2375, + "step": 39750 + }, + { + "epoch": 1.5351944090505425, + "grad_norm": 0.4135078489780426, + "learning_rate": 9.765628016525736e-05, + "loss": 0.442, + "step": 39760 + }, + { + "epoch": 1.5355805243445693, + "grad_norm": 1.9822927713394165, + "learning_rate": 9.763053914565557e-05, + "loss": 0.2238, + "step": 39770 + }, + { + "epoch": 1.535966639638596, + "grad_norm": 1.6851792335510254, + "learning_rate": 9.760479812605378e-05, + "loss": 0.4886, + "step": 39780 + }, + { + "epoch": 1.536352754932623, + "grad_norm": 1.9185349941253662, + "learning_rate": 9.757905710645198e-05, + "loss": 0.4167, + "step": 39790 + }, + { + "epoch": 1.5367388702266496, + "grad_norm": 0.2348870486021042, + "learning_rate": 9.75533160868502e-05, + "loss": 0.1849, + "step": 39800 + }, + { + "epoch": 1.5371249855206766, + "grad_norm": 0.2750287652015686, + "learning_rate": 9.752757506724842e-05, + "loss": 0.4298, + "step": 39810 + }, + { + "epoch": 1.5375111008147033, + "grad_norm": 0.28703558444976807, + "learning_rate": 9.750183404764664e-05, + "loss": 0.1431, + "step": 39820 + }, + { + "epoch": 1.53789721610873, + "grad_norm": 0.743290364742279, + "learning_rate": 9.747609302804485e-05, + "loss": 0.1993, + "step": 39830 + }, + { + "epoch": 1.5382833314027569, + "grad_norm": 0.8678677082061768, + "learning_rate": 9.745035200844306e-05, + "loss": 0.1695, + "step": 39840 + }, + { + "epoch": 1.5386694466967836, + "grad_norm": 0.18160143494606018, + "learning_rate": 9.742461098884128e-05, + "loss": 0.2615, + "step": 39850 + }, + { + "epoch": 1.5390555619908106, + "grad_norm": 0.08404600620269775, + "learning_rate": 9.739886996923947e-05, + "loss": 0.2523, + "step": 39860 + }, + { + "epoch": 1.5394416772848372, + "grad_norm": 3.393118381500244, + "learning_rate": 9.73731289496377e-05, + "loss": 0.3292, + "step": 39870 + }, + { + "epoch": 1.5398277925788642, + "grad_norm": 0.3148004412651062, + "learning_rate": 9.734738793003591e-05, + "loss": 0.0808, + "step": 39880 + }, + { + "epoch": 1.5402139078728907, + "grad_norm": 0.8423801064491272, + "learning_rate": 9.732164691043413e-05, + "loss": 0.1088, + "step": 39890 + }, + { + "epoch": 1.5406000231669177, + "grad_norm": 2.407147169113159, + "learning_rate": 9.729590589083234e-05, + "loss": 0.286, + "step": 39900 + }, + { + "epoch": 1.5409861384609445, + "grad_norm": 0.8560749292373657, + "learning_rate": 9.727016487123055e-05, + "loss": 0.1674, + "step": 39910 + }, + { + "epoch": 1.5413722537549712, + "grad_norm": 0.9724945425987244, + "learning_rate": 9.724442385162877e-05, + "loss": 0.232, + "step": 39920 + }, + { + "epoch": 
1.541758369048998, + "grad_norm": 1.6911234855651855, + "learning_rate": 9.721868283202698e-05, + "loss": 0.3182, + "step": 39930 + }, + { + "epoch": 1.5421444843430248, + "grad_norm": 0.8703460693359375, + "learning_rate": 9.71929418124252e-05, + "loss": 0.3565, + "step": 39940 + }, + { + "epoch": 1.5425305996370517, + "grad_norm": 1.4052613973617554, + "learning_rate": 9.716720079282341e-05, + "loss": 0.2037, + "step": 39950 + }, + { + "epoch": 1.5429167149310783, + "grad_norm": 0.2802957594394684, + "learning_rate": 9.714145977322162e-05, + "loss": 0.2596, + "step": 39960 + }, + { + "epoch": 1.5433028302251053, + "grad_norm": 0.22114449739456177, + "learning_rate": 9.711571875361983e-05, + "loss": 0.2302, + "step": 39970 + }, + { + "epoch": 1.543688945519132, + "grad_norm": 0.8095982074737549, + "learning_rate": 9.708997773401805e-05, + "loss": 0.2428, + "step": 39980 + }, + { + "epoch": 1.5440750608131588, + "grad_norm": 1.0990866422653198, + "learning_rate": 9.706423671441626e-05, + "loss": 0.3669, + "step": 39990 + }, + { + "epoch": 1.5444611761071856, + "grad_norm": 0.8309730291366577, + "learning_rate": 9.703849569481447e-05, + "loss": 0.2038, + "step": 40000 + }, + { + "epoch": 1.5448472914012124, + "grad_norm": 2.09492826461792, + "learning_rate": 9.701275467521269e-05, + "loss": 0.2934, + "step": 40010 + }, + { + "epoch": 1.5452334066952393, + "grad_norm": 0.33550217747688293, + "learning_rate": 9.69870136556109e-05, + "loss": 0.1582, + "step": 40020 + }, + { + "epoch": 1.5456195219892659, + "grad_norm": 1.6839581727981567, + "learning_rate": 9.696127263600911e-05, + "loss": 0.2084, + "step": 40030 + }, + { + "epoch": 1.5460056372832929, + "grad_norm": 1.293013095855713, + "learning_rate": 9.693553161640733e-05, + "loss": 0.1871, + "step": 40040 + }, + { + "epoch": 1.5463917525773194, + "grad_norm": 0.08080088347196579, + "learning_rate": 9.690979059680554e-05, + "loss": 0.4942, + "step": 40050 + }, + { + "epoch": 1.5467778678713464, + "grad_norm": 1.7121747732162476, + "learning_rate": 9.688404957720377e-05, + "loss": 0.4819, + "step": 40060 + }, + { + "epoch": 1.5471639831653732, + "grad_norm": 0.6426690816879272, + "learning_rate": 9.685830855760197e-05, + "loss": 0.2393, + "step": 40070 + }, + { + "epoch": 1.5475500984594, + "grad_norm": 1.5436782836914062, + "learning_rate": 9.683256753800018e-05, + "loss": 0.2666, + "step": 40080 + }, + { + "epoch": 1.547936213753427, + "grad_norm": 1.9316864013671875, + "learning_rate": 9.68068265183984e-05, + "loss": 0.1629, + "step": 40090 + }, + { + "epoch": 1.5483223290474535, + "grad_norm": 0.7503604292869568, + "learning_rate": 9.678108549879661e-05, + "loss": 0.2038, + "step": 40100 + }, + { + "epoch": 1.5487084443414805, + "grad_norm": 0.9826010465621948, + "learning_rate": 9.675534447919482e-05, + "loss": 0.1219, + "step": 40110 + }, + { + "epoch": 1.549094559635507, + "grad_norm": 1.6828583478927612, + "learning_rate": 9.672960345959305e-05, + "loss": 0.2283, + "step": 40120 + }, + { + "epoch": 1.549480674929534, + "grad_norm": 1.688306212425232, + "learning_rate": 9.670386243999126e-05, + "loss": 0.1785, + "step": 40130 + }, + { + "epoch": 1.5498667902235608, + "grad_norm": 1.6113176345825195, + "learning_rate": 9.667812142038946e-05, + "loss": 0.4171, + "step": 40140 + }, + { + "epoch": 1.5502529055175875, + "grad_norm": 0.33999955654144287, + "learning_rate": 9.665238040078767e-05, + "loss": 0.1528, + "step": 40150 + }, + { + "epoch": 1.5506390208116143, + "grad_norm": 0.7666870355606079, + "learning_rate": 
9.662663938118589e-05, + "loss": 0.2258, + "step": 40160 + }, + { + "epoch": 1.551025136105641, + "grad_norm": 1.123090386390686, + "learning_rate": 9.66008983615841e-05, + "loss": 0.207, + "step": 40170 + }, + { + "epoch": 1.551411251399668, + "grad_norm": 1.4472588300704956, + "learning_rate": 9.657515734198233e-05, + "loss": 0.2083, + "step": 40180 + }, + { + "epoch": 1.5517973666936946, + "grad_norm": 0.6818589568138123, + "learning_rate": 9.654941632238054e-05, + "loss": 0.1821, + "step": 40190 + }, + { + "epoch": 1.5521834819877216, + "grad_norm": 2.31847882270813, + "learning_rate": 9.652367530277875e-05, + "loss": 0.5128, + "step": 40200 + }, + { + "epoch": 1.5525695972817484, + "grad_norm": 2.492560386657715, + "learning_rate": 9.649793428317695e-05, + "loss": 0.2461, + "step": 40210 + }, + { + "epoch": 1.5529557125757751, + "grad_norm": 0.6917121410369873, + "learning_rate": 9.647219326357517e-05, + "loss": 0.2829, + "step": 40220 + }, + { + "epoch": 1.553341827869802, + "grad_norm": 1.1944900751113892, + "learning_rate": 9.644645224397338e-05, + "loss": 0.2324, + "step": 40230 + }, + { + "epoch": 1.5537279431638287, + "grad_norm": 0.12343896180391312, + "learning_rate": 9.642071122437161e-05, + "loss": 0.1258, + "step": 40240 + }, + { + "epoch": 1.5541140584578557, + "grad_norm": 1.8493744134902954, + "learning_rate": 9.639497020476982e-05, + "loss": 0.2707, + "step": 40250 + }, + { + "epoch": 1.5545001737518822, + "grad_norm": 1.4696533679962158, + "learning_rate": 9.636922918516803e-05, + "loss": 0.199, + "step": 40260 + }, + { + "epoch": 1.5548862890459092, + "grad_norm": 0.7716092467308044, + "learning_rate": 9.634348816556625e-05, + "loss": 0.2869, + "step": 40270 + }, + { + "epoch": 1.5552724043399357, + "grad_norm": 1.3153057098388672, + "learning_rate": 9.631774714596446e-05, + "loss": 0.2164, + "step": 40280 + }, + { + "epoch": 1.5556585196339627, + "grad_norm": 1.2615609169006348, + "learning_rate": 9.629200612636266e-05, + "loss": 0.3418, + "step": 40290 + }, + { + "epoch": 1.5560446349279895, + "grad_norm": 1.7999435663223267, + "learning_rate": 9.626626510676089e-05, + "loss": 0.3062, + "step": 40300 + }, + { + "epoch": 1.5564307502220163, + "grad_norm": 1.5992902517318726, + "learning_rate": 9.62405240871591e-05, + "loss": 0.3037, + "step": 40310 + }, + { + "epoch": 1.556816865516043, + "grad_norm": 0.7515442967414856, + "learning_rate": 9.621478306755731e-05, + "loss": 0.2071, + "step": 40320 + }, + { + "epoch": 1.5572029808100698, + "grad_norm": 2.5413167476654053, + "learning_rate": 9.618904204795553e-05, + "loss": 0.569, + "step": 40330 + }, + { + "epoch": 1.5575890961040968, + "grad_norm": 2.303255319595337, + "learning_rate": 9.616330102835374e-05, + "loss": 0.201, + "step": 40340 + }, + { + "epoch": 1.5579752113981233, + "grad_norm": 1.0700573921203613, + "learning_rate": 9.613756000875195e-05, + "loss": 0.1675, + "step": 40350 + }, + { + "epoch": 1.5583613266921503, + "grad_norm": 1.3028898239135742, + "learning_rate": 9.611181898915015e-05, + "loss": 0.3423, + "step": 40360 + }, + { + "epoch": 1.558747441986177, + "grad_norm": 2.0213449001312256, + "learning_rate": 9.608607796954838e-05, + "loss": 0.161, + "step": 40370 + }, + { + "epoch": 1.5591335572802039, + "grad_norm": 0.15942837297916412, + "learning_rate": 9.60603369499466e-05, + "loss": 0.2067, + "step": 40380 + }, + { + "epoch": 1.5595196725742306, + "grad_norm": 0.8567324280738831, + "learning_rate": 9.603459593034481e-05, + "loss": 0.1156, + "step": 40390 + }, + { + "epoch": 1.5599057878682574, 
+ "grad_norm": 0.598947286605835, + "learning_rate": 9.600885491074302e-05, + "loss": 0.2741, + "step": 40400 + }, + { + "epoch": 1.5602919031622844, + "grad_norm": 0.41864535212516785, + "learning_rate": 9.598311389114123e-05, + "loss": 0.0979, + "step": 40410 + }, + { + "epoch": 1.560678018456311, + "grad_norm": 1.304883599281311, + "learning_rate": 9.595737287153945e-05, + "loss": 0.1525, + "step": 40420 + }, + { + "epoch": 1.561064133750338, + "grad_norm": 1.666935682296753, + "learning_rate": 9.593163185193766e-05, + "loss": 0.3392, + "step": 40430 + }, + { + "epoch": 1.5614502490443647, + "grad_norm": 0.44640687108039856, + "learning_rate": 9.590589083233587e-05, + "loss": 0.1946, + "step": 40440 + }, + { + "epoch": 1.5618363643383915, + "grad_norm": 0.8123475313186646, + "learning_rate": 9.588014981273409e-05, + "loss": 0.2985, + "step": 40450 + }, + { + "epoch": 1.5622224796324182, + "grad_norm": 1.7682442665100098, + "learning_rate": 9.58544087931323e-05, + "loss": 0.215, + "step": 40460 + }, + { + "epoch": 1.562608594926445, + "grad_norm": 0.8545176982879639, + "learning_rate": 9.582866777353051e-05, + "loss": 0.2513, + "step": 40470 + }, + { + "epoch": 1.562994710220472, + "grad_norm": 0.10042224079370499, + "learning_rate": 9.580292675392873e-05, + "loss": 0.1792, + "step": 40480 + }, + { + "epoch": 1.5633808255144985, + "grad_norm": 1.0059372186660767, + "learning_rate": 9.577718573432694e-05, + "loss": 0.2236, + "step": 40490 + }, + { + "epoch": 1.5637669408085255, + "grad_norm": 1.2795478105545044, + "learning_rate": 9.575144471472515e-05, + "loss": 0.2728, + "step": 40500 + }, + { + "epoch": 1.564153056102552, + "grad_norm": 1.099183201789856, + "learning_rate": 9.572570369512337e-05, + "loss": 0.2855, + "step": 40510 + }, + { + "epoch": 1.564539171396579, + "grad_norm": 1.0743390321731567, + "learning_rate": 9.569996267552158e-05, + "loss": 0.2439, + "step": 40520 + }, + { + "epoch": 1.5649252866906058, + "grad_norm": 1.1840991973876953, + "learning_rate": 9.56742216559198e-05, + "loss": 0.1416, + "step": 40530 + }, + { + "epoch": 1.5653114019846326, + "grad_norm": 0.4638634920120239, + "learning_rate": 9.564848063631801e-05, + "loss": 0.254, + "step": 40540 + }, + { + "epoch": 1.5656975172786594, + "grad_norm": 3.084916830062866, + "learning_rate": 9.562273961671623e-05, + "loss": 0.2098, + "step": 40550 + }, + { + "epoch": 1.5660836325726861, + "grad_norm": 0.666347324848175, + "learning_rate": 9.559699859711443e-05, + "loss": 0.3649, + "step": 40560 + }, + { + "epoch": 1.5664697478667131, + "grad_norm": 1.1770634651184082, + "learning_rate": 9.557125757751265e-05, + "loss": 0.2069, + "step": 40570 + }, + { + "epoch": 1.5668558631607397, + "grad_norm": 0.9030371308326721, + "learning_rate": 9.554551655791086e-05, + "loss": 0.1174, + "step": 40580 + }, + { + "epoch": 1.5672419784547666, + "grad_norm": 0.053270868957042694, + "learning_rate": 9.551977553830907e-05, + "loss": 0.1304, + "step": 40590 + }, + { + "epoch": 1.5676280937487934, + "grad_norm": 2.4098777770996094, + "learning_rate": 9.549403451870729e-05, + "loss": 0.2654, + "step": 40600 + }, + { + "epoch": 1.5680142090428202, + "grad_norm": 0.7078404426574707, + "learning_rate": 9.54682934991055e-05, + "loss": 0.2203, + "step": 40610 + }, + { + "epoch": 1.568400324336847, + "grad_norm": 1.1814978122711182, + "learning_rate": 9.544255247950373e-05, + "loss": 0.2986, + "step": 40620 + }, + { + "epoch": 1.5687864396308737, + "grad_norm": 2.29439377784729, + "learning_rate": 9.541681145990193e-05, + "loss": 0.3143, + 
"step": 40630 + }, + { + "epoch": 1.5691725549249007, + "grad_norm": 0.07921203970909119, + "learning_rate": 9.539107044030014e-05, + "loss": 0.151, + "step": 40640 + }, + { + "epoch": 1.5695586702189273, + "grad_norm": 0.2058558166027069, + "learning_rate": 9.536532942069835e-05, + "loss": 0.2169, + "step": 40650 + }, + { + "epoch": 1.5699447855129542, + "grad_norm": 0.4650769531726837, + "learning_rate": 9.533958840109657e-05, + "loss": 0.136, + "step": 40660 + }, + { + "epoch": 1.570330900806981, + "grad_norm": 1.0603867769241333, + "learning_rate": 9.531384738149478e-05, + "loss": 0.2396, + "step": 40670 + }, + { + "epoch": 1.5707170161010078, + "grad_norm": 2.279155731201172, + "learning_rate": 9.528810636189301e-05, + "loss": 0.2679, + "step": 40680 + }, + { + "epoch": 1.5711031313950345, + "grad_norm": 1.8186841011047363, + "learning_rate": 9.526236534229122e-05, + "loss": 0.2592, + "step": 40690 + }, + { + "epoch": 1.5714892466890613, + "grad_norm": 1.9804840087890625, + "learning_rate": 9.523662432268943e-05, + "loss": 0.3184, + "step": 40700 + }, + { + "epoch": 1.5718753619830883, + "grad_norm": 2.3474323749542236, + "learning_rate": 9.521088330308763e-05, + "loss": 0.1892, + "step": 40710 + }, + { + "epoch": 1.5722614772771148, + "grad_norm": 0.5251644253730774, + "learning_rate": 9.518514228348585e-05, + "loss": 0.1306, + "step": 40720 + }, + { + "epoch": 1.5726475925711418, + "grad_norm": 0.9444900155067444, + "learning_rate": 9.515940126388406e-05, + "loss": 0.1888, + "step": 40730 + }, + { + "epoch": 1.5730337078651684, + "grad_norm": 1.1808644533157349, + "learning_rate": 9.513366024428229e-05, + "loss": 0.1249, + "step": 40740 + }, + { + "epoch": 1.5734198231591954, + "grad_norm": 2.716831684112549, + "learning_rate": 9.51079192246805e-05, + "loss": 0.2544, + "step": 40750 + }, + { + "epoch": 1.5738059384532221, + "grad_norm": 1.257308006286621, + "learning_rate": 9.508217820507871e-05, + "loss": 0.1292, + "step": 40760 + }, + { + "epoch": 1.574192053747249, + "grad_norm": 2.080162525177002, + "learning_rate": 9.505643718547693e-05, + "loss": 0.3814, + "step": 40770 + }, + { + "epoch": 1.5745781690412757, + "grad_norm": 1.107872486114502, + "learning_rate": 9.503069616587513e-05, + "loss": 0.2074, + "step": 40780 + }, + { + "epoch": 1.5749642843353024, + "grad_norm": 1.0006227493286133, + "learning_rate": 9.500495514627334e-05, + "loss": 0.2384, + "step": 40790 + }, + { + "epoch": 1.5753503996293294, + "grad_norm": 0.3345160484313965, + "learning_rate": 9.497921412667157e-05, + "loss": 0.181, + "step": 40800 + }, + { + "epoch": 1.575736514923356, + "grad_norm": 1.2941926717758179, + "learning_rate": 9.495347310706978e-05, + "loss": 0.2312, + "step": 40810 + }, + { + "epoch": 1.576122630217383, + "grad_norm": 1.3869580030441284, + "learning_rate": 9.4927732087468e-05, + "loss": 0.2107, + "step": 40820 + }, + { + "epoch": 1.5765087455114097, + "grad_norm": 1.9948787689208984, + "learning_rate": 9.49019910678662e-05, + "loss": 0.2736, + "step": 40830 + }, + { + "epoch": 1.5768948608054365, + "grad_norm": 0.22069145739078522, + "learning_rate": 9.487625004826442e-05, + "loss": 0.3173, + "step": 40840 + }, + { + "epoch": 1.5772809760994633, + "grad_norm": 0.61549311876297, + "learning_rate": 9.485050902866262e-05, + "loss": 0.1437, + "step": 40850 + }, + { + "epoch": 1.57766709139349, + "grad_norm": 0.20520836114883423, + "learning_rate": 9.482476800906083e-05, + "loss": 0.1247, + "step": 40860 + }, + { + "epoch": 1.578053206687517, + "grad_norm": 2.1745588779449463, + 
"learning_rate": 9.479902698945906e-05, + "loss": 0.366, + "step": 40870 + }, + { + "epoch": 1.5784393219815436, + "grad_norm": 1.1127387285232544, + "learning_rate": 9.477328596985727e-05, + "loss": 0.1202, + "step": 40880 + }, + { + "epoch": 1.5788254372755706, + "grad_norm": 0.598258376121521, + "learning_rate": 9.474754495025549e-05, + "loss": 0.3917, + "step": 40890 + }, + { + "epoch": 1.5792115525695973, + "grad_norm": 1.0376511812210083, + "learning_rate": 9.47218039306537e-05, + "loss": 0.1372, + "step": 40900 + }, + { + "epoch": 1.579597667863624, + "grad_norm": 1.7164186239242554, + "learning_rate": 9.469606291105191e-05, + "loss": 0.3143, + "step": 40910 + }, + { + "epoch": 1.5799837831576509, + "grad_norm": 1.7929835319519043, + "learning_rate": 9.467032189145013e-05, + "loss": 0.2864, + "step": 40920 + }, + { + "epoch": 1.5803698984516776, + "grad_norm": 0.551222026348114, + "learning_rate": 9.464458087184834e-05, + "loss": 0.1173, + "step": 40930 + }, + { + "epoch": 1.5807560137457046, + "grad_norm": 1.5236003398895264, + "learning_rate": 9.461883985224655e-05, + "loss": 0.2031, + "step": 40940 + }, + { + "epoch": 1.5811421290397312, + "grad_norm": 0.690719723701477, + "learning_rate": 9.459309883264477e-05, + "loss": 0.1913, + "step": 40950 + }, + { + "epoch": 1.5815282443337582, + "grad_norm": 0.4199884533882141, + "learning_rate": 9.456735781304298e-05, + "loss": 0.279, + "step": 40960 + }, + { + "epoch": 1.5819143596277847, + "grad_norm": 0.7393648028373718, + "learning_rate": 9.454161679344119e-05, + "loss": 0.2315, + "step": 40970 + }, + { + "epoch": 1.5823004749218117, + "grad_norm": 0.09372472018003464, + "learning_rate": 9.45158757738394e-05, + "loss": 0.2585, + "step": 40980 + }, + { + "epoch": 1.5826865902158385, + "grad_norm": 0.37245264649391174, + "learning_rate": 9.449013475423762e-05, + "loss": 0.239, + "step": 40990 + }, + { + "epoch": 1.5830727055098652, + "grad_norm": 1.8006244897842407, + "learning_rate": 9.446439373463583e-05, + "loss": 0.3497, + "step": 41000 + }, + { + "epoch": 1.583458820803892, + "grad_norm": 1.4421888589859009, + "learning_rate": 9.443865271503405e-05, + "loss": 0.2014, + "step": 41010 + }, + { + "epoch": 1.5838449360979188, + "grad_norm": 1.8858803510665894, + "learning_rate": 9.441291169543226e-05, + "loss": 0.1816, + "step": 41020 + }, + { + "epoch": 1.5842310513919458, + "grad_norm": 1.108573317527771, + "learning_rate": 9.438717067583047e-05, + "loss": 0.1869, + "step": 41030 + }, + { + "epoch": 1.5846171666859723, + "grad_norm": 0.34841394424438477, + "learning_rate": 9.436142965622869e-05, + "loss": 0.3034, + "step": 41040 + }, + { + "epoch": 1.5850032819799993, + "grad_norm": 7.2845964431762695, + "learning_rate": 9.433568863662691e-05, + "loss": 0.2659, + "step": 41050 + }, + { + "epoch": 1.585389397274026, + "grad_norm": 0.4753032922744751, + "learning_rate": 9.430994761702511e-05, + "loss": 0.2539, + "step": 41060 + }, + { + "epoch": 1.5857755125680528, + "grad_norm": 1.590050458908081, + "learning_rate": 9.428420659742333e-05, + "loss": 0.2373, + "step": 41070 + }, + { + "epoch": 1.5861616278620796, + "grad_norm": 0.6979599595069885, + "learning_rate": 9.425846557782154e-05, + "loss": 0.1931, + "step": 41080 + }, + { + "epoch": 1.5865477431561064, + "grad_norm": 0.6384108066558838, + "learning_rate": 9.423272455821975e-05, + "loss": 0.2922, + "step": 41090 + }, + { + "epoch": 1.5869338584501334, + "grad_norm": 0.4752826392650604, + "learning_rate": 9.420698353861797e-05, + "loss": 0.2616, + "step": 41100 + }, + { + 
"epoch": 1.58731997374416, + "grad_norm": 0.7333683371543884, + "learning_rate": 9.418124251901618e-05, + "loss": 0.1568, + "step": 41110 + }, + { + "epoch": 1.5877060890381869, + "grad_norm": 0.09763200581073761, + "learning_rate": 9.41555014994144e-05, + "loss": 0.1936, + "step": 41120 + }, + { + "epoch": 1.5880922043322137, + "grad_norm": 2.4087512493133545, + "learning_rate": 9.41297604798126e-05, + "loss": 0.2408, + "step": 41130 + }, + { + "epoch": 1.5884783196262404, + "grad_norm": 2.014822244644165, + "learning_rate": 9.410401946021082e-05, + "loss": 0.2883, + "step": 41140 + }, + { + "epoch": 1.5888644349202672, + "grad_norm": 1.1764864921569824, + "learning_rate": 9.407827844060903e-05, + "loss": 0.3198, + "step": 41150 + }, + { + "epoch": 1.589250550214294, + "grad_norm": 0.807905912399292, + "learning_rate": 9.405253742100725e-05, + "loss": 0.3724, + "step": 41160 + }, + { + "epoch": 1.589636665508321, + "grad_norm": 0.9179816842079163, + "learning_rate": 9.402679640140546e-05, + "loss": 0.1429, + "step": 41170 + }, + { + "epoch": 1.5900227808023475, + "grad_norm": 1.3134746551513672, + "learning_rate": 9.400105538180369e-05, + "loss": 0.2973, + "step": 41180 + }, + { + "epoch": 1.5904088960963745, + "grad_norm": 0.08131751418113708, + "learning_rate": 9.39753143622019e-05, + "loss": 0.1627, + "step": 41190 + }, + { + "epoch": 1.590795011390401, + "grad_norm": 0.23568874597549438, + "learning_rate": 9.39495733426001e-05, + "loss": 0.3159, + "step": 41200 + }, + { + "epoch": 1.591181126684428, + "grad_norm": 1.1452207565307617, + "learning_rate": 9.392383232299831e-05, + "loss": 0.1761, + "step": 41210 + }, + { + "epoch": 1.5915672419784548, + "grad_norm": 1.5560107231140137, + "learning_rate": 9.389809130339653e-05, + "loss": 0.2173, + "step": 41220 + }, + { + "epoch": 1.5919533572724816, + "grad_norm": 1.7568162679672241, + "learning_rate": 9.387235028379474e-05, + "loss": 0.1771, + "step": 41230 + }, + { + "epoch": 1.5923394725665083, + "grad_norm": 6.235257148742676, + "learning_rate": 9.384660926419297e-05, + "loss": 0.3207, + "step": 41240 + }, + { + "epoch": 1.592725587860535, + "grad_norm": 1.2336914539337158, + "learning_rate": 9.382086824459118e-05, + "loss": 0.1691, + "step": 41250 + }, + { + "epoch": 1.593111703154562, + "grad_norm": 0.06781507283449173, + "learning_rate": 9.379512722498939e-05, + "loss": 0.1485, + "step": 41260 + }, + { + "epoch": 1.5934978184485886, + "grad_norm": 0.44770199060440063, + "learning_rate": 9.37693862053876e-05, + "loss": 0.133, + "step": 41270 + }, + { + "epoch": 1.5938839337426156, + "grad_norm": 0.3812965750694275, + "learning_rate": 9.37436451857858e-05, + "loss": 0.2305, + "step": 41280 + }, + { + "epoch": 1.5942700490366424, + "grad_norm": 1.7562031745910645, + "learning_rate": 9.371790416618402e-05, + "loss": 0.265, + "step": 41290 + }, + { + "epoch": 1.5946561643306691, + "grad_norm": 0.3089204728603363, + "learning_rate": 9.369216314658225e-05, + "loss": 0.2583, + "step": 41300 + }, + { + "epoch": 1.595042279624696, + "grad_norm": 1.5295588970184326, + "learning_rate": 9.366642212698046e-05, + "loss": 0.1201, + "step": 41310 + }, + { + "epoch": 1.5954283949187227, + "grad_norm": 0.7704429626464844, + "learning_rate": 9.364068110737867e-05, + "loss": 0.1471, + "step": 41320 + }, + { + "epoch": 1.5958145102127497, + "grad_norm": 1.825547218322754, + "learning_rate": 9.361494008777689e-05, + "loss": 0.2964, + "step": 41330 + }, + { + "epoch": 1.5962006255067762, + "grad_norm": 2.4156250953674316, + "learning_rate": 
9.35891990681751e-05, + "loss": 0.4332, + "step": 41340 + }, + { + "epoch": 1.5965867408008032, + "grad_norm": 0.8711603879928589, + "learning_rate": 9.35634580485733e-05, + "loss": 0.2037, + "step": 41350 + }, + { + "epoch": 1.5969728560948298, + "grad_norm": 0.5013506412506104, + "learning_rate": 9.353771702897151e-05, + "loss": 0.1852, + "step": 41360 + }, + { + "epoch": 1.5973589713888567, + "grad_norm": 1.9529963731765747, + "learning_rate": 9.351197600936974e-05, + "loss": 0.2809, + "step": 41370 + }, + { + "epoch": 1.5977450866828835, + "grad_norm": 1.7923181056976318, + "learning_rate": 9.348623498976795e-05, + "loss": 0.3918, + "step": 41380 + }, + { + "epoch": 1.5981312019769103, + "grad_norm": 0.45643335580825806, + "learning_rate": 9.346049397016617e-05, + "loss": 0.4161, + "step": 41390 + }, + { + "epoch": 1.5985173172709373, + "grad_norm": 0.4477383494377136, + "learning_rate": 9.343475295056438e-05, + "loss": 0.202, + "step": 41400 + }, + { + "epoch": 1.5989034325649638, + "grad_norm": 1.1428645849227905, + "learning_rate": 9.340901193096259e-05, + "loss": 0.2271, + "step": 41410 + }, + { + "epoch": 1.5992895478589908, + "grad_norm": 1.8324707746505737, + "learning_rate": 9.338327091136079e-05, + "loss": 0.2261, + "step": 41420 + }, + { + "epoch": 1.5996756631530173, + "grad_norm": 1.3735613822937012, + "learning_rate": 9.335752989175902e-05, + "loss": 0.2394, + "step": 41430 + }, + { + "epoch": 1.6000617784470443, + "grad_norm": 4.176051139831543, + "learning_rate": 9.333178887215723e-05, + "loss": 0.3454, + "step": 41440 + }, + { + "epoch": 1.600447893741071, + "grad_norm": 0.1931024044752121, + "learning_rate": 9.330604785255545e-05, + "loss": 0.3191, + "step": 41450 + }, + { + "epoch": 1.6008340090350979, + "grad_norm": 0.867579996585846, + "learning_rate": 9.328030683295366e-05, + "loss": 0.1951, + "step": 41460 + }, + { + "epoch": 1.6012201243291246, + "grad_norm": 0.4601798355579376, + "learning_rate": 9.325456581335187e-05, + "loss": 0.2682, + "step": 41470 + }, + { + "epoch": 1.6016062396231514, + "grad_norm": 0.5605349540710449, + "learning_rate": 9.322882479375009e-05, + "loss": 0.1963, + "step": 41480 + }, + { + "epoch": 1.6019923549171784, + "grad_norm": 0.4736683964729309, + "learning_rate": 9.32030837741483e-05, + "loss": 0.2347, + "step": 41490 + }, + { + "epoch": 1.602378470211205, + "grad_norm": 2.145426034927368, + "learning_rate": 9.317734275454651e-05, + "loss": 0.2782, + "step": 41500 + }, + { + "epoch": 1.602764585505232, + "grad_norm": 0.34660840034484863, + "learning_rate": 9.315160173494473e-05, + "loss": 0.1966, + "step": 41510 + }, + { + "epoch": 1.6031507007992587, + "grad_norm": 1.3674156665802002, + "learning_rate": 9.312586071534294e-05, + "loss": 0.426, + "step": 41520 + }, + { + "epoch": 1.6035368160932855, + "grad_norm": 1.078046441078186, + "learning_rate": 9.310011969574115e-05, + "loss": 0.2345, + "step": 41530 + }, + { + "epoch": 1.6039229313873122, + "grad_norm": 2.233793258666992, + "learning_rate": 9.307437867613937e-05, + "loss": 0.2605, + "step": 41540 + }, + { + "epoch": 1.604309046681339, + "grad_norm": 0.9344108700752258, + "learning_rate": 9.304863765653758e-05, + "loss": 0.114, + "step": 41550 + }, + { + "epoch": 1.604695161975366, + "grad_norm": 0.29096049070358276, + "learning_rate": 9.302289663693579e-05, + "loss": 0.3319, + "step": 41560 + }, + { + "epoch": 1.6050812772693925, + "grad_norm": 0.41333675384521484, + "learning_rate": 9.2997155617334e-05, + "loss": 0.2392, + "step": 41570 + }, + { + "epoch": 
1.6054673925634195, + "grad_norm": 1.3718006610870361, + "learning_rate": 9.297141459773222e-05, + "loss": 0.2304, + "step": 41580 + }, + { + "epoch": 1.605853507857446, + "grad_norm": 1.6650983095169067, + "learning_rate": 9.294567357813043e-05, + "loss": 0.3319, + "step": 41590 + }, + { + "epoch": 1.606239623151473, + "grad_norm": 3.4465317726135254, + "learning_rate": 9.291993255852865e-05, + "loss": 0.6168, + "step": 41600 + }, + { + "epoch": 1.6066257384454998, + "grad_norm": 1.2659804821014404, + "learning_rate": 9.289419153892687e-05, + "loss": 0.2291, + "step": 41610 + }, + { + "epoch": 1.6070118537395266, + "grad_norm": 0.4997158348560333, + "learning_rate": 9.286845051932509e-05, + "loss": 0.1619, + "step": 41620 + }, + { + "epoch": 1.6073979690335534, + "grad_norm": 0.87920743227005, + "learning_rate": 9.284270949972329e-05, + "loss": 0.1894, + "step": 41630 + }, + { + "epoch": 1.6077840843275801, + "grad_norm": 1.124824047088623, + "learning_rate": 9.28169684801215e-05, + "loss": 0.4755, + "step": 41640 + }, + { + "epoch": 1.6081701996216071, + "grad_norm": 0.2584467828273773, + "learning_rate": 9.279122746051971e-05, + "loss": 0.2281, + "step": 41650 + }, + { + "epoch": 1.6085563149156337, + "grad_norm": 0.6686498522758484, + "learning_rate": 9.276548644091793e-05, + "loss": 0.3244, + "step": 41660 + }, + { + "epoch": 1.6089424302096607, + "grad_norm": 1.991140365600586, + "learning_rate": 9.273974542131614e-05, + "loss": 0.2776, + "step": 41670 + }, + { + "epoch": 1.6093285455036874, + "grad_norm": 0.40963074564933777, + "learning_rate": 9.271400440171437e-05, + "loss": 0.1072, + "step": 41680 + }, + { + "epoch": 1.6097146607977142, + "grad_norm": 1.117253065109253, + "learning_rate": 9.268826338211258e-05, + "loss": 0.2386, + "step": 41690 + }, + { + "epoch": 1.610100776091741, + "grad_norm": 1.2031314373016357, + "learning_rate": 9.266252236251078e-05, + "loss": 0.1967, + "step": 41700 + }, + { + "epoch": 1.6104868913857677, + "grad_norm": 1.4460607767105103, + "learning_rate": 9.263678134290899e-05, + "loss": 0.2471, + "step": 41710 + }, + { + "epoch": 1.6108730066797947, + "grad_norm": 1.5548468828201294, + "learning_rate": 9.26110403233072e-05, + "loss": 0.1846, + "step": 41720 + }, + { + "epoch": 1.6112591219738213, + "grad_norm": 2.042555809020996, + "learning_rate": 9.258529930370542e-05, + "loss": 0.2791, + "step": 41730 + }, + { + "epoch": 1.6116452372678483, + "grad_norm": 0.19565679132938385, + "learning_rate": 9.255955828410365e-05, + "loss": 0.2138, + "step": 41740 + }, + { + "epoch": 1.612031352561875, + "grad_norm": 0.13099287450313568, + "learning_rate": 9.253381726450186e-05, + "loss": 0.239, + "step": 41750 + }, + { + "epoch": 1.6124174678559018, + "grad_norm": 1.7031620740890503, + "learning_rate": 9.250807624490007e-05, + "loss": 0.4618, + "step": 41760 + }, + { + "epoch": 1.6128035831499286, + "grad_norm": 1.3827983140945435, + "learning_rate": 9.248233522529827e-05, + "loss": 0.3837, + "step": 41770 + }, + { + "epoch": 1.6131896984439553, + "grad_norm": 0.29199469089508057, + "learning_rate": 9.245659420569648e-05, + "loss": 0.2054, + "step": 41780 + }, + { + "epoch": 1.6135758137379823, + "grad_norm": 0.8447692394256592, + "learning_rate": 9.24308531860947e-05, + "loss": 0.2001, + "step": 41790 + }, + { + "epoch": 1.6139619290320089, + "grad_norm": 3.2848644256591797, + "learning_rate": 9.240511216649292e-05, + "loss": 0.2177, + "step": 41800 + }, + { + "epoch": 1.6143480443260358, + "grad_norm": 0.7044230699539185, + "learning_rate": 
9.237937114689114e-05, + "loss": 0.2592, + "step": 41810 + }, + { + "epoch": 1.6147341596200624, + "grad_norm": 0.6980583667755127, + "learning_rate": 9.235363012728935e-05, + "loss": 0.2133, + "step": 41820 + }, + { + "epoch": 1.6151202749140894, + "grad_norm": 0.6349130868911743, + "learning_rate": 9.232788910768756e-05, + "loss": 0.1238, + "step": 41830 + }, + { + "epoch": 1.6155063902081161, + "grad_norm": 3.3420283794403076, + "learning_rate": 9.230214808808576e-05, + "loss": 0.4264, + "step": 41840 + }, + { + "epoch": 1.615892505502143, + "grad_norm": 1.1701698303222656, + "learning_rate": 9.227640706848398e-05, + "loss": 0.2886, + "step": 41850 + }, + { + "epoch": 1.6162786207961697, + "grad_norm": 1.7839452028274536, + "learning_rate": 9.22506660488822e-05, + "loss": 0.2431, + "step": 41860 + }, + { + "epoch": 1.6166647360901965, + "grad_norm": 0.5988792181015015, + "learning_rate": 9.222492502928042e-05, + "loss": 0.2162, + "step": 41870 + }, + { + "epoch": 1.6170508513842234, + "grad_norm": 2.511929988861084, + "learning_rate": 9.219918400967863e-05, + "loss": 0.2862, + "step": 41880 + }, + { + "epoch": 1.61743696667825, + "grad_norm": 0.524932861328125, + "learning_rate": 9.217344299007684e-05, + "loss": 0.1984, + "step": 41890 + }, + { + "epoch": 1.617823081972277, + "grad_norm": 0.42112675309181213, + "learning_rate": 9.214770197047506e-05, + "loss": 0.3873, + "step": 41900 + }, + { + "epoch": 1.6182091972663037, + "grad_norm": 2.114396810531616, + "learning_rate": 9.212196095087327e-05, + "loss": 0.209, + "step": 41910 + }, + { + "epoch": 1.6185953125603305, + "grad_norm": 1.023512601852417, + "learning_rate": 9.209621993127147e-05, + "loss": 0.1849, + "step": 41920 + }, + { + "epoch": 1.6189814278543573, + "grad_norm": 0.4894075393676758, + "learning_rate": 9.20704789116697e-05, + "loss": 0.2624, + "step": 41930 + }, + { + "epoch": 1.619367543148384, + "grad_norm": 2.8259129524230957, + "learning_rate": 9.204473789206791e-05, + "loss": 0.1016, + "step": 41940 + }, + { + "epoch": 1.619753658442411, + "grad_norm": 0.30800989270210266, + "learning_rate": 9.201899687246612e-05, + "loss": 0.2056, + "step": 41950 + }, + { + "epoch": 1.6201397737364376, + "grad_norm": 0.9112114906311035, + "learning_rate": 9.199325585286434e-05, + "loss": 0.2935, + "step": 41960 + }, + { + "epoch": 1.6205258890304646, + "grad_norm": 0.9642273783683777, + "learning_rate": 9.196751483326255e-05, + "loss": 0.2782, + "step": 41970 + }, + { + "epoch": 1.6209120043244913, + "grad_norm": 1.4163227081298828, + "learning_rate": 9.194177381366076e-05, + "loss": 0.2259, + "step": 41980 + }, + { + "epoch": 1.621298119618518, + "grad_norm": 1.3390878438949585, + "learning_rate": 9.191603279405898e-05, + "loss": 0.2118, + "step": 41990 + }, + { + "epoch": 1.6216842349125449, + "grad_norm": 0.7193337082862854, + "learning_rate": 9.189029177445719e-05, + "loss": 0.1972, + "step": 42000 + }, + { + "epoch": 1.6220703502065716, + "grad_norm": 0.9605100154876709, + "learning_rate": 9.18645507548554e-05, + "loss": 0.2109, + "step": 42010 + }, + { + "epoch": 1.6224564655005986, + "grad_norm": 1.1802191734313965, + "learning_rate": 9.183880973525362e-05, + "loss": 0.204, + "step": 42020 + }, + { + "epoch": 1.6228425807946252, + "grad_norm": 0.9422736167907715, + "learning_rate": 9.181306871565183e-05, + "loss": 0.2316, + "step": 42030 + }, + { + "epoch": 1.6232286960886522, + "grad_norm": 2.4047484397888184, + "learning_rate": 9.178732769605004e-05, + "loss": 0.3395, + "step": 42040 + }, + { + "epoch": 
1.6236148113826787, + "grad_norm": 0.625637948513031, + "learning_rate": 9.176158667644826e-05, + "loss": 0.2339, + "step": 42050 + }, + { + "epoch": 1.6240009266767057, + "grad_norm": 1.628997564315796, + "learning_rate": 9.173584565684647e-05, + "loss": 0.2034, + "step": 42060 + }, + { + "epoch": 1.6243870419707325, + "grad_norm": 0.46910417079925537, + "learning_rate": 9.171010463724468e-05, + "loss": 0.2874, + "step": 42070 + }, + { + "epoch": 1.6247731572647592, + "grad_norm": 1.7646992206573486, + "learning_rate": 9.16843636176429e-05, + "loss": 0.2998, + "step": 42080 + }, + { + "epoch": 1.625159272558786, + "grad_norm": 1.0245219469070435, + "learning_rate": 9.165862259804111e-05, + "loss": 0.3796, + "step": 42090 + }, + { + "epoch": 1.6255453878528128, + "grad_norm": 1.175984501838684, + "learning_rate": 9.163288157843932e-05, + "loss": 0.2683, + "step": 42100 + }, + { + "epoch": 1.6259315031468398, + "grad_norm": 0.5364359617233276, + "learning_rate": 9.160714055883755e-05, + "loss": 0.185, + "step": 42110 + }, + { + "epoch": 1.6263176184408663, + "grad_norm": 2.991917610168457, + "learning_rate": 9.158139953923575e-05, + "loss": 0.4202, + "step": 42120 + }, + { + "epoch": 1.6267037337348933, + "grad_norm": 0.6997040510177612, + "learning_rate": 9.155565851963396e-05, + "loss": 0.2833, + "step": 42130 + }, + { + "epoch": 1.62708984902892, + "grad_norm": 1.2331346273422241, + "learning_rate": 9.152991750003218e-05, + "loss": 0.2001, + "step": 42140 + }, + { + "epoch": 1.6274759643229468, + "grad_norm": 0.5714563131332397, + "learning_rate": 9.150417648043039e-05, + "loss": 0.1649, + "step": 42150 + }, + { + "epoch": 1.6278620796169736, + "grad_norm": 0.18129512667655945, + "learning_rate": 9.14784354608286e-05, + "loss": 0.2042, + "step": 42160 + }, + { + "epoch": 1.6282481949110004, + "grad_norm": 1.0198371410369873, + "learning_rate": 9.145269444122682e-05, + "loss": 0.089, + "step": 42170 + }, + { + "epoch": 1.6286343102050274, + "grad_norm": 1.7465068101882935, + "learning_rate": 9.142695342162504e-05, + "loss": 0.2767, + "step": 42180 + }, + { + "epoch": 1.629020425499054, + "grad_norm": 1.5238598585128784, + "learning_rate": 9.140121240202324e-05, + "loss": 0.2682, + "step": 42190 + }, + { + "epoch": 1.629406540793081, + "grad_norm": 0.21305501461029053, + "learning_rate": 9.137547138242146e-05, + "loss": 0.1656, + "step": 42200 + }, + { + "epoch": 1.6297926560871077, + "grad_norm": 2.7441041469573975, + "learning_rate": 9.134973036281967e-05, + "loss": 0.1734, + "step": 42210 + }, + { + "epoch": 1.6301787713811344, + "grad_norm": 1.0751773118972778, + "learning_rate": 9.132398934321788e-05, + "loss": 0.3272, + "step": 42220 + }, + { + "epoch": 1.6305648866751612, + "grad_norm": 2.0907068252563477, + "learning_rate": 9.12982483236161e-05, + "loss": 0.2322, + "step": 42230 + }, + { + "epoch": 1.630951001969188, + "grad_norm": 0.8943297266960144, + "learning_rate": 9.127250730401432e-05, + "loss": 0.1908, + "step": 42240 + }, + { + "epoch": 1.631337117263215, + "grad_norm": 0.30939817428588867, + "learning_rate": 9.124676628441254e-05, + "loss": 0.2398, + "step": 42250 + }, + { + "epoch": 1.6317232325572415, + "grad_norm": 0.07228074967861176, + "learning_rate": 9.122102526481075e-05, + "loss": 0.2146, + "step": 42260 + }, + { + "epoch": 1.6321093478512685, + "grad_norm": 1.6129286289215088, + "learning_rate": 9.119528424520895e-05, + "loss": 0.2989, + "step": 42270 + }, + { + "epoch": 1.632495463145295, + "grad_norm": 3.1282570362091064, + "learning_rate": 
9.116954322560716e-05, + "loss": 0.2296, + "step": 42280 + }, + { + "epoch": 1.632881578439322, + "grad_norm": 1.9693125486373901, + "learning_rate": 9.114380220600538e-05, + "loss": 0.3122, + "step": 42290 + }, + { + "epoch": 1.6332676937333488, + "grad_norm": 0.7935388088226318, + "learning_rate": 9.11180611864036e-05, + "loss": 0.2738, + "step": 42300 + }, + { + "epoch": 1.6336538090273756, + "grad_norm": 0.6630697250366211, + "learning_rate": 9.109232016680182e-05, + "loss": 0.1349, + "step": 42310 + }, + { + "epoch": 1.6340399243214023, + "grad_norm": 0.6689760684967041, + "learning_rate": 9.106657914720003e-05, + "loss": 0.2881, + "step": 42320 + }, + { + "epoch": 1.634426039615429, + "grad_norm": 1.3075367212295532, + "learning_rate": 9.104083812759824e-05, + "loss": 0.3599, + "step": 42330 + }, + { + "epoch": 1.634812154909456, + "grad_norm": 0.6957948803901672, + "learning_rate": 9.101509710799644e-05, + "loss": 0.2562, + "step": 42340 + }, + { + "epoch": 1.6351982702034826, + "grad_norm": 0.6405411958694458, + "learning_rate": 9.098935608839466e-05, + "loss": 0.3024, + "step": 42350 + }, + { + "epoch": 1.6355843854975096, + "grad_norm": 1.4570382833480835, + "learning_rate": 9.096361506879288e-05, + "loss": 0.2415, + "step": 42360 + }, + { + "epoch": 1.6359705007915364, + "grad_norm": 0.28187355399131775, + "learning_rate": 9.09378740491911e-05, + "loss": 0.3213, + "step": 42370 + }, + { + "epoch": 1.6363566160855632, + "grad_norm": 0.5807194113731384, + "learning_rate": 9.091213302958931e-05, + "loss": 0.2513, + "step": 42380 + }, + { + "epoch": 1.63674273137959, + "grad_norm": 2.750338554382324, + "learning_rate": 9.088639200998752e-05, + "loss": 0.2667, + "step": 42390 + }, + { + "epoch": 1.6371288466736167, + "grad_norm": 2.9717495441436768, + "learning_rate": 9.086065099038574e-05, + "loss": 0.2719, + "step": 42400 + }, + { + "epoch": 1.6375149619676437, + "grad_norm": 0.5903140306472778, + "learning_rate": 9.083490997078394e-05, + "loss": 0.2861, + "step": 42410 + }, + { + "epoch": 1.6379010772616702, + "grad_norm": 0.5791400671005249, + "learning_rate": 9.080916895118215e-05, + "loss": 0.1999, + "step": 42420 + }, + { + "epoch": 1.6382871925556972, + "grad_norm": 0.5550700426101685, + "learning_rate": 9.078342793158038e-05, + "loss": 0.2856, + "step": 42430 + }, + { + "epoch": 1.638673307849724, + "grad_norm": 0.1384456902742386, + "learning_rate": 9.075768691197859e-05, + "loss": 0.1379, + "step": 42440 + }, + { + "epoch": 1.6390594231437507, + "grad_norm": 0.4201198220252991, + "learning_rate": 9.07319458923768e-05, + "loss": 0.2401, + "step": 42450 + }, + { + "epoch": 1.6394455384377775, + "grad_norm": 0.4227651059627533, + "learning_rate": 9.070620487277502e-05, + "loss": 0.1251, + "step": 42460 + }, + { + "epoch": 1.6398316537318043, + "grad_norm": 0.5025180578231812, + "learning_rate": 9.068046385317323e-05, + "loss": 0.1911, + "step": 42470 + }, + { + "epoch": 1.6402177690258313, + "grad_norm": 1.5064163208007812, + "learning_rate": 9.065472283357144e-05, + "loss": 0.2291, + "step": 42480 + }, + { + "epoch": 1.6406038843198578, + "grad_norm": 1.0776429176330566, + "learning_rate": 9.062898181396966e-05, + "loss": 0.1429, + "step": 42490 + }, + { + "epoch": 1.6409899996138848, + "grad_norm": 1.409090280532837, + "learning_rate": 9.060324079436787e-05, + "loss": 0.1768, + "step": 42500 + }, + { + "epoch": 1.6413761149079114, + "grad_norm": 1.4606170654296875, + "learning_rate": 9.057749977476608e-05, + "loss": 0.1657, + "step": 42510 + }, + { + "epoch": 
1.6417622302019383, + "grad_norm": 0.1410249024629593, + "learning_rate": 9.05517587551643e-05, + "loss": 0.1768, + "step": 42520 + }, + { + "epoch": 1.6421483454959651, + "grad_norm": 1.3056964874267578, + "learning_rate": 9.052601773556251e-05, + "loss": 0.2001, + "step": 42530 + }, + { + "epoch": 1.6425344607899919, + "grad_norm": 0.7286831140518188, + "learning_rate": 9.050027671596072e-05, + "loss": 0.2181, + "step": 42540 + }, + { + "epoch": 1.6429205760840186, + "grad_norm": 0.7450721263885498, + "learning_rate": 9.047453569635894e-05, + "loss": 0.1961, + "step": 42550 + }, + { + "epoch": 1.6433066913780454, + "grad_norm": 0.3484252691268921, + "learning_rate": 9.044879467675715e-05, + "loss": 0.1435, + "step": 42560 + }, + { + "epoch": 1.6436928066720724, + "grad_norm": 1.4743714332580566, + "learning_rate": 9.042305365715536e-05, + "loss": 0.218, + "step": 42570 + }, + { + "epoch": 1.644078921966099, + "grad_norm": 1.9126007556915283, + "learning_rate": 9.039731263755358e-05, + "loss": 0.3114, + "step": 42580 + }, + { + "epoch": 1.644465037260126, + "grad_norm": 0.1249854639172554, + "learning_rate": 9.037157161795179e-05, + "loss": 0.1675, + "step": 42590 + }, + { + "epoch": 1.6448511525541527, + "grad_norm": 0.948403537273407, + "learning_rate": 9.034583059835e-05, + "loss": 0.1956, + "step": 42600 + }, + { + "epoch": 1.6452372678481795, + "grad_norm": 1.6507424116134644, + "learning_rate": 9.032008957874823e-05, + "loss": 0.2754, + "step": 42610 + }, + { + "epoch": 1.6456233831422062, + "grad_norm": 2.7686564922332764, + "learning_rate": 9.029434855914643e-05, + "loss": 0.3409, + "step": 42620 + }, + { + "epoch": 1.646009498436233, + "grad_norm": 2.7112228870391846, + "learning_rate": 9.026860753954464e-05, + "loss": 0.3626, + "step": 42630 + }, + { + "epoch": 1.64639561373026, + "grad_norm": 1.400976538658142, + "learning_rate": 9.024286651994286e-05, + "loss": 0.199, + "step": 42640 + }, + { + "epoch": 1.6467817290242865, + "grad_norm": 1.4591180086135864, + "learning_rate": 9.021712550034107e-05, + "loss": 0.4385, + "step": 42650 + }, + { + "epoch": 1.6471678443183135, + "grad_norm": 1.386496901512146, + "learning_rate": 9.019138448073928e-05, + "loss": 0.1433, + "step": 42660 + }, + { + "epoch": 1.64755395961234, + "grad_norm": 2.054412841796875, + "learning_rate": 9.01656434611375e-05, + "loss": 0.1602, + "step": 42670 + }, + { + "epoch": 1.647940074906367, + "grad_norm": 1.5373504161834717, + "learning_rate": 9.013990244153572e-05, + "loss": 0.1329, + "step": 42680 + }, + { + "epoch": 1.6483261902003938, + "grad_norm": 2.131410598754883, + "learning_rate": 9.011416142193392e-05, + "loss": 0.2858, + "step": 42690 + }, + { + "epoch": 1.6487123054944206, + "grad_norm": 1.5870622396469116, + "learning_rate": 9.008842040233214e-05, + "loss": 0.414, + "step": 42700 + }, + { + "epoch": 1.6490984207884476, + "grad_norm": 0.46856650710105896, + "learning_rate": 9.006267938273035e-05, + "loss": 0.1262, + "step": 42710 + }, + { + "epoch": 1.6494845360824741, + "grad_norm": 1.368762731552124, + "learning_rate": 9.003693836312856e-05, + "loss": 0.2341, + "step": 42720 + }, + { + "epoch": 1.6498706513765011, + "grad_norm": 2.3937511444091797, + "learning_rate": 9.001119734352678e-05, + "loss": 0.215, + "step": 42730 + }, + { + "epoch": 1.6502567666705277, + "grad_norm": 0.9359129071235657, + "learning_rate": 8.9985456323925e-05, + "loss": 0.2173, + "step": 42740 + }, + { + "epoch": 1.6506428819645547, + "grad_norm": 1.381408452987671, + "learning_rate": 8.995971530432322e-05, + "loss": 
0.1671, + "step": 42750 + }, + { + "epoch": 1.6510289972585814, + "grad_norm": 0.5645018815994263, + "learning_rate": 8.993397428472142e-05, + "loss": 0.1943, + "step": 42760 + }, + { + "epoch": 1.6514151125526082, + "grad_norm": 1.1989009380340576, + "learning_rate": 8.990823326511963e-05, + "loss": 0.1845, + "step": 42770 + }, + { + "epoch": 1.651801227846635, + "grad_norm": 2.2969398498535156, + "learning_rate": 8.988249224551784e-05, + "loss": 0.3469, + "step": 42780 + }, + { + "epoch": 1.6521873431406617, + "grad_norm": 1.931502342224121, + "learning_rate": 8.985675122591606e-05, + "loss": 0.3857, + "step": 42790 + }, + { + "epoch": 1.6525734584346887, + "grad_norm": 1.2291436195373535, + "learning_rate": 8.983101020631428e-05, + "loss": 0.1948, + "step": 42800 + }, + { + "epoch": 1.6529595737287153, + "grad_norm": 0.3344796597957611, + "learning_rate": 8.98052691867125e-05, + "loss": 0.2707, + "step": 42810 + }, + { + "epoch": 1.6533456890227423, + "grad_norm": 1.9174389839172363, + "learning_rate": 8.977952816711071e-05, + "loss": 0.2318, + "step": 42820 + }, + { + "epoch": 1.653731804316769, + "grad_norm": 0.49493780732154846, + "learning_rate": 8.975378714750892e-05, + "loss": 0.2615, + "step": 42830 + }, + { + "epoch": 1.6541179196107958, + "grad_norm": 0.8160524964332581, + "learning_rate": 8.972804612790712e-05, + "loss": 0.1837, + "step": 42840 + }, + { + "epoch": 1.6545040349048226, + "grad_norm": 0.571782112121582, + "learning_rate": 8.970230510830534e-05, + "loss": 0.1451, + "step": 42850 + }, + { + "epoch": 1.6548901501988493, + "grad_norm": 2.636889934539795, + "learning_rate": 8.967656408870356e-05, + "loss": 0.348, + "step": 42860 + }, + { + "epoch": 1.6552762654928763, + "grad_norm": 0.4561298191547394, + "learning_rate": 8.965082306910178e-05, + "loss": 0.3301, + "step": 42870 + }, + { + "epoch": 1.6556623807869029, + "grad_norm": 0.7691421508789062, + "learning_rate": 8.962508204949999e-05, + "loss": 0.231, + "step": 42880 + }, + { + "epoch": 1.6560484960809299, + "grad_norm": 0.2073112428188324, + "learning_rate": 8.95993410298982e-05, + "loss": 0.1358, + "step": 42890 + }, + { + "epoch": 1.6564346113749564, + "grad_norm": 0.35970741510391235, + "learning_rate": 8.957360001029642e-05, + "loss": 0.3412, + "step": 42900 + }, + { + "epoch": 1.6568207266689834, + "grad_norm": 0.6349666118621826, + "learning_rate": 8.954785899069462e-05, + "loss": 0.3279, + "step": 42910 + }, + { + "epoch": 1.6572068419630102, + "grad_norm": 1.6498395204544067, + "learning_rate": 8.952211797109284e-05, + "loss": 0.3195, + "step": 42920 + }, + { + "epoch": 1.657592957257037, + "grad_norm": 1.200462818145752, + "learning_rate": 8.949637695149106e-05, + "loss": 0.1533, + "step": 42930 + }, + { + "epoch": 1.6579790725510637, + "grad_norm": 3.063417673110962, + "learning_rate": 8.947063593188927e-05, + "loss": 0.1553, + "step": 42940 + }, + { + "epoch": 1.6583651878450905, + "grad_norm": 2.632843494415283, + "learning_rate": 8.944489491228748e-05, + "loss": 0.1819, + "step": 42950 + }, + { + "epoch": 1.6587513031391175, + "grad_norm": 1.2199878692626953, + "learning_rate": 8.94191538926857e-05, + "loss": 0.3631, + "step": 42960 + }, + { + "epoch": 1.659137418433144, + "grad_norm": 4.311095237731934, + "learning_rate": 8.939341287308391e-05, + "loss": 0.2747, + "step": 42970 + }, + { + "epoch": 1.659523533727171, + "grad_norm": 1.0289263725280762, + "learning_rate": 8.936767185348211e-05, + "loss": 0.225, + "step": 42980 + }, + { + "epoch": 1.6599096490211978, + "grad_norm": 
0.17542269825935364, + "learning_rate": 8.934193083388034e-05, + "loss": 0.3562, + "step": 42990 + }, + { + "epoch": 1.6602957643152245, + "grad_norm": 1.6451623439788818, + "learning_rate": 8.931618981427855e-05, + "loss": 0.3318, + "step": 43000 + }, + { + "epoch": 1.6606818796092513, + "grad_norm": 0.6164776682853699, + "learning_rate": 8.929044879467676e-05, + "loss": 0.272, + "step": 43010 + }, + { + "epoch": 1.661067994903278, + "grad_norm": 0.8627731800079346, + "learning_rate": 8.926470777507498e-05, + "loss": 0.2037, + "step": 43020 + }, + { + "epoch": 1.661454110197305, + "grad_norm": 0.9663155674934387, + "learning_rate": 8.923896675547319e-05, + "loss": 0.2291, + "step": 43030 + }, + { + "epoch": 1.6618402254913316, + "grad_norm": 1.909785509109497, + "learning_rate": 8.92132257358714e-05, + "loss": 0.357, + "step": 43040 + }, + { + "epoch": 1.6622263407853586, + "grad_norm": 1.4348317384719849, + "learning_rate": 8.918748471626962e-05, + "loss": 0.2494, + "step": 43050 + }, + { + "epoch": 1.6626124560793853, + "grad_norm": 1.8088570833206177, + "learning_rate": 8.916174369666783e-05, + "loss": 0.1663, + "step": 43060 + }, + { + "epoch": 1.6629985713734121, + "grad_norm": 0.4020337462425232, + "learning_rate": 8.913600267706604e-05, + "loss": 0.251, + "step": 43070 + }, + { + "epoch": 1.6633846866674389, + "grad_norm": 1.4719258546829224, + "learning_rate": 8.911026165746426e-05, + "loss": 0.3267, + "step": 43080 + }, + { + "epoch": 1.6637708019614657, + "grad_norm": 0.2420385479927063, + "learning_rate": 8.908452063786247e-05, + "loss": 0.2289, + "step": 43090 + }, + { + "epoch": 1.6641569172554926, + "grad_norm": 0.4697989225387573, + "learning_rate": 8.905877961826068e-05, + "loss": 0.1597, + "step": 43100 + }, + { + "epoch": 1.6645430325495192, + "grad_norm": 0.4061245918273926, + "learning_rate": 8.90330385986589e-05, + "loss": 0.2299, + "step": 43110 + }, + { + "epoch": 1.6649291478435462, + "grad_norm": 1.4116615056991577, + "learning_rate": 8.900729757905711e-05, + "loss": 0.251, + "step": 43120 + }, + { + "epoch": 1.6653152631375727, + "grad_norm": 0.2808239161968231, + "learning_rate": 8.898155655945532e-05, + "loss": 0.2204, + "step": 43130 + }, + { + "epoch": 1.6657013784315997, + "grad_norm": 0.09532297402620316, + "learning_rate": 8.895581553985354e-05, + "loss": 0.2076, + "step": 43140 + }, + { + "epoch": 1.6660874937256265, + "grad_norm": 1.6641709804534912, + "learning_rate": 8.893007452025175e-05, + "loss": 0.1697, + "step": 43150 + }, + { + "epoch": 1.6664736090196532, + "grad_norm": 0.9248488545417786, + "learning_rate": 8.890433350064996e-05, + "loss": 0.4659, + "step": 43160 + }, + { + "epoch": 1.66685972431368, + "grad_norm": 2.1415629386901855, + "learning_rate": 8.887859248104819e-05, + "loss": 0.3056, + "step": 43170 + }, + { + "epoch": 1.6672458396077068, + "grad_norm": 0.39359986782073975, + "learning_rate": 8.885285146144639e-05, + "loss": 0.2799, + "step": 43180 + }, + { + "epoch": 1.6676319549017338, + "grad_norm": 0.09023096412420273, + "learning_rate": 8.88271104418446e-05, + "loss": 0.2777, + "step": 43190 + }, + { + "epoch": 1.6680180701957603, + "grad_norm": 1.8555763959884644, + "learning_rate": 8.880136942224282e-05, + "loss": 0.3152, + "step": 43200 + }, + { + "epoch": 1.6684041854897873, + "grad_norm": 0.18823792040348053, + "learning_rate": 8.877562840264103e-05, + "loss": 0.1625, + "step": 43210 + }, + { + "epoch": 1.668790300783814, + "grad_norm": 1.5633597373962402, + "learning_rate": 8.874988738303924e-05, + "loss": 0.3041, + "step": 
43220 + }, + { + "epoch": 1.6691764160778408, + "grad_norm": 1.6646497249603271, + "learning_rate": 8.872414636343746e-05, + "loss": 0.3719, + "step": 43230 + }, + { + "epoch": 1.6695625313718676, + "grad_norm": 1.569008469581604, + "learning_rate": 8.869840534383568e-05, + "loss": 0.1797, + "step": 43240 + }, + { + "epoch": 1.6699486466658944, + "grad_norm": 0.44778671860694885, + "learning_rate": 8.86726643242339e-05, + "loss": 0.1192, + "step": 43250 + }, + { + "epoch": 1.6703347619599214, + "grad_norm": 1.936880350112915, + "learning_rate": 8.86469233046321e-05, + "loss": 0.262, + "step": 43260 + }, + { + "epoch": 1.670720877253948, + "grad_norm": 2.0454766750335693, + "learning_rate": 8.862118228503031e-05, + "loss": 0.125, + "step": 43270 + }, + { + "epoch": 1.671106992547975, + "grad_norm": 2.3878109455108643, + "learning_rate": 8.859544126542852e-05, + "loss": 0.2755, + "step": 43280 + }, + { + "epoch": 1.6714931078420017, + "grad_norm": 2.1281206607818604, + "learning_rate": 8.856970024582674e-05, + "loss": 0.3056, + "step": 43290 + }, + { + "epoch": 1.6718792231360284, + "grad_norm": 1.1602097749710083, + "learning_rate": 8.854395922622496e-05, + "loss": 0.3131, + "step": 43300 + }, + { + "epoch": 1.6722653384300552, + "grad_norm": 1.444730520248413, + "learning_rate": 8.851821820662318e-05, + "loss": 0.2922, + "step": 43310 + }, + { + "epoch": 1.672651453724082, + "grad_norm": 1.0081762075424194, + "learning_rate": 8.849247718702139e-05, + "loss": 0.2689, + "step": 43320 + }, + { + "epoch": 1.673037569018109, + "grad_norm": 2.532080888748169, + "learning_rate": 8.846673616741959e-05, + "loss": 0.4207, + "step": 43330 + }, + { + "epoch": 1.6734236843121355, + "grad_norm": 1.1733953952789307, + "learning_rate": 8.84409951478178e-05, + "loss": 0.3253, + "step": 43340 + }, + { + "epoch": 1.6738097996061625, + "grad_norm": 0.5781744122505188, + "learning_rate": 8.841525412821602e-05, + "loss": 0.165, + "step": 43350 + }, + { + "epoch": 1.674195914900189, + "grad_norm": 2.3320493698120117, + "learning_rate": 8.838951310861424e-05, + "loss": 0.1466, + "step": 43360 + }, + { + "epoch": 1.674582030194216, + "grad_norm": 0.6514772176742554, + "learning_rate": 8.836377208901246e-05, + "loss": 0.2392, + "step": 43370 + }, + { + "epoch": 1.6749681454882428, + "grad_norm": 2.446646213531494, + "learning_rate": 8.833803106941067e-05, + "loss": 0.2237, + "step": 43380 + }, + { + "epoch": 1.6753542607822696, + "grad_norm": 1.0567893981933594, + "learning_rate": 8.831229004980888e-05, + "loss": 0.4682, + "step": 43390 + }, + { + "epoch": 1.6757403760762963, + "grad_norm": 1.559910774230957, + "learning_rate": 8.828654903020708e-05, + "loss": 0.2283, + "step": 43400 + }, + { + "epoch": 1.676126491370323, + "grad_norm": 2.8934245109558105, + "learning_rate": 8.82608080106053e-05, + "loss": 0.255, + "step": 43410 + }, + { + "epoch": 1.67651260666435, + "grad_norm": 1.592612862586975, + "learning_rate": 8.823506699100352e-05, + "loss": 0.3115, + "step": 43420 + }, + { + "epoch": 1.6768987219583766, + "grad_norm": 0.19438475370407104, + "learning_rate": 8.820932597140174e-05, + "loss": 0.2549, + "step": 43430 + }, + { + "epoch": 1.6772848372524036, + "grad_norm": 1.3219093084335327, + "learning_rate": 8.818358495179995e-05, + "loss": 0.3156, + "step": 43440 + }, + { + "epoch": 1.6776709525464304, + "grad_norm": 0.23484057188034058, + "learning_rate": 8.815784393219816e-05, + "loss": 0.1866, + "step": 43450 + }, + { + "epoch": 1.6780570678404572, + "grad_norm": 1.284556269645691, + "learning_rate": 
8.813210291259638e-05, + "loss": 0.2019, + "step": 43460 + }, + { + "epoch": 1.678443183134484, + "grad_norm": 1.4797706604003906, + "learning_rate": 8.810636189299459e-05, + "loss": 0.2423, + "step": 43470 + }, + { + "epoch": 1.6788292984285107, + "grad_norm": 1.6002583503723145, + "learning_rate": 8.808062087339279e-05, + "loss": 0.182, + "step": 43480 + }, + { + "epoch": 1.6792154137225377, + "grad_norm": 1.0503334999084473, + "learning_rate": 8.805487985379102e-05, + "loss": 0.2062, + "step": 43490 + }, + { + "epoch": 1.6796015290165642, + "grad_norm": 1.0708048343658447, + "learning_rate": 8.802913883418923e-05, + "loss": 0.1004, + "step": 43500 + }, + { + "epoch": 1.6799876443105912, + "grad_norm": 1.6162430047988892, + "learning_rate": 8.800339781458744e-05, + "loss": 0.1858, + "step": 43510 + }, + { + "epoch": 1.680373759604618, + "grad_norm": 3.5026352405548096, + "learning_rate": 8.797765679498566e-05, + "loss": 0.3431, + "step": 43520 + }, + { + "epoch": 1.6807598748986448, + "grad_norm": 1.1792393922805786, + "learning_rate": 8.795191577538387e-05, + "loss": 0.3389, + "step": 43530 + }, + { + "epoch": 1.6811459901926715, + "grad_norm": 1.0513951778411865, + "learning_rate": 8.792617475578208e-05, + "loss": 0.4476, + "step": 43540 + }, + { + "epoch": 1.6815321054866983, + "grad_norm": 1.3215738534927368, + "learning_rate": 8.79004337361803e-05, + "loss": 0.1624, + "step": 43550 + }, + { + "epoch": 1.6819182207807253, + "grad_norm": 0.4807497262954712, + "learning_rate": 8.787469271657851e-05, + "loss": 0.2777, + "step": 43560 + }, + { + "epoch": 1.6823043360747518, + "grad_norm": 1.1114505529403687, + "learning_rate": 8.784895169697672e-05, + "loss": 0.1405, + "step": 43570 + }, + { + "epoch": 1.6826904513687788, + "grad_norm": 0.8023913502693176, + "learning_rate": 8.782321067737494e-05, + "loss": 0.1744, + "step": 43580 + }, + { + "epoch": 1.6830765666628054, + "grad_norm": 1.1723856925964355, + "learning_rate": 8.779746965777315e-05, + "loss": 0.214, + "step": 43590 + }, + { + "epoch": 1.6834626819568324, + "grad_norm": 2.1813528537750244, + "learning_rate": 8.777172863817136e-05, + "loss": 0.2897, + "step": 43600 + }, + { + "epoch": 1.6838487972508591, + "grad_norm": 0.2930634319782257, + "learning_rate": 8.774598761856958e-05, + "loss": 0.121, + "step": 43610 + }, + { + "epoch": 1.6842349125448859, + "grad_norm": 0.8081830739974976, + "learning_rate": 8.772024659896779e-05, + "loss": 0.2847, + "step": 43620 + }, + { + "epoch": 1.6846210278389127, + "grad_norm": 0.8421902060508728, + "learning_rate": 8.7694505579366e-05, + "loss": 0.3151, + "step": 43630 + }, + { + "epoch": 1.6850071431329394, + "grad_norm": 1.6624525785446167, + "learning_rate": 8.766876455976421e-05, + "loss": 0.1725, + "step": 43640 + }, + { + "epoch": 1.6853932584269664, + "grad_norm": 0.6831340789794922, + "learning_rate": 8.764302354016243e-05, + "loss": 0.432, + "step": 43650 + }, + { + "epoch": 1.685779373720993, + "grad_norm": 1.0642820596694946, + "learning_rate": 8.761728252056064e-05, + "loss": 0.2035, + "step": 43660 + }, + { + "epoch": 1.68616548901502, + "grad_norm": 0.6445997357368469, + "learning_rate": 8.759154150095887e-05, + "loss": 0.2407, + "step": 43670 + }, + { + "epoch": 1.6865516043090467, + "grad_norm": 0.1020059660077095, + "learning_rate": 8.756580048135707e-05, + "loss": 0.1744, + "step": 43680 + }, + { + "epoch": 1.6869377196030735, + "grad_norm": 0.3709975481033325, + "learning_rate": 8.754005946175528e-05, + "loss": 0.3713, + "step": 43690 + }, + { + "epoch": 
1.6873238348971002, + "grad_norm": 0.5910777449607849, + "learning_rate": 8.75143184421535e-05, + "loss": 0.4291, + "step": 43700 + }, + { + "epoch": 1.687709950191127, + "grad_norm": 2.377362012863159, + "learning_rate": 8.748857742255171e-05, + "loss": 0.2871, + "step": 43710 + }, + { + "epoch": 1.688096065485154, + "grad_norm": 0.46812891960144043, + "learning_rate": 8.746283640294992e-05, + "loss": 0.1998, + "step": 43720 + }, + { + "epoch": 1.6884821807791806, + "grad_norm": 1.391098141670227, + "learning_rate": 8.743709538334813e-05, + "loss": 0.3189, + "step": 43730 + }, + { + "epoch": 1.6888682960732075, + "grad_norm": 0.11116664111614227, + "learning_rate": 8.741135436374636e-05, + "loss": 0.2538, + "step": 43740 + }, + { + "epoch": 1.6892544113672343, + "grad_norm": 0.2116546928882599, + "learning_rate": 8.738561334414456e-05, + "loss": 0.2014, + "step": 43750 + }, + { + "epoch": 1.689640526661261, + "grad_norm": 2.3815419673919678, + "learning_rate": 8.735987232454277e-05, + "loss": 0.2395, + "step": 43760 + }, + { + "epoch": 1.6900266419552878, + "grad_norm": 0.11581381410360336, + "learning_rate": 8.733413130494099e-05, + "loss": 0.2699, + "step": 43770 + }, + { + "epoch": 1.6904127572493146, + "grad_norm": 0.25713813304901123, + "learning_rate": 8.73083902853392e-05, + "loss": 0.1651, + "step": 43780 + }, + { + "epoch": 1.6907988725433416, + "grad_norm": 0.3496115803718567, + "learning_rate": 8.728264926573741e-05, + "loss": 0.3175, + "step": 43790 + }, + { + "epoch": 1.6911849878373681, + "grad_norm": 0.4966486394405365, + "learning_rate": 8.725690824613564e-05, + "loss": 0.258, + "step": 43800 + }, + { + "epoch": 1.6915711031313951, + "grad_norm": 1.2760952711105347, + "learning_rate": 8.723116722653385e-05, + "loss": 0.0882, + "step": 43810 + }, + { + "epoch": 1.6919572184254217, + "grad_norm": 1.0095762014389038, + "learning_rate": 8.720542620693207e-05, + "loss": 0.2171, + "step": 43820 + }, + { + "epoch": 1.6923433337194487, + "grad_norm": 3.596276044845581, + "learning_rate": 8.717968518733027e-05, + "loss": 0.238, + "step": 43830 + }, + { + "epoch": 1.6927294490134754, + "grad_norm": 0.3974495530128479, + "learning_rate": 8.715394416772848e-05, + "loss": 0.1459, + "step": 43840 + }, + { + "epoch": 1.6931155643075022, + "grad_norm": 1.1426684856414795, + "learning_rate": 8.71282031481267e-05, + "loss": 0.297, + "step": 43850 + }, + { + "epoch": 1.693501679601529, + "grad_norm": 0.887981116771698, + "learning_rate": 8.710246212852492e-05, + "loss": 0.1999, + "step": 43860 + }, + { + "epoch": 1.6938877948955557, + "grad_norm": 2.5697977542877197, + "learning_rate": 8.707672110892313e-05, + "loss": 0.3186, + "step": 43870 + }, + { + "epoch": 1.6942739101895827, + "grad_norm": 1.0131279230117798, + "learning_rate": 8.705098008932135e-05, + "loss": 0.182, + "step": 43880 + }, + { + "epoch": 1.6946600254836093, + "grad_norm": 0.8319138288497925, + "learning_rate": 8.702523906971956e-05, + "loss": 0.2148, + "step": 43890 + }, + { + "epoch": 1.6950461407776363, + "grad_norm": 0.7270296216011047, + "learning_rate": 8.699949805011776e-05, + "loss": 0.1471, + "step": 43900 + }, + { + "epoch": 1.695432256071663, + "grad_norm": 1.2435482740402222, + "learning_rate": 8.697375703051597e-05, + "loss": 0.1446, + "step": 43910 + }, + { + "epoch": 1.6958183713656898, + "grad_norm": 0.5655059218406677, + "learning_rate": 8.69480160109142e-05, + "loss": 0.2765, + "step": 43920 + }, + { + "epoch": 1.6962044866597166, + "grad_norm": 1.7254856824874878, + "learning_rate": 
8.692227499131241e-05, + "loss": 0.3449, + "step": 43930 + }, + { + "epoch": 1.6965906019537433, + "grad_norm": 0.5344254374504089, + "learning_rate": 8.689653397171063e-05, + "loss": 0.3847, + "step": 43940 + }, + { + "epoch": 1.6969767172477703, + "grad_norm": 0.7520522475242615, + "learning_rate": 8.687079295210884e-05, + "loss": 0.2785, + "step": 43950 + }, + { + "epoch": 1.6973628325417969, + "grad_norm": 2.708897352218628, + "learning_rate": 8.684505193250705e-05, + "loss": 0.3298, + "step": 43960 + }, + { + "epoch": 1.6977489478358239, + "grad_norm": 0.06953504681587219, + "learning_rate": 8.681931091290525e-05, + "loss": 0.094, + "step": 43970 + }, + { + "epoch": 1.6981350631298504, + "grad_norm": 0.586360514163971, + "learning_rate": 8.679356989330347e-05, + "loss": 0.2969, + "step": 43980 + }, + { + "epoch": 1.6985211784238774, + "grad_norm": 0.5543690919876099, + "learning_rate": 8.67678288737017e-05, + "loss": 0.1414, + "step": 43990 + }, + { + "epoch": 1.6989072937179042, + "grad_norm": 1.7767741680145264, + "learning_rate": 8.674208785409991e-05, + "loss": 0.3374, + "step": 44000 + }, + { + "epoch": 1.699293409011931, + "grad_norm": 1.463431477546692, + "learning_rate": 8.671634683449812e-05, + "loss": 0.2547, + "step": 44010 + }, + { + "epoch": 1.699679524305958, + "grad_norm": 0.3490108251571655, + "learning_rate": 8.669060581489633e-05, + "loss": 0.2969, + "step": 44020 + }, + { + "epoch": 1.7000656395999845, + "grad_norm": 0.4525464177131653, + "learning_rate": 8.666486479529455e-05, + "loss": 0.2982, + "step": 44030 + }, + { + "epoch": 1.7004517548940115, + "grad_norm": 0.3723921775817871, + "learning_rate": 8.663912377569276e-05, + "loss": 0.2818, + "step": 44040 + }, + { + "epoch": 1.700837870188038, + "grad_norm": 0.5301186442375183, + "learning_rate": 8.661338275609097e-05, + "loss": 0.1871, + "step": 44050 + }, + { + "epoch": 1.701223985482065, + "grad_norm": 2.890627861022949, + "learning_rate": 8.658764173648919e-05, + "loss": 0.3608, + "step": 44060 + }, + { + "epoch": 1.7016101007760918, + "grad_norm": 1.4061124324798584, + "learning_rate": 8.65619007168874e-05, + "loss": 0.203, + "step": 44070 + }, + { + "epoch": 1.7019962160701185, + "grad_norm": 2.882411241531372, + "learning_rate": 8.653615969728561e-05, + "loss": 0.2203, + "step": 44080 + }, + { + "epoch": 1.7023823313641453, + "grad_norm": 1.3079861402511597, + "learning_rate": 8.651041867768383e-05, + "loss": 0.1199, + "step": 44090 + }, + { + "epoch": 1.702768446658172, + "grad_norm": 4.284554481506348, + "learning_rate": 8.648467765808204e-05, + "loss": 0.2187, + "step": 44100 + }, + { + "epoch": 1.703154561952199, + "grad_norm": 0.14506328105926514, + "learning_rate": 8.645893663848025e-05, + "loss": 0.2822, + "step": 44110 + }, + { + "epoch": 1.7035406772462256, + "grad_norm": 0.48547646403312683, + "learning_rate": 8.643319561887847e-05, + "loss": 0.2239, + "step": 44120 + }, + { + "epoch": 1.7039267925402526, + "grad_norm": 1.2987794876098633, + "learning_rate": 8.640745459927668e-05, + "loss": 0.1554, + "step": 44130 + }, + { + "epoch": 1.7043129078342794, + "grad_norm": 0.25391751527786255, + "learning_rate": 8.63817135796749e-05, + "loss": 0.2635, + "step": 44140 + }, + { + "epoch": 1.7046990231283061, + "grad_norm": 1.7552175521850586, + "learning_rate": 8.635597256007311e-05, + "loss": 0.2762, + "step": 44150 + }, + { + "epoch": 1.705085138422333, + "grad_norm": 0.8608360290527344, + "learning_rate": 8.633023154047132e-05, + "loss": 0.2384, + "step": 44160 + }, + { + "epoch": 
1.7054712537163597, + "grad_norm": 2.4618284702301025, + "learning_rate": 8.630449052086955e-05, + "loss": 0.3383, + "step": 44170 + }, + { + "epoch": 1.7058573690103866, + "grad_norm": 0.9636523127555847, + "learning_rate": 8.627874950126775e-05, + "loss": 0.2036, + "step": 44180 + }, + { + "epoch": 1.7062434843044132, + "grad_norm": 0.3806803226470947, + "learning_rate": 8.625300848166596e-05, + "loss": 0.1378, + "step": 44190 + }, + { + "epoch": 1.7066295995984402, + "grad_norm": 0.31704220175743103, + "learning_rate": 8.622726746206417e-05, + "loss": 0.2171, + "step": 44200 + }, + { + "epoch": 1.7070157148924667, + "grad_norm": 2.3355181217193604, + "learning_rate": 8.620152644246239e-05, + "loss": 0.2983, + "step": 44210 + }, + { + "epoch": 1.7074018301864937, + "grad_norm": 1.3819530010223389, + "learning_rate": 8.61757854228606e-05, + "loss": 0.3944, + "step": 44220 + }, + { + "epoch": 1.7077879454805205, + "grad_norm": 0.5096393823623657, + "learning_rate": 8.615004440325883e-05, + "loss": 0.3142, + "step": 44230 + }, + { + "epoch": 1.7081740607745473, + "grad_norm": 0.7113396525382996, + "learning_rate": 8.612430338365704e-05, + "loss": 0.1644, + "step": 44240 + }, + { + "epoch": 1.708560176068574, + "grad_norm": 0.3259173333644867, + "learning_rate": 8.609856236405524e-05, + "loss": 0.208, + "step": 44250 + }, + { + "epoch": 1.7089462913626008, + "grad_norm": 0.1341869980096817, + "learning_rate": 8.607282134445345e-05, + "loss": 0.1642, + "step": 44260 + }, + { + "epoch": 1.7093324066566278, + "grad_norm": 1.0947731733322144, + "learning_rate": 8.604708032485167e-05, + "loss": 0.3079, + "step": 44270 + }, + { + "epoch": 1.7097185219506543, + "grad_norm": 1.065469741821289, + "learning_rate": 8.602133930524988e-05, + "loss": 0.2845, + "step": 44280 + }, + { + "epoch": 1.7101046372446813, + "grad_norm": 0.23346304893493652, + "learning_rate": 8.59955982856481e-05, + "loss": 0.1428, + "step": 44290 + }, + { + "epoch": 1.710490752538708, + "grad_norm": 0.9997304677963257, + "learning_rate": 8.596985726604632e-05, + "loss": 0.2313, + "step": 44300 + }, + { + "epoch": 1.7108768678327348, + "grad_norm": 0.2540823519229889, + "learning_rate": 8.594411624644453e-05, + "loss": 0.131, + "step": 44310 + }, + { + "epoch": 1.7112629831267616, + "grad_norm": 0.35752159357070923, + "learning_rate": 8.591837522684273e-05, + "loss": 0.2414, + "step": 44320 + }, + { + "epoch": 1.7116490984207884, + "grad_norm": 0.9997861385345459, + "learning_rate": 8.589263420724095e-05, + "loss": 0.2726, + "step": 44330 + }, + { + "epoch": 1.7120352137148154, + "grad_norm": 0.1516636461019516, + "learning_rate": 8.586689318763916e-05, + "loss": 0.0939, + "step": 44340 + }, + { + "epoch": 1.712421329008842, + "grad_norm": 1.5101124048233032, + "learning_rate": 8.584115216803737e-05, + "loss": 0.1331, + "step": 44350 + }, + { + "epoch": 1.712807444302869, + "grad_norm": 1.8988938331604004, + "learning_rate": 8.58154111484356e-05, + "loss": 0.104, + "step": 44360 + }, + { + "epoch": 1.7131935595968957, + "grad_norm": 0.50355464220047, + "learning_rate": 8.578967012883381e-05, + "loss": 0.2283, + "step": 44370 + }, + { + "epoch": 1.7135796748909224, + "grad_norm": 1.3266645669937134, + "learning_rate": 8.576392910923203e-05, + "loss": 0.2639, + "step": 44380 + }, + { + "epoch": 1.7139657901849492, + "grad_norm": 0.5714776515960693, + "learning_rate": 8.573818808963023e-05, + "loss": 0.3018, + "step": 44390 + }, + { + "epoch": 1.714351905478976, + "grad_norm": 2.2676408290863037, + "learning_rate": 
8.571244707002844e-05, + "loss": 0.1851, + "step": 44400 + }, + { + "epoch": 1.714738020773003, + "grad_norm": 1.338866114616394, + "learning_rate": 8.568670605042665e-05, + "loss": 0.198, + "step": 44410 + }, + { + "epoch": 1.7151241360670295, + "grad_norm": 0.7841195464134216, + "learning_rate": 8.566096503082488e-05, + "loss": 0.1689, + "step": 44420 + }, + { + "epoch": 1.7155102513610565, + "grad_norm": 2.0101919174194336, + "learning_rate": 8.56352240112231e-05, + "loss": 0.278, + "step": 44430 + }, + { + "epoch": 1.715896366655083, + "grad_norm": 0.3661075830459595, + "learning_rate": 8.560948299162131e-05, + "loss": 0.19, + "step": 44440 + }, + { + "epoch": 1.71628248194911, + "grad_norm": 1.3221279382705688, + "learning_rate": 8.558374197201952e-05, + "loss": 0.2621, + "step": 44450 + }, + { + "epoch": 1.7166685972431368, + "grad_norm": 1.1006388664245605, + "learning_rate": 8.555800095241773e-05, + "loss": 0.1661, + "step": 44460 + }, + { + "epoch": 1.7170547125371636, + "grad_norm": 0.6990749835968018, + "learning_rate": 8.553225993281593e-05, + "loss": 0.1234, + "step": 44470 + }, + { + "epoch": 1.7174408278311903, + "grad_norm": 1.1236909627914429, + "learning_rate": 8.550651891321416e-05, + "loss": 0.3528, + "step": 44480 + }, + { + "epoch": 1.717826943125217, + "grad_norm": 1.3502060174942017, + "learning_rate": 8.548077789361237e-05, + "loss": 0.1035, + "step": 44490 + }, + { + "epoch": 1.718213058419244, + "grad_norm": 0.6443360447883606, + "learning_rate": 8.545503687401059e-05, + "loss": 0.257, + "step": 44500 + }, + { + "epoch": 1.7185991737132706, + "grad_norm": 1.864953875541687, + "learning_rate": 8.54292958544088e-05, + "loss": 0.1791, + "step": 44510 + }, + { + "epoch": 1.7189852890072976, + "grad_norm": 0.8403190970420837, + "learning_rate": 8.540355483480701e-05, + "loss": 0.1907, + "step": 44520 + }, + { + "epoch": 1.7193714043013244, + "grad_norm": 2.308983325958252, + "learning_rate": 8.537781381520523e-05, + "loss": 0.3784, + "step": 44530 + }, + { + "epoch": 1.7197575195953512, + "grad_norm": 0.3229024112224579, + "learning_rate": 8.535207279560343e-05, + "loss": 0.2405, + "step": 44540 + }, + { + "epoch": 1.720143634889378, + "grad_norm": 0.5669896602630615, + "learning_rate": 8.532633177600165e-05, + "loss": 0.2103, + "step": 44550 + }, + { + "epoch": 1.7205297501834047, + "grad_norm": 0.9105948805809021, + "learning_rate": 8.530059075639987e-05, + "loss": 0.1816, + "step": 44560 + }, + { + "epoch": 1.7209158654774317, + "grad_norm": 0.4213886260986328, + "learning_rate": 8.527484973679808e-05, + "loss": 0.1735, + "step": 44570 + }, + { + "epoch": 1.7213019807714582, + "grad_norm": 2.9891350269317627, + "learning_rate": 8.52491087171963e-05, + "loss": 0.3105, + "step": 44580 + }, + { + "epoch": 1.7216880960654852, + "grad_norm": 2.2383408546447754, + "learning_rate": 8.52233676975945e-05, + "loss": 0.2677, + "step": 44590 + }, + { + "epoch": 1.722074211359512, + "grad_norm": 0.128885418176651, + "learning_rate": 8.519762667799272e-05, + "loss": 0.2, + "step": 44600 + }, + { + "epoch": 1.7224603266535388, + "grad_norm": 1.021690845489502, + "learning_rate": 8.517188565839093e-05, + "loss": 0.3636, + "step": 44610 + }, + { + "epoch": 1.7228464419475655, + "grad_norm": 3.629401445388794, + "learning_rate": 8.514614463878915e-05, + "loss": 0.3223, + "step": 44620 + }, + { + "epoch": 1.7232325572415923, + "grad_norm": 2.4377548694610596, + "learning_rate": 8.512040361918736e-05, + "loss": 0.2448, + "step": 44630 + }, + { + "epoch": 1.7236186725356193, + 
"grad_norm": 0.6085236668586731, + "learning_rate": 8.509466259958557e-05, + "loss": 0.2202, + "step": 44640 + }, + { + "epoch": 1.7240047878296458, + "grad_norm": 1.4205585718154907, + "learning_rate": 8.506892157998379e-05, + "loss": 0.1697, + "step": 44650 + }, + { + "epoch": 1.7243909031236728, + "grad_norm": 2.6096320152282715, + "learning_rate": 8.5043180560382e-05, + "loss": 0.1657, + "step": 44660 + }, + { + "epoch": 1.7247770184176994, + "grad_norm": 0.155767560005188, + "learning_rate": 8.501743954078021e-05, + "loss": 0.162, + "step": 44670 + }, + { + "epoch": 1.7251631337117264, + "grad_norm": 0.30843400955200195, + "learning_rate": 8.499169852117843e-05, + "loss": 0.2844, + "step": 44680 + }, + { + "epoch": 1.7255492490057531, + "grad_norm": 0.7207142114639282, + "learning_rate": 8.496595750157664e-05, + "loss": 0.3617, + "step": 44690 + }, + { + "epoch": 1.72593536429978, + "grad_norm": 0.6151508092880249, + "learning_rate": 8.494021648197485e-05, + "loss": 0.1202, + "step": 44700 + }, + { + "epoch": 1.7263214795938067, + "grad_norm": 1.712505578994751, + "learning_rate": 8.491447546237307e-05, + "loss": 0.3624, + "step": 44710 + }, + { + "epoch": 1.7267075948878334, + "grad_norm": 2.3895373344421387, + "learning_rate": 8.488873444277128e-05, + "loss": 0.3347, + "step": 44720 + }, + { + "epoch": 1.7270937101818604, + "grad_norm": 0.3279499411582947, + "learning_rate": 8.48629934231695e-05, + "loss": 0.1821, + "step": 44730 + }, + { + "epoch": 1.727479825475887, + "grad_norm": 0.9812091588973999, + "learning_rate": 8.48372524035677e-05, + "loss": 0.3173, + "step": 44740 + }, + { + "epoch": 1.727865940769914, + "grad_norm": 0.36781829595565796, + "learning_rate": 8.481151138396592e-05, + "loss": 0.2766, + "step": 44750 + }, + { + "epoch": 1.7282520560639407, + "grad_norm": 2.1118052005767822, + "learning_rate": 8.478577036436413e-05, + "loss": 0.4954, + "step": 44760 + }, + { + "epoch": 1.7286381713579675, + "grad_norm": 0.7159673571586609, + "learning_rate": 8.476002934476235e-05, + "loss": 0.3877, + "step": 44770 + }, + { + "epoch": 1.7290242866519943, + "grad_norm": 0.16327527165412903, + "learning_rate": 8.473428832516056e-05, + "loss": 0.2975, + "step": 44780 + }, + { + "epoch": 1.729410401946021, + "grad_norm": 1.2890655994415283, + "learning_rate": 8.470854730555877e-05, + "loss": 0.2056, + "step": 44790 + }, + { + "epoch": 1.729796517240048, + "grad_norm": 0.7911620140075684, + "learning_rate": 8.4682806285957e-05, + "loss": 0.2162, + "step": 44800 + }, + { + "epoch": 1.7301826325340746, + "grad_norm": 0.1271449327468872, + "learning_rate": 8.465706526635521e-05, + "loss": 0.1821, + "step": 44810 + }, + { + "epoch": 1.7305687478281016, + "grad_norm": 1.1765756607055664, + "learning_rate": 8.463132424675341e-05, + "loss": 0.1826, + "step": 44820 + }, + { + "epoch": 1.7309548631221283, + "grad_norm": 1.5291017293930054, + "learning_rate": 8.460558322715163e-05, + "loss": 0.2125, + "step": 44830 + }, + { + "epoch": 1.731340978416155, + "grad_norm": 1.1815464496612549, + "learning_rate": 8.457984220754984e-05, + "loss": 0.1488, + "step": 44840 + }, + { + "epoch": 1.7317270937101819, + "grad_norm": 2.127589464187622, + "learning_rate": 8.455410118794805e-05, + "loss": 0.3758, + "step": 44850 + }, + { + "epoch": 1.7321132090042086, + "grad_norm": 0.5369740724563599, + "learning_rate": 8.452836016834628e-05, + "loss": 0.2725, + "step": 44860 + }, + { + "epoch": 1.7324993242982356, + "grad_norm": 1.038955807685852, + "learning_rate": 8.450261914874449e-05, + "loss": 0.233, + 
"step": 44870 + }, + { + "epoch": 1.7328854395922622, + "grad_norm": 1.5458402633666992, + "learning_rate": 8.44768781291427e-05, + "loss": 0.3846, + "step": 44880 + }, + { + "epoch": 1.7332715548862891, + "grad_norm": 0.8488010168075562, + "learning_rate": 8.44511371095409e-05, + "loss": 0.2022, + "step": 44890 + }, + { + "epoch": 1.7336576701803157, + "grad_norm": 2.4855663776397705, + "learning_rate": 8.442539608993912e-05, + "loss": 0.2071, + "step": 44900 + }, + { + "epoch": 1.7340437854743427, + "grad_norm": 5.918638229370117, + "learning_rate": 8.439965507033733e-05, + "loss": 0.1962, + "step": 44910 + }, + { + "epoch": 1.7344299007683694, + "grad_norm": 1.9365994930267334, + "learning_rate": 8.437391405073556e-05, + "loss": 0.1313, + "step": 44920 + }, + { + "epoch": 1.7348160160623962, + "grad_norm": 1.1711100339889526, + "learning_rate": 8.434817303113377e-05, + "loss": 0.2938, + "step": 44930 + }, + { + "epoch": 1.735202131356423, + "grad_norm": 1.0350059270858765, + "learning_rate": 8.432243201153199e-05, + "loss": 0.1813, + "step": 44940 + }, + { + "epoch": 1.7355882466504498, + "grad_norm": 0.9831900596618652, + "learning_rate": 8.42966909919302e-05, + "loss": 0.1572, + "step": 44950 + }, + { + "epoch": 1.7359743619444767, + "grad_norm": 2.4830219745635986, + "learning_rate": 8.42709499723284e-05, + "loss": 0.2408, + "step": 44960 + }, + { + "epoch": 1.7363604772385033, + "grad_norm": 2.513667345046997, + "learning_rate": 8.424520895272661e-05, + "loss": 0.3392, + "step": 44970 + }, + { + "epoch": 1.7367465925325303, + "grad_norm": 1.1947827339172363, + "learning_rate": 8.421946793312484e-05, + "loss": 0.1314, + "step": 44980 + }, + { + "epoch": 1.737132707826557, + "grad_norm": 0.9299411773681641, + "learning_rate": 8.419372691352305e-05, + "loss": 0.1691, + "step": 44990 + }, + { + "epoch": 1.7375188231205838, + "grad_norm": 2.9388837814331055, + "learning_rate": 8.416798589392127e-05, + "loss": 0.389, + "step": 45000 + }, + { + "epoch": 1.7379049384146106, + "grad_norm": 0.20148181915283203, + "learning_rate": 8.414224487431948e-05, + "loss": 0.2589, + "step": 45010 + }, + { + "epoch": 1.7382910537086373, + "grad_norm": 1.1276886463165283, + "learning_rate": 8.411650385471769e-05, + "loss": 0.2772, + "step": 45020 + }, + { + "epoch": 1.7386771690026643, + "grad_norm": 0.14743736386299133, + "learning_rate": 8.40907628351159e-05, + "loss": 0.2235, + "step": 45030 + }, + { + "epoch": 1.7390632842966909, + "grad_norm": 1.7028512954711914, + "learning_rate": 8.40650218155141e-05, + "loss": 0.262, + "step": 45040 + }, + { + "epoch": 1.7394493995907179, + "grad_norm": 0.27506566047668457, + "learning_rate": 8.403928079591233e-05, + "loss": 0.1546, + "step": 45050 + }, + { + "epoch": 1.7398355148847446, + "grad_norm": 0.11555200815200806, + "learning_rate": 8.401353977631055e-05, + "loss": 0.2591, + "step": 45060 + }, + { + "epoch": 1.7402216301787714, + "grad_norm": 2.141800880432129, + "learning_rate": 8.398779875670876e-05, + "loss": 0.1974, + "step": 45070 + }, + { + "epoch": 1.7406077454727982, + "grad_norm": 0.8879682421684265, + "learning_rate": 8.396205773710697e-05, + "loss": 0.2192, + "step": 45080 + }, + { + "epoch": 1.740993860766825, + "grad_norm": 0.5697862505912781, + "learning_rate": 8.393631671750519e-05, + "loss": 0.1556, + "step": 45090 + }, + { + "epoch": 1.741379976060852, + "grad_norm": 1.5055205821990967, + "learning_rate": 8.39105756979034e-05, + "loss": 0.2199, + "step": 45100 + }, + { + "epoch": 1.7417660913548785, + "grad_norm": 1.4144301414489746, + 
"learning_rate": 8.388483467830161e-05, + "loss": 0.1873, + "step": 45110 + }, + { + "epoch": 1.7421522066489055, + "grad_norm": 2.419147253036499, + "learning_rate": 8.385909365869983e-05, + "loss": 0.3444, + "step": 45120 + }, + { + "epoch": 1.742538321942932, + "grad_norm": 1.1189093589782715, + "learning_rate": 8.383335263909804e-05, + "loss": 0.2641, + "step": 45130 + }, + { + "epoch": 1.742924437236959, + "grad_norm": 0.44919779896736145, + "learning_rate": 8.380761161949625e-05, + "loss": 0.1945, + "step": 45140 + }, + { + "epoch": 1.7433105525309858, + "grad_norm": 3.4231624603271484, + "learning_rate": 8.378187059989447e-05, + "loss": 0.372, + "step": 45150 + }, + { + "epoch": 1.7436966678250125, + "grad_norm": 1.382497787475586, + "learning_rate": 8.375612958029268e-05, + "loss": 0.1532, + "step": 45160 + }, + { + "epoch": 1.7440827831190393, + "grad_norm": 1.9219565391540527, + "learning_rate": 8.373038856069089e-05, + "loss": 0.3255, + "step": 45170 + }, + { + "epoch": 1.744468898413066, + "grad_norm": 1.2347924709320068, + "learning_rate": 8.37046475410891e-05, + "loss": 0.2786, + "step": 45180 + }, + { + "epoch": 1.744855013707093, + "grad_norm": 0.1424872726202011, + "learning_rate": 8.367890652148732e-05, + "loss": 0.2561, + "step": 45190 + }, + { + "epoch": 1.7452411290011196, + "grad_norm": 0.1864137351512909, + "learning_rate": 8.365316550188553e-05, + "loss": 0.4099, + "step": 45200 + }, + { + "epoch": 1.7456272442951466, + "grad_norm": 0.13210314512252808, + "learning_rate": 8.362742448228375e-05, + "loss": 0.2684, + "step": 45210 + }, + { + "epoch": 1.7460133595891734, + "grad_norm": 0.14317531883716583, + "learning_rate": 8.360168346268196e-05, + "loss": 0.1777, + "step": 45220 + }, + { + "epoch": 1.7463994748832001, + "grad_norm": 0.9194528460502625, + "learning_rate": 8.357594244308019e-05, + "loss": 0.2047, + "step": 45230 + }, + { + "epoch": 1.746785590177227, + "grad_norm": 0.13146016001701355, + "learning_rate": 8.355020142347839e-05, + "loss": 0.1659, + "step": 45240 + }, + { + "epoch": 1.7471717054712537, + "grad_norm": 0.804329514503479, + "learning_rate": 8.35244604038766e-05, + "loss": 0.1685, + "step": 45250 + }, + { + "epoch": 1.7475578207652807, + "grad_norm": 2.3637967109680176, + "learning_rate": 8.349871938427481e-05, + "loss": 0.234, + "step": 45260 + }, + { + "epoch": 1.7479439360593072, + "grad_norm": 0.550857663154602, + "learning_rate": 8.347297836467303e-05, + "loss": 0.2283, + "step": 45270 + }, + { + "epoch": 1.7483300513533342, + "grad_norm": 0.5919561982154846, + "learning_rate": 8.344723734507124e-05, + "loss": 0.229, + "step": 45280 + }, + { + "epoch": 1.7487161666473607, + "grad_norm": 2.1318750381469727, + "learning_rate": 8.342149632546945e-05, + "loss": 0.2161, + "step": 45290 + }, + { + "epoch": 1.7491022819413877, + "grad_norm": 2.0451972484588623, + "learning_rate": 8.339575530586768e-05, + "loss": 0.1845, + "step": 45300 + }, + { + "epoch": 1.7494883972354145, + "grad_norm": 0.1041000485420227, + "learning_rate": 8.337001428626588e-05, + "loss": 0.1473, + "step": 45310 + }, + { + "epoch": 1.7498745125294413, + "grad_norm": 1.8641316890716553, + "learning_rate": 8.334427326666409e-05, + "loss": 0.2287, + "step": 45320 + }, + { + "epoch": 1.7502606278234683, + "grad_norm": 0.5701905488967896, + "learning_rate": 8.33185322470623e-05, + "loss": 0.1356, + "step": 45330 + }, + { + "epoch": 1.7506467431174948, + "grad_norm": 0.78929603099823, + "learning_rate": 8.329279122746052e-05, + "loss": 0.1407, + "step": 45340 + }, + { + "epoch": 
1.7510328584115218, + "grad_norm": 1.4745780229568481, + "learning_rate": 8.326705020785873e-05, + "loss": 0.2731, + "step": 45350 + }, + { + "epoch": 1.7514189737055483, + "grad_norm": 0.5546283721923828, + "learning_rate": 8.324130918825696e-05, + "loss": 0.252, + "step": 45360 + }, + { + "epoch": 1.7518050889995753, + "grad_norm": 0.6111850142478943, + "learning_rate": 8.321556816865517e-05, + "loss": 0.3657, + "step": 45370 + }, + { + "epoch": 1.752191204293602, + "grad_norm": 0.9489399790763855, + "learning_rate": 8.318982714905339e-05, + "loss": 0.2214, + "step": 45380 + }, + { + "epoch": 1.7525773195876289, + "grad_norm": 0.2951168119907379, + "learning_rate": 8.316408612945159e-05, + "loss": 0.2425, + "step": 45390 + }, + { + "epoch": 1.7529634348816556, + "grad_norm": 0.13124701380729675, + "learning_rate": 8.31383451098498e-05, + "loss": 0.1943, + "step": 45400 + }, + { + "epoch": 1.7533495501756824, + "grad_norm": 0.787786066532135, + "learning_rate": 8.311260409024801e-05, + "loss": 0.2009, + "step": 45410 + }, + { + "epoch": 1.7537356654697094, + "grad_norm": 0.30316632986068726, + "learning_rate": 8.308686307064624e-05, + "loss": 0.2426, + "step": 45420 + }, + { + "epoch": 1.754121780763736, + "grad_norm": 1.0703738927841187, + "learning_rate": 8.306112205104445e-05, + "loss": 0.2247, + "step": 45430 + }, + { + "epoch": 1.754507896057763, + "grad_norm": 0.2669302821159363, + "learning_rate": 8.303538103144267e-05, + "loss": 0.2076, + "step": 45440 + }, + { + "epoch": 1.7548940113517897, + "grad_norm": 2.0138015747070312, + "learning_rate": 8.300964001184088e-05, + "loss": 0.3064, + "step": 45450 + }, + { + "epoch": 1.7552801266458165, + "grad_norm": 4.026096820831299, + "learning_rate": 8.298389899223908e-05, + "loss": 0.2664, + "step": 45460 + }, + { + "epoch": 1.7556662419398432, + "grad_norm": 0.7756350636482239, + "learning_rate": 8.295815797263729e-05, + "loss": 0.224, + "step": 45470 + }, + { + "epoch": 1.75605235723387, + "grad_norm": 0.8312183618545532, + "learning_rate": 8.293241695303552e-05, + "loss": 0.393, + "step": 45480 + }, + { + "epoch": 1.756438472527897, + "grad_norm": 1.6933070421218872, + "learning_rate": 8.290667593343373e-05, + "loss": 0.2006, + "step": 45490 + }, + { + "epoch": 1.7568245878219235, + "grad_norm": 0.7471343278884888, + "learning_rate": 8.288093491383195e-05, + "loss": 0.1356, + "step": 45500 + }, + { + "epoch": 1.7572107031159505, + "grad_norm": 2.5663211345672607, + "learning_rate": 8.285519389423016e-05, + "loss": 0.2545, + "step": 45510 + }, + { + "epoch": 1.757596818409977, + "grad_norm": 2.9056997299194336, + "learning_rate": 8.282945287462837e-05, + "loss": 0.2321, + "step": 45520 + }, + { + "epoch": 1.757982933704004, + "grad_norm": 0.6775566935539246, + "learning_rate": 8.280371185502657e-05, + "loss": 0.1703, + "step": 45530 + }, + { + "epoch": 1.7583690489980308, + "grad_norm": 1.0130302906036377, + "learning_rate": 8.27779708354248e-05, + "loss": 0.3204, + "step": 45540 + }, + { + "epoch": 1.7587551642920576, + "grad_norm": 0.8566673398017883, + "learning_rate": 8.275222981582301e-05, + "loss": 0.2069, + "step": 45550 + }, + { + "epoch": 1.7591412795860843, + "grad_norm": 1.0929473638534546, + "learning_rate": 8.272648879622122e-05, + "loss": 0.2631, + "step": 45560 + }, + { + "epoch": 1.7595273948801111, + "grad_norm": 1.7695512771606445, + "learning_rate": 8.270074777661944e-05, + "loss": 0.2028, + "step": 45570 + }, + { + "epoch": 1.759913510174138, + "grad_norm": 1.5217735767364502, + "learning_rate": 
8.267500675701765e-05, + "loss": 0.1925, + "step": 45580 + }, + { + "epoch": 1.7602996254681647, + "grad_norm": 0.639868438243866, + "learning_rate": 8.264926573741586e-05, + "loss": 0.3473, + "step": 45590 + }, + { + "epoch": 1.7606857407621916, + "grad_norm": 0.3286214768886566, + "learning_rate": 8.262352471781408e-05, + "loss": 0.1136, + "step": 45600 + }, + { + "epoch": 1.7610718560562184, + "grad_norm": 1.0162252187728882, + "learning_rate": 8.259778369821229e-05, + "loss": 0.1108, + "step": 45610 + }, + { + "epoch": 1.7614579713502452, + "grad_norm": 0.8821909427642822, + "learning_rate": 8.25720426786105e-05, + "loss": 0.2852, + "step": 45620 + }, + { + "epoch": 1.761844086644272, + "grad_norm": 0.7297468781471252, + "learning_rate": 8.254630165900872e-05, + "loss": 0.187, + "step": 45630 + }, + { + "epoch": 1.7622302019382987, + "grad_norm": 2.8545424938201904, + "learning_rate": 8.252056063940693e-05, + "loss": 0.2441, + "step": 45640 + }, + { + "epoch": 1.7626163172323257, + "grad_norm": 0.5221932530403137, + "learning_rate": 8.249481961980514e-05, + "loss": 0.1934, + "step": 45650 + }, + { + "epoch": 1.7630024325263522, + "grad_norm": 1.1896415948867798, + "learning_rate": 8.246907860020336e-05, + "loss": 0.2149, + "step": 45660 + }, + { + "epoch": 1.7633885478203792, + "grad_norm": 0.28229987621307373, + "learning_rate": 8.244333758060157e-05, + "loss": 0.2514, + "step": 45670 + }, + { + "epoch": 1.763774663114406, + "grad_norm": 1.2415190935134888, + "learning_rate": 8.241759656099978e-05, + "loss": 0.4076, + "step": 45680 + }, + { + "epoch": 1.7641607784084328, + "grad_norm": 2.535752296447754, + "learning_rate": 8.2391855541398e-05, + "loss": 0.1628, + "step": 45690 + }, + { + "epoch": 1.7645468937024595, + "grad_norm": 1.8700125217437744, + "learning_rate": 8.236611452179621e-05, + "loss": 0.2549, + "step": 45700 + }, + { + "epoch": 1.7649330089964863, + "grad_norm": 0.5365914702415466, + "learning_rate": 8.234037350219442e-05, + "loss": 0.0801, + "step": 45710 + }, + { + "epoch": 1.7653191242905133, + "grad_norm": 3.5979111194610596, + "learning_rate": 8.231463248259264e-05, + "loss": 0.3129, + "step": 45720 + }, + { + "epoch": 1.7657052395845398, + "grad_norm": 2.5577943325042725, + "learning_rate": 8.228889146299086e-05, + "loss": 0.2171, + "step": 45730 + }, + { + "epoch": 1.7660913548785668, + "grad_norm": 2.128424882888794, + "learning_rate": 8.226315044338906e-05, + "loss": 0.1687, + "step": 45740 + }, + { + "epoch": 1.7664774701725934, + "grad_norm": 0.41651803255081177, + "learning_rate": 8.223740942378728e-05, + "loss": 0.2342, + "step": 45750 + }, + { + "epoch": 1.7668635854666204, + "grad_norm": 0.8309571146965027, + "learning_rate": 8.221166840418549e-05, + "loss": 0.2406, + "step": 45760 + }, + { + "epoch": 1.7672497007606471, + "grad_norm": 1.7219020128250122, + "learning_rate": 8.21859273845837e-05, + "loss": 0.2764, + "step": 45770 + }, + { + "epoch": 1.767635816054674, + "grad_norm": 1.5574841499328613, + "learning_rate": 8.216018636498192e-05, + "loss": 0.3413, + "step": 45780 + }, + { + "epoch": 1.7680219313487007, + "grad_norm": 0.48963573575019836, + "learning_rate": 8.213444534538014e-05, + "loss": 0.2381, + "step": 45790 + }, + { + "epoch": 1.7684080466427274, + "grad_norm": 2.597996711730957, + "learning_rate": 8.210870432577836e-05, + "loss": 0.2625, + "step": 45800 + }, + { + "epoch": 1.7687941619367544, + "grad_norm": 1.0208812952041626, + "learning_rate": 8.208296330617656e-05, + "loss": 0.3519, + "step": 45810 + }, + { + "epoch": 
1.769180277230781, + "grad_norm": 0.6976078748703003, + "learning_rate": 8.205722228657477e-05, + "loss": 0.2075, + "step": 45820 + }, + { + "epoch": 1.769566392524808, + "grad_norm": 1.534698247909546, + "learning_rate": 8.203148126697298e-05, + "loss": 0.1831, + "step": 45830 + }, + { + "epoch": 1.7699525078188347, + "grad_norm": 1.5414319038391113, + "learning_rate": 8.20057402473712e-05, + "loss": 0.2466, + "step": 45840 + }, + { + "epoch": 1.7703386231128615, + "grad_norm": 0.6082043051719666, + "learning_rate": 8.197999922776941e-05, + "loss": 0.2598, + "step": 45850 + }, + { + "epoch": 1.7707247384068883, + "grad_norm": 1.3977060317993164, + "learning_rate": 8.195425820816764e-05, + "loss": 0.2733, + "step": 45860 + }, + { + "epoch": 1.771110853700915, + "grad_norm": 1.2991368770599365, + "learning_rate": 8.192851718856585e-05, + "loss": 0.3078, + "step": 45870 + }, + { + "epoch": 1.771496968994942, + "grad_norm": 0.2851855754852295, + "learning_rate": 8.190277616896405e-05, + "loss": 0.3315, + "step": 45880 + }, + { + "epoch": 1.7718830842889686, + "grad_norm": 0.7140517234802246, + "learning_rate": 8.187703514936226e-05, + "loss": 0.2454, + "step": 45890 + }, + { + "epoch": 1.7722691995829956, + "grad_norm": 2.3793671131134033, + "learning_rate": 8.185129412976048e-05, + "loss": 0.3749, + "step": 45900 + }, + { + "epoch": 1.7726553148770223, + "grad_norm": 0.6969908475875854, + "learning_rate": 8.182555311015869e-05, + "loss": 0.2849, + "step": 45910 + }, + { + "epoch": 1.773041430171049, + "grad_norm": 0.7409896850585938, + "learning_rate": 8.179981209055692e-05, + "loss": 0.3614, + "step": 45920 + }, + { + "epoch": 1.7734275454650759, + "grad_norm": 2.1562981605529785, + "learning_rate": 8.177407107095513e-05, + "loss": 0.3234, + "step": 45930 + }, + { + "epoch": 1.7738136607591026, + "grad_norm": 0.8214607834815979, + "learning_rate": 8.174833005135334e-05, + "loss": 0.3747, + "step": 45940 + }, + { + "epoch": 1.7741997760531296, + "grad_norm": 1.1292701959609985, + "learning_rate": 8.172258903175154e-05, + "loss": 0.2048, + "step": 45950 + }, + { + "epoch": 1.7745858913471562, + "grad_norm": 0.968303918838501, + "learning_rate": 8.169684801214976e-05, + "loss": 0.1863, + "step": 45960 + }, + { + "epoch": 1.7749720066411832, + "grad_norm": 0.1449265480041504, + "learning_rate": 8.167110699254797e-05, + "loss": 0.2583, + "step": 45970 + }, + { + "epoch": 1.7753581219352097, + "grad_norm": 0.9772675633430481, + "learning_rate": 8.16453659729462e-05, + "loss": 0.372, + "step": 45980 + }, + { + "epoch": 1.7757442372292367, + "grad_norm": 1.8147861957550049, + "learning_rate": 8.161962495334441e-05, + "loss": 0.2404, + "step": 45990 + }, + { + "epoch": 1.7761303525232635, + "grad_norm": 1.828444004058838, + "learning_rate": 8.159388393374262e-05, + "loss": 0.4309, + "step": 46000 + }, + { + "epoch": 1.7765164678172902, + "grad_norm": 0.4886840879917145, + "learning_rate": 8.156814291414084e-05, + "loss": 0.1927, + "step": 46010 + }, + { + "epoch": 1.776902583111317, + "grad_norm": 0.7208076119422913, + "learning_rate": 8.154240189453905e-05, + "loss": 0.2186, + "step": 46020 + }, + { + "epoch": 1.7772886984053438, + "grad_norm": 0.6111328601837158, + "learning_rate": 8.151666087493725e-05, + "loss": 0.2065, + "step": 46030 + }, + { + "epoch": 1.7776748136993707, + "grad_norm": 2.601994514465332, + "learning_rate": 8.149091985533548e-05, + "loss": 0.1138, + "step": 46040 + }, + { + "epoch": 1.7780609289933973, + "grad_norm": 2.0283939838409424, + "learning_rate": 
8.146517883573369e-05, + "loss": 0.3461, + "step": 46050 + }, + { + "epoch": 1.7784470442874243, + "grad_norm": 1.329325795173645, + "learning_rate": 8.14394378161319e-05, + "loss": 0.1981, + "step": 46060 + }, + { + "epoch": 1.778833159581451, + "grad_norm": 0.9197964072227478, + "learning_rate": 8.141369679653012e-05, + "loss": 0.1853, + "step": 46070 + }, + { + "epoch": 1.7792192748754778, + "grad_norm": 0.5444622039794922, + "learning_rate": 8.138795577692833e-05, + "loss": 0.194, + "step": 46080 + }, + { + "epoch": 1.7796053901695046, + "grad_norm": 0.655691385269165, + "learning_rate": 8.136221475732654e-05, + "loss": 0.3279, + "step": 46090 + }, + { + "epoch": 1.7799915054635314, + "grad_norm": 1.0497363805770874, + "learning_rate": 8.133647373772474e-05, + "loss": 0.1208, + "step": 46100 + }, + { + "epoch": 1.7803776207575583, + "grad_norm": 1.5492208003997803, + "learning_rate": 8.131073271812297e-05, + "loss": 0.1155, + "step": 46110 + }, + { + "epoch": 1.780763736051585, + "grad_norm": 0.7815436720848083, + "learning_rate": 8.128499169852118e-05, + "loss": 0.3621, + "step": 46120 + }, + { + "epoch": 1.7811498513456119, + "grad_norm": 0.2011369913816452, + "learning_rate": 8.12592506789194e-05, + "loss": 0.1412, + "step": 46130 + }, + { + "epoch": 1.7815359666396386, + "grad_norm": 0.5177563428878784, + "learning_rate": 8.123350965931761e-05, + "loss": 0.2485, + "step": 46140 + }, + { + "epoch": 1.7819220819336654, + "grad_norm": 1.1141633987426758, + "learning_rate": 8.120776863971582e-05, + "loss": 0.2376, + "step": 46150 + }, + { + "epoch": 1.7823081972276922, + "grad_norm": 0.3764217495918274, + "learning_rate": 8.118202762011404e-05, + "loss": 0.2021, + "step": 46160 + }, + { + "epoch": 1.782694312521719, + "grad_norm": 0.7420102953910828, + "learning_rate": 8.115628660051225e-05, + "loss": 0.374, + "step": 46170 + }, + { + "epoch": 1.783080427815746, + "grad_norm": 1.4147247076034546, + "learning_rate": 8.113054558091046e-05, + "loss": 0.1202, + "step": 46180 + }, + { + "epoch": 1.7834665431097725, + "grad_norm": 0.9888404011726379, + "learning_rate": 8.110480456130868e-05, + "loss": 0.2564, + "step": 46190 + }, + { + "epoch": 1.7838526584037995, + "grad_norm": 1.2556307315826416, + "learning_rate": 8.107906354170689e-05, + "loss": 0.2403, + "step": 46200 + }, + { + "epoch": 1.784238773697826, + "grad_norm": 1.5373485088348389, + "learning_rate": 8.10533225221051e-05, + "loss": 0.23, + "step": 46210 + }, + { + "epoch": 1.784624888991853, + "grad_norm": 1.1403309106826782, + "learning_rate": 8.102758150250332e-05, + "loss": 0.2383, + "step": 46220 + }, + { + "epoch": 1.7850110042858798, + "grad_norm": 1.4236074686050415, + "learning_rate": 8.100184048290153e-05, + "loss": 0.2121, + "step": 46230 + }, + { + "epoch": 1.7853971195799065, + "grad_norm": 1.9537183046340942, + "learning_rate": 8.097609946329974e-05, + "loss": 0.2512, + "step": 46240 + }, + { + "epoch": 1.7857832348739333, + "grad_norm": 0.10802419483661652, + "learning_rate": 8.095035844369796e-05, + "loss": 0.2871, + "step": 46250 + }, + { + "epoch": 1.78616935016796, + "grad_norm": 0.9327038526535034, + "learning_rate": 8.092461742409617e-05, + "loss": 0.0487, + "step": 46260 + }, + { + "epoch": 1.786555465461987, + "grad_norm": 2.114015817642212, + "learning_rate": 8.089887640449438e-05, + "loss": 0.3128, + "step": 46270 + }, + { + "epoch": 1.7869415807560136, + "grad_norm": 1.2681559324264526, + "learning_rate": 8.08731353848926e-05, + "loss": 0.2883, + "step": 46280 + }, + { + "epoch": 1.7873276960500406, + 
"grad_norm": 1.0767977237701416, + "learning_rate": 8.084739436529082e-05, + "loss": 0.1632, + "step": 46290 + }, + { + "epoch": 1.7877138113440674, + "grad_norm": 0.9782537221908569, + "learning_rate": 8.082165334568902e-05, + "loss": 0.2489, + "step": 46300 + }, + { + "epoch": 1.7880999266380941, + "grad_norm": 0.3115275204181671, + "learning_rate": 8.079591232608724e-05, + "loss": 0.3691, + "step": 46310 + }, + { + "epoch": 1.788486041932121, + "grad_norm": 1.7753130197525024, + "learning_rate": 8.077017130648545e-05, + "loss": 0.2362, + "step": 46320 + }, + { + "epoch": 1.7888721572261477, + "grad_norm": 1.5351624488830566, + "learning_rate": 8.074443028688366e-05, + "loss": 0.4439, + "step": 46330 + }, + { + "epoch": 1.7892582725201747, + "grad_norm": 0.15024085342884064, + "learning_rate": 8.071868926728188e-05, + "loss": 0.2309, + "step": 46340 + }, + { + "epoch": 1.7896443878142012, + "grad_norm": 0.3030475974082947, + "learning_rate": 8.069294824768009e-05, + "loss": 0.1032, + "step": 46350 + }, + { + "epoch": 1.7900305031082282, + "grad_norm": 2.4676828384399414, + "learning_rate": 8.066720722807832e-05, + "loss": 0.4874, + "step": 46360 + }, + { + "epoch": 1.790416618402255, + "grad_norm": 1.4394946098327637, + "learning_rate": 8.064146620847653e-05, + "loss": 0.1635, + "step": 46370 + }, + { + "epoch": 1.7908027336962817, + "grad_norm": 0.05110672488808632, + "learning_rate": 8.061572518887473e-05, + "loss": 0.1001, + "step": 46380 + }, + { + "epoch": 1.7911888489903085, + "grad_norm": 0.9311584830284119, + "learning_rate": 8.058998416927294e-05, + "loss": 0.288, + "step": 46390 + }, + { + "epoch": 1.7915749642843353, + "grad_norm": 0.869853675365448, + "learning_rate": 8.056424314967116e-05, + "loss": 0.3104, + "step": 46400 + }, + { + "epoch": 1.7919610795783623, + "grad_norm": 1.8579514026641846, + "learning_rate": 8.053850213006937e-05, + "loss": 0.1846, + "step": 46410 + }, + { + "epoch": 1.7923471948723888, + "grad_norm": 1.0974096059799194, + "learning_rate": 8.05127611104676e-05, + "loss": 0.2144, + "step": 46420 + }, + { + "epoch": 1.7927333101664158, + "grad_norm": 1.3006702661514282, + "learning_rate": 8.048702009086581e-05, + "loss": 0.4802, + "step": 46430 + }, + { + "epoch": 1.7931194254604423, + "grad_norm": 0.41811448335647583, + "learning_rate": 8.046127907126402e-05, + "loss": 0.3352, + "step": 46440 + }, + { + "epoch": 1.7935055407544693, + "grad_norm": 1.1778554916381836, + "learning_rate": 8.043553805166222e-05, + "loss": 0.2485, + "step": 46450 + }, + { + "epoch": 1.793891656048496, + "grad_norm": 0.10794985294342041, + "learning_rate": 8.040979703206044e-05, + "loss": 0.3235, + "step": 46460 + }, + { + "epoch": 1.7942777713425229, + "grad_norm": 0.6999881267547607, + "learning_rate": 8.038405601245865e-05, + "loss": 0.2148, + "step": 46470 + }, + { + "epoch": 1.7946638866365496, + "grad_norm": 1.2335025072097778, + "learning_rate": 8.035831499285688e-05, + "loss": 0.2128, + "step": 46480 + }, + { + "epoch": 1.7950500019305764, + "grad_norm": 0.23150336742401123, + "learning_rate": 8.033257397325509e-05, + "loss": 0.3316, + "step": 46490 + }, + { + "epoch": 1.7954361172246034, + "grad_norm": 2.0523316860198975, + "learning_rate": 8.03068329536533e-05, + "loss": 0.1892, + "step": 46500 + }, + { + "epoch": 1.79582223251863, + "grad_norm": 1.127738356590271, + "learning_rate": 8.028109193405152e-05, + "loss": 0.2521, + "step": 46510 + }, + { + "epoch": 1.796208347812657, + "grad_norm": 1.5948694944381714, + "learning_rate": 8.025535091444972e-05, + "loss": 
0.3407, + "step": 46520 + }, + { + "epoch": 1.7965944631066837, + "grad_norm": 0.6383172273635864, + "learning_rate": 8.022960989484793e-05, + "loss": 0.271, + "step": 46530 + }, + { + "epoch": 1.7969805784007105, + "grad_norm": 1.016988754272461, + "learning_rate": 8.020386887524616e-05, + "loss": 0.1699, + "step": 46540 + }, + { + "epoch": 1.7973666936947372, + "grad_norm": 0.7680085897445679, + "learning_rate": 8.017812785564437e-05, + "loss": 0.193, + "step": 46550 + }, + { + "epoch": 1.797752808988764, + "grad_norm": 0.36858290433883667, + "learning_rate": 8.015238683604258e-05, + "loss": 0.2332, + "step": 46560 + }, + { + "epoch": 1.798138924282791, + "grad_norm": 6.89350700378418, + "learning_rate": 8.01266458164408e-05, + "loss": 0.6444, + "step": 46570 + }, + { + "epoch": 1.7985250395768175, + "grad_norm": 0.3459351360797882, + "learning_rate": 8.010090479683901e-05, + "loss": 0.2327, + "step": 46580 + }, + { + "epoch": 1.7989111548708445, + "grad_norm": 1.195894718170166, + "learning_rate": 8.007516377723722e-05, + "loss": 0.2167, + "step": 46590 + }, + { + "epoch": 1.799297270164871, + "grad_norm": 3.738119602203369, + "learning_rate": 8.004942275763542e-05, + "loss": 0.1946, + "step": 46600 + }, + { + "epoch": 1.799683385458898, + "grad_norm": 1.207139015197754, + "learning_rate": 8.002368173803365e-05, + "loss": 0.2888, + "step": 46610 + }, + { + "epoch": 1.8000695007529248, + "grad_norm": 0.2352428138256073, + "learning_rate": 7.999794071843186e-05, + "loss": 0.301, + "step": 46620 + }, + { + "epoch": 1.8004556160469516, + "grad_norm": 0.4833974242210388, + "learning_rate": 7.997219969883008e-05, + "loss": 0.1984, + "step": 46630 + }, + { + "epoch": 1.8008417313409786, + "grad_norm": 0.27819526195526123, + "learning_rate": 7.994645867922829e-05, + "loss": 0.229, + "step": 46640 + }, + { + "epoch": 1.8012278466350051, + "grad_norm": 1.2809014320373535, + "learning_rate": 7.99207176596265e-05, + "loss": 0.328, + "step": 46650 + }, + { + "epoch": 1.8016139619290321, + "grad_norm": 3.3877575397491455, + "learning_rate": 7.989497664002472e-05, + "loss": 0.236, + "step": 46660 + }, + { + "epoch": 1.8020000772230587, + "grad_norm": 0.5104680061340332, + "learning_rate": 7.986923562042293e-05, + "loss": 0.1853, + "step": 46670 + }, + { + "epoch": 1.8023861925170857, + "grad_norm": 0.6869563460350037, + "learning_rate": 7.984349460082114e-05, + "loss": 0.1743, + "step": 46680 + }, + { + "epoch": 1.8027723078111124, + "grad_norm": 1.9329041242599487, + "learning_rate": 7.981775358121936e-05, + "loss": 0.3057, + "step": 46690 + }, + { + "epoch": 1.8031584231051392, + "grad_norm": 0.9997574090957642, + "learning_rate": 7.979201256161757e-05, + "loss": 0.2145, + "step": 46700 + }, + { + "epoch": 1.803544538399166, + "grad_norm": 1.173745036125183, + "learning_rate": 7.976627154201578e-05, + "loss": 0.2315, + "step": 46710 + }, + { + "epoch": 1.8039306536931927, + "grad_norm": 0.9252222180366516, + "learning_rate": 7.9740530522414e-05, + "loss": 0.3173, + "step": 46720 + }, + { + "epoch": 1.8043167689872197, + "grad_norm": 1.316101312637329, + "learning_rate": 7.971478950281221e-05, + "loss": 0.1249, + "step": 46730 + }, + { + "epoch": 1.8047028842812463, + "grad_norm": 1.1243810653686523, + "learning_rate": 7.968904848321042e-05, + "loss": 0.3688, + "step": 46740 + }, + { + "epoch": 1.8050889995752732, + "grad_norm": 2.3322315216064453, + "learning_rate": 7.966330746360864e-05, + "loss": 0.2379, + "step": 46750 + }, + { + "epoch": 1.8054751148693, + "grad_norm": 1.534040927886963, + 
"learning_rate": 7.963756644400685e-05, + "loss": 0.365, + "step": 46760 + }, + { + "epoch": 1.8058612301633268, + "grad_norm": 1.326835036277771, + "learning_rate": 7.961182542440506e-05, + "loss": 0.1909, + "step": 46770 + }, + { + "epoch": 1.8062473454573535, + "grad_norm": 1.8357387781143188, + "learning_rate": 7.958608440480328e-05, + "loss": 0.3187, + "step": 46780 + }, + { + "epoch": 1.8066334607513803, + "grad_norm": 0.9270907044410706, + "learning_rate": 7.95603433852015e-05, + "loss": 0.2022, + "step": 46790 + }, + { + "epoch": 1.8070195760454073, + "grad_norm": 0.5300419330596924, + "learning_rate": 7.95346023655997e-05, + "loss": 0.2015, + "step": 46800 + }, + { + "epoch": 1.8074056913394339, + "grad_norm": 0.35859522223472595, + "learning_rate": 7.950886134599792e-05, + "loss": 0.1764, + "step": 46810 + }, + { + "epoch": 1.8077918066334608, + "grad_norm": 0.5568848252296448, + "learning_rate": 7.948312032639613e-05, + "loss": 0.1862, + "step": 46820 + }, + { + "epoch": 1.8081779219274874, + "grad_norm": 0.22550754249095917, + "learning_rate": 7.945737930679434e-05, + "loss": 0.2237, + "step": 46830 + }, + { + "epoch": 1.8085640372215144, + "grad_norm": 1.2444871664047241, + "learning_rate": 7.943163828719256e-05, + "loss": 0.2077, + "step": 46840 + }, + { + "epoch": 1.8089501525155411, + "grad_norm": 0.9609934091567993, + "learning_rate": 7.940589726759077e-05, + "loss": 0.2717, + "step": 46850 + }, + { + "epoch": 1.809336267809568, + "grad_norm": 0.2863350808620453, + "learning_rate": 7.9380156247989e-05, + "loss": 0.3284, + "step": 46860 + }, + { + "epoch": 1.8097223831035947, + "grad_norm": 1.6350208520889282, + "learning_rate": 7.93544152283872e-05, + "loss": 0.0919, + "step": 46870 + }, + { + "epoch": 1.8101084983976214, + "grad_norm": 0.5808874368667603, + "learning_rate": 7.932867420878541e-05, + "loss": 0.165, + "step": 46880 + }, + { + "epoch": 1.8104946136916484, + "grad_norm": 0.5033291578292847, + "learning_rate": 7.930293318918362e-05, + "loss": 0.192, + "step": 46890 + }, + { + "epoch": 1.810880728985675, + "grad_norm": 3.8204874992370605, + "learning_rate": 7.927719216958184e-05, + "loss": 0.3885, + "step": 46900 + }, + { + "epoch": 1.811266844279702, + "grad_norm": 0.9689306020736694, + "learning_rate": 7.925145114998005e-05, + "loss": 0.2609, + "step": 46910 + }, + { + "epoch": 1.8116529595737287, + "grad_norm": 2.7360000610351562, + "learning_rate": 7.922571013037828e-05, + "loss": 0.1955, + "step": 46920 + }, + { + "epoch": 1.8120390748677555, + "grad_norm": 1.2181187868118286, + "learning_rate": 7.919996911077649e-05, + "loss": 0.251, + "step": 46930 + }, + { + "epoch": 1.8124251901617823, + "grad_norm": 0.2421361356973648, + "learning_rate": 7.91742280911747e-05, + "loss": 0.1401, + "step": 46940 + }, + { + "epoch": 1.812811305455809, + "grad_norm": 1.1567500829696655, + "learning_rate": 7.91484870715729e-05, + "loss": 0.2303, + "step": 46950 + }, + { + "epoch": 1.813197420749836, + "grad_norm": 2.173985481262207, + "learning_rate": 7.912274605197112e-05, + "loss": 0.3141, + "step": 46960 + }, + { + "epoch": 1.8135835360438626, + "grad_norm": 0.21923333406448364, + "learning_rate": 7.909700503236933e-05, + "loss": 0.3109, + "step": 46970 + }, + { + "epoch": 1.8139696513378896, + "grad_norm": 2.7012853622436523, + "learning_rate": 7.907126401276756e-05, + "loss": 0.2912, + "step": 46980 + }, + { + "epoch": 1.8143557666319163, + "grad_norm": 1.6185250282287598, + "learning_rate": 7.904552299316577e-05, + "loss": 0.1989, + "step": 46990 + }, + { + "epoch": 
1.814741881925943, + "grad_norm": 1.2078602313995361, + "learning_rate": 7.901978197356398e-05, + "loss": 0.2906, + "step": 47000 + }, + { + "epoch": 1.8151279972199699, + "grad_norm": 0.8695535063743591, + "learning_rate": 7.89940409539622e-05, + "loss": 0.2486, + "step": 47010 + }, + { + "epoch": 1.8155141125139966, + "grad_norm": 1.7880644798278809, + "learning_rate": 7.89682999343604e-05, + "loss": 0.2963, + "step": 47020 + }, + { + "epoch": 1.8159002278080236, + "grad_norm": 1.9220221042633057, + "learning_rate": 7.894255891475861e-05, + "loss": 0.2822, + "step": 47030 + }, + { + "epoch": 1.8162863431020502, + "grad_norm": 0.2507815361022949, + "learning_rate": 7.891681789515684e-05, + "loss": 0.165, + "step": 47040 + }, + { + "epoch": 1.8166724583960772, + "grad_norm": 0.8847838640213013, + "learning_rate": 7.889107687555505e-05, + "loss": 0.1536, + "step": 47050 + }, + { + "epoch": 1.8170585736901037, + "grad_norm": 0.7972745895385742, + "learning_rate": 7.886533585595326e-05, + "loss": 0.4152, + "step": 47060 + }, + { + "epoch": 1.8174446889841307, + "grad_norm": 3.005274534225464, + "learning_rate": 7.883959483635148e-05, + "loss": 0.2491, + "step": 47070 + }, + { + "epoch": 1.8178308042781575, + "grad_norm": 0.19801591336727142, + "learning_rate": 7.881385381674969e-05, + "loss": 0.2671, + "step": 47080 + }, + { + "epoch": 1.8182169195721842, + "grad_norm": 0.43803074955940247, + "learning_rate": 7.878811279714789e-05, + "loss": 0.1101, + "step": 47090 + }, + { + "epoch": 1.818603034866211, + "grad_norm": 2.517371416091919, + "learning_rate": 7.876237177754612e-05, + "loss": 0.2759, + "step": 47100 + }, + { + "epoch": 1.8189891501602378, + "grad_norm": 1.0714353322982788, + "learning_rate": 7.873663075794433e-05, + "loss": 0.2247, + "step": 47110 + }, + { + "epoch": 1.8193752654542648, + "grad_norm": 1.3440790176391602, + "learning_rate": 7.871088973834254e-05, + "loss": 0.1309, + "step": 47120 + }, + { + "epoch": 1.8197613807482913, + "grad_norm": 0.10665372759103775, + "learning_rate": 7.868514871874076e-05, + "loss": 0.3176, + "step": 47130 + }, + { + "epoch": 1.8201474960423183, + "grad_norm": 0.6812877655029297, + "learning_rate": 7.865940769913897e-05, + "loss": 0.1709, + "step": 47140 + }, + { + "epoch": 1.820533611336345, + "grad_norm": 0.3776918649673462, + "learning_rate": 7.863366667953718e-05, + "loss": 0.1949, + "step": 47150 + }, + { + "epoch": 1.8209197266303718, + "grad_norm": 0.7111637592315674, + "learning_rate": 7.860792565993538e-05, + "loss": 0.2463, + "step": 47160 + }, + { + "epoch": 1.8213058419243986, + "grad_norm": 0.5845387578010559, + "learning_rate": 7.858218464033361e-05, + "loss": 0.285, + "step": 47170 + }, + { + "epoch": 1.8216919572184254, + "grad_norm": 0.394326776266098, + "learning_rate": 7.855644362073182e-05, + "loss": 0.1547, + "step": 47180 + }, + { + "epoch": 1.8220780725124524, + "grad_norm": 1.6686782836914062, + "learning_rate": 7.853070260113004e-05, + "loss": 0.2764, + "step": 47190 + }, + { + "epoch": 1.822464187806479, + "grad_norm": 0.4293366074562073, + "learning_rate": 7.850496158152825e-05, + "loss": 0.2277, + "step": 47200 + }, + { + "epoch": 1.8228503031005059, + "grad_norm": 0.5316643118858337, + "learning_rate": 7.847922056192646e-05, + "loss": 0.2015, + "step": 47210 + }, + { + "epoch": 1.8232364183945327, + "grad_norm": 1.1477640867233276, + "learning_rate": 7.845347954232468e-05, + "loss": 0.4081, + "step": 47220 + }, + { + "epoch": 1.8236225336885594, + "grad_norm": 1.0033808946609497, + "learning_rate": 
7.842773852272289e-05, + "loss": 0.1356, + "step": 47230 + }, + { + "epoch": 1.8240086489825862, + "grad_norm": 1.0021567344665527, + "learning_rate": 7.84019975031211e-05, + "loss": 0.161, + "step": 47240 + }, + { + "epoch": 1.824394764276613, + "grad_norm": 0.733974277973175, + "learning_rate": 7.837625648351932e-05, + "loss": 0.1246, + "step": 47250 + }, + { + "epoch": 1.82478087957064, + "grad_norm": 1.1681737899780273, + "learning_rate": 7.835051546391753e-05, + "loss": 0.3287, + "step": 47260 + }, + { + "epoch": 1.8251669948646665, + "grad_norm": 2.490657329559326, + "learning_rate": 7.832477444431574e-05, + "loss": 0.2203, + "step": 47270 + }, + { + "epoch": 1.8255531101586935, + "grad_norm": 1.1667157411575317, + "learning_rate": 7.829903342471396e-05, + "loss": 0.2827, + "step": 47280 + }, + { + "epoch": 1.82593922545272, + "grad_norm": 0.31772786378860474, + "learning_rate": 7.827329240511217e-05, + "loss": 0.1664, + "step": 47290 + }, + { + "epoch": 1.826325340746747, + "grad_norm": 1.1539255380630493, + "learning_rate": 7.824755138551038e-05, + "loss": 0.207, + "step": 47300 + }, + { + "epoch": 1.8267114560407738, + "grad_norm": 0.9592777490615845, + "learning_rate": 7.82218103659086e-05, + "loss": 0.193, + "step": 47310 + }, + { + "epoch": 1.8270975713348006, + "grad_norm": 0.6902135014533997, + "learning_rate": 7.819606934630681e-05, + "loss": 0.2119, + "step": 47320 + }, + { + "epoch": 1.8274836866288273, + "grad_norm": 1.4007855653762817, + "learning_rate": 7.817032832670502e-05, + "loss": 0.2046, + "step": 47330 + }, + { + "epoch": 1.827869801922854, + "grad_norm": 1.520080804824829, + "learning_rate": 7.814458730710324e-05, + "loss": 0.2861, + "step": 47340 + }, + { + "epoch": 1.828255917216881, + "grad_norm": 2.576406478881836, + "learning_rate": 7.811884628750146e-05, + "loss": 0.2615, + "step": 47350 + }, + { + "epoch": 1.8286420325109076, + "grad_norm": 1.499143362045288, + "learning_rate": 7.809310526789968e-05, + "loss": 0.4094, + "step": 47360 + }, + { + "epoch": 1.8290281478049346, + "grad_norm": 0.9561934471130371, + "learning_rate": 7.806736424829787e-05, + "loss": 0.2121, + "step": 47370 + }, + { + "epoch": 1.8294142630989614, + "grad_norm": 1.838971734046936, + "learning_rate": 7.804162322869609e-05, + "loss": 0.325, + "step": 47380 + }, + { + "epoch": 1.8298003783929881, + "grad_norm": 1.1759018898010254, + "learning_rate": 7.80158822090943e-05, + "loss": 0.1095, + "step": 47390 + }, + { + "epoch": 1.830186493687015, + "grad_norm": 0.4624423086643219, + "learning_rate": 7.799014118949251e-05, + "loss": 0.264, + "step": 47400 + }, + { + "epoch": 1.8305726089810417, + "grad_norm": 2.0572640895843506, + "learning_rate": 7.796440016989073e-05, + "loss": 0.3171, + "step": 47410 + }, + { + "epoch": 1.8309587242750687, + "grad_norm": 0.959831714630127, + "learning_rate": 7.793865915028896e-05, + "loss": 0.1588, + "step": 47420 + }, + { + "epoch": 1.8313448395690952, + "grad_norm": 0.7953433394432068, + "learning_rate": 7.791291813068717e-05, + "loss": 0.3992, + "step": 47430 + }, + { + "epoch": 1.8317309548631222, + "grad_norm": 0.8194203972816467, + "learning_rate": 7.788717711108537e-05, + "loss": 0.2718, + "step": 47440 + }, + { + "epoch": 1.832117070157149, + "grad_norm": 1.7283341884613037, + "learning_rate": 7.786143609148358e-05, + "loss": 0.3977, + "step": 47450 + }, + { + "epoch": 1.8325031854511757, + "grad_norm": 0.15646076202392578, + "learning_rate": 7.78356950718818e-05, + "loss": 0.2449, + "step": 47460 + }, + { + "epoch": 1.8328893007452025, + 
"grad_norm": 1.0211836099624634, + "learning_rate": 7.780995405228001e-05, + "loss": 0.2022, + "step": 47470 + }, + { + "epoch": 1.8332754160392293, + "grad_norm": 0.8658961653709412, + "learning_rate": 7.778421303267823e-05, + "loss": 0.2429, + "step": 47480 + }, + { + "epoch": 1.8336615313332563, + "grad_norm": 1.2359483242034912, + "learning_rate": 7.775847201307645e-05, + "loss": 0.3284, + "step": 47490 + }, + { + "epoch": 1.8340476466272828, + "grad_norm": 1.2980804443359375, + "learning_rate": 7.773273099347466e-05, + "loss": 0.1205, + "step": 47500 + }, + { + "epoch": 1.8344337619213098, + "grad_norm": 0.06653542071580887, + "learning_rate": 7.770698997387286e-05, + "loss": 0.1376, + "step": 47510 + }, + { + "epoch": 1.8348198772153363, + "grad_norm": 1.0570144653320312, + "learning_rate": 7.768124895427107e-05, + "loss": 0.1288, + "step": 47520 + }, + { + "epoch": 1.8352059925093633, + "grad_norm": 1.0040183067321777, + "learning_rate": 7.765550793466929e-05, + "loss": 0.1825, + "step": 47530 + }, + { + "epoch": 1.83559210780339, + "grad_norm": 2.5596699714660645, + "learning_rate": 7.762976691506751e-05, + "loss": 0.1764, + "step": 47540 + }, + { + "epoch": 1.8359782230974169, + "grad_norm": 4.138411521911621, + "learning_rate": 7.760402589546573e-05, + "loss": 0.2783, + "step": 47550 + }, + { + "epoch": 1.8363643383914436, + "grad_norm": 0.8152881860733032, + "learning_rate": 7.757828487586394e-05, + "loss": 0.1424, + "step": 47560 + }, + { + "epoch": 1.8367504536854704, + "grad_norm": 2.608886480331421, + "learning_rate": 7.755254385626215e-05, + "loss": 0.161, + "step": 47570 + }, + { + "epoch": 1.8371365689794974, + "grad_norm": 2.870406150817871, + "learning_rate": 7.752680283666037e-05, + "loss": 0.6419, + "step": 47580 + }, + { + "epoch": 1.837522684273524, + "grad_norm": 1.2127447128295898, + "learning_rate": 7.750106181705857e-05, + "loss": 0.1987, + "step": 47590 + }, + { + "epoch": 1.837908799567551, + "grad_norm": 0.44798439741134644, + "learning_rate": 7.74753207974568e-05, + "loss": 0.2313, + "step": 47600 + }, + { + "epoch": 1.8382949148615777, + "grad_norm": 0.434055894613266, + "learning_rate": 7.744957977785501e-05, + "loss": 0.1227, + "step": 47610 + }, + { + "epoch": 1.8386810301556045, + "grad_norm": 1.0160471200942993, + "learning_rate": 7.742383875825322e-05, + "loss": 0.2699, + "step": 47620 + }, + { + "epoch": 1.8390671454496312, + "grad_norm": 2.0095553398132324, + "learning_rate": 7.739809773865143e-05, + "loss": 0.2666, + "step": 47630 + }, + { + "epoch": 1.839453260743658, + "grad_norm": 2.849498748779297, + "learning_rate": 7.737235671904965e-05, + "loss": 0.2798, + "step": 47640 + }, + { + "epoch": 1.839839376037685, + "grad_norm": 2.336435556411743, + "learning_rate": 7.734661569944786e-05, + "loss": 0.3048, + "step": 47650 + }, + { + "epoch": 1.8402254913317115, + "grad_norm": 0.8438564538955688, + "learning_rate": 7.732087467984606e-05, + "loss": 0.2876, + "step": 47660 + }, + { + "epoch": 1.8406116066257385, + "grad_norm": 0.514867901802063, + "learning_rate": 7.729513366024429e-05, + "loss": 0.2249, + "step": 47670 + }, + { + "epoch": 1.8409977219197653, + "grad_norm": 4.391468048095703, + "learning_rate": 7.72693926406425e-05, + "loss": 0.3918, + "step": 47680 + }, + { + "epoch": 1.841383837213792, + "grad_norm": 2.4497931003570557, + "learning_rate": 7.724365162104071e-05, + "loss": 0.3368, + "step": 47690 + }, + { + "epoch": 1.8417699525078188, + "grad_norm": 2.884559392929077, + "learning_rate": 7.721791060143893e-05, + "loss": 0.2406, + 
"step": 47700 + }, + { + "epoch": 1.8421560678018456, + "grad_norm": 1.099225640296936, + "learning_rate": 7.719216958183714e-05, + "loss": 0.1759, + "step": 47710 + }, + { + "epoch": 1.8425421830958726, + "grad_norm": 0.881747305393219, + "learning_rate": 7.716642856223535e-05, + "loss": 0.2377, + "step": 47720 + }, + { + "epoch": 1.8429282983898991, + "grad_norm": 1.7341536283493042, + "learning_rate": 7.714068754263357e-05, + "loss": 0.1593, + "step": 47730 + }, + { + "epoch": 1.8433144136839261, + "grad_norm": 0.8997848629951477, + "learning_rate": 7.711494652303178e-05, + "loss": 0.2198, + "step": 47740 + }, + { + "epoch": 1.8437005289779527, + "grad_norm": 0.38423866033554077, + "learning_rate": 7.708920550343e-05, + "loss": 0.2539, + "step": 47750 + }, + { + "epoch": 1.8440866442719797, + "grad_norm": 1.0857203006744385, + "learning_rate": 7.706346448382821e-05, + "loss": 0.1595, + "step": 47760 + }, + { + "epoch": 1.8444727595660064, + "grad_norm": 1.5009055137634277, + "learning_rate": 7.703772346422642e-05, + "loss": 0.2478, + "step": 47770 + }, + { + "epoch": 1.8448588748600332, + "grad_norm": 0.13288016617298126, + "learning_rate": 7.701198244462463e-05, + "loss": 0.2184, + "step": 47780 + }, + { + "epoch": 1.84524499015406, + "grad_norm": 1.167777180671692, + "learning_rate": 7.698624142502285e-05, + "loss": 0.3157, + "step": 47790 + }, + { + "epoch": 1.8456311054480867, + "grad_norm": 0.5168697834014893, + "learning_rate": 7.696050040542106e-05, + "loss": 0.1543, + "step": 47800 + }, + { + "epoch": 1.8460172207421137, + "grad_norm": 0.7516908645629883, + "learning_rate": 7.693475938581927e-05, + "loss": 0.1869, + "step": 47810 + }, + { + "epoch": 1.8464033360361403, + "grad_norm": 3.0962419509887695, + "learning_rate": 7.690901836621749e-05, + "loss": 0.3891, + "step": 47820 + }, + { + "epoch": 1.8467894513301673, + "grad_norm": 0.05181325227022171, + "learning_rate": 7.68832773466157e-05, + "loss": 0.3427, + "step": 47830 + }, + { + "epoch": 1.847175566624194, + "grad_norm": 2.856113910675049, + "learning_rate": 7.685753632701391e-05, + "loss": 0.2148, + "step": 47840 + }, + { + "epoch": 1.8475616819182208, + "grad_norm": 0.4950585663318634, + "learning_rate": 7.683179530741214e-05, + "loss": 0.2689, + "step": 47850 + }, + { + "epoch": 1.8479477972122476, + "grad_norm": 2.390775680541992, + "learning_rate": 7.680605428781034e-05, + "loss": 0.3389, + "step": 47860 + }, + { + "epoch": 1.8483339125062743, + "grad_norm": 0.1536688208580017, + "learning_rate": 7.678031326820855e-05, + "loss": 0.1811, + "step": 47870 + }, + { + "epoch": 1.8487200278003013, + "grad_norm": 0.26756611466407776, + "learning_rate": 7.675457224860677e-05, + "loss": 0.1735, + "step": 47880 + }, + { + "epoch": 1.8491061430943279, + "grad_norm": 1.3034265041351318, + "learning_rate": 7.672883122900498e-05, + "loss": 0.1554, + "step": 47890 + }, + { + "epoch": 1.8494922583883548, + "grad_norm": 1.1781542301177979, + "learning_rate": 7.67030902094032e-05, + "loss": 0.4027, + "step": 47900 + }, + { + "epoch": 1.8498783736823814, + "grad_norm": 1.2090016603469849, + "learning_rate": 7.667734918980141e-05, + "loss": 0.1873, + "step": 47910 + }, + { + "epoch": 1.8502644889764084, + "grad_norm": 1.5354760885238647, + "learning_rate": 7.665160817019963e-05, + "loss": 0.2218, + "step": 47920 + }, + { + "epoch": 1.8506506042704352, + "grad_norm": 2.093831777572632, + "learning_rate": 7.662586715059785e-05, + "loss": 0.2591, + "step": 47930 + }, + { + "epoch": 1.851036719564462, + "grad_norm": 0.9484484195709229, + 
"learning_rate": 7.660012613099605e-05, + "loss": 0.252, + "step": 47940 + }, + { + "epoch": 1.851422834858489, + "grad_norm": 0.15661562979221344, + "learning_rate": 7.657438511139426e-05, + "loss": 0.2673, + "step": 47950 + }, + { + "epoch": 1.8518089501525155, + "grad_norm": 0.5501863360404968, + "learning_rate": 7.654864409179247e-05, + "loss": 0.2636, + "step": 47960 + }, + { + "epoch": 1.8521950654465424, + "grad_norm": 1.455328345298767, + "learning_rate": 7.652290307219069e-05, + "loss": 0.2085, + "step": 47970 + }, + { + "epoch": 1.852581180740569, + "grad_norm": 0.310106098651886, + "learning_rate": 7.649716205258891e-05, + "loss": 0.1839, + "step": 47980 + }, + { + "epoch": 1.852967296034596, + "grad_norm": 0.444260835647583, + "learning_rate": 7.647142103298713e-05, + "loss": 0.2434, + "step": 47990 + }, + { + "epoch": 1.8533534113286227, + "grad_norm": 0.5278909206390381, + "learning_rate": 7.644568001338534e-05, + "loss": 0.2472, + "step": 48000 + }, + { + "epoch": 1.8537395266226495, + "grad_norm": 0.4239410161972046, + "learning_rate": 7.641993899378354e-05, + "loss": 0.2854, + "step": 48010 + }, + { + "epoch": 1.8541256419166763, + "grad_norm": 0.9763671159744263, + "learning_rate": 7.639419797418175e-05, + "loss": 0.2737, + "step": 48020 + }, + { + "epoch": 1.854511757210703, + "grad_norm": 2.139054775238037, + "learning_rate": 7.636845695457997e-05, + "loss": 0.2553, + "step": 48030 + }, + { + "epoch": 1.85489787250473, + "grad_norm": 1.8417706489562988, + "learning_rate": 7.63427159349782e-05, + "loss": 0.3787, + "step": 48040 + }, + { + "epoch": 1.8552839877987566, + "grad_norm": 1.2950854301452637, + "learning_rate": 7.631697491537641e-05, + "loss": 0.1859, + "step": 48050 + }, + { + "epoch": 1.8556701030927836, + "grad_norm": 0.9440277218818665, + "learning_rate": 7.629123389577462e-05, + "loss": 0.2362, + "step": 48060 + }, + { + "epoch": 1.8560562183868103, + "grad_norm": 2.829890012741089, + "learning_rate": 7.626549287617283e-05, + "loss": 0.264, + "step": 48070 + }, + { + "epoch": 1.856442333680837, + "grad_norm": 1.5721958875656128, + "learning_rate": 7.623975185657103e-05, + "loss": 0.2834, + "step": 48080 + }, + { + "epoch": 1.8568284489748639, + "grad_norm": 0.7574679851531982, + "learning_rate": 7.621401083696925e-05, + "loss": 0.1546, + "step": 48090 + }, + { + "epoch": 1.8572145642688906, + "grad_norm": 0.4562332332134247, + "learning_rate": 7.618826981736747e-05, + "loss": 0.2419, + "step": 48100 + }, + { + "epoch": 1.8576006795629176, + "grad_norm": 1.8063342571258545, + "learning_rate": 7.616252879776569e-05, + "loss": 0.5576, + "step": 48110 + }, + { + "epoch": 1.8579867948569442, + "grad_norm": 1.7112247943878174, + "learning_rate": 7.61367877781639e-05, + "loss": 0.2379, + "step": 48120 + }, + { + "epoch": 1.8583729101509712, + "grad_norm": 0.696594774723053, + "learning_rate": 7.611104675856211e-05, + "loss": 0.2704, + "step": 48130 + }, + { + "epoch": 1.8587590254449977, + "grad_norm": 1.1555263996124268, + "learning_rate": 7.608530573896033e-05, + "loss": 0.2835, + "step": 48140 + }, + { + "epoch": 1.8591451407390247, + "grad_norm": 0.8256335854530334, + "learning_rate": 7.605956471935854e-05, + "loss": 0.3059, + "step": 48150 + }, + { + "epoch": 1.8595312560330515, + "grad_norm": 1.5511350631713867, + "learning_rate": 7.603382369975674e-05, + "loss": 0.2882, + "step": 48160 + }, + { + "epoch": 1.8599173713270782, + "grad_norm": 0.5888099074363708, + "learning_rate": 7.600808268015497e-05, + "loss": 0.2939, + "step": 48170 + }, + { + "epoch": 
1.860303486621105, + "grad_norm": 1.3985711336135864, + "learning_rate": 7.598234166055318e-05, + "loss": 0.203, + "step": 48180 + }, + { + "epoch": 1.8606896019151318, + "grad_norm": 2.2253661155700684, + "learning_rate": 7.59566006409514e-05, + "loss": 0.2865, + "step": 48190 + }, + { + "epoch": 1.8610757172091588, + "grad_norm": 1.062969446182251, + "learning_rate": 7.593085962134961e-05, + "loss": 0.3292, + "step": 48200 + }, + { + "epoch": 1.8614618325031853, + "grad_norm": 1.0917813777923584, + "learning_rate": 7.590511860174782e-05, + "loss": 0.2274, + "step": 48210 + }, + { + "epoch": 1.8618479477972123, + "grad_norm": 1.3360031843185425, + "learning_rate": 7.587937758214603e-05, + "loss": 0.2381, + "step": 48220 + }, + { + "epoch": 1.862234063091239, + "grad_norm": 0.12084411829710007, + "learning_rate": 7.585363656254425e-05, + "loss": 0.1836, + "step": 48230 + }, + { + "epoch": 1.8626201783852658, + "grad_norm": 3.0013840198516846, + "learning_rate": 7.582789554294246e-05, + "loss": 0.2514, + "step": 48240 + }, + { + "epoch": 1.8630062936792926, + "grad_norm": 0.08427372574806213, + "learning_rate": 7.580215452334067e-05, + "loss": 0.4512, + "step": 48250 + }, + { + "epoch": 1.8633924089733194, + "grad_norm": 2.2700986862182617, + "learning_rate": 7.577641350373889e-05, + "loss": 0.4929, + "step": 48260 + }, + { + "epoch": 1.8637785242673464, + "grad_norm": 1.512097716331482, + "learning_rate": 7.57506724841371e-05, + "loss": 0.2595, + "step": 48270 + }, + { + "epoch": 1.864164639561373, + "grad_norm": 0.03086630441248417, + "learning_rate": 7.572493146453531e-05, + "loss": 0.2801, + "step": 48280 + }, + { + "epoch": 1.8645507548554, + "grad_norm": 1.8089312314987183, + "learning_rate": 7.569919044493353e-05, + "loss": 0.3094, + "step": 48290 + }, + { + "epoch": 1.8649368701494267, + "grad_norm": 1.2290606498718262, + "learning_rate": 7.567344942533174e-05, + "loss": 0.2952, + "step": 48300 + }, + { + "epoch": 1.8653229854434534, + "grad_norm": 2.2351982593536377, + "learning_rate": 7.564770840572995e-05, + "loss": 0.1904, + "step": 48310 + }, + { + "epoch": 1.8657091007374802, + "grad_norm": 1.007934808731079, + "learning_rate": 7.562196738612817e-05, + "loss": 0.174, + "step": 48320 + }, + { + "epoch": 1.866095216031507, + "grad_norm": 1.8931010961532593, + "learning_rate": 7.559622636652638e-05, + "loss": 0.2265, + "step": 48330 + }, + { + "epoch": 1.866481331325534, + "grad_norm": 7.616462230682373, + "learning_rate": 7.55704853469246e-05, + "loss": 0.194, + "step": 48340 + }, + { + "epoch": 1.8668674466195605, + "grad_norm": 0.8300217390060425, + "learning_rate": 7.554474432732282e-05, + "loss": 0.172, + "step": 48350 + }, + { + "epoch": 1.8672535619135875, + "grad_norm": 0.9842997789382935, + "learning_rate": 7.551900330772102e-05, + "loss": 0.2279, + "step": 48360 + }, + { + "epoch": 1.867639677207614, + "grad_norm": 1.950230360031128, + "learning_rate": 7.549326228811923e-05, + "loss": 0.2703, + "step": 48370 + }, + { + "epoch": 1.868025792501641, + "grad_norm": 1.9885706901550293, + "learning_rate": 7.546752126851745e-05, + "loss": 0.2584, + "step": 48380 + }, + { + "epoch": 1.8684119077956678, + "grad_norm": 0.6000315546989441, + "learning_rate": 7.544178024891566e-05, + "loss": 0.2729, + "step": 48390 + }, + { + "epoch": 1.8687980230896946, + "grad_norm": 0.922893226146698, + "learning_rate": 7.541603922931387e-05, + "loss": 0.1564, + "step": 48400 + }, + { + "epoch": 1.8691841383837213, + "grad_norm": 0.4918765723705292, + "learning_rate": 7.53902982097121e-05, + 
"loss": 0.1723, + "step": 48410 + }, + { + "epoch": 1.869570253677748, + "grad_norm": 0.9271582365036011, + "learning_rate": 7.536455719011031e-05, + "loss": 0.2593, + "step": 48420 + }, + { + "epoch": 1.869956368971775, + "grad_norm": 0.5880617499351501, + "learning_rate": 7.533881617050851e-05, + "loss": 0.1738, + "step": 48430 + }, + { + "epoch": 1.8703424842658016, + "grad_norm": 2.229809522628784, + "learning_rate": 7.531307515090673e-05, + "loss": 0.2722, + "step": 48440 + }, + { + "epoch": 1.8707285995598286, + "grad_norm": 0.47200268507003784, + "learning_rate": 7.528733413130494e-05, + "loss": 0.1359, + "step": 48450 + }, + { + "epoch": 1.8711147148538554, + "grad_norm": 1.7799588441848755, + "learning_rate": 7.526159311170315e-05, + "loss": 0.1806, + "step": 48460 + }, + { + "epoch": 1.8715008301478822, + "grad_norm": 0.7878087162971497, + "learning_rate": 7.523585209210137e-05, + "loss": 0.128, + "step": 48470 + }, + { + "epoch": 1.871886945441909, + "grad_norm": 0.666887104511261, + "learning_rate": 7.521011107249959e-05, + "loss": 0.3447, + "step": 48480 + }, + { + "epoch": 1.8722730607359357, + "grad_norm": 0.13966748118400574, + "learning_rate": 7.51843700528978e-05, + "loss": 0.2694, + "step": 48490 + }, + { + "epoch": 1.8726591760299627, + "grad_norm": 0.6305252909660339, + "learning_rate": 7.5158629033296e-05, + "loss": 0.1235, + "step": 48500 + }, + { + "epoch": 1.8730452913239892, + "grad_norm": 1.4558709859848022, + "learning_rate": 7.513288801369422e-05, + "loss": 0.1251, + "step": 48510 + }, + { + "epoch": 1.8734314066180162, + "grad_norm": 1.4596049785614014, + "learning_rate": 7.510714699409243e-05, + "loss": 0.3316, + "step": 48520 + }, + { + "epoch": 1.873817521912043, + "grad_norm": 0.6161240935325623, + "learning_rate": 7.508140597449065e-05, + "loss": 0.1035, + "step": 48530 + }, + { + "epoch": 1.8742036372060698, + "grad_norm": 2.402022123336792, + "learning_rate": 7.505566495488887e-05, + "loss": 0.2569, + "step": 48540 + }, + { + "epoch": 1.8745897525000965, + "grad_norm": 1.2332879304885864, + "learning_rate": 7.502992393528709e-05, + "loss": 0.0864, + "step": 48550 + }, + { + "epoch": 1.8749758677941233, + "grad_norm": 1.636063575744629, + "learning_rate": 7.50041829156853e-05, + "loss": 0.2673, + "step": 48560 + }, + { + "epoch": 1.8753619830881503, + "grad_norm": 1.0160930156707764, + "learning_rate": 7.497844189608351e-05, + "loss": 0.2759, + "step": 48570 + }, + { + "epoch": 1.8757480983821768, + "grad_norm": 2.007415771484375, + "learning_rate": 7.495270087648171e-05, + "loss": 0.328, + "step": 48580 + }, + { + "epoch": 1.8761342136762038, + "grad_norm": 2.852415084838867, + "learning_rate": 7.492695985687993e-05, + "loss": 0.2762, + "step": 48590 + }, + { + "epoch": 1.8765203289702304, + "grad_norm": 1.5660792589187622, + "learning_rate": 7.490121883727815e-05, + "loss": 0.3858, + "step": 48600 + }, + { + "epoch": 1.8769064442642573, + "grad_norm": 1.022005319595337, + "learning_rate": 7.487547781767637e-05, + "loss": 0.2508, + "step": 48610 + }, + { + "epoch": 1.8772925595582841, + "grad_norm": 0.9244334101676941, + "learning_rate": 7.484973679807458e-05, + "loss": 0.3577, + "step": 48620 + }, + { + "epoch": 1.8776786748523109, + "grad_norm": 0.6603676080703735, + "learning_rate": 7.482399577847279e-05, + "loss": 0.1577, + "step": 48630 + }, + { + "epoch": 1.8780647901463376, + "grad_norm": 1.8789231777191162, + "learning_rate": 7.4798254758871e-05, + "loss": 0.1705, + "step": 48640 + }, + { + "epoch": 1.8784509054403644, + "grad_norm": 
2.0536692142486572, + "learning_rate": 7.47725137392692e-05, + "loss": 0.3087, + "step": 48650 + }, + { + "epoch": 1.8788370207343914, + "grad_norm": 1.1918115615844727, + "learning_rate": 7.474677271966743e-05, + "loss": 0.2027, + "step": 48660 + }, + { + "epoch": 1.879223136028418, + "grad_norm": 0.374523788690567, + "learning_rate": 7.472103170006565e-05, + "loss": 0.193, + "step": 48670 + }, + { + "epoch": 1.879609251322445, + "grad_norm": 1.481998324394226, + "learning_rate": 7.469529068046386e-05, + "loss": 0.246, + "step": 48680 + }, + { + "epoch": 1.8799953666164717, + "grad_norm": 0.9611921310424805, + "learning_rate": 7.466954966086207e-05, + "loss": 0.3557, + "step": 48690 + }, + { + "epoch": 1.8803814819104985, + "grad_norm": 2.2604222297668457, + "learning_rate": 7.464380864126029e-05, + "loss": 0.168, + "step": 48700 + }, + { + "epoch": 1.8807675972045252, + "grad_norm": 0.11883547157049179, + "learning_rate": 7.46180676216585e-05, + "loss": 0.1078, + "step": 48710 + }, + { + "epoch": 1.881153712498552, + "grad_norm": 1.0063214302062988, + "learning_rate": 7.45923266020567e-05, + "loss": 0.2871, + "step": 48720 + }, + { + "epoch": 1.881539827792579, + "grad_norm": 0.32539430260658264, + "learning_rate": 7.456658558245493e-05, + "loss": 0.2641, + "step": 48730 + }, + { + "epoch": 1.8819259430866055, + "grad_norm": 0.05262208729982376, + "learning_rate": 7.454084456285314e-05, + "loss": 0.2313, + "step": 48740 + }, + { + "epoch": 1.8823120583806325, + "grad_norm": 1.5337389707565308, + "learning_rate": 7.451510354325135e-05, + "loss": 0.2202, + "step": 48750 + }, + { + "epoch": 1.8826981736746593, + "grad_norm": 2.8400349617004395, + "learning_rate": 7.448936252364957e-05, + "loss": 0.2719, + "step": 48760 + }, + { + "epoch": 1.883084288968686, + "grad_norm": 1.0065114498138428, + "learning_rate": 7.446362150404778e-05, + "loss": 0.1904, + "step": 48770 + }, + { + "epoch": 1.8834704042627128, + "grad_norm": 3.514146089553833, + "learning_rate": 7.443788048444599e-05, + "loss": 0.4121, + "step": 48780 + }, + { + "epoch": 1.8838565195567396, + "grad_norm": 1.5249392986297607, + "learning_rate": 7.44121394648442e-05, + "loss": 0.3192, + "step": 48790 + }, + { + "epoch": 1.8842426348507666, + "grad_norm": 2.1075022220611572, + "learning_rate": 7.438639844524242e-05, + "loss": 0.257, + "step": 48800 + }, + { + "epoch": 1.8846287501447931, + "grad_norm": 1.368531584739685, + "learning_rate": 7.436065742564063e-05, + "loss": 0.246, + "step": 48810 + }, + { + "epoch": 1.8850148654388201, + "grad_norm": 0.7900007367134094, + "learning_rate": 7.433491640603885e-05, + "loss": 0.2635, + "step": 48820 + }, + { + "epoch": 1.8854009807328467, + "grad_norm": 0.8597519397735596, + "learning_rate": 7.430917538643706e-05, + "loss": 0.3996, + "step": 48830 + }, + { + "epoch": 1.8857870960268737, + "grad_norm": 1.4207600355148315, + "learning_rate": 7.428343436683527e-05, + "loss": 0.0985, + "step": 48840 + }, + { + "epoch": 1.8861732113209004, + "grad_norm": 0.43486616015434265, + "learning_rate": 7.425769334723349e-05, + "loss": 0.0675, + "step": 48850 + }, + { + "epoch": 1.8865593266149272, + "grad_norm": 0.5899690389633179, + "learning_rate": 7.42319523276317e-05, + "loss": 0.3184, + "step": 48860 + }, + { + "epoch": 1.886945441908954, + "grad_norm": 0.3843490481376648, + "learning_rate": 7.420621130802991e-05, + "loss": 0.3173, + "step": 48870 + }, + { + "epoch": 1.8873315572029807, + "grad_norm": 1.5994783639907837, + "learning_rate": 7.418047028842813e-05, + "loss": 0.1545, + "step": 48880 + 
}, + { + "epoch": 1.8877176724970077, + "grad_norm": 0.4738117456436157, + "learning_rate": 7.415472926882634e-05, + "loss": 0.2209, + "step": 48890 + }, + { + "epoch": 1.8881037877910343, + "grad_norm": 0.8965383768081665, + "learning_rate": 7.412898824922455e-05, + "loss": 0.0784, + "step": 48900 + }, + { + "epoch": 1.8884899030850613, + "grad_norm": 0.5122581124305725, + "learning_rate": 7.410324722962278e-05, + "loss": 0.2346, + "step": 48910 + }, + { + "epoch": 1.888876018379088, + "grad_norm": 0.8720236420631409, + "learning_rate": 7.407750621002099e-05, + "loss": 0.1616, + "step": 48920 + }, + { + "epoch": 1.8892621336731148, + "grad_norm": 0.8678966760635376, + "learning_rate": 7.405176519041919e-05, + "loss": 0.2346, + "step": 48930 + }, + { + "epoch": 1.8896482489671416, + "grad_norm": 1.5904022455215454, + "learning_rate": 7.40260241708174e-05, + "loss": 0.2383, + "step": 48940 + }, + { + "epoch": 1.8900343642611683, + "grad_norm": 0.8917766213417053, + "learning_rate": 7.400028315121562e-05, + "loss": 0.0761, + "step": 48950 + }, + { + "epoch": 1.8904204795551953, + "grad_norm": 2.0660765171051025, + "learning_rate": 7.397454213161383e-05, + "loss": 0.1417, + "step": 48960 + }, + { + "epoch": 1.8908065948492219, + "grad_norm": 1.085541009902954, + "learning_rate": 7.394880111201205e-05, + "loss": 0.116, + "step": 48970 + }, + { + "epoch": 1.8911927101432489, + "grad_norm": 0.5287320613861084, + "learning_rate": 7.392306009241027e-05, + "loss": 0.2035, + "step": 48980 + }, + { + "epoch": 1.8915788254372756, + "grad_norm": 0.12603731453418732, + "learning_rate": 7.389731907280849e-05, + "loss": 0.2288, + "step": 48990 + }, + { + "epoch": 1.8919649407313024, + "grad_norm": 1.6294454336166382, + "learning_rate": 7.387157805320669e-05, + "loss": 0.3106, + "step": 49000 + }, + { + "epoch": 1.8923510560253292, + "grad_norm": 0.12466654926538467, + "learning_rate": 7.38458370336049e-05, + "loss": 0.2728, + "step": 49010 + }, + { + "epoch": 1.892737171319356, + "grad_norm": 0.44524601101875305, + "learning_rate": 7.382009601400311e-05, + "loss": 0.1293, + "step": 49020 + }, + { + "epoch": 1.893123286613383, + "grad_norm": 0.7164571285247803, + "learning_rate": 7.379435499440133e-05, + "loss": 0.2331, + "step": 49030 + }, + { + "epoch": 1.8935094019074095, + "grad_norm": 0.6479294300079346, + "learning_rate": 7.376861397479955e-05, + "loss": 0.2716, + "step": 49040 + }, + { + "epoch": 1.8938955172014365, + "grad_norm": 0.5055733323097229, + "learning_rate": 7.374287295519777e-05, + "loss": 0.253, + "step": 49050 + }, + { + "epoch": 1.894281632495463, + "grad_norm": 0.14443042874336243, + "learning_rate": 7.371713193559598e-05, + "loss": 0.211, + "step": 49060 + }, + { + "epoch": 1.89466774778949, + "grad_norm": 0.21934077143669128, + "learning_rate": 7.369139091599418e-05, + "loss": 0.4524, + "step": 49070 + }, + { + "epoch": 1.8950538630835168, + "grad_norm": 0.7723036408424377, + "learning_rate": 7.366564989639239e-05, + "loss": 0.3333, + "step": 49080 + }, + { + "epoch": 1.8954399783775435, + "grad_norm": 0.561475932598114, + "learning_rate": 7.36399088767906e-05, + "loss": 0.2446, + "step": 49090 + }, + { + "epoch": 1.8958260936715703, + "grad_norm": 0.36831262707710266, + "learning_rate": 7.361416785718883e-05, + "loss": 0.3203, + "step": 49100 + }, + { + "epoch": 1.896212208965597, + "grad_norm": 1.3542941808700562, + "learning_rate": 7.358842683758705e-05, + "loss": 0.1927, + "step": 49110 + }, + { + "epoch": 1.896598324259624, + "grad_norm": 0.778232991695404, + "learning_rate": 
7.356268581798526e-05, + "loss": 0.1714, + "step": 49120 + }, + { + "epoch": 1.8969844395536506, + "grad_norm": 0.1421511471271515, + "learning_rate": 7.353694479838347e-05, + "loss": 0.2391, + "step": 49130 + }, + { + "epoch": 1.8973705548476776, + "grad_norm": 0.5509871244430542, + "learning_rate": 7.351120377878169e-05, + "loss": 0.251, + "step": 49140 + }, + { + "epoch": 1.8977566701417043, + "grad_norm": 1.068138599395752, + "learning_rate": 7.348546275917989e-05, + "loss": 0.3913, + "step": 49150 + }, + { + "epoch": 1.8981427854357311, + "grad_norm": 0.8679132461547852, + "learning_rate": 7.345972173957811e-05, + "loss": 0.2243, + "step": 49160 + }, + { + "epoch": 1.8985289007297579, + "grad_norm": 1.1966150999069214, + "learning_rate": 7.343398071997633e-05, + "loss": 0.1426, + "step": 49170 + }, + { + "epoch": 1.8989150160237847, + "grad_norm": 2.415524482727051, + "learning_rate": 7.340823970037454e-05, + "loss": 0.2193, + "step": 49180 + }, + { + "epoch": 1.8993011313178116, + "grad_norm": 0.3287011384963989, + "learning_rate": 7.338249868077275e-05, + "loss": 0.1409, + "step": 49190 + }, + { + "epoch": 1.8996872466118382, + "grad_norm": 0.47298726439476013, + "learning_rate": 7.335675766117097e-05, + "loss": 0.3534, + "step": 49200 + }, + { + "epoch": 1.9000733619058652, + "grad_norm": 0.25453588366508484, + "learning_rate": 7.333101664156918e-05, + "loss": 0.2457, + "step": 49210 + }, + { + "epoch": 1.9004594771998917, + "grad_norm": 1.78682279586792, + "learning_rate": 7.330527562196738e-05, + "loss": 0.4379, + "step": 49220 + }, + { + "epoch": 1.9008455924939187, + "grad_norm": 0.9073999524116516, + "learning_rate": 7.32795346023656e-05, + "loss": 0.27, + "step": 49230 + }, + { + "epoch": 1.9012317077879455, + "grad_norm": 0.7788071036338806, + "learning_rate": 7.325379358276382e-05, + "loss": 0.1992, + "step": 49240 + }, + { + "epoch": 1.9016178230819722, + "grad_norm": 0.151946023106575, + "learning_rate": 7.322805256316203e-05, + "loss": 0.1526, + "step": 49250 + }, + { + "epoch": 1.9020039383759992, + "grad_norm": 1.655206561088562, + "learning_rate": 7.320231154356025e-05, + "loss": 0.2899, + "step": 49260 + }, + { + "epoch": 1.9023900536700258, + "grad_norm": 0.6136038303375244, + "learning_rate": 7.317657052395846e-05, + "loss": 0.2118, + "step": 49270 + }, + { + "epoch": 1.9027761689640528, + "grad_norm": 2.720750093460083, + "learning_rate": 7.315082950435667e-05, + "loss": 0.316, + "step": 49280 + }, + { + "epoch": 1.9031622842580793, + "grad_norm": 0.7502691149711609, + "learning_rate": 7.312508848475488e-05, + "loss": 0.2368, + "step": 49290 + }, + { + "epoch": 1.9035483995521063, + "grad_norm": 1.0178804397583008, + "learning_rate": 7.30993474651531e-05, + "loss": 0.1193, + "step": 49300 + }, + { + "epoch": 1.903934514846133, + "grad_norm": 1.0318552255630493, + "learning_rate": 7.307360644555131e-05, + "loss": 0.1236, + "step": 49310 + }, + { + "epoch": 1.9043206301401598, + "grad_norm": 0.4833224415779114, + "learning_rate": 7.304786542594952e-05, + "loss": 0.1505, + "step": 49320 + }, + { + "epoch": 1.9047067454341866, + "grad_norm": 0.8481758832931519, + "learning_rate": 7.302212440634774e-05, + "loss": 0.318, + "step": 49330 + }, + { + "epoch": 1.9050928607282134, + "grad_norm": 0.8141576051712036, + "learning_rate": 7.299638338674595e-05, + "loss": 0.2936, + "step": 49340 + }, + { + "epoch": 1.9054789760222404, + "grad_norm": 0.28531432151794434, + "learning_rate": 7.297064236714416e-05, + "loss": 0.0794, + "step": 49350 + }, + { + "epoch": 
1.905865091316267, + "grad_norm": 1.2908906936645508, + "learning_rate": 7.294490134754238e-05, + "loss": 0.2753, + "step": 49360 + }, + { + "epoch": 1.906251206610294, + "grad_norm": 0.27395737171173096, + "learning_rate": 7.291916032794059e-05, + "loss": 0.0869, + "step": 49370 + }, + { + "epoch": 1.9066373219043207, + "grad_norm": 0.5168110728263855, + "learning_rate": 7.28934193083388e-05, + "loss": 0.2033, + "step": 49380 + }, + { + "epoch": 1.9070234371983474, + "grad_norm": 0.384120911359787, + "learning_rate": 7.286767828873702e-05, + "loss": 0.2012, + "step": 49390 + }, + { + "epoch": 1.9074095524923742, + "grad_norm": 0.86110919713974, + "learning_rate": 7.284193726913523e-05, + "loss": 0.254, + "step": 49400 + }, + { + "epoch": 1.907795667786401, + "grad_norm": 0.04631857946515083, + "learning_rate": 7.281619624953346e-05, + "loss": 0.1196, + "step": 49410 + }, + { + "epoch": 1.908181783080428, + "grad_norm": 0.1448020040988922, + "learning_rate": 7.279045522993166e-05, + "loss": 0.2028, + "step": 49420 + }, + { + "epoch": 1.9085678983744545, + "grad_norm": 0.11122003197669983, + "learning_rate": 7.276471421032987e-05, + "loss": 0.2106, + "step": 49430 + }, + { + "epoch": 1.9089540136684815, + "grad_norm": 1.0059682130813599, + "learning_rate": 7.273897319072808e-05, + "loss": 0.2273, + "step": 49440 + }, + { + "epoch": 1.909340128962508, + "grad_norm": 0.5203434824943542, + "learning_rate": 7.27132321711263e-05, + "loss": 0.1357, + "step": 49450 + }, + { + "epoch": 1.909726244256535, + "grad_norm": 0.5997903347015381, + "learning_rate": 7.268749115152451e-05, + "loss": 0.204, + "step": 49460 + }, + { + "epoch": 1.9101123595505618, + "grad_norm": 0.1968044638633728, + "learning_rate": 7.266175013192272e-05, + "loss": 0.0851, + "step": 49470 + }, + { + "epoch": 1.9104984748445886, + "grad_norm": 2.3479251861572266, + "learning_rate": 7.263600911232095e-05, + "loss": 0.2232, + "step": 49480 + }, + { + "epoch": 1.9108845901386153, + "grad_norm": 1.1804332733154297, + "learning_rate": 7.261026809271916e-05, + "loss": 0.1622, + "step": 49490 + }, + { + "epoch": 1.911270705432642, + "grad_norm": 1.4964795112609863, + "learning_rate": 7.258452707311736e-05, + "loss": 0.2701, + "step": 49500 + }, + { + "epoch": 1.911656820726669, + "grad_norm": 2.1407668590545654, + "learning_rate": 7.255878605351558e-05, + "loss": 0.2944, + "step": 49510 + }, + { + "epoch": 1.9120429360206956, + "grad_norm": 0.5795183181762695, + "learning_rate": 7.253304503391379e-05, + "loss": 0.2761, + "step": 49520 + }, + { + "epoch": 1.9124290513147226, + "grad_norm": 0.8205333352088928, + "learning_rate": 7.2507304014312e-05, + "loss": 0.3897, + "step": 49530 + }, + { + "epoch": 1.9128151666087494, + "grad_norm": 4.536723613739014, + "learning_rate": 7.248156299471023e-05, + "loss": 0.3429, + "step": 49540 + }, + { + "epoch": 1.9132012819027762, + "grad_norm": 0.7611442804336548, + "learning_rate": 7.245582197510844e-05, + "loss": 0.3098, + "step": 49550 + }, + { + "epoch": 1.913587397196803, + "grad_norm": 0.7911695241928101, + "learning_rate": 7.243008095550666e-05, + "loss": 0.28, + "step": 49560 + }, + { + "epoch": 1.9139735124908297, + "grad_norm": 0.11662279069423676, + "learning_rate": 7.240433993590486e-05, + "loss": 0.1577, + "step": 49570 + }, + { + "epoch": 1.9143596277848567, + "grad_norm": 2.140101194381714, + "learning_rate": 7.237859891630307e-05, + "loss": 0.3705, + "step": 49580 + }, + { + "epoch": 1.9147457430788832, + "grad_norm": 1.8803783655166626, + "learning_rate": 7.235285789670128e-05, + 
"loss": 0.3168, + "step": 49590 + }, + { + "epoch": 1.9151318583729102, + "grad_norm": 0.25436753034591675, + "learning_rate": 7.232711687709951e-05, + "loss": 0.3037, + "step": 49600 + }, + { + "epoch": 1.915517973666937, + "grad_norm": 1.1993011236190796, + "learning_rate": 7.230137585749772e-05, + "loss": 0.2218, + "step": 49610 + }, + { + "epoch": 1.9159040889609638, + "grad_norm": 0.4995238780975342, + "learning_rate": 7.227563483789594e-05, + "loss": 0.1821, + "step": 49620 + }, + { + "epoch": 1.9162902042549905, + "grad_norm": 1.8584142923355103, + "learning_rate": 7.224989381829415e-05, + "loss": 0.2694, + "step": 49630 + }, + { + "epoch": 1.9166763195490173, + "grad_norm": 0.5736681818962097, + "learning_rate": 7.222415279869235e-05, + "loss": 0.32, + "step": 49640 + }, + { + "epoch": 1.9170624348430443, + "grad_norm": 0.2963573634624481, + "learning_rate": 7.219841177909056e-05, + "loss": 0.1986, + "step": 49650 + }, + { + "epoch": 1.9174485501370708, + "grad_norm": 0.6248067617416382, + "learning_rate": 7.217267075948879e-05, + "loss": 0.2461, + "step": 49660 + }, + { + "epoch": 1.9178346654310978, + "grad_norm": 1.037135362625122, + "learning_rate": 7.2146929739887e-05, + "loss": 0.176, + "step": 49670 + }, + { + "epoch": 1.9182207807251244, + "grad_norm": 7.093240261077881, + "learning_rate": 7.212118872028522e-05, + "loss": 0.263, + "step": 49680 + }, + { + "epoch": 1.9186068960191514, + "grad_norm": 0.9378503561019897, + "learning_rate": 7.209544770068343e-05, + "loss": 0.4105, + "step": 49690 + }, + { + "epoch": 1.9189930113131781, + "grad_norm": 2.801452875137329, + "learning_rate": 7.206970668108164e-05, + "loss": 0.1693, + "step": 49700 + }, + { + "epoch": 1.9193791266072049, + "grad_norm": 0.6257158517837524, + "learning_rate": 7.204396566147984e-05, + "loss": 0.2095, + "step": 49710 + }, + { + "epoch": 1.9197652419012317, + "grad_norm": 1.0623574256896973, + "learning_rate": 7.201822464187807e-05, + "loss": 0.2335, + "step": 49720 + }, + { + "epoch": 1.9201513571952584, + "grad_norm": 1.958388328552246, + "learning_rate": 7.199248362227628e-05, + "loss": 0.2545, + "step": 49730 + }, + { + "epoch": 1.9205374724892854, + "grad_norm": 1.790643334388733, + "learning_rate": 7.19667426026745e-05, + "loss": 0.2342, + "step": 49740 + }, + { + "epoch": 1.920923587783312, + "grad_norm": 0.19096235930919647, + "learning_rate": 7.194100158307271e-05, + "loss": 0.1812, + "step": 49750 + }, + { + "epoch": 1.921309703077339, + "grad_norm": 1.8985120058059692, + "learning_rate": 7.191526056347092e-05, + "loss": 0.294, + "step": 49760 + }, + { + "epoch": 1.9216958183713657, + "grad_norm": 2.9629859924316406, + "learning_rate": 7.188951954386914e-05, + "loss": 0.3423, + "step": 49770 + }, + { + "epoch": 1.9220819336653925, + "grad_norm": 1.475262999534607, + "learning_rate": 7.186377852426735e-05, + "loss": 0.1478, + "step": 49780 + }, + { + "epoch": 1.9224680489594193, + "grad_norm": 1.5917531251907349, + "learning_rate": 7.183803750466556e-05, + "loss": 0.1964, + "step": 49790 + }, + { + "epoch": 1.922854164253446, + "grad_norm": 0.20593854784965515, + "learning_rate": 7.181229648506378e-05, + "loss": 0.1775, + "step": 49800 + }, + { + "epoch": 1.923240279547473, + "grad_norm": 0.5509443879127502, + "learning_rate": 7.178655546546199e-05, + "loss": 0.2411, + "step": 49810 + }, + { + "epoch": 1.9236263948414996, + "grad_norm": 0.8016191720962524, + "learning_rate": 7.17608144458602e-05, + "loss": 0.0837, + "step": 49820 + }, + { + "epoch": 1.9240125101355265, + "grad_norm": 
0.8763396143913269, + "learning_rate": 7.173507342625842e-05, + "loss": 0.1932, + "step": 49830 + }, + { + "epoch": 1.9243986254295533, + "grad_norm": 0.25457191467285156, + "learning_rate": 7.170933240665663e-05, + "loss": 0.0869, + "step": 49840 + }, + { + "epoch": 1.92478474072358, + "grad_norm": 2.61993145942688, + "learning_rate": 7.168359138705484e-05, + "loss": 0.1839, + "step": 49850 + }, + { + "epoch": 1.9251708560176068, + "grad_norm": 2.2821877002716064, + "learning_rate": 7.165785036745306e-05, + "loss": 0.3863, + "step": 49860 + }, + { + "epoch": 1.9255569713116336, + "grad_norm": 1.4072798490524292, + "learning_rate": 7.163210934785127e-05, + "loss": 0.2477, + "step": 49870 + }, + { + "epoch": 1.9259430866056606, + "grad_norm": 0.8962070941925049, + "learning_rate": 7.160636832824948e-05, + "loss": 0.2936, + "step": 49880 + }, + { + "epoch": 1.9263292018996871, + "grad_norm": 3.0801923274993896, + "learning_rate": 7.15806273086477e-05, + "loss": 0.2368, + "step": 49890 + }, + { + "epoch": 1.9267153171937141, + "grad_norm": 0.5756659507751465, + "learning_rate": 7.155488628904591e-05, + "loss": 0.2774, + "step": 49900 + }, + { + "epoch": 1.9271014324877407, + "grad_norm": 2.669837236404419, + "learning_rate": 7.152914526944414e-05, + "loss": 0.2742, + "step": 49910 + }, + { + "epoch": 1.9274875477817677, + "grad_norm": 1.3064197301864624, + "learning_rate": 7.150340424984234e-05, + "loss": 0.3908, + "step": 49920 + }, + { + "epoch": 1.9278736630757944, + "grad_norm": 1.54086434841156, + "learning_rate": 7.147766323024055e-05, + "loss": 0.2302, + "step": 49930 + }, + { + "epoch": 1.9282597783698212, + "grad_norm": 2.6577224731445312, + "learning_rate": 7.145192221063876e-05, + "loss": 0.2667, + "step": 49940 + }, + { + "epoch": 1.928645893663848, + "grad_norm": 1.0387258529663086, + "learning_rate": 7.142618119103698e-05, + "loss": 0.2418, + "step": 49950 + }, + { + "epoch": 1.9290320089578747, + "grad_norm": 0.592282772064209, + "learning_rate": 7.140044017143519e-05, + "loss": 0.2499, + "step": 49960 + }, + { + "epoch": 1.9294181242519017, + "grad_norm": 0.364241361618042, + "learning_rate": 7.137469915183342e-05, + "loss": 0.2056, + "step": 49970 + }, + { + "epoch": 1.9298042395459283, + "grad_norm": 2.9593188762664795, + "learning_rate": 7.134895813223163e-05, + "loss": 0.2514, + "step": 49980 + }, + { + "epoch": 1.9301903548399553, + "grad_norm": 1.9135371446609497, + "learning_rate": 7.132321711262983e-05, + "loss": 0.124, + "step": 49990 + }, + { + "epoch": 1.930576470133982, + "grad_norm": 0.5927162170410156, + "learning_rate": 7.129747609302804e-05, + "loss": 0.2257, + "step": 50000 + }, + { + "epoch": 1.9309625854280088, + "grad_norm": 1.7300679683685303, + "learning_rate": 7.127173507342626e-05, + "loss": 0.2302, + "step": 50010 + }, + { + "epoch": 1.9313487007220356, + "grad_norm": 1.48344087600708, + "learning_rate": 7.124599405382447e-05, + "loss": 0.3121, + "step": 50020 + }, + { + "epoch": 1.9317348160160623, + "grad_norm": 1.275780439376831, + "learning_rate": 7.122025303422268e-05, + "loss": 0.2202, + "step": 50030 + }, + { + "epoch": 1.9321209313100893, + "grad_norm": 1.2625102996826172, + "learning_rate": 7.119451201462091e-05, + "loss": 0.1819, + "step": 50040 + }, + { + "epoch": 1.9325070466041159, + "grad_norm": 2.6306488513946533, + "learning_rate": 7.116877099501912e-05, + "loss": 0.3846, + "step": 50050 + }, + { + "epoch": 1.9328931618981429, + "grad_norm": 2.507249355316162, + "learning_rate": 7.114302997541732e-05, + "loss": 0.3167, + "step": 50060 + 
}, + { + "epoch": 1.9332792771921696, + "grad_norm": 0.6122744679450989, + "learning_rate": 7.111728895581554e-05, + "loss": 0.2486, + "step": 50070 + }, + { + "epoch": 1.9336653924861964, + "grad_norm": 0.9299182295799255, + "learning_rate": 7.109154793621375e-05, + "loss": 0.2156, + "step": 50080 + }, + { + "epoch": 1.9340515077802232, + "grad_norm": 2.0560238361358643, + "learning_rate": 7.106580691661196e-05, + "loss": 0.1319, + "step": 50090 + }, + { + "epoch": 1.93443762307425, + "grad_norm": 0.9602612257003784, + "learning_rate": 7.104006589701019e-05, + "loss": 0.1433, + "step": 50100 + }, + { + "epoch": 1.934823738368277, + "grad_norm": 1.2760334014892578, + "learning_rate": 7.10143248774084e-05, + "loss": 0.2019, + "step": 50110 + }, + { + "epoch": 1.9352098536623035, + "grad_norm": 0.27404239773750305, + "learning_rate": 7.098858385780662e-05, + "loss": 0.5303, + "step": 50120 + }, + { + "epoch": 1.9355959689563305, + "grad_norm": 0.6597281694412231, + "learning_rate": 7.096284283820483e-05, + "loss": 0.2328, + "step": 50130 + }, + { + "epoch": 1.935982084250357, + "grad_norm": 3.6417131423950195, + "learning_rate": 7.093710181860303e-05, + "loss": 0.343, + "step": 50140 + }, + { + "epoch": 1.936368199544384, + "grad_norm": 0.9950355887413025, + "learning_rate": 7.091136079900124e-05, + "loss": 0.1877, + "step": 50150 + }, + { + "epoch": 1.9367543148384108, + "grad_norm": 0.3848172426223755, + "learning_rate": 7.088561977939947e-05, + "loss": 0.1541, + "step": 50160 + }, + { + "epoch": 1.9371404301324375, + "grad_norm": 0.6884573698043823, + "learning_rate": 7.085987875979768e-05, + "loss": 0.1744, + "step": 50170 + }, + { + "epoch": 1.9375265454264643, + "grad_norm": 1.5233834981918335, + "learning_rate": 7.08341377401959e-05, + "loss": 0.3595, + "step": 50180 + }, + { + "epoch": 1.937912660720491, + "grad_norm": 0.11241710186004639, + "learning_rate": 7.080839672059411e-05, + "loss": 0.1313, + "step": 50190 + }, + { + "epoch": 1.938298776014518, + "grad_norm": 1.3029096126556396, + "learning_rate": 7.078265570099232e-05, + "loss": 0.3459, + "step": 50200 + }, + { + "epoch": 1.9386848913085446, + "grad_norm": 1.1150782108306885, + "learning_rate": 7.075691468139052e-05, + "loss": 0.2662, + "step": 50210 + }, + { + "epoch": 1.9390710066025716, + "grad_norm": 0.09661692380905151, + "learning_rate": 7.073117366178875e-05, + "loss": 0.2377, + "step": 50220 + }, + { + "epoch": 1.9394571218965984, + "grad_norm": 2.488790512084961, + "learning_rate": 7.070543264218696e-05, + "loss": 0.2677, + "step": 50230 + }, + { + "epoch": 1.9398432371906251, + "grad_norm": 0.407704621553421, + "learning_rate": 7.067969162258518e-05, + "loss": 0.1208, + "step": 50240 + }, + { + "epoch": 1.940229352484652, + "grad_norm": 0.4769364297389984, + "learning_rate": 7.065395060298339e-05, + "loss": 0.1748, + "step": 50250 + }, + { + "epoch": 1.9406154677786787, + "grad_norm": 2.900118112564087, + "learning_rate": 7.06282095833816e-05, + "loss": 0.1948, + "step": 50260 + }, + { + "epoch": 1.9410015830727056, + "grad_norm": 1.1749001741409302, + "learning_rate": 7.060246856377982e-05, + "loss": 0.1688, + "step": 50270 + }, + { + "epoch": 1.9413876983667322, + "grad_norm": 0.5052315592765808, + "learning_rate": 7.057672754417802e-05, + "loss": 0.4284, + "step": 50280 + }, + { + "epoch": 1.9417738136607592, + "grad_norm": 0.614936113357544, + "learning_rate": 7.055098652457624e-05, + "loss": 0.2109, + "step": 50290 + }, + { + "epoch": 1.942159928954786, + "grad_norm": 1.9683163166046143, + "learning_rate": 
7.052524550497446e-05, + "loss": 0.1518, + "step": 50300 + }, + { + "epoch": 1.9425460442488127, + "grad_norm": 0.8502413630485535, + "learning_rate": 7.049950448537267e-05, + "loss": 0.281, + "step": 50310 + }, + { + "epoch": 1.9429321595428395, + "grad_norm": 2.8081016540527344, + "learning_rate": 7.047376346577088e-05, + "loss": 0.1802, + "step": 50320 + }, + { + "epoch": 1.9433182748368663, + "grad_norm": 1.9322141408920288, + "learning_rate": 7.04480224461691e-05, + "loss": 0.2218, + "step": 50330 + }, + { + "epoch": 1.9437043901308932, + "grad_norm": 0.8338032960891724, + "learning_rate": 7.042228142656731e-05, + "loss": 0.1836, + "step": 50340 + }, + { + "epoch": 1.9440905054249198, + "grad_norm": 1.232925295829773, + "learning_rate": 7.039654040696552e-05, + "loss": 0.2025, + "step": 50350 + }, + { + "epoch": 1.9444766207189468, + "grad_norm": 0.5655641555786133, + "learning_rate": 7.037079938736374e-05, + "loss": 0.2061, + "step": 50360 + }, + { + "epoch": 1.9448627360129733, + "grad_norm": 1.398917317390442, + "learning_rate": 7.034505836776195e-05, + "loss": 0.3471, + "step": 50370 + }, + { + "epoch": 1.9452488513070003, + "grad_norm": 0.8988509178161621, + "learning_rate": 7.031931734816016e-05, + "loss": 0.3504, + "step": 50380 + }, + { + "epoch": 1.945634966601027, + "grad_norm": 0.10333681106567383, + "learning_rate": 7.029357632855838e-05, + "loss": 0.352, + "step": 50390 + }, + { + "epoch": 1.9460210818950539, + "grad_norm": 1.3678967952728271, + "learning_rate": 7.026783530895659e-05, + "loss": 0.1763, + "step": 50400 + }, + { + "epoch": 1.9464071971890806, + "grad_norm": 3.1605618000030518, + "learning_rate": 7.02420942893548e-05, + "loss": 0.4636, + "step": 50410 + }, + { + "epoch": 1.9467933124831074, + "grad_norm": 0.02575235441327095, + "learning_rate": 7.021635326975302e-05, + "loss": 0.2352, + "step": 50420 + }, + { + "epoch": 1.9471794277771344, + "grad_norm": 1.789573311805725, + "learning_rate": 7.019061225015123e-05, + "loss": 0.3358, + "step": 50430 + }, + { + "epoch": 1.947565543071161, + "grad_norm": 1.0206273794174194, + "learning_rate": 7.016487123054944e-05, + "loss": 0.1509, + "step": 50440 + }, + { + "epoch": 1.947951658365188, + "grad_norm": 3.0096218585968018, + "learning_rate": 7.013913021094766e-05, + "loss": 0.3393, + "step": 50450 + }, + { + "epoch": 1.9483377736592147, + "grad_norm": 0.7118330001831055, + "learning_rate": 7.011338919134587e-05, + "loss": 0.2159, + "step": 50460 + }, + { + "epoch": 1.9487238889532414, + "grad_norm": 0.2753995954990387, + "learning_rate": 7.00876481717441e-05, + "loss": 0.2659, + "step": 50470 + }, + { + "epoch": 1.9491100042472682, + "grad_norm": 2.5077409744262695, + "learning_rate": 7.006190715214231e-05, + "loss": 0.3032, + "step": 50480 + }, + { + "epoch": 1.949496119541295, + "grad_norm": 0.3444388806819916, + "learning_rate": 7.003616613254051e-05, + "loss": 0.2625, + "step": 50490 + }, + { + "epoch": 1.949882234835322, + "grad_norm": 1.1488401889801025, + "learning_rate": 7.001042511293872e-05, + "loss": 0.2443, + "step": 50500 + }, + { + "epoch": 1.9502683501293485, + "grad_norm": 0.6464126706123352, + "learning_rate": 6.998468409333694e-05, + "loss": 0.1268, + "step": 50510 + }, + { + "epoch": 1.9506544654233755, + "grad_norm": 0.6716893911361694, + "learning_rate": 6.995894307373515e-05, + "loss": 0.1346, + "step": 50520 + }, + { + "epoch": 1.951040580717402, + "grad_norm": 1.7599986791610718, + "learning_rate": 6.993320205413336e-05, + "loss": 0.2365, + "step": 50530 + }, + { + "epoch": 
1.951426696011429, + "grad_norm": 0.7483705282211304, + "learning_rate": 6.990746103453159e-05, + "loss": 0.326, + "step": 50540 + }, + { + "epoch": 1.9518128113054558, + "grad_norm": 1.9978541135787964, + "learning_rate": 6.98817200149298e-05, + "loss": 0.2166, + "step": 50550 + }, + { + "epoch": 1.9521989265994826, + "grad_norm": 0.50310218334198, + "learning_rate": 6.9855978995328e-05, + "loss": 0.0868, + "step": 50560 + }, + { + "epoch": 1.9525850418935096, + "grad_norm": 0.4358873963356018, + "learning_rate": 6.983023797572622e-05, + "loss": 0.2483, + "step": 50570 + }, + { + "epoch": 1.952971157187536, + "grad_norm": 1.1612942218780518, + "learning_rate": 6.980449695612443e-05, + "loss": 0.1993, + "step": 50580 + }, + { + "epoch": 1.953357272481563, + "grad_norm": 2.2015364170074463, + "learning_rate": 6.977875593652264e-05, + "loss": 0.3102, + "step": 50590 + }, + { + "epoch": 1.9537433877755896, + "grad_norm": 0.2711980938911438, + "learning_rate": 6.975301491692087e-05, + "loss": 0.1243, + "step": 50600 + }, + { + "epoch": 1.9541295030696166, + "grad_norm": 1.2215690612792969, + "learning_rate": 6.972727389731908e-05, + "loss": 0.1812, + "step": 50610 + }, + { + "epoch": 1.9545156183636434, + "grad_norm": 1.5580382347106934, + "learning_rate": 6.97015328777173e-05, + "loss": 0.2582, + "step": 50620 + }, + { + "epoch": 1.9549017336576702, + "grad_norm": 4.351020336151123, + "learning_rate": 6.96757918581155e-05, + "loss": 0.2451, + "step": 50630 + }, + { + "epoch": 1.955287848951697, + "grad_norm": 0.8245100975036621, + "learning_rate": 6.965005083851371e-05, + "loss": 0.0861, + "step": 50640 + }, + { + "epoch": 1.9556739642457237, + "grad_norm": 3.2745001316070557, + "learning_rate": 6.962430981891192e-05, + "loss": 0.3108, + "step": 50650 + }, + { + "epoch": 1.9560600795397507, + "grad_norm": 1.2234485149383545, + "learning_rate": 6.959856879931015e-05, + "loss": 0.2457, + "step": 50660 + }, + { + "epoch": 1.9564461948337772, + "grad_norm": 0.8801009058952332, + "learning_rate": 6.957282777970836e-05, + "loss": 0.1758, + "step": 50670 + }, + { + "epoch": 1.9568323101278042, + "grad_norm": 1.0892245769500732, + "learning_rate": 6.954708676010658e-05, + "loss": 0.217, + "step": 50680 + }, + { + "epoch": 1.957218425421831, + "grad_norm": 0.47810041904449463, + "learning_rate": 6.952134574050479e-05, + "loss": 0.1797, + "step": 50690 + }, + { + "epoch": 1.9576045407158578, + "grad_norm": 2.988180160522461, + "learning_rate": 6.9495604720903e-05, + "loss": 0.2899, + "step": 50700 + }, + { + "epoch": 1.9579906560098845, + "grad_norm": 1.7291783094406128, + "learning_rate": 6.94698637013012e-05, + "loss": 0.214, + "step": 50710 + }, + { + "epoch": 1.9583767713039113, + "grad_norm": 1.1876074075698853, + "learning_rate": 6.944412268169943e-05, + "loss": 0.1801, + "step": 50720 + }, + { + "epoch": 1.9587628865979383, + "grad_norm": 1.5710748434066772, + "learning_rate": 6.941838166209764e-05, + "loss": 0.393, + "step": 50730 + }, + { + "epoch": 1.9591490018919648, + "grad_norm": 1.422935128211975, + "learning_rate": 6.939264064249586e-05, + "loss": 0.2637, + "step": 50740 + }, + { + "epoch": 1.9595351171859918, + "grad_norm": 0.26971349120140076, + "learning_rate": 6.936689962289407e-05, + "loss": 0.2616, + "step": 50750 + }, + { + "epoch": 1.9599212324800184, + "grad_norm": 0.02176385000348091, + "learning_rate": 6.934115860329228e-05, + "loss": 0.2458, + "step": 50760 + }, + { + "epoch": 1.9603073477740454, + "grad_norm": 1.587498664855957, + "learning_rate": 6.93154175836905e-05, + 
"loss": 0.2505, + "step": 50770 + }, + { + "epoch": 1.9606934630680721, + "grad_norm": 0.7178042531013489, + "learning_rate": 6.92896765640887e-05, + "loss": 0.1028, + "step": 50780 + }, + { + "epoch": 1.961079578362099, + "grad_norm": 0.4361552894115448, + "learning_rate": 6.926393554448692e-05, + "loss": 0.1327, + "step": 50790 + }, + { + "epoch": 1.9614656936561257, + "grad_norm": 0.1252552568912506, + "learning_rate": 6.923819452488514e-05, + "loss": 0.2598, + "step": 50800 + }, + { + "epoch": 1.9618518089501524, + "grad_norm": 1.0288604497909546, + "learning_rate": 6.921245350528335e-05, + "loss": 0.1415, + "step": 50810 + }, + { + "epoch": 1.9622379242441794, + "grad_norm": 1.633277416229248, + "learning_rate": 6.918671248568156e-05, + "loss": 0.3346, + "step": 50820 + }, + { + "epoch": 1.962624039538206, + "grad_norm": 1.034558653831482, + "learning_rate": 6.916097146607978e-05, + "loss": 0.3273, + "step": 50830 + }, + { + "epoch": 1.963010154832233, + "grad_norm": 0.4945419132709503, + "learning_rate": 6.913523044647799e-05, + "loss": 0.2027, + "step": 50840 + }, + { + "epoch": 1.9633962701262597, + "grad_norm": 0.49989691376686096, + "learning_rate": 6.91094894268762e-05, + "loss": 0.3082, + "step": 50850 + }, + { + "epoch": 1.9637823854202865, + "grad_norm": 1.8456840515136719, + "learning_rate": 6.908374840727442e-05, + "loss": 0.102, + "step": 50860 + }, + { + "epoch": 1.9641685007143133, + "grad_norm": 1.256460428237915, + "learning_rate": 6.905800738767263e-05, + "loss": 0.1375, + "step": 50870 + }, + { + "epoch": 1.96455461600834, + "grad_norm": 2.0318634510040283, + "learning_rate": 6.903226636807084e-05, + "loss": 0.2662, + "step": 50880 + }, + { + "epoch": 1.964940731302367, + "grad_norm": 0.6381733417510986, + "learning_rate": 6.900652534846906e-05, + "loss": 0.176, + "step": 50890 + }, + { + "epoch": 1.9653268465963936, + "grad_norm": 0.2860821485519409, + "learning_rate": 6.898078432886727e-05, + "loss": 0.1307, + "step": 50900 + }, + { + "epoch": 1.9657129618904206, + "grad_norm": 1.2890506982803345, + "learning_rate": 6.895504330926548e-05, + "loss": 0.1305, + "step": 50910 + }, + { + "epoch": 1.9660990771844473, + "grad_norm": 0.13067105412483215, + "learning_rate": 6.89293022896637e-05, + "loss": 0.1777, + "step": 50920 + }, + { + "epoch": 1.966485192478474, + "grad_norm": 0.7632800340652466, + "learning_rate": 6.890356127006191e-05, + "loss": 0.1248, + "step": 50930 + }, + { + "epoch": 1.9668713077725009, + "grad_norm": 0.10640933364629745, + "learning_rate": 6.887782025046012e-05, + "loss": 0.2368, + "step": 50940 + }, + { + "epoch": 1.9672574230665276, + "grad_norm": 0.8060460686683655, + "learning_rate": 6.885207923085834e-05, + "loss": 0.1205, + "step": 50950 + }, + { + "epoch": 1.9676435383605546, + "grad_norm": 0.028001902624964714, + "learning_rate": 6.882633821125655e-05, + "loss": 0.1134, + "step": 50960 + }, + { + "epoch": 1.9680296536545812, + "grad_norm": 1.236852765083313, + "learning_rate": 6.880059719165478e-05, + "loss": 0.3189, + "step": 50970 + }, + { + "epoch": 1.9684157689486081, + "grad_norm": 2.090635299682617, + "learning_rate": 6.877485617205298e-05, + "loss": 0.265, + "step": 50980 + }, + { + "epoch": 1.9688018842426347, + "grad_norm": 1.046667218208313, + "learning_rate": 6.874911515245119e-05, + "loss": 0.2501, + "step": 50990 + }, + { + "epoch": 1.9691879995366617, + "grad_norm": 2.0770325660705566, + "learning_rate": 6.87233741328494e-05, + "loss": 0.2136, + "step": 51000 + }, + { + "epoch": 1.9695741148306884, + "grad_norm": 
0.7211881279945374, + "learning_rate": 6.869763311324762e-05, + "loss": 0.2434, + "step": 51010 + }, + { + "epoch": 1.9699602301247152, + "grad_norm": 0.8447550535202026, + "learning_rate": 6.867189209364583e-05, + "loss": 0.3904, + "step": 51020 + }, + { + "epoch": 1.970346345418742, + "grad_norm": 0.2205502986907959, + "learning_rate": 6.864615107404406e-05, + "loss": 0.1804, + "step": 51030 + }, + { + "epoch": 1.9707324607127688, + "grad_norm": 0.5322203636169434, + "learning_rate": 6.862041005444227e-05, + "loss": 0.2106, + "step": 51040 + }, + { + "epoch": 1.9711185760067957, + "grad_norm": 3.1091675758361816, + "learning_rate": 6.859466903484048e-05, + "loss": 0.3203, + "step": 51050 + }, + { + "epoch": 1.9715046913008223, + "grad_norm": 2.670405864715576, + "learning_rate": 6.856892801523868e-05, + "loss": 0.2011, + "step": 51060 + }, + { + "epoch": 1.9718908065948493, + "grad_norm": 1.0524908304214478, + "learning_rate": 6.85431869956369e-05, + "loss": 0.1144, + "step": 51070 + }, + { + "epoch": 1.972276921888876, + "grad_norm": 3.9005608558654785, + "learning_rate": 6.851744597603511e-05, + "loss": 0.3813, + "step": 51080 + }, + { + "epoch": 1.9726630371829028, + "grad_norm": 1.1112456321716309, + "learning_rate": 6.849170495643332e-05, + "loss": 0.1755, + "step": 51090 + }, + { + "epoch": 1.9730491524769296, + "grad_norm": 0.5004397630691528, + "learning_rate": 6.846596393683155e-05, + "loss": 0.17, + "step": 51100 + }, + { + "epoch": 1.9734352677709563, + "grad_norm": 0.7849172353744507, + "learning_rate": 6.844022291722976e-05, + "loss": 0.1669, + "step": 51110 + }, + { + "epoch": 1.9738213830649833, + "grad_norm": 0.7871361970901489, + "learning_rate": 6.841448189762798e-05, + "loss": 0.1466, + "step": 51120 + }, + { + "epoch": 1.9742074983590099, + "grad_norm": 1.1090983152389526, + "learning_rate": 6.838874087802617e-05, + "loss": 0.117, + "step": 51130 + }, + { + "epoch": 1.9745936136530369, + "grad_norm": 0.8283473253250122, + "learning_rate": 6.836299985842439e-05, + "loss": 0.1792, + "step": 51140 + }, + { + "epoch": 1.9749797289470636, + "grad_norm": 1.0861999988555908, + "learning_rate": 6.83372588388226e-05, + "loss": 0.1357, + "step": 51150 + }, + { + "epoch": 1.9753658442410904, + "grad_norm": 1.0975921154022217, + "learning_rate": 6.831151781922083e-05, + "loss": 0.2618, + "step": 51160 + }, + { + "epoch": 1.9757519595351172, + "grad_norm": 1.579583764076233, + "learning_rate": 6.828577679961904e-05, + "loss": 0.3458, + "step": 51170 + }, + { + "epoch": 1.976138074829144, + "grad_norm": 0.48704493045806885, + "learning_rate": 6.826003578001726e-05, + "loss": 0.1892, + "step": 51180 + }, + { + "epoch": 1.976524190123171, + "grad_norm": 0.4196261465549469, + "learning_rate": 6.823429476041547e-05, + "loss": 0.2072, + "step": 51190 + }, + { + "epoch": 1.9769103054171975, + "grad_norm": 0.5368069410324097, + "learning_rate": 6.820855374081367e-05, + "loss": 0.1758, + "step": 51200 + }, + { + "epoch": 1.9772964207112245, + "grad_norm": 3.1401877403259277, + "learning_rate": 6.818281272121188e-05, + "loss": 0.2463, + "step": 51210 + }, + { + "epoch": 1.977682536005251, + "grad_norm": 2.333087682723999, + "learning_rate": 6.815707170161011e-05, + "loss": 0.1826, + "step": 51220 + }, + { + "epoch": 1.978068651299278, + "grad_norm": 0.4700605869293213, + "learning_rate": 6.813133068200832e-05, + "loss": 0.1761, + "step": 51230 + }, + { + "epoch": 1.9784547665933048, + "grad_norm": 0.23108141124248505, + "learning_rate": 6.810558966240653e-05, + "loss": 0.3299, + "step": 
51240 + }, + { + "epoch": 1.9788408818873315, + "grad_norm": 1.1515973806381226, + "learning_rate": 6.807984864280475e-05, + "loss": 0.2352, + "step": 51250 + }, + { + "epoch": 1.9792269971813583, + "grad_norm": 3.1671624183654785, + "learning_rate": 6.805410762320296e-05, + "loss": 0.18, + "step": 51260 + }, + { + "epoch": 1.979613112475385, + "grad_norm": 1.9045623540878296, + "learning_rate": 6.802836660360116e-05, + "loss": 0.1498, + "step": 51270 + }, + { + "epoch": 1.979999227769412, + "grad_norm": 3.6761367321014404, + "learning_rate": 6.800262558399939e-05, + "loss": 0.3242, + "step": 51280 + }, + { + "epoch": 1.9803853430634386, + "grad_norm": 0.8505986332893372, + "learning_rate": 6.79768845643976e-05, + "loss": 0.1563, + "step": 51290 + }, + { + "epoch": 1.9807714583574656, + "grad_norm": 0.5191363096237183, + "learning_rate": 6.795114354479581e-05, + "loss": 0.1699, + "step": 51300 + }, + { + "epoch": 1.9811575736514924, + "grad_norm": 1.0205559730529785, + "learning_rate": 6.792540252519403e-05, + "loss": 0.1785, + "step": 51310 + }, + { + "epoch": 1.9815436889455191, + "grad_norm": 0.929551899433136, + "learning_rate": 6.789966150559224e-05, + "loss": 0.2618, + "step": 51320 + }, + { + "epoch": 1.981929804239546, + "grad_norm": 0.3799718916416168, + "learning_rate": 6.787392048599045e-05, + "loss": 0.2574, + "step": 51330 + }, + { + "epoch": 1.9823159195335727, + "grad_norm": 0.8543326258659363, + "learning_rate": 6.784817946638867e-05, + "loss": 0.2277, + "step": 51340 + }, + { + "epoch": 1.9827020348275997, + "grad_norm": 1.2967779636383057, + "learning_rate": 6.782243844678688e-05, + "loss": 0.1693, + "step": 51350 + }, + { + "epoch": 1.9830881501216262, + "grad_norm": 0.9887800216674805, + "learning_rate": 6.77966974271851e-05, + "loss": 0.151, + "step": 51360 + }, + { + "epoch": 1.9834742654156532, + "grad_norm": 1.197924256324768, + "learning_rate": 6.777095640758331e-05, + "loss": 0.2184, + "step": 51370 + }, + { + "epoch": 1.98386038070968, + "grad_norm": 1.3980039358139038, + "learning_rate": 6.774521538798152e-05, + "loss": 0.3999, + "step": 51380 + }, + { + "epoch": 1.9842464960037067, + "grad_norm": 2.041222095489502, + "learning_rate": 6.771947436837973e-05, + "loss": 0.203, + "step": 51390 + }, + { + "epoch": 1.9846326112977335, + "grad_norm": 1.3461644649505615, + "learning_rate": 6.769373334877795e-05, + "loss": 0.2735, + "step": 51400 + }, + { + "epoch": 1.9850187265917603, + "grad_norm": 1.046863079071045, + "learning_rate": 6.766799232917616e-05, + "loss": 0.3281, + "step": 51410 + }, + { + "epoch": 1.9854048418857873, + "grad_norm": 1.3338502645492554, + "learning_rate": 6.764225130957437e-05, + "loss": 0.2538, + "step": 51420 + }, + { + "epoch": 1.9857909571798138, + "grad_norm": 1.3392490148544312, + "learning_rate": 6.761651028997259e-05, + "loss": 0.2737, + "step": 51430 + }, + { + "epoch": 1.9861770724738408, + "grad_norm": 1.5787503719329834, + "learning_rate": 6.75907692703708e-05, + "loss": 0.185, + "step": 51440 + }, + { + "epoch": 1.9865631877678673, + "grad_norm": 0.5758817791938782, + "learning_rate": 6.756502825076901e-05, + "loss": 0.1879, + "step": 51450 + }, + { + "epoch": 1.9869493030618943, + "grad_norm": 1.6144100427627563, + "learning_rate": 6.753928723116723e-05, + "loss": 0.2082, + "step": 51460 + }, + { + "epoch": 1.987335418355921, + "grad_norm": 0.7562010884284973, + "learning_rate": 6.751354621156545e-05, + "loss": 0.1859, + "step": 51470 + }, + { + "epoch": 1.9877215336499479, + "grad_norm": 1.3656364679336548, + "learning_rate": 
6.748780519196365e-05, + "loss": 0.1591, + "step": 51480 + }, + { + "epoch": 1.9881076489439746, + "grad_norm": 2.7086987495422363, + "learning_rate": 6.746206417236187e-05, + "loss": 0.2364, + "step": 51490 + }, + { + "epoch": 1.9884937642380014, + "grad_norm": 2.2941534519195557, + "learning_rate": 6.743632315276008e-05, + "loss": 0.2207, + "step": 51500 + }, + { + "epoch": 1.9888798795320284, + "grad_norm": 1.2723060846328735, + "learning_rate": 6.74105821331583e-05, + "loss": 0.2617, + "step": 51510 + }, + { + "epoch": 1.989265994826055, + "grad_norm": 2.308548927307129, + "learning_rate": 6.738484111355651e-05, + "loss": 0.1709, + "step": 51520 + }, + { + "epoch": 1.989652110120082, + "grad_norm": 0.7583028078079224, + "learning_rate": 6.735910009395473e-05, + "loss": 0.2998, + "step": 51530 + }, + { + "epoch": 1.9900382254141087, + "grad_norm": 2.3491339683532715, + "learning_rate": 6.733335907435295e-05, + "loss": 0.209, + "step": 51540 + }, + { + "epoch": 1.9904243407081355, + "grad_norm": 3.154418468475342, + "learning_rate": 6.730761805475115e-05, + "loss": 0.3855, + "step": 51550 + }, + { + "epoch": 1.9908104560021622, + "grad_norm": 0.6829432845115662, + "learning_rate": 6.728187703514936e-05, + "loss": 0.1427, + "step": 51560 + }, + { + "epoch": 1.991196571296189, + "grad_norm": 1.331217646598816, + "learning_rate": 6.725613601554757e-05, + "loss": 0.1843, + "step": 51570 + }, + { + "epoch": 1.991582686590216, + "grad_norm": 2.35197114944458, + "learning_rate": 6.723039499594579e-05, + "loss": 0.2557, + "step": 51580 + }, + { + "epoch": 1.9919688018842425, + "grad_norm": 0.5971415638923645, + "learning_rate": 6.7204653976344e-05, + "loss": 0.1782, + "step": 51590 + }, + { + "epoch": 1.9923549171782695, + "grad_norm": 1.0941762924194336, + "learning_rate": 6.717891295674223e-05, + "loss": 0.1377, + "step": 51600 + }, + { + "epoch": 1.9927410324722963, + "grad_norm": 0.43831324577331543, + "learning_rate": 6.715317193714044e-05, + "loss": 0.181, + "step": 51610 + }, + { + "epoch": 1.993127147766323, + "grad_norm": 1.4920772314071655, + "learning_rate": 6.712743091753864e-05, + "loss": 0.2724, + "step": 51620 + }, + { + "epoch": 1.9935132630603498, + "grad_norm": 0.6723024845123291, + "learning_rate": 6.710168989793685e-05, + "loss": 0.1732, + "step": 51630 + }, + { + "epoch": 1.9938993783543766, + "grad_norm": 0.925322413444519, + "learning_rate": 6.707594887833507e-05, + "loss": 0.2794, + "step": 51640 + }, + { + "epoch": 1.9942854936484036, + "grad_norm": 0.27710771560668945, + "learning_rate": 6.705020785873328e-05, + "loss": 0.2379, + "step": 51650 + }, + { + "epoch": 1.9946716089424301, + "grad_norm": 0.24498054385185242, + "learning_rate": 6.702446683913151e-05, + "loss": 0.2685, + "step": 51660 + }, + { + "epoch": 1.995057724236457, + "grad_norm": 0.43449532985687256, + "learning_rate": 6.699872581952972e-05, + "loss": 0.2799, + "step": 51670 + }, + { + "epoch": 1.9954438395304837, + "grad_norm": 1.3820387125015259, + "learning_rate": 6.697298479992793e-05, + "loss": 0.3061, + "step": 51680 + }, + { + "epoch": 1.9958299548245106, + "grad_norm": 1.3658883571624756, + "learning_rate": 6.694724378032615e-05, + "loss": 0.2033, + "step": 51690 + }, + { + "epoch": 1.9962160701185374, + "grad_norm": 0.905115008354187, + "learning_rate": 6.692150276072435e-05, + "loss": 0.1817, + "step": 51700 + }, + { + "epoch": 1.9966021854125642, + "grad_norm": 0.6158314347267151, + "learning_rate": 6.689576174112256e-05, + "loss": 0.1853, + "step": 51710 + }, + { + "epoch": 1.996988300706591, 
+ "grad_norm": 0.145170658826828, + "learning_rate": 6.687002072152079e-05, + "loss": 0.1351, + "step": 51720 + }, + { + "epoch": 1.9973744160006177, + "grad_norm": 0.6830449104309082, + "learning_rate": 6.6844279701919e-05, + "loss": 0.1196, + "step": 51730 + }, + { + "epoch": 1.9977605312946447, + "grad_norm": 0.5634799599647522, + "learning_rate": 6.681853868231721e-05, + "loss": 0.1919, + "step": 51740 + }, + { + "epoch": 1.9981466465886712, + "grad_norm": 1.7590057849884033, + "learning_rate": 6.679279766271543e-05, + "loss": 0.5667, + "step": 51750 + }, + { + "epoch": 1.9985327618826982, + "grad_norm": 0.638635516166687, + "learning_rate": 6.676705664311364e-05, + "loss": 0.2653, + "step": 51760 + }, + { + "epoch": 1.998918877176725, + "grad_norm": 2.008990526199341, + "learning_rate": 6.674131562351184e-05, + "loss": 0.3168, + "step": 51770 + }, + { + "epoch": 1.9993049924707518, + "grad_norm": 0.6151747107505798, + "learning_rate": 6.671557460391007e-05, + "loss": 0.2078, + "step": 51780 + }, + { + "epoch": 1.9996911077647785, + "grad_norm": 1.702333688735962, + "learning_rate": 6.668983358430828e-05, + "loss": 0.3154, + "step": 51790 + }, + { + "epoch": 2.0000772230588053, + "grad_norm": 1.1011065244674683, + "learning_rate": 6.66640925647065e-05, + "loss": 0.2318, + "step": 51800 + }, + { + "epoch": 2.0004633383528323, + "grad_norm": 0.7088577747344971, + "learning_rate": 6.663835154510471e-05, + "loss": 0.2106, + "step": 51810 + }, + { + "epoch": 2.000849453646859, + "grad_norm": 0.9032210111618042, + "learning_rate": 6.661261052550292e-05, + "loss": 0.2235, + "step": 51820 + }, + { + "epoch": 2.001235568940886, + "grad_norm": 1.8217551708221436, + "learning_rate": 6.658686950590113e-05, + "loss": 0.2385, + "step": 51830 + }, + { + "epoch": 2.0016216842349124, + "grad_norm": 0.3937009871006012, + "learning_rate": 6.656112848629933e-05, + "loss": 0.0739, + "step": 51840 + }, + { + "epoch": 2.0020077995289394, + "grad_norm": 0.1690339297056198, + "learning_rate": 6.653538746669756e-05, + "loss": 0.2361, + "step": 51850 + }, + { + "epoch": 2.0023939148229664, + "grad_norm": 0.4467180073261261, + "learning_rate": 6.650964644709577e-05, + "loss": 0.1675, + "step": 51860 + }, + { + "epoch": 2.002780030116993, + "grad_norm": 0.6363991498947144, + "learning_rate": 6.648390542749399e-05, + "loss": 0.1565, + "step": 51870 + }, + { + "epoch": 2.00316614541102, + "grad_norm": 0.6243847012519836, + "learning_rate": 6.64581644078922e-05, + "loss": 0.3038, + "step": 51880 + }, + { + "epoch": 2.0035522607050464, + "grad_norm": 1.705432415008545, + "learning_rate": 6.643242338829041e-05, + "loss": 0.1347, + "step": 51890 + }, + { + "epoch": 2.0039383759990734, + "grad_norm": 0.8589022159576416, + "learning_rate": 6.640668236868863e-05, + "loss": 0.1867, + "step": 51900 + }, + { + "epoch": 2.0043244912931, + "grad_norm": 1.468563437461853, + "learning_rate": 6.638094134908684e-05, + "loss": 0.3603, + "step": 51910 + }, + { + "epoch": 2.004710606587127, + "grad_norm": 2.0505447387695312, + "learning_rate": 6.635520032948505e-05, + "loss": 0.3336, + "step": 51920 + }, + { + "epoch": 2.0050967218811535, + "grad_norm": 0.06167216598987579, + "learning_rate": 6.632945930988327e-05, + "loss": 0.0434, + "step": 51930 + }, + { + "epoch": 2.0054828371751805, + "grad_norm": 0.43672385811805725, + "learning_rate": 6.630371829028148e-05, + "loss": 0.2008, + "step": 51940 + }, + { + "epoch": 2.0058689524692075, + "grad_norm": 0.20339979231357574, + "learning_rate": 6.62779772706797e-05, + "loss": 0.1828, + 
"step": 51950 + }, + { + "epoch": 2.006255067763234, + "grad_norm": 0.9884247779846191, + "learning_rate": 6.625223625107791e-05, + "loss": 0.1913, + "step": 51960 + }, + { + "epoch": 2.006641183057261, + "grad_norm": 1.9355684518814087, + "learning_rate": 6.622649523147612e-05, + "loss": 0.2164, + "step": 51970 + }, + { + "epoch": 2.0070272983512876, + "grad_norm": 0.6647536754608154, + "learning_rate": 6.620075421187433e-05, + "loss": 0.1993, + "step": 51980 + }, + { + "epoch": 2.0074134136453146, + "grad_norm": 0.5795693397521973, + "learning_rate": 6.617501319227255e-05, + "loss": 0.1887, + "step": 51990 + }, + { + "epoch": 2.007799528939341, + "grad_norm": 1.605940341949463, + "learning_rate": 6.614927217267076e-05, + "loss": 0.247, + "step": 52000 + }, + { + "epoch": 2.008185644233368, + "grad_norm": 1.3359249830245972, + "learning_rate": 6.612353115306897e-05, + "loss": 0.2031, + "step": 52010 + }, + { + "epoch": 2.008571759527395, + "grad_norm": 3.889131784439087, + "learning_rate": 6.609779013346719e-05, + "loss": 0.1584, + "step": 52020 + }, + { + "epoch": 2.0089578748214216, + "grad_norm": 1.9775649309158325, + "learning_rate": 6.607204911386541e-05, + "loss": 0.1041, + "step": 52030 + }, + { + "epoch": 2.0093439901154486, + "grad_norm": 0.13655538856983185, + "learning_rate": 6.604630809426363e-05, + "loss": 0.1621, + "step": 52040 + }, + { + "epoch": 2.009730105409475, + "grad_norm": 0.659899115562439, + "learning_rate": 6.602056707466183e-05, + "loss": 0.1603, + "step": 52050 + }, + { + "epoch": 2.010116220703502, + "grad_norm": 0.5897572040557861, + "learning_rate": 6.599482605506004e-05, + "loss": 0.1807, + "step": 52060 + }, + { + "epoch": 2.0105023359975287, + "grad_norm": 1.9831010103225708, + "learning_rate": 6.596908503545825e-05, + "loss": 0.1517, + "step": 52070 + }, + { + "epoch": 2.0108884512915557, + "grad_norm": 0.16144464910030365, + "learning_rate": 6.594334401585647e-05, + "loss": 0.0999, + "step": 52080 + }, + { + "epoch": 2.0112745665855822, + "grad_norm": 1.6258141994476318, + "learning_rate": 6.591760299625468e-05, + "loss": 0.2152, + "step": 52090 + }, + { + "epoch": 2.0116606818796092, + "grad_norm": 1.0857526063919067, + "learning_rate": 6.589186197665291e-05, + "loss": 0.3286, + "step": 52100 + }, + { + "epoch": 2.012046797173636, + "grad_norm": 0.2918669581413269, + "learning_rate": 6.586612095705112e-05, + "loss": 0.0839, + "step": 52110 + }, + { + "epoch": 2.0124329124676628, + "grad_norm": 0.7994667887687683, + "learning_rate": 6.584037993744932e-05, + "loss": 0.0723, + "step": 52120 + }, + { + "epoch": 2.0128190277616897, + "grad_norm": 1.1056885719299316, + "learning_rate": 6.581463891784753e-05, + "loss": 0.2936, + "step": 52130 + }, + { + "epoch": 2.0132051430557163, + "grad_norm": 1.6628743410110474, + "learning_rate": 6.578889789824575e-05, + "loss": 0.1095, + "step": 52140 + }, + { + "epoch": 2.0135912583497433, + "grad_norm": 0.7514179348945618, + "learning_rate": 6.576315687864396e-05, + "loss": 0.2834, + "step": 52150 + }, + { + "epoch": 2.01397737364377, + "grad_norm": 0.17141447961330414, + "learning_rate": 6.573741585904219e-05, + "loss": 0.0834, + "step": 52160 + }, + { + "epoch": 2.014363488937797, + "grad_norm": 1.6377034187316895, + "learning_rate": 6.57116748394404e-05, + "loss": 0.1515, + "step": 52170 + }, + { + "epoch": 2.014749604231824, + "grad_norm": 2.604389190673828, + "learning_rate": 6.568593381983861e-05, + "loss": 0.0964, + "step": 52180 + }, + { + "epoch": 2.0151357195258504, + "grad_norm": 0.737349808216095, + 
"learning_rate": 6.566019280023681e-05, + "loss": 0.1861, + "step": 52190 + }, + { + "epoch": 2.0155218348198773, + "grad_norm": 0.499905526638031, + "learning_rate": 6.563445178063503e-05, + "loss": 0.2146, + "step": 52200 + }, + { + "epoch": 2.015907950113904, + "grad_norm": 1.0351229906082153, + "learning_rate": 6.560871076103324e-05, + "loss": 0.2461, + "step": 52210 + }, + { + "epoch": 2.016294065407931, + "grad_norm": 1.6009700298309326, + "learning_rate": 6.558296974143147e-05, + "loss": 0.2325, + "step": 52220 + }, + { + "epoch": 2.0166801807019574, + "grad_norm": 1.5281599760055542, + "learning_rate": 6.555722872182968e-05, + "loss": 0.2172, + "step": 52230 + }, + { + "epoch": 2.0170662959959844, + "grad_norm": 0.7039555907249451, + "learning_rate": 6.553148770222789e-05, + "loss": 0.3143, + "step": 52240 + }, + { + "epoch": 2.0174524112900114, + "grad_norm": 1.310943365097046, + "learning_rate": 6.55057466826261e-05, + "loss": 0.1785, + "step": 52250 + }, + { + "epoch": 2.017838526584038, + "grad_norm": 0.6066591143608093, + "learning_rate": 6.548000566302432e-05, + "loss": 0.1295, + "step": 52260 + }, + { + "epoch": 2.018224641878065, + "grad_norm": 2.3664653301239014, + "learning_rate": 6.545426464342252e-05, + "loss": 0.229, + "step": 52270 + }, + { + "epoch": 2.0186107571720915, + "grad_norm": 1.0997484922409058, + "learning_rate": 6.542852362382075e-05, + "loss": 0.1494, + "step": 52280 + }, + { + "epoch": 2.0189968724661185, + "grad_norm": 3.237204074859619, + "learning_rate": 6.540278260421896e-05, + "loss": 0.1952, + "step": 52290 + }, + { + "epoch": 2.019382987760145, + "grad_norm": 1.9585202932357788, + "learning_rate": 6.537704158461717e-05, + "loss": 0.1986, + "step": 52300 + }, + { + "epoch": 2.019769103054172, + "grad_norm": 0.18385589122772217, + "learning_rate": 6.535130056501539e-05, + "loss": 0.1373, + "step": 52310 + }, + { + "epoch": 2.0201552183481986, + "grad_norm": 1.4465802907943726, + "learning_rate": 6.53255595454136e-05, + "loss": 0.2492, + "step": 52320 + }, + { + "epoch": 2.0205413336422255, + "grad_norm": 0.3525356352329254, + "learning_rate": 6.529981852581181e-05, + "loss": 0.1046, + "step": 52330 + }, + { + "epoch": 2.0209274489362525, + "grad_norm": 0.10468830168247223, + "learning_rate": 6.527407750621003e-05, + "loss": 0.0702, + "step": 52340 + }, + { + "epoch": 2.021313564230279, + "grad_norm": 0.2023550570011139, + "learning_rate": 6.524833648660824e-05, + "loss": 0.2134, + "step": 52350 + }, + { + "epoch": 2.021699679524306, + "grad_norm": 1.8294217586517334, + "learning_rate": 6.522259546700645e-05, + "loss": 0.2443, + "step": 52360 + }, + { + "epoch": 2.0220857948183326, + "grad_norm": 0.3425254225730896, + "learning_rate": 6.519685444740467e-05, + "loss": 0.151, + "step": 52370 + }, + { + "epoch": 2.0224719101123596, + "grad_norm": 0.3099939227104187, + "learning_rate": 6.517111342780288e-05, + "loss": 0.1133, + "step": 52380 + }, + { + "epoch": 2.022858025406386, + "grad_norm": 2.0373382568359375, + "learning_rate": 6.514537240820109e-05, + "loss": 0.1773, + "step": 52390 + }, + { + "epoch": 2.023244140700413, + "grad_norm": 0.5817141532897949, + "learning_rate": 6.51196313885993e-05, + "loss": 0.1276, + "step": 52400 + }, + { + "epoch": 2.02363025599444, + "grad_norm": 2.0622966289520264, + "learning_rate": 6.509389036899752e-05, + "loss": 0.1889, + "step": 52410 + }, + { + "epoch": 2.0240163712884667, + "grad_norm": 1.1906920671463013, + "learning_rate": 6.506814934939573e-05, + "loss": 0.096, + "step": 52420 + }, + { + "epoch": 
2.0244024865824937, + "grad_norm": 0.1317962408065796, + "learning_rate": 6.504240832979395e-05, + "loss": 0.2232, + "step": 52430 + }, + { + "epoch": 2.02478860187652, + "grad_norm": 0.13029718399047852, + "learning_rate": 6.501666731019216e-05, + "loss": 0.1511, + "step": 52440 + }, + { + "epoch": 2.025174717170547, + "grad_norm": 0.793836772441864, + "learning_rate": 6.499092629059037e-05, + "loss": 0.1654, + "step": 52450 + }, + { + "epoch": 2.0255608324645737, + "grad_norm": 0.5743208527565002, + "learning_rate": 6.496518527098859e-05, + "loss": 0.2694, + "step": 52460 + }, + { + "epoch": 2.0259469477586007, + "grad_norm": 0.9897276759147644, + "learning_rate": 6.49394442513868e-05, + "loss": 0.2149, + "step": 52470 + }, + { + "epoch": 2.0263330630526277, + "grad_norm": 2.601984977722168, + "learning_rate": 6.491370323178501e-05, + "loss": 0.1285, + "step": 52480 + }, + { + "epoch": 2.0267191783466543, + "grad_norm": 1.3703612089157104, + "learning_rate": 6.488796221218323e-05, + "loss": 0.1226, + "step": 52490 + }, + { + "epoch": 2.0271052936406813, + "grad_norm": 0.8976957201957703, + "learning_rate": 6.486222119258144e-05, + "loss": 0.3069, + "step": 52500 + }, + { + "epoch": 2.027491408934708, + "grad_norm": 1.4867346286773682, + "learning_rate": 6.483648017297965e-05, + "loss": 0.1804, + "step": 52510 + }, + { + "epoch": 2.027877524228735, + "grad_norm": 0.9173004031181335, + "learning_rate": 6.481073915337787e-05, + "loss": 0.193, + "step": 52520 + }, + { + "epoch": 2.0282636395227613, + "grad_norm": 1.4100719690322876, + "learning_rate": 6.478499813377609e-05, + "loss": 0.2443, + "step": 52530 + }, + { + "epoch": 2.0286497548167883, + "grad_norm": 0.08620387315750122, + "learning_rate": 6.475925711417429e-05, + "loss": 0.2858, + "step": 52540 + }, + { + "epoch": 2.029035870110815, + "grad_norm": 0.6817231178283691, + "learning_rate": 6.47335160945725e-05, + "loss": 0.1421, + "step": 52550 + }, + { + "epoch": 2.029421985404842, + "grad_norm": 0.7937541604042053, + "learning_rate": 6.470777507497072e-05, + "loss": 0.1856, + "step": 52560 + }, + { + "epoch": 2.029808100698869, + "grad_norm": 0.3391193151473999, + "learning_rate": 6.468203405536893e-05, + "loss": 0.2714, + "step": 52570 + }, + { + "epoch": 2.0301942159928954, + "grad_norm": 0.45617592334747314, + "learning_rate": 6.465629303576715e-05, + "loss": 0.1528, + "step": 52580 + }, + { + "epoch": 2.0305803312869224, + "grad_norm": 0.6412602663040161, + "learning_rate": 6.463055201616537e-05, + "loss": 0.0908, + "step": 52590 + }, + { + "epoch": 2.030966446580949, + "grad_norm": 0.5588594079017639, + "learning_rate": 6.460481099656359e-05, + "loss": 0.1448, + "step": 52600 + }, + { + "epoch": 2.031352561874976, + "grad_norm": 0.5726466774940491, + "learning_rate": 6.457906997696179e-05, + "loss": 0.2771, + "step": 52610 + }, + { + "epoch": 2.0317386771690025, + "grad_norm": 1.625110387802124, + "learning_rate": 6.455332895736e-05, + "loss": 0.1147, + "step": 52620 + }, + { + "epoch": 2.0321247924630295, + "grad_norm": 0.9171527624130249, + "learning_rate": 6.452758793775821e-05, + "loss": 0.0847, + "step": 52630 + }, + { + "epoch": 2.0325109077570565, + "grad_norm": 0.1905253678560257, + "learning_rate": 6.450184691815643e-05, + "loss": 0.2086, + "step": 52640 + }, + { + "epoch": 2.032897023051083, + "grad_norm": 0.2381320744752884, + "learning_rate": 6.447610589855464e-05, + "loss": 0.1767, + "step": 52650 + }, + { + "epoch": 2.03328313834511, + "grad_norm": 2.2482104301452637, + "learning_rate": 6.445036487895287e-05, + 
"loss": 0.2626, + "step": 52660 + }, + { + "epoch": 2.0336692536391365, + "grad_norm": 2.5901641845703125, + "learning_rate": 6.442462385935108e-05, + "loss": 0.21, + "step": 52670 + }, + { + "epoch": 2.0340553689331635, + "grad_norm": 3.242913007736206, + "learning_rate": 6.439888283974929e-05, + "loss": 0.3508, + "step": 52680 + }, + { + "epoch": 2.03444148422719, + "grad_norm": 0.12099716812372208, + "learning_rate": 6.437314182014749e-05, + "loss": 0.1109, + "step": 52690 + }, + { + "epoch": 2.034827599521217, + "grad_norm": 1.384753704071045, + "learning_rate": 6.43474008005457e-05, + "loss": 0.2023, + "step": 52700 + }, + { + "epoch": 2.035213714815244, + "grad_norm": 1.1507978439331055, + "learning_rate": 6.432165978094392e-05, + "loss": 0.2656, + "step": 52710 + }, + { + "epoch": 2.0355998301092706, + "grad_norm": 0.43186309933662415, + "learning_rate": 6.429591876134215e-05, + "loss": 0.2265, + "step": 52720 + }, + { + "epoch": 2.0359859454032976, + "grad_norm": 1.2791942358016968, + "learning_rate": 6.427017774174036e-05, + "loss": 0.189, + "step": 52730 + }, + { + "epoch": 2.036372060697324, + "grad_norm": 2.613863229751587, + "learning_rate": 6.424443672213857e-05, + "loss": 0.1636, + "step": 52740 + }, + { + "epoch": 2.036758175991351, + "grad_norm": 0.9245595932006836, + "learning_rate": 6.421869570253679e-05, + "loss": 0.2844, + "step": 52750 + }, + { + "epoch": 2.0371442912853777, + "grad_norm": 0.5449417233467102, + "learning_rate": 6.419295468293499e-05, + "loss": 0.24, + "step": 52760 + }, + { + "epoch": 2.0375304065794047, + "grad_norm": 0.24012671411037445, + "learning_rate": 6.41672136633332e-05, + "loss": 0.226, + "step": 52770 + }, + { + "epoch": 2.037916521873431, + "grad_norm": 0.07777285575866699, + "learning_rate": 6.414147264373143e-05, + "loss": 0.1351, + "step": 52780 + }, + { + "epoch": 2.038302637167458, + "grad_norm": 1.4423730373382568, + "learning_rate": 6.411573162412964e-05, + "loss": 0.1358, + "step": 52790 + }, + { + "epoch": 2.038688752461485, + "grad_norm": 0.9842507243156433, + "learning_rate": 6.408999060452785e-05, + "loss": 0.1947, + "step": 52800 + }, + { + "epoch": 2.0390748677555117, + "grad_norm": 0.23728783428668976, + "learning_rate": 6.406424958492607e-05, + "loss": 0.1655, + "step": 52810 + }, + { + "epoch": 2.0394609830495387, + "grad_norm": 1.0350521802902222, + "learning_rate": 6.403850856532428e-05, + "loss": 0.2269, + "step": 52820 + }, + { + "epoch": 2.0398470983435653, + "grad_norm": 0.7008020877838135, + "learning_rate": 6.401276754572248e-05, + "loss": 0.0694, + "step": 52830 + }, + { + "epoch": 2.0402332136375922, + "grad_norm": 0.2844768464565277, + "learning_rate": 6.39870265261207e-05, + "loss": 0.2871, + "step": 52840 + }, + { + "epoch": 2.040619328931619, + "grad_norm": 0.10209562629461288, + "learning_rate": 6.396128550651892e-05, + "loss": 0.1556, + "step": 52850 + }, + { + "epoch": 2.041005444225646, + "grad_norm": 0.3833054006099701, + "learning_rate": 6.393554448691713e-05, + "loss": 0.2414, + "step": 52860 + }, + { + "epoch": 2.0413915595196728, + "grad_norm": 1.023119330406189, + "learning_rate": 6.390980346731535e-05, + "loss": 0.3514, + "step": 52870 + }, + { + "epoch": 2.0417776748136993, + "grad_norm": 0.2640077769756317, + "learning_rate": 6.388406244771356e-05, + "loss": 0.1651, + "step": 52880 + }, + { + "epoch": 2.0421637901077263, + "grad_norm": 0.5331079363822937, + "learning_rate": 6.385832142811177e-05, + "loss": 0.225, + "step": 52890 + }, + { + "epoch": 2.042549905401753, + "grad_norm": 
1.8062942028045654, + "learning_rate": 6.383258040850999e-05, + "loss": 0.145, + "step": 52900 + }, + { + "epoch": 2.04293602069578, + "grad_norm": 0.5096505284309387, + "learning_rate": 6.38068393889082e-05, + "loss": 0.1353, + "step": 52910 + }, + { + "epoch": 2.0433221359898064, + "grad_norm": 0.6749324798583984, + "learning_rate": 6.378109836930641e-05, + "loss": 0.2563, + "step": 52920 + }, + { + "epoch": 2.0437082512838334, + "grad_norm": 2.487567186355591, + "learning_rate": 6.375535734970463e-05, + "loss": 0.1616, + "step": 52930 + }, + { + "epoch": 2.0440943665778604, + "grad_norm": 0.7904801964759827, + "learning_rate": 6.372961633010284e-05, + "loss": 0.1579, + "step": 52940 + }, + { + "epoch": 2.044480481871887, + "grad_norm": 2.078003406524658, + "learning_rate": 6.370387531050105e-05, + "loss": 0.2368, + "step": 52950 + }, + { + "epoch": 2.044866597165914, + "grad_norm": 0.778519332408905, + "learning_rate": 6.367813429089927e-05, + "loss": 0.1509, + "step": 52960 + }, + { + "epoch": 2.0452527124599404, + "grad_norm": 0.919970691204071, + "learning_rate": 6.365239327129748e-05, + "loss": 0.1493, + "step": 52970 + }, + { + "epoch": 2.0456388277539674, + "grad_norm": 0.06400478631258011, + "learning_rate": 6.362665225169569e-05, + "loss": 0.2599, + "step": 52980 + }, + { + "epoch": 2.046024943047994, + "grad_norm": 3.479253053665161, + "learning_rate": 6.36009112320939e-05, + "loss": 0.2582, + "step": 52990 + }, + { + "epoch": 2.046411058342021, + "grad_norm": 0.5447397232055664, + "learning_rate": 6.357517021249212e-05, + "loss": 0.0906, + "step": 53000 + }, + { + "epoch": 2.0467971736360475, + "grad_norm": 1.1518877744674683, + "learning_rate": 6.354942919289033e-05, + "loss": 0.1255, + "step": 53010 + }, + { + "epoch": 2.0471832889300745, + "grad_norm": 1.782228708267212, + "learning_rate": 6.352368817328855e-05, + "loss": 0.4931, + "step": 53020 + }, + { + "epoch": 2.0475694042241015, + "grad_norm": 1.1501574516296387, + "learning_rate": 6.349794715368677e-05, + "loss": 0.1242, + "step": 53030 + }, + { + "epoch": 2.047955519518128, + "grad_norm": 2.69543719291687, + "learning_rate": 6.347220613408497e-05, + "loss": 0.2079, + "step": 53040 + }, + { + "epoch": 2.048341634812155, + "grad_norm": 0.6990146040916443, + "learning_rate": 6.344646511448318e-05, + "loss": 0.1033, + "step": 53050 + }, + { + "epoch": 2.0487277501061816, + "grad_norm": 2.3171396255493164, + "learning_rate": 6.34207240948814e-05, + "loss": 0.2182, + "step": 53060 + }, + { + "epoch": 2.0491138654002086, + "grad_norm": 0.11202628165483475, + "learning_rate": 6.339498307527961e-05, + "loss": 0.1862, + "step": 53070 + }, + { + "epoch": 2.049499980694235, + "grad_norm": 0.0545661523938179, + "learning_rate": 6.336924205567782e-05, + "loss": 0.1641, + "step": 53080 + }, + { + "epoch": 2.049886095988262, + "grad_norm": 0.9861188530921936, + "learning_rate": 6.334350103607605e-05, + "loss": 0.1862, + "step": 53090 + }, + { + "epoch": 2.050272211282289, + "grad_norm": 0.6137722730636597, + "learning_rate": 6.331776001647427e-05, + "loss": 0.1213, + "step": 53100 + }, + { + "epoch": 2.0506583265763156, + "grad_norm": 0.3138205409049988, + "learning_rate": 6.329201899687246e-05, + "loss": 0.2589, + "step": 53110 + }, + { + "epoch": 2.0510444418703426, + "grad_norm": 1.4361293315887451, + "learning_rate": 6.326627797727068e-05, + "loss": 0.0854, + "step": 53120 + }, + { + "epoch": 2.051430557164369, + "grad_norm": 0.18099132180213928, + "learning_rate": 6.324053695766889e-05, + "loss": 0.173, + "step": 53130 + }, + 
{ + "epoch": 2.051816672458396, + "grad_norm": 1.630255103111267, + "learning_rate": 6.32147959380671e-05, + "loss": 0.2573, + "step": 53140 + }, + { + "epoch": 2.0522027877524227, + "grad_norm": 1.0745834112167358, + "learning_rate": 6.318905491846532e-05, + "loss": 0.1249, + "step": 53150 + }, + { + "epoch": 2.0525889030464497, + "grad_norm": 0.15289072692394257, + "learning_rate": 6.316331389886354e-05, + "loss": 0.204, + "step": 53160 + }, + { + "epoch": 2.0529750183404767, + "grad_norm": 0.7459616661071777, + "learning_rate": 6.313757287926176e-05, + "loss": 0.1632, + "step": 53170 + }, + { + "epoch": 2.0533611336345032, + "grad_norm": 2.7093374729156494, + "learning_rate": 6.311183185965996e-05, + "loss": 0.1477, + "step": 53180 + }, + { + "epoch": 2.0537472489285302, + "grad_norm": 0.1735665500164032, + "learning_rate": 6.308609084005817e-05, + "loss": 0.1736, + "step": 53190 + }, + { + "epoch": 2.0541333642225568, + "grad_norm": 0.3297293186187744, + "learning_rate": 6.306034982045638e-05, + "loss": 0.0974, + "step": 53200 + }, + { + "epoch": 2.0545194795165838, + "grad_norm": 0.023612016811966896, + "learning_rate": 6.30346088008546e-05, + "loss": 0.1451, + "step": 53210 + }, + { + "epoch": 2.0549055948106103, + "grad_norm": 1.3713831901550293, + "learning_rate": 6.300886778125282e-05, + "loss": 0.1103, + "step": 53220 + }, + { + "epoch": 2.0552917101046373, + "grad_norm": 0.5888402462005615, + "learning_rate": 6.298312676165104e-05, + "loss": 0.1708, + "step": 53230 + }, + { + "epoch": 2.055677825398664, + "grad_norm": 1.8231903314590454, + "learning_rate": 6.295738574204925e-05, + "loss": 0.2337, + "step": 53240 + }, + { + "epoch": 2.056063940692691, + "grad_norm": 0.5738593935966492, + "learning_rate": 6.293164472244746e-05, + "loss": 0.1951, + "step": 53250 + }, + { + "epoch": 2.056450055986718, + "grad_norm": 0.13595734536647797, + "learning_rate": 6.290590370284566e-05, + "loss": 0.1381, + "step": 53260 + }, + { + "epoch": 2.0568361712807444, + "grad_norm": 0.9126518368721008, + "learning_rate": 6.288016268324388e-05, + "loss": 0.1247, + "step": 53270 + }, + { + "epoch": 2.0572222865747714, + "grad_norm": 0.5245015621185303, + "learning_rate": 6.28544216636421e-05, + "loss": 0.2851, + "step": 53280 + }, + { + "epoch": 2.057608401868798, + "grad_norm": 0.08704059571027756, + "learning_rate": 6.282868064404032e-05, + "loss": 0.1425, + "step": 53290 + }, + { + "epoch": 2.057994517162825, + "grad_norm": 0.3915903866291046, + "learning_rate": 6.280293962443853e-05, + "loss": 0.1683, + "step": 53300 + }, + { + "epoch": 2.0583806324568514, + "grad_norm": 1.5422857999801636, + "learning_rate": 6.277719860483674e-05, + "loss": 0.178, + "step": 53310 + }, + { + "epoch": 2.0587667477508784, + "grad_norm": 0.6210331320762634, + "learning_rate": 6.275145758523496e-05, + "loss": 0.1279, + "step": 53320 + }, + { + "epoch": 2.0591528630449054, + "grad_norm": 1.8133548498153687, + "learning_rate": 6.272571656563316e-05, + "loss": 0.1276, + "step": 53330 + }, + { + "epoch": 2.059538978338932, + "grad_norm": 1.8391001224517822, + "learning_rate": 6.269997554603138e-05, + "loss": 0.1533, + "step": 53340 + }, + { + "epoch": 2.059925093632959, + "grad_norm": 0.48838570713996887, + "learning_rate": 6.26742345264296e-05, + "loss": 0.1334, + "step": 53350 + }, + { + "epoch": 2.0603112089269855, + "grad_norm": 1.6755399703979492, + "learning_rate": 6.264849350682781e-05, + "loss": 0.1874, + "step": 53360 + }, + { + "epoch": 2.0606973242210125, + "grad_norm": 0.6416677236557007, + "learning_rate": 
6.262275248722602e-05, + "loss": 0.0775, + "step": 53370 + }, + { + "epoch": 2.061083439515039, + "grad_norm": 1.258190393447876, + "learning_rate": 6.259701146762424e-05, + "loss": 0.2183, + "step": 53380 + }, + { + "epoch": 2.061469554809066, + "grad_norm": 1.543530821800232, + "learning_rate": 6.257127044802245e-05, + "loss": 0.2124, + "step": 53390 + }, + { + "epoch": 2.0618556701030926, + "grad_norm": 1.7885076999664307, + "learning_rate": 6.254552942842065e-05, + "loss": 0.1746, + "step": 53400 + }, + { + "epoch": 2.0622417853971196, + "grad_norm": 0.7699170708656311, + "learning_rate": 6.251978840881888e-05, + "loss": 0.0489, + "step": 53410 + }, + { + "epoch": 2.0626279006911465, + "grad_norm": 0.6853735446929932, + "learning_rate": 6.249404738921709e-05, + "loss": 0.0828, + "step": 53420 + }, + { + "epoch": 2.063014015985173, + "grad_norm": 1.250551462173462, + "learning_rate": 6.24683063696153e-05, + "loss": 0.1361, + "step": 53430 + }, + { + "epoch": 2.0634001312792, + "grad_norm": 0.20269331336021423, + "learning_rate": 6.244256535001352e-05, + "loss": 0.1916, + "step": 53440 + }, + { + "epoch": 2.0637862465732266, + "grad_norm": 0.868029773235321, + "learning_rate": 6.241682433041173e-05, + "loss": 0.1665, + "step": 53450 + }, + { + "epoch": 2.0641723618672536, + "grad_norm": 0.5697484016418457, + "learning_rate": 6.239108331080994e-05, + "loss": 0.1873, + "step": 53460 + }, + { + "epoch": 2.06455847716128, + "grad_norm": 2.3145039081573486, + "learning_rate": 6.236534229120816e-05, + "loss": 0.3618, + "step": 53470 + }, + { + "epoch": 2.064944592455307, + "grad_norm": 0.3517427444458008, + "learning_rate": 6.233960127160637e-05, + "loss": 0.1844, + "step": 53480 + }, + { + "epoch": 2.065330707749334, + "grad_norm": 0.6968696117401123, + "learning_rate": 6.231386025200458e-05, + "loss": 0.1229, + "step": 53490 + }, + { + "epoch": 2.0657168230433607, + "grad_norm": 1.4328505992889404, + "learning_rate": 6.22881192324028e-05, + "loss": 0.1921, + "step": 53500 + }, + { + "epoch": 2.0661029383373877, + "grad_norm": 0.0647716224193573, + "learning_rate": 6.226237821280101e-05, + "loss": 0.1157, + "step": 53510 + }, + { + "epoch": 2.066489053631414, + "grad_norm": 0.1405775398015976, + "learning_rate": 6.223663719319922e-05, + "loss": 0.1593, + "step": 53520 + }, + { + "epoch": 2.066875168925441, + "grad_norm": 0.8191878795623779, + "learning_rate": 6.221089617359744e-05, + "loss": 0.3237, + "step": 53530 + }, + { + "epoch": 2.0672612842194678, + "grad_norm": 0.16709165275096893, + "learning_rate": 6.218515515399565e-05, + "loss": 0.1098, + "step": 53540 + }, + { + "epoch": 2.0676473995134947, + "grad_norm": 1.2044661045074463, + "learning_rate": 6.215941413439386e-05, + "loss": 0.2014, + "step": 53550 + }, + { + "epoch": 2.0680335148075217, + "grad_norm": 0.7808303236961365, + "learning_rate": 6.213367311479208e-05, + "loss": 0.1122, + "step": 53560 + }, + { + "epoch": 2.0684196301015483, + "grad_norm": 0.8508942723274231, + "learning_rate": 6.210793209519029e-05, + "loss": 0.2162, + "step": 53570 + }, + { + "epoch": 2.0688057453955753, + "grad_norm": 0.1512947827577591, + "learning_rate": 6.20821910755885e-05, + "loss": 0.1382, + "step": 53580 + }, + { + "epoch": 2.069191860689602, + "grad_norm": 0.10546062141656876, + "learning_rate": 6.205645005598673e-05, + "loss": 0.0962, + "step": 53590 + }, + { + "epoch": 2.069577975983629, + "grad_norm": 0.7787978053092957, + "learning_rate": 6.203070903638494e-05, + "loss": 0.2142, + "step": 53600 + }, + { + "epoch": 2.0699640912776553, + 
"grad_norm": 0.1247776597738266, + "learning_rate": 6.200496801678314e-05, + "loss": 0.1788, + "step": 53610 + }, + { + "epoch": 2.0703502065716823, + "grad_norm": 1.5636411905288696, + "learning_rate": 6.197922699718136e-05, + "loss": 0.2244, + "step": 53620 + }, + { + "epoch": 2.0707363218657093, + "grad_norm": 1.5239825248718262, + "learning_rate": 6.195348597757957e-05, + "loss": 0.152, + "step": 53630 + }, + { + "epoch": 2.071122437159736, + "grad_norm": 0.6676948070526123, + "learning_rate": 6.192774495797778e-05, + "loss": 0.1007, + "step": 53640 + }, + { + "epoch": 2.071508552453763, + "grad_norm": 2.0146231651306152, + "learning_rate": 6.190200393837601e-05, + "loss": 0.1881, + "step": 53650 + }, + { + "epoch": 2.0718946677477894, + "grad_norm": 1.3102779388427734, + "learning_rate": 6.187626291877422e-05, + "loss": 0.2036, + "step": 53660 + }, + { + "epoch": 2.0722807830418164, + "grad_norm": 1.230490803718567, + "learning_rate": 6.185052189917244e-05, + "loss": 0.235, + "step": 53670 + }, + { + "epoch": 2.072666898335843, + "grad_norm": 1.2379542589187622, + "learning_rate": 6.182478087957064e-05, + "loss": 0.2005, + "step": 53680 + }, + { + "epoch": 2.07305301362987, + "grad_norm": 0.9768775701522827, + "learning_rate": 6.179903985996885e-05, + "loss": 0.1885, + "step": 53690 + }, + { + "epoch": 2.0734391289238965, + "grad_norm": 1.2973617315292358, + "learning_rate": 6.177329884036706e-05, + "loss": 0.1218, + "step": 53700 + }, + { + "epoch": 2.0738252442179235, + "grad_norm": 0.6314378380775452, + "learning_rate": 6.174755782076528e-05, + "loss": 0.2194, + "step": 53710 + }, + { + "epoch": 2.0742113595119505, + "grad_norm": 1.7972043752670288, + "learning_rate": 6.17218168011635e-05, + "loss": 0.2668, + "step": 53720 + }, + { + "epoch": 2.074597474805977, + "grad_norm": 1.1307156085968018, + "learning_rate": 6.169607578156172e-05, + "loss": 0.2094, + "step": 53730 + }, + { + "epoch": 2.074983590100004, + "grad_norm": 0.07750687003135681, + "learning_rate": 6.167033476195993e-05, + "loss": 0.1556, + "step": 53740 + }, + { + "epoch": 2.0753697053940305, + "grad_norm": 0.33502569794654846, + "learning_rate": 6.164459374235813e-05, + "loss": 0.1849, + "step": 53750 + }, + { + "epoch": 2.0757558206880575, + "grad_norm": 0.462332546710968, + "learning_rate": 6.161885272275634e-05, + "loss": 0.1704, + "step": 53760 + }, + { + "epoch": 2.076141935982084, + "grad_norm": 0.7019705772399902, + "learning_rate": 6.159311170315456e-05, + "loss": 0.155, + "step": 53770 + }, + { + "epoch": 2.076528051276111, + "grad_norm": 1.2807412147521973, + "learning_rate": 6.156737068355278e-05, + "loss": 0.2956, + "step": 53780 + }, + { + "epoch": 2.076914166570138, + "grad_norm": 0.9197677373886108, + "learning_rate": 6.1541629663951e-05, + "loss": 0.0723, + "step": 53790 + }, + { + "epoch": 2.0773002818641646, + "grad_norm": 0.15677478909492493, + "learning_rate": 6.151588864434921e-05, + "loss": 0.2835, + "step": 53800 + }, + { + "epoch": 2.0776863971581916, + "grad_norm": 1.7673814296722412, + "learning_rate": 6.149014762474742e-05, + "loss": 0.1127, + "step": 53810 + }, + { + "epoch": 2.078072512452218, + "grad_norm": 1.9662418365478516, + "learning_rate": 6.146440660514562e-05, + "loss": 0.1314, + "step": 53820 + }, + { + "epoch": 2.078458627746245, + "grad_norm": 0.11253755539655685, + "learning_rate": 6.143866558554384e-05, + "loss": 0.2855, + "step": 53830 + }, + { + "epoch": 2.0788447430402717, + "grad_norm": 2.4275155067443848, + "learning_rate": 6.141292456594206e-05, + "loss": 0.2794, + 
"step": 53840 + }, + { + "epoch": 2.0792308583342987, + "grad_norm": 0.4845966100692749, + "learning_rate": 6.138718354634028e-05, + "loss": 0.1129, + "step": 53850 + }, + { + "epoch": 2.079616973628325, + "grad_norm": 0.2127775400876999, + "learning_rate": 6.136144252673849e-05, + "loss": 0.1089, + "step": 53860 + }, + { + "epoch": 2.080003088922352, + "grad_norm": 1.554506778717041, + "learning_rate": 6.13357015071367e-05, + "loss": 0.1196, + "step": 53870 + }, + { + "epoch": 2.080389204216379, + "grad_norm": 0.08301983028650284, + "learning_rate": 6.130996048753492e-05, + "loss": 0.3858, + "step": 53880 + }, + { + "epoch": 2.0807753195104057, + "grad_norm": 1.0547988414764404, + "learning_rate": 6.128421946793313e-05, + "loss": 0.1348, + "step": 53890 + }, + { + "epoch": 2.0811614348044327, + "grad_norm": 0.3954383432865143, + "learning_rate": 6.125847844833134e-05, + "loss": 0.1347, + "step": 53900 + }, + { + "epoch": 2.0815475500984593, + "grad_norm": 0.25809749960899353, + "learning_rate": 6.123273742872956e-05, + "loss": 0.4701, + "step": 53910 + }, + { + "epoch": 2.0819336653924863, + "grad_norm": 0.9337195754051208, + "learning_rate": 6.120699640912777e-05, + "loss": 0.1335, + "step": 53920 + }, + { + "epoch": 2.082319780686513, + "grad_norm": 0.9139271378517151, + "learning_rate": 6.118125538952598e-05, + "loss": 0.1095, + "step": 53930 + }, + { + "epoch": 2.08270589598054, + "grad_norm": 0.31789037585258484, + "learning_rate": 6.11555143699242e-05, + "loss": 0.1116, + "step": 53940 + }, + { + "epoch": 2.083092011274567, + "grad_norm": 1.4585286378860474, + "learning_rate": 6.112977335032241e-05, + "loss": 0.2283, + "step": 53950 + }, + { + "epoch": 2.0834781265685933, + "grad_norm": 1.2569290399551392, + "learning_rate": 6.110403233072062e-05, + "loss": 0.238, + "step": 53960 + }, + { + "epoch": 2.0838642418626203, + "grad_norm": 0.08168485015630722, + "learning_rate": 6.107829131111884e-05, + "loss": 0.0965, + "step": 53970 + }, + { + "epoch": 2.084250357156647, + "grad_norm": 0.42119330167770386, + "learning_rate": 6.105255029151705e-05, + "loss": 0.288, + "step": 53980 + }, + { + "epoch": 2.084636472450674, + "grad_norm": 0.04520781710743904, + "learning_rate": 6.102680927191526e-05, + "loss": 0.1616, + "step": 53990 + }, + { + "epoch": 2.0850225877447004, + "grad_norm": 0.9019898176193237, + "learning_rate": 6.100106825231348e-05, + "loss": 0.1313, + "step": 54000 + }, + { + "epoch": 2.0854087030387274, + "grad_norm": 2.3572826385498047, + "learning_rate": 6.097532723271169e-05, + "loss": 0.219, + "step": 54010 + }, + { + "epoch": 2.0857948183327544, + "grad_norm": 0.6147291660308838, + "learning_rate": 6.0949586213109896e-05, + "loss": 0.1005, + "step": 54020 + }, + { + "epoch": 2.086180933626781, + "grad_norm": 0.3989221751689911, + "learning_rate": 6.092384519350812e-05, + "loss": 0.1923, + "step": 54030 + }, + { + "epoch": 2.086567048920808, + "grad_norm": 0.04854296147823334, + "learning_rate": 6.089810417390634e-05, + "loss": 0.3081, + "step": 54040 + }, + { + "epoch": 2.0869531642148345, + "grad_norm": 1.1020113229751587, + "learning_rate": 6.087236315430454e-05, + "loss": 0.1325, + "step": 54050 + }, + { + "epoch": 2.0873392795088614, + "grad_norm": 1.2404685020446777, + "learning_rate": 6.0846622134702756e-05, + "loss": 0.2657, + "step": 54060 + }, + { + "epoch": 2.087725394802888, + "grad_norm": 0.5539906620979309, + "learning_rate": 6.082088111510097e-05, + "loss": 0.1379, + "step": 54070 + }, + { + "epoch": 2.088111510096915, + "grad_norm": 0.550548791885376, + 
"learning_rate": 6.079514009549918e-05, + "loss": 0.2004, + "step": 54080 + }, + { + "epoch": 2.0884976253909415, + "grad_norm": 0.5012397766113281, + "learning_rate": 6.07693990758974e-05, + "loss": 0.1582, + "step": 54090 + }, + { + "epoch": 2.0888837406849685, + "grad_norm": 0.7319992780685425, + "learning_rate": 6.0743658056295617e-05, + "loss": 0.1923, + "step": 54100 + }, + { + "epoch": 2.0892698559789955, + "grad_norm": 0.05006573721766472, + "learning_rate": 6.071791703669383e-05, + "loss": 0.2457, + "step": 54110 + }, + { + "epoch": 2.089655971273022, + "grad_norm": 0.06044507771730423, + "learning_rate": 6.0692176017092036e-05, + "loss": 0.1328, + "step": 54120 + }, + { + "epoch": 2.090042086567049, + "grad_norm": 1.4530203342437744, + "learning_rate": 6.066643499749025e-05, + "loss": 0.1344, + "step": 54130 + }, + { + "epoch": 2.0904282018610756, + "grad_norm": 1.0805295705795288, + "learning_rate": 6.064069397788846e-05, + "loss": 0.137, + "step": 54140 + }, + { + "epoch": 2.0908143171551026, + "grad_norm": 0.7313231825828552, + "learning_rate": 6.061495295828668e-05, + "loss": 0.1859, + "step": 54150 + }, + { + "epoch": 2.091200432449129, + "grad_norm": 1.4634814262390137, + "learning_rate": 6.0589211938684896e-05, + "loss": 0.1396, + "step": 54160 + }, + { + "epoch": 2.091586547743156, + "grad_norm": 1.1281847953796387, + "learning_rate": 6.056347091908311e-05, + "loss": 0.1935, + "step": 54170 + }, + { + "epoch": 2.091972663037183, + "grad_norm": 0.3002813756465912, + "learning_rate": 6.053772989948132e-05, + "loss": 0.2053, + "step": 54180 + }, + { + "epoch": 2.0923587783312096, + "grad_norm": 1.521639347076416, + "learning_rate": 6.0511988879879536e-05, + "loss": 0.2228, + "step": 54190 + }, + { + "epoch": 2.0927448936252366, + "grad_norm": 1.6338810920715332, + "learning_rate": 6.048624786027774e-05, + "loss": 0.0525, + "step": 54200 + }, + { + "epoch": 2.093131008919263, + "grad_norm": 1.9877723455429077, + "learning_rate": 6.0460506840675956e-05, + "loss": 0.3764, + "step": 54210 + }, + { + "epoch": 2.09351712421329, + "grad_norm": 1.1026666164398193, + "learning_rate": 6.0434765821074176e-05, + "loss": 0.0845, + "step": 54220 + }, + { + "epoch": 2.0939032395073167, + "grad_norm": 0.17072628438472748, + "learning_rate": 6.040902480147239e-05, + "loss": 0.2619, + "step": 54230 + }, + { + "epoch": 2.0942893548013437, + "grad_norm": 1.0765973329544067, + "learning_rate": 6.03832837818706e-05, + "loss": 0.2227, + "step": 54240 + }, + { + "epoch": 2.0946754700953703, + "grad_norm": 0.10422563552856445, + "learning_rate": 6.0357542762268816e-05, + "loss": 0.1371, + "step": 54250 + }, + { + "epoch": 2.0950615853893972, + "grad_norm": 0.7437000870704651, + "learning_rate": 6.033180174266703e-05, + "loss": 0.0981, + "step": 54260 + }, + { + "epoch": 2.0954477006834242, + "grad_norm": 0.12045181542634964, + "learning_rate": 6.0306060723065236e-05, + "loss": 0.1583, + "step": 54270 + }, + { + "epoch": 2.0958338159774508, + "grad_norm": 0.5264570713043213, + "learning_rate": 6.028031970346346e-05, + "loss": 0.2561, + "step": 54280 + }, + { + "epoch": 2.0962199312714778, + "grad_norm": 0.8104095458984375, + "learning_rate": 6.0254578683861676e-05, + "loss": 0.1338, + "step": 54290 + }, + { + "epoch": 2.0966060465655043, + "grad_norm": 1.8734989166259766, + "learning_rate": 6.022883766425988e-05, + "loss": 0.1911, + "step": 54300 + }, + { + "epoch": 2.0969921618595313, + "grad_norm": 0.09730927646160126, + "learning_rate": 6.0203096644658096e-05, + "loss": 0.2272, + "step": 54310 + 
}, + { + "epoch": 2.097378277153558, + "grad_norm": 2.5745980739593506, + "learning_rate": 6.017735562505631e-05, + "loss": 0.2252, + "step": 54320 + }, + { + "epoch": 2.097764392447585, + "grad_norm": 0.4371737539768219, + "learning_rate": 6.015161460545452e-05, + "loss": 0.1397, + "step": 54330 + }, + { + "epoch": 2.098150507741612, + "grad_norm": 0.267517626285553, + "learning_rate": 6.012587358585274e-05, + "loss": 0.0745, + "step": 54340 + }, + { + "epoch": 2.0985366230356384, + "grad_norm": 0.8187986016273499, + "learning_rate": 6.0100132566250956e-05, + "loss": 0.2474, + "step": 54350 + }, + { + "epoch": 2.0989227383296654, + "grad_norm": 1.1416966915130615, + "learning_rate": 6.007439154664917e-05, + "loss": 0.1445, + "step": 54360 + }, + { + "epoch": 2.099308853623692, + "grad_norm": 0.4858175814151764, + "learning_rate": 6.0048650527047376e-05, + "loss": 0.1115, + "step": 54370 + }, + { + "epoch": 2.099694968917719, + "grad_norm": 0.17463591694831848, + "learning_rate": 6.002290950744559e-05, + "loss": 0.128, + "step": 54380 + }, + { + "epoch": 2.1000810842117454, + "grad_norm": 1.2394402027130127, + "learning_rate": 5.99971684878438e-05, + "loss": 0.2724, + "step": 54390 + }, + { + "epoch": 2.1004671995057724, + "grad_norm": 0.2638779282569885, + "learning_rate": 5.997142746824202e-05, + "loss": 0.0761, + "step": 54400 + }, + { + "epoch": 2.1008533147997994, + "grad_norm": 0.34836921095848083, + "learning_rate": 5.9945686448640236e-05, + "loss": 0.1303, + "step": 54410 + }, + { + "epoch": 2.101239430093826, + "grad_norm": 1.0886906385421753, + "learning_rate": 5.991994542903845e-05, + "loss": 0.1448, + "step": 54420 + }, + { + "epoch": 2.101625545387853, + "grad_norm": 1.4876662492752075, + "learning_rate": 5.989420440943666e-05, + "loss": 0.1355, + "step": 54430 + }, + { + "epoch": 2.1020116606818795, + "grad_norm": 0.5606863498687744, + "learning_rate": 5.986846338983487e-05, + "loss": 0.2719, + "step": 54440 + }, + { + "epoch": 2.1023977759759065, + "grad_norm": 2.005791664123535, + "learning_rate": 5.984272237023308e-05, + "loss": 0.2387, + "step": 54450 + }, + { + "epoch": 2.102783891269933, + "grad_norm": 1.1652408838272095, + "learning_rate": 5.9816981350631296e-05, + "loss": 0.2491, + "step": 54460 + }, + { + "epoch": 2.10317000656396, + "grad_norm": 1.9928478002548218, + "learning_rate": 5.9791240331029516e-05, + "loss": 0.2429, + "step": 54470 + }, + { + "epoch": 2.103556121857987, + "grad_norm": 1.4620413780212402, + "learning_rate": 5.976549931142773e-05, + "loss": 0.2706, + "step": 54480 + }, + { + "epoch": 2.1039422371520136, + "grad_norm": 0.40234237909317017, + "learning_rate": 5.973975829182594e-05, + "loss": 0.1181, + "step": 54490 + }, + { + "epoch": 2.1043283524460406, + "grad_norm": 0.2635735273361206, + "learning_rate": 5.9714017272224156e-05, + "loss": 0.1217, + "step": 54500 + }, + { + "epoch": 2.104714467740067, + "grad_norm": 0.5525489449501038, + "learning_rate": 5.968827625262237e-05, + "loss": 0.2949, + "step": 54510 + }, + { + "epoch": 2.105100583034094, + "grad_norm": 1.1089653968811035, + "learning_rate": 5.9662535233020576e-05, + "loss": 0.1418, + "step": 54520 + }, + { + "epoch": 2.1054866983281206, + "grad_norm": 0.21478118002414703, + "learning_rate": 5.96367942134188e-05, + "loss": 0.0913, + "step": 54530 + }, + { + "epoch": 2.1058728136221476, + "grad_norm": 1.0484806299209595, + "learning_rate": 5.9611053193817016e-05, + "loss": 0.1215, + "step": 54540 + }, + { + "epoch": 2.106258928916174, + "grad_norm": 2.487285852432251, + 
"learning_rate": 5.958531217421522e-05, + "loss": 0.2268, + "step": 54550 + }, + { + "epoch": 2.106645044210201, + "grad_norm": 5.35322904586792, + "learning_rate": 5.9559571154613436e-05, + "loss": 0.0894, + "step": 54560 + }, + { + "epoch": 2.107031159504228, + "grad_norm": 3.2365424633026123, + "learning_rate": 5.953383013501165e-05, + "loss": 0.3387, + "step": 54570 + }, + { + "epoch": 2.1074172747982547, + "grad_norm": 0.9013198614120483, + "learning_rate": 5.950808911540986e-05, + "loss": 0.103, + "step": 54580 + }, + { + "epoch": 2.1078033900922817, + "grad_norm": 0.7987234592437744, + "learning_rate": 5.948234809580808e-05, + "loss": 0.1964, + "step": 54590 + }, + { + "epoch": 2.1081895053863082, + "grad_norm": 0.6315350532531738, + "learning_rate": 5.9456607076206296e-05, + "loss": 0.1523, + "step": 54600 + }, + { + "epoch": 2.108575620680335, + "grad_norm": 2.4431264400482178, + "learning_rate": 5.943086605660451e-05, + "loss": 0.255, + "step": 54610 + }, + { + "epoch": 2.1089617359743618, + "grad_norm": 1.1002070903778076, + "learning_rate": 5.9405125037002715e-05, + "loss": 0.3092, + "step": 54620 + }, + { + "epoch": 2.1093478512683888, + "grad_norm": 1.1607320308685303, + "learning_rate": 5.937938401740093e-05, + "loss": 0.1338, + "step": 54630 + }, + { + "epoch": 2.1097339665624157, + "grad_norm": 1.9413435459136963, + "learning_rate": 5.935364299779914e-05, + "loss": 0.2128, + "step": 54640 + }, + { + "epoch": 2.1101200818564423, + "grad_norm": 1.6216448545455933, + "learning_rate": 5.932790197819736e-05, + "loss": 0.2688, + "step": 54650 + }, + { + "epoch": 2.1105061971504693, + "grad_norm": 0.9825085997581482, + "learning_rate": 5.9302160958595576e-05, + "loss": 0.146, + "step": 54660 + }, + { + "epoch": 2.110892312444496, + "grad_norm": 2.0620877742767334, + "learning_rate": 5.927641993899379e-05, + "loss": 0.1987, + "step": 54670 + }, + { + "epoch": 2.111278427738523, + "grad_norm": 0.6155973076820374, + "learning_rate": 5.9250678919392e-05, + "loss": 0.0886, + "step": 54680 + }, + { + "epoch": 2.1116645430325494, + "grad_norm": 0.08519631624221802, + "learning_rate": 5.922493789979021e-05, + "loss": 0.1087, + "step": 54690 + }, + { + "epoch": 2.1120506583265763, + "grad_norm": 1.9819930791854858, + "learning_rate": 5.919919688018842e-05, + "loss": 0.2588, + "step": 54700 + }, + { + "epoch": 2.112436773620603, + "grad_norm": 0.32515060901641846, + "learning_rate": 5.9173455860586635e-05, + "loss": 0.2297, + "step": 54710 + }, + { + "epoch": 2.11282288891463, + "grad_norm": 2.5351369380950928, + "learning_rate": 5.9147714840984855e-05, + "loss": 0.1391, + "step": 54720 + }, + { + "epoch": 2.113209004208657, + "grad_norm": 0.3489625155925751, + "learning_rate": 5.912197382138307e-05, + "loss": 0.1012, + "step": 54730 + }, + { + "epoch": 2.1135951195026834, + "grad_norm": 0.4030207693576813, + "learning_rate": 5.909623280178128e-05, + "loss": 0.2788, + "step": 54740 + }, + { + "epoch": 2.1139812347967104, + "grad_norm": 1.3358521461486816, + "learning_rate": 5.9070491782179495e-05, + "loss": 0.4126, + "step": 54750 + }, + { + "epoch": 2.114367350090737, + "grad_norm": 1.6924939155578613, + "learning_rate": 5.90447507625777e-05, + "loss": 0.1319, + "step": 54760 + }, + { + "epoch": 2.114753465384764, + "grad_norm": 0.08663685619831085, + "learning_rate": 5.9019009742975915e-05, + "loss": 0.3066, + "step": 54770 + }, + { + "epoch": 2.1151395806787905, + "grad_norm": 0.2018699198961258, + "learning_rate": 5.899326872337414e-05, + "loss": 0.1316, + "step": 54780 + }, + { + 
"epoch": 2.1155256959728175, + "grad_norm": 1.8767002820968628, + "learning_rate": 5.896752770377235e-05, + "loss": 0.2638, + "step": 54790 + }, + { + "epoch": 2.1159118112668445, + "grad_norm": 2.719196081161499, + "learning_rate": 5.894178668417056e-05, + "loss": 0.166, + "step": 54800 + }, + { + "epoch": 2.116297926560871, + "grad_norm": 1.5541603565216064, + "learning_rate": 5.8916045664568775e-05, + "loss": 0.1502, + "step": 54810 + }, + { + "epoch": 2.116684041854898, + "grad_norm": 2.0174572467803955, + "learning_rate": 5.889030464496699e-05, + "loss": 0.2778, + "step": 54820 + }, + { + "epoch": 2.1170701571489245, + "grad_norm": 1.4575814008712769, + "learning_rate": 5.88645636253652e-05, + "loss": 0.2026, + "step": 54830 + }, + { + "epoch": 2.1174562724429515, + "grad_norm": 2.583587646484375, + "learning_rate": 5.883882260576342e-05, + "loss": 0.1695, + "step": 54840 + }, + { + "epoch": 2.117842387736978, + "grad_norm": 1.3352335691452026, + "learning_rate": 5.8813081586161635e-05, + "loss": 0.2557, + "step": 54850 + }, + { + "epoch": 2.118228503031005, + "grad_norm": 1.0539675951004028, + "learning_rate": 5.878734056655985e-05, + "loss": 0.2358, + "step": 54860 + }, + { + "epoch": 2.118614618325032, + "grad_norm": 1.6472233533859253, + "learning_rate": 5.8761599546958055e-05, + "loss": 0.1166, + "step": 54870 + }, + { + "epoch": 2.1190007336190586, + "grad_norm": 1.6128703355789185, + "learning_rate": 5.873585852735627e-05, + "loss": 0.1584, + "step": 54880 + }, + { + "epoch": 2.1193868489130856, + "grad_norm": 2.4982826709747314, + "learning_rate": 5.871011750775448e-05, + "loss": 0.2192, + "step": 54890 + }, + { + "epoch": 2.119772964207112, + "grad_norm": 1.7372159957885742, + "learning_rate": 5.86843764881527e-05, + "loss": 0.1824, + "step": 54900 + }, + { + "epoch": 2.120159079501139, + "grad_norm": 1.9102532863616943, + "learning_rate": 5.8658635468550915e-05, + "loss": 0.1234, + "step": 54910 + }, + { + "epoch": 2.1205451947951657, + "grad_norm": 0.9978908896446228, + "learning_rate": 5.863289444894913e-05, + "loss": 0.2699, + "step": 54920 + }, + { + "epoch": 2.1209313100891927, + "grad_norm": 0.8557146787643433, + "learning_rate": 5.860715342934734e-05, + "loss": 0.0984, + "step": 54930 + }, + { + "epoch": 2.1213174253832197, + "grad_norm": 2.5358450412750244, + "learning_rate": 5.858141240974555e-05, + "loss": 0.2159, + "step": 54940 + }, + { + "epoch": 2.121703540677246, + "grad_norm": 2.588324785232544, + "learning_rate": 5.855567139014376e-05, + "loss": 0.1311, + "step": 54950 + }, + { + "epoch": 2.122089655971273, + "grad_norm": 3.9097461700439453, + "learning_rate": 5.852993037054199e-05, + "loss": 0.2061, + "step": 54960 + }, + { + "epoch": 2.1224757712652997, + "grad_norm": 0.992247998714447, + "learning_rate": 5.8504189350940195e-05, + "loss": 0.0774, + "step": 54970 + }, + { + "epoch": 2.1228618865593267, + "grad_norm": 0.9253148436546326, + "learning_rate": 5.847844833133841e-05, + "loss": 0.1274, + "step": 54980 + }, + { + "epoch": 2.1232480018533533, + "grad_norm": 0.8236201405525208, + "learning_rate": 5.845270731173662e-05, + "loss": 0.0951, + "step": 54990 + }, + { + "epoch": 2.1236341171473803, + "grad_norm": 0.9370753169059753, + "learning_rate": 5.8426966292134835e-05, + "loss": 0.1832, + "step": 55000 + }, + { + "epoch": 2.124020232441407, + "grad_norm": 2.7415149211883545, + "learning_rate": 5.840122527253304e-05, + "loss": 0.1798, + "step": 55010 + }, + { + "epoch": 2.124406347735434, + "grad_norm": 1.4576952457427979, + "learning_rate": 
5.8375484252931255e-05, + "loss": 0.2384, + "step": 55020 + }, + { + "epoch": 2.124792463029461, + "grad_norm": 2.303542137145996, + "learning_rate": 5.834974323332948e-05, + "loss": 0.2166, + "step": 55030 + }, + { + "epoch": 2.1251785783234873, + "grad_norm": 1.1065007448196411, + "learning_rate": 5.832400221372769e-05, + "loss": 0.1723, + "step": 55040 + }, + { + "epoch": 2.1255646936175143, + "grad_norm": 4.247042655944824, + "learning_rate": 5.82982611941259e-05, + "loss": 0.3005, + "step": 55050 + }, + { + "epoch": 2.125950808911541, + "grad_norm": 1.3860160112380981, + "learning_rate": 5.8272520174524115e-05, + "loss": 0.2504, + "step": 55060 + }, + { + "epoch": 2.126336924205568, + "grad_norm": 0.2923658788204193, + "learning_rate": 5.824677915492233e-05, + "loss": 0.1933, + "step": 55070 + }, + { + "epoch": 2.1267230394995944, + "grad_norm": 2.0818326473236084, + "learning_rate": 5.822103813532054e-05, + "loss": 0.2383, + "step": 55080 + }, + { + "epoch": 2.1271091547936214, + "grad_norm": 2.5381951332092285, + "learning_rate": 5.819529711571876e-05, + "loss": 0.1791, + "step": 55090 + }, + { + "epoch": 2.127495270087648, + "grad_norm": 0.7398497462272644, + "learning_rate": 5.8169556096116975e-05, + "loss": 0.1255, + "step": 55100 + }, + { + "epoch": 2.127881385381675, + "grad_norm": 1.7903372049331665, + "learning_rate": 5.814381507651518e-05, + "loss": 0.2066, + "step": 55110 + }, + { + "epoch": 2.128267500675702, + "grad_norm": 0.8950181007385254, + "learning_rate": 5.8118074056913395e-05, + "loss": 0.2158, + "step": 55120 + }, + { + "epoch": 2.1286536159697285, + "grad_norm": 0.39259612560272217, + "learning_rate": 5.809233303731161e-05, + "loss": 0.0752, + "step": 55130 + }, + { + "epoch": 2.1290397312637555, + "grad_norm": 1.3042824268341064, + "learning_rate": 5.806659201770982e-05, + "loss": 0.2185, + "step": 55140 + }, + { + "epoch": 2.129425846557782, + "grad_norm": 2.0668983459472656, + "learning_rate": 5.804085099810804e-05, + "loss": 0.1751, + "step": 55150 + }, + { + "epoch": 2.129811961851809, + "grad_norm": 0.634894609451294, + "learning_rate": 5.8015109978506255e-05, + "loss": 0.1275, + "step": 55160 + }, + { + "epoch": 2.1301980771458355, + "grad_norm": 0.9348855018615723, + "learning_rate": 5.798936895890447e-05, + "loss": 0.2011, + "step": 55170 + }, + { + "epoch": 2.1305841924398625, + "grad_norm": 0.686715841293335, + "learning_rate": 5.796362793930268e-05, + "loss": 0.1728, + "step": 55180 + }, + { + "epoch": 2.1309703077338895, + "grad_norm": 2.238306999206543, + "learning_rate": 5.793788691970089e-05, + "loss": 0.1944, + "step": 55190 + }, + { + "epoch": 2.131356423027916, + "grad_norm": 0.7057651281356812, + "learning_rate": 5.79121459000991e-05, + "loss": 0.1892, + "step": 55200 + }, + { + "epoch": 2.131742538321943, + "grad_norm": 2.050670862197876, + "learning_rate": 5.788640488049733e-05, + "loss": 0.1539, + "step": 55210 + }, + { + "epoch": 2.1321286536159696, + "grad_norm": 1.924126386642456, + "learning_rate": 5.7860663860895534e-05, + "loss": 0.3623, + "step": 55220 + }, + { + "epoch": 2.1325147689099966, + "grad_norm": 0.32087692618370056, + "learning_rate": 5.783492284129375e-05, + "loss": 0.1179, + "step": 55230 + }, + { + "epoch": 2.132900884204023, + "grad_norm": 2.266429901123047, + "learning_rate": 5.780918182169196e-05, + "loss": 0.1164, + "step": 55240 + }, + { + "epoch": 2.13328699949805, + "grad_norm": 0.5455263257026672, + "learning_rate": 5.7783440802090174e-05, + "loss": 0.1361, + "step": 55250 + }, + { + "epoch": 2.133673114792077, 
+ "grad_norm": 0.6196660399436951, + "learning_rate": 5.775769978248838e-05, + "loss": 0.1986, + "step": 55260 + }, + { + "epoch": 2.1340592300861037, + "grad_norm": 0.4529377222061157, + "learning_rate": 5.7731958762886594e-05, + "loss": 0.3009, + "step": 55270 + }, + { + "epoch": 2.1344453453801306, + "grad_norm": 0.44304555654525757, + "learning_rate": 5.770621774328482e-05, + "loss": 0.2522, + "step": 55280 + }, + { + "epoch": 2.134831460674157, + "grad_norm": 2.3878743648529053, + "learning_rate": 5.768047672368303e-05, + "loss": 0.1762, + "step": 55290 + }, + { + "epoch": 2.135217575968184, + "grad_norm": 1.3650730848312378, + "learning_rate": 5.765473570408124e-05, + "loss": 0.1596, + "step": 55300 + }, + { + "epoch": 2.1356036912622107, + "grad_norm": 1.0818227529525757, + "learning_rate": 5.7628994684479454e-05, + "loss": 0.0982, + "step": 55310 + }, + { + "epoch": 2.1359898065562377, + "grad_norm": 0.639480710029602, + "learning_rate": 5.760325366487767e-05, + "loss": 0.1165, + "step": 55320 + }, + { + "epoch": 2.1363759218502647, + "grad_norm": 0.9253720641136169, + "learning_rate": 5.7577512645275874e-05, + "loss": 0.3411, + "step": 55330 + }, + { + "epoch": 2.1367620371442912, + "grad_norm": 1.2035890817642212, + "learning_rate": 5.75517716256741e-05, + "loss": 0.2002, + "step": 55340 + }, + { + "epoch": 2.1371481524383182, + "grad_norm": 1.8806023597717285, + "learning_rate": 5.7526030606072314e-05, + "loss": 0.1834, + "step": 55350 + }, + { + "epoch": 2.137534267732345, + "grad_norm": 1.9944851398468018, + "learning_rate": 5.750028958647052e-05, + "loss": 0.2019, + "step": 55360 + }, + { + "epoch": 2.1379203830263718, + "grad_norm": 0.5148534774780273, + "learning_rate": 5.7474548566868734e-05, + "loss": 0.263, + "step": 55370 + }, + { + "epoch": 2.1383064983203983, + "grad_norm": 1.1325627565383911, + "learning_rate": 5.744880754726695e-05, + "loss": 0.1479, + "step": 55380 + }, + { + "epoch": 2.1386926136144253, + "grad_norm": 0.9628505706787109, + "learning_rate": 5.742306652766516e-05, + "loss": 0.1216, + "step": 55390 + }, + { + "epoch": 2.1390787289084523, + "grad_norm": 0.1893000602722168, + "learning_rate": 5.739732550806338e-05, + "loss": 0.161, + "step": 55400 + }, + { + "epoch": 2.139464844202479, + "grad_norm": 0.9227676391601562, + "learning_rate": 5.7371584488461594e-05, + "loss": 0.1336, + "step": 55410 + }, + { + "epoch": 2.139850959496506, + "grad_norm": 0.3534090220928192, + "learning_rate": 5.734584346885981e-05, + "loss": 0.1184, + "step": 55420 + }, + { + "epoch": 2.1402370747905324, + "grad_norm": 0.9937344193458557, + "learning_rate": 5.7320102449258014e-05, + "loss": 0.1053, + "step": 55430 + }, + { + "epoch": 2.1406231900845594, + "grad_norm": 1.7365370988845825, + "learning_rate": 5.729436142965623e-05, + "loss": 0.1863, + "step": 55440 + }, + { + "epoch": 2.141009305378586, + "grad_norm": 1.2345154285430908, + "learning_rate": 5.726862041005444e-05, + "loss": 0.3243, + "step": 55450 + }, + { + "epoch": 2.141395420672613, + "grad_norm": 0.36491262912750244, + "learning_rate": 5.724287939045266e-05, + "loss": 0.3046, + "step": 55460 + }, + { + "epoch": 2.1417815359666394, + "grad_norm": 0.6416808366775513, + "learning_rate": 5.7217138370850874e-05, + "loss": 0.1608, + "step": 55470 + }, + { + "epoch": 2.1421676512606664, + "grad_norm": 1.3553400039672852, + "learning_rate": 5.719139735124909e-05, + "loss": 0.2223, + "step": 55480 + }, + { + "epoch": 2.1425537665546934, + "grad_norm": 1.049273133277893, + "learning_rate": 5.71656563316473e-05, + 
"loss": 0.1196, + "step": 55490 + }, + { + "epoch": 2.14293988184872, + "grad_norm": 0.8493034243583679, + "learning_rate": 5.7139915312045514e-05, + "loss": 0.2165, + "step": 55500 + }, + { + "epoch": 2.143325997142747, + "grad_norm": 0.6411147117614746, + "learning_rate": 5.711417429244372e-05, + "loss": 0.1442, + "step": 55510 + }, + { + "epoch": 2.1437121124367735, + "grad_norm": 0.7366828322410583, + "learning_rate": 5.7088433272841934e-05, + "loss": 0.3625, + "step": 55520 + }, + { + "epoch": 2.1440982277308005, + "grad_norm": 0.7757991552352905, + "learning_rate": 5.706269225324016e-05, + "loss": 0.1393, + "step": 55530 + }, + { + "epoch": 2.144484343024827, + "grad_norm": 1.496213436126709, + "learning_rate": 5.703695123363837e-05, + "loss": 0.2351, + "step": 55540 + }, + { + "epoch": 2.144870458318854, + "grad_norm": 1.1395788192749023, + "learning_rate": 5.701121021403658e-05, + "loss": 0.1933, + "step": 55550 + }, + { + "epoch": 2.1452565736128806, + "grad_norm": 3.914621353149414, + "learning_rate": 5.6985469194434794e-05, + "loss": 0.2532, + "step": 55560 + }, + { + "epoch": 2.1456426889069076, + "grad_norm": 2.1900668144226074, + "learning_rate": 5.695972817483301e-05, + "loss": 0.2192, + "step": 55570 + }, + { + "epoch": 2.1460288042009346, + "grad_norm": 2.3883585929870605, + "learning_rate": 5.6933987155231214e-05, + "loss": 0.2819, + "step": 55580 + }, + { + "epoch": 2.146414919494961, + "grad_norm": 0.28431379795074463, + "learning_rate": 5.690824613562944e-05, + "loss": 0.2381, + "step": 55590 + }, + { + "epoch": 2.146801034788988, + "grad_norm": 0.39490944147109985, + "learning_rate": 5.6882505116027654e-05, + "loss": 0.1224, + "step": 55600 + }, + { + "epoch": 2.1471871500830146, + "grad_norm": 0.5814546346664429, + "learning_rate": 5.685676409642586e-05, + "loss": 0.0878, + "step": 55610 + }, + { + "epoch": 2.1475732653770416, + "grad_norm": 3.863250255584717, + "learning_rate": 5.6831023076824074e-05, + "loss": 0.2972, + "step": 55620 + }, + { + "epoch": 2.147959380671068, + "grad_norm": 1.5579304695129395, + "learning_rate": 5.680528205722229e-05, + "loss": 0.2035, + "step": 55630 + }, + { + "epoch": 2.148345495965095, + "grad_norm": 1.3953123092651367, + "learning_rate": 5.67795410376205e-05, + "loss": 0.2039, + "step": 55640 + }, + { + "epoch": 2.148731611259122, + "grad_norm": 0.30549386143684387, + "learning_rate": 5.675380001801872e-05, + "loss": 0.1311, + "step": 55650 + }, + { + "epoch": 2.1491177265531487, + "grad_norm": 1.704403281211853, + "learning_rate": 5.6728058998416934e-05, + "loss": 0.1683, + "step": 55660 + }, + { + "epoch": 2.1495038418471757, + "grad_norm": 0.47322070598602295, + "learning_rate": 5.670231797881515e-05, + "loss": 0.0995, + "step": 55670 + }, + { + "epoch": 2.1498899571412022, + "grad_norm": 0.8710082769393921, + "learning_rate": 5.6676576959213354e-05, + "loss": 0.1737, + "step": 55680 + }, + { + "epoch": 2.1502760724352292, + "grad_norm": 3.141096353530884, + "learning_rate": 5.665083593961157e-05, + "loss": 0.1693, + "step": 55690 + }, + { + "epoch": 2.1506621877292558, + "grad_norm": 1.8428922891616821, + "learning_rate": 5.662509492000978e-05, + "loss": 0.0868, + "step": 55700 + }, + { + "epoch": 2.1510483030232828, + "grad_norm": 0.3056959807872772, + "learning_rate": 5.6599353900408e-05, + "loss": 0.2256, + "step": 55710 + }, + { + "epoch": 2.1514344183173097, + "grad_norm": 0.14365683495998383, + "learning_rate": 5.6573612880806214e-05, + "loss": 0.2421, + "step": 55720 + }, + { + "epoch": 2.1518205336113363, + 
"grad_norm": 0.34138041734695435, + "learning_rate": 5.654787186120443e-05, + "loss": 0.2089, + "step": 55730 + }, + { + "epoch": 2.1522066489053633, + "grad_norm": 1.2336843013763428, + "learning_rate": 5.652213084160264e-05, + "loss": 0.2282, + "step": 55740 + }, + { + "epoch": 2.15259276419939, + "grad_norm": 0.121715247631073, + "learning_rate": 5.6496389822000854e-05, + "loss": 0.341, + "step": 55750 + }, + { + "epoch": 2.152978879493417, + "grad_norm": 1.271396279335022, + "learning_rate": 5.647064880239906e-05, + "loss": 0.346, + "step": 55760 + }, + { + "epoch": 2.1533649947874434, + "grad_norm": 0.39105209708213806, + "learning_rate": 5.6444907782797273e-05, + "loss": 0.2369, + "step": 55770 + }, + { + "epoch": 2.1537511100814704, + "grad_norm": 2.396703004837036, + "learning_rate": 5.6419166763195493e-05, + "loss": 0.1595, + "step": 55780 + }, + { + "epoch": 2.1541372253754973, + "grad_norm": 1.4576066732406616, + "learning_rate": 5.639342574359371e-05, + "loss": 0.1757, + "step": 55790 + }, + { + "epoch": 2.154523340669524, + "grad_norm": 0.5846558213233948, + "learning_rate": 5.636768472399192e-05, + "loss": 0.0961, + "step": 55800 + }, + { + "epoch": 2.154909455963551, + "grad_norm": 0.2891974449157715, + "learning_rate": 5.6341943704390133e-05, + "loss": 0.1381, + "step": 55810 + }, + { + "epoch": 2.1552955712575774, + "grad_norm": 1.4091805219650269, + "learning_rate": 5.631620268478835e-05, + "loss": 0.1934, + "step": 55820 + }, + { + "epoch": 2.1556816865516044, + "grad_norm": 0.5410944223403931, + "learning_rate": 5.629046166518655e-05, + "loss": 0.2414, + "step": 55830 + }, + { + "epoch": 2.156067801845631, + "grad_norm": 0.7418326735496521, + "learning_rate": 5.626472064558478e-05, + "loss": 0.0405, + "step": 55840 + }, + { + "epoch": 2.156453917139658, + "grad_norm": 0.03413806110620499, + "learning_rate": 5.6238979625982993e-05, + "loss": 0.1131, + "step": 55850 + }, + { + "epoch": 2.1568400324336845, + "grad_norm": 0.11657452583312988, + "learning_rate": 5.62132386063812e-05, + "loss": 0.2259, + "step": 55860 + }, + { + "epoch": 2.1572261477277115, + "grad_norm": 0.9248818755149841, + "learning_rate": 5.618749758677941e-05, + "loss": 0.1091, + "step": 55870 + }, + { + "epoch": 2.1576122630217385, + "grad_norm": 0.19540861248970032, + "learning_rate": 5.6161756567177627e-05, + "loss": 0.1831, + "step": 55880 + }, + { + "epoch": 2.157998378315765, + "grad_norm": 1.0880403518676758, + "learning_rate": 5.613601554757584e-05, + "loss": 0.2051, + "step": 55890 + }, + { + "epoch": 2.158384493609792, + "grad_norm": 0.766243577003479, + "learning_rate": 5.611027452797406e-05, + "loss": 0.123, + "step": 55900 + }, + { + "epoch": 2.1587706089038186, + "grad_norm": 0.7406583428382874, + "learning_rate": 5.608453350837227e-05, + "loss": 0.1137, + "step": 55910 + }, + { + "epoch": 2.1591567241978455, + "grad_norm": 0.5550261735916138, + "learning_rate": 5.605879248877049e-05, + "loss": 0.1978, + "step": 55920 + }, + { + "epoch": 2.159542839491872, + "grad_norm": 1.202231526374817, + "learning_rate": 5.603305146916869e-05, + "loss": 0.2157, + "step": 55930 + }, + { + "epoch": 2.159928954785899, + "grad_norm": 0.7214229702949524, + "learning_rate": 5.6007310449566906e-05, + "loss": 0.3131, + "step": 55940 + }, + { + "epoch": 2.160315070079926, + "grad_norm": 0.6656380295753479, + "learning_rate": 5.598156942996512e-05, + "loss": 0.1293, + "step": 55950 + }, + { + "epoch": 2.1607011853739526, + "grad_norm": 0.15865078568458557, + "learning_rate": 5.595582841036334e-05, + "loss": 
0.045, + "step": 55960 + }, + { + "epoch": 2.1610873006679796, + "grad_norm": 0.6749983429908752, + "learning_rate": 5.593008739076155e-05, + "loss": 0.1891, + "step": 55970 + }, + { + "epoch": 2.161473415962006, + "grad_norm": 1.1434985399246216, + "learning_rate": 5.5904346371159766e-05, + "loss": 0.0775, + "step": 55980 + }, + { + "epoch": 2.161859531256033, + "grad_norm": 1.1395485401153564, + "learning_rate": 5.587860535155798e-05, + "loss": 0.1444, + "step": 55990 + }, + { + "epoch": 2.1622456465500597, + "grad_norm": 0.44319289922714233, + "learning_rate": 5.5852864331956186e-05, + "loss": 0.2506, + "step": 56000 + }, + { + "epoch": 2.1626317618440867, + "grad_norm": 0.9017069935798645, + "learning_rate": 5.58271233123544e-05, + "loss": 0.2104, + "step": 56010 + }, + { + "epoch": 2.163017877138113, + "grad_norm": 2.420107126235962, + "learning_rate": 5.580138229275261e-05, + "loss": 0.2541, + "step": 56020 + }, + { + "epoch": 2.16340399243214, + "grad_norm": 0.5543047785758972, + "learning_rate": 5.577564127315083e-05, + "loss": 0.1408, + "step": 56030 + }, + { + "epoch": 2.163790107726167, + "grad_norm": 0.8099603652954102, + "learning_rate": 5.5749900253549046e-05, + "loss": 0.0781, + "step": 56040 + }, + { + "epoch": 2.1641762230201937, + "grad_norm": 0.972820520401001, + "learning_rate": 5.572415923394726e-05, + "loss": 0.0957, + "step": 56050 + }, + { + "epoch": 2.1645623383142207, + "grad_norm": 0.5426781177520752, + "learning_rate": 5.569841821434547e-05, + "loss": 0.1632, + "step": 56060 + }, + { + "epoch": 2.1649484536082473, + "grad_norm": 2.088747501373291, + "learning_rate": 5.5672677194743686e-05, + "loss": 0.143, + "step": 56070 + }, + { + "epoch": 2.1653345689022743, + "grad_norm": 0.3575989007949829, + "learning_rate": 5.564693617514189e-05, + "loss": 0.2365, + "step": 56080 + }, + { + "epoch": 2.165720684196301, + "grad_norm": 1.2159044742584229, + "learning_rate": 5.562119515554012e-05, + "loss": 0.2034, + "step": 56090 + }, + { + "epoch": 2.166106799490328, + "grad_norm": 1.2994232177734375, + "learning_rate": 5.559545413593833e-05, + "loss": 0.074, + "step": 56100 + }, + { + "epoch": 2.166492914784355, + "grad_norm": 0.2585364878177643, + "learning_rate": 5.556971311633654e-05, + "loss": 0.1147, + "step": 56110 + }, + { + "epoch": 2.1668790300783813, + "grad_norm": 0.18736127018928528, + "learning_rate": 5.554397209673475e-05, + "loss": 0.0895, + "step": 56120 + }, + { + "epoch": 2.1672651453724083, + "grad_norm": 0.43447959423065186, + "learning_rate": 5.5518231077132966e-05, + "loss": 0.1981, + "step": 56130 + }, + { + "epoch": 2.167651260666435, + "grad_norm": 1.2077672481536865, + "learning_rate": 5.549249005753118e-05, + "loss": 0.1432, + "step": 56140 + }, + { + "epoch": 2.168037375960462, + "grad_norm": 1.672919511795044, + "learning_rate": 5.54667490379294e-05, + "loss": 0.1381, + "step": 56150 + }, + { + "epoch": 2.1684234912544884, + "grad_norm": 0.6553566455841064, + "learning_rate": 5.544100801832761e-05, + "loss": 0.1089, + "step": 56160 + }, + { + "epoch": 2.1688096065485154, + "grad_norm": 0.4206780791282654, + "learning_rate": 5.5415266998725826e-05, + "loss": 0.1392, + "step": 56170 + }, + { + "epoch": 2.1691957218425424, + "grad_norm": 1.482874870300293, + "learning_rate": 5.538952597912403e-05, + "loss": 0.2027, + "step": 56180 + }, + { + "epoch": 2.169581837136569, + "grad_norm": 1.802695631980896, + "learning_rate": 5.5363784959522246e-05, + "loss": 0.2954, + "step": 56190 + }, + { + "epoch": 2.169967952430596, + "grad_norm": 
0.7268577218055725, + "learning_rate": 5.533804393992046e-05, + "loss": 0.182, + "step": 56200 + }, + { + "epoch": 2.1703540677246225, + "grad_norm": 0.604767918586731, + "learning_rate": 5.531230292031868e-05, + "loss": 0.1247, + "step": 56210 + }, + { + "epoch": 2.1707401830186495, + "grad_norm": 2.211203098297119, + "learning_rate": 5.528656190071689e-05, + "loss": 0.2143, + "step": 56220 + }, + { + "epoch": 2.171126298312676, + "grad_norm": 0.03701888397336006, + "learning_rate": 5.5260820881115106e-05, + "loss": 0.2246, + "step": 56230 + }, + { + "epoch": 2.171512413606703, + "grad_norm": 3.4111924171447754, + "learning_rate": 5.523507986151332e-05, + "loss": 0.3147, + "step": 56240 + }, + { + "epoch": 2.17189852890073, + "grad_norm": 0.509873628616333, + "learning_rate": 5.5209338841911526e-05, + "loss": 0.1482, + "step": 56250 + }, + { + "epoch": 2.1722846441947565, + "grad_norm": 1.0144810676574707, + "learning_rate": 5.518359782230974e-05, + "loss": 0.161, + "step": 56260 + }, + { + "epoch": 2.1726707594887835, + "grad_norm": 1.7236958742141724, + "learning_rate": 5.5157856802707966e-05, + "loss": 0.2863, + "step": 56270 + }, + { + "epoch": 2.17305687478281, + "grad_norm": 2.028493881225586, + "learning_rate": 5.513211578310617e-05, + "loss": 0.1263, + "step": 56280 + }, + { + "epoch": 2.173442990076837, + "grad_norm": 0.18114915490150452, + "learning_rate": 5.5106374763504386e-05, + "loss": 0.1941, + "step": 56290 + }, + { + "epoch": 2.1738291053708636, + "grad_norm": 2.091604471206665, + "learning_rate": 5.50806337439026e-05, + "loss": 0.1418, + "step": 56300 + }, + { + "epoch": 2.1742152206648906, + "grad_norm": 3.5671277046203613, + "learning_rate": 5.505489272430081e-05, + "loss": 0.1645, + "step": 56310 + }, + { + "epoch": 2.174601335958917, + "grad_norm": 2.093780040740967, + "learning_rate": 5.502915170469902e-05, + "loss": 0.2723, + "step": 56320 + }, + { + "epoch": 2.174987451252944, + "grad_norm": 1.0060350894927979, + "learning_rate": 5.500341068509723e-05, + "loss": 0.1432, + "step": 56330 + }, + { + "epoch": 2.175373566546971, + "grad_norm": 1.280118465423584, + "learning_rate": 5.497766966549546e-05, + "loss": 0.2152, + "step": 56340 + }, + { + "epoch": 2.1757596818409977, + "grad_norm": 0.14088940620422363, + "learning_rate": 5.4951928645893666e-05, + "loss": 0.2147, + "step": 56350 + }, + { + "epoch": 2.1761457971350247, + "grad_norm": 1.0671783685684204, + "learning_rate": 5.492618762629188e-05, + "loss": 0.1269, + "step": 56360 + }, + { + "epoch": 2.176531912429051, + "grad_norm": 0.20585323870182037, + "learning_rate": 5.490044660669009e-05, + "loss": 0.1455, + "step": 56370 + }, + { + "epoch": 2.176918027723078, + "grad_norm": 1.8759623765945435, + "learning_rate": 5.4874705587088306e-05, + "loss": 0.3711, + "step": 56380 + }, + { + "epoch": 2.1773041430171047, + "grad_norm": 1.1874949932098389, + "learning_rate": 5.484896456748652e-05, + "loss": 0.1062, + "step": 56390 + }, + { + "epoch": 2.1776902583111317, + "grad_norm": 1.0083370208740234, + "learning_rate": 5.482322354788474e-05, + "loss": 0.1121, + "step": 56400 + }, + { + "epoch": 2.1780763736051583, + "grad_norm": 0.7510607838630676, + "learning_rate": 5.479748252828295e-05, + "loss": 0.2032, + "step": 56410 + }, + { + "epoch": 2.1784624888991853, + "grad_norm": 0.307444304227829, + "learning_rate": 5.4771741508681166e-05, + "loss": 0.1786, + "step": 56420 + }, + { + "epoch": 2.1788486041932122, + "grad_norm": 0.1072758212685585, + "learning_rate": 5.474600048907937e-05, + "loss": 0.3205, + "step": 
56430 + }, + { + "epoch": 2.179234719487239, + "grad_norm": 0.78147292137146, + "learning_rate": 5.4720259469477586e-05, + "loss": 0.1552, + "step": 56440 + }, + { + "epoch": 2.179620834781266, + "grad_norm": 0.4287649393081665, + "learning_rate": 5.46945184498758e-05, + "loss": 0.0457, + "step": 56450 + }, + { + "epoch": 2.1800069500752923, + "grad_norm": 2.8946595191955566, + "learning_rate": 5.466877743027402e-05, + "loss": 0.1814, + "step": 56460 + }, + { + "epoch": 2.1803930653693193, + "grad_norm": 0.446044385433197, + "learning_rate": 5.464303641067223e-05, + "loss": 0.1898, + "step": 56470 + }, + { + "epoch": 2.180779180663346, + "grad_norm": 2.351010799407959, + "learning_rate": 5.4617295391070446e-05, + "loss": 0.1929, + "step": 56480 + }, + { + "epoch": 2.181165295957373, + "grad_norm": 1.1475882530212402, + "learning_rate": 5.459155437146866e-05, + "loss": 0.0972, + "step": 56490 + }, + { + "epoch": 2.1815514112514, + "grad_norm": 1.1613543033599854, + "learning_rate": 5.4565813351866865e-05, + "loss": 0.1397, + "step": 56500 + }, + { + "epoch": 2.1819375265454264, + "grad_norm": 1.2021968364715576, + "learning_rate": 5.454007233226508e-05, + "loss": 0.2538, + "step": 56510 + }, + { + "epoch": 2.1823236418394534, + "grad_norm": 1.1156634092330933, + "learning_rate": 5.4514331312663306e-05, + "loss": 0.1325, + "step": 56520 + }, + { + "epoch": 2.18270975713348, + "grad_norm": 3.0149824619293213, + "learning_rate": 5.448859029306151e-05, + "loss": 0.1596, + "step": 56530 + }, + { + "epoch": 2.183095872427507, + "grad_norm": 2.628236770629883, + "learning_rate": 5.4462849273459725e-05, + "loss": 0.3042, + "step": 56540 + }, + { + "epoch": 2.1834819877215335, + "grad_norm": 3.887352705001831, + "learning_rate": 5.443710825385794e-05, + "loss": 0.2112, + "step": 56550 + }, + { + "epoch": 2.1838681030155604, + "grad_norm": 1.9219342470169067, + "learning_rate": 5.441136723425615e-05, + "loss": 0.1709, + "step": 56560 + }, + { + "epoch": 2.1842542183095874, + "grad_norm": 1.5730615854263306, + "learning_rate": 5.438562621465436e-05, + "loss": 0.184, + "step": 56570 + }, + { + "epoch": 2.184640333603614, + "grad_norm": 1.306178331375122, + "learning_rate": 5.435988519505257e-05, + "loss": 0.2147, + "step": 56580 + }, + { + "epoch": 2.185026448897641, + "grad_norm": 0.3093883693218231, + "learning_rate": 5.43341441754508e-05, + "loss": 0.189, + "step": 56590 + }, + { + "epoch": 2.1854125641916675, + "grad_norm": 1.672884225845337, + "learning_rate": 5.4308403155849005e-05, + "loss": 0.1502, + "step": 56600 + }, + { + "epoch": 2.1857986794856945, + "grad_norm": 0.44754695892333984, + "learning_rate": 5.428266213624722e-05, + "loss": 0.1941, + "step": 56610 + }, + { + "epoch": 2.186184794779721, + "grad_norm": 0.3943333625793457, + "learning_rate": 5.425692111664543e-05, + "loss": 0.1494, + "step": 56620 + }, + { + "epoch": 2.186570910073748, + "grad_norm": 1.8399711847305298, + "learning_rate": 5.4231180097043645e-05, + "loss": 0.2462, + "step": 56630 + }, + { + "epoch": 2.186957025367775, + "grad_norm": 0.7934846878051758, + "learning_rate": 5.420543907744186e-05, + "loss": 0.1305, + "step": 56640 + }, + { + "epoch": 2.1873431406618016, + "grad_norm": 3.170630693435669, + "learning_rate": 5.417969805784008e-05, + "loss": 0.1825, + "step": 56650 + }, + { + "epoch": 2.1877292559558286, + "grad_norm": 1.4730361700057983, + "learning_rate": 5.415395703823829e-05, + "loss": 0.1929, + "step": 56660 + }, + { + "epoch": 2.188115371249855, + "grad_norm": 0.38193902373313904, + "learning_rate": 
5.41282160186365e-05, + "loss": 0.2001, + "step": 56670 + }, + { + "epoch": 2.188501486543882, + "grad_norm": 1.1314163208007812, + "learning_rate": 5.410247499903471e-05, + "loss": 0.1913, + "step": 56680 + }, + { + "epoch": 2.1888876018379086, + "grad_norm": 0.40177929401397705, + "learning_rate": 5.4076733979432925e-05, + "loss": 0.1646, + "step": 56690 + }, + { + "epoch": 2.1892737171319356, + "grad_norm": 4.792402744293213, + "learning_rate": 5.405099295983114e-05, + "loss": 0.2481, + "step": 56700 + }, + { + "epoch": 2.1896598324259626, + "grad_norm": 2.82281756401062, + "learning_rate": 5.402525194022936e-05, + "loss": 0.142, + "step": 56710 + }, + { + "epoch": 2.190045947719989, + "grad_norm": 2.4064247608184814, + "learning_rate": 5.399951092062757e-05, + "loss": 0.2333, + "step": 56720 + }, + { + "epoch": 2.190432063014016, + "grad_norm": 1.3720029592514038, + "learning_rate": 5.3973769901025785e-05, + "loss": 0.1812, + "step": 56730 + }, + { + "epoch": 2.1908181783080427, + "grad_norm": 0.5120772123336792, + "learning_rate": 5.3948028881424e-05, + "loss": 0.2093, + "step": 56740 + }, + { + "epoch": 2.1912042936020697, + "grad_norm": 1.755660057067871, + "learning_rate": 5.3922287861822205e-05, + "loss": 0.1866, + "step": 56750 + }, + { + "epoch": 2.1915904088960962, + "grad_norm": 0.6418548226356506, + "learning_rate": 5.389654684222042e-05, + "loss": 0.1122, + "step": 56760 + }, + { + "epoch": 2.1919765241901232, + "grad_norm": 0.29100701212882996, + "learning_rate": 5.3870805822618645e-05, + "loss": 0.1508, + "step": 56770 + }, + { + "epoch": 2.1923626394841498, + "grad_norm": 1.2336047887802124, + "learning_rate": 5.384506480301685e-05, + "loss": 0.2027, + "step": 56780 + }, + { + "epoch": 2.1927487547781768, + "grad_norm": 1.7961387634277344, + "learning_rate": 5.3819323783415065e-05, + "loss": 0.0698, + "step": 56790 + }, + { + "epoch": 2.1931348700722038, + "grad_norm": 0.45203906297683716, + "learning_rate": 5.379358276381328e-05, + "loss": 0.1201, + "step": 56800 + }, + { + "epoch": 2.1935209853662303, + "grad_norm": 2.4944546222686768, + "learning_rate": 5.376784174421149e-05, + "loss": 0.252, + "step": 56810 + }, + { + "epoch": 2.1939071006602573, + "grad_norm": 0.6468565464019775, + "learning_rate": 5.37421007246097e-05, + "loss": 0.3302, + "step": 56820 + }, + { + "epoch": 2.194293215954284, + "grad_norm": 0.6524060368537903, + "learning_rate": 5.371635970500791e-05, + "loss": 0.1588, + "step": 56830 + }, + { + "epoch": 2.194679331248311, + "grad_norm": 1.2810111045837402, + "learning_rate": 5.369061868540614e-05, + "loss": 0.2929, + "step": 56840 + }, + { + "epoch": 2.1950654465423374, + "grad_norm": 1.5758986473083496, + "learning_rate": 5.3664877665804345e-05, + "loss": 0.2013, + "step": 56850 + }, + { + "epoch": 2.1954515618363644, + "grad_norm": 0.8895549774169922, + "learning_rate": 5.363913664620256e-05, + "loss": 0.1539, + "step": 56860 + }, + { + "epoch": 2.195837677130391, + "grad_norm": 0.0427737757563591, + "learning_rate": 5.361339562660077e-05, + "loss": 0.0949, + "step": 56870 + }, + { + "epoch": 2.196223792424418, + "grad_norm": 0.9843714237213135, + "learning_rate": 5.3587654606998985e-05, + "loss": 0.1784, + "step": 56880 + }, + { + "epoch": 2.196609907718445, + "grad_norm": 0.9936504364013672, + "learning_rate": 5.356191358739719e-05, + "loss": 0.1075, + "step": 56890 + }, + { + "epoch": 2.1969960230124714, + "grad_norm": 0.3362007737159729, + "learning_rate": 5.353617256779542e-05, + "loss": 0.1496, + "step": 56900 + }, + { + "epoch": 
2.1973821383064984, + "grad_norm": 0.34201017022132874, + "learning_rate": 5.351043154819363e-05, + "loss": 0.1036, + "step": 56910 + }, + { + "epoch": 2.197768253600525, + "grad_norm": 1.8447175025939941, + "learning_rate": 5.348469052859184e-05, + "loss": 0.1161, + "step": 56920 + }, + { + "epoch": 2.198154368894552, + "grad_norm": 1.2634321451187134, + "learning_rate": 5.345894950899005e-05, + "loss": 0.1302, + "step": 56930 + }, + { + "epoch": 2.1985404841885785, + "grad_norm": 1.3948713541030884, + "learning_rate": 5.3433208489388265e-05, + "loss": 0.2252, + "step": 56940 + }, + { + "epoch": 2.1989265994826055, + "grad_norm": 0.7020501494407654, + "learning_rate": 5.340746746978648e-05, + "loss": 0.1938, + "step": 56950 + }, + { + "epoch": 2.1993127147766325, + "grad_norm": 4.047187805175781, + "learning_rate": 5.33817264501847e-05, + "loss": 0.2477, + "step": 56960 + }, + { + "epoch": 2.199698830070659, + "grad_norm": 0.5025122761726379, + "learning_rate": 5.335598543058291e-05, + "loss": 0.1044, + "step": 56970 + }, + { + "epoch": 2.200084945364686, + "grad_norm": 1.8918673992156982, + "learning_rate": 5.3330244410981125e-05, + "loss": 0.3172, + "step": 56980 + }, + { + "epoch": 2.2004710606587126, + "grad_norm": 3.515730857849121, + "learning_rate": 5.330450339137933e-05, + "loss": 0.0818, + "step": 56990 + }, + { + "epoch": 2.2008571759527396, + "grad_norm": 2.531858444213867, + "learning_rate": 5.3278762371777545e-05, + "loss": 0.2849, + "step": 57000 + }, + { + "epoch": 2.201243291246766, + "grad_norm": 3.145490884780884, + "learning_rate": 5.325302135217576e-05, + "loss": 0.2319, + "step": 57010 + }, + { + "epoch": 2.201629406540793, + "grad_norm": 1.8957561254501343, + "learning_rate": 5.322728033257398e-05, + "loss": 0.2513, + "step": 57020 + }, + { + "epoch": 2.20201552183482, + "grad_norm": 1.326156497001648, + "learning_rate": 5.320153931297219e-05, + "loss": 0.295, + "step": 57030 + }, + { + "epoch": 2.2024016371288466, + "grad_norm": 0.8725142478942871, + "learning_rate": 5.3175798293370405e-05, + "loss": 0.1235, + "step": 57040 + }, + { + "epoch": 2.2027877524228736, + "grad_norm": 0.8360647559165955, + "learning_rate": 5.315005727376862e-05, + "loss": 0.224, + "step": 57050 + }, + { + "epoch": 2.2031738677169, + "grad_norm": 1.381373643875122, + "learning_rate": 5.312431625416683e-05, + "loss": 0.1003, + "step": 57060 + }, + { + "epoch": 2.203559983010927, + "grad_norm": 2.6999964714050293, + "learning_rate": 5.309857523456504e-05, + "loss": 0.2118, + "step": 57070 + }, + { + "epoch": 2.2039460983049537, + "grad_norm": 2.1584982872009277, + "learning_rate": 5.307283421496325e-05, + "loss": 0.2965, + "step": 57080 + }, + { + "epoch": 2.2043322135989807, + "grad_norm": 1.3775367736816406, + "learning_rate": 5.304709319536148e-05, + "loss": 0.2494, + "step": 57090 + }, + { + "epoch": 2.2047183288930077, + "grad_norm": 2.1069607734680176, + "learning_rate": 5.3021352175759684e-05, + "loss": 0.3412, + "step": 57100 + }, + { + "epoch": 2.205104444187034, + "grad_norm": 1.3556911945343018, + "learning_rate": 5.29956111561579e-05, + "loss": 0.2076, + "step": 57110 + }, + { + "epoch": 2.205490559481061, + "grad_norm": 0.4328407049179077, + "learning_rate": 5.296987013655611e-05, + "loss": 0.0691, + "step": 57120 + }, + { + "epoch": 2.2058766747750878, + "grad_norm": 0.24479885399341583, + "learning_rate": 5.2944129116954324e-05, + "loss": 0.3407, + "step": 57130 + }, + { + "epoch": 2.2062627900691147, + "grad_norm": 0.4531087279319763, + "learning_rate": 5.291838809735253e-05, 
+ "loss": 0.1471, + "step": 57140 + }, + { + "epoch": 2.2066489053631413, + "grad_norm": 1.258487582206726, + "learning_rate": 5.289264707775076e-05, + "loss": 0.2213, + "step": 57150 + }, + { + "epoch": 2.2070350206571683, + "grad_norm": 1.8605122566223145, + "learning_rate": 5.286690605814897e-05, + "loss": 0.1839, + "step": 57160 + }, + { + "epoch": 2.207421135951195, + "grad_norm": 0.20423386991024017, + "learning_rate": 5.284116503854718e-05, + "loss": 0.1898, + "step": 57170 + }, + { + "epoch": 2.207807251245222, + "grad_norm": 1.366576910018921, + "learning_rate": 5.281542401894539e-05, + "loss": 0.1533, + "step": 57180 + }, + { + "epoch": 2.208193366539249, + "grad_norm": 2.0091841220855713, + "learning_rate": 5.2789682999343604e-05, + "loss": 0.2834, + "step": 57190 + }, + { + "epoch": 2.2085794818332753, + "grad_norm": 1.271532654762268, + "learning_rate": 5.276394197974182e-05, + "loss": 0.2215, + "step": 57200 + }, + { + "epoch": 2.2089655971273023, + "grad_norm": 1.3751137256622314, + "learning_rate": 5.273820096014004e-05, + "loss": 0.1753, + "step": 57210 + }, + { + "epoch": 2.209351712421329, + "grad_norm": 1.6233354806900024, + "learning_rate": 5.271245994053825e-05, + "loss": 0.425, + "step": 57220 + }, + { + "epoch": 2.209737827715356, + "grad_norm": 0.05391040816903114, + "learning_rate": 5.2686718920936464e-05, + "loss": 0.1759, + "step": 57230 + }, + { + "epoch": 2.2101239430093824, + "grad_norm": 0.0809585228562355, + "learning_rate": 5.266097790133467e-05, + "loss": 0.1902, + "step": 57240 + }, + { + "epoch": 2.2105100583034094, + "grad_norm": 0.05576219782233238, + "learning_rate": 5.2635236881732884e-05, + "loss": 0.1104, + "step": 57250 + }, + { + "epoch": 2.2108961735974364, + "grad_norm": 0.2295994609594345, + "learning_rate": 5.26094958621311e-05, + "loss": 0.1119, + "step": 57260 + }, + { + "epoch": 2.211282288891463, + "grad_norm": 2.042689085006714, + "learning_rate": 5.258375484252932e-05, + "loss": 0.1703, + "step": 57270 + }, + { + "epoch": 2.21166840418549, + "grad_norm": 0.13086611032485962, + "learning_rate": 5.255801382292753e-05, + "loss": 0.2181, + "step": 57280 + }, + { + "epoch": 2.2120545194795165, + "grad_norm": 0.7772855758666992, + "learning_rate": 5.2532272803325744e-05, + "loss": 0.2975, + "step": 57290 + }, + { + "epoch": 2.2124406347735435, + "grad_norm": 1.100147008895874, + "learning_rate": 5.250653178372396e-05, + "loss": 0.1446, + "step": 57300 + }, + { + "epoch": 2.21282675006757, + "grad_norm": 2.2804689407348633, + "learning_rate": 5.248079076412217e-05, + "loss": 0.1779, + "step": 57310 + }, + { + "epoch": 2.213212865361597, + "grad_norm": 0.909376323223114, + "learning_rate": 5.245504974452038e-05, + "loss": 0.1557, + "step": 57320 + }, + { + "epoch": 2.2135989806556235, + "grad_norm": 0.808570921421051, + "learning_rate": 5.242930872491859e-05, + "loss": 0.2107, + "step": 57330 + }, + { + "epoch": 2.2139850959496505, + "grad_norm": 1.6458532810211182, + "learning_rate": 5.240356770531681e-05, + "loss": 0.1729, + "step": 57340 + }, + { + "epoch": 2.2143712112436775, + "grad_norm": 1.928828477859497, + "learning_rate": 5.2377826685715024e-05, + "loss": 0.1234, + "step": 57350 + }, + { + "epoch": 2.214757326537704, + "grad_norm": 1.076491117477417, + "learning_rate": 5.235208566611324e-05, + "loss": 0.2457, + "step": 57360 + }, + { + "epoch": 2.215143441831731, + "grad_norm": 3.043621778488159, + "learning_rate": 5.232634464651145e-05, + "loss": 0.2592, + "step": 57370 + }, + { + "epoch": 2.2155295571257576, + "grad_norm": 
0.1770264357328415, + "learning_rate": 5.2300603626909664e-05, + "loss": 0.1214, + "step": 57380 + }, + { + "epoch": 2.2159156724197846, + "grad_norm": 0.12781330943107605, + "learning_rate": 5.227486260730787e-05, + "loss": 0.2213, + "step": 57390 + }, + { + "epoch": 2.216301787713811, + "grad_norm": 1.3419160842895508, + "learning_rate": 5.22491215877061e-05, + "loss": 0.1806, + "step": 57400 + }, + { + "epoch": 2.216687903007838, + "grad_norm": 0.9998745322227478, + "learning_rate": 5.222338056810431e-05, + "loss": 0.1452, + "step": 57410 + }, + { + "epoch": 2.217074018301865, + "grad_norm": 0.5411838889122009, + "learning_rate": 5.219763954850252e-05, + "loss": 0.0978, + "step": 57420 + }, + { + "epoch": 2.2174601335958917, + "grad_norm": 0.505660891532898, + "learning_rate": 5.217189852890073e-05, + "loss": 0.0553, + "step": 57430 + }, + { + "epoch": 2.2178462488899187, + "grad_norm": 2.207895278930664, + "learning_rate": 5.2146157509298944e-05, + "loss": 0.2327, + "step": 57440 + }, + { + "epoch": 2.218232364183945, + "grad_norm": 2.272740364074707, + "learning_rate": 5.212041648969716e-05, + "loss": 0.1773, + "step": 57450 + }, + { + "epoch": 2.218618479477972, + "grad_norm": 1.0580307245254517, + "learning_rate": 5.209467547009538e-05, + "loss": 0.1024, + "step": 57460 + }, + { + "epoch": 2.2190045947719987, + "grad_norm": 0.3714104890823364, + "learning_rate": 5.206893445049359e-05, + "loss": 0.1082, + "step": 57470 + }, + { + "epoch": 2.2193907100660257, + "grad_norm": 0.35416433215141296, + "learning_rate": 5.2043193430891804e-05, + "loss": 0.1082, + "step": 57480 + }, + { + "epoch": 2.2197768253600527, + "grad_norm": 1.1985892057418823, + "learning_rate": 5.201745241129001e-05, + "loss": 0.1227, + "step": 57490 + }, + { + "epoch": 2.2201629406540793, + "grad_norm": 0.7527439594268799, + "learning_rate": 5.1991711391688224e-05, + "loss": 0.2785, + "step": 57500 + }, + { + "epoch": 2.2205490559481063, + "grad_norm": 2.3120546340942383, + "learning_rate": 5.196597037208644e-05, + "loss": 0.1503, + "step": 57510 + }, + { + "epoch": 2.220935171242133, + "grad_norm": 1.3414112329483032, + "learning_rate": 5.194022935248466e-05, + "loss": 0.1436, + "step": 57520 + }, + { + "epoch": 2.22132128653616, + "grad_norm": 0.9937017560005188, + "learning_rate": 5.191448833288287e-05, + "loss": 0.1424, + "step": 57530 + }, + { + "epoch": 2.2217074018301863, + "grad_norm": 0.952620267868042, + "learning_rate": 5.1888747313281084e-05, + "loss": 0.1701, + "step": 57540 + }, + { + "epoch": 2.2220935171242133, + "grad_norm": 0.421085000038147, + "learning_rate": 5.18630062936793e-05, + "loss": 0.1187, + "step": 57550 + }, + { + "epoch": 2.2224796324182403, + "grad_norm": 1.5724862813949585, + "learning_rate": 5.1837265274077504e-05, + "loss": 0.1509, + "step": 57560 + }, + { + "epoch": 2.222865747712267, + "grad_norm": 1.705536127090454, + "learning_rate": 5.181152425447572e-05, + "loss": 0.1546, + "step": 57570 + }, + { + "epoch": 2.223251863006294, + "grad_norm": 0.6752486824989319, + "learning_rate": 5.1785783234873944e-05, + "loss": 0.1785, + "step": 57580 + }, + { + "epoch": 2.2236379783003204, + "grad_norm": 0.1227736845612526, + "learning_rate": 5.176004221527215e-05, + "loss": 0.1198, + "step": 57590 + }, + { + "epoch": 2.2240240935943474, + "grad_norm": 1.0653119087219238, + "learning_rate": 5.1734301195670364e-05, + "loss": 0.2688, + "step": 57600 + }, + { + "epoch": 2.224410208888374, + "grad_norm": 2.1538949012756348, + "learning_rate": 5.170856017606858e-05, + "loss": 0.1402, + "step": 
57610 + }, + { + "epoch": 2.224796324182401, + "grad_norm": 2.1059763431549072, + "learning_rate": 5.168281915646679e-05, + "loss": 0.1319, + "step": 57620 + }, + { + "epoch": 2.2251824394764275, + "grad_norm": 1.8453232049942017, + "learning_rate": 5.1657078136865004e-05, + "loss": 0.154, + "step": 57630 + }, + { + "epoch": 2.2255685547704545, + "grad_norm": 1.7324992418289185, + "learning_rate": 5.163133711726321e-05, + "loss": 0.2807, + "step": 57640 + }, + { + "epoch": 2.2259546700644814, + "grad_norm": 0.4680674374103546, + "learning_rate": 5.160559609766144e-05, + "loss": 0.1648, + "step": 57650 + }, + { + "epoch": 2.226340785358508, + "grad_norm": 0.2356865406036377, + "learning_rate": 5.1579855078059643e-05, + "loss": 0.2068, + "step": 57660 + }, + { + "epoch": 2.226726900652535, + "grad_norm": 1.2691845893859863, + "learning_rate": 5.155411405845786e-05, + "loss": 0.425, + "step": 57670 + }, + { + "epoch": 2.2271130159465615, + "grad_norm": 0.09415816515684128, + "learning_rate": 5.152837303885607e-05, + "loss": 0.1015, + "step": 57680 + }, + { + "epoch": 2.2274991312405885, + "grad_norm": 1.1072195768356323, + "learning_rate": 5.1502632019254283e-05, + "loss": 0.2324, + "step": 57690 + }, + { + "epoch": 2.227885246534615, + "grad_norm": 1.774086594581604, + "learning_rate": 5.14768909996525e-05, + "loss": 0.1382, + "step": 57700 + }, + { + "epoch": 2.228271361828642, + "grad_norm": 1.3065643310546875, + "learning_rate": 5.145114998005072e-05, + "loss": 0.1843, + "step": 57710 + }, + { + "epoch": 2.2286574771226686, + "grad_norm": 1.4786202907562256, + "learning_rate": 5.142540896044893e-05, + "loss": 0.2328, + "step": 57720 + }, + { + "epoch": 2.2290435924166956, + "grad_norm": 1.2334314584732056, + "learning_rate": 5.1399667940847143e-05, + "loss": 0.2064, + "step": 57730 + }, + { + "epoch": 2.2294297077107226, + "grad_norm": 0.6082472801208496, + "learning_rate": 5.137392692124535e-05, + "loss": 0.1278, + "step": 57740 + }, + { + "epoch": 2.229815823004749, + "grad_norm": 1.2659168243408203, + "learning_rate": 5.134818590164356e-05, + "loss": 0.1877, + "step": 57750 + }, + { + "epoch": 2.230201938298776, + "grad_norm": 1.652754783630371, + "learning_rate": 5.1322444882041777e-05, + "loss": 0.2277, + "step": 57760 + }, + { + "epoch": 2.2305880535928027, + "grad_norm": 1.6349531412124634, + "learning_rate": 5.129670386244e-05, + "loss": 0.2374, + "step": 57770 + }, + { + "epoch": 2.2309741688868296, + "grad_norm": 1.2513495683670044, + "learning_rate": 5.127096284283821e-05, + "loss": 0.2055, + "step": 57780 + }, + { + "epoch": 2.231360284180856, + "grad_norm": 2.0995755195617676, + "learning_rate": 5.124522182323642e-05, + "loss": 0.1968, + "step": 57790 + }, + { + "epoch": 2.231746399474883, + "grad_norm": 2.2957067489624023, + "learning_rate": 5.1219480803634637e-05, + "loss": 0.233, + "step": 57800 + }, + { + "epoch": 2.23213251476891, + "grad_norm": 2.303072452545166, + "learning_rate": 5.119373978403284e-05, + "loss": 0.0924, + "step": 57810 + }, + { + "epoch": 2.2325186300629367, + "grad_norm": 0.6360287070274353, + "learning_rate": 5.1167998764431056e-05, + "loss": 0.1557, + "step": 57820 + }, + { + "epoch": 2.2329047453569637, + "grad_norm": 0.720551073551178, + "learning_rate": 5.114225774482928e-05, + "loss": 0.1619, + "step": 57830 + }, + { + "epoch": 2.2332908606509903, + "grad_norm": 0.332627534866333, + "learning_rate": 5.111651672522749e-05, + "loss": 0.1529, + "step": 57840 + }, + { + "epoch": 2.2336769759450172, + "grad_norm": 2.1180593967437744, + 
"learning_rate": 5.10907757056257e-05, + "loss": 0.2647, + "step": 57850 + }, + { + "epoch": 2.234063091239044, + "grad_norm": 0.023406701162457466, + "learning_rate": 5.1065034686023916e-05, + "loss": 0.2461, + "step": 57860 + }, + { + "epoch": 2.2344492065330708, + "grad_norm": 0.887008011341095, + "learning_rate": 5.103929366642213e-05, + "loss": 0.191, + "step": 57870 + }, + { + "epoch": 2.2348353218270978, + "grad_norm": 0.9116653203964233, + "learning_rate": 5.1013552646820336e-05, + "loss": 0.2199, + "step": 57880 + }, + { + "epoch": 2.2352214371211243, + "grad_norm": 2.72094464302063, + "learning_rate": 5.098781162721855e-05, + "loss": 0.1219, + "step": 57890 + }, + { + "epoch": 2.2356075524151513, + "grad_norm": 1.7832390069961548, + "learning_rate": 5.0962070607616776e-05, + "loss": 0.1516, + "step": 57900 + }, + { + "epoch": 2.235993667709178, + "grad_norm": 1.892029881477356, + "learning_rate": 5.093632958801498e-05, + "loss": 0.1943, + "step": 57910 + }, + { + "epoch": 2.236379783003205, + "grad_norm": 1.284244418144226, + "learning_rate": 5.0910588568413196e-05, + "loss": 0.0762, + "step": 57920 + }, + { + "epoch": 2.2367658982972314, + "grad_norm": 2.9343578815460205, + "learning_rate": 5.088484754881141e-05, + "loss": 0.1623, + "step": 57930 + }, + { + "epoch": 2.2371520135912584, + "grad_norm": 2.6697938442230225, + "learning_rate": 5.085910652920962e-05, + "loss": 0.1781, + "step": 57940 + }, + { + "epoch": 2.2375381288852854, + "grad_norm": 0.14455921947956085, + "learning_rate": 5.0833365509607836e-05, + "loss": 0.3026, + "step": 57950 + }, + { + "epoch": 2.237924244179312, + "grad_norm": 0.3427145183086395, + "learning_rate": 5.0807624490006056e-05, + "loss": 0.047, + "step": 57960 + }, + { + "epoch": 2.238310359473339, + "grad_norm": 0.4725586473941803, + "learning_rate": 5.078188347040427e-05, + "loss": 0.1794, + "step": 57970 + }, + { + "epoch": 2.2386964747673654, + "grad_norm": 2.0606446266174316, + "learning_rate": 5.075614245080248e-05, + "loss": 0.0948, + "step": 57980 + }, + { + "epoch": 2.2390825900613924, + "grad_norm": 0.08911284059286118, + "learning_rate": 5.073040143120069e-05, + "loss": 0.1171, + "step": 57990 + }, + { + "epoch": 2.239468705355419, + "grad_norm": 0.05132399871945381, + "learning_rate": 5.07046604115989e-05, + "loss": 0.1201, + "step": 58000 + }, + { + "epoch": 2.239854820649446, + "grad_norm": 0.5799759030342102, + "learning_rate": 5.0678919391997116e-05, + "loss": 0.1702, + "step": 58010 + }, + { + "epoch": 2.240240935943473, + "grad_norm": 1.5331569910049438, + "learning_rate": 5.0653178372395336e-05, + "loss": 0.2916, + "step": 58020 + }, + { + "epoch": 2.2406270512374995, + "grad_norm": 0.31285667419433594, + "learning_rate": 5.062743735279355e-05, + "loss": 0.1659, + "step": 58030 + }, + { + "epoch": 2.2410131665315265, + "grad_norm": 1.9137883186340332, + "learning_rate": 5.060169633319176e-05, + "loss": 0.0994, + "step": 58040 + }, + { + "epoch": 2.241399281825553, + "grad_norm": 0.0040522972121834755, + "learning_rate": 5.0575955313589976e-05, + "loss": 0.1047, + "step": 58050 + }, + { + "epoch": 2.24178539711958, + "grad_norm": 1.4532781839370728, + "learning_rate": 5.055021429398818e-05, + "loss": 0.1351, + "step": 58060 + }, + { + "epoch": 2.2421715124136066, + "grad_norm": 1.1458393335342407, + "learning_rate": 5.0524473274386396e-05, + "loss": 0.0966, + "step": 58070 + }, + { + "epoch": 2.2425576277076336, + "grad_norm": 0.4871302545070648, + "learning_rate": 5.049873225478462e-05, + "loss": 0.2297, + "step": 58080 + }, + { 
+ "epoch": 2.24294374300166, + "grad_norm": 0.8895847201347351, + "learning_rate": 5.047299123518283e-05, + "loss": 0.1101, + "step": 58090 + }, + { + "epoch": 2.243329858295687, + "grad_norm": 1.5819259881973267, + "learning_rate": 5.044725021558104e-05, + "loss": 0.165, + "step": 58100 + }, + { + "epoch": 2.243715973589714, + "grad_norm": 0.4520101249217987, + "learning_rate": 5.0421509195979256e-05, + "loss": 0.2857, + "step": 58110 + }, + { + "epoch": 2.2441020888837406, + "grad_norm": 0.6979352235794067, + "learning_rate": 5.039576817637747e-05, + "loss": 0.1135, + "step": 58120 + }, + { + "epoch": 2.2444882041777676, + "grad_norm": 0.10534228384494781, + "learning_rate": 5.0370027156775676e-05, + "loss": 0.1788, + "step": 58130 + }, + { + "epoch": 2.244874319471794, + "grad_norm": 1.593078851699829, + "learning_rate": 5.034428613717389e-05, + "loss": 0.0948, + "step": 58140 + }, + { + "epoch": 2.245260434765821, + "grad_norm": 0.7897083163261414, + "learning_rate": 5.0318545117572116e-05, + "loss": 0.1155, + "step": 58150 + }, + { + "epoch": 2.2456465500598477, + "grad_norm": 0.17938394844532013, + "learning_rate": 5.029280409797032e-05, + "loss": 0.246, + "step": 58160 + }, + { + "epoch": 2.2460326653538747, + "grad_norm": 0.9242120385169983, + "learning_rate": 5.0267063078368536e-05, + "loss": 0.2686, + "step": 58170 + }, + { + "epoch": 2.2464187806479012, + "grad_norm": 0.46744218468666077, + "learning_rate": 5.024132205876675e-05, + "loss": 0.105, + "step": 58180 + }, + { + "epoch": 2.2468048959419282, + "grad_norm": 1.0429635047912598, + "learning_rate": 5.021558103916496e-05, + "loss": 0.0922, + "step": 58190 + }, + { + "epoch": 2.247191011235955, + "grad_norm": 2.889759063720703, + "learning_rate": 5.018984001956317e-05, + "loss": 0.2586, + "step": 58200 + }, + { + "epoch": 2.2475771265299818, + "grad_norm": 1.0298150777816772, + "learning_rate": 5.0164098999961396e-05, + "loss": 0.1373, + "step": 58210 + }, + { + "epoch": 2.2479632418240088, + "grad_norm": 0.14992554485797882, + "learning_rate": 5.013835798035961e-05, + "loss": 0.2644, + "step": 58220 + }, + { + "epoch": 2.2483493571180353, + "grad_norm": 0.8929703831672668, + "learning_rate": 5.0112616960757816e-05, + "loss": 0.2109, + "step": 58230 + }, + { + "epoch": 2.2487354724120623, + "grad_norm": 0.8829396367073059, + "learning_rate": 5.008687594115603e-05, + "loss": 0.2353, + "step": 58240 + }, + { + "epoch": 2.249121587706089, + "grad_norm": 0.3709293305873871, + "learning_rate": 5.006113492155424e-05, + "loss": 0.099, + "step": 58250 + }, + { + "epoch": 2.249507703000116, + "grad_norm": 0.17572759091854095, + "learning_rate": 5.0035393901952456e-05, + "loss": 0.1161, + "step": 58260 + }, + { + "epoch": 2.249893818294143, + "grad_norm": 0.3241714537143707, + "learning_rate": 5.0009652882350676e-05, + "loss": 0.2081, + "step": 58270 + }, + { + "epoch": 2.2502799335881694, + "grad_norm": 0.5595920085906982, + "learning_rate": 4.998391186274888e-05, + "loss": 0.2376, + "step": 58280 + }, + { + "epoch": 2.2506660488821963, + "grad_norm": 0.8801298141479492, + "learning_rate": 4.99581708431471e-05, + "loss": 0.1423, + "step": 58290 + }, + { + "epoch": 2.251052164176223, + "grad_norm": 1.4857895374298096, + "learning_rate": 4.9932429823545316e-05, + "loss": 0.169, + "step": 58300 + }, + { + "epoch": 2.25143827947025, + "grad_norm": 1.0327515602111816, + "learning_rate": 4.990668880394352e-05, + "loss": 0.1127, + "step": 58310 + }, + { + "epoch": 2.2518243947642764, + "grad_norm": 0.19778093695640564, + "learning_rate": 
4.988094778434174e-05, + "loss": 0.2655, + "step": 58320 + }, + { + "epoch": 2.2522105100583034, + "grad_norm": 1.3672188520431519, + "learning_rate": 4.9855206764739956e-05, + "loss": 0.1252, + "step": 58330 + }, + { + "epoch": 2.2525966253523304, + "grad_norm": 3.6712214946746826, + "learning_rate": 4.982946574513816e-05, + "loss": 0.2721, + "step": 58340 + }, + { + "epoch": 2.252982740646357, + "grad_norm": 0.19810612499713898, + "learning_rate": 4.980372472553638e-05, + "loss": 0.1008, + "step": 58350 + }, + { + "epoch": 2.253368855940384, + "grad_norm": 0.5414086580276489, + "learning_rate": 4.9777983705934596e-05, + "loss": 0.1898, + "step": 58360 + }, + { + "epoch": 2.2537549712344105, + "grad_norm": 1.883710503578186, + "learning_rate": 4.975224268633281e-05, + "loss": 0.1841, + "step": 58370 + }, + { + "epoch": 2.2541410865284375, + "grad_norm": 0.3979630172252655, + "learning_rate": 4.972650166673102e-05, + "loss": 0.1712, + "step": 58380 + }, + { + "epoch": 2.254527201822464, + "grad_norm": 1.2606881856918335, + "learning_rate": 4.9700760647129236e-05, + "loss": 0.1772, + "step": 58390 + }, + { + "epoch": 2.254913317116491, + "grad_norm": 0.6021280288696289, + "learning_rate": 4.967501962752745e-05, + "loss": 0.1662, + "step": 58400 + }, + { + "epoch": 2.255299432410518, + "grad_norm": 0.4324108362197876, + "learning_rate": 4.964927860792566e-05, + "loss": 0.138, + "step": 58410 + }, + { + "epoch": 2.2556855477045445, + "grad_norm": 1.147596001625061, + "learning_rate": 4.9623537588323875e-05, + "loss": 0.1956, + "step": 58420 + }, + { + "epoch": 2.2560716629985715, + "grad_norm": 2.516636371612549, + "learning_rate": 4.959779656872209e-05, + "loss": 0.2031, + "step": 58430 + }, + { + "epoch": 2.256457778292598, + "grad_norm": 1.1109521389007568, + "learning_rate": 4.95720555491203e-05, + "loss": 0.2845, + "step": 58440 + }, + { + "epoch": 2.256843893586625, + "grad_norm": 0.3227555453777313, + "learning_rate": 4.9546314529518515e-05, + "loss": 0.0596, + "step": 58450 + }, + { + "epoch": 2.2572300088806516, + "grad_norm": 2.5064280033111572, + "learning_rate": 4.952057350991673e-05, + "loss": 0.289, + "step": 58460 + }, + { + "epoch": 2.2576161241746786, + "grad_norm": 1.0245225429534912, + "learning_rate": 4.949483249031495e-05, + "loss": 0.1458, + "step": 58470 + }, + { + "epoch": 2.2580022394687056, + "grad_norm": 0.058567408472299576, + "learning_rate": 4.9469091470713155e-05, + "loss": 0.2719, + "step": 58480 + }, + { + "epoch": 2.258388354762732, + "grad_norm": 2.1362061500549316, + "learning_rate": 4.944335045111137e-05, + "loss": 0.3814, + "step": 58490 + }, + { + "epoch": 2.258774470056759, + "grad_norm": 0.363843709230423, + "learning_rate": 4.941760943150959e-05, + "loss": 0.0669, + "step": 58500 + }, + { + "epoch": 2.2591605853507857, + "grad_norm": 0.1753295511007309, + "learning_rate": 4.93918684119078e-05, + "loss": 0.1246, + "step": 58510 + }, + { + "epoch": 2.2595467006448127, + "grad_norm": 1.6673377752304077, + "learning_rate": 4.936612739230601e-05, + "loss": 0.2781, + "step": 58520 + }, + { + "epoch": 2.259932815938839, + "grad_norm": 3.1135804653167725, + "learning_rate": 4.934038637270422e-05, + "loss": 0.1951, + "step": 58530 + }, + { + "epoch": 2.260318931232866, + "grad_norm": 0.8234933614730835, + "learning_rate": 4.931464535310244e-05, + "loss": 0.11, + "step": 58540 + }, + { + "epoch": 2.2607050465268927, + "grad_norm": 1.0099560022354126, + "learning_rate": 4.928890433350065e-05, + "loss": 0.2741, + "step": 58550 + }, + { + "epoch": 
2.2610911618209197, + "grad_norm": 2.2589969635009766, + "learning_rate": 4.926316331389886e-05, + "loss": 0.1243, + "step": 58560 + }, + { + "epoch": 2.2614772771149463, + "grad_norm": 0.5491199493408203, + "learning_rate": 4.923742229429708e-05, + "loss": 0.1302, + "step": 58570 + }, + { + "epoch": 2.2618633924089733, + "grad_norm": 0.9286119937896729, + "learning_rate": 4.9211681274695295e-05, + "loss": 0.1047, + "step": 58580 + }, + { + "epoch": 2.2622495077030003, + "grad_norm": 0.6178199052810669, + "learning_rate": 4.91859402550935e-05, + "loss": 0.1143, + "step": 58590 + }, + { + "epoch": 2.262635622997027, + "grad_norm": 1.6250818967819214, + "learning_rate": 4.916019923549172e-05, + "loss": 0.2461, + "step": 58600 + }, + { + "epoch": 2.263021738291054, + "grad_norm": 1.1366840600967407, + "learning_rate": 4.9134458215889935e-05, + "loss": 0.2128, + "step": 58610 + }, + { + "epoch": 2.2634078535850803, + "grad_norm": 0.38859716057777405, + "learning_rate": 4.910871719628815e-05, + "loss": 0.1476, + "step": 58620 + }, + { + "epoch": 2.2637939688791073, + "grad_norm": 0.02930479310452938, + "learning_rate": 4.908297617668636e-05, + "loss": 0.1901, + "step": 58630 + }, + { + "epoch": 2.264180084173134, + "grad_norm": 1.4426459074020386, + "learning_rate": 4.9057235157084575e-05, + "loss": 0.1736, + "step": 58640 + }, + { + "epoch": 2.264566199467161, + "grad_norm": 1.103959321975708, + "learning_rate": 4.903149413748279e-05, + "loss": 0.2207, + "step": 58650 + }, + { + "epoch": 2.264952314761188, + "grad_norm": 3.1351921558380127, + "learning_rate": 4.9005753117881e-05, + "loss": 0.394, + "step": 58660 + }, + { + "epoch": 2.2653384300552144, + "grad_norm": 0.33870574831962585, + "learning_rate": 4.8980012098279215e-05, + "loss": 0.1958, + "step": 58670 + }, + { + "epoch": 2.2657245453492414, + "grad_norm": 0.08599444478750229, + "learning_rate": 4.895427107867743e-05, + "loss": 0.0842, + "step": 58680 + }, + { + "epoch": 2.266110660643268, + "grad_norm": 0.7150046229362488, + "learning_rate": 4.892853005907564e-05, + "loss": 0.1917, + "step": 58690 + }, + { + "epoch": 2.266496775937295, + "grad_norm": 1.561062216758728, + "learning_rate": 4.8902789039473855e-05, + "loss": 0.2327, + "step": 58700 + }, + { + "epoch": 2.2668828912313215, + "grad_norm": 1.3899431228637695, + "learning_rate": 4.887704801987207e-05, + "loss": 0.1456, + "step": 58710 + }, + { + "epoch": 2.2672690065253485, + "grad_norm": 0.5647567510604858, + "learning_rate": 4.885130700027029e-05, + "loss": 0.2036, + "step": 58720 + }, + { + "epoch": 2.2676551218193755, + "grad_norm": 0.2155967652797699, + "learning_rate": 4.8825565980668495e-05, + "loss": 0.1159, + "step": 58730 + }, + { + "epoch": 2.268041237113402, + "grad_norm": 1.0128939151763916, + "learning_rate": 4.879982496106671e-05, + "loss": 0.1342, + "step": 58740 + }, + { + "epoch": 2.268427352407429, + "grad_norm": 1.079142689704895, + "learning_rate": 4.877408394146493e-05, + "loss": 0.1689, + "step": 58750 + }, + { + "epoch": 2.2688134677014555, + "grad_norm": 1.2162476778030396, + "learning_rate": 4.874834292186314e-05, + "loss": 0.2256, + "step": 58760 + }, + { + "epoch": 2.2691995829954825, + "grad_norm": 1.6972836256027222, + "learning_rate": 4.872260190226135e-05, + "loss": 0.1517, + "step": 58770 + }, + { + "epoch": 2.269585698289509, + "grad_norm": 1.4847822189331055, + "learning_rate": 4.869686088265956e-05, + "loss": 0.2296, + "step": 58780 + }, + { + "epoch": 2.269971813583536, + "grad_norm": 1.7321871519088745, + "learning_rate": 
4.867111986305778e-05, + "loss": 0.2396, + "step": 58790 + }, + { + "epoch": 2.270357928877563, + "grad_norm": 1.468248724937439, + "learning_rate": 4.864537884345599e-05, + "loss": 0.1501, + "step": 58800 + }, + { + "epoch": 2.2707440441715896, + "grad_norm": 1.125684380531311, + "learning_rate": 4.86196378238542e-05, + "loss": 0.2444, + "step": 58810 + }, + { + "epoch": 2.2711301594656166, + "grad_norm": 2.3958170413970947, + "learning_rate": 4.859389680425242e-05, + "loss": 0.3088, + "step": 58820 + }, + { + "epoch": 2.271516274759643, + "grad_norm": 0.8419416546821594, + "learning_rate": 4.8568155784650635e-05, + "loss": 0.1215, + "step": 58830 + }, + { + "epoch": 2.27190239005367, + "grad_norm": 0.3124147057533264, + "learning_rate": 4.854241476504884e-05, + "loss": 0.2069, + "step": 58840 + }, + { + "epoch": 2.2722885053476967, + "grad_norm": 0.6150888204574585, + "learning_rate": 4.851667374544706e-05, + "loss": 0.0483, + "step": 58850 + }, + { + "epoch": 2.2726746206417237, + "grad_norm": 0.7708920836448669, + "learning_rate": 4.8490932725845275e-05, + "loss": 0.2787, + "step": 58860 + }, + { + "epoch": 2.2730607359357506, + "grad_norm": 1.123910665512085, + "learning_rate": 4.846519170624348e-05, + "loss": 0.1875, + "step": 58870 + }, + { + "epoch": 2.273446851229777, + "grad_norm": 1.7842246294021606, + "learning_rate": 4.84394506866417e-05, + "loss": 0.2003, + "step": 58880 + }, + { + "epoch": 2.273832966523804, + "grad_norm": 0.09984418004751205, + "learning_rate": 4.8413709667039915e-05, + "loss": 0.0445, + "step": 58890 + }, + { + "epoch": 2.2742190818178307, + "grad_norm": 0.9539859890937805, + "learning_rate": 4.838796864743813e-05, + "loss": 0.1308, + "step": 58900 + }, + { + "epoch": 2.2746051971118577, + "grad_norm": 2.2655584812164307, + "learning_rate": 4.836222762783634e-05, + "loss": 0.1728, + "step": 58910 + }, + { + "epoch": 2.2749913124058843, + "grad_norm": 0.8873695731163025, + "learning_rate": 4.8336486608234555e-05, + "loss": 0.2559, + "step": 58920 + }, + { + "epoch": 2.2753774276999112, + "grad_norm": 0.6130178570747375, + "learning_rate": 4.831074558863277e-05, + "loss": 0.0859, + "step": 58930 + }, + { + "epoch": 2.2757635429939382, + "grad_norm": 2.61225962638855, + "learning_rate": 4.828500456903098e-05, + "loss": 0.1612, + "step": 58940 + }, + { + "epoch": 2.276149658287965, + "grad_norm": 1.187378168106079, + "learning_rate": 4.8259263549429195e-05, + "loss": 0.1222, + "step": 58950 + }, + { + "epoch": 2.2765357735819918, + "grad_norm": 0.3115352392196655, + "learning_rate": 4.823352252982741e-05, + "loss": 0.2081, + "step": 58960 + }, + { + "epoch": 2.2769218888760183, + "grad_norm": 1.1406041383743286, + "learning_rate": 4.820778151022563e-05, + "loss": 0.1137, + "step": 58970 + }, + { + "epoch": 2.2773080041700453, + "grad_norm": 2.415290355682373, + "learning_rate": 4.8182040490623834e-05, + "loss": 0.2498, + "step": 58980 + }, + { + "epoch": 2.277694119464072, + "grad_norm": 0.7312545776367188, + "learning_rate": 4.815629947102205e-05, + "loss": 0.2019, + "step": 58990 + }, + { + "epoch": 2.278080234758099, + "grad_norm": 0.3095935583114624, + "learning_rate": 4.813055845142027e-05, + "loss": 0.0833, + "step": 59000 + }, + { + "epoch": 2.2784663500521254, + "grad_norm": 2.551358938217163, + "learning_rate": 4.8104817431818474e-05, + "loss": 0.1599, + "step": 59010 + }, + { + "epoch": 2.2788524653461524, + "grad_norm": 0.8552582859992981, + "learning_rate": 4.807907641221669e-05, + "loss": 0.1445, + "step": 59020 + }, + { + "epoch": 
2.279238580640179, + "grad_norm": 0.5667589902877808, + "learning_rate": 4.80533353926149e-05, + "loss": 0.1746, + "step": 59030 + }, + { + "epoch": 2.279624695934206, + "grad_norm": 1.4083415269851685, + "learning_rate": 4.802759437301312e-05, + "loss": 0.1645, + "step": 59040 + }, + { + "epoch": 2.280010811228233, + "grad_norm": 0.04097180813550949, + "learning_rate": 4.800185335341133e-05, + "loss": 0.1898, + "step": 59050 + }, + { + "epoch": 2.2803969265222594, + "grad_norm": 0.019375400617718697, + "learning_rate": 4.797611233380954e-05, + "loss": 0.1723, + "step": 59060 + }, + { + "epoch": 2.2807830418162864, + "grad_norm": 1.4372104406356812, + "learning_rate": 4.795037131420776e-05, + "loss": 0.1243, + "step": 59070 + }, + { + "epoch": 2.281169157110313, + "grad_norm": 0.9807009696960449, + "learning_rate": 4.7924630294605974e-05, + "loss": 0.2997, + "step": 59080 + }, + { + "epoch": 2.28155527240434, + "grad_norm": 0.48975950479507446, + "learning_rate": 4.789888927500418e-05, + "loss": 0.1613, + "step": 59090 + }, + { + "epoch": 2.2819413876983665, + "grad_norm": 2.779517412185669, + "learning_rate": 4.78731482554024e-05, + "loss": 0.2774, + "step": 59100 + }, + { + "epoch": 2.2823275029923935, + "grad_norm": 1.5717260837554932, + "learning_rate": 4.7847407235800614e-05, + "loss": 0.1473, + "step": 59110 + }, + { + "epoch": 2.2827136182864205, + "grad_norm": 0.19590778648853302, + "learning_rate": 4.782166621619882e-05, + "loss": 0.2223, + "step": 59120 + }, + { + "epoch": 2.283099733580447, + "grad_norm": 1.9272565841674805, + "learning_rate": 4.779592519659704e-05, + "loss": 0.1101, + "step": 59130 + }, + { + "epoch": 2.283485848874474, + "grad_norm": 1.9476497173309326, + "learning_rate": 4.7770184176995254e-05, + "loss": 0.1627, + "step": 59140 + }, + { + "epoch": 2.2838719641685006, + "grad_norm": 4.072344779968262, + "learning_rate": 4.774444315739347e-05, + "loss": 0.2169, + "step": 59150 + }, + { + "epoch": 2.2842580794625276, + "grad_norm": 2.180009603500366, + "learning_rate": 4.771870213779168e-05, + "loss": 0.1348, + "step": 59160 + }, + { + "epoch": 2.284644194756554, + "grad_norm": 1.6288330554962158, + "learning_rate": 4.7692961118189894e-05, + "loss": 0.157, + "step": 59170 + }, + { + "epoch": 2.285030310050581, + "grad_norm": 0.8936790227890015, + "learning_rate": 4.766722009858811e-05, + "loss": 0.2966, + "step": 59180 + }, + { + "epoch": 2.285416425344608, + "grad_norm": 0.34459662437438965, + "learning_rate": 4.764147907898632e-05, + "loss": 0.2497, + "step": 59190 + }, + { + "epoch": 2.2858025406386346, + "grad_norm": 1.2364985942840576, + "learning_rate": 4.7615738059384534e-05, + "loss": 0.1951, + "step": 59200 + }, + { + "epoch": 2.2861886559326616, + "grad_norm": 0.43685224652290344, + "learning_rate": 4.758999703978275e-05, + "loss": 0.1707, + "step": 59210 + }, + { + "epoch": 2.286574771226688, + "grad_norm": 1.5797430276870728, + "learning_rate": 4.756425602018096e-05, + "loss": 0.1328, + "step": 59220 + }, + { + "epoch": 2.286960886520715, + "grad_norm": 1.0080262422561646, + "learning_rate": 4.7538515000579174e-05, + "loss": 0.2743, + "step": 59230 + }, + { + "epoch": 2.2873470018147417, + "grad_norm": 2.561823844909668, + "learning_rate": 4.751277398097739e-05, + "loss": 0.3414, + "step": 59240 + }, + { + "epoch": 2.2877331171087687, + "grad_norm": 1.4492895603179932, + "learning_rate": 4.748703296137561e-05, + "loss": 0.1942, + "step": 59250 + }, + { + "epoch": 2.2881192324027957, + "grad_norm": 2.6348021030426025, + "learning_rate": 
4.7461291941773814e-05, + "loss": 0.2293, + "step": 59260 + }, + { + "epoch": 2.2885053476968222, + "grad_norm": 0.5275348424911499, + "learning_rate": 4.743555092217203e-05, + "loss": 0.1436, + "step": 59270 + }, + { + "epoch": 2.2888914629908492, + "grad_norm": 0.3320735991001129, + "learning_rate": 4.740980990257025e-05, + "loss": 0.3213, + "step": 59280 + }, + { + "epoch": 2.2892775782848758, + "grad_norm": 0.33580052852630615, + "learning_rate": 4.738406888296846e-05, + "loss": 0.0988, + "step": 59290 + }, + { + "epoch": 2.2896636935789028, + "grad_norm": 0.6209644079208374, + "learning_rate": 4.735832786336667e-05, + "loss": 0.0969, + "step": 59300 + }, + { + "epoch": 2.2900498088729293, + "grad_norm": 0.8504312038421631, + "learning_rate": 4.733258684376488e-05, + "loss": 0.1328, + "step": 59310 + }, + { + "epoch": 2.2904359241669563, + "grad_norm": 1.8116223812103271, + "learning_rate": 4.73068458241631e-05, + "loss": 0.2032, + "step": 59320 + }, + { + "epoch": 2.2908220394609833, + "grad_norm": 3.810727596282959, + "learning_rate": 4.728110480456131e-05, + "loss": 0.2162, + "step": 59330 + }, + { + "epoch": 2.29120815475501, + "grad_norm": 0.39549925923347473, + "learning_rate": 4.725536378495952e-05, + "loss": 0.1421, + "step": 59340 + }, + { + "epoch": 2.291594270049037, + "grad_norm": 0.6922689080238342, + "learning_rate": 4.722962276535774e-05, + "loss": 0.23, + "step": 59350 + }, + { + "epoch": 2.2919803853430634, + "grad_norm": 2.8557536602020264, + "learning_rate": 4.7203881745755954e-05, + "loss": 0.2824, + "step": 59360 + }, + { + "epoch": 2.2923665006370904, + "grad_norm": 1.2936065196990967, + "learning_rate": 4.717814072615416e-05, + "loss": 0.2515, + "step": 59370 + }, + { + "epoch": 2.292752615931117, + "grad_norm": 0.17199493944644928, + "learning_rate": 4.715239970655238e-05, + "loss": 0.1029, + "step": 59380 + }, + { + "epoch": 2.293138731225144, + "grad_norm": 0.06957222521305084, + "learning_rate": 4.7126658686950594e-05, + "loss": 0.1535, + "step": 59390 + }, + { + "epoch": 2.2935248465191704, + "grad_norm": 0.24227716028690338, + "learning_rate": 4.710091766734881e-05, + "loss": 0.2497, + "step": 59400 + }, + { + "epoch": 2.2939109618131974, + "grad_norm": 0.49250227212905884, + "learning_rate": 4.707517664774702e-05, + "loss": 0.1416, + "step": 59410 + }, + { + "epoch": 2.294297077107224, + "grad_norm": 0.2832399010658264, + "learning_rate": 4.7049435628145234e-05, + "loss": 0.1156, + "step": 59420 + }, + { + "epoch": 2.294683192401251, + "grad_norm": 0.6916882395744324, + "learning_rate": 4.702369460854345e-05, + "loss": 0.315, + "step": 59430 + }, + { + "epoch": 2.295069307695278, + "grad_norm": 2.099567174911499, + "learning_rate": 4.699795358894166e-05, + "loss": 0.2208, + "step": 59440 + }, + { + "epoch": 2.2954554229893045, + "grad_norm": 0.9960312843322754, + "learning_rate": 4.6972212569339874e-05, + "loss": 0.2152, + "step": 59450 + }, + { + "epoch": 2.2958415382833315, + "grad_norm": 0.8338409662246704, + "learning_rate": 4.694647154973809e-05, + "loss": 0.1238, + "step": 59460 + }, + { + "epoch": 2.296227653577358, + "grad_norm": 1.1573169231414795, + "learning_rate": 4.69207305301363e-05, + "loss": 0.1141, + "step": 59470 + }, + { + "epoch": 2.296613768871385, + "grad_norm": 0.639020562171936, + "learning_rate": 4.6894989510534514e-05, + "loss": 0.1143, + "step": 59480 + }, + { + "epoch": 2.2969998841654116, + "grad_norm": 1.2339776754379272, + "learning_rate": 4.686924849093273e-05, + "loss": 0.0832, + "step": 59490 + }, + { + "epoch": 
2.2973859994594386, + "grad_norm": 0.5958113074302673, + "learning_rate": 4.684350747133095e-05, + "loss": 0.1435, + "step": 59500 + }, + { + "epoch": 2.2977721147534655, + "grad_norm": 0.36563804745674133, + "learning_rate": 4.6817766451729154e-05, + "loss": 0.21, + "step": 59510 + }, + { + "epoch": 2.298158230047492, + "grad_norm": 0.8008506894111633, + "learning_rate": 4.679202543212737e-05, + "loss": 0.2525, + "step": 59520 + }, + { + "epoch": 2.298544345341519, + "grad_norm": 1.644982099533081, + "learning_rate": 4.676628441252559e-05, + "loss": 0.1489, + "step": 59530 + }, + { + "epoch": 2.2989304606355456, + "grad_norm": 0.8418545126914978, + "learning_rate": 4.6740543392923793e-05, + "loss": 0.1275, + "step": 59540 + }, + { + "epoch": 2.2993165759295726, + "grad_norm": 1.7769790887832642, + "learning_rate": 4.671480237332201e-05, + "loss": 0.2149, + "step": 59550 + }, + { + "epoch": 2.299702691223599, + "grad_norm": 1.3088769912719727, + "learning_rate": 4.668906135372022e-05, + "loss": 0.3798, + "step": 59560 + }, + { + "epoch": 2.300088806517626, + "grad_norm": 0.0827949270606041, + "learning_rate": 4.666332033411844e-05, + "loss": 0.2128, + "step": 59570 + }, + { + "epoch": 2.300474921811653, + "grad_norm": 0.7772637605667114, + "learning_rate": 4.663757931451665e-05, + "loss": 0.1731, + "step": 59580 + }, + { + "epoch": 2.3008610371056797, + "grad_norm": 3.8065452575683594, + "learning_rate": 4.661183829491486e-05, + "loss": 0.3464, + "step": 59590 + }, + { + "epoch": 2.3012471523997067, + "grad_norm": 0.10512294620275497, + "learning_rate": 4.658609727531308e-05, + "loss": 0.1584, + "step": 59600 + }, + { + "epoch": 2.301633267693733, + "grad_norm": 1.227293848991394, + "learning_rate": 4.6560356255711293e-05, + "loss": 0.2381, + "step": 59610 + }, + { + "epoch": 2.30201938298776, + "grad_norm": 0.2790459394454956, + "learning_rate": 4.65346152361095e-05, + "loss": 0.162, + "step": 59620 + }, + { + "epoch": 2.3024054982817868, + "grad_norm": 0.571444034576416, + "learning_rate": 4.650887421650772e-05, + "loss": 0.1354, + "step": 59630 + }, + { + "epoch": 2.3027916135758137, + "grad_norm": 0.6388635635375977, + "learning_rate": 4.648313319690593e-05, + "loss": 0.0969, + "step": 59640 + }, + { + "epoch": 2.3031777288698407, + "grad_norm": 3.020249843597412, + "learning_rate": 4.645739217730414e-05, + "loss": 0.15, + "step": 59650 + }, + { + "epoch": 2.3035638441638673, + "grad_norm": 1.6712716817855835, + "learning_rate": 4.643165115770236e-05, + "loss": 0.1445, + "step": 59660 + }, + { + "epoch": 2.3039499594578943, + "grad_norm": 0.4937114119529724, + "learning_rate": 4.640591013810057e-05, + "loss": 0.0823, + "step": 59670 + }, + { + "epoch": 2.304336074751921, + "grad_norm": 1.7938934564590454, + "learning_rate": 4.6380169118498787e-05, + "loss": 0.164, + "step": 59680 + }, + { + "epoch": 2.304722190045948, + "grad_norm": 1.9819937944412231, + "learning_rate": 4.6354428098897e-05, + "loss": 0.1753, + "step": 59690 + }, + { + "epoch": 2.3051083053399744, + "grad_norm": 1.2775839567184448, + "learning_rate": 4.632868707929521e-05, + "loss": 0.1875, + "step": 59700 + }, + { + "epoch": 2.3054944206340013, + "grad_norm": 0.775601863861084, + "learning_rate": 4.6302946059693427e-05, + "loss": 0.0737, + "step": 59710 + }, + { + "epoch": 2.3058805359280283, + "grad_norm": 0.9071961045265198, + "learning_rate": 4.627720504009164e-05, + "loss": 0.1632, + "step": 59720 + }, + { + "epoch": 2.306266651222055, + "grad_norm": 1.4232882261276245, + "learning_rate": 4.625146402048985e-05, + 
"loss": 0.2169, + "step": 59730 + }, + { + "epoch": 2.306652766516082, + "grad_norm": 0.5946634411811829, + "learning_rate": 4.6225723000888066e-05, + "loss": 0.1919, + "step": 59740 + }, + { + "epoch": 2.3070388818101084, + "grad_norm": 0.14276131987571716, + "learning_rate": 4.6199981981286287e-05, + "loss": 0.1415, + "step": 59750 + }, + { + "epoch": 2.3074249971041354, + "grad_norm": 1.5616459846496582, + "learning_rate": 4.617424096168449e-05, + "loss": 0.1668, + "step": 59760 + }, + { + "epoch": 2.307811112398162, + "grad_norm": 0.34254691004753113, + "learning_rate": 4.6148499942082706e-05, + "loss": 0.2122, + "step": 59770 + }, + { + "epoch": 2.308197227692189, + "grad_norm": 0.34764161705970764, + "learning_rate": 4.6122758922480926e-05, + "loss": 0.1604, + "step": 59780 + }, + { + "epoch": 2.308583342986216, + "grad_norm": 3.1887755393981934, + "learning_rate": 4.609701790287913e-05, + "loss": 0.124, + "step": 59790 + }, + { + "epoch": 2.3089694582802425, + "grad_norm": 0.8155665397644043, + "learning_rate": 4.6071276883277346e-05, + "loss": 0.1095, + "step": 59800 + }, + { + "epoch": 2.3093555735742695, + "grad_norm": 0.2734861373901367, + "learning_rate": 4.6045535863675566e-05, + "loss": 0.2389, + "step": 59810 + }, + { + "epoch": 2.309741688868296, + "grad_norm": 0.5696996450424194, + "learning_rate": 4.601979484407378e-05, + "loss": 0.0787, + "step": 59820 + }, + { + "epoch": 2.310127804162323, + "grad_norm": 0.49777019023895264, + "learning_rate": 4.5994053824471986e-05, + "loss": 0.1815, + "step": 59830 + }, + { + "epoch": 2.3105139194563495, + "grad_norm": 0.001843929523602128, + "learning_rate": 4.59683128048702e-05, + "loss": 0.0834, + "step": 59840 + }, + { + "epoch": 2.3109000347503765, + "grad_norm": 0.1088326945900917, + "learning_rate": 4.594257178526842e-05, + "loss": 0.1821, + "step": 59850 + }, + { + "epoch": 2.311286150044403, + "grad_norm": 0.4867718815803528, + "learning_rate": 4.591683076566663e-05, + "loss": 0.2175, + "step": 59860 + }, + { + "epoch": 2.31167226533843, + "grad_norm": 0.759501576423645, + "learning_rate": 4.589108974606484e-05, + "loss": 0.1041, + "step": 59870 + }, + { + "epoch": 2.3120583806324566, + "grad_norm": 1.3260136842727661, + "learning_rate": 4.586534872646306e-05, + "loss": 0.2017, + "step": 59880 + }, + { + "epoch": 2.3124444959264836, + "grad_norm": 3.664853572845459, + "learning_rate": 4.583960770686127e-05, + "loss": 0.2322, + "step": 59890 + }, + { + "epoch": 2.3128306112205106, + "grad_norm": 0.9447748064994812, + "learning_rate": 4.581386668725948e-05, + "loss": 0.1124, + "step": 59900 + }, + { + "epoch": 2.313216726514537, + "grad_norm": 2.8684070110321045, + "learning_rate": 4.57881256676577e-05, + "loss": 0.1271, + "step": 59910 + }, + { + "epoch": 2.313602841808564, + "grad_norm": 2.4595460891723633, + "learning_rate": 4.576238464805591e-05, + "loss": 0.1695, + "step": 59920 + }, + { + "epoch": 2.3139889571025907, + "grad_norm": 0.8317721486091614, + "learning_rate": 4.5736643628454126e-05, + "loss": 0.1341, + "step": 59930 + }, + { + "epoch": 2.3143750723966177, + "grad_norm": 1.8924741744995117, + "learning_rate": 4.571090260885234e-05, + "loss": 0.3389, + "step": 59940 + }, + { + "epoch": 2.314761187690644, + "grad_norm": 0.22777517139911652, + "learning_rate": 4.568516158925055e-05, + "loss": 0.1406, + "step": 59950 + }, + { + "epoch": 2.315147302984671, + "grad_norm": 1.9150850772857666, + "learning_rate": 4.5659420569648766e-05, + "loss": 0.1615, + "step": 59960 + }, + { + "epoch": 2.315533418278698, + 
"grad_norm": 1.361603856086731, + "learning_rate": 4.563367955004698e-05, + "loss": 0.1616, + "step": 59970 + }, + { + "epoch": 2.3159195335727247, + "grad_norm": 1.1094087362289429, + "learning_rate": 4.560793853044519e-05, + "loss": 0.1923, + "step": 59980 + }, + { + "epoch": 2.3163056488667517, + "grad_norm": 0.5429170727729797, + "learning_rate": 4.5582197510843406e-05, + "loss": 0.2126, + "step": 59990 + }, + { + "epoch": 2.3166917641607783, + "grad_norm": 0.8391672968864441, + "learning_rate": 4.555645649124162e-05, + "loss": 0.246, + "step": 60000 + }, + { + "epoch": 2.3170778794548053, + "grad_norm": 0.6325327157974243, + "learning_rate": 4.553071547163983e-05, + "loss": 0.1385, + "step": 60010 + }, + { + "epoch": 2.317463994748832, + "grad_norm": 0.4999966025352478, + "learning_rate": 4.5504974452038046e-05, + "loss": 0.2083, + "step": 60020 + }, + { + "epoch": 2.317850110042859, + "grad_norm": 2.6003923416137695, + "learning_rate": 4.5479233432436266e-05, + "loss": 0.1553, + "step": 60030 + }, + { + "epoch": 2.318236225336886, + "grad_norm": 2.107546091079712, + "learning_rate": 4.545349241283447e-05, + "loss": 0.2252, + "step": 60040 + }, + { + "epoch": 2.3186223406309123, + "grad_norm": 0.4469972550868988, + "learning_rate": 4.5427751393232686e-05, + "loss": 0.1913, + "step": 60050 + }, + { + "epoch": 2.3190084559249393, + "grad_norm": 1.0861988067626953, + "learning_rate": 4.5402010373630906e-05, + "loss": 0.16, + "step": 60060 + }, + { + "epoch": 2.319394571218966, + "grad_norm": 1.6397405862808228, + "learning_rate": 4.537626935402912e-05, + "loss": 0.1527, + "step": 60070 + }, + { + "epoch": 2.319780686512993, + "grad_norm": 2.6963932514190674, + "learning_rate": 4.5350528334427326e-05, + "loss": 0.2445, + "step": 60080 + }, + { + "epoch": 2.3201668018070194, + "grad_norm": 1.8489919900894165, + "learning_rate": 4.532478731482554e-05, + "loss": 0.1972, + "step": 60090 + }, + { + "epoch": 2.3205529171010464, + "grad_norm": 1.108892560005188, + "learning_rate": 4.529904629522376e-05, + "loss": 0.1301, + "step": 60100 + }, + { + "epoch": 2.3209390323950734, + "grad_norm": 1.8046364784240723, + "learning_rate": 4.5273305275621966e-05, + "loss": 0.184, + "step": 60110 + }, + { + "epoch": 2.3213251476891, + "grad_norm": 0.346176415681839, + "learning_rate": 4.524756425602018e-05, + "loss": 0.0723, + "step": 60120 + }, + { + "epoch": 2.321711262983127, + "grad_norm": 0.8505159020423889, + "learning_rate": 4.52218232364184e-05, + "loss": 0.1994, + "step": 60130 + }, + { + "epoch": 2.3220973782771535, + "grad_norm": 1.0866034030914307, + "learning_rate": 4.519608221681661e-05, + "loss": 0.097, + "step": 60140 + }, + { + "epoch": 2.3224834935711804, + "grad_norm": 2.6288974285125732, + "learning_rate": 4.517034119721482e-05, + "loss": 0.1419, + "step": 60150 + }, + { + "epoch": 2.322869608865207, + "grad_norm": 0.4779375195503235, + "learning_rate": 4.514460017761304e-05, + "loss": 0.0999, + "step": 60160 + }, + { + "epoch": 2.323255724159234, + "grad_norm": 1.1789641380310059, + "learning_rate": 4.511885915801125e-05, + "loss": 0.1184, + "step": 60170 + }, + { + "epoch": 2.323641839453261, + "grad_norm": 0.409001886844635, + "learning_rate": 4.5093118138409466e-05, + "loss": 0.1913, + "step": 60180 + }, + { + "epoch": 2.3240279547472875, + "grad_norm": 1.5853062868118286, + "learning_rate": 4.506737711880768e-05, + "loss": 0.1941, + "step": 60190 + }, + { + "epoch": 2.3244140700413145, + "grad_norm": 3.547631025314331, + "learning_rate": 4.504163609920589e-05, + "loss": 0.2272, + 
"step": 60200 + }, + { + "epoch": 2.324800185335341, + "grad_norm": 0.3084076941013336, + "learning_rate": 4.5015895079604106e-05, + "loss": 0.1543, + "step": 60210 + }, + { + "epoch": 2.325186300629368, + "grad_norm": 0.055422622710466385, + "learning_rate": 4.499015406000232e-05, + "loss": 0.1878, + "step": 60220 + }, + { + "epoch": 2.3255724159233946, + "grad_norm": 0.7743698954582214, + "learning_rate": 4.496441304040053e-05, + "loss": 0.2342, + "step": 60230 + }, + { + "epoch": 2.3259585312174216, + "grad_norm": 0.940558671951294, + "learning_rate": 4.4938672020798746e-05, + "loss": 0.3158, + "step": 60240 + }, + { + "epoch": 2.3263446465114486, + "grad_norm": 0.35941219329833984, + "learning_rate": 4.491293100119696e-05, + "loss": 0.15, + "step": 60250 + }, + { + "epoch": 2.326730761805475, + "grad_norm": 0.5877255201339722, + "learning_rate": 4.488718998159517e-05, + "loss": 0.2713, + "step": 60260 + }, + { + "epoch": 2.327116877099502, + "grad_norm": 1.0649851560592651, + "learning_rate": 4.4861448961993386e-05, + "loss": 0.1789, + "step": 60270 + }, + { + "epoch": 2.3275029923935286, + "grad_norm": 0.9025186896324158, + "learning_rate": 4.4835707942391606e-05, + "loss": 0.0883, + "step": 60280 + }, + { + "epoch": 2.3278891076875556, + "grad_norm": 0.08007670938968658, + "learning_rate": 4.480996692278981e-05, + "loss": 0.2019, + "step": 60290 + }, + { + "epoch": 2.328275222981582, + "grad_norm": 0.2300902009010315, + "learning_rate": 4.4784225903188025e-05, + "loss": 0.1448, + "step": 60300 + }, + { + "epoch": 2.328661338275609, + "grad_norm": 0.3912908732891083, + "learning_rate": 4.4758484883586246e-05, + "loss": 0.1869, + "step": 60310 + }, + { + "epoch": 2.3290474535696357, + "grad_norm": 0.5869823098182678, + "learning_rate": 4.473274386398445e-05, + "loss": 0.0829, + "step": 60320 + }, + { + "epoch": 2.3294335688636627, + "grad_norm": 0.5597032308578491, + "learning_rate": 4.4707002844382665e-05, + "loss": 0.1639, + "step": 60330 + }, + { + "epoch": 2.3298196841576893, + "grad_norm": 0.3138620853424072, + "learning_rate": 4.468126182478088e-05, + "loss": 0.1028, + "step": 60340 + }, + { + "epoch": 2.3302057994517162, + "grad_norm": 0.10838694870471954, + "learning_rate": 4.46555208051791e-05, + "loss": 0.1406, + "step": 60350 + }, + { + "epoch": 2.3305919147457432, + "grad_norm": 1.1807544231414795, + "learning_rate": 4.4629779785577305e-05, + "loss": 0.1183, + "step": 60360 + }, + { + "epoch": 2.3309780300397698, + "grad_norm": 0.2713087499141693, + "learning_rate": 4.460403876597552e-05, + "loss": 0.108, + "step": 60370 + }, + { + "epoch": 2.3313641453337968, + "grad_norm": 0.8787125945091248, + "learning_rate": 4.457829774637374e-05, + "loss": 0.2446, + "step": 60380 + }, + { + "epoch": 2.3317502606278233, + "grad_norm": 1.3905388116836548, + "learning_rate": 4.455255672677195e-05, + "loss": 0.1485, + "step": 60390 + }, + { + "epoch": 2.3321363759218503, + "grad_norm": 1.5408064126968384, + "learning_rate": 4.452681570717016e-05, + "loss": 0.2076, + "step": 60400 + }, + { + "epoch": 2.332522491215877, + "grad_norm": 0.034131214022636414, + "learning_rate": 4.450107468756838e-05, + "loss": 0.0905, + "step": 60410 + }, + { + "epoch": 2.332908606509904, + "grad_norm": 1.067578673362732, + "learning_rate": 4.447533366796659e-05, + "loss": 0.1221, + "step": 60420 + }, + { + "epoch": 2.333294721803931, + "grad_norm": 1.5126134157180786, + "learning_rate": 4.44495926483648e-05, + "loss": 0.5139, + "step": 60430 + }, + { + "epoch": 2.3336808370979574, + "grad_norm": 
3.148021697998047, + "learning_rate": 4.442385162876302e-05, + "loss": 0.2504, + "step": 60440 + }, + { + "epoch": 2.3340669523919844, + "grad_norm": 0.7882575392723083, + "learning_rate": 4.439811060916123e-05, + "loss": 0.2593, + "step": 60450 + }, + { + "epoch": 2.334453067686011, + "grad_norm": 1.124514102935791, + "learning_rate": 4.4372369589559445e-05, + "loss": 0.303, + "step": 60460 + }, + { + "epoch": 2.334839182980038, + "grad_norm": 0.064745232462883, + "learning_rate": 4.434662856995766e-05, + "loss": 0.1734, + "step": 60470 + }, + { + "epoch": 2.3352252982740644, + "grad_norm": 0.08392655104398727, + "learning_rate": 4.432088755035587e-05, + "loss": 0.1492, + "step": 60480 + }, + { + "epoch": 2.3356114135680914, + "grad_norm": 0.9123765230178833, + "learning_rate": 4.4295146530754085e-05, + "loss": 0.1689, + "step": 60490 + }, + { + "epoch": 2.3359975288621184, + "grad_norm": 1.0740617513656616, + "learning_rate": 4.42694055111523e-05, + "loss": 0.1954, + "step": 60500 + }, + { + "epoch": 2.336383644156145, + "grad_norm": 0.409637987613678, + "learning_rate": 4.424366449155051e-05, + "loss": 0.2164, + "step": 60510 + }, + { + "epoch": 2.336769759450172, + "grad_norm": 2.4724161624908447, + "learning_rate": 4.4217923471948725e-05, + "loss": 0.2736, + "step": 60520 + }, + { + "epoch": 2.3371558747441985, + "grad_norm": 1.966937780380249, + "learning_rate": 4.4192182452346945e-05, + "loss": 0.2463, + "step": 60530 + }, + { + "epoch": 2.3375419900382255, + "grad_norm": 1.0093505382537842, + "learning_rate": 4.416644143274515e-05, + "loss": 0.1524, + "step": 60540 + }, + { + "epoch": 2.337928105332252, + "grad_norm": 0.9283536076545715, + "learning_rate": 4.4140700413143365e-05, + "loss": 0.1174, + "step": 60550 + }, + { + "epoch": 2.338314220626279, + "grad_norm": 1.4851809740066528, + "learning_rate": 4.4114959393541585e-05, + "loss": 0.1088, + "step": 60560 + }, + { + "epoch": 2.338700335920306, + "grad_norm": 2.666454315185547, + "learning_rate": 4.408921837393979e-05, + "loss": 0.186, + "step": 60570 + }, + { + "epoch": 2.3390864512143326, + "grad_norm": 0.34516963362693787, + "learning_rate": 4.4063477354338005e-05, + "loss": 0.1364, + "step": 60580 + }, + { + "epoch": 2.3394725665083596, + "grad_norm": 1.697920560836792, + "learning_rate": 4.4037736334736225e-05, + "loss": 0.1708, + "step": 60590 + }, + { + "epoch": 2.339858681802386, + "grad_norm": 1.542663812637329, + "learning_rate": 4.401199531513444e-05, + "loss": 0.2926, + "step": 60600 + }, + { + "epoch": 2.340244797096413, + "grad_norm": 1.080894112586975, + "learning_rate": 4.3986254295532645e-05, + "loss": 0.1114, + "step": 60610 + }, + { + "epoch": 2.3406309123904396, + "grad_norm": 0.7464519739151001, + "learning_rate": 4.396051327593086e-05, + "loss": 0.2846, + "step": 60620 + }, + { + "epoch": 2.3410170276844666, + "grad_norm": 1.4161779880523682, + "learning_rate": 4.393477225632908e-05, + "loss": 0.1181, + "step": 60630 + }, + { + "epoch": 2.3414031429784936, + "grad_norm": 0.5969855785369873, + "learning_rate": 4.390903123672729e-05, + "loss": 0.2574, + "step": 60640 + }, + { + "epoch": 2.34178925827252, + "grad_norm": 2.8204824924468994, + "learning_rate": 4.38832902171255e-05, + "loss": 0.1496, + "step": 60650 + }, + { + "epoch": 2.342175373566547, + "grad_norm": 0.5998751521110535, + "learning_rate": 4.385754919752372e-05, + "loss": 0.1849, + "step": 60660 + }, + { + "epoch": 2.3425614888605737, + "grad_norm": 0.5183271169662476, + "learning_rate": 4.383180817792193e-05, + "loss": 0.1577, + "step": 60670 
+ }, + { + "epoch": 2.3429476041546007, + "grad_norm": 3.008211374282837, + "learning_rate": 4.380606715832014e-05, + "loss": 0.2106, + "step": 60680 + }, + { + "epoch": 2.3433337194486272, + "grad_norm": 0.026534082368016243, + "learning_rate": 4.378032613871836e-05, + "loss": 0.0859, + "step": 60690 + }, + { + "epoch": 2.343719834742654, + "grad_norm": 0.7964476943016052, + "learning_rate": 4.375458511911657e-05, + "loss": 0.2276, + "step": 60700 + }, + { + "epoch": 2.3441059500366808, + "grad_norm": 0.9255203008651733, + "learning_rate": 4.3728844099514785e-05, + "loss": 0.1278, + "step": 60710 + }, + { + "epoch": 2.3444920653307078, + "grad_norm": 0.06721694767475128, + "learning_rate": 4.3703103079913e-05, + "loss": 0.188, + "step": 60720 + }, + { + "epoch": 2.3448781806247343, + "grad_norm": 1.5369101762771606, + "learning_rate": 4.367736206031121e-05, + "loss": 0.2286, + "step": 60730 + }, + { + "epoch": 2.3452642959187613, + "grad_norm": 2.3550243377685547, + "learning_rate": 4.3651621040709425e-05, + "loss": 0.1924, + "step": 60740 + }, + { + "epoch": 2.3456504112127883, + "grad_norm": 1.436240553855896, + "learning_rate": 4.362588002110764e-05, + "loss": 0.179, + "step": 60750 + }, + { + "epoch": 2.346036526506815, + "grad_norm": 4.520357131958008, + "learning_rate": 4.360013900150585e-05, + "loss": 0.3386, + "step": 60760 + }, + { + "epoch": 2.346422641800842, + "grad_norm": 2.983982563018799, + "learning_rate": 4.3574397981904065e-05, + "loss": 0.178, + "step": 60770 + }, + { + "epoch": 2.3468087570948684, + "grad_norm": 0.429884672164917, + "learning_rate": 4.354865696230228e-05, + "loss": 0.0962, + "step": 60780 + }, + { + "epoch": 2.3471948723888953, + "grad_norm": 0.5854440927505493, + "learning_rate": 4.352291594270049e-05, + "loss": 0.1934, + "step": 60790 + }, + { + "epoch": 2.347580987682922, + "grad_norm": 1.087031602859497, + "learning_rate": 4.3497174923098705e-05, + "loss": 0.1191, + "step": 60800 + }, + { + "epoch": 2.347967102976949, + "grad_norm": 1.5751805305480957, + "learning_rate": 4.3471433903496925e-05, + "loss": 0.147, + "step": 60810 + }, + { + "epoch": 2.348353218270976, + "grad_norm": 0.03129373490810394, + "learning_rate": 4.344569288389513e-05, + "loss": 0.1388, + "step": 60820 + }, + { + "epoch": 2.3487393335650024, + "grad_norm": 1.380611777305603, + "learning_rate": 4.3419951864293344e-05, + "loss": 0.2517, + "step": 60830 + }, + { + "epoch": 2.3491254488590294, + "grad_norm": 0.5196431279182434, + "learning_rate": 4.3394210844691565e-05, + "loss": 0.1825, + "step": 60840 + }, + { + "epoch": 2.349511564153056, + "grad_norm": 0.729242205619812, + "learning_rate": 4.336846982508978e-05, + "loss": 0.1716, + "step": 60850 + }, + { + "epoch": 2.349897679447083, + "grad_norm": 0.4489123225212097, + "learning_rate": 4.3342728805487984e-05, + "loss": 0.1318, + "step": 60860 + }, + { + "epoch": 2.3502837947411095, + "grad_norm": 0.41065114736557007, + "learning_rate": 4.33169877858862e-05, + "loss": 0.1579, + "step": 60870 + }, + { + "epoch": 2.3506699100351365, + "grad_norm": 1.2845816612243652, + "learning_rate": 4.329124676628442e-05, + "loss": 0.1613, + "step": 60880 + }, + { + "epoch": 2.3510560253291635, + "grad_norm": 1.17366623878479, + "learning_rate": 4.3265505746682624e-05, + "loss": 0.147, + "step": 60890 + }, + { + "epoch": 2.35144214062319, + "grad_norm": 1.3020472526550293, + "learning_rate": 4.323976472708084e-05, + "loss": 0.284, + "step": 60900 + }, + { + "epoch": 2.351828255917217, + "grad_norm": 0.9030712842941284, + "learning_rate": 
4.321402370747906e-05, + "loss": 0.2745, + "step": 60910 + }, + { + "epoch": 2.3522143712112435, + "grad_norm": 0.5040395259857178, + "learning_rate": 4.318828268787727e-05, + "loss": 0.1001, + "step": 60920 + }, + { + "epoch": 2.3526004865052705, + "grad_norm": 0.2728300094604492, + "learning_rate": 4.316254166827548e-05, + "loss": 0.1126, + "step": 60930 + }, + { + "epoch": 2.352986601799297, + "grad_norm": 0.5255390405654907, + "learning_rate": 4.31368006486737e-05, + "loss": 0.1743, + "step": 60940 + }, + { + "epoch": 2.353372717093324, + "grad_norm": 0.05678205192089081, + "learning_rate": 4.311105962907191e-05, + "loss": 0.2214, + "step": 60950 + }, + { + "epoch": 2.353758832387351, + "grad_norm": 0.9271873235702515, + "learning_rate": 4.3085318609470124e-05, + "loss": 0.2263, + "step": 60960 + }, + { + "epoch": 2.3541449476813776, + "grad_norm": 1.0438083410263062, + "learning_rate": 4.305957758986834e-05, + "loss": 0.3506, + "step": 60970 + }, + { + "epoch": 2.3545310629754046, + "grad_norm": 1.231704831123352, + "learning_rate": 4.303383657026655e-05, + "loss": 0.2058, + "step": 60980 + }, + { + "epoch": 2.354917178269431, + "grad_norm": 1.4421464204788208, + "learning_rate": 4.3008095550664764e-05, + "loss": 0.182, + "step": 60990 + }, + { + "epoch": 2.355303293563458, + "grad_norm": 0.883007287979126, + "learning_rate": 4.298235453106298e-05, + "loss": 0.1714, + "step": 61000 + }, + { + "epoch": 2.3556894088574847, + "grad_norm": 0.49014022946357727, + "learning_rate": 4.295661351146119e-05, + "loss": 0.0846, + "step": 61010 + }, + { + "epoch": 2.3560755241515117, + "grad_norm": 1.3218421936035156, + "learning_rate": 4.2930872491859404e-05, + "loss": 0.0946, + "step": 61020 + }, + { + "epoch": 2.3564616394455387, + "grad_norm": 0.08953634649515152, + "learning_rate": 4.290513147225762e-05, + "loss": 0.2867, + "step": 61030 + }, + { + "epoch": 2.356847754739565, + "grad_norm": 1.4117354154586792, + "learning_rate": 4.287939045265583e-05, + "loss": 0.2493, + "step": 61040 + }, + { + "epoch": 2.357233870033592, + "grad_norm": 0.7844822406768799, + "learning_rate": 4.2853649433054044e-05, + "loss": 0.2385, + "step": 61050 + }, + { + "epoch": 2.3576199853276187, + "grad_norm": 0.0865604355931282, + "learning_rate": 4.2827908413452264e-05, + "loss": 0.0795, + "step": 61060 + }, + { + "epoch": 2.3580061006216457, + "grad_norm": 0.49625343084335327, + "learning_rate": 4.280216739385047e-05, + "loss": 0.0894, + "step": 61070 + }, + { + "epoch": 2.3583922159156723, + "grad_norm": 0.039102040231227875, + "learning_rate": 4.2776426374248684e-05, + "loss": 0.1134, + "step": 61080 + }, + { + "epoch": 2.3587783312096993, + "grad_norm": 0.6378281116485596, + "learning_rate": 4.2750685354646904e-05, + "loss": 0.2716, + "step": 61090 + }, + { + "epoch": 2.3591644465037263, + "grad_norm": 1.9550119638442993, + "learning_rate": 4.272494433504511e-05, + "loss": 0.2008, + "step": 61100 + }, + { + "epoch": 2.359550561797753, + "grad_norm": 0.18372145295143127, + "learning_rate": 4.2699203315443324e-05, + "loss": 0.1762, + "step": 61110 + }, + { + "epoch": 2.35993667709178, + "grad_norm": 0.24232423305511475, + "learning_rate": 4.2673462295841544e-05, + "loss": 0.3316, + "step": 61120 + }, + { + "epoch": 2.3603227923858063, + "grad_norm": 1.474071741104126, + "learning_rate": 4.264772127623976e-05, + "loss": 0.1938, + "step": 61130 + }, + { + "epoch": 2.3607089076798333, + "grad_norm": 0.43742164969444275, + "learning_rate": 4.2621980256637964e-05, + "loss": 0.1667, + "step": 61140 + }, + { + "epoch": 
2.36109502297386, + "grad_norm": 0.505805253982544, + "learning_rate": 4.259623923703618e-05, + "loss": 0.2649, + "step": 61150 + }, + { + "epoch": 2.361481138267887, + "grad_norm": 0.8793296813964844, + "learning_rate": 4.25704982174344e-05, + "loss": 0.1231, + "step": 61160 + }, + { + "epoch": 2.3618672535619134, + "grad_norm": 0.19883646070957184, + "learning_rate": 4.254475719783261e-05, + "loss": 0.163, + "step": 61170 + }, + { + "epoch": 2.3622533688559404, + "grad_norm": 0.4133305847644806, + "learning_rate": 4.251901617823082e-05, + "loss": 0.1632, + "step": 61180 + }, + { + "epoch": 2.362639484149967, + "grad_norm": 0.8530174493789673, + "learning_rate": 4.249327515862904e-05, + "loss": 0.0851, + "step": 61190 + }, + { + "epoch": 2.363025599443994, + "grad_norm": 1.6462198495864868, + "learning_rate": 4.246753413902725e-05, + "loss": 0.2051, + "step": 61200 + }, + { + "epoch": 2.363411714738021, + "grad_norm": 1.284153699874878, + "learning_rate": 4.244179311942546e-05, + "loss": 0.227, + "step": 61210 + }, + { + "epoch": 2.3637978300320475, + "grad_norm": 0.5583304166793823, + "learning_rate": 4.241605209982368e-05, + "loss": 0.0665, + "step": 61220 + }, + { + "epoch": 2.3641839453260745, + "grad_norm": 1.0726197957992554, + "learning_rate": 4.239031108022189e-05, + "loss": 0.1465, + "step": 61230 + }, + { + "epoch": 2.364570060620101, + "grad_norm": 0.15589381754398346, + "learning_rate": 4.2364570060620104e-05, + "loss": 0.1599, + "step": 61240 + }, + { + "epoch": 2.364956175914128, + "grad_norm": 1.9759862422943115, + "learning_rate": 4.233882904101832e-05, + "loss": 0.2904, + "step": 61250 + }, + { + "epoch": 2.3653422912081545, + "grad_norm": 0.20566493272781372, + "learning_rate": 4.231308802141653e-05, + "loss": 0.2447, + "step": 61260 + }, + { + "epoch": 2.3657284065021815, + "grad_norm": 0.33343741297721863, + "learning_rate": 4.2287347001814744e-05, + "loss": 0.2157, + "step": 61270 + }, + { + "epoch": 2.3661145217962085, + "grad_norm": 0.6890573501586914, + "learning_rate": 4.226160598221296e-05, + "loss": 0.1866, + "step": 61280 + }, + { + "epoch": 2.366500637090235, + "grad_norm": 0.1372109055519104, + "learning_rate": 4.223586496261117e-05, + "loss": 0.1851, + "step": 61290 + }, + { + "epoch": 2.366886752384262, + "grad_norm": 0.8812543749809265, + "learning_rate": 4.2210123943009384e-05, + "loss": 0.1186, + "step": 61300 + }, + { + "epoch": 2.3672728676782886, + "grad_norm": 0.7651077508926392, + "learning_rate": 4.2184382923407604e-05, + "loss": 0.1168, + "step": 61310 + }, + { + "epoch": 2.3676589829723156, + "grad_norm": 0.886715292930603, + "learning_rate": 4.215864190380581e-05, + "loss": 0.1403, + "step": 61320 + }, + { + "epoch": 2.368045098266342, + "grad_norm": 1.4525467157363892, + "learning_rate": 4.2132900884204024e-05, + "loss": 0.0951, + "step": 61330 + }, + { + "epoch": 2.368431213560369, + "grad_norm": 1.490551233291626, + "learning_rate": 4.2107159864602244e-05, + "loss": 0.1127, + "step": 61340 + }, + { + "epoch": 2.368817328854396, + "grad_norm": 1.7452077865600586, + "learning_rate": 4.208141884500045e-05, + "loss": 0.0958, + "step": 61350 + }, + { + "epoch": 2.3692034441484227, + "grad_norm": 1.6857271194458008, + "learning_rate": 4.2055677825398664e-05, + "loss": 0.1731, + "step": 61360 + }, + { + "epoch": 2.3695895594424496, + "grad_norm": 0.5354145765304565, + "learning_rate": 4.2029936805796884e-05, + "loss": 0.1051, + "step": 61370 + }, + { + "epoch": 2.369975674736476, + "grad_norm": 0.18171580135822296, + "learning_rate": 
4.20041957861951e-05, + "loss": 0.1761, + "step": 61380 + }, + { + "epoch": 2.370361790030503, + "grad_norm": 1.021549940109253, + "learning_rate": 4.1978454766593303e-05, + "loss": 0.1949, + "step": 61390 + }, + { + "epoch": 2.3707479053245297, + "grad_norm": 1.4387668371200562, + "learning_rate": 4.195271374699152e-05, + "loss": 0.1864, + "step": 61400 + }, + { + "epoch": 2.3711340206185567, + "grad_norm": 0.24176666140556335, + "learning_rate": 4.192697272738974e-05, + "loss": 0.2683, + "step": 61410 + }, + { + "epoch": 2.3715201359125837, + "grad_norm": 1.2240315675735474, + "learning_rate": 4.190123170778795e-05, + "loss": 0.195, + "step": 61420 + }, + { + "epoch": 2.3719062512066102, + "grad_norm": 2.242389440536499, + "learning_rate": 4.187549068818616e-05, + "loss": 0.1074, + "step": 61430 + }, + { + "epoch": 2.3722923665006372, + "grad_norm": 0.7379412055015564, + "learning_rate": 4.184974966858438e-05, + "loss": 0.1394, + "step": 61440 + }, + { + "epoch": 2.372678481794664, + "grad_norm": 1.3384835720062256, + "learning_rate": 4.182400864898259e-05, + "loss": 0.248, + "step": 61450 + }, + { + "epoch": 2.3730645970886908, + "grad_norm": 0.23063971102237701, + "learning_rate": 4.17982676293808e-05, + "loss": 0.1458, + "step": 61460 + }, + { + "epoch": 2.3734507123827173, + "grad_norm": 0.6873703598976135, + "learning_rate": 4.177252660977902e-05, + "loss": 0.1315, + "step": 61470 + }, + { + "epoch": 2.3738368276767443, + "grad_norm": 1.462497591972351, + "learning_rate": 4.174678559017723e-05, + "loss": 0.1031, + "step": 61480 + }, + { + "epoch": 2.3742229429707713, + "grad_norm": 1.403594732284546, + "learning_rate": 4.172104457057544e-05, + "loss": 0.1962, + "step": 61490 + }, + { + "epoch": 2.374609058264798, + "grad_norm": 3.3132827281951904, + "learning_rate": 4.169530355097366e-05, + "loss": 0.243, + "step": 61500 + }, + { + "epoch": 2.374995173558825, + "grad_norm": 0.5474012494087219, + "learning_rate": 4.166956253137187e-05, + "loss": 0.1087, + "step": 61510 + }, + { + "epoch": 2.3753812888528514, + "grad_norm": 1.2518501281738281, + "learning_rate": 4.164382151177008e-05, + "loss": 0.1119, + "step": 61520 + }, + { + "epoch": 2.3757674041468784, + "grad_norm": 0.10591934621334076, + "learning_rate": 4.16180804921683e-05, + "loss": 0.238, + "step": 61530 + }, + { + "epoch": 2.376153519440905, + "grad_norm": 0.7095358967781067, + "learning_rate": 4.159233947256651e-05, + "loss": 0.1082, + "step": 61540 + }, + { + "epoch": 2.376539634734932, + "grad_norm": 0.09203200787305832, + "learning_rate": 4.156659845296472e-05, + "loss": 0.1178, + "step": 61550 + }, + { + "epoch": 2.376925750028959, + "grad_norm": 1.4663885831832886, + "learning_rate": 4.1540857433362937e-05, + "loss": 0.2273, + "step": 61560 + }, + { + "epoch": 2.3773118653229854, + "grad_norm": 0.9895615577697754, + "learning_rate": 4.151511641376115e-05, + "loss": 0.1222, + "step": 61570 + }, + { + "epoch": 2.3776979806170124, + "grad_norm": 0.7987017631530762, + "learning_rate": 4.148937539415936e-05, + "loss": 0.2083, + "step": 61580 + }, + { + "epoch": 2.378084095911039, + "grad_norm": 0.662470817565918, + "learning_rate": 4.146363437455758e-05, + "loss": 0.1489, + "step": 61590 + }, + { + "epoch": 2.378470211205066, + "grad_norm": 0.703076183795929, + "learning_rate": 4.143789335495579e-05, + "loss": 0.2176, + "step": 61600 + }, + { + "epoch": 2.3788563264990925, + "grad_norm": 0.6900975704193115, + "learning_rate": 4.1412152335354e-05, + "loss": 0.0967, + "step": 61610 + }, + { + "epoch": 2.3792424417931195, + 
"grad_norm": 0.4006218910217285, + "learning_rate": 4.138641131575222e-05, + "loss": 0.1943, + "step": 61620 + }, + { + "epoch": 2.379628557087146, + "grad_norm": 0.8131549954414368, + "learning_rate": 4.1360670296150437e-05, + "loss": 0.1458, + "step": 61630 + }, + { + "epoch": 2.380014672381173, + "grad_norm": 2.572120189666748, + "learning_rate": 4.133492927654864e-05, + "loss": 0.1892, + "step": 61640 + }, + { + "epoch": 2.3804007876751996, + "grad_norm": 1.40338134765625, + "learning_rate": 4.1309188256946856e-05, + "loss": 0.1081, + "step": 61650 + }, + { + "epoch": 2.3807869029692266, + "grad_norm": 2.1713101863861084, + "learning_rate": 4.1283447237345076e-05, + "loss": 0.1839, + "step": 61660 + }, + { + "epoch": 2.3811730182632536, + "grad_norm": 1.1773313283920288, + "learning_rate": 4.125770621774328e-05, + "loss": 0.1065, + "step": 61670 + }, + { + "epoch": 2.38155913355728, + "grad_norm": 2.314040184020996, + "learning_rate": 4.1231965198141496e-05, + "loss": 0.2298, + "step": 61680 + }, + { + "epoch": 2.381945248851307, + "grad_norm": 1.7708461284637451, + "learning_rate": 4.1206224178539716e-05, + "loss": 0.1068, + "step": 61690 + }, + { + "epoch": 2.3823313641453336, + "grad_norm": 0.168818861246109, + "learning_rate": 4.118048315893793e-05, + "loss": 0.0979, + "step": 61700 + }, + { + "epoch": 2.3827174794393606, + "grad_norm": 2.80302357673645, + "learning_rate": 4.1154742139336136e-05, + "loss": 0.3275, + "step": 61710 + }, + { + "epoch": 2.383103594733387, + "grad_norm": 0.5709852576255798, + "learning_rate": 4.1129001119734356e-05, + "loss": 0.1523, + "step": 61720 + }, + { + "epoch": 2.383489710027414, + "grad_norm": 0.4733193516731262, + "learning_rate": 4.110326010013257e-05, + "loss": 0.0958, + "step": 61730 + }, + { + "epoch": 2.383875825321441, + "grad_norm": 0.3468289375305176, + "learning_rate": 4.107751908053078e-05, + "loss": 0.1437, + "step": 61740 + }, + { + "epoch": 2.3842619406154677, + "grad_norm": 0.7683085203170776, + "learning_rate": 4.1051778060928996e-05, + "loss": 0.1915, + "step": 61750 + }, + { + "epoch": 2.3846480559094947, + "grad_norm": 0.9539376497268677, + "learning_rate": 4.102603704132721e-05, + "loss": 0.13, + "step": 61760 + }, + { + "epoch": 2.3850341712035212, + "grad_norm": 1.247579574584961, + "learning_rate": 4.100029602172542e-05, + "loss": 0.1212, + "step": 61770 + }, + { + "epoch": 2.3854202864975482, + "grad_norm": 0.5379541516304016, + "learning_rate": 4.0974555002123636e-05, + "loss": 0.4552, + "step": 61780 + }, + { + "epoch": 2.3858064017915748, + "grad_norm": 1.4555822610855103, + "learning_rate": 4.094881398252185e-05, + "loss": 0.1677, + "step": 61790 + }, + { + "epoch": 2.3861925170856018, + "grad_norm": 2.210245132446289, + "learning_rate": 4.092307296292006e-05, + "loss": 0.1099, + "step": 61800 + }, + { + "epoch": 2.3865786323796288, + "grad_norm": 0.2830033004283905, + "learning_rate": 4.0897331943318276e-05, + "loss": 0.0844, + "step": 61810 + }, + { + "epoch": 2.3869647476736553, + "grad_norm": 0.5923789739608765, + "learning_rate": 4.087159092371649e-05, + "loss": 0.0668, + "step": 61820 + }, + { + "epoch": 2.3873508629676823, + "grad_norm": 2.1576321125030518, + "learning_rate": 4.08458499041147e-05, + "loss": 0.2333, + "step": 61830 + }, + { + "epoch": 2.387736978261709, + "grad_norm": 0.09188230335712433, + "learning_rate": 4.082010888451292e-05, + "loss": 0.1849, + "step": 61840 + }, + { + "epoch": 2.388123093555736, + "grad_norm": 0.5188024640083313, + "learning_rate": 4.079436786491113e-05, + "loss": 0.0781, 
+ "step": 61850 + }, + { + "epoch": 2.3885092088497624, + "grad_norm": 0.33355507254600525, + "learning_rate": 4.076862684530934e-05, + "loss": 0.1886, + "step": 61860 + }, + { + "epoch": 2.3888953241437894, + "grad_norm": 0.25753054022789, + "learning_rate": 4.074288582570756e-05, + "loss": 0.1492, + "step": 61870 + }, + { + "epoch": 2.3892814394378163, + "grad_norm": 0.47389110922813416, + "learning_rate": 4.071714480610577e-05, + "loss": 0.1576, + "step": 61880 + }, + { + "epoch": 2.389667554731843, + "grad_norm": 0.6938667297363281, + "learning_rate": 4.069140378650398e-05, + "loss": 0.1554, + "step": 61890 + }, + { + "epoch": 2.39005367002587, + "grad_norm": 0.5270907282829285, + "learning_rate": 4.06656627669022e-05, + "loss": 0.0905, + "step": 61900 + }, + { + "epoch": 2.3904397853198964, + "grad_norm": 0.972940981388092, + "learning_rate": 4.0639921747300416e-05, + "loss": 0.1008, + "step": 61910 + }, + { + "epoch": 2.3908259006139234, + "grad_norm": 0.3339834213256836, + "learning_rate": 4.061418072769862e-05, + "loss": 0.124, + "step": 61920 + }, + { + "epoch": 2.39121201590795, + "grad_norm": 0.40384066104888916, + "learning_rate": 4.0588439708096836e-05, + "loss": 0.1318, + "step": 61930 + }, + { + "epoch": 2.391598131201977, + "grad_norm": 1.3399138450622559, + "learning_rate": 4.0562698688495056e-05, + "loss": 0.2285, + "step": 61940 + }, + { + "epoch": 2.391984246496004, + "grad_norm": 1.2441486120224, + "learning_rate": 4.053695766889327e-05, + "loss": 0.1842, + "step": 61950 + }, + { + "epoch": 2.3923703617900305, + "grad_norm": 0.18500332534313202, + "learning_rate": 4.0511216649291476e-05, + "loss": 0.124, + "step": 61960 + }, + { + "epoch": 2.3927564770840575, + "grad_norm": 4.335320949554443, + "learning_rate": 4.0485475629689696e-05, + "loss": 0.2101, + "step": 61970 + }, + { + "epoch": 2.393142592378084, + "grad_norm": 1.7215917110443115, + "learning_rate": 4.045973461008791e-05, + "loss": 0.1828, + "step": 61980 + }, + { + "epoch": 2.393528707672111, + "grad_norm": 1.3829667568206787, + "learning_rate": 4.0433993590486116e-05, + "loss": 0.1872, + "step": 61990 + }, + { + "epoch": 2.3939148229661376, + "grad_norm": 0.8047557473182678, + "learning_rate": 4.0408252570884336e-05, + "loss": 0.195, + "step": 62000 + }, + { + "epoch": 2.3943009382601645, + "grad_norm": 0.04885184019804001, + "learning_rate": 4.038251155128255e-05, + "loss": 0.1502, + "step": 62010 + }, + { + "epoch": 2.394687053554191, + "grad_norm": 2.9263839721679688, + "learning_rate": 4.035677053168076e-05, + "loss": 0.2479, + "step": 62020 + }, + { + "epoch": 2.395073168848218, + "grad_norm": 1.2394524812698364, + "learning_rate": 4.0331029512078976e-05, + "loss": 0.1221, + "step": 62030 + }, + { + "epoch": 2.3954592841422446, + "grad_norm": 1.1224110126495361, + "learning_rate": 4.030528849247719e-05, + "loss": 0.1172, + "step": 62040 + }, + { + "epoch": 2.3958453994362716, + "grad_norm": 1.0132677555084229, + "learning_rate": 4.02795474728754e-05, + "loss": 0.2585, + "step": 62050 + }, + { + "epoch": 2.3962315147302986, + "grad_norm": 1.7612736225128174, + "learning_rate": 4.0253806453273616e-05, + "loss": 0.1449, + "step": 62060 + }, + { + "epoch": 2.396617630024325, + "grad_norm": 2.687474012374878, + "learning_rate": 4.022806543367183e-05, + "loss": 0.2116, + "step": 62070 + }, + { + "epoch": 2.397003745318352, + "grad_norm": 0.9632325768470764, + "learning_rate": 4.020232441407004e-05, + "loss": 0.233, + "step": 62080 + }, + { + "epoch": 2.3973898606123787, + "grad_norm": 0.385966956615448, + 
"learning_rate": 4.017658339446826e-05, + "loss": 0.2557, + "step": 62090 + }, + { + "epoch": 2.3977759759064057, + "grad_norm": 0.18261398375034332, + "learning_rate": 4.015084237486647e-05, + "loss": 0.0792, + "step": 62100 + }, + { + "epoch": 2.3981620912004322, + "grad_norm": 0.07081570476293564, + "learning_rate": 4.012510135526468e-05, + "loss": 0.121, + "step": 62110 + }, + { + "epoch": 2.398548206494459, + "grad_norm": 1.5726689100265503, + "learning_rate": 4.00993603356629e-05, + "loss": 0.235, + "step": 62120 + }, + { + "epoch": 2.398934321788486, + "grad_norm": 1.1276930570602417, + "learning_rate": 4.007361931606111e-05, + "loss": 0.2191, + "step": 62130 + }, + { + "epoch": 2.3993204370825127, + "grad_norm": 1.8213441371917725, + "learning_rate": 4.004787829645932e-05, + "loss": 0.2588, + "step": 62140 + }, + { + "epoch": 2.3997065523765397, + "grad_norm": 1.2340245246887207, + "learning_rate": 4.002213727685754e-05, + "loss": 0.2816, + "step": 62150 + }, + { + "epoch": 2.4000926676705663, + "grad_norm": 1.6360499858856201, + "learning_rate": 3.9996396257255756e-05, + "loss": 0.1142, + "step": 62160 + }, + { + "epoch": 2.4004787829645933, + "grad_norm": 0.48215198516845703, + "learning_rate": 3.997065523765396e-05, + "loss": 0.1013, + "step": 62170 + }, + { + "epoch": 2.40086489825862, + "grad_norm": 0.04493289813399315, + "learning_rate": 3.9944914218052175e-05, + "loss": 0.2127, + "step": 62180 + }, + { + "epoch": 2.401251013552647, + "grad_norm": 0.3863857686519623, + "learning_rate": 3.9919173198450396e-05, + "loss": 0.1712, + "step": 62190 + }, + { + "epoch": 2.401637128846674, + "grad_norm": 2.209010362625122, + "learning_rate": 3.98934321788486e-05, + "loss": 0.1541, + "step": 62200 + }, + { + "epoch": 2.4020232441407003, + "grad_norm": 0.5304957032203674, + "learning_rate": 3.9867691159246815e-05, + "loss": 0.2147, + "step": 62210 + }, + { + "epoch": 2.4024093594347273, + "grad_norm": 0.660261332988739, + "learning_rate": 3.9841950139645035e-05, + "loss": 0.054, + "step": 62220 + }, + { + "epoch": 2.402795474728754, + "grad_norm": 0.3104497194290161, + "learning_rate": 3.981620912004325e-05, + "loss": 0.1164, + "step": 62230 + }, + { + "epoch": 2.403181590022781, + "grad_norm": 0.6475027799606323, + "learning_rate": 3.9790468100441455e-05, + "loss": 0.1258, + "step": 62240 + }, + { + "epoch": 2.4035677053168074, + "grad_norm": 0.16110478341579437, + "learning_rate": 3.9764727080839675e-05, + "loss": 0.1574, + "step": 62250 + }, + { + "epoch": 2.4039538206108344, + "grad_norm": 2.295118808746338, + "learning_rate": 3.973898606123789e-05, + "loss": 0.1732, + "step": 62260 + }, + { + "epoch": 2.4043399359048614, + "grad_norm": 1.4980134963989258, + "learning_rate": 3.97132450416361e-05, + "loss": 0.2441, + "step": 62270 + }, + { + "epoch": 2.404726051198888, + "grad_norm": 1.1637049913406372, + "learning_rate": 3.9687504022034315e-05, + "loss": 0.2177, + "step": 62280 + }, + { + "epoch": 2.405112166492915, + "grad_norm": 0.2586102783679962, + "learning_rate": 3.966176300243253e-05, + "loss": 0.1486, + "step": 62290 + }, + { + "epoch": 2.4054982817869415, + "grad_norm": 1.9430426359176636, + "learning_rate": 3.963602198283074e-05, + "loss": 0.1609, + "step": 62300 + }, + { + "epoch": 2.4058843970809685, + "grad_norm": 1.1216020584106445, + "learning_rate": 3.9610280963228955e-05, + "loss": 0.1192, + "step": 62310 + }, + { + "epoch": 2.406270512374995, + "grad_norm": 0.34984323382377625, + "learning_rate": 3.958453994362717e-05, + "loss": 0.104, + "step": 62320 + }, + { + 
"epoch": 2.406656627669022, + "grad_norm": 3.057056427001953, + "learning_rate": 3.955879892402538e-05, + "loss": 0.259, + "step": 62330 + }, + { + "epoch": 2.407042742963049, + "grad_norm": 1.8370370864868164, + "learning_rate": 3.9533057904423595e-05, + "loss": 0.2345, + "step": 62340 + }, + { + "epoch": 2.4074288582570755, + "grad_norm": 0.6045883297920227, + "learning_rate": 3.950731688482181e-05, + "loss": 0.188, + "step": 62350 + }, + { + "epoch": 2.4078149735511025, + "grad_norm": 0.20036596059799194, + "learning_rate": 3.948157586522002e-05, + "loss": 0.1039, + "step": 62360 + }, + { + "epoch": 2.408201088845129, + "grad_norm": 0.1816219538450241, + "learning_rate": 3.945583484561824e-05, + "loss": 0.1453, + "step": 62370 + }, + { + "epoch": 2.408587204139156, + "grad_norm": 2.150385856628418, + "learning_rate": 3.943009382601645e-05, + "loss": 0.1803, + "step": 62380 + }, + { + "epoch": 2.4089733194331826, + "grad_norm": 1.5039875507354736, + "learning_rate": 3.940435280641466e-05, + "loss": 0.1951, + "step": 62390 + }, + { + "epoch": 2.4093594347272096, + "grad_norm": 1.9124608039855957, + "learning_rate": 3.937861178681288e-05, + "loss": 0.1488, + "step": 62400 + }, + { + "epoch": 2.4097455500212366, + "grad_norm": 0.5086666345596313, + "learning_rate": 3.9352870767211095e-05, + "loss": 0.2198, + "step": 62410 + }, + { + "epoch": 2.410131665315263, + "grad_norm": 0.7198240160942078, + "learning_rate": 3.93271297476093e-05, + "loss": 0.104, + "step": 62420 + }, + { + "epoch": 2.41051778060929, + "grad_norm": 0.22373056411743164, + "learning_rate": 3.930138872800752e-05, + "loss": 0.1572, + "step": 62430 + }, + { + "epoch": 2.4109038959033167, + "grad_norm": 0.58324134349823, + "learning_rate": 3.9275647708405735e-05, + "loss": 0.1558, + "step": 62440 + }, + { + "epoch": 2.4112900111973437, + "grad_norm": 0.5554331541061401, + "learning_rate": 3.924990668880394e-05, + "loss": 0.1776, + "step": 62450 + }, + { + "epoch": 2.41167612649137, + "grad_norm": 0.4414098262786865, + "learning_rate": 3.9224165669202155e-05, + "loss": 0.1029, + "step": 62460 + }, + { + "epoch": 2.412062241785397, + "grad_norm": 1.496374487876892, + "learning_rate": 3.9198424649600375e-05, + "loss": 0.1948, + "step": 62470 + }, + { + "epoch": 2.4124483570794237, + "grad_norm": 1.6268385648727417, + "learning_rate": 3.917268362999859e-05, + "loss": 0.1853, + "step": 62480 + }, + { + "epoch": 2.4128344723734507, + "grad_norm": 2.2693099975585938, + "learning_rate": 3.9146942610396795e-05, + "loss": 0.2076, + "step": 62490 + }, + { + "epoch": 2.4132205876674773, + "grad_norm": 1.1219795942306519, + "learning_rate": 3.9121201590795015e-05, + "loss": 0.1186, + "step": 62500 + }, + { + "epoch": 2.4136067029615043, + "grad_norm": 0.7887373566627502, + "learning_rate": 3.909546057119323e-05, + "loss": 0.1996, + "step": 62510 + }, + { + "epoch": 2.4139928182555312, + "grad_norm": 0.256099671125412, + "learning_rate": 3.906971955159144e-05, + "loss": 0.0926, + "step": 62520 + }, + { + "epoch": 2.414378933549558, + "grad_norm": 0.6383737921714783, + "learning_rate": 3.9043978531989655e-05, + "loss": 0.1383, + "step": 62530 + }, + { + "epoch": 2.414765048843585, + "grad_norm": 1.5678856372833252, + "learning_rate": 3.901823751238787e-05, + "loss": 0.1486, + "step": 62540 + }, + { + "epoch": 2.4151511641376113, + "grad_norm": 0.11567826569080353, + "learning_rate": 3.899249649278608e-05, + "loss": 0.1031, + "step": 62550 + }, + { + "epoch": 2.4155372794316383, + "grad_norm": 1.3567986488342285, + "learning_rate": 
3.8966755473184295e-05, + "loss": 0.1964, + "step": 62560 + }, + { + "epoch": 2.415923394725665, + "grad_norm": 0.1885988563299179, + "learning_rate": 3.894101445358251e-05, + "loss": 0.0998, + "step": 62570 + }, + { + "epoch": 2.416309510019692, + "grad_norm": 0.7068611979484558, + "learning_rate": 3.891527343398072e-05, + "loss": 0.161, + "step": 62580 + }, + { + "epoch": 2.416695625313719, + "grad_norm": 0.34557539224624634, + "learning_rate": 3.8889532414378935e-05, + "loss": 0.143, + "step": 62590 + }, + { + "epoch": 2.4170817406077454, + "grad_norm": 1.6846903562545776, + "learning_rate": 3.886379139477715e-05, + "loss": 0.189, + "step": 62600 + }, + { + "epoch": 2.4174678559017724, + "grad_norm": 0.9236536026000977, + "learning_rate": 3.883805037517536e-05, + "loss": 0.2385, + "step": 62610 + }, + { + "epoch": 2.417853971195799, + "grad_norm": 2.0287728309631348, + "learning_rate": 3.881230935557358e-05, + "loss": 0.3649, + "step": 62620 + }, + { + "epoch": 2.418240086489826, + "grad_norm": 1.3650734424591064, + "learning_rate": 3.878656833597179e-05, + "loss": 0.1766, + "step": 62630 + }, + { + "epoch": 2.4186262017838525, + "grad_norm": 0.44368478655815125, + "learning_rate": 3.876082731637e-05, + "loss": 0.1456, + "step": 62640 + }, + { + "epoch": 2.4190123170778794, + "grad_norm": 1.93278169631958, + "learning_rate": 3.873508629676822e-05, + "loss": 0.1568, + "step": 62650 + }, + { + "epoch": 2.4193984323719064, + "grad_norm": 0.22828684747219086, + "learning_rate": 3.870934527716643e-05, + "loss": 0.1003, + "step": 62660 + }, + { + "epoch": 2.419784547665933, + "grad_norm": 0.796909749507904, + "learning_rate": 3.868360425756464e-05, + "loss": 0.0777, + "step": 62670 + }, + { + "epoch": 2.42017066295996, + "grad_norm": 0.8624141812324524, + "learning_rate": 3.865786323796286e-05, + "loss": 0.2598, + "step": 62680 + }, + { + "epoch": 2.4205567782539865, + "grad_norm": 1.9208048582077026, + "learning_rate": 3.8632122218361075e-05, + "loss": 0.3543, + "step": 62690 + }, + { + "epoch": 2.4209428935480135, + "grad_norm": 0.8410032987594604, + "learning_rate": 3.860638119875928e-05, + "loss": 0.2511, + "step": 62700 + }, + { + "epoch": 2.42132900884204, + "grad_norm": 0.2925935387611389, + "learning_rate": 3.8580640179157494e-05, + "loss": 0.1085, + "step": 62710 + }, + { + "epoch": 2.421715124136067, + "grad_norm": 2.3135852813720703, + "learning_rate": 3.8554899159555715e-05, + "loss": 0.235, + "step": 62720 + }, + { + "epoch": 2.422101239430094, + "grad_norm": 1.9191985130310059, + "learning_rate": 3.852915813995393e-05, + "loss": 0.1893, + "step": 62730 + }, + { + "epoch": 2.4224873547241206, + "grad_norm": 1.2339379787445068, + "learning_rate": 3.8503417120352134e-05, + "loss": 0.1778, + "step": 62740 + }, + { + "epoch": 2.4228734700181476, + "grad_norm": 0.09992465376853943, + "learning_rate": 3.8477676100750355e-05, + "loss": 0.1301, + "step": 62750 + }, + { + "epoch": 2.423259585312174, + "grad_norm": 0.13811521232128143, + "learning_rate": 3.845193508114857e-05, + "loss": 0.1062, + "step": 62760 + }, + { + "epoch": 2.423645700606201, + "grad_norm": 2.9451167583465576, + "learning_rate": 3.8426194061546774e-05, + "loss": 0.086, + "step": 62770 + }, + { + "epoch": 2.4240318159002276, + "grad_norm": 0.23093783855438232, + "learning_rate": 3.8400453041944994e-05, + "loss": 0.0962, + "step": 62780 + }, + { + "epoch": 2.4244179311942546, + "grad_norm": 0.10238637775182724, + "learning_rate": 3.837471202234321e-05, + "loss": 0.1338, + "step": 62790 + }, + { + "epoch": 
2.4248040464882816, + "grad_norm": 0.4013136029243469, + "learning_rate": 3.834897100274142e-05, + "loss": 0.3002, + "step": 62800 + }, + { + "epoch": 2.425190161782308, + "grad_norm": 0.3969825804233551, + "learning_rate": 3.8323229983139634e-05, + "loss": 0.2349, + "step": 62810 + }, + { + "epoch": 2.425576277076335, + "grad_norm": 0.9879517555236816, + "learning_rate": 3.829748896353785e-05, + "loss": 0.2564, + "step": 62820 + }, + { + "epoch": 2.4259623923703617, + "grad_norm": 1.5865511894226074, + "learning_rate": 3.827174794393606e-05, + "loss": 0.19, + "step": 62830 + }, + { + "epoch": 2.4263485076643887, + "grad_norm": 2.8750438690185547, + "learning_rate": 3.8246006924334274e-05, + "loss": 0.1515, + "step": 62840 + }, + { + "epoch": 2.4267346229584152, + "grad_norm": 0.9555363059043884, + "learning_rate": 3.822026590473249e-05, + "loss": 0.137, + "step": 62850 + }, + { + "epoch": 2.4271207382524422, + "grad_norm": 0.10716754198074341, + "learning_rate": 3.81945248851307e-05, + "loss": 0.2019, + "step": 62860 + }, + { + "epoch": 2.4275068535464692, + "grad_norm": 2.1091630458831787, + "learning_rate": 3.8168783865528914e-05, + "loss": 0.2126, + "step": 62870 + }, + { + "epoch": 2.4278929688404958, + "grad_norm": 2.2296948432922363, + "learning_rate": 3.814304284592713e-05, + "loss": 0.2098, + "step": 62880 + }, + { + "epoch": 2.4282790841345228, + "grad_norm": 1.1145920753479004, + "learning_rate": 3.811730182632534e-05, + "loss": 0.0998, + "step": 62890 + }, + { + "epoch": 2.4286651994285493, + "grad_norm": 0.6394558548927307, + "learning_rate": 3.809156080672356e-05, + "loss": 0.1766, + "step": 62900 + }, + { + "epoch": 2.4290513147225763, + "grad_norm": 0.16177436709403992, + "learning_rate": 3.806581978712177e-05, + "loss": 0.1776, + "step": 62910 + }, + { + "epoch": 2.429437430016603, + "grad_norm": 1.643153190612793, + "learning_rate": 3.804007876751998e-05, + "loss": 0.1908, + "step": 62920 + }, + { + "epoch": 2.42982354531063, + "grad_norm": 3.011587619781494, + "learning_rate": 3.80143377479182e-05, + "loss": 0.1595, + "step": 62930 + }, + { + "epoch": 2.4302096606046564, + "grad_norm": 1.1857898235321045, + "learning_rate": 3.7988596728316414e-05, + "loss": 0.1117, + "step": 62940 + }, + { + "epoch": 2.4305957758986834, + "grad_norm": 0.984836995601654, + "learning_rate": 3.796285570871462e-05, + "loss": 0.0888, + "step": 62950 + }, + { + "epoch": 2.43098189119271, + "grad_norm": 0.2273918092250824, + "learning_rate": 3.7937114689112834e-05, + "loss": 0.0998, + "step": 62960 + }, + { + "epoch": 2.431368006486737, + "grad_norm": 0.6913338303565979, + "learning_rate": 3.7911373669511054e-05, + "loss": 0.1549, + "step": 62970 + }, + { + "epoch": 2.431754121780764, + "grad_norm": 0.2670879364013672, + "learning_rate": 3.788563264990926e-05, + "loss": 0.1931, + "step": 62980 + }, + { + "epoch": 2.4321402370747904, + "grad_norm": 0.3435567021369934, + "learning_rate": 3.7859891630307474e-05, + "loss": 0.1872, + "step": 62990 + }, + { + "epoch": 2.4325263523688174, + "grad_norm": 2.289534091949463, + "learning_rate": 3.7834150610705694e-05, + "loss": 0.1371, + "step": 63000 + }, + { + "epoch": 2.432912467662844, + "grad_norm": 0.5831142067909241, + "learning_rate": 3.780840959110391e-05, + "loss": 0.1779, + "step": 63010 + }, + { + "epoch": 2.433298582956871, + "grad_norm": 0.4865301847457886, + "learning_rate": 3.7782668571502114e-05, + "loss": 0.2069, + "step": 63020 + }, + { + "epoch": 2.4336846982508975, + "grad_norm": 0.9294113516807556, + "learning_rate": 
3.7756927551900334e-05, + "loss": 0.2059, + "step": 63030 + }, + { + "epoch": 2.4340708135449245, + "grad_norm": 1.044704794883728, + "learning_rate": 3.773118653229855e-05, + "loss": 0.2075, + "step": 63040 + }, + { + "epoch": 2.4344569288389515, + "grad_norm": 0.07156316190958023, + "learning_rate": 3.770544551269676e-05, + "loss": 0.0827, + "step": 63050 + }, + { + "epoch": 2.434843044132978, + "grad_norm": 1.0131940841674805, + "learning_rate": 3.7679704493094974e-05, + "loss": 0.0786, + "step": 63060 + }, + { + "epoch": 2.435229159427005, + "grad_norm": 0.8649851679801941, + "learning_rate": 3.765396347349319e-05, + "loss": 0.1476, + "step": 63070 + }, + { + "epoch": 2.4356152747210316, + "grad_norm": 4.939096927642822, + "learning_rate": 3.76282224538914e-05, + "loss": 0.2188, + "step": 63080 + }, + { + "epoch": 2.4360013900150586, + "grad_norm": 0.4139706790447235, + "learning_rate": 3.7602481434289614e-05, + "loss": 0.281, + "step": 63090 + }, + { + "epoch": 2.436387505309085, + "grad_norm": 0.0399312861263752, + "learning_rate": 3.757674041468783e-05, + "loss": 0.3068, + "step": 63100 + }, + { + "epoch": 2.436773620603112, + "grad_norm": 0.7237934470176697, + "learning_rate": 3.755099939508604e-05, + "loss": 0.2222, + "step": 63110 + }, + { + "epoch": 2.437159735897139, + "grad_norm": 0.16770142316818237, + "learning_rate": 3.7525258375484254e-05, + "loss": 0.1302, + "step": 63120 + }, + { + "epoch": 2.4375458511911656, + "grad_norm": 1.9887669086456299, + "learning_rate": 3.749951735588247e-05, + "loss": 0.0794, + "step": 63130 + }, + { + "epoch": 2.4379319664851926, + "grad_norm": 2.856318712234497, + "learning_rate": 3.747377633628068e-05, + "loss": 0.0996, + "step": 63140 + }, + { + "epoch": 2.438318081779219, + "grad_norm": 0.9471076130867004, + "learning_rate": 3.74480353166789e-05, + "loss": 0.1915, + "step": 63150 + }, + { + "epoch": 2.438704197073246, + "grad_norm": 1.4671097993850708, + "learning_rate": 3.742229429707711e-05, + "loss": 0.1851, + "step": 63160 + }, + { + "epoch": 2.4390903123672727, + "grad_norm": 0.3654942810535431, + "learning_rate": 3.739655327747532e-05, + "loss": 0.1091, + "step": 63170 + }, + { + "epoch": 2.4394764276612997, + "grad_norm": 0.8947110176086426, + "learning_rate": 3.737081225787354e-05, + "loss": 0.1741, + "step": 63180 + }, + { + "epoch": 2.4398625429553267, + "grad_norm": 1.3367135524749756, + "learning_rate": 3.7345071238271754e-05, + "loss": 0.2007, + "step": 63190 + }, + { + "epoch": 2.440248658249353, + "grad_norm": 2.484145164489746, + "learning_rate": 3.731933021866996e-05, + "loss": 0.1794, + "step": 63200 + }, + { + "epoch": 2.44063477354338, + "grad_norm": 1.0260628461837769, + "learning_rate": 3.729358919906818e-05, + "loss": 0.3053, + "step": 63210 + }, + { + "epoch": 2.4410208888374068, + "grad_norm": 1.0322519540786743, + "learning_rate": 3.7267848179466394e-05, + "loss": 0.1826, + "step": 63220 + }, + { + "epoch": 2.4414070041314337, + "grad_norm": 0.6321549415588379, + "learning_rate": 3.72421071598646e-05, + "loss": 0.1561, + "step": 63230 + }, + { + "epoch": 2.4417931194254603, + "grad_norm": 1.2646952867507935, + "learning_rate": 3.7216366140262814e-05, + "loss": 0.0833, + "step": 63240 + }, + { + "epoch": 2.4421792347194873, + "grad_norm": 1.1415789127349854, + "learning_rate": 3.7190625120661034e-05, + "loss": 0.3039, + "step": 63250 + }, + { + "epoch": 2.4425653500135143, + "grad_norm": 1.143133521080017, + "learning_rate": 3.716488410105925e-05, + "loss": 0.1585, + "step": 63260 + }, + { + "epoch": 
2.442951465307541, + "grad_norm": 0.32217004895210266, + "learning_rate": 3.7139143081457453e-05, + "loss": 0.1776, + "step": 63270 + }, + { + "epoch": 2.443337580601568, + "grad_norm": 0.04204453527927399, + "learning_rate": 3.7113402061855674e-05, + "loss": 0.1966, + "step": 63280 + }, + { + "epoch": 2.4437236958955943, + "grad_norm": 3.0139830112457275, + "learning_rate": 3.708766104225389e-05, + "loss": 0.2323, + "step": 63290 + }, + { + "epoch": 2.4441098111896213, + "grad_norm": 0.6170455813407898, + "learning_rate": 3.70619200226521e-05, + "loss": 0.18, + "step": 63300 + }, + { + "epoch": 2.444495926483648, + "grad_norm": 0.08699564635753632, + "learning_rate": 3.7036179003050313e-05, + "loss": 0.2548, + "step": 63310 + }, + { + "epoch": 2.444882041777675, + "grad_norm": 0.18672427535057068, + "learning_rate": 3.701043798344853e-05, + "loss": 0.2081, + "step": 63320 + }, + { + "epoch": 2.4452681570717014, + "grad_norm": 0.32661938667297363, + "learning_rate": 3.698469696384674e-05, + "loss": 0.1391, + "step": 63330 + }, + { + "epoch": 2.4456542723657284, + "grad_norm": 0.521452009677887, + "learning_rate": 3.6958955944244953e-05, + "loss": 0.1688, + "step": 63340 + }, + { + "epoch": 2.446040387659755, + "grad_norm": 1.4832172393798828, + "learning_rate": 3.693321492464317e-05, + "loss": 0.1051, + "step": 63350 + }, + { + "epoch": 2.446426502953782, + "grad_norm": 0.4200538694858551, + "learning_rate": 3.690747390504138e-05, + "loss": 0.1822, + "step": 63360 + }, + { + "epoch": 2.446812618247809, + "grad_norm": 0.48908549547195435, + "learning_rate": 3.688173288543959e-05, + "loss": 0.0922, + "step": 63370 + }, + { + "epoch": 2.4471987335418355, + "grad_norm": 1.988203525543213, + "learning_rate": 3.685599186583781e-05, + "loss": 0.1412, + "step": 63380 + }, + { + "epoch": 2.4475848488358625, + "grad_norm": 0.8950991630554199, + "learning_rate": 3.683025084623602e-05, + "loss": 0.2411, + "step": 63390 + }, + { + "epoch": 2.447970964129889, + "grad_norm": 1.0535110235214233, + "learning_rate": 3.680450982663424e-05, + "loss": 0.1539, + "step": 63400 + }, + { + "epoch": 2.448357079423916, + "grad_norm": 1.7036796808242798, + "learning_rate": 3.6778768807032447e-05, + "loss": 0.2954, + "step": 63410 + }, + { + "epoch": 2.4487431947179426, + "grad_norm": 0.32762983441352844, + "learning_rate": 3.675302778743066e-05, + "loss": 0.1721, + "step": 63420 + }, + { + "epoch": 2.4491293100119695, + "grad_norm": 3.3198564052581787, + "learning_rate": 3.672728676782888e-05, + "loss": 0.3087, + "step": 63430 + }, + { + "epoch": 2.4495154253059965, + "grad_norm": 2.290881872177124, + "learning_rate": 3.6701545748227087e-05, + "loss": 0.0957, + "step": 63440 + }, + { + "epoch": 2.449901540600023, + "grad_norm": 1.985274076461792, + "learning_rate": 3.66758047286253e-05, + "loss": 0.146, + "step": 63450 + }, + { + "epoch": 2.45028765589405, + "grad_norm": 1.5870091915130615, + "learning_rate": 3.665006370902352e-05, + "loss": 0.179, + "step": 63460 + }, + { + "epoch": 2.4506737711880766, + "grad_norm": 0.29243603348731995, + "learning_rate": 3.662432268942173e-05, + "loss": 0.2421, + "step": 63470 + }, + { + "epoch": 2.4510598864821036, + "grad_norm": 1.050746202468872, + "learning_rate": 3.659858166981994e-05, + "loss": 0.1841, + "step": 63480 + }, + { + "epoch": 2.45144600177613, + "grad_norm": 0.9252954721450806, + "learning_rate": 3.657284065021815e-05, + "loss": 0.294, + "step": 63490 + }, + { + "epoch": 2.451832117070157, + "grad_norm": 1.6930452585220337, + "learning_rate": 
3.654709963061637e-05, + "loss": 0.083, + "step": 63500 + }, + { + "epoch": 2.452218232364184, + "grad_norm": 1.2885856628417969, + "learning_rate": 3.6521358611014586e-05, + "loss": 0.1526, + "step": 63510 + }, + { + "epoch": 2.4526043476582107, + "grad_norm": 0.19372563064098358, + "learning_rate": 3.649561759141279e-05, + "loss": 0.1417, + "step": 63520 + }, + { + "epoch": 2.4529904629522377, + "grad_norm": 0.9957149624824524, + "learning_rate": 3.646987657181101e-05, + "loss": 0.2321, + "step": 63530 + }, + { + "epoch": 2.453376578246264, + "grad_norm": 0.8055297136306763, + "learning_rate": 3.6444135552209226e-05, + "loss": 0.178, + "step": 63540 + }, + { + "epoch": 2.453762693540291, + "grad_norm": 0.37042102217674255, + "learning_rate": 3.641839453260743e-05, + "loss": 0.1533, + "step": 63550 + }, + { + "epoch": 2.4541488088343177, + "grad_norm": 0.09681963920593262, + "learning_rate": 3.639265351300565e-05, + "loss": 0.0807, + "step": 63560 + }, + { + "epoch": 2.4545349241283447, + "grad_norm": 0.17449086904525757, + "learning_rate": 3.6366912493403866e-05, + "loss": 0.0819, + "step": 63570 + }, + { + "epoch": 2.4549210394223717, + "grad_norm": 0.9616051316261292, + "learning_rate": 3.634117147380208e-05, + "loss": 0.0952, + "step": 63580 + }, + { + "epoch": 2.4553071547163983, + "grad_norm": 3.450044870376587, + "learning_rate": 3.631543045420029e-05, + "loss": 0.247, + "step": 63590 + }, + { + "epoch": 2.4556932700104253, + "grad_norm": 0.5883270502090454, + "learning_rate": 3.6289689434598506e-05, + "loss": 0.105, + "step": 63600 + }, + { + "epoch": 2.456079385304452, + "grad_norm": 1.2351908683776855, + "learning_rate": 3.626394841499672e-05, + "loss": 0.1978, + "step": 63610 + }, + { + "epoch": 2.456465500598479, + "grad_norm": 0.08615940809249878, + "learning_rate": 3.623820739539493e-05, + "loss": 0.0366, + "step": 63620 + }, + { + "epoch": 2.4568516158925053, + "grad_norm": 0.758092999458313, + "learning_rate": 3.6212466375793146e-05, + "loss": 0.0538, + "step": 63630 + }, + { + "epoch": 2.4572377311865323, + "grad_norm": 0.2542964220046997, + "learning_rate": 3.618672535619136e-05, + "loss": 0.217, + "step": 63640 + }, + { + "epoch": 2.4576238464805593, + "grad_norm": 1.8404854536056519, + "learning_rate": 3.616098433658957e-05, + "loss": 0.1246, + "step": 63650 + }, + { + "epoch": 2.458009961774586, + "grad_norm": 0.06438548862934113, + "learning_rate": 3.6135243316987786e-05, + "loss": 0.1816, + "step": 63660 + }, + { + "epoch": 2.458396077068613, + "grad_norm": 1.4888163805007935, + "learning_rate": 3.6109502297386e-05, + "loss": 0.1496, + "step": 63670 + }, + { + "epoch": 2.4587821923626394, + "grad_norm": 0.06935808062553406, + "learning_rate": 3.608376127778422e-05, + "loss": 0.1095, + "step": 63680 + }, + { + "epoch": 2.4591683076566664, + "grad_norm": 0.9536172151565552, + "learning_rate": 3.6058020258182426e-05, + "loss": 0.1572, + "step": 63690 + }, + { + "epoch": 2.459554422950693, + "grad_norm": 3.4437315464019775, + "learning_rate": 3.603227923858064e-05, + "loss": 0.2611, + "step": 63700 + }, + { + "epoch": 2.45994053824472, + "grad_norm": 2.3220934867858887, + "learning_rate": 3.600653821897886e-05, + "loss": 0.2999, + "step": 63710 + }, + { + "epoch": 2.460326653538747, + "grad_norm": 1.1044209003448486, + "learning_rate": 3.598079719937707e-05, + "loss": 0.0936, + "step": 63720 + }, + { + "epoch": 2.4607127688327735, + "grad_norm": 2.439162015914917, + "learning_rate": 3.595505617977528e-05, + "loss": 0.2898, + "step": 63730 + }, + { + "epoch": 
2.4610988841268004, + "grad_norm": 0.9153732061386108, + "learning_rate": 3.59293151601735e-05, + "loss": 0.0976, + "step": 63740 + }, + { + "epoch": 2.461484999420827, + "grad_norm": 0.019965412095189095, + "learning_rate": 3.590357414057171e-05, + "loss": 0.1132, + "step": 63750 + }, + { + "epoch": 2.461871114714854, + "grad_norm": 1.2829655408859253, + "learning_rate": 3.587783312096992e-05, + "loss": 0.109, + "step": 63760 + }, + { + "epoch": 2.4622572300088805, + "grad_norm": 1.2905359268188477, + "learning_rate": 3.585209210136813e-05, + "loss": 0.288, + "step": 63770 + }, + { + "epoch": 2.4626433453029075, + "grad_norm": 0.6175247430801392, + "learning_rate": 3.582635108176635e-05, + "loss": 0.1877, + "step": 63780 + }, + { + "epoch": 2.463029460596934, + "grad_norm": 0.15092957019805908, + "learning_rate": 3.5800610062164566e-05, + "loss": 0.1871, + "step": 63790 + }, + { + "epoch": 2.463415575890961, + "grad_norm": 0.4543861746788025, + "learning_rate": 3.577486904256277e-05, + "loss": 0.1566, + "step": 63800 + }, + { + "epoch": 2.4638016911849876, + "grad_norm": 2.4032862186431885, + "learning_rate": 3.574912802296099e-05, + "loss": 0.338, + "step": 63810 + }, + { + "epoch": 2.4641878064790146, + "grad_norm": 0.4644568860530853, + "learning_rate": 3.5723387003359206e-05, + "loss": 0.4347, + "step": 63820 + }, + { + "epoch": 2.4645739217730416, + "grad_norm": 2.190516233444214, + "learning_rate": 3.569764598375742e-05, + "loss": 0.1055, + "step": 63830 + }, + { + "epoch": 2.464960037067068, + "grad_norm": 0.18661876022815704, + "learning_rate": 3.567190496415563e-05, + "loss": 0.4476, + "step": 63840 + }, + { + "epoch": 2.465346152361095, + "grad_norm": 0.7265095114707947, + "learning_rate": 3.5646163944553846e-05, + "loss": 0.1121, + "step": 63850 + }, + { + "epoch": 2.4657322676551217, + "grad_norm": 0.2888505160808563, + "learning_rate": 3.562042292495206e-05, + "loss": 0.1208, + "step": 63860 + }, + { + "epoch": 2.4661183829491486, + "grad_norm": 0.9269798398017883, + "learning_rate": 3.559468190535027e-05, + "loss": 0.1376, + "step": 63870 + }, + { + "epoch": 2.466504498243175, + "grad_norm": 0.048346057534217834, + "learning_rate": 3.5568940885748486e-05, + "loss": 0.3057, + "step": 63880 + }, + { + "epoch": 2.466890613537202, + "grad_norm": 1.5524492263793945, + "learning_rate": 3.55431998661467e-05, + "loss": 0.077, + "step": 63890 + }, + { + "epoch": 2.467276728831229, + "grad_norm": 0.7234669327735901, + "learning_rate": 3.551745884654491e-05, + "loss": 0.1701, + "step": 63900 + }, + { + "epoch": 2.4676628441252557, + "grad_norm": 0.9384757280349731, + "learning_rate": 3.5491717826943126e-05, + "loss": 0.1085, + "step": 63910 + }, + { + "epoch": 2.4680489594192827, + "grad_norm": 1.6186715364456177, + "learning_rate": 3.546597680734134e-05, + "loss": 0.1556, + "step": 63920 + }, + { + "epoch": 2.4684350747133093, + "grad_norm": 1.2746638059616089, + "learning_rate": 3.544023578773956e-05, + "loss": 0.2306, + "step": 63930 + }, + { + "epoch": 2.4688211900073362, + "grad_norm": 2.7122621536254883, + "learning_rate": 3.5414494768137766e-05, + "loss": 0.1482, + "step": 63940 + }, + { + "epoch": 2.469207305301363, + "grad_norm": 1.9252265691757202, + "learning_rate": 3.538875374853598e-05, + "loss": 0.2278, + "step": 63950 + }, + { + "epoch": 2.4695934205953898, + "grad_norm": 1.1436023712158203, + "learning_rate": 3.53630127289342e-05, + "loss": 0.1518, + "step": 63960 + }, + { + "epoch": 2.4699795358894168, + "grad_norm": 0.40255314111709595, + "learning_rate": 
3.533727170933241e-05, + "loss": 0.067, + "step": 63970 + }, + { + "epoch": 2.4703656511834433, + "grad_norm": 0.5762872695922852, + "learning_rate": 3.531153068973062e-05, + "loss": 0.2584, + "step": 63980 + }, + { + "epoch": 2.4707517664774703, + "grad_norm": 0.977530837059021, + "learning_rate": 3.528578967012884e-05, + "loss": 0.113, + "step": 63990 + }, + { + "epoch": 2.471137881771497, + "grad_norm": 0.5977995991706848, + "learning_rate": 3.526004865052705e-05, + "loss": 0.2179, + "step": 64000 + }, + { + "epoch": 2.471523997065524, + "grad_norm": 0.33924487233161926, + "learning_rate": 3.523430763092526e-05, + "loss": 0.1102, + "step": 64010 + }, + { + "epoch": 2.4719101123595504, + "grad_norm": 1.6377263069152832, + "learning_rate": 3.520856661132347e-05, + "loss": 0.1848, + "step": 64020 + }, + { + "epoch": 2.4722962276535774, + "grad_norm": 0.3962012827396393, + "learning_rate": 3.518282559172169e-05, + "loss": 0.2144, + "step": 64030 + }, + { + "epoch": 2.4726823429476044, + "grad_norm": 2.4445207118988037, + "learning_rate": 3.5157084572119906e-05, + "loss": 0.0798, + "step": 64040 + }, + { + "epoch": 2.473068458241631, + "grad_norm": 1.1374174356460571, + "learning_rate": 3.513134355251811e-05, + "loss": 0.0999, + "step": 64050 + }, + { + "epoch": 2.473454573535658, + "grad_norm": 1.128503680229187, + "learning_rate": 3.510560253291633e-05, + "loss": 0.0811, + "step": 64060 + }, + { + "epoch": 2.4738406888296844, + "grad_norm": 0.7874391078948975, + "learning_rate": 3.5079861513314545e-05, + "loss": 0.1042, + "step": 64070 + }, + { + "epoch": 2.4742268041237114, + "grad_norm": 0.36891883611679077, + "learning_rate": 3.505412049371276e-05, + "loss": 0.2693, + "step": 64080 + }, + { + "epoch": 2.474612919417738, + "grad_norm": 1.4663618803024292, + "learning_rate": 3.502837947411097e-05, + "loss": 0.1469, + "step": 64090 + }, + { + "epoch": 2.474999034711765, + "grad_norm": 1.6910697221755981, + "learning_rate": 3.5002638454509185e-05, + "loss": 0.1506, + "step": 64100 + }, + { + "epoch": 2.475385150005792, + "grad_norm": 2.6914491653442383, + "learning_rate": 3.49768974349074e-05, + "loss": 0.1182, + "step": 64110 + }, + { + "epoch": 2.4757712652998185, + "grad_norm": 0.7562038898468018, + "learning_rate": 3.495115641530561e-05, + "loss": 0.1935, + "step": 64120 + }, + { + "epoch": 2.4761573805938455, + "grad_norm": 0.9019849300384521, + "learning_rate": 3.4925415395703825e-05, + "loss": 0.1456, + "step": 64130 + }, + { + "epoch": 2.476543495887872, + "grad_norm": 1.4131752252578735, + "learning_rate": 3.489967437610204e-05, + "loss": 0.1502, + "step": 64140 + }, + { + "epoch": 2.476929611181899, + "grad_norm": 1.1619501113891602, + "learning_rate": 3.487393335650025e-05, + "loss": 0.1559, + "step": 64150 + }, + { + "epoch": 2.4773157264759256, + "grad_norm": 0.38123640418052673, + "learning_rate": 3.4848192336898465e-05, + "loss": 0.1273, + "step": 64160 + }, + { + "epoch": 2.4777018417699526, + "grad_norm": 2.8837268352508545, + "learning_rate": 3.482245131729668e-05, + "loss": 0.1264, + "step": 64170 + }, + { + "epoch": 2.4780879570639796, + "grad_norm": 4.011549949645996, + "learning_rate": 3.47967102976949e-05, + "loss": 0.3243, + "step": 64180 + }, + { + "epoch": 2.478474072358006, + "grad_norm": 1.187883973121643, + "learning_rate": 3.4770969278093105e-05, + "loss": 0.1948, + "step": 64190 + }, + { + "epoch": 2.478860187652033, + "grad_norm": 1.1129032373428345, + "learning_rate": 3.474522825849132e-05, + "loss": 0.1226, + "step": 64200 + }, + { + "epoch": 
2.4792463029460596, + "grad_norm": 0.3412770926952362, + "learning_rate": 3.471948723888954e-05, + "loss": 0.1282, + "step": 64210 + }, + { + "epoch": 2.4796324182400866, + "grad_norm": 1.1567891836166382, + "learning_rate": 3.4693746219287745e-05, + "loss": 0.2034, + "step": 64220 + }, + { + "epoch": 2.480018533534113, + "grad_norm": 0.05797566846013069, + "learning_rate": 3.466800519968596e-05, + "loss": 0.1438, + "step": 64230 + }, + { + "epoch": 2.48040464882814, + "grad_norm": 0.8097573518753052, + "learning_rate": 3.464226418008418e-05, + "loss": 0.2128, + "step": 64240 + }, + { + "epoch": 2.4807907641221667, + "grad_norm": 1.383527398109436, + "learning_rate": 3.461652316048239e-05, + "loss": 0.2379, + "step": 64250 + }, + { + "epoch": 2.4811768794161937, + "grad_norm": 1.1194959878921509, + "learning_rate": 3.45907821408806e-05, + "loss": 0.3002, + "step": 64260 + }, + { + "epoch": 2.4815629947102202, + "grad_norm": 2.7457661628723145, + "learning_rate": 3.456504112127881e-05, + "loss": 0.2035, + "step": 64270 + }, + { + "epoch": 2.4819491100042472, + "grad_norm": 1.852704405784607, + "learning_rate": 3.453930010167703e-05, + "loss": 0.1338, + "step": 64280 + }, + { + "epoch": 2.482335225298274, + "grad_norm": 1.2378255128860474, + "learning_rate": 3.4513559082075245e-05, + "loss": 0.1083, + "step": 64290 + }, + { + "epoch": 2.4827213405923008, + "grad_norm": 1.2217592000961304, + "learning_rate": 3.448781806247345e-05, + "loss": 0.2626, + "step": 64300 + }, + { + "epoch": 2.4831074558863278, + "grad_norm": 0.8216773271560669, + "learning_rate": 3.446207704287167e-05, + "loss": 0.1455, + "step": 64310 + }, + { + "epoch": 2.4834935711803543, + "grad_norm": 1.122636318206787, + "learning_rate": 3.4436336023269885e-05, + "loss": 0.2029, + "step": 64320 + }, + { + "epoch": 2.4838796864743813, + "grad_norm": 0.4117072820663452, + "learning_rate": 3.441059500366809e-05, + "loss": 0.2123, + "step": 64330 + }, + { + "epoch": 2.484265801768408, + "grad_norm": 0.04222499951720238, + "learning_rate": 3.438485398406631e-05, + "loss": 0.2791, + "step": 64340 + }, + { + "epoch": 2.484651917062435, + "grad_norm": 1.2753795385360718, + "learning_rate": 3.4359112964464525e-05, + "loss": 0.213, + "step": 64350 + }, + { + "epoch": 2.485038032356462, + "grad_norm": 0.8051297068595886, + "learning_rate": 3.433337194486274e-05, + "loss": 0.2523, + "step": 64360 + }, + { + "epoch": 2.4854241476504884, + "grad_norm": 2.218684196472168, + "learning_rate": 3.430763092526095e-05, + "loss": 0.2185, + "step": 64370 + }, + { + "epoch": 2.4858102629445153, + "grad_norm": 2.2917842864990234, + "learning_rate": 3.4281889905659165e-05, + "loss": 0.1521, + "step": 64380 + }, + { + "epoch": 2.486196378238542, + "grad_norm": 1.0776972770690918, + "learning_rate": 3.425614888605738e-05, + "loss": 0.2217, + "step": 64390 + }, + { + "epoch": 2.486582493532569, + "grad_norm": 1.0344847440719604, + "learning_rate": 3.423040786645559e-05, + "loss": 0.1871, + "step": 64400 + }, + { + "epoch": 2.4869686088265954, + "grad_norm": 0.6094161868095398, + "learning_rate": 3.4204666846853805e-05, + "loss": 0.1073, + "step": 64410 + }, + { + "epoch": 2.4873547241206224, + "grad_norm": 0.8258315920829773, + "learning_rate": 3.417892582725202e-05, + "loss": 0.1897, + "step": 64420 + }, + { + "epoch": 2.4877408394146494, + "grad_norm": 0.6779630184173584, + "learning_rate": 3.415318480765023e-05, + "loss": 0.0847, + "step": 64430 + }, + { + "epoch": 2.488126954708676, + "grad_norm": 1.7514374256134033, + "learning_rate": 
3.4127443788048445e-05, + "loss": 0.2243, + "step": 64440 + }, + { + "epoch": 2.488513070002703, + "grad_norm": 0.5367060899734497, + "learning_rate": 3.410170276844666e-05, + "loss": 0.3064, + "step": 64450 + }, + { + "epoch": 2.4888991852967295, + "grad_norm": 1.0501765012741089, + "learning_rate": 3.407596174884488e-05, + "loss": 0.1908, + "step": 64460 + }, + { + "epoch": 2.4892853005907565, + "grad_norm": 0.8501892685890198, + "learning_rate": 3.4050220729243085e-05, + "loss": 0.1238, + "step": 64470 + }, + { + "epoch": 2.489671415884783, + "grad_norm": 0.2177915871143341, + "learning_rate": 3.40244797096413e-05, + "loss": 0.114, + "step": 64480 + }, + { + "epoch": 2.49005753117881, + "grad_norm": 0.04332759603857994, + "learning_rate": 3.399873869003952e-05, + "loss": 0.2304, + "step": 64490 + }, + { + "epoch": 2.490443646472837, + "grad_norm": 0.6862795352935791, + "learning_rate": 3.397299767043773e-05, + "loss": 0.0624, + "step": 64500 + }, + { + "epoch": 2.4908297617668635, + "grad_norm": 0.7046298980712891, + "learning_rate": 3.394725665083594e-05, + "loss": 0.1957, + "step": 64510 + }, + { + "epoch": 2.4912158770608905, + "grad_norm": 0.5339323878288269, + "learning_rate": 3.392151563123416e-05, + "loss": 0.2889, + "step": 64520 + }, + { + "epoch": 2.491601992354917, + "grad_norm": 0.28813856840133667, + "learning_rate": 3.389577461163237e-05, + "loss": 0.0679, + "step": 64530 + }, + { + "epoch": 2.491988107648944, + "grad_norm": 0.6499680876731873, + "learning_rate": 3.387003359203058e-05, + "loss": 0.1949, + "step": 64540 + }, + { + "epoch": 2.4923742229429706, + "grad_norm": 0.26736152172088623, + "learning_rate": 3.384429257242879e-05, + "loss": 0.1647, + "step": 64550 + }, + { + "epoch": 2.4927603382369976, + "grad_norm": 2.9154460430145264, + "learning_rate": 3.381855155282701e-05, + "loss": 0.2578, + "step": 64560 + }, + { + "epoch": 2.4931464535310246, + "grad_norm": 0.23644079267978668, + "learning_rate": 3.3792810533225225e-05, + "loss": 0.1555, + "step": 64570 + }, + { + "epoch": 2.493532568825051, + "grad_norm": 1.459173560142517, + "learning_rate": 3.376706951362343e-05, + "loss": 0.1637, + "step": 64580 + }, + { + "epoch": 2.493918684119078, + "grad_norm": 0.5812413692474365, + "learning_rate": 3.374132849402165e-05, + "loss": 0.1067, + "step": 64590 + }, + { + "epoch": 2.4943047994131047, + "grad_norm": 1.358742594718933, + "learning_rate": 3.3715587474419865e-05, + "loss": 0.2211, + "step": 64600 + }, + { + "epoch": 2.4946909147071317, + "grad_norm": 1.94925856590271, + "learning_rate": 3.368984645481808e-05, + "loss": 0.4372, + "step": 64610 + }, + { + "epoch": 2.495077030001158, + "grad_norm": 0.4507911801338196, + "learning_rate": 3.366410543521629e-05, + "loss": 0.1736, + "step": 64620 + }, + { + "epoch": 2.495463145295185, + "grad_norm": 0.7500709891319275, + "learning_rate": 3.3638364415614504e-05, + "loss": 0.2025, + "step": 64630 + }, + { + "epoch": 2.4958492605892117, + "grad_norm": 2.6973438262939453, + "learning_rate": 3.361262339601272e-05, + "loss": 0.1173, + "step": 64640 + }, + { + "epoch": 2.4962353758832387, + "grad_norm": 2.0454306602478027, + "learning_rate": 3.358688237641093e-05, + "loss": 0.1304, + "step": 64650 + }, + { + "epoch": 2.4966214911772653, + "grad_norm": 2.469876527786255, + "learning_rate": 3.3561141356809144e-05, + "loss": 0.1758, + "step": 64660 + }, + { + "epoch": 2.4970076064712923, + "grad_norm": 0.8067110180854797, + "learning_rate": 3.353540033720736e-05, + "loss": 0.0747, + "step": 64670 + }, + { + "epoch": 
2.4973937217653193, + "grad_norm": 1.2773382663726807, + "learning_rate": 3.350965931760557e-05, + "loss": 0.1151, + "step": 64680 + }, + { + "epoch": 2.497779837059346, + "grad_norm": 0.1975592076778412, + "learning_rate": 3.3483918298003784e-05, + "loss": 0.2104, + "step": 64690 + }, + { + "epoch": 2.498165952353373, + "grad_norm": 0.6423165202140808, + "learning_rate": 3.3458177278402e-05, + "loss": 0.1885, + "step": 64700 + }, + { + "epoch": 2.4985520676473993, + "grad_norm": 1.818458914756775, + "learning_rate": 3.343243625880022e-05, + "loss": 0.1506, + "step": 64710 + }, + { + "epoch": 2.4989381829414263, + "grad_norm": 0.30381113290786743, + "learning_rate": 3.3406695239198424e-05, + "loss": 0.1791, + "step": 64720 + }, + { + "epoch": 2.499324298235453, + "grad_norm": 2.867393732070923, + "learning_rate": 3.338095421959664e-05, + "loss": 0.1854, + "step": 64730 + }, + { + "epoch": 2.49971041352948, + "grad_norm": 0.22897863388061523, + "learning_rate": 3.335521319999486e-05, + "loss": 0.1313, + "step": 64740 + }, + { + "epoch": 2.500096528823507, + "grad_norm": 2.5336554050445557, + "learning_rate": 3.332947218039307e-05, + "loss": 0.1814, + "step": 64750 + }, + { + "epoch": 2.5004826441175334, + "grad_norm": 0.10779833793640137, + "learning_rate": 3.330373116079128e-05, + "loss": 0.1563, + "step": 64760 + }, + { + "epoch": 2.5008687594115604, + "grad_norm": 1.1376827955245972, + "learning_rate": 3.32779901411895e-05, + "loss": 0.1495, + "step": 64770 + }, + { + "epoch": 2.501254874705587, + "grad_norm": 0.7156823873519897, + "learning_rate": 3.325224912158771e-05, + "loss": 0.2531, + "step": 64780 + }, + { + "epoch": 2.501640989999614, + "grad_norm": 0.7690051198005676, + "learning_rate": 3.322650810198592e-05, + "loss": 0.1204, + "step": 64790 + }, + { + "epoch": 2.5020271052936405, + "grad_norm": 0.0718945860862732, + "learning_rate": 3.320076708238413e-05, + "loss": 0.1044, + "step": 64800 + }, + { + "epoch": 2.5024132205876675, + "grad_norm": 0.12632794678211212, + "learning_rate": 3.317502606278235e-05, + "loss": 0.1007, + "step": 64810 + }, + { + "epoch": 2.5027993358816945, + "grad_norm": 0.9840032458305359, + "learning_rate": 3.3149285043180564e-05, + "loss": 0.2399, + "step": 64820 + }, + { + "epoch": 2.503185451175721, + "grad_norm": 0.3271815776824951, + "learning_rate": 3.312354402357877e-05, + "loss": 0.117, + "step": 64830 + }, + { + "epoch": 2.503571566469748, + "grad_norm": 2.1266753673553467, + "learning_rate": 3.309780300397699e-05, + "loss": 0.1819, + "step": 64840 + }, + { + "epoch": 2.5039576817637745, + "grad_norm": 0.5041390061378479, + "learning_rate": 3.3072061984375204e-05, + "loss": 0.0579, + "step": 64850 + }, + { + "epoch": 2.5043437970578015, + "grad_norm": 0.2855200469493866, + "learning_rate": 3.304632096477341e-05, + "loss": 0.1814, + "step": 64860 + }, + { + "epoch": 2.504729912351828, + "grad_norm": 1.9890060424804688, + "learning_rate": 3.302057994517163e-05, + "loss": 0.3038, + "step": 64870 + }, + { + "epoch": 2.505116027645855, + "grad_norm": 0.5688171982765198, + "learning_rate": 3.2994838925569844e-05, + "loss": 0.0947, + "step": 64880 + }, + { + "epoch": 2.505502142939882, + "grad_norm": 3.265097141265869, + "learning_rate": 3.296909790596806e-05, + "loss": 0.1722, + "step": 64890 + }, + { + "epoch": 2.5058882582339086, + "grad_norm": 1.0011316537857056, + "learning_rate": 3.294335688636627e-05, + "loss": 0.217, + "step": 64900 + }, + { + "epoch": 2.5062743735279356, + "grad_norm": 2.054866075515747, + "learning_rate": 
3.2917615866764484e-05, + "loss": 0.1862, + "step": 64910 + }, + { + "epoch": 2.506660488821962, + "grad_norm": 1.2730998992919922, + "learning_rate": 3.28918748471627e-05, + "loss": 0.1023, + "step": 64920 + }, + { + "epoch": 2.507046604115989, + "grad_norm": 1.5231776237487793, + "learning_rate": 3.286613382756091e-05, + "loss": 0.1922, + "step": 64930 + }, + { + "epoch": 2.5074327194100157, + "grad_norm": 0.7604933977127075, + "learning_rate": 3.2840392807959124e-05, + "loss": 0.1329, + "step": 64940 + }, + { + "epoch": 2.5078188347040427, + "grad_norm": 1.243506669998169, + "learning_rate": 3.281465178835734e-05, + "loss": 0.2142, + "step": 64950 + }, + { + "epoch": 2.5082049499980696, + "grad_norm": 3.0195231437683105, + "learning_rate": 3.278891076875556e-05, + "loss": 0.3474, + "step": 64960 + }, + { + "epoch": 2.508591065292096, + "grad_norm": 2.2186105251312256, + "learning_rate": 3.2763169749153764e-05, + "loss": 0.2125, + "step": 64970 + }, + { + "epoch": 2.508977180586123, + "grad_norm": 1.1483558416366577, + "learning_rate": 3.273742872955198e-05, + "loss": 0.1427, + "step": 64980 + }, + { + "epoch": 2.5093632958801497, + "grad_norm": 1.3046457767486572, + "learning_rate": 3.27116877099502e-05, + "loss": 0.0859, + "step": 64990 + }, + { + "epoch": 2.5097494111741767, + "grad_norm": 0.04392600804567337, + "learning_rate": 3.2685946690348404e-05, + "loss": 0.146, + "step": 65000 + }, + { + "epoch": 2.5101355264682033, + "grad_norm": 0.8138188123703003, + "learning_rate": 3.266020567074662e-05, + "loss": 0.1809, + "step": 65010 + }, + { + "epoch": 2.5105216417622302, + "grad_norm": 2.0875182151794434, + "learning_rate": 3.263446465114484e-05, + "loss": 0.1925, + "step": 65020 + }, + { + "epoch": 2.5109077570562572, + "grad_norm": 0.4890693426132202, + "learning_rate": 3.260872363154305e-05, + "loss": 0.1189, + "step": 65030 + }, + { + "epoch": 2.511293872350284, + "grad_norm": 0.21475251019001007, + "learning_rate": 3.258298261194126e-05, + "loss": 0.2583, + "step": 65040 + }, + { + "epoch": 2.5116799876443103, + "grad_norm": 1.272985577583313, + "learning_rate": 3.255724159233947e-05, + "loss": 0.3891, + "step": 65050 + }, + { + "epoch": 2.5120661029383373, + "grad_norm": 0.8311867713928223, + "learning_rate": 3.253150057273769e-05, + "loss": 0.1536, + "step": 65060 + }, + { + "epoch": 2.5124522182323643, + "grad_norm": 2.999817371368408, + "learning_rate": 3.2505759553135904e-05, + "loss": 0.2331, + "step": 65070 + }, + { + "epoch": 2.512838333526391, + "grad_norm": 0.4586697518825531, + "learning_rate": 3.248001853353411e-05, + "loss": 0.1496, + "step": 65080 + }, + { + "epoch": 2.513224448820418, + "grad_norm": 0.38037627935409546, + "learning_rate": 3.245427751393233e-05, + "loss": 0.1769, + "step": 65090 + }, + { + "epoch": 2.513610564114445, + "grad_norm": 0.9884114265441895, + "learning_rate": 3.2428536494330544e-05, + "loss": 0.157, + "step": 65100 + }, + { + "epoch": 2.5139966794084714, + "grad_norm": 2.8576581478118896, + "learning_rate": 3.240279547472875e-05, + "loss": 0.1484, + "step": 65110 + }, + { + "epoch": 2.514382794702498, + "grad_norm": 1.7361352443695068, + "learning_rate": 3.237705445512697e-05, + "loss": 0.1315, + "step": 65120 + }, + { + "epoch": 2.514768909996525, + "grad_norm": 1.4289588928222656, + "learning_rate": 3.2351313435525184e-05, + "loss": 0.1235, + "step": 65130 + }, + { + "epoch": 2.515155025290552, + "grad_norm": 2.256578207015991, + "learning_rate": 3.23255724159234e-05, + "loss": 0.1026, + "step": 65140 + }, + { + "epoch": 
2.5155411405845785, + "grad_norm": 2.3927180767059326, + "learning_rate": 3.229983139632161e-05, + "loss": 0.1264, + "step": 65150 + }, + { + "epoch": 2.5159272558786054, + "grad_norm": 0.7600728869438171, + "learning_rate": 3.2274090376719824e-05, + "loss": 0.1091, + "step": 65160 + }, + { + "epoch": 2.516313371172632, + "grad_norm": 1.196343183517456, + "learning_rate": 3.224834935711804e-05, + "loss": 0.1419, + "step": 65170 + }, + { + "epoch": 2.516699486466659, + "grad_norm": 1.8273401260375977, + "learning_rate": 3.222260833751625e-05, + "loss": 0.1519, + "step": 65180 + }, + { + "epoch": 2.5170856017606855, + "grad_norm": 0.586053192615509, + "learning_rate": 3.2196867317914463e-05, + "loss": 0.1348, + "step": 65190 + }, + { + "epoch": 2.5174717170547125, + "grad_norm": 0.34410399198532104, + "learning_rate": 3.217112629831268e-05, + "loss": 0.2406, + "step": 65200 + }, + { + "epoch": 2.5178578323487395, + "grad_norm": 1.9117828607559204, + "learning_rate": 3.214538527871089e-05, + "loss": 0.1669, + "step": 65210 + }, + { + "epoch": 2.518243947642766, + "grad_norm": 0.3416088819503784, + "learning_rate": 3.2119644259109103e-05, + "loss": 0.0738, + "step": 65220 + }, + { + "epoch": 2.518630062936793, + "grad_norm": 2.7562408447265625, + "learning_rate": 3.209390323950732e-05, + "loss": 0.2793, + "step": 65230 + }, + { + "epoch": 2.5190161782308196, + "grad_norm": 0.18978220224380493, + "learning_rate": 3.206816221990554e-05, + "loss": 0.2145, + "step": 65240 + }, + { + "epoch": 2.5194022935248466, + "grad_norm": 1.9624252319335938, + "learning_rate": 3.204242120030374e-05, + "loss": 0.2203, + "step": 65250 + }, + { + "epoch": 2.519788408818873, + "grad_norm": 1.9377204179763794, + "learning_rate": 3.201668018070196e-05, + "loss": 0.1587, + "step": 65260 + }, + { + "epoch": 2.5201745241129, + "grad_norm": 0.5279117226600647, + "learning_rate": 3.199093916110018e-05, + "loss": 0.1874, + "step": 65270 + }, + { + "epoch": 2.520560639406927, + "grad_norm": 1.3398821353912354, + "learning_rate": 3.196519814149839e-05, + "loss": 0.1258, + "step": 65280 + }, + { + "epoch": 2.5209467547009536, + "grad_norm": 3.5301899909973145, + "learning_rate": 3.1939457121896597e-05, + "loss": 0.2119, + "step": 65290 + }, + { + "epoch": 2.5213328699949806, + "grad_norm": 1.9934186935424805, + "learning_rate": 3.191371610229482e-05, + "loss": 0.1813, + "step": 65300 + }, + { + "epoch": 2.521718985289007, + "grad_norm": 0.049756068736314774, + "learning_rate": 3.188797508269303e-05, + "loss": 0.1224, + "step": 65310 + }, + { + "epoch": 2.522105100583034, + "grad_norm": 0.14521420001983643, + "learning_rate": 3.1862234063091237e-05, + "loss": 0.2149, + "step": 65320 + }, + { + "epoch": 2.5224912158770607, + "grad_norm": 1.602318525314331, + "learning_rate": 3.183649304348945e-05, + "loss": 0.2355, + "step": 65330 + }, + { + "epoch": 2.5228773311710877, + "grad_norm": 0.8942916989326477, + "learning_rate": 3.181075202388767e-05, + "loss": 0.0873, + "step": 65340 + }, + { + "epoch": 2.5232634464651147, + "grad_norm": 1.184981107711792, + "learning_rate": 3.178501100428588e-05, + "loss": 0.2573, + "step": 65350 + }, + { + "epoch": 2.5236495617591412, + "grad_norm": 1.4945077896118164, + "learning_rate": 3.175926998468409e-05, + "loss": 0.131, + "step": 65360 + }, + { + "epoch": 2.5240356770531682, + "grad_norm": 0.2049487978219986, + "learning_rate": 3.173352896508231e-05, + "loss": 0.0353, + "step": 65370 + }, + { + "epoch": 2.5244217923471948, + "grad_norm": 0.9006851315498352, + "learning_rate": 
3.170778794548052e-05, + "loss": 0.1341, + "step": 65380 + }, + { + "epoch": 2.5248079076412218, + "grad_norm": 0.08327820897102356, + "learning_rate": 3.1682046925878736e-05, + "loss": 0.1907, + "step": 65390 + }, + { + "epoch": 2.5251940229352483, + "grad_norm": 0.19701172411441803, + "learning_rate": 3.165630590627695e-05, + "loss": 0.131, + "step": 65400 + }, + { + "epoch": 2.5255801382292753, + "grad_norm": 2.7013745307922363, + "learning_rate": 3.163056488667516e-05, + "loss": 0.3517, + "step": 65410 + }, + { + "epoch": 2.5259662535233023, + "grad_norm": 0.20807845890522003, + "learning_rate": 3.1604823867073376e-05, + "loss": 0.0998, + "step": 65420 + }, + { + "epoch": 2.526352368817329, + "grad_norm": 0.6886903643608093, + "learning_rate": 3.157908284747159e-05, + "loss": 0.1197, + "step": 65430 + }, + { + "epoch": 2.5267384841113554, + "grad_norm": 0.42514756321907043, + "learning_rate": 3.15533418278698e-05, + "loss": 0.1692, + "step": 65440 + }, + { + "epoch": 2.5271245994053824, + "grad_norm": 0.26355358958244324, + "learning_rate": 3.1527600808268016e-05, + "loss": 0.121, + "step": 65450 + }, + { + "epoch": 2.5275107146994094, + "grad_norm": 0.14379052817821503, + "learning_rate": 3.150185978866623e-05, + "loss": 0.2221, + "step": 65460 + }, + { + "epoch": 2.527896829993436, + "grad_norm": 0.0977015420794487, + "learning_rate": 3.147611876906444e-05, + "loss": 0.158, + "step": 65470 + }, + { + "epoch": 2.528282945287463, + "grad_norm": 0.6228841543197632, + "learning_rate": 3.1450377749462656e-05, + "loss": 0.2043, + "step": 65480 + }, + { + "epoch": 2.52866906058149, + "grad_norm": 0.10137589275836945, + "learning_rate": 3.1424636729860876e-05, + "loss": 0.2369, + "step": 65490 + }, + { + "epoch": 2.5290551758755164, + "grad_norm": 0.20083734393119812, + "learning_rate": 3.139889571025908e-05, + "loss": 0.1989, + "step": 65500 + }, + { + "epoch": 2.529441291169543, + "grad_norm": 0.35362812876701355, + "learning_rate": 3.1373154690657296e-05, + "loss": 0.2544, + "step": 65510 + }, + { + "epoch": 2.52982740646357, + "grad_norm": 0.8700111508369446, + "learning_rate": 3.1347413671055516e-05, + "loss": 0.112, + "step": 65520 + }, + { + "epoch": 2.530213521757597, + "grad_norm": 0.0802641287446022, + "learning_rate": 3.132167265145372e-05, + "loss": 0.0596, + "step": 65530 + }, + { + "epoch": 2.5305996370516235, + "grad_norm": 1.380710244178772, + "learning_rate": 3.1295931631851936e-05, + "loss": 0.1295, + "step": 65540 + }, + { + "epoch": 2.5309857523456505, + "grad_norm": 0.9784635901451111, + "learning_rate": 3.1270190612250156e-05, + "loss": 0.2425, + "step": 65550 + }, + { + "epoch": 2.5313718676396775, + "grad_norm": 0.09004099667072296, + "learning_rate": 3.124444959264837e-05, + "loss": 0.0659, + "step": 65560 + }, + { + "epoch": 2.531757982933704, + "grad_norm": 0.33668985962867737, + "learning_rate": 3.1218708573046576e-05, + "loss": 0.1979, + "step": 65570 + }, + { + "epoch": 2.5321440982277306, + "grad_norm": 0.39336487650871277, + "learning_rate": 3.119296755344479e-05, + "loss": 0.17, + "step": 65580 + }, + { + "epoch": 2.5325302135217576, + "grad_norm": 0.6237707734107971, + "learning_rate": 3.116722653384301e-05, + "loss": 0.0943, + "step": 65590 + }, + { + "epoch": 2.5329163288157845, + "grad_norm": 0.1977011263370514, + "learning_rate": 3.114148551424122e-05, + "loss": 0.1265, + "step": 65600 + }, + { + "epoch": 2.533302444109811, + "grad_norm": 0.7314585447311401, + "learning_rate": 3.111574449463943e-05, + "loss": 0.1159, + "step": 65610 + }, + { + "epoch": 
2.533688559403838, + "grad_norm": 1.6389861106872559, + "learning_rate": 3.109000347503765e-05, + "loss": 0.1515, + "step": 65620 + }, + { + "epoch": 2.5340746746978646, + "grad_norm": 0.25401124358177185, + "learning_rate": 3.106426245543586e-05, + "loss": 0.2054, + "step": 65630 + }, + { + "epoch": 2.5344607899918916, + "grad_norm": 0.7747787833213806, + "learning_rate": 3.103852143583407e-05, + "loss": 0.0944, + "step": 65640 + }, + { + "epoch": 2.534846905285918, + "grad_norm": 2.0066640377044678, + "learning_rate": 3.101278041623229e-05, + "loss": 0.1223, + "step": 65650 + }, + { + "epoch": 2.535233020579945, + "grad_norm": 0.3246127665042877, + "learning_rate": 3.09870393966305e-05, + "loss": 0.2998, + "step": 65660 + }, + { + "epoch": 2.535619135873972, + "grad_norm": 0.6192534565925598, + "learning_rate": 3.0961298377028716e-05, + "loss": 0.0864, + "step": 65670 + }, + { + "epoch": 2.5360052511679987, + "grad_norm": 1.500116229057312, + "learning_rate": 3.093555735742693e-05, + "loss": 0.1384, + "step": 65680 + }, + { + "epoch": 2.5363913664620257, + "grad_norm": 1.661163091659546, + "learning_rate": 3.090981633782514e-05, + "loss": 0.1907, + "step": 65690 + }, + { + "epoch": 2.536777481756052, + "grad_norm": 0.46657463908195496, + "learning_rate": 3.0884075318223356e-05, + "loss": 0.2575, + "step": 65700 + }, + { + "epoch": 2.537163597050079, + "grad_norm": 1.0207927227020264, + "learning_rate": 3.085833429862157e-05, + "loss": 0.1799, + "step": 65710 + }, + { + "epoch": 2.5375497123441058, + "grad_norm": 0.5044147372245789, + "learning_rate": 3.083259327901978e-05, + "loss": 0.2646, + "step": 65720 + }, + { + "epoch": 2.5379358276381327, + "grad_norm": 0.6330855488777161, + "learning_rate": 3.0806852259417996e-05, + "loss": 0.2243, + "step": 65730 + }, + { + "epoch": 2.5383219429321597, + "grad_norm": 0.12088367342948914, + "learning_rate": 3.0781111239816216e-05, + "loss": 0.1685, + "step": 65740 + }, + { + "epoch": 2.5387080582261863, + "grad_norm": 1.5174821615219116, + "learning_rate": 3.075537022021442e-05, + "loss": 0.081, + "step": 65750 + }, + { + "epoch": 2.5390941735202133, + "grad_norm": 0.5644610524177551, + "learning_rate": 3.0729629200612636e-05, + "loss": 0.1046, + "step": 65760 + }, + { + "epoch": 2.53948028881424, + "grad_norm": 1.9365503787994385, + "learning_rate": 3.0703888181010856e-05, + "loss": 0.1162, + "step": 65770 + }, + { + "epoch": 2.539866404108267, + "grad_norm": 0.46223533153533936, + "learning_rate": 3.067814716140906e-05, + "loss": 0.1218, + "step": 65780 + }, + { + "epoch": 2.5402525194022934, + "grad_norm": 2.0017831325531006, + "learning_rate": 3.0652406141807276e-05, + "loss": 0.2885, + "step": 65790 + }, + { + "epoch": 2.5406386346963203, + "grad_norm": 1.7874171733856201, + "learning_rate": 3.0626665122205496e-05, + "loss": 0.1561, + "step": 65800 + }, + { + "epoch": 2.5410247499903473, + "grad_norm": 0.6481704115867615, + "learning_rate": 3.060092410260371e-05, + "loss": 0.188, + "step": 65810 + }, + { + "epoch": 2.541410865284374, + "grad_norm": 1.0805027484893799, + "learning_rate": 3.0575183083001916e-05, + "loss": 0.1335, + "step": 65820 + }, + { + "epoch": 2.541796980578401, + "grad_norm": 2.5876686573028564, + "learning_rate": 3.0549442063400136e-05, + "loss": 0.0836, + "step": 65830 + }, + { + "epoch": 2.5421830958724274, + "grad_norm": 0.9550105929374695, + "learning_rate": 3.052370104379835e-05, + "loss": 0.2, + "step": 65840 + }, + { + "epoch": 2.5425692111664544, + "grad_norm": 0.48512884974479675, + "learning_rate": 
3.049796002419656e-05, + "loss": 0.089, + "step": 65850 + }, + { + "epoch": 2.542955326460481, + "grad_norm": 0.8065679669380188, + "learning_rate": 3.0472219004594772e-05, + "loss": 0.1408, + "step": 65860 + }, + { + "epoch": 2.543341441754508, + "grad_norm": 0.32501792907714844, + "learning_rate": 3.044647798499299e-05, + "loss": 0.3102, + "step": 65870 + }, + { + "epoch": 2.543727557048535, + "grad_norm": 0.521720826625824, + "learning_rate": 3.04207369653912e-05, + "loss": 0.1617, + "step": 65880 + }, + { + "epoch": 2.5441136723425615, + "grad_norm": 0.4469434320926666, + "learning_rate": 3.0394995945789412e-05, + "loss": 0.0619, + "step": 65890 + }, + { + "epoch": 2.544499787636588, + "grad_norm": 1.0342704057693481, + "learning_rate": 3.036925492618763e-05, + "loss": 0.1, + "step": 65900 + }, + { + "epoch": 2.544885902930615, + "grad_norm": 0.5880559086799622, + "learning_rate": 3.0343513906585842e-05, + "loss": 0.1031, + "step": 65910 + }, + { + "epoch": 2.545272018224642, + "grad_norm": 0.065493643283844, + "learning_rate": 3.0317772886984052e-05, + "loss": 0.1029, + "step": 65920 + }, + { + "epoch": 2.5456581335186685, + "grad_norm": 0.6159729361534119, + "learning_rate": 3.029203186738227e-05, + "loss": 0.0852, + "step": 65930 + }, + { + "epoch": 2.5460442488126955, + "grad_norm": 1.618788242340088, + "learning_rate": 3.0266290847780482e-05, + "loss": 0.2189, + "step": 65940 + }, + { + "epoch": 2.5464303641067225, + "grad_norm": 4.030269145965576, + "learning_rate": 3.0240549828178692e-05, + "loss": 0.3216, + "step": 65950 + }, + { + "epoch": 2.546816479400749, + "grad_norm": 1.813103437423706, + "learning_rate": 3.0214808808576912e-05, + "loss": 0.2134, + "step": 65960 + }, + { + "epoch": 2.5472025946947756, + "grad_norm": 0.5134888887405396, + "learning_rate": 3.0189067788975122e-05, + "loss": 0.1133, + "step": 65970 + }, + { + "epoch": 2.5475887099888026, + "grad_norm": 0.027485152706503868, + "learning_rate": 3.0163326769373335e-05, + "loss": 0.0954, + "step": 65980 + }, + { + "epoch": 2.5479748252828296, + "grad_norm": 1.0274910926818848, + "learning_rate": 3.0137585749771552e-05, + "loss": 0.3188, + "step": 65990 + }, + { + "epoch": 2.548360940576856, + "grad_norm": 0.556398332118988, + "learning_rate": 3.0111844730169762e-05, + "loss": 0.0638, + "step": 66000 + }, + { + "epoch": 2.548747055870883, + "grad_norm": 0.16730284690856934, + "learning_rate": 3.0086103710567975e-05, + "loss": 0.1002, + "step": 66010 + }, + { + "epoch": 2.54913317116491, + "grad_norm": 0.401558518409729, + "learning_rate": 3.0060362690966192e-05, + "loss": 0.1537, + "step": 66020 + }, + { + "epoch": 2.5495192864589367, + "grad_norm": 0.8240194916725159, + "learning_rate": 3.0034621671364405e-05, + "loss": 0.2183, + "step": 66030 + }, + { + "epoch": 2.549905401752963, + "grad_norm": 0.34985408186912537, + "learning_rate": 3.0008880651762615e-05, + "loss": 0.2507, + "step": 66040 + }, + { + "epoch": 2.55029151704699, + "grad_norm": 1.4406944513320923, + "learning_rate": 2.9983139632160835e-05, + "loss": 0.2086, + "step": 66050 + }, + { + "epoch": 2.550677632341017, + "grad_norm": 1.0392922163009644, + "learning_rate": 2.9957398612559045e-05, + "loss": 0.2622, + "step": 66060 + }, + { + "epoch": 2.5510637476350437, + "grad_norm": 0.07720334827899933, + "learning_rate": 2.993165759295726e-05, + "loss": 0.2646, + "step": 66070 + }, + { + "epoch": 2.5514498629290707, + "grad_norm": 1.9083623886108398, + "learning_rate": 2.9905916573355475e-05, + "loss": 0.1832, + "step": 66080 + }, + { + "epoch": 
2.5518359782230973, + "grad_norm": 0.7062809467315674, + "learning_rate": 2.9880175553753685e-05, + "loss": 0.2515, + "step": 66090 + }, + { + "epoch": 2.5522220935171243, + "grad_norm": 2.6205830574035645, + "learning_rate": 2.98544345341519e-05, + "loss": 0.1872, + "step": 66100 + }, + { + "epoch": 2.552608208811151, + "grad_norm": 0.6591269373893738, + "learning_rate": 2.982869351455011e-05, + "loss": 0.2101, + "step": 66110 + }, + { + "epoch": 2.552994324105178, + "grad_norm": 0.11720894277095795, + "learning_rate": 2.980295249494833e-05, + "loss": 0.0773, + "step": 66120 + }, + { + "epoch": 2.553380439399205, + "grad_norm": 0.4956444799900055, + "learning_rate": 2.977721147534654e-05, + "loss": 0.0854, + "step": 66130 + }, + { + "epoch": 2.5537665546932313, + "grad_norm": 0.11073987185955048, + "learning_rate": 2.9751470455744752e-05, + "loss": 0.0968, + "step": 66140 + }, + { + "epoch": 2.5541526699872583, + "grad_norm": 0.1464674174785614, + "learning_rate": 2.972572943614297e-05, + "loss": 0.1609, + "step": 66150 + }, + { + "epoch": 2.554538785281285, + "grad_norm": 1.9735543727874756, + "learning_rate": 2.9699988416541182e-05, + "loss": 0.2884, + "step": 66160 + }, + { + "epoch": 2.554924900575312, + "grad_norm": 0.5370622873306274, + "learning_rate": 2.9674247396939392e-05, + "loss": 0.1345, + "step": 66170 + }, + { + "epoch": 2.5553110158693384, + "grad_norm": 1.845221996307373, + "learning_rate": 2.964850637733761e-05, + "loss": 0.1761, + "step": 66180 + }, + { + "epoch": 2.5556971311633654, + "grad_norm": 0.051780425012111664, + "learning_rate": 2.9622765357735822e-05, + "loss": 0.1886, + "step": 66190 + }, + { + "epoch": 2.5560832464573924, + "grad_norm": 2.163306713104248, + "learning_rate": 2.959702433813403e-05, + "loss": 0.2559, + "step": 66200 + }, + { + "epoch": 2.556469361751419, + "grad_norm": 1.9034422636032104, + "learning_rate": 2.9571283318532252e-05, + "loss": 0.1655, + "step": 66210 + }, + { + "epoch": 2.556855477045446, + "grad_norm": 0.1645793616771698, + "learning_rate": 2.954554229893046e-05, + "loss": 0.0944, + "step": 66220 + }, + { + "epoch": 2.5572415923394725, + "grad_norm": 0.0783633440732956, + "learning_rate": 2.9519801279328675e-05, + "loss": 0.1487, + "step": 66230 + }, + { + "epoch": 2.5576277076334994, + "grad_norm": 1.8718386888504028, + "learning_rate": 2.949406025972689e-05, + "loss": 0.1607, + "step": 66240 + }, + { + "epoch": 2.558013822927526, + "grad_norm": 0.23518329858779907, + "learning_rate": 2.94683192401251e-05, + "loss": 0.0665, + "step": 66250 + }, + { + "epoch": 2.558399938221553, + "grad_norm": 2.0301125049591064, + "learning_rate": 2.9442578220523315e-05, + "loss": 0.2067, + "step": 66260 + }, + { + "epoch": 2.55878605351558, + "grad_norm": 0.8637131452560425, + "learning_rate": 2.941683720092153e-05, + "loss": 0.1247, + "step": 66270 + }, + { + "epoch": 2.5591721688096065, + "grad_norm": 3.191856861114502, + "learning_rate": 2.9391096181319745e-05, + "loss": 0.1793, + "step": 66280 + }, + { + "epoch": 2.5595582841036335, + "grad_norm": 2.5240116119384766, + "learning_rate": 2.9365355161717955e-05, + "loss": 0.2405, + "step": 66290 + }, + { + "epoch": 2.55994439939766, + "grad_norm": 0.651969313621521, + "learning_rate": 2.933961414211617e-05, + "loss": 0.236, + "step": 66300 + }, + { + "epoch": 2.560330514691687, + "grad_norm": 0.6139543056488037, + "learning_rate": 2.9313873122514385e-05, + "loss": 0.0929, + "step": 66310 + }, + { + "epoch": 2.5607166299857136, + "grad_norm": 2.219248056411743, + "learning_rate": 
2.9288132102912598e-05, + "loss": 0.1579, + "step": 66320 + }, + { + "epoch": 2.5611027452797406, + "grad_norm": 1.0484322309494019, + "learning_rate": 2.9262391083310815e-05, + "loss": 0.0948, + "step": 66330 + }, + { + "epoch": 2.5614888605737676, + "grad_norm": 4.444892406463623, + "learning_rate": 2.9236650063709025e-05, + "loss": 0.2459, + "step": 66340 + }, + { + "epoch": 2.561874975867794, + "grad_norm": 0.5715140700340271, + "learning_rate": 2.9210909044107238e-05, + "loss": 0.1527, + "step": 66350 + }, + { + "epoch": 2.5622610911618207, + "grad_norm": 1.5631526708602905, + "learning_rate": 2.9185168024505448e-05, + "loss": 0.2262, + "step": 66360 + }, + { + "epoch": 2.5626472064558476, + "grad_norm": 0.5048274397850037, + "learning_rate": 2.9159427004903668e-05, + "loss": 0.1175, + "step": 66370 + }, + { + "epoch": 2.5630333217498746, + "grad_norm": 1.8589451313018799, + "learning_rate": 2.9133685985301878e-05, + "loss": 0.1788, + "step": 66380 + }, + { + "epoch": 2.563419437043901, + "grad_norm": 0.7230979800224304, + "learning_rate": 2.910794496570009e-05, + "loss": 0.1978, + "step": 66390 + }, + { + "epoch": 2.563805552337928, + "grad_norm": 0.34469518065452576, + "learning_rate": 2.9082203946098308e-05, + "loss": 0.2338, + "step": 66400 + }, + { + "epoch": 2.564191667631955, + "grad_norm": 0.396876722574234, + "learning_rate": 2.9056462926496518e-05, + "loss": 0.1059, + "step": 66410 + }, + { + "epoch": 2.5645777829259817, + "grad_norm": 0.10169263184070587, + "learning_rate": 2.903072190689473e-05, + "loss": 0.1637, + "step": 66420 + }, + { + "epoch": 2.5649638982200083, + "grad_norm": 0.9067368507385254, + "learning_rate": 2.9004980887292948e-05, + "loss": 0.1774, + "step": 66430 + }, + { + "epoch": 2.5653500135140352, + "grad_norm": 0.03508066385984421, + "learning_rate": 2.897923986769116e-05, + "loss": 0.1696, + "step": 66440 + }, + { + "epoch": 2.5657361288080622, + "grad_norm": 0.8411409258842468, + "learning_rate": 2.895349884808937e-05, + "loss": 0.1355, + "step": 66450 + }, + { + "epoch": 2.566122244102089, + "grad_norm": 9.54201602935791, + "learning_rate": 2.8927757828487588e-05, + "loss": 0.2578, + "step": 66460 + }, + { + "epoch": 2.5665083593961158, + "grad_norm": 0.5490165948867798, + "learning_rate": 2.89020168088858e-05, + "loss": 0.2095, + "step": 66470 + }, + { + "epoch": 2.5668944746901423, + "grad_norm": 0.1423688530921936, + "learning_rate": 2.8876275789284015e-05, + "loss": 0.1465, + "step": 66480 + }, + { + "epoch": 2.5672805899841693, + "grad_norm": 1.456730604171753, + "learning_rate": 2.885053476968223e-05, + "loss": 0.1963, + "step": 66490 + }, + { + "epoch": 2.567666705278196, + "grad_norm": 1.5556591749191284, + "learning_rate": 2.882479375008044e-05, + "loss": 0.2851, + "step": 66500 + }, + { + "epoch": 2.568052820572223, + "grad_norm": 1.825986385345459, + "learning_rate": 2.8799052730478654e-05, + "loss": 0.2956, + "step": 66510 + }, + { + "epoch": 2.56843893586625, + "grad_norm": 1.5602234601974487, + "learning_rate": 2.877331171087687e-05, + "loss": 0.1656, + "step": 66520 + }, + { + "epoch": 2.5688250511602764, + "grad_norm": 0.17269374430179596, + "learning_rate": 2.8747570691275084e-05, + "loss": 0.2931, + "step": 66530 + }, + { + "epoch": 2.5692111664543034, + "grad_norm": 2.0602357387542725, + "learning_rate": 2.8721829671673294e-05, + "loss": 0.1741, + "step": 66540 + }, + { + "epoch": 2.56959728174833, + "grad_norm": 0.8411749005317688, + "learning_rate": 2.869608865207151e-05, + "loss": 0.2062, + "step": 66550 + }, + { + "epoch": 
2.569983397042357, + "grad_norm": 1.9933018684387207, + "learning_rate": 2.8670347632469724e-05, + "loss": 0.0564, + "step": 66560 + }, + { + "epoch": 2.5703695123363834, + "grad_norm": 0.6383324861526489, + "learning_rate": 2.8644606612867934e-05, + "loss": 0.1954, + "step": 66570 + }, + { + "epoch": 2.5707556276304104, + "grad_norm": 0.6832618713378906, + "learning_rate": 2.8618865593266154e-05, + "loss": 0.1364, + "step": 66580 + }, + { + "epoch": 2.5711417429244374, + "grad_norm": 0.13984490931034088, + "learning_rate": 2.8593124573664364e-05, + "loss": 0.2051, + "step": 66590 + }, + { + "epoch": 2.571527858218464, + "grad_norm": 0.9289367198944092, + "learning_rate": 2.8567383554062578e-05, + "loss": 0.167, + "step": 66600 + }, + { + "epoch": 2.571913973512491, + "grad_norm": 2.27067232131958, + "learning_rate": 2.8541642534460794e-05, + "loss": 0.2125, + "step": 66610 + }, + { + "epoch": 2.5723000888065175, + "grad_norm": 2.3731513023376465, + "learning_rate": 2.8515901514859004e-05, + "loss": 0.1417, + "step": 66620 + }, + { + "epoch": 2.5726862041005445, + "grad_norm": 2.030726194381714, + "learning_rate": 2.8490160495257218e-05, + "loss": 0.1292, + "step": 66630 + }, + { + "epoch": 2.573072319394571, + "grad_norm": 2.3475704193115234, + "learning_rate": 2.846441947565543e-05, + "loss": 0.2046, + "step": 66640 + }, + { + "epoch": 2.573458434688598, + "grad_norm": 1.8660598993301392, + "learning_rate": 2.8438678456053648e-05, + "loss": 0.1539, + "step": 66650 + }, + { + "epoch": 2.573844549982625, + "grad_norm": 2.3513095378875732, + "learning_rate": 2.8412937436451858e-05, + "loss": 0.3371, + "step": 66660 + }, + { + "epoch": 2.5742306652766516, + "grad_norm": 0.40551525354385376, + "learning_rate": 2.838719641685007e-05, + "loss": 0.1595, + "step": 66670 + }, + { + "epoch": 2.5746167805706786, + "grad_norm": 0.5435059070587158, + "learning_rate": 2.8361455397248288e-05, + "loss": 0.1139, + "step": 66680 + }, + { + "epoch": 2.575002895864705, + "grad_norm": 0.13456226885318756, + "learning_rate": 2.83357143776465e-05, + "loss": 0.1576, + "step": 66690 + }, + { + "epoch": 2.575389011158732, + "grad_norm": 0.8292468786239624, + "learning_rate": 2.830997335804471e-05, + "loss": 0.3232, + "step": 66700 + }, + { + "epoch": 2.5757751264527586, + "grad_norm": 2.613285541534424, + "learning_rate": 2.8284232338442927e-05, + "loss": 0.3047, + "step": 66710 + }, + { + "epoch": 2.5761612417467856, + "grad_norm": 0.11251250654459, + "learning_rate": 2.825849131884114e-05, + "loss": 0.2049, + "step": 66720 + }, + { + "epoch": 2.5765473570408126, + "grad_norm": 0.4605161249637604, + "learning_rate": 2.823275029923935e-05, + "loss": 0.0768, + "step": 66730 + }, + { + "epoch": 2.576933472334839, + "grad_norm": 1.2005031108856201, + "learning_rate": 2.820700927963757e-05, + "loss": 0.2257, + "step": 66740 + }, + { + "epoch": 2.5773195876288657, + "grad_norm": 1.1498386859893799, + "learning_rate": 2.818126826003578e-05, + "loss": 0.0817, + "step": 66750 + }, + { + "epoch": 2.5777057029228927, + "grad_norm": 0.5442838072776794, + "learning_rate": 2.8155527240433994e-05, + "loss": 0.1027, + "step": 66760 + }, + { + "epoch": 2.5780918182169197, + "grad_norm": 0.026386337354779243, + "learning_rate": 2.812978622083221e-05, + "loss": 0.2398, + "step": 66770 + }, + { + "epoch": 2.5784779335109462, + "grad_norm": 1.8325613737106323, + "learning_rate": 2.810404520123042e-05, + "loss": 0.1616, + "step": 66780 + }, + { + "epoch": 2.578864048804973, + "grad_norm": 4.571000576019287, + "learning_rate": 
2.8078304181628634e-05, + "loss": 0.327, + "step": 66790 + }, + { + "epoch": 2.579250164099, + "grad_norm": 0.88929682970047, + "learning_rate": 2.805256316202685e-05, + "loss": 0.128, + "step": 66800 + }, + { + "epoch": 2.5796362793930268, + "grad_norm": 0.5320045351982117, + "learning_rate": 2.8026822142425064e-05, + "loss": 0.043, + "step": 66810 + }, + { + "epoch": 2.5800223946870533, + "grad_norm": 2.667931079864502, + "learning_rate": 2.8001081122823274e-05, + "loss": 0.1023, + "step": 66820 + }, + { + "epoch": 2.5804085099810803, + "grad_norm": 0.5052358508110046, + "learning_rate": 2.7975340103221494e-05, + "loss": 0.1913, + "step": 66830 + }, + { + "epoch": 2.5807946252751073, + "grad_norm": 0.03477906435728073, + "learning_rate": 2.7949599083619704e-05, + "loss": 0.0771, + "step": 66840 + }, + { + "epoch": 2.581180740569134, + "grad_norm": 1.756790280342102, + "learning_rate": 2.7923858064017917e-05, + "loss": 0.1997, + "step": 66850 + }, + { + "epoch": 2.581566855863161, + "grad_norm": 0.2687755227088928, + "learning_rate": 2.7898117044416134e-05, + "loss": 0.1979, + "step": 66860 + }, + { + "epoch": 2.581952971157188, + "grad_norm": 0.3334960341453552, + "learning_rate": 2.7872376024814344e-05, + "loss": 0.1232, + "step": 66870 + }, + { + "epoch": 2.5823390864512143, + "grad_norm": 0.06918884068727493, + "learning_rate": 2.7846635005212557e-05, + "loss": 0.1476, + "step": 66880 + }, + { + "epoch": 2.582725201745241, + "grad_norm": 2.113374710083008, + "learning_rate": 2.7820893985610767e-05, + "loss": 0.2019, + "step": 66890 + }, + { + "epoch": 2.583111317039268, + "grad_norm": 0.7421366572380066, + "learning_rate": 2.7795152966008987e-05, + "loss": 0.1316, + "step": 66900 + }, + { + "epoch": 2.583497432333295, + "grad_norm": 0.5632963180541992, + "learning_rate": 2.7769411946407197e-05, + "loss": 0.2162, + "step": 66910 + }, + { + "epoch": 2.5838835476273214, + "grad_norm": 1.951395869255066, + "learning_rate": 2.774367092680541e-05, + "loss": 0.2977, + "step": 66920 + }, + { + "epoch": 2.5842696629213484, + "grad_norm": 1.139452338218689, + "learning_rate": 2.7717929907203627e-05, + "loss": 0.1505, + "step": 66930 + }, + { + "epoch": 2.584655778215375, + "grad_norm": 1.4778863191604614, + "learning_rate": 2.769218888760184e-05, + "loss": 0.167, + "step": 66940 + }, + { + "epoch": 2.585041893509402, + "grad_norm": 0.3687165379524231, + "learning_rate": 2.766644786800005e-05, + "loss": 0.0876, + "step": 66950 + }, + { + "epoch": 2.5854280088034285, + "grad_norm": 0.3774222433567047, + "learning_rate": 2.7640706848398267e-05, + "loss": 0.0574, + "step": 66960 + }, + { + "epoch": 2.5858141240974555, + "grad_norm": 1.060309886932373, + "learning_rate": 2.761496582879648e-05, + "loss": 0.1978, + "step": 66970 + }, + { + "epoch": 2.5862002393914825, + "grad_norm": 0.041595011949539185, + "learning_rate": 2.758922480919469e-05, + "loss": 0.1617, + "step": 66980 + }, + { + "epoch": 2.586586354685509, + "grad_norm": 1.0350271463394165, + "learning_rate": 2.756348378959291e-05, + "loss": 0.1443, + "step": 66990 + }, + { + "epoch": 2.586972469979536, + "grad_norm": 3.072813034057617, + "learning_rate": 2.753774276999112e-05, + "loss": 0.1084, + "step": 67000 + }, + { + "epoch": 2.5873585852735626, + "grad_norm": 1.4295400381088257, + "learning_rate": 2.7512001750389334e-05, + "loss": 0.2752, + "step": 67010 + }, + { + "epoch": 2.5877447005675895, + "grad_norm": 0.904931366443634, + "learning_rate": 2.748626073078755e-05, + "loss": 0.1908, + "step": 67020 + }, + { + "epoch": 
2.588130815861616, + "grad_norm": 0.12692487239837646, + "learning_rate": 2.746051971118576e-05, + "loss": 0.1956, + "step": 67030 + }, + { + "epoch": 2.588516931155643, + "grad_norm": 3.306731700897217, + "learning_rate": 2.7434778691583974e-05, + "loss": 0.1552, + "step": 67040 + }, + { + "epoch": 2.58890304644967, + "grad_norm": 0.17633609473705292, + "learning_rate": 2.740903767198219e-05, + "loss": 0.2643, + "step": 67050 + }, + { + "epoch": 2.5892891617436966, + "grad_norm": 1.683794617652893, + "learning_rate": 2.7383296652380404e-05, + "loss": 0.1609, + "step": 67060 + }, + { + "epoch": 2.5896752770377236, + "grad_norm": 0.799902617931366, + "learning_rate": 2.7357555632778613e-05, + "loss": 0.1787, + "step": 67070 + }, + { + "epoch": 2.59006139233175, + "grad_norm": 0.41263818740844727, + "learning_rate": 2.733181461317683e-05, + "loss": 0.144, + "step": 67080 + }, + { + "epoch": 2.590447507625777, + "grad_norm": 0.018304159864783287, + "learning_rate": 2.7306073593575043e-05, + "loss": 0.3317, + "step": 67090 + }, + { + "epoch": 2.5908336229198037, + "grad_norm": 0.5893455743789673, + "learning_rate": 2.7280332573973257e-05, + "loss": 0.1116, + "step": 67100 + }, + { + "epoch": 2.5912197382138307, + "grad_norm": 0.04571494832634926, + "learning_rate": 2.7254591554371473e-05, + "loss": 0.0919, + "step": 67110 + }, + { + "epoch": 2.5916058535078577, + "grad_norm": 1.2573976516723633, + "learning_rate": 2.7228850534769683e-05, + "loss": 0.0927, + "step": 67120 + }, + { + "epoch": 2.591991968801884, + "grad_norm": 2.4016544818878174, + "learning_rate": 2.7203109515167897e-05, + "loss": 0.2308, + "step": 67130 + }, + { + "epoch": 2.592378084095911, + "grad_norm": 0.6153950691223145, + "learning_rate": 2.7177368495566113e-05, + "loss": 0.3398, + "step": 67140 + }, + { + "epoch": 2.5927641993899377, + "grad_norm": 0.638940155506134, + "learning_rate": 2.7151627475964327e-05, + "loss": 0.0721, + "step": 67150 + }, + { + "epoch": 2.5931503146839647, + "grad_norm": 2.8470890522003174, + "learning_rate": 2.7125886456362537e-05, + "loss": 0.3437, + "step": 67160 + }, + { + "epoch": 2.5935364299779913, + "grad_norm": 0.21816271543502808, + "learning_rate": 2.710014543676075e-05, + "loss": 0.2749, + "step": 67170 + }, + { + "epoch": 2.5939225452720183, + "grad_norm": 1.3057670593261719, + "learning_rate": 2.7074404417158967e-05, + "loss": 0.2564, + "step": 67180 + }, + { + "epoch": 2.5943086605660453, + "grad_norm": 3.9401612281799316, + "learning_rate": 2.7048663397557177e-05, + "loss": 0.1827, + "step": 67190 + }, + { + "epoch": 2.594694775860072, + "grad_norm": 0.09398512542247772, + "learning_rate": 2.702292237795539e-05, + "loss": 0.2714, + "step": 67200 + }, + { + "epoch": 2.5950808911540983, + "grad_norm": 1.3454128503799438, + "learning_rate": 2.6997181358353607e-05, + "loss": 0.1409, + "step": 67210 + }, + { + "epoch": 2.5954670064481253, + "grad_norm": 3.312215566635132, + "learning_rate": 2.697144033875182e-05, + "loss": 0.1655, + "step": 67220 + }, + { + "epoch": 2.5958531217421523, + "grad_norm": 0.40052536129951477, + "learning_rate": 2.694569931915003e-05, + "loss": 0.095, + "step": 67230 + }, + { + "epoch": 2.596239237036179, + "grad_norm": 2.1148762702941895, + "learning_rate": 2.6919958299548247e-05, + "loss": 0.1167, + "step": 67240 + }, + { + "epoch": 2.596625352330206, + "grad_norm": 0.539953887462616, + "learning_rate": 2.689421727994646e-05, + "loss": 0.2198, + "step": 67250 + }, + { + "epoch": 2.597011467624233, + "grad_norm": 0.6172623634338379, + "learning_rate": 
2.6868476260344673e-05, + "loss": 0.0879, + "step": 67260 + }, + { + "epoch": 2.5973975829182594, + "grad_norm": 0.9759122729301453, + "learning_rate": 2.684273524074289e-05, + "loss": 0.1162, + "step": 67270 + }, + { + "epoch": 2.597783698212286, + "grad_norm": 1.2510347366333008, + "learning_rate": 2.68169942211411e-05, + "loss": 0.1106, + "step": 67280 + }, + { + "epoch": 2.598169813506313, + "grad_norm": 1.3545809984207153, + "learning_rate": 2.6791253201539313e-05, + "loss": 0.1538, + "step": 67290 + }, + { + "epoch": 2.59855592880034, + "grad_norm": 0.4334702789783478, + "learning_rate": 2.676551218193753e-05, + "loss": 0.1178, + "step": 67300 + }, + { + "epoch": 2.5989420440943665, + "grad_norm": 0.7618227601051331, + "learning_rate": 2.6739771162335743e-05, + "loss": 0.158, + "step": 67310 + }, + { + "epoch": 2.5993281593883935, + "grad_norm": 2.0803728103637695, + "learning_rate": 2.6714030142733953e-05, + "loss": 0.2248, + "step": 67320 + }, + { + "epoch": 2.5997142746824204, + "grad_norm": 0.7592846751213074, + "learning_rate": 2.668828912313217e-05, + "loss": 0.139, + "step": 67330 + }, + { + "epoch": 2.600100389976447, + "grad_norm": 2.20271372795105, + "learning_rate": 2.6662548103530383e-05, + "loss": 0.2735, + "step": 67340 + }, + { + "epoch": 2.6004865052704735, + "grad_norm": 2.25789213180542, + "learning_rate": 2.6636807083928593e-05, + "loss": 0.1816, + "step": 67350 + }, + { + "epoch": 2.6008726205645005, + "grad_norm": 1.8669871091842651, + "learning_rate": 2.6611066064326813e-05, + "loss": 0.2003, + "step": 67360 + }, + { + "epoch": 2.6012587358585275, + "grad_norm": 2.2639665603637695, + "learning_rate": 2.6585325044725023e-05, + "loss": 0.2869, + "step": 67370 + }, + { + "epoch": 2.601644851152554, + "grad_norm": 0.5188022255897522, + "learning_rate": 2.6559584025123236e-05, + "loss": 0.1267, + "step": 67380 + }, + { + "epoch": 2.602030966446581, + "grad_norm": 0.7478063106536865, + "learning_rate": 2.6533843005521453e-05, + "loss": 0.1506, + "step": 67390 + }, + { + "epoch": 2.6024170817406076, + "grad_norm": 0.05027804523706436, + "learning_rate": 2.6508101985919663e-05, + "loss": 0.1469, + "step": 67400 + }, + { + "epoch": 2.6028031970346346, + "grad_norm": 1.4990183115005493, + "learning_rate": 2.6482360966317876e-05, + "loss": 0.0823, + "step": 67410 + }, + { + "epoch": 2.603189312328661, + "grad_norm": 0.23909913003444672, + "learning_rate": 2.645661994671609e-05, + "loss": 0.1971, + "step": 67420 + }, + { + "epoch": 2.603575427622688, + "grad_norm": 0.03295808658003807, + "learning_rate": 2.6430878927114306e-05, + "loss": 0.0741, + "step": 67430 + }, + { + "epoch": 2.603961542916715, + "grad_norm": 1.407315731048584, + "learning_rate": 2.6405137907512516e-05, + "loss": 0.115, + "step": 67440 + }, + { + "epoch": 2.6043476582107417, + "grad_norm": 2.2319045066833496, + "learning_rate": 2.637939688791073e-05, + "loss": 0.2297, + "step": 67450 + }, + { + "epoch": 2.6047337735047686, + "grad_norm": 0.40752896666526794, + "learning_rate": 2.6353655868308946e-05, + "loss": 0.132, + "step": 67460 + }, + { + "epoch": 2.605119888798795, + "grad_norm": 1.6817177534103394, + "learning_rate": 2.632791484870716e-05, + "loss": 0.1508, + "step": 67470 + }, + { + "epoch": 2.605506004092822, + "grad_norm": 1.18791663646698, + "learning_rate": 2.630217382910537e-05, + "loss": 0.1663, + "step": 67480 + }, + { + "epoch": 2.6058921193868487, + "grad_norm": 2.44256329536438, + "learning_rate": 2.6276432809503586e-05, + "loss": 0.1656, + "step": 67490 + }, + { + "epoch": 
2.6062782346808757, + "grad_norm": 0.49040651321411133, + "learning_rate": 2.62506917899018e-05, + "loss": 0.227, + "step": 67500 + }, + { + "epoch": 2.6066643499749027, + "grad_norm": 0.6817883849143982, + "learning_rate": 2.622495077030001e-05, + "loss": 0.0833, + "step": 67510 + }, + { + "epoch": 2.6070504652689293, + "grad_norm": 1.4250826835632324, + "learning_rate": 2.619920975069823e-05, + "loss": 0.0818, + "step": 67520 + }, + { + "epoch": 2.6074365805629562, + "grad_norm": 5.752524375915527, + "learning_rate": 2.617346873109644e-05, + "loss": 0.1824, + "step": 67530 + }, + { + "epoch": 2.607822695856983, + "grad_norm": 1.8009400367736816, + "learning_rate": 2.6147727711494653e-05, + "loss": 0.1534, + "step": 67540 + }, + { + "epoch": 2.6082088111510098, + "grad_norm": 0.7888918519020081, + "learning_rate": 2.612198669189287e-05, + "loss": 0.2632, + "step": 67550 + }, + { + "epoch": 2.6085949264450363, + "grad_norm": 1.1485899686813354, + "learning_rate": 2.609624567229108e-05, + "loss": 0.2014, + "step": 67560 + }, + { + "epoch": 2.6089810417390633, + "grad_norm": 0.017272522673010826, + "learning_rate": 2.6070504652689293e-05, + "loss": 0.1698, + "step": 67570 + }, + { + "epoch": 2.6093671570330903, + "grad_norm": 0.23312939703464508, + "learning_rate": 2.604476363308751e-05, + "loss": 0.11, + "step": 67580 + }, + { + "epoch": 2.609753272327117, + "grad_norm": 1.5409551858901978, + "learning_rate": 2.6019022613485723e-05, + "loss": 0.2031, + "step": 67590 + }, + { + "epoch": 2.610139387621144, + "grad_norm": 2.757416248321533, + "learning_rate": 2.5993281593883933e-05, + "loss": 0.2326, + "step": 67600 + }, + { + "epoch": 2.6105255029151704, + "grad_norm": 0.03714454174041748, + "learning_rate": 2.5967540574282153e-05, + "loss": 0.305, + "step": 67610 + }, + { + "epoch": 2.6109116182091974, + "grad_norm": 1.5993083715438843, + "learning_rate": 2.5941799554680363e-05, + "loss": 0.0492, + "step": 67620 + }, + { + "epoch": 2.611297733503224, + "grad_norm": 3.3023600578308105, + "learning_rate": 2.5916058535078576e-05, + "loss": 0.1256, + "step": 67630 + }, + { + "epoch": 2.611683848797251, + "grad_norm": 2.0448055267333984, + "learning_rate": 2.5890317515476793e-05, + "loss": 0.1496, + "step": 67640 + }, + { + "epoch": 2.612069964091278, + "grad_norm": 2.056248188018799, + "learning_rate": 2.5864576495875002e-05, + "loss": 0.1396, + "step": 67650 + }, + { + "epoch": 2.6124560793853044, + "grad_norm": 0.7189445495605469, + "learning_rate": 2.5838835476273216e-05, + "loss": 0.1375, + "step": 67660 + }, + { + "epoch": 2.612842194679331, + "grad_norm": 0.3849039077758789, + "learning_rate": 2.5813094456671426e-05, + "loss": 0.0882, + "step": 67670 + }, + { + "epoch": 2.613228309973358, + "grad_norm": 1.2116458415985107, + "learning_rate": 2.5787353437069646e-05, + "loss": 0.0892, + "step": 67680 + }, + { + "epoch": 2.613614425267385, + "grad_norm": 0.5601721405982971, + "learning_rate": 2.5761612417467856e-05, + "loss": 0.148, + "step": 67690 + }, + { + "epoch": 2.6140005405614115, + "grad_norm": 1.1883691549301147, + "learning_rate": 2.573587139786607e-05, + "loss": 0.1673, + "step": 67700 + }, + { + "epoch": 2.6143866558554385, + "grad_norm": 0.8299083113670349, + "learning_rate": 2.5710130378264286e-05, + "loss": 0.1871, + "step": 67710 + }, + { + "epoch": 2.6147727711494655, + "grad_norm": 0.6316946744918823, + "learning_rate": 2.5684389358662496e-05, + "loss": 0.0707, + "step": 67720 + }, + { + "epoch": 2.615158886443492, + "grad_norm": 0.5716143846511841, + "learning_rate": 
2.565864833906071e-05, + "loss": 0.0993, + "step": 67730 + }, + { + "epoch": 2.6155450017375186, + "grad_norm": 0.6665957570075989, + "learning_rate": 2.5632907319458926e-05, + "loss": 0.1402, + "step": 67740 + }, + { + "epoch": 2.6159311170315456, + "grad_norm": 1.1837033033370972, + "learning_rate": 2.560716629985714e-05, + "loss": 0.0963, + "step": 67750 + }, + { + "epoch": 2.6163172323255726, + "grad_norm": 0.7204211354255676, + "learning_rate": 2.558142528025535e-05, + "loss": 0.1109, + "step": 67760 + }, + { + "epoch": 2.616703347619599, + "grad_norm": 0.04177774861454964, + "learning_rate": 2.555568426065357e-05, + "loss": 0.206, + "step": 67770 + }, + { + "epoch": 2.617089462913626, + "grad_norm": 0.8528016805648804, + "learning_rate": 2.552994324105178e-05, + "loss": 0.1477, + "step": 67780 + }, + { + "epoch": 2.6174755782076526, + "grad_norm": 1.8284223079681396, + "learning_rate": 2.5504202221449992e-05, + "loss": 0.2925, + "step": 67790 + }, + { + "epoch": 2.6178616935016796, + "grad_norm": 2.03399658203125, + "learning_rate": 2.547846120184821e-05, + "loss": 0.1716, + "step": 67800 + }, + { + "epoch": 2.618247808795706, + "grad_norm": 1.1871380805969238, + "learning_rate": 2.545272018224642e-05, + "loss": 0.1387, + "step": 67810 + }, + { + "epoch": 2.618633924089733, + "grad_norm": 0.3045734465122223, + "learning_rate": 2.5426979162644632e-05, + "loss": 0.1624, + "step": 67820 + }, + { + "epoch": 2.61902003938376, + "grad_norm": 1.4469716548919678, + "learning_rate": 2.540123814304285e-05, + "loss": 0.1534, + "step": 67830 + }, + { + "epoch": 2.6194061546777867, + "grad_norm": 0.7630550861358643, + "learning_rate": 2.5375497123441062e-05, + "loss": 0.1266, + "step": 67840 + }, + { + "epoch": 2.6197922699718137, + "grad_norm": 1.3296400308609009, + "learning_rate": 2.5349756103839272e-05, + "loss": 0.3268, + "step": 67850 + }, + { + "epoch": 2.6201783852658402, + "grad_norm": 0.7620146870613098, + "learning_rate": 2.532401508423749e-05, + "loss": 0.1747, + "step": 67860 + }, + { + "epoch": 2.6205645005598672, + "grad_norm": 1.4850629568099976, + "learning_rate": 2.5298274064635702e-05, + "loss": 0.1165, + "step": 67870 + }, + { + "epoch": 2.6209506158538938, + "grad_norm": 0.30754703283309937, + "learning_rate": 2.5272533045033915e-05, + "loss": 0.1603, + "step": 67880 + }, + { + "epoch": 2.6213367311479208, + "grad_norm": 0.47044405341148376, + "learning_rate": 2.5246792025432132e-05, + "loss": 0.1471, + "step": 67890 + }, + { + "epoch": 2.6217228464419478, + "grad_norm": 1.428301453590393, + "learning_rate": 2.5221051005830342e-05, + "loss": 0.1448, + "step": 67900 + }, + { + "epoch": 2.6221089617359743, + "grad_norm": 0.3132546544075012, + "learning_rate": 2.5195309986228555e-05, + "loss": 0.1899, + "step": 67910 + }, + { + "epoch": 2.6224950770300013, + "grad_norm": 0.36817577481269836, + "learning_rate": 2.5169568966626772e-05, + "loss": 0.2864, + "step": 67920 + }, + { + "epoch": 2.622881192324028, + "grad_norm": 0.8652348518371582, + "learning_rate": 2.5143827947024985e-05, + "loss": 0.0987, + "step": 67930 + }, + { + "epoch": 2.623267307618055, + "grad_norm": 0.5235974788665771, + "learning_rate": 2.5118086927423195e-05, + "loss": 0.3079, + "step": 67940 + }, + { + "epoch": 2.6236534229120814, + "grad_norm": 0.2852037847042084, + "learning_rate": 2.509234590782141e-05, + "loss": 0.1217, + "step": 67950 + }, + { + "epoch": 2.6240395382061084, + "grad_norm": 0.6256119608879089, + "learning_rate": 2.5066604888219625e-05, + "loss": 0.1645, + "step": 67960 + }, + { + 
"epoch": 2.6244256535001353, + "grad_norm": 1.0917539596557617, + "learning_rate": 2.5040863868617835e-05, + "loss": 0.1589, + "step": 67970 + }, + { + "epoch": 2.624811768794162, + "grad_norm": 1.1311125755310059, + "learning_rate": 2.501512284901605e-05, + "loss": 0.2388, + "step": 67980 + }, + { + "epoch": 2.625197884088189, + "grad_norm": 0.806238055229187, + "learning_rate": 2.4989381829414265e-05, + "loss": 0.2127, + "step": 67990 + }, + { + "epoch": 2.6255839993822154, + "grad_norm": 1.4400973320007324, + "learning_rate": 2.496364080981248e-05, + "loss": 0.0687, + "step": 68000 + }, + { + "epoch": 2.6259701146762424, + "grad_norm": 0.050153911113739014, + "learning_rate": 2.4937899790210692e-05, + "loss": 0.0564, + "step": 68010 + }, + { + "epoch": 2.626356229970269, + "grad_norm": 1.139260172843933, + "learning_rate": 2.4912158770608905e-05, + "loss": 0.0868, + "step": 68020 + }, + { + "epoch": 2.626742345264296, + "grad_norm": 1.2839637994766235, + "learning_rate": 2.488641775100712e-05, + "loss": 0.1589, + "step": 68030 + }, + { + "epoch": 2.627128460558323, + "grad_norm": 0.19808660447597504, + "learning_rate": 2.4860676731405332e-05, + "loss": 0.2015, + "step": 68040 + }, + { + "epoch": 2.6275145758523495, + "grad_norm": 0.38413748145103455, + "learning_rate": 2.4834935711803545e-05, + "loss": 0.1181, + "step": 68050 + }, + { + "epoch": 2.627900691146376, + "grad_norm": 0.5252083539962769, + "learning_rate": 2.480919469220176e-05, + "loss": 0.2141, + "step": 68060 + }, + { + "epoch": 2.628286806440403, + "grad_norm": 0.3906213641166687, + "learning_rate": 2.4783453672599975e-05, + "loss": 0.3083, + "step": 68070 + }, + { + "epoch": 2.62867292173443, + "grad_norm": 2.1290669441223145, + "learning_rate": 2.4757712652998185e-05, + "loss": 0.2155, + "step": 68080 + }, + { + "epoch": 2.6290590370284566, + "grad_norm": 1.0176451206207275, + "learning_rate": 2.4731971633396402e-05, + "loss": 0.1852, + "step": 68090 + }, + { + "epoch": 2.6294451523224835, + "grad_norm": 2.1066977977752686, + "learning_rate": 2.4706230613794615e-05, + "loss": 0.2177, + "step": 68100 + }, + { + "epoch": 2.6298312676165105, + "grad_norm": 2.267906427383423, + "learning_rate": 2.4680489594192825e-05, + "loss": 0.1359, + "step": 68110 + }, + { + "epoch": 2.630217382910537, + "grad_norm": 1.4150601625442505, + "learning_rate": 2.465474857459104e-05, + "loss": 0.1805, + "step": 68120 + }, + { + "epoch": 2.6306034982045636, + "grad_norm": 0.41347402334213257, + "learning_rate": 2.462900755498925e-05, + "loss": 0.095, + "step": 68130 + }, + { + "epoch": 2.6309896134985906, + "grad_norm": 1.3140255212783813, + "learning_rate": 2.4603266535387468e-05, + "loss": 0.0693, + "step": 68140 + }, + { + "epoch": 2.6313757287926176, + "grad_norm": 1.2731821537017822, + "learning_rate": 2.457752551578568e-05, + "loss": 0.2528, + "step": 68150 + }, + { + "epoch": 2.631761844086644, + "grad_norm": 2.4566001892089844, + "learning_rate": 2.4551784496183895e-05, + "loss": 0.1169, + "step": 68160 + }, + { + "epoch": 2.632147959380671, + "grad_norm": 2.265305280685425, + "learning_rate": 2.4526043476582108e-05, + "loss": 0.154, + "step": 68170 + }, + { + "epoch": 2.632534074674698, + "grad_norm": 0.5196200609207153, + "learning_rate": 2.450030245698032e-05, + "loss": 0.1029, + "step": 68180 + }, + { + "epoch": 2.6329201899687247, + "grad_norm": 2.5574257373809814, + "learning_rate": 2.4474561437378535e-05, + "loss": 0.2491, + "step": 68190 + }, + { + "epoch": 2.6333063052627512, + "grad_norm": 0.40821412205696106, + 
"learning_rate": 2.4448820417776748e-05, + "loss": 0.1842, + "step": 68200 + }, + { + "epoch": 2.633692420556778, + "grad_norm": 0.9594093561172485, + "learning_rate": 2.4423079398174965e-05, + "loss": 0.2135, + "step": 68210 + }, + { + "epoch": 2.634078535850805, + "grad_norm": 0.3707121014595032, + "learning_rate": 2.4397338378573175e-05, + "loss": 0.1715, + "step": 68220 + }, + { + "epoch": 2.6344646511448317, + "grad_norm": 0.9233579039573669, + "learning_rate": 2.437159735897139e-05, + "loss": 0.3378, + "step": 68230 + }, + { + "epoch": 2.6348507664388587, + "grad_norm": 1.8800396919250488, + "learning_rate": 2.4345856339369605e-05, + "loss": 0.0902, + "step": 68240 + }, + { + "epoch": 2.6352368817328853, + "grad_norm": 1.0025197267532349, + "learning_rate": 2.4320115319767818e-05, + "loss": 0.2189, + "step": 68250 + }, + { + "epoch": 2.6356229970269123, + "grad_norm": 0.03650035336613655, + "learning_rate": 2.429437430016603e-05, + "loss": 0.2025, + "step": 68260 + }, + { + "epoch": 2.636009112320939, + "grad_norm": 0.20000745356082916, + "learning_rate": 2.4268633280564245e-05, + "loss": 0.1395, + "step": 68270 + }, + { + "epoch": 2.636395227614966, + "grad_norm": 0.7981158494949341, + "learning_rate": 2.4242892260962458e-05, + "loss": 0.0875, + "step": 68280 + }, + { + "epoch": 2.636781342908993, + "grad_norm": 1.6767163276672363, + "learning_rate": 2.421715124136067e-05, + "loss": 0.2558, + "step": 68290 + }, + { + "epoch": 2.6371674582030193, + "grad_norm": 2.023684024810791, + "learning_rate": 2.4191410221758885e-05, + "loss": 0.2657, + "step": 68300 + }, + { + "epoch": 2.6375535734970463, + "grad_norm": 1.0396549701690674, + "learning_rate": 2.4165669202157098e-05, + "loss": 0.1199, + "step": 68310 + }, + { + "epoch": 2.637939688791073, + "grad_norm": 0.8373544216156006, + "learning_rate": 2.4139928182555315e-05, + "loss": 0.1273, + "step": 68320 + }, + { + "epoch": 2.6383258040851, + "grad_norm": 1.0113970041275024, + "learning_rate": 2.4114187162953525e-05, + "loss": 0.1111, + "step": 68330 + }, + { + "epoch": 2.6387119193791264, + "grad_norm": 0.08151128888130188, + "learning_rate": 2.4088446143351738e-05, + "loss": 0.1949, + "step": 68340 + }, + { + "epoch": 2.6390980346731534, + "grad_norm": 1.629394292831421, + "learning_rate": 2.4062705123749955e-05, + "loss": 0.1135, + "step": 68350 + }, + { + "epoch": 2.6394841499671804, + "grad_norm": 0.11504169553518295, + "learning_rate": 2.4036964104148165e-05, + "loss": 0.1591, + "step": 68360 + }, + { + "epoch": 2.639870265261207, + "grad_norm": 1.0481438636779785, + "learning_rate": 2.401122308454638e-05, + "loss": 0.1151, + "step": 68370 + }, + { + "epoch": 2.640256380555234, + "grad_norm": 1.7637771368026733, + "learning_rate": 2.3985482064944595e-05, + "loss": 0.2499, + "step": 68380 + }, + { + "epoch": 2.6406424958492605, + "grad_norm": 0.43667173385620117, + "learning_rate": 2.3959741045342808e-05, + "loss": 0.0512, + "step": 68390 + }, + { + "epoch": 2.6410286111432875, + "grad_norm": 0.9862222075462341, + "learning_rate": 2.393400002574102e-05, + "loss": 0.1, + "step": 68400 + }, + { + "epoch": 2.641414726437314, + "grad_norm": 0.996530294418335, + "learning_rate": 2.3908259006139234e-05, + "loss": 0.1679, + "step": 68410 + }, + { + "epoch": 2.641800841731341, + "grad_norm": 1.451374888420105, + "learning_rate": 2.3882517986537448e-05, + "loss": 0.2103, + "step": 68420 + }, + { + "epoch": 2.642186957025368, + "grad_norm": 0.2730307877063751, + "learning_rate": 2.385677696693566e-05, + "loss": 0.1522, + "step": 68430 + }, 
+ { + "epoch": 2.6425730723193945, + "grad_norm": 0.06040269508957863, + "learning_rate": 2.3831035947333874e-05, + "loss": 0.1278, + "step": 68440 + }, + { + "epoch": 2.6429591876134215, + "grad_norm": 1.716790795326233, + "learning_rate": 2.3805294927732088e-05, + "loss": 0.2005, + "step": 68450 + }, + { + "epoch": 2.643345302907448, + "grad_norm": 1.4266630411148071, + "learning_rate": 2.3779553908130304e-05, + "loss": 0.084, + "step": 68460 + }, + { + "epoch": 2.643731418201475, + "grad_norm": 0.24637824296951294, + "learning_rate": 2.3753812888528514e-05, + "loss": 0.1616, + "step": 68470 + }, + { + "epoch": 2.6441175334955016, + "grad_norm": 0.1911696344614029, + "learning_rate": 2.372807186892673e-05, + "loss": 0.2609, + "step": 68480 + }, + { + "epoch": 2.6445036487895286, + "grad_norm": 0.5890191793441772, + "learning_rate": 2.3702330849324944e-05, + "loss": 0.0889, + "step": 68490 + }, + { + "epoch": 2.6448897640835556, + "grad_norm": 1.8803762197494507, + "learning_rate": 2.3676589829723154e-05, + "loss": 0.1945, + "step": 68500 + }, + { + "epoch": 2.645275879377582, + "grad_norm": 0.13470906019210815, + "learning_rate": 2.365084881012137e-05, + "loss": 0.2507, + "step": 68510 + }, + { + "epoch": 2.6456619946716087, + "grad_norm": 0.5220600962638855, + "learning_rate": 2.362510779051958e-05, + "loss": 0.1619, + "step": 68520 + }, + { + "epoch": 2.6460481099656357, + "grad_norm": 0.04346944019198418, + "learning_rate": 2.3599366770917798e-05, + "loss": 0.162, + "step": 68530 + }, + { + "epoch": 2.6464342252596627, + "grad_norm": 1.4460773468017578, + "learning_rate": 2.357362575131601e-05, + "loss": 0.2857, + "step": 68540 + }, + { + "epoch": 2.646820340553689, + "grad_norm": 1.9554592370986938, + "learning_rate": 2.3547884731714224e-05, + "loss": 0.1108, + "step": 68550 + }, + { + "epoch": 2.647206455847716, + "grad_norm": 0.596594512462616, + "learning_rate": 2.3522143712112438e-05, + "loss": 0.1234, + "step": 68560 + }, + { + "epoch": 2.647592571141743, + "grad_norm": 0.4433450996875763, + "learning_rate": 2.349640269251065e-05, + "loss": 0.2316, + "step": 68570 + }, + { + "epoch": 2.6479786864357697, + "grad_norm": 0.5461844801902771, + "learning_rate": 2.3470661672908864e-05, + "loss": 0.3235, + "step": 68580 + }, + { + "epoch": 2.6483648017297963, + "grad_norm": 0.3693888187408447, + "learning_rate": 2.3444920653307077e-05, + "loss": 0.2776, + "step": 68590 + }, + { + "epoch": 2.6487509170238233, + "grad_norm": 0.19041050970554352, + "learning_rate": 2.3419179633705294e-05, + "loss": 0.14, + "step": 68600 + }, + { + "epoch": 2.6491370323178502, + "grad_norm": 0.6536568999290466, + "learning_rate": 2.3393438614103504e-05, + "loss": 0.1418, + "step": 68610 + }, + { + "epoch": 2.649523147611877, + "grad_norm": 0.32842710614204407, + "learning_rate": 2.336769759450172e-05, + "loss": 0.1917, + "step": 68620 + }, + { + "epoch": 2.649909262905904, + "grad_norm": 1.8785744905471802, + "learning_rate": 2.3341956574899934e-05, + "loss": 0.0986, + "step": 68630 + }, + { + "epoch": 2.6502953781999308, + "grad_norm": 1.0904650688171387, + "learning_rate": 2.3316215555298147e-05, + "loss": 0.1419, + "step": 68640 + }, + { + "epoch": 2.6506814934939573, + "grad_norm": 0.49260103702545166, + "learning_rate": 2.329047453569636e-05, + "loss": 0.1906, + "step": 68650 + }, + { + "epoch": 2.651067608787984, + "grad_norm": 0.6473127007484436, + "learning_rate": 2.326473351609457e-05, + "loss": 0.1585, + "step": 68660 + }, + { + "epoch": 2.651453724082011, + "grad_norm": 0.6146073937416077, + 
"learning_rate": 2.3238992496492787e-05, + "loss": 0.1425, + "step": 68670 + }, + { + "epoch": 2.651839839376038, + "grad_norm": 1.9327075481414795, + "learning_rate": 2.3213251476891e-05, + "loss": 0.1849, + "step": 68680 + }, + { + "epoch": 2.6522259546700644, + "grad_norm": 2.2953224182128906, + "learning_rate": 2.3187510457289214e-05, + "loss": 0.2707, + "step": 68690 + }, + { + "epoch": 2.6526120699640914, + "grad_norm": 0.1470266729593277, + "learning_rate": 2.3161769437687427e-05, + "loss": 0.1072, + "step": 68700 + }, + { + "epoch": 2.652998185258118, + "grad_norm": 0.8499718308448792, + "learning_rate": 2.3136028418085644e-05, + "loss": 0.1817, + "step": 68710 + }, + { + "epoch": 2.653384300552145, + "grad_norm": 0.09872210770845413, + "learning_rate": 2.3110287398483854e-05, + "loss": 0.0622, + "step": 68720 + }, + { + "epoch": 2.6537704158461715, + "grad_norm": 0.6662464737892151, + "learning_rate": 2.3084546378882067e-05, + "loss": 0.1477, + "step": 68730 + }, + { + "epoch": 2.6541565311401984, + "grad_norm": 0.008908030577003956, + "learning_rate": 2.3058805359280284e-05, + "loss": 0.1977, + "step": 68740 + }, + { + "epoch": 2.6545426464342254, + "grad_norm": 0.6658633947372437, + "learning_rate": 2.3033064339678494e-05, + "loss": 0.1416, + "step": 68750 + }, + { + "epoch": 2.654928761728252, + "grad_norm": 2.4253549575805664, + "learning_rate": 2.300732332007671e-05, + "loss": 0.223, + "step": 68760 + }, + { + "epoch": 2.655314877022279, + "grad_norm": 0.08092183619737625, + "learning_rate": 2.2981582300474924e-05, + "loss": 0.1287, + "step": 68770 + }, + { + "epoch": 2.6557009923163055, + "grad_norm": 1.8364213705062866, + "learning_rate": 2.2955841280873137e-05, + "loss": 0.1968, + "step": 68780 + }, + { + "epoch": 2.6560871076103325, + "grad_norm": 0.2436826527118683, + "learning_rate": 2.293010026127135e-05, + "loss": 0.1102, + "step": 68790 + }, + { + "epoch": 2.656473222904359, + "grad_norm": 1.2653074264526367, + "learning_rate": 2.2904359241669564e-05, + "loss": 0.2286, + "step": 68800 + }, + { + "epoch": 2.656859338198386, + "grad_norm": 0.14631232619285583, + "learning_rate": 2.2878618222067777e-05, + "loss": 0.1099, + "step": 68810 + }, + { + "epoch": 2.657245453492413, + "grad_norm": 0.02367425337433815, + "learning_rate": 2.285287720246599e-05, + "loss": 0.1272, + "step": 68820 + }, + { + "epoch": 2.6576315687864396, + "grad_norm": 0.3960202932357788, + "learning_rate": 2.2827136182864204e-05, + "loss": 0.1486, + "step": 68830 + }, + { + "epoch": 2.6580176840804666, + "grad_norm": 0.7019187808036804, + "learning_rate": 2.2801395163262417e-05, + "loss": 0.137, + "step": 68840 + }, + { + "epoch": 2.658403799374493, + "grad_norm": 1.1136587858200073, + "learning_rate": 2.2775654143660634e-05, + "loss": 0.1244, + "step": 68850 + }, + { + "epoch": 2.65878991466852, + "grad_norm": 0.8158296942710876, + "learning_rate": 2.2749913124058844e-05, + "loss": 0.0528, + "step": 68860 + }, + { + "epoch": 2.6591760299625467, + "grad_norm": 2.001655101776123, + "learning_rate": 2.272417210445706e-05, + "loss": 0.0705, + "step": 68870 + }, + { + "epoch": 2.6595621452565736, + "grad_norm": 0.2512793242931366, + "learning_rate": 2.2698431084855274e-05, + "loss": 0.0891, + "step": 68880 + }, + { + "epoch": 2.6599482605506006, + "grad_norm": 2.8973586559295654, + "learning_rate": 2.2672690065253484e-05, + "loss": 0.0921, + "step": 68890 + }, + { + "epoch": 2.660334375844627, + "grad_norm": 2.393480062484741, + "learning_rate": 2.26469490456517e-05, + "loss": 0.2726, + "step": 68900 + 
}, + { + "epoch": 2.660720491138654, + "grad_norm": 3.534479856491089, + "learning_rate": 2.2621208026049914e-05, + "loss": 0.2297, + "step": 68910 + }, + { + "epoch": 2.6611066064326807, + "grad_norm": 1.4901084899902344, + "learning_rate": 2.2595467006448127e-05, + "loss": 0.1104, + "step": 68920 + }, + { + "epoch": 2.6614927217267077, + "grad_norm": 1.3615870475769043, + "learning_rate": 2.256972598684634e-05, + "loss": 0.2267, + "step": 68930 + }, + { + "epoch": 2.6618788370207342, + "grad_norm": 0.26768797636032104, + "learning_rate": 2.2543984967244554e-05, + "loss": 0.0842, + "step": 68940 + }, + { + "epoch": 2.6622649523147612, + "grad_norm": 0.5720809102058411, + "learning_rate": 2.2518243947642767e-05, + "loss": 0.0955, + "step": 68950 + }, + { + "epoch": 2.6626510676087882, + "grad_norm": 0.8448322415351868, + "learning_rate": 2.249250292804098e-05, + "loss": 0.1251, + "step": 68960 + }, + { + "epoch": 2.6630371829028148, + "grad_norm": 2.9490509033203125, + "learning_rate": 2.2466761908439193e-05, + "loss": 0.1593, + "step": 68970 + }, + { + "epoch": 2.6634232981968413, + "grad_norm": 1.1557024717330933, + "learning_rate": 2.2441020888837407e-05, + "loss": 0.1336, + "step": 68980 + }, + { + "epoch": 2.6638094134908683, + "grad_norm": 2.981727361679077, + "learning_rate": 2.2415279869235623e-05, + "loss": 0.1487, + "step": 68990 + }, + { + "epoch": 2.6641955287848953, + "grad_norm": 0.5381894707679749, + "learning_rate": 2.2389538849633833e-05, + "loss": 0.1814, + "step": 69000 + }, + { + "epoch": 2.664581644078922, + "grad_norm": 0.833191990852356, + "learning_rate": 2.236379783003205e-05, + "loss": 0.1515, + "step": 69010 + }, + { + "epoch": 2.664967759372949, + "grad_norm": 0.1587102711200714, + "learning_rate": 2.2338056810430263e-05, + "loss": 0.0568, + "step": 69020 + }, + { + "epoch": 2.665353874666976, + "grad_norm": 1.5014970302581787, + "learning_rate": 2.2312315790828477e-05, + "loss": 0.1992, + "step": 69030 + }, + { + "epoch": 2.6657399899610024, + "grad_norm": 0.060449715703725815, + "learning_rate": 2.228657477122669e-05, + "loss": 0.1523, + "step": 69040 + }, + { + "epoch": 2.666126105255029, + "grad_norm": 1.7199037075042725, + "learning_rate": 2.22608337516249e-05, + "loss": 0.1056, + "step": 69050 + }, + { + "epoch": 2.666512220549056, + "grad_norm": 0.430899441242218, + "learning_rate": 2.2235092732023117e-05, + "loss": 0.1376, + "step": 69060 + }, + { + "epoch": 2.666898335843083, + "grad_norm": 0.16108714044094086, + "learning_rate": 2.220935171242133e-05, + "loss": 0.1109, + "step": 69070 + }, + { + "epoch": 2.6672844511371094, + "grad_norm": 3.1773228645324707, + "learning_rate": 2.2183610692819543e-05, + "loss": 0.3158, + "step": 69080 + }, + { + "epoch": 2.6676705664311364, + "grad_norm": 1.568304419517517, + "learning_rate": 2.2157869673217757e-05, + "loss": 0.2408, + "step": 69090 + }, + { + "epoch": 2.668056681725163, + "grad_norm": 1.8924100399017334, + "learning_rate": 2.2132128653615973e-05, + "loss": 0.2268, + "step": 69100 + }, + { + "epoch": 2.66844279701919, + "grad_norm": 2.1422247886657715, + "learning_rate": 2.2106387634014183e-05, + "loss": 0.1685, + "step": 69110 + }, + { + "epoch": 2.6688289123132165, + "grad_norm": 0.727570652961731, + "learning_rate": 2.2080646614412396e-05, + "loss": 0.3019, + "step": 69120 + }, + { + "epoch": 2.6692150276072435, + "grad_norm": 1.1987897157669067, + "learning_rate": 2.2054905594810613e-05, + "loss": 0.1511, + "step": 69130 + }, + { + "epoch": 2.6696011429012705, + "grad_norm": 2.25412654876709, + 
"learning_rate": 2.2029164575208823e-05, + "loss": 0.1538, + "step": 69140 + }, + { + "epoch": 2.669987258195297, + "grad_norm": 0.4829877018928528, + "learning_rate": 2.200342355560704e-05, + "loss": 0.2204, + "step": 69150 + }, + { + "epoch": 2.670373373489324, + "grad_norm": 0.8249949812889099, + "learning_rate": 2.1977682536005253e-05, + "loss": 0.1574, + "step": 69160 + }, + { + "epoch": 2.6707594887833506, + "grad_norm": 0.47408896684646606, + "learning_rate": 2.1951941516403466e-05, + "loss": 0.131, + "step": 69170 + }, + { + "epoch": 2.6711456040773776, + "grad_norm": 3.192263126373291, + "learning_rate": 2.192620049680168e-05, + "loss": 0.1284, + "step": 69180 + }, + { + "epoch": 2.671531719371404, + "grad_norm": 1.6318609714508057, + "learning_rate": 2.1900459477199893e-05, + "loss": 0.182, + "step": 69190 + }, + { + "epoch": 2.671917834665431, + "grad_norm": 2.031730890274048, + "learning_rate": 2.1874718457598106e-05, + "loss": 0.1774, + "step": 69200 + }, + { + "epoch": 2.672303949959458, + "grad_norm": 0.053225722163915634, + "learning_rate": 2.184897743799632e-05, + "loss": 0.2002, + "step": 69210 + }, + { + "epoch": 2.6726900652534846, + "grad_norm": 1.1087912321090698, + "learning_rate": 2.1823236418394533e-05, + "loss": 0.3255, + "step": 69220 + }, + { + "epoch": 2.6730761805475116, + "grad_norm": 1.7376277446746826, + "learning_rate": 2.1797495398792746e-05, + "loss": 0.1705, + "step": 69230 + }, + { + "epoch": 2.673462295841538, + "grad_norm": 0.7733955383300781, + "learning_rate": 2.1771754379190963e-05, + "loss": 0.2284, + "step": 69240 + }, + { + "epoch": 2.673848411135565, + "grad_norm": 2.198826313018799, + "learning_rate": 2.1746013359589173e-05, + "loss": 0.2463, + "step": 69250 + }, + { + "epoch": 2.6742345264295917, + "grad_norm": 0.9791239500045776, + "learning_rate": 2.172027233998739e-05, + "loss": 0.365, + "step": 69260 + }, + { + "epoch": 2.6746206417236187, + "grad_norm": 1.0145782232284546, + "learning_rate": 2.1694531320385603e-05, + "loss": 0.1168, + "step": 69270 + }, + { + "epoch": 2.6750067570176457, + "grad_norm": 1.506508708000183, + "learning_rate": 2.1668790300783813e-05, + "loss": 0.1254, + "step": 69280 + }, + { + "epoch": 2.675392872311672, + "grad_norm": 0.9859924912452698, + "learning_rate": 2.164304928118203e-05, + "loss": 0.2124, + "step": 69290 + }, + { + "epoch": 2.675778987605699, + "grad_norm": 1.4127247333526611, + "learning_rate": 2.1617308261580243e-05, + "loss": 0.1644, + "step": 69300 + }, + { + "epoch": 2.6761651028997258, + "grad_norm": 0.8753447532653809, + "learning_rate": 2.1591567241978456e-05, + "loss": 0.1948, + "step": 69310 + }, + { + "epoch": 2.6765512181937527, + "grad_norm": 0.18299230933189392, + "learning_rate": 2.156582622237667e-05, + "loss": 0.1042, + "step": 69320 + }, + { + "epoch": 2.6769373334877793, + "grad_norm": 0.9009674191474915, + "learning_rate": 2.1540085202774883e-05, + "loss": 0.0899, + "step": 69330 + }, + { + "epoch": 2.6773234487818063, + "grad_norm": 0.8263937830924988, + "learning_rate": 2.1514344183173096e-05, + "loss": 0.0843, + "step": 69340 + }, + { + "epoch": 2.6777095640758333, + "grad_norm": 0.40176376700401306, + "learning_rate": 2.148860316357131e-05, + "loss": 0.1645, + "step": 69350 + }, + { + "epoch": 2.67809567936986, + "grad_norm": 1.2371177673339844, + "learning_rate": 2.1462862143969523e-05, + "loss": 0.1491, + "step": 69360 + }, + { + "epoch": 2.6784817946638864, + "grad_norm": 0.6874446272850037, + "learning_rate": 2.1437121124367736e-05, + "loss": 0.1887, + "step": 69370 + 
}, + { + "epoch": 2.6788679099579134, + "grad_norm": 0.08807168155908585, + "learning_rate": 2.1411380104765953e-05, + "loss": 0.1049, + "step": 69380 + }, + { + "epoch": 2.6792540252519403, + "grad_norm": 2.0971579551696777, + "learning_rate": 2.1385639085164163e-05, + "loss": 0.135, + "step": 69390 + }, + { + "epoch": 2.679640140545967, + "grad_norm": 0.9297891855239868, + "learning_rate": 2.135989806556238e-05, + "loss": 0.2356, + "step": 69400 + }, + { + "epoch": 2.680026255839994, + "grad_norm": 2.100465774536133, + "learning_rate": 2.1334157045960593e-05, + "loss": 0.225, + "step": 69410 + }, + { + "epoch": 2.680412371134021, + "grad_norm": 0.14785470068454742, + "learning_rate": 2.1308416026358806e-05, + "loss": 0.1806, + "step": 69420 + }, + { + "epoch": 2.6807984864280474, + "grad_norm": 0.03883717209100723, + "learning_rate": 2.128267500675702e-05, + "loss": 0.0575, + "step": 69430 + }, + { + "epoch": 2.681184601722074, + "grad_norm": 0.515643835067749, + "learning_rate": 2.125693398715523e-05, + "loss": 0.1095, + "step": 69440 + }, + { + "epoch": 2.681570717016101, + "grad_norm": 0.21258410811424255, + "learning_rate": 2.1231192967553446e-05, + "loss": 0.1496, + "step": 69450 + }, + { + "epoch": 2.681956832310128, + "grad_norm": 1.14195716381073, + "learning_rate": 2.120545194795166e-05, + "loss": 0.182, + "step": 69460 + }, + { + "epoch": 2.6823429476041545, + "grad_norm": 0.43386051058769226, + "learning_rate": 2.1179710928349873e-05, + "loss": 0.2241, + "step": 69470 + }, + { + "epoch": 2.6827290628981815, + "grad_norm": 0.4654422700405121, + "learning_rate": 2.1153969908748086e-05, + "loss": 0.2264, + "step": 69480 + }, + { + "epoch": 2.6831151781922085, + "grad_norm": 0.8086020350456238, + "learning_rate": 2.1128228889146303e-05, + "loss": 0.1634, + "step": 69490 + }, + { + "epoch": 2.683501293486235, + "grad_norm": 0.03701169416308403, + "learning_rate": 2.1102487869544512e-05, + "loss": 0.1117, + "step": 69500 + }, + { + "epoch": 2.6838874087802616, + "grad_norm": 0.9567661285400391, + "learning_rate": 2.1076746849942726e-05, + "loss": 0.113, + "step": 69510 + }, + { + "epoch": 2.6842735240742885, + "grad_norm": 1.7322033643722534, + "learning_rate": 2.1051005830340943e-05, + "loss": 0.1443, + "step": 69520 + }, + { + "epoch": 2.6846596393683155, + "grad_norm": 1.8574343919754028, + "learning_rate": 2.1025264810739152e-05, + "loss": 0.0919, + "step": 69530 + }, + { + "epoch": 2.685045754662342, + "grad_norm": 0.0813397541642189, + "learning_rate": 2.099952379113737e-05, + "loss": 0.061, + "step": 69540 + }, + { + "epoch": 2.685431869956369, + "grad_norm": 0.09124821424484253, + "learning_rate": 2.0973782771535582e-05, + "loss": 0.0703, + "step": 69550 + }, + { + "epoch": 2.6858179852503956, + "grad_norm": 1.2731401920318604, + "learning_rate": 2.0948041751933796e-05, + "loss": 0.1788, + "step": 69560 + }, + { + "epoch": 2.6862041005444226, + "grad_norm": 0.38222697377204895, + "learning_rate": 2.092230073233201e-05, + "loss": 0.22, + "step": 69570 + }, + { + "epoch": 2.686590215838449, + "grad_norm": 0.7840344905853271, + "learning_rate": 2.0896559712730222e-05, + "loss": 0.0511, + "step": 69580 + }, + { + "epoch": 2.686976331132476, + "grad_norm": 0.5814514756202698, + "learning_rate": 2.0870818693128436e-05, + "loss": 0.1282, + "step": 69590 + }, + { + "epoch": 2.687362446426503, + "grad_norm": 2.049823045730591, + "learning_rate": 2.084507767352665e-05, + "loss": 0.1928, + "step": 69600 + }, + { + "epoch": 2.6877485617205297, + "grad_norm": 1.726441502571106, + 
"learning_rate": 2.0819336653924862e-05, + "loss": 0.2271, + "step": 69610 + }, + { + "epoch": 2.6881346770145567, + "grad_norm": 0.6086135506629944, + "learning_rate": 2.0793595634323076e-05, + "loss": 0.0826, + "step": 69620 + }, + { + "epoch": 2.688520792308583, + "grad_norm": 0.025057394057512283, + "learning_rate": 2.0767854614721292e-05, + "loss": 0.0945, + "step": 69630 + }, + { + "epoch": 2.68890690760261, + "grad_norm": 0.5811958312988281, + "learning_rate": 2.0742113595119502e-05, + "loss": 0.115, + "step": 69640 + }, + { + "epoch": 2.6892930228966367, + "grad_norm": 1.4226329326629639, + "learning_rate": 2.071637257551772e-05, + "loss": 0.1394, + "step": 69650 + }, + { + "epoch": 2.6896791381906637, + "grad_norm": 0.06111827492713928, + "learning_rate": 2.0690631555915932e-05, + "loss": 0.0785, + "step": 69660 + }, + { + "epoch": 2.6900652534846907, + "grad_norm": 1.7739264965057373, + "learning_rate": 2.0664890536314142e-05, + "loss": 0.0966, + "step": 69670 + }, + { + "epoch": 2.6904513687787173, + "grad_norm": 0.12530933320522308, + "learning_rate": 2.063914951671236e-05, + "loss": 0.1142, + "step": 69680 + }, + { + "epoch": 2.6908374840727443, + "grad_norm": 0.44947731494903564, + "learning_rate": 2.0613408497110572e-05, + "loss": 0.1069, + "step": 69690 + }, + { + "epoch": 2.691223599366771, + "grad_norm": 1.1874277591705322, + "learning_rate": 2.0587667477508786e-05, + "loss": 0.2498, + "step": 69700 + }, + { + "epoch": 2.691609714660798, + "grad_norm": 0.2770039141178131, + "learning_rate": 2.0561926457907e-05, + "loss": 0.0918, + "step": 69710 + }, + { + "epoch": 2.6919958299548243, + "grad_norm": 0.6367407441139221, + "learning_rate": 2.0536185438305212e-05, + "loss": 0.1444, + "step": 69720 + }, + { + "epoch": 2.6923819452488513, + "grad_norm": 4.309720993041992, + "learning_rate": 2.0510444418703425e-05, + "loss": 0.1751, + "step": 69730 + }, + { + "epoch": 2.6927680605428783, + "grad_norm": 0.19197171926498413, + "learning_rate": 2.048470339910164e-05, + "loss": 0.3638, + "step": 69740 + }, + { + "epoch": 2.693154175836905, + "grad_norm": 1.1299902200698853, + "learning_rate": 2.0458962379499852e-05, + "loss": 0.2047, + "step": 69750 + }, + { + "epoch": 2.693540291130932, + "grad_norm": 2.2639973163604736, + "learning_rate": 2.0433221359898065e-05, + "loss": 0.1683, + "step": 69760 + }, + { + "epoch": 2.6939264064249584, + "grad_norm": 0.7595259547233582, + "learning_rate": 2.0407480340296282e-05, + "loss": 0.1393, + "step": 69770 + }, + { + "epoch": 2.6943125217189854, + "grad_norm": 0.1113772988319397, + "learning_rate": 2.0381739320694492e-05, + "loss": 0.1775, + "step": 69780 + }, + { + "epoch": 2.694698637013012, + "grad_norm": 0.8597696423530579, + "learning_rate": 2.035599830109271e-05, + "loss": 0.092, + "step": 69790 + }, + { + "epoch": 2.695084752307039, + "grad_norm": 0.9622846841812134, + "learning_rate": 2.0330257281490922e-05, + "loss": 0.1407, + "step": 69800 + }, + { + "epoch": 2.695470867601066, + "grad_norm": 1.840627908706665, + "learning_rate": 2.0304516261889135e-05, + "loss": 0.1769, + "step": 69810 + }, + { + "epoch": 2.6958569828950925, + "grad_norm": 0.8552238941192627, + "learning_rate": 2.027877524228735e-05, + "loss": 0.263, + "step": 69820 + }, + { + "epoch": 2.696243098189119, + "grad_norm": 0.16129668056964874, + "learning_rate": 2.025303422268556e-05, + "loss": 0.188, + "step": 69830 + }, + { + "epoch": 2.696629213483146, + "grad_norm": 0.46955424547195435, + "learning_rate": 2.0227293203083775e-05, + "loss": 0.0623, + "step": 69840 + 
}, + { + "epoch": 2.697015328777173, + "grad_norm": 1.0460457801818848, + "learning_rate": 2.020155218348199e-05, + "loss": 0.1932, + "step": 69850 + }, + { + "epoch": 2.6974014440711995, + "grad_norm": 4.531238555908203, + "learning_rate": 2.0175811163880202e-05, + "loss": 0.323, + "step": 69860 + }, + { + "epoch": 2.6977875593652265, + "grad_norm": 1.188908338546753, + "learning_rate": 2.0150070144278415e-05, + "loss": 0.1389, + "step": 69870 + }, + { + "epoch": 2.6981736746592535, + "grad_norm": 0.3120017349720001, + "learning_rate": 2.0124329124676632e-05, + "loss": 0.1732, + "step": 69880 + }, + { + "epoch": 2.69855978995328, + "grad_norm": 1.2681177854537964, + "learning_rate": 2.0098588105074842e-05, + "loss": 0.1452, + "step": 69890 + }, + { + "epoch": 2.6989459052473066, + "grad_norm": 0.07245191186666489, + "learning_rate": 2.0072847085473055e-05, + "loss": 0.0982, + "step": 69900 + }, + { + "epoch": 2.6993320205413336, + "grad_norm": 1.7727508544921875, + "learning_rate": 2.0047106065871272e-05, + "loss": 0.0806, + "step": 69910 + }, + { + "epoch": 2.6997181358353606, + "grad_norm": 0.6012092232704163, + "learning_rate": 2.0021365046269482e-05, + "loss": 0.1377, + "step": 69920 + }, + { + "epoch": 2.700104251129387, + "grad_norm": 0.6156259775161743, + "learning_rate": 1.99956240266677e-05, + "loss": 0.1788, + "step": 69930 + }, + { + "epoch": 2.700490366423414, + "grad_norm": 1.6917505264282227, + "learning_rate": 1.9969883007065912e-05, + "loss": 0.1588, + "step": 69940 + }, + { + "epoch": 2.700876481717441, + "grad_norm": 2.0406925678253174, + "learning_rate": 1.9944141987464125e-05, + "loss": 0.1541, + "step": 69950 + }, + { + "epoch": 2.7012625970114676, + "grad_norm": 3.067919969558716, + "learning_rate": 1.991840096786234e-05, + "loss": 0.1052, + "step": 69960 + }, + { + "epoch": 2.701648712305494, + "grad_norm": 0.7679221034049988, + "learning_rate": 1.989265994826055e-05, + "loss": 0.2131, + "step": 69970 + }, + { + "epoch": 2.702034827599521, + "grad_norm": 0.9475175738334656, + "learning_rate": 1.9866918928658765e-05, + "loss": 0.1016, + "step": 69980 + }, + { + "epoch": 2.702420942893548, + "grad_norm": 1.2485641241073608, + "learning_rate": 1.984117790905698e-05, + "loss": 0.0922, + "step": 69990 + }, + { + "epoch": 2.7028070581875747, + "grad_norm": 0.9329742789268494, + "learning_rate": 1.981543688945519e-05, + "loss": 0.3189, + "step": 70000 + }, + { + "epoch": 2.7031931734816017, + "grad_norm": 0.6140137314796448, + "learning_rate": 1.9789695869853405e-05, + "loss": 0.1054, + "step": 70010 + }, + { + "epoch": 2.7035792887756283, + "grad_norm": 0.938637375831604, + "learning_rate": 1.976395485025162e-05, + "loss": 0.1276, + "step": 70020 + }, + { + "epoch": 2.7039654040696552, + "grad_norm": 1.2453144788742065, + "learning_rate": 1.973821383064983e-05, + "loss": 0.2376, + "step": 70030 + }, + { + "epoch": 2.704351519363682, + "grad_norm": 0.7409077286720276, + "learning_rate": 1.9712472811048048e-05, + "loss": 0.1269, + "step": 70040 + }, + { + "epoch": 2.704737634657709, + "grad_norm": 1.8768031597137451, + "learning_rate": 1.968673179144626e-05, + "loss": 0.235, + "step": 70050 + }, + { + "epoch": 2.7051237499517358, + "grad_norm": 0.23621875047683716, + "learning_rate": 1.966099077184447e-05, + "loss": 0.1042, + "step": 70060 + }, + { + "epoch": 2.7055098652457623, + "grad_norm": 0.2492808699607849, + "learning_rate": 1.9635249752242688e-05, + "loss": 0.2042, + "step": 70070 + }, + { + "epoch": 2.7058959805397893, + "grad_norm": 1.5175273418426514, + 
"learning_rate": 1.96095087326409e-05, + "loss": 0.2068, + "step": 70080 + }, + { + "epoch": 2.706282095833816, + "grad_norm": 0.9314035773277283, + "learning_rate": 1.9583767713039115e-05, + "loss": 0.1587, + "step": 70090 + }, + { + "epoch": 2.706668211127843, + "grad_norm": 1.7147942781448364, + "learning_rate": 1.9558026693437328e-05, + "loss": 0.1391, + "step": 70100 + }, + { + "epoch": 2.7070543264218694, + "grad_norm": 0.36276572942733765, + "learning_rate": 1.953228567383554e-05, + "loss": 0.1275, + "step": 70110 + }, + { + "epoch": 2.7074404417158964, + "grad_norm": 0.8741244077682495, + "learning_rate": 1.9506544654233755e-05, + "loss": 0.3746, + "step": 70120 + }, + { + "epoch": 2.7078265570099234, + "grad_norm": 1.3060029745101929, + "learning_rate": 1.9480803634631968e-05, + "loss": 0.1076, + "step": 70130 + }, + { + "epoch": 2.70821267230395, + "grad_norm": 0.10746710747480392, + "learning_rate": 1.945506261503018e-05, + "loss": 0.1494, + "step": 70140 + }, + { + "epoch": 2.708598787597977, + "grad_norm": 1.486136555671692, + "learning_rate": 1.9429321595428395e-05, + "loss": 0.1223, + "step": 70150 + }, + { + "epoch": 2.7089849028920034, + "grad_norm": 1.184791088104248, + "learning_rate": 1.940358057582661e-05, + "loss": 0.1055, + "step": 70160 + }, + { + "epoch": 2.7093710181860304, + "grad_norm": 0.10033337771892548, + "learning_rate": 1.937783955622482e-05, + "loss": 0.083, + "step": 70170 + }, + { + "epoch": 2.709757133480057, + "grad_norm": 1.3200103044509888, + "learning_rate": 1.9352098536623038e-05, + "loss": 0.2181, + "step": 70180 + }, + { + "epoch": 2.710143248774084, + "grad_norm": 0.013393727131187916, + "learning_rate": 1.932635751702125e-05, + "loss": 0.2415, + "step": 70190 + }, + { + "epoch": 2.710529364068111, + "grad_norm": 0.5986078381538391, + "learning_rate": 1.9300616497419465e-05, + "loss": 0.3203, + "step": 70200 + }, + { + "epoch": 2.7109154793621375, + "grad_norm": 0.18333138525485992, + "learning_rate": 1.9274875477817678e-05, + "loss": 0.1043, + "step": 70210 + }, + { + "epoch": 2.7113015946561645, + "grad_norm": 1.0568320751190186, + "learning_rate": 1.924913445821589e-05, + "loss": 0.1491, + "step": 70220 + }, + { + "epoch": 2.711687709950191, + "grad_norm": 4.519015312194824, + "learning_rate": 1.9223393438614105e-05, + "loss": 0.2732, + "step": 70230 + }, + { + "epoch": 2.712073825244218, + "grad_norm": 0.753288209438324, + "learning_rate": 1.9197652419012318e-05, + "loss": 0.1793, + "step": 70240 + }, + { + "epoch": 2.7124599405382446, + "grad_norm": 0.8923632502555847, + "learning_rate": 1.917191139941053e-05, + "loss": 0.3293, + "step": 70250 + }, + { + "epoch": 2.7128460558322716, + "grad_norm": 1.5100219249725342, + "learning_rate": 1.9146170379808744e-05, + "loss": 0.2252, + "step": 70260 + }, + { + "epoch": 2.7132321711262986, + "grad_norm": 3.496548891067505, + "learning_rate": 1.912042936020696e-05, + "loss": 0.3877, + "step": 70270 + }, + { + "epoch": 2.713618286420325, + "grad_norm": 1.0033410787582397, + "learning_rate": 1.909468834060517e-05, + "loss": 0.1854, + "step": 70280 + }, + { + "epoch": 2.7140044017143516, + "grad_norm": 2.1113343238830566, + "learning_rate": 1.9068947321003384e-05, + "loss": 0.2561, + "step": 70290 + }, + { + "epoch": 2.7143905170083786, + "grad_norm": 3.4603283405303955, + "learning_rate": 1.90432063014016e-05, + "loss": 0.2673, + "step": 70300 + }, + { + "epoch": 2.7147766323024056, + "grad_norm": 0.4921800196170807, + "learning_rate": 1.901746528179981e-05, + "loss": 0.1819, + "step": 70310 + }, + { 
+ "epoch": 2.715162747596432, + "grad_norm": 0.12639844417572021, + "learning_rate": 1.8991724262198028e-05, + "loss": 0.1302, + "step": 70320 + }, + { + "epoch": 2.715548862890459, + "grad_norm": 0.988345205783844, + "learning_rate": 1.896598324259624e-05, + "loss": 0.2827, + "step": 70330 + }, + { + "epoch": 2.715934978184486, + "grad_norm": 1.432824730873108, + "learning_rate": 1.8940242222994454e-05, + "loss": 0.1046, + "step": 70340 + }, + { + "epoch": 2.7163210934785127, + "grad_norm": 0.4591884911060333, + "learning_rate": 1.8914501203392668e-05, + "loss": 0.2663, + "step": 70350 + }, + { + "epoch": 2.7167072087725392, + "grad_norm": 0.23119209706783295, + "learning_rate": 1.888876018379088e-05, + "loss": 0.3555, + "step": 70360 + }, + { + "epoch": 2.7170933240665662, + "grad_norm": 1.9221980571746826, + "learning_rate": 1.8863019164189094e-05, + "loss": 0.1279, + "step": 70370 + }, + { + "epoch": 2.717479439360593, + "grad_norm": 0.09880539029836655, + "learning_rate": 1.8837278144587308e-05, + "loss": 0.0905, + "step": 70380 + }, + { + "epoch": 2.7178655546546198, + "grad_norm": 0.16270965337753296, + "learning_rate": 1.881153712498552e-05, + "loss": 0.1619, + "step": 70390 + }, + { + "epoch": 2.7182516699486468, + "grad_norm": 2.006127119064331, + "learning_rate": 1.8785796105383734e-05, + "loss": 0.107, + "step": 70400 + }, + { + "epoch": 2.7186377852426733, + "grad_norm": 3.8377106189727783, + "learning_rate": 1.876005508578195e-05, + "loss": 0.1109, + "step": 70410 + }, + { + "epoch": 2.7190239005367003, + "grad_norm": 0.5417147278785706, + "learning_rate": 1.873431406618016e-05, + "loss": 0.0836, + "step": 70420 + }, + { + "epoch": 2.719410015830727, + "grad_norm": 0.2515392601490021, + "learning_rate": 1.8708573046578378e-05, + "loss": 0.1107, + "step": 70430 + }, + { + "epoch": 2.719796131124754, + "grad_norm": 1.813265323638916, + "learning_rate": 1.868283202697659e-05, + "loss": 0.125, + "step": 70440 + }, + { + "epoch": 2.720182246418781, + "grad_norm": 1.873964548110962, + "learning_rate": 1.86570910073748e-05, + "loss": 0.1649, + "step": 70450 + }, + { + "epoch": 2.7205683617128074, + "grad_norm": 0.23251821100711823, + "learning_rate": 1.8631349987773017e-05, + "loss": 0.1394, + "step": 70460 + }, + { + "epoch": 2.7209544770068343, + "grad_norm": 3.282196521759033, + "learning_rate": 1.860560896817123e-05, + "loss": 0.3037, + "step": 70470 + }, + { + "epoch": 2.721340592300861, + "grad_norm": 1.481994867324829, + "learning_rate": 1.8579867948569444e-05, + "loss": 0.194, + "step": 70480 + }, + { + "epoch": 2.721726707594888, + "grad_norm": 2.3261420726776123, + "learning_rate": 1.8554126928967657e-05, + "loss": 0.2485, + "step": 70490 + }, + { + "epoch": 2.7221128228889144, + "grad_norm": 0.7159029841423035, + "learning_rate": 1.852838590936587e-05, + "loss": 0.0882, + "step": 70500 + }, + { + "epoch": 2.7224989381829414, + "grad_norm": 1.13942289352417, + "learning_rate": 1.8502644889764084e-05, + "loss": 0.2973, + "step": 70510 + }, + { + "epoch": 2.7228850534769684, + "grad_norm": 0.5620355606079102, + "learning_rate": 1.8476903870162297e-05, + "loss": 0.1388, + "step": 70520 + }, + { + "epoch": 2.723271168770995, + "grad_norm": 0.3864080607891083, + "learning_rate": 1.845116285056051e-05, + "loss": 0.3104, + "step": 70530 + }, + { + "epoch": 2.723657284065022, + "grad_norm": 0.19849848747253418, + "learning_rate": 1.8425421830958724e-05, + "loss": 0.1077, + "step": 70540 + }, + { + "epoch": 2.7240433993590485, + "grad_norm": 0.5163066983222961, + "learning_rate": 
1.839968081135694e-05, + "loss": 0.1412, + "step": 70550 + }, + { + "epoch": 2.7244295146530755, + "grad_norm": 2.6426632404327393, + "learning_rate": 1.837393979175515e-05, + "loss": 0.1635, + "step": 70560 + }, + { + "epoch": 2.724815629947102, + "grad_norm": 0.09032654017210007, + "learning_rate": 1.8348198772153367e-05, + "loss": 0.1136, + "step": 70570 + }, + { + "epoch": 2.725201745241129, + "grad_norm": 0.20428933203220367, + "learning_rate": 1.832245775255158e-05, + "loss": 0.0685, + "step": 70580 + }, + { + "epoch": 2.725587860535156, + "grad_norm": 0.16030457615852356, + "learning_rate": 1.8296716732949794e-05, + "loss": 0.236, + "step": 70590 + }, + { + "epoch": 2.7259739758291825, + "grad_norm": 0.4269642233848572, + "learning_rate": 1.8270975713348007e-05, + "loss": 0.1099, + "step": 70600 + }, + { + "epoch": 2.7263600911232095, + "grad_norm": 0.821434736251831, + "learning_rate": 1.824523469374622e-05, + "loss": 0.0975, + "step": 70610 + }, + { + "epoch": 2.726746206417236, + "grad_norm": 0.6720656156539917, + "learning_rate": 1.8219493674144434e-05, + "loss": 0.286, + "step": 70620 + }, + { + "epoch": 2.727132321711263, + "grad_norm": 1.6225451231002808, + "learning_rate": 1.8193752654542647e-05, + "loss": 0.1615, + "step": 70630 + }, + { + "epoch": 2.7275184370052896, + "grad_norm": 2.2176315784454346, + "learning_rate": 1.816801163494086e-05, + "loss": 0.2519, + "step": 70640 + }, + { + "epoch": 2.7279045522993166, + "grad_norm": 0.7203749418258667, + "learning_rate": 1.8142270615339074e-05, + "loss": 0.0635, + "step": 70650 + }, + { + "epoch": 2.7282906675933436, + "grad_norm": 1.0711476802825928, + "learning_rate": 1.8116529595737287e-05, + "loss": 0.1388, + "step": 70660 + }, + { + "epoch": 2.72867678288737, + "grad_norm": 0.3595193922519684, + "learning_rate": 1.80907885761355e-05, + "loss": 0.3019, + "step": 70670 + }, + { + "epoch": 2.7290628981813967, + "grad_norm": 2.369717597961426, + "learning_rate": 1.8065047556533714e-05, + "loss": 0.202, + "step": 70680 + }, + { + "epoch": 2.7294490134754237, + "grad_norm": 0.9249016046524048, + "learning_rate": 1.803930653693193e-05, + "loss": 0.1571, + "step": 70690 + }, + { + "epoch": 2.7298351287694507, + "grad_norm": 1.1907705068588257, + "learning_rate": 1.801356551733014e-05, + "loss": 0.1, + "step": 70700 + }, + { + "epoch": 2.730221244063477, + "grad_norm": 1.934824824333191, + "learning_rate": 1.7987824497728357e-05, + "loss": 0.121, + "step": 70710 + }, + { + "epoch": 2.730607359357504, + "grad_norm": 0.8659215569496155, + "learning_rate": 1.796208347812657e-05, + "loss": 0.1184, + "step": 70720 + }, + { + "epoch": 2.730993474651531, + "grad_norm": 4.406744956970215, + "learning_rate": 1.7936342458524784e-05, + "loss": 0.1405, + "step": 70730 + }, + { + "epoch": 2.7313795899455577, + "grad_norm": 1.3139662742614746, + "learning_rate": 1.7910601438922997e-05, + "loss": 0.1341, + "step": 70740 + }, + { + "epoch": 2.7317657052395843, + "grad_norm": 1.3049808740615845, + "learning_rate": 1.788486041932121e-05, + "loss": 0.096, + "step": 70750 + }, + { + "epoch": 2.7321518205336113, + "grad_norm": 0.05369478836655617, + "learning_rate": 1.7859119399719424e-05, + "loss": 0.1148, + "step": 70760 + }, + { + "epoch": 2.7325379358276383, + "grad_norm": 0.028254307806491852, + "learning_rate": 1.7833378380117637e-05, + "loss": 0.1491, + "step": 70770 + }, + { + "epoch": 2.732924051121665, + "grad_norm": 0.44366562366485596, + "learning_rate": 1.780763736051585e-05, + "loss": 0.1294, + "step": 70780 + }, + { + "epoch": 
2.733310166415692, + "grad_norm": 1.80902099609375, + "learning_rate": 1.7781896340914064e-05, + "loss": 0.4409, + "step": 70790 + }, + { + "epoch": 2.733696281709719, + "grad_norm": 0.48230284452438354, + "learning_rate": 1.775615532131228e-05, + "loss": 0.1516, + "step": 70800 + }, + { + "epoch": 2.7340823970037453, + "grad_norm": 1.295810341835022, + "learning_rate": 1.773041430171049e-05, + "loss": 0.1375, + "step": 70810 + }, + { + "epoch": 2.734468512297772, + "grad_norm": 0.05213301628828049, + "learning_rate": 1.7704673282108707e-05, + "loss": 0.1678, + "step": 70820 + }, + { + "epoch": 2.734854627591799, + "grad_norm": 1.5852004289627075, + "learning_rate": 1.767893226250692e-05, + "loss": 0.2333, + "step": 70830 + }, + { + "epoch": 2.735240742885826, + "grad_norm": 0.05190286785364151, + "learning_rate": 1.765319124290513e-05, + "loss": 0.1131, + "step": 70840 + }, + { + "epoch": 2.7356268581798524, + "grad_norm": 0.9715459942817688, + "learning_rate": 1.7627450223303347e-05, + "loss": 0.2, + "step": 70850 + }, + { + "epoch": 2.7360129734738794, + "grad_norm": 1.0015023946762085, + "learning_rate": 1.760170920370156e-05, + "loss": 0.1492, + "step": 70860 + }, + { + "epoch": 2.736399088767906, + "grad_norm": 0.4785858392715454, + "learning_rate": 1.7575968184099773e-05, + "loss": 0.1157, + "step": 70870 + }, + { + "epoch": 2.736785204061933, + "grad_norm": 1.2634512186050415, + "learning_rate": 1.7550227164497987e-05, + "loss": 0.196, + "step": 70880 + }, + { + "epoch": 2.7371713193559595, + "grad_norm": 0.08982031047344208, + "learning_rate": 1.75244861448962e-05, + "loss": 0.1772, + "step": 70890 + }, + { + "epoch": 2.7375574346499865, + "grad_norm": 0.2539536952972412, + "learning_rate": 1.7498745125294413e-05, + "loss": 0.2373, + "step": 70900 + }, + { + "epoch": 2.7379435499440135, + "grad_norm": 1.94331955909729, + "learning_rate": 1.7473004105692627e-05, + "loss": 0.1113, + "step": 70910 + }, + { + "epoch": 2.73832966523804, + "grad_norm": 1.7793807983398438, + "learning_rate": 1.744726308609084e-05, + "loss": 0.1222, + "step": 70920 + }, + { + "epoch": 2.738715780532067, + "grad_norm": 0.9183433055877686, + "learning_rate": 1.7421522066489053e-05, + "loss": 0.0982, + "step": 70930 + }, + { + "epoch": 2.7391018958260935, + "grad_norm": 0.7785767316818237, + "learning_rate": 1.739578104688727e-05, + "loss": 0.2077, + "step": 70940 + }, + { + "epoch": 2.7394880111201205, + "grad_norm": 0.363359272480011, + "learning_rate": 1.737004002728548e-05, + "loss": 0.2365, + "step": 70950 + }, + { + "epoch": 2.739874126414147, + "grad_norm": 0.026698095723986626, + "learning_rate": 1.7344299007683697e-05, + "loss": 0.185, + "step": 70960 + }, + { + "epoch": 2.740260241708174, + "grad_norm": 0.3695981502532959, + "learning_rate": 1.731855798808191e-05, + "loss": 0.1889, + "step": 70970 + }, + { + "epoch": 2.740646357002201, + "grad_norm": 0.43547266721725464, + "learning_rate": 1.7292816968480123e-05, + "loss": 0.1945, + "step": 70980 + }, + { + "epoch": 2.7410324722962276, + "grad_norm": 0.815490186214447, + "learning_rate": 1.7267075948878337e-05, + "loss": 0.1461, + "step": 70990 + }, + { + "epoch": 2.7414185875902546, + "grad_norm": 0.9073535799980164, + "learning_rate": 1.724133492927655e-05, + "loss": 0.3003, + "step": 71000 + }, + { + "epoch": 2.741804702884281, + "grad_norm": 1.8418182134628296, + "learning_rate": 1.7215593909674763e-05, + "loss": 0.1591, + "step": 71010 + }, + { + "epoch": 2.742190818178308, + "grad_norm": 0.6584638953208923, + "learning_rate": 
1.7189852890072976e-05, + "loss": 0.0845, + "step": 71020 + }, + { + "epoch": 2.7425769334723347, + "grad_norm": 0.15774297714233398, + "learning_rate": 1.716411187047119e-05, + "loss": 0.185, + "step": 71030 + }, + { + "epoch": 2.7429630487663617, + "grad_norm": 1.1900436878204346, + "learning_rate": 1.7138370850869403e-05, + "loss": 0.2387, + "step": 71040 + }, + { + "epoch": 2.7433491640603886, + "grad_norm": 1.486275553703308, + "learning_rate": 1.7112629831267616e-05, + "loss": 0.1802, + "step": 71050 + }, + { + "epoch": 2.743735279354415, + "grad_norm": 1.8878792524337769, + "learning_rate": 1.708688881166583e-05, + "loss": 0.1771, + "step": 71060 + }, + { + "epoch": 2.744121394648442, + "grad_norm": 0.4045495390892029, + "learning_rate": 1.7061147792064043e-05, + "loss": 0.0617, + "step": 71070 + }, + { + "epoch": 2.7445075099424687, + "grad_norm": 1.6910227537155151, + "learning_rate": 1.703540677246226e-05, + "loss": 0.2295, + "step": 71080 + }, + { + "epoch": 2.7448936252364957, + "grad_norm": 1.3064563274383545, + "learning_rate": 1.700966575286047e-05, + "loss": 0.189, + "step": 71090 + }, + { + "epoch": 2.7452797405305223, + "grad_norm": 0.26879480481147766, + "learning_rate": 1.6983924733258686e-05, + "loss": 0.2041, + "step": 71100 + }, + { + "epoch": 2.7456658558245493, + "grad_norm": 0.09380711615085602, + "learning_rate": 1.69581837136569e-05, + "loss": 0.2032, + "step": 71110 + }, + { + "epoch": 2.7460519711185762, + "grad_norm": 0.4047906696796417, + "learning_rate": 1.6932442694055113e-05, + "loss": 0.1808, + "step": 71120 + }, + { + "epoch": 2.746438086412603, + "grad_norm": 0.14023207128047943, + "learning_rate": 1.6906701674453326e-05, + "loss": 0.0477, + "step": 71130 + }, + { + "epoch": 2.7468242017066293, + "grad_norm": 0.09656676650047302, + "learning_rate": 1.688096065485154e-05, + "loss": 0.1482, + "step": 71140 + }, + { + "epoch": 2.7472103170006563, + "grad_norm": 0.62394118309021, + "learning_rate": 1.6855219635249753e-05, + "loss": 0.0637, + "step": 71150 + }, + { + "epoch": 2.7475964322946833, + "grad_norm": 0.028015749529004097, + "learning_rate": 1.6829478615647966e-05, + "loss": 0.1396, + "step": 71160 + }, + { + "epoch": 2.74798254758871, + "grad_norm": 0.6252595782279968, + "learning_rate": 1.680373759604618e-05, + "loss": 0.1989, + "step": 71170 + }, + { + "epoch": 2.748368662882737, + "grad_norm": 1.6278966665267944, + "learning_rate": 1.6777996576444393e-05, + "loss": 0.2828, + "step": 71180 + }, + { + "epoch": 2.748754778176764, + "grad_norm": 0.7790352702140808, + "learning_rate": 1.675225555684261e-05, + "loss": 0.1542, + "step": 71190 + }, + { + "epoch": 2.7491408934707904, + "grad_norm": 0.41583356261253357, + "learning_rate": 1.672651453724082e-05, + "loss": 0.1853, + "step": 71200 + }, + { + "epoch": 2.749527008764817, + "grad_norm": 0.22601169347763062, + "learning_rate": 1.6700773517639036e-05, + "loss": 0.1429, + "step": 71210 + }, + { + "epoch": 2.749913124058844, + "grad_norm": 0.7268449068069458, + "learning_rate": 1.667503249803725e-05, + "loss": 0.156, + "step": 71220 + }, + { + "epoch": 2.750299239352871, + "grad_norm": 0.8059683442115784, + "learning_rate": 1.664929147843546e-05, + "loss": 0.1616, + "step": 71230 + }, + { + "epoch": 2.7506853546468975, + "grad_norm": 1.2625138759613037, + "learning_rate": 1.6623550458833676e-05, + "loss": 0.115, + "step": 71240 + }, + { + "epoch": 2.7510714699409244, + "grad_norm": 0.4710187315940857, + "learning_rate": 1.659780943923189e-05, + "loss": 0.1266, + "step": 71250 + }, + { + "epoch": 
2.7514575852349514, + "grad_norm": 1.3373891115188599, + "learning_rate": 1.6572068419630103e-05, + "loss": 0.1423, + "step": 71260 + }, + { + "epoch": 2.751843700528978, + "grad_norm": 0.5606533288955688, + "learning_rate": 1.6546327400028316e-05, + "loss": 0.0912, + "step": 71270 + }, + { + "epoch": 2.7522298158230045, + "grad_norm": 1.2201918363571167, + "learning_rate": 1.652058638042653e-05, + "loss": 0.2536, + "step": 71280 + }, + { + "epoch": 2.7526159311170315, + "grad_norm": 1.017829418182373, + "learning_rate": 1.6494845360824743e-05, + "loss": 0.0917, + "step": 71290 + }, + { + "epoch": 2.7530020464110585, + "grad_norm": 0.6897231340408325, + "learning_rate": 1.6469104341222956e-05, + "loss": 0.1222, + "step": 71300 + }, + { + "epoch": 2.753388161705085, + "grad_norm": 5.349620342254639, + "learning_rate": 1.644336332162117e-05, + "loss": 0.1121, + "step": 71310 + }, + { + "epoch": 2.753774276999112, + "grad_norm": 1.6144895553588867, + "learning_rate": 1.6417622302019383e-05, + "loss": 0.146, + "step": 71320 + }, + { + "epoch": 2.7541603922931386, + "grad_norm": 0.5989459753036499, + "learning_rate": 1.63918812824176e-05, + "loss": 0.0598, + "step": 71330 + }, + { + "epoch": 2.7545465075871656, + "grad_norm": 0.7278910279273987, + "learning_rate": 1.636614026281581e-05, + "loss": 0.1559, + "step": 71340 + }, + { + "epoch": 2.754932622881192, + "grad_norm": 1.0088047981262207, + "learning_rate": 1.6340399243214026e-05, + "loss": 0.0795, + "step": 71350 + }, + { + "epoch": 2.755318738175219, + "grad_norm": 1.3781206607818604, + "learning_rate": 1.631465822361224e-05, + "loss": 0.0628, + "step": 71360 + }, + { + "epoch": 2.755704853469246, + "grad_norm": 1.498246431350708, + "learning_rate": 1.6288917204010453e-05, + "loss": 0.1908, + "step": 71370 + }, + { + "epoch": 2.7560909687632726, + "grad_norm": 0.2840694785118103, + "learning_rate": 1.6263176184408666e-05, + "loss": 0.0357, + "step": 71380 + }, + { + "epoch": 2.7564770840572996, + "grad_norm": 0.6458057761192322, + "learning_rate": 1.623743516480688e-05, + "loss": 0.215, + "step": 71390 + }, + { + "epoch": 2.756863199351326, + "grad_norm": 0.42117947340011597, + "learning_rate": 1.6211694145205092e-05, + "loss": 0.1362, + "step": 71400 + }, + { + "epoch": 2.757249314645353, + "grad_norm": 3.0162267684936523, + "learning_rate": 1.6185953125603306e-05, + "loss": 0.1965, + "step": 71410 + }, + { + "epoch": 2.7576354299393797, + "grad_norm": 0.930474579334259, + "learning_rate": 1.616021210600152e-05, + "loss": 0.2395, + "step": 71420 + }, + { + "epoch": 2.7580215452334067, + "grad_norm": 0.7894459366798401, + "learning_rate": 1.6134471086399732e-05, + "loss": 0.2285, + "step": 71430 + }, + { + "epoch": 2.7584076605274337, + "grad_norm": 3.2045016288757324, + "learning_rate": 1.6108730066797946e-05, + "loss": 0.1819, + "step": 71440 + }, + { + "epoch": 2.7587937758214602, + "grad_norm": 0.7269306778907776, + "learning_rate": 1.608298904719616e-05, + "loss": 0.1594, + "step": 71450 + }, + { + "epoch": 2.7591798911154872, + "grad_norm": 1.333078145980835, + "learning_rate": 1.6057248027594372e-05, + "loss": 0.2131, + "step": 71460 + }, + { + "epoch": 2.7595660064095138, + "grad_norm": 0.6923009753227234, + "learning_rate": 1.603150700799259e-05, + "loss": 0.1174, + "step": 71470 + }, + { + "epoch": 2.7599521217035408, + "grad_norm": 3.510756254196167, + "learning_rate": 1.60057659883908e-05, + "loss": 0.1368, + "step": 71480 + }, + { + "epoch": 2.7603382369975673, + "grad_norm": 0.035045500844717026, + "learning_rate": 
1.5980024968789016e-05, + "loss": 0.2163, + "step": 71490 + }, + { + "epoch": 2.7607243522915943, + "grad_norm": 0.9699954390525818, + "learning_rate": 1.595428394918723e-05, + "loss": 0.1279, + "step": 71500 + }, + { + "epoch": 2.7611104675856213, + "grad_norm": 1.3624379634857178, + "learning_rate": 1.5928542929585442e-05, + "loss": 0.1458, + "step": 71510 + }, + { + "epoch": 2.761496582879648, + "grad_norm": 2.585167646408081, + "learning_rate": 1.5902801909983656e-05, + "loss": 0.2321, + "step": 71520 + }, + { + "epoch": 2.761882698173675, + "grad_norm": 2.018916130065918, + "learning_rate": 1.587706089038187e-05, + "loss": 0.1301, + "step": 71530 + }, + { + "epoch": 2.7622688134677014, + "grad_norm": 0.1349097490310669, + "learning_rate": 1.5851319870780082e-05, + "loss": 0.1449, + "step": 71540 + }, + { + "epoch": 2.7626549287617284, + "grad_norm": 0.651360273361206, + "learning_rate": 1.5825578851178296e-05, + "loss": 0.0588, + "step": 71550 + }, + { + "epoch": 2.763041044055755, + "grad_norm": 1.081132411956787, + "learning_rate": 1.579983783157651e-05, + "loss": 0.2273, + "step": 71560 + }, + { + "epoch": 2.763427159349782, + "grad_norm": 0.9000619649887085, + "learning_rate": 1.5774096811974722e-05, + "loss": 0.126, + "step": 71570 + }, + { + "epoch": 2.763813274643809, + "grad_norm": 1.7903470993041992, + "learning_rate": 1.574835579237294e-05, + "loss": 0.1583, + "step": 71580 + }, + { + "epoch": 2.7641993899378354, + "grad_norm": 0.5552549362182617, + "learning_rate": 1.572261477277115e-05, + "loss": 0.1613, + "step": 71590 + }, + { + "epoch": 2.764585505231862, + "grad_norm": 0.24077007174491882, + "learning_rate": 1.5696873753169365e-05, + "loss": 0.1225, + "step": 71600 + }, + { + "epoch": 2.764971620525889, + "grad_norm": 2.2357699871063232, + "learning_rate": 1.567113273356758e-05, + "loss": 0.1241, + "step": 71610 + }, + { + "epoch": 2.765357735819916, + "grad_norm": 0.49114760756492615, + "learning_rate": 1.564539171396579e-05, + "loss": 0.3685, + "step": 71620 + }, + { + "epoch": 2.7657438511139425, + "grad_norm": 0.5270382761955261, + "learning_rate": 1.5619650694364005e-05, + "loss": 0.231, + "step": 71630 + }, + { + "epoch": 2.7661299664079695, + "grad_norm": 1.494850993156433, + "learning_rate": 1.559390967476222e-05, + "loss": 0.1456, + "step": 71640 + }, + { + "epoch": 2.7665160817019965, + "grad_norm": 0.32450973987579346, + "learning_rate": 1.5568168655160432e-05, + "loss": 0.2541, + "step": 71650 + }, + { + "epoch": 2.766902196996023, + "grad_norm": 0.1868717074394226, + "learning_rate": 1.5542427635558645e-05, + "loss": 0.2315, + "step": 71660 + }, + { + "epoch": 2.7672883122900496, + "grad_norm": 1.8473451137542725, + "learning_rate": 1.551668661595686e-05, + "loss": 0.2918, + "step": 71670 + }, + { + "epoch": 2.7676744275840766, + "grad_norm": 1.959158182144165, + "learning_rate": 1.5490945596355072e-05, + "loss": 0.1368, + "step": 71680 + }, + { + "epoch": 2.7680605428781035, + "grad_norm": 1.8263370990753174, + "learning_rate": 1.5465204576753285e-05, + "loss": 0.2409, + "step": 71690 + }, + { + "epoch": 2.76844665817213, + "grad_norm": 0.5502326488494873, + "learning_rate": 1.54394635571515e-05, + "loss": 0.1233, + "step": 71700 + }, + { + "epoch": 2.768832773466157, + "grad_norm": 1.971820592880249, + "learning_rate": 1.5413722537549712e-05, + "loss": 0.2207, + "step": 71710 + }, + { + "epoch": 2.7692188887601836, + "grad_norm": 1.1263493299484253, + "learning_rate": 1.538798151794793e-05, + "loss": 0.1296, + "step": 71720 + }, + { + "epoch": 
2.7696050040542106, + "grad_norm": 0.28595641255378723, + "learning_rate": 1.536224049834614e-05, + "loss": 0.1539, + "step": 71730 + }, + { + "epoch": 2.769991119348237, + "grad_norm": 1.1686561107635498, + "learning_rate": 1.5336499478744355e-05, + "loss": 0.2165, + "step": 71740 + }, + { + "epoch": 2.770377234642264, + "grad_norm": 0.6559491157531738, + "learning_rate": 1.531075845914257e-05, + "loss": 0.1464, + "step": 71750 + }, + { + "epoch": 2.770763349936291, + "grad_norm": 0.5865970253944397, + "learning_rate": 1.5285017439540782e-05, + "loss": 0.1076, + "step": 71760 + }, + { + "epoch": 2.7711494652303177, + "grad_norm": 1.1443376541137695, + "learning_rate": 1.5259276419938995e-05, + "loss": 0.1497, + "step": 71770 + }, + { + "epoch": 2.7715355805243447, + "grad_norm": 1.307176947593689, + "learning_rate": 1.523353540033721e-05, + "loss": 0.1435, + "step": 71780 + }, + { + "epoch": 2.7719216958183712, + "grad_norm": 0.014405300840735435, + "learning_rate": 1.5207794380735422e-05, + "loss": 0.1249, + "step": 71790 + }, + { + "epoch": 2.772307811112398, + "grad_norm": 1.4182642698287964, + "learning_rate": 1.5182053361133635e-05, + "loss": 0.1405, + "step": 71800 + }, + { + "epoch": 2.7726939264064248, + "grad_norm": 0.27843913435935974, + "learning_rate": 1.5156312341531847e-05, + "loss": 0.2303, + "step": 71810 + }, + { + "epoch": 2.7730800417004517, + "grad_norm": 2.4468517303466797, + "learning_rate": 1.5130571321930062e-05, + "loss": 0.1322, + "step": 71820 + }, + { + "epoch": 2.7734661569944787, + "grad_norm": 1.2800359725952148, + "learning_rate": 1.5104830302328277e-05, + "loss": 0.1375, + "step": 71830 + }, + { + "epoch": 2.7738522722885053, + "grad_norm": 0.3998767137527466, + "learning_rate": 1.5079089282726488e-05, + "loss": 0.1066, + "step": 71840 + }, + { + "epoch": 2.7742383875825323, + "grad_norm": 0.5334371328353882, + "learning_rate": 1.5053348263124703e-05, + "loss": 0.1777, + "step": 71850 + }, + { + "epoch": 2.774624502876559, + "grad_norm": 0.4894556403160095, + "learning_rate": 1.5027607243522918e-05, + "loss": 0.1134, + "step": 71860 + }, + { + "epoch": 2.775010618170586, + "grad_norm": 0.6681411266326904, + "learning_rate": 1.500186622392113e-05, + "loss": 0.1274, + "step": 71870 + }, + { + "epoch": 2.7753967334646124, + "grad_norm": 0.8148763179779053, + "learning_rate": 1.4976125204319343e-05, + "loss": 0.184, + "step": 71880 + }, + { + "epoch": 2.7757828487586393, + "grad_norm": 2.98481822013855, + "learning_rate": 1.4950384184717558e-05, + "loss": 0.1414, + "step": 71890 + }, + { + "epoch": 2.7761689640526663, + "grad_norm": 0.24513117969036102, + "learning_rate": 1.492464316511577e-05, + "loss": 0.1361, + "step": 71900 + }, + { + "epoch": 2.776555079346693, + "grad_norm": 1.320607304573059, + "learning_rate": 1.4898902145513985e-05, + "loss": 0.159, + "step": 71910 + }, + { + "epoch": 2.77694119464072, + "grad_norm": 0.09256679564714432, + "learning_rate": 1.48731611259122e-05, + "loss": 0.1017, + "step": 71920 + }, + { + "epoch": 2.7773273099347464, + "grad_norm": 0.888762891292572, + "learning_rate": 1.4847420106310412e-05, + "loss": 0.1709, + "step": 71930 + }, + { + "epoch": 2.7777134252287734, + "grad_norm": 1.0178054571151733, + "learning_rate": 1.4821679086708627e-05, + "loss": 0.1705, + "step": 71940 + }, + { + "epoch": 2.7780995405228, + "grad_norm": 1.133257508277893, + "learning_rate": 1.4795938067106838e-05, + "loss": 0.1384, + "step": 71950 + }, + { + "epoch": 2.778485655816827, + "grad_norm": 0.41183799505233765, + "learning_rate": 
1.4770197047505051e-05, + "loss": 0.1516, + "step": 71960 + }, + { + "epoch": 2.778871771110854, + "grad_norm": 2.172168731689453, + "learning_rate": 1.4744456027903266e-05, + "loss": 0.1806, + "step": 71970 + }, + { + "epoch": 2.7792578864048805, + "grad_norm": 0.4734342098236084, + "learning_rate": 1.4718715008301478e-05, + "loss": 0.093, + "step": 71980 + }, + { + "epoch": 2.779644001698907, + "grad_norm": 0.38913142681121826, + "learning_rate": 1.4692973988699693e-05, + "loss": 0.1117, + "step": 71990 + }, + { + "epoch": 2.780030116992934, + "grad_norm": 1.2493480443954468, + "learning_rate": 1.4667232969097908e-05, + "loss": 0.2148, + "step": 72000 + }, + { + "epoch": 2.780416232286961, + "grad_norm": 0.6025747060775757, + "learning_rate": 1.464149194949612e-05, + "loss": 0.1705, + "step": 72010 + }, + { + "epoch": 2.7808023475809875, + "grad_norm": 1.4697037935256958, + "learning_rate": 1.4615750929894335e-05, + "loss": 0.1062, + "step": 72020 + }, + { + "epoch": 2.7811884628750145, + "grad_norm": 0.40200480818748474, + "learning_rate": 1.4590009910292548e-05, + "loss": 0.1152, + "step": 72030 + }, + { + "epoch": 2.7815745781690415, + "grad_norm": 0.5441505908966064, + "learning_rate": 1.456426889069076e-05, + "loss": 0.1488, + "step": 72040 + }, + { + "epoch": 2.781960693463068, + "grad_norm": 0.45176851749420166, + "learning_rate": 1.4538527871088975e-05, + "loss": 0.0965, + "step": 72050 + }, + { + "epoch": 2.7823468087570946, + "grad_norm": 0.7421501278877258, + "learning_rate": 1.4512786851487186e-05, + "loss": 0.1045, + "step": 72060 + }, + { + "epoch": 2.7827329240511216, + "grad_norm": 0.9444339871406555, + "learning_rate": 1.4487045831885401e-05, + "loss": 0.164, + "step": 72070 + }, + { + "epoch": 2.7831190393451486, + "grad_norm": 1.1744027137756348, + "learning_rate": 1.4461304812283616e-05, + "loss": 0.1624, + "step": 72080 + }, + { + "epoch": 2.783505154639175, + "grad_norm": 1.2394273281097412, + "learning_rate": 1.4435563792681828e-05, + "loss": 0.2018, + "step": 72090 + }, + { + "epoch": 2.783891269933202, + "grad_norm": 1.3852754831314087, + "learning_rate": 1.4409822773080043e-05, + "loss": 0.3339, + "step": 72100 + }, + { + "epoch": 2.784277385227229, + "grad_norm": 0.030897267162799835, + "learning_rate": 1.4384081753478256e-05, + "loss": 0.1089, + "step": 72110 + }, + { + "epoch": 2.7846635005212557, + "grad_norm": 0.41380831599235535, + "learning_rate": 1.4358340733876468e-05, + "loss": 0.0955, + "step": 72120 + }, + { + "epoch": 2.785049615815282, + "grad_norm": 0.40943947434425354, + "learning_rate": 1.4332599714274683e-05, + "loss": 0.1309, + "step": 72130 + }, + { + "epoch": 2.785435731109309, + "grad_norm": 5.614469528198242, + "learning_rate": 1.4306858694672898e-05, + "loss": 0.2242, + "step": 72140 + }, + { + "epoch": 2.785821846403336, + "grad_norm": 1.1512348651885986, + "learning_rate": 1.428111767507111e-05, + "loss": 0.1386, + "step": 72150 + }, + { + "epoch": 2.7862079616973627, + "grad_norm": 1.467037558555603, + "learning_rate": 1.4255376655469324e-05, + "loss": 0.1531, + "step": 72160 + }, + { + "epoch": 2.7865940769913897, + "grad_norm": 0.7877001762390137, + "learning_rate": 1.422963563586754e-05, + "loss": 0.1488, + "step": 72170 + }, + { + "epoch": 2.7869801922854163, + "grad_norm": 0.7077778577804565, + "learning_rate": 1.4203894616265751e-05, + "loss": 0.1213, + "step": 72180 + }, + { + "epoch": 2.7873663075794433, + "grad_norm": 0.6909589171409607, + "learning_rate": 1.4178153596663964e-05, + "loss": 0.2211, + "step": 72190 + }, + { + 
"epoch": 2.78775242287347, + "grad_norm": 0.23613958060741425, + "learning_rate": 1.4152412577062176e-05, + "loss": 0.1896, + "step": 72200 + }, + { + "epoch": 2.788138538167497, + "grad_norm": 1.218856930732727, + "learning_rate": 1.4126671557460391e-05, + "loss": 0.0989, + "step": 72210 + }, + { + "epoch": 2.788524653461524, + "grad_norm": 1.9225094318389893, + "learning_rate": 1.4100930537858606e-05, + "loss": 0.1355, + "step": 72220 + }, + { + "epoch": 2.7889107687555503, + "grad_norm": 1.3572889566421509, + "learning_rate": 1.4075189518256818e-05, + "loss": 0.2708, + "step": 72230 + }, + { + "epoch": 2.7892968840495773, + "grad_norm": 1.4339534044265747, + "learning_rate": 1.4049448498655033e-05, + "loss": 0.173, + "step": 72240 + }, + { + "epoch": 2.789682999343604, + "grad_norm": 1.6303379535675049, + "learning_rate": 1.4023707479053248e-05, + "loss": 0.1791, + "step": 72250 + }, + { + "epoch": 2.790069114637631, + "grad_norm": 0.25583216547966003, + "learning_rate": 1.399796645945146e-05, + "loss": 0.0694, + "step": 72260 + }, + { + "epoch": 2.7904552299316574, + "grad_norm": 0.29900923371315, + "learning_rate": 1.3972225439849673e-05, + "loss": 0.0942, + "step": 72270 + }, + { + "epoch": 2.7908413452256844, + "grad_norm": 0.4300585687160492, + "learning_rate": 1.3946484420247888e-05, + "loss": 0.1378, + "step": 72280 + }, + { + "epoch": 2.7912274605197114, + "grad_norm": 2.40873384475708, + "learning_rate": 1.39207434006461e-05, + "loss": 0.2227, + "step": 72290 + }, + { + "epoch": 2.791613575813738, + "grad_norm": 1.8558450937271118, + "learning_rate": 1.3895002381044314e-05, + "loss": 0.1489, + "step": 72300 + }, + { + "epoch": 2.791999691107765, + "grad_norm": 0.7937175631523132, + "learning_rate": 1.386926136144253e-05, + "loss": 0.2045, + "step": 72310 + }, + { + "epoch": 2.7923858064017915, + "grad_norm": 1.4006129503250122, + "learning_rate": 1.3843520341840741e-05, + "loss": 0.205, + "step": 72320 + }, + { + "epoch": 2.7927719216958184, + "grad_norm": 0.765356183052063, + "learning_rate": 1.3817779322238956e-05, + "loss": 0.0647, + "step": 72330 + }, + { + "epoch": 2.793158036989845, + "grad_norm": 0.9293017983436584, + "learning_rate": 1.3792038302637167e-05, + "loss": 0.149, + "step": 72340 + }, + { + "epoch": 2.793544152283872, + "grad_norm": 0.1647326946258545, + "learning_rate": 1.376629728303538e-05, + "loss": 0.0957, + "step": 72350 + }, + { + "epoch": 2.793930267577899, + "grad_norm": 1.7377187013626099, + "learning_rate": 1.3740556263433596e-05, + "loss": 0.1945, + "step": 72360 + }, + { + "epoch": 2.7943163828719255, + "grad_norm": 1.4326138496398926, + "learning_rate": 1.3714815243831807e-05, + "loss": 0.1362, + "step": 72370 + }, + { + "epoch": 2.7947024981659525, + "grad_norm": 0.07872216403484344, + "learning_rate": 1.3689074224230022e-05, + "loss": 0.1186, + "step": 72380 + }, + { + "epoch": 2.795088613459979, + "grad_norm": 2.5933990478515625, + "learning_rate": 1.3663333204628237e-05, + "loss": 0.3499, + "step": 72390 + }, + { + "epoch": 2.795474728754006, + "grad_norm": 0.8128255009651184, + "learning_rate": 1.3637592185026449e-05, + "loss": 0.1798, + "step": 72400 + }, + { + "epoch": 2.7958608440480326, + "grad_norm": 1.3631271123886108, + "learning_rate": 1.3611851165424664e-05, + "loss": 0.1602, + "step": 72410 + }, + { + "epoch": 2.7962469593420596, + "grad_norm": 0.2131202220916748, + "learning_rate": 1.3586110145822877e-05, + "loss": 0.2031, + "step": 72420 + }, + { + "epoch": 2.7966330746360866, + "grad_norm": 2.3542559146881104, + "learning_rate": 
1.3560369126221089e-05, + "loss": 0.2783, + "step": 72430 + }, + { + "epoch": 2.797019189930113, + "grad_norm": 1.075270175933838, + "learning_rate": 1.3534628106619304e-05, + "loss": 0.2014, + "step": 72440 + }, + { + "epoch": 2.7974053052241397, + "grad_norm": 1.6369863748550415, + "learning_rate": 1.3508887087017516e-05, + "loss": 0.1512, + "step": 72450 + }, + { + "epoch": 2.7977914205181666, + "grad_norm": 2.450411081314087, + "learning_rate": 1.348314606741573e-05, + "loss": 0.1899, + "step": 72460 + }, + { + "epoch": 2.7981775358121936, + "grad_norm": 0.5244776010513306, + "learning_rate": 1.3457405047813946e-05, + "loss": 0.1652, + "step": 72470 + }, + { + "epoch": 2.79856365110622, + "grad_norm": 1.206304907798767, + "learning_rate": 1.3431664028212157e-05, + "loss": 0.1695, + "step": 72480 + }, + { + "epoch": 2.798949766400247, + "grad_norm": 1.7752883434295654, + "learning_rate": 1.3405923008610372e-05, + "loss": 0.2092, + "step": 72490 + }, + { + "epoch": 2.799335881694274, + "grad_norm": 2.458888530731201, + "learning_rate": 1.3380181989008586e-05, + "loss": 0.1963, + "step": 72500 + }, + { + "epoch": 2.7997219969883007, + "grad_norm": 0.24970018863677979, + "learning_rate": 1.3354440969406797e-05, + "loss": 0.1438, + "step": 72510 + }, + { + "epoch": 2.8001081122823273, + "grad_norm": 1.6026058197021484, + "learning_rate": 1.3328699949805012e-05, + "loss": 0.1702, + "step": 72520 + }, + { + "epoch": 2.8004942275763542, + "grad_norm": 0.6461538076400757, + "learning_rate": 1.3302958930203227e-05, + "loss": 0.1725, + "step": 72530 + }, + { + "epoch": 2.8008803428703812, + "grad_norm": 0.30602967739105225, + "learning_rate": 1.3277217910601439e-05, + "loss": 0.1018, + "step": 72540 + }, + { + "epoch": 2.801266458164408, + "grad_norm": 0.5502751469612122, + "learning_rate": 1.3251476890999654e-05, + "loss": 0.1793, + "step": 72550 + }, + { + "epoch": 2.8016525734584348, + "grad_norm": 0.2644082307815552, + "learning_rate": 1.3225735871397869e-05, + "loss": 0.0423, + "step": 72560 + }, + { + "epoch": 2.8020386887524618, + "grad_norm": 2.0505154132843018, + "learning_rate": 1.319999485179608e-05, + "loss": 0.2418, + "step": 72570 + }, + { + "epoch": 2.8024248040464883, + "grad_norm": 3.0608208179473877, + "learning_rate": 1.3174253832194294e-05, + "loss": 0.1613, + "step": 72580 + }, + { + "epoch": 2.802810919340515, + "grad_norm": 0.05718081071972847, + "learning_rate": 1.3148512812592505e-05, + "loss": 0.132, + "step": 72590 + }, + { + "epoch": 2.803197034634542, + "grad_norm": 2.007830858230591, + "learning_rate": 1.312277179299072e-05, + "loss": 0.138, + "step": 72600 + }, + { + "epoch": 2.803583149928569, + "grad_norm": 0.4181762933731079, + "learning_rate": 1.3097030773388935e-05, + "loss": 0.0742, + "step": 72610 + }, + { + "epoch": 2.8039692652225954, + "grad_norm": 1.4194786548614502, + "learning_rate": 1.3071289753787147e-05, + "loss": 0.1517, + "step": 72620 + }, + { + "epoch": 2.8043553805166224, + "grad_norm": 0.03250798210501671, + "learning_rate": 1.3045548734185362e-05, + "loss": 0.1, + "step": 72630 + }, + { + "epoch": 2.804741495810649, + "grad_norm": 0.983828067779541, + "learning_rate": 1.3019807714583577e-05, + "loss": 0.1765, + "step": 72640 + }, + { + "epoch": 2.805127611104676, + "grad_norm": 0.4670206904411316, + "learning_rate": 1.2994066694981789e-05, + "loss": 0.0954, + "step": 72650 + }, + { + "epoch": 2.8055137263987024, + "grad_norm": 0.09260097146034241, + "learning_rate": 1.2968325675380002e-05, + "loss": 0.119, + "step": 72660 + }, + { + "epoch": 
2.8058998416927294, + "grad_norm": 2.12126088142395, + "learning_rate": 1.2942584655778217e-05, + "loss": 0.1673, + "step": 72670 + }, + { + "epoch": 2.8062859569867564, + "grad_norm": 0.11968256533145905, + "learning_rate": 1.2916843636176429e-05, + "loss": 0.0941, + "step": 72680 + }, + { + "epoch": 2.806672072280783, + "grad_norm": 1.229608416557312, + "learning_rate": 1.2891102616574644e-05, + "loss": 0.2319, + "step": 72690 + }, + { + "epoch": 2.80705818757481, + "grad_norm": 0.7404507994651794, + "learning_rate": 1.2865361596972859e-05, + "loss": 0.1878, + "step": 72700 + }, + { + "epoch": 2.8074443028688365, + "grad_norm": 0.15742874145507812, + "learning_rate": 1.283962057737107e-05, + "loss": 0.2041, + "step": 72710 + }, + { + "epoch": 2.8078304181628635, + "grad_norm": 1.860520601272583, + "learning_rate": 1.2813879557769285e-05, + "loss": 0.1575, + "step": 72720 + }, + { + "epoch": 2.80821653345689, + "grad_norm": 2.309978723526001, + "learning_rate": 1.2788138538167497e-05, + "loss": 0.104, + "step": 72730 + }, + { + "epoch": 2.808602648750917, + "grad_norm": 0.4069162905216217, + "learning_rate": 1.276239751856571e-05, + "loss": 0.1167, + "step": 72740 + }, + { + "epoch": 2.808988764044944, + "grad_norm": 0.9853174090385437, + "learning_rate": 1.2736656498963925e-05, + "loss": 0.1826, + "step": 72750 + }, + { + "epoch": 2.8093748793389706, + "grad_norm": 0.29024359583854675, + "learning_rate": 1.2710915479362137e-05, + "loss": 0.1662, + "step": 72760 + }, + { + "epoch": 2.8097609946329976, + "grad_norm": 1.698085069656372, + "learning_rate": 1.2685174459760352e-05, + "loss": 0.2987, + "step": 72770 + }, + { + "epoch": 2.810147109927024, + "grad_norm": 0.6112163066864014, + "learning_rate": 1.2659433440158567e-05, + "loss": 0.1074, + "step": 72780 + }, + { + "epoch": 2.810533225221051, + "grad_norm": 0.6659224033355713, + "learning_rate": 1.2633692420556778e-05, + "loss": 0.1514, + "step": 72790 + }, + { + "epoch": 2.8109193405150776, + "grad_norm": 0.40598243474960327, + "learning_rate": 1.2607951400954993e-05, + "loss": 0.1022, + "step": 72800 + }, + { + "epoch": 2.8113054558091046, + "grad_norm": 0.3016485571861267, + "learning_rate": 1.2582210381353207e-05, + "loss": 0.1226, + "step": 72810 + }, + { + "epoch": 2.8116915711031316, + "grad_norm": 0.8959118127822876, + "learning_rate": 1.2556469361751418e-05, + "loss": 0.1283, + "step": 72820 + }, + { + "epoch": 2.812077686397158, + "grad_norm": 0.1264275461435318, + "learning_rate": 1.2530728342149633e-05, + "loss": 0.1191, + "step": 72830 + }, + { + "epoch": 2.812463801691185, + "grad_norm": 0.06748595088720322, + "learning_rate": 1.2504987322547848e-05, + "loss": 0.1728, + "step": 72840 + }, + { + "epoch": 2.8128499169852117, + "grad_norm": 1.8376634120941162, + "learning_rate": 1.247924630294606e-05, + "loss": 0.1773, + "step": 72850 + }, + { + "epoch": 2.8132360322792387, + "grad_norm": 1.0151314735412598, + "learning_rate": 1.2453505283344275e-05, + "loss": 0.0979, + "step": 72860 + }, + { + "epoch": 2.8136221475732652, + "grad_norm": 0.274681955575943, + "learning_rate": 1.2427764263742488e-05, + "loss": 0.1806, + "step": 72870 + }, + { + "epoch": 2.814008262867292, + "grad_norm": 3.2869608402252197, + "learning_rate": 1.2402023244140702e-05, + "loss": 0.1469, + "step": 72880 + }, + { + "epoch": 2.814394378161319, + "grad_norm": 0.7580883502960205, + "learning_rate": 1.2376282224538915e-05, + "loss": 0.175, + "step": 72890 + }, + { + "epoch": 2.8147804934553458, + "grad_norm": 0.26281633973121643, + "learning_rate": 
1.2350541204937128e-05, + "loss": 0.1801, + "step": 72900 + }, + { + "epoch": 2.8151666087493723, + "grad_norm": 0.5014416575431824, + "learning_rate": 1.2324800185335341e-05, + "loss": 0.1159, + "step": 72910 + }, + { + "epoch": 2.8155527240433993, + "grad_norm": 0.5112303495407104, + "learning_rate": 1.2299059165733555e-05, + "loss": 0.0988, + "step": 72920 + }, + { + "epoch": 2.8159388393374263, + "grad_norm": 1.928898811340332, + "learning_rate": 1.227331814613177e-05, + "loss": 0.1261, + "step": 72930 + }, + { + "epoch": 2.816324954631453, + "grad_norm": 2.8064088821411133, + "learning_rate": 1.2247577126529983e-05, + "loss": 0.1811, + "step": 72940 + }, + { + "epoch": 2.81671106992548, + "grad_norm": 1.5859709978103638, + "learning_rate": 1.2221836106928196e-05, + "loss": 0.2235, + "step": 72950 + }, + { + "epoch": 2.817097185219507, + "grad_norm": 0.6035090684890747, + "learning_rate": 1.219609508732641e-05, + "loss": 0.0931, + "step": 72960 + }, + { + "epoch": 2.8174833005135334, + "grad_norm": 0.19035843014717102, + "learning_rate": 1.2170354067724623e-05, + "loss": 0.182, + "step": 72970 + }, + { + "epoch": 2.81786941580756, + "grad_norm": 0.2380947321653366, + "learning_rate": 1.2144613048122836e-05, + "loss": 0.0744, + "step": 72980 + }, + { + "epoch": 2.818255531101587, + "grad_norm": 0.5143213868141174, + "learning_rate": 1.211887202852105e-05, + "loss": 0.1252, + "step": 72990 + }, + { + "epoch": 2.818641646395614, + "grad_norm": 1.6957656145095825, + "learning_rate": 1.2093131008919265e-05, + "loss": 0.1701, + "step": 73000 + }, + { + "epoch": 2.8190277616896404, + "grad_norm": 0.31234198808670044, + "learning_rate": 1.2067389989317478e-05, + "loss": 0.0638, + "step": 73010 + }, + { + "epoch": 2.8194138769836674, + "grad_norm": 1.2418551445007324, + "learning_rate": 1.2041648969715691e-05, + "loss": 0.1972, + "step": 73020 + }, + { + "epoch": 2.819799992277694, + "grad_norm": 0.08601387590169907, + "learning_rate": 1.2015907950113905e-05, + "loss": 0.1808, + "step": 73030 + }, + { + "epoch": 2.820186107571721, + "grad_norm": 0.7416751384735107, + "learning_rate": 1.1990166930512118e-05, + "loss": 0.0543, + "step": 73040 + }, + { + "epoch": 2.8205722228657475, + "grad_norm": 0.957653284072876, + "learning_rate": 1.1964425910910331e-05, + "loss": 0.125, + "step": 73050 + }, + { + "epoch": 2.8209583381597745, + "grad_norm": 0.34786149859428406, + "learning_rate": 1.1938684891308545e-05, + "loss": 0.2388, + "step": 73060 + }, + { + "epoch": 2.8213444534538015, + "grad_norm": 1.0569926500320435, + "learning_rate": 1.1912943871706758e-05, + "loss": 0.1199, + "step": 73070 + }, + { + "epoch": 2.821730568747828, + "grad_norm": 1.948347568511963, + "learning_rate": 1.1887202852104973e-05, + "loss": 0.195, + "step": 73080 + }, + { + "epoch": 2.822116684041855, + "grad_norm": 0.18845289945602417, + "learning_rate": 1.1861461832503186e-05, + "loss": 0.295, + "step": 73090 + }, + { + "epoch": 2.8225027993358816, + "grad_norm": 1.7377262115478516, + "learning_rate": 1.18357208129014e-05, + "loss": 0.3067, + "step": 73100 + }, + { + "epoch": 2.8228889146299085, + "grad_norm": 1.232576847076416, + "learning_rate": 1.1809979793299614e-05, + "loss": 0.1849, + "step": 73110 + }, + { + "epoch": 2.823275029923935, + "grad_norm": 1.6318284273147583, + "learning_rate": 1.1784238773697826e-05, + "loss": 0.0626, + "step": 73120 + }, + { + "epoch": 2.823661145217962, + "grad_norm": 0.5187623500823975, + "learning_rate": 1.175849775409604e-05, + "loss": 0.1255, + "step": 73130 + }, + { + "epoch": 
2.824047260511989, + "grad_norm": 2.4732282161712646, + "learning_rate": 1.1732756734494253e-05, + "loss": 0.1433, + "step": 73140 + }, + { + "epoch": 2.8244333758060156, + "grad_norm": 2.080509901046753, + "learning_rate": 1.1707015714892468e-05, + "loss": 0.1533, + "step": 73150 + }, + { + "epoch": 2.8248194911000426, + "grad_norm": 0.61173415184021, + "learning_rate": 1.1681274695290681e-05, + "loss": 0.0807, + "step": 73160 + }, + { + "epoch": 2.825205606394069, + "grad_norm": 0.1210317462682724, + "learning_rate": 1.1655533675688894e-05, + "loss": 0.1433, + "step": 73170 + }, + { + "epoch": 2.825591721688096, + "grad_norm": 0.699754536151886, + "learning_rate": 1.162979265608711e-05, + "loss": 0.091, + "step": 73180 + }, + { + "epoch": 2.8259778369821227, + "grad_norm": 1.1935967206954956, + "learning_rate": 1.1604051636485323e-05, + "loss": 0.1567, + "step": 73190 + }, + { + "epoch": 2.8263639522761497, + "grad_norm": 0.14032036066055298, + "learning_rate": 1.1578310616883534e-05, + "loss": 0.1283, + "step": 73200 + }, + { + "epoch": 2.8267500675701767, + "grad_norm": 0.1003938615322113, + "learning_rate": 1.1552569597281748e-05, + "loss": 0.241, + "step": 73210 + }, + { + "epoch": 2.827136182864203, + "grad_norm": 0.08814160525798798, + "learning_rate": 1.1526828577679963e-05, + "loss": 0.1191, + "step": 73220 + }, + { + "epoch": 2.82752229815823, + "grad_norm": 2.224803924560547, + "learning_rate": 1.1501087558078176e-05, + "loss": 0.1395, + "step": 73230 + }, + { + "epoch": 2.8279084134522567, + "grad_norm": 0.7424294948577881, + "learning_rate": 1.147534653847639e-05, + "loss": 0.2086, + "step": 73240 + }, + { + "epoch": 2.8282945287462837, + "grad_norm": 2.4858903884887695, + "learning_rate": 1.1449605518874604e-05, + "loss": 0.3264, + "step": 73250 + }, + { + "epoch": 2.8286806440403103, + "grad_norm": 0.09597936272621155, + "learning_rate": 1.1423864499272818e-05, + "loss": 0.1849, + "step": 73260 + }, + { + "epoch": 2.8290667593343373, + "grad_norm": 0.14344246685504913, + "learning_rate": 1.1398123479671031e-05, + "loss": 0.0898, + "step": 73270 + }, + { + "epoch": 2.8294528746283643, + "grad_norm": 1.6673258543014526, + "learning_rate": 1.1372382460069242e-05, + "loss": 0.1218, + "step": 73280 + }, + { + "epoch": 2.829838989922391, + "grad_norm": 0.7718226313591003, + "learning_rate": 1.1346641440467457e-05, + "loss": 0.246, + "step": 73290 + }, + { + "epoch": 2.8302251052164173, + "grad_norm": 1.073569416999817, + "learning_rate": 1.132090042086567e-05, + "loss": 0.1469, + "step": 73300 + }, + { + "epoch": 2.8306112205104443, + "grad_norm": 0.6750389933586121, + "learning_rate": 1.1295159401263884e-05, + "loss": 0.0572, + "step": 73310 + }, + { + "epoch": 2.8309973358044713, + "grad_norm": 0.3106151819229126, + "learning_rate": 1.1269418381662099e-05, + "loss": 0.1877, + "step": 73320 + }, + { + "epoch": 2.831383451098498, + "grad_norm": 2.519272804260254, + "learning_rate": 1.1243677362060312e-05, + "loss": 0.3173, + "step": 73330 + }, + { + "epoch": 2.831769566392525, + "grad_norm": 0.1696769744157791, + "learning_rate": 1.1217936342458526e-05, + "loss": 0.0998, + "step": 73340 + }, + { + "epoch": 2.832155681686552, + "grad_norm": 1.1009591817855835, + "learning_rate": 1.1192195322856739e-05, + "loss": 0.1534, + "step": 73350 + }, + { + "epoch": 2.8325417969805784, + "grad_norm": 0.5018852949142456, + "learning_rate": 1.1166454303254952e-05, + "loss": 0.1132, + "step": 73360 + }, + { + "epoch": 2.832927912274605, + "grad_norm": 0.75879967212677, + "learning_rate": 
1.1140713283653166e-05, + "loss": 0.2494, + "step": 73370 + }, + { + "epoch": 2.833314027568632, + "grad_norm": 0.6770151853561401, + "learning_rate": 1.1114972264051379e-05, + "loss": 0.127, + "step": 73380 + }, + { + "epoch": 2.833700142862659, + "grad_norm": 0.7802016139030457, + "learning_rate": 1.1089231244449594e-05, + "loss": 0.0463, + "step": 73390 + }, + { + "epoch": 2.8340862581566855, + "grad_norm": 0.6405506134033203, + "learning_rate": 1.1063490224847807e-05, + "loss": 0.1039, + "step": 73400 + }, + { + "epoch": 2.8344723734507125, + "grad_norm": 1.5618057250976562, + "learning_rate": 1.103774920524602e-05, + "loss": 0.2303, + "step": 73410 + }, + { + "epoch": 2.8348584887447394, + "grad_norm": 0.07669465243816376, + "learning_rate": 1.1012008185644234e-05, + "loss": 0.0619, + "step": 73420 + }, + { + "epoch": 2.835244604038766, + "grad_norm": 1.495162010192871, + "learning_rate": 1.0986267166042447e-05, + "loss": 0.3976, + "step": 73430 + }, + { + "epoch": 2.8356307193327925, + "grad_norm": 1.0150856971740723, + "learning_rate": 1.096052614644066e-05, + "loss": 0.1209, + "step": 73440 + }, + { + "epoch": 2.8360168346268195, + "grad_norm": 0.0810672789812088, + "learning_rate": 1.0934785126838874e-05, + "loss": 0.1443, + "step": 73450 + }, + { + "epoch": 2.8364029499208465, + "grad_norm": 1.9854507446289062, + "learning_rate": 1.0909044107237089e-05, + "loss": 0.0724, + "step": 73460 + }, + { + "epoch": 2.836789065214873, + "grad_norm": 1.1847221851348877, + "learning_rate": 1.0883303087635302e-05, + "loss": 0.1215, + "step": 73470 + }, + { + "epoch": 2.8371751805089, + "grad_norm": 0.1890803575515747, + "learning_rate": 1.0857562068033515e-05, + "loss": 0.0693, + "step": 73480 + }, + { + "epoch": 2.8375612958029266, + "grad_norm": 1.4246773719787598, + "learning_rate": 1.0831821048431729e-05, + "loss": 0.1458, + "step": 73490 + }, + { + "epoch": 2.8379474110969536, + "grad_norm": 0.9759969115257263, + "learning_rate": 1.0806080028829944e-05, + "loss": 0.0894, + "step": 73500 + }, + { + "epoch": 2.83833352639098, + "grad_norm": 0.06872682273387909, + "learning_rate": 1.0780339009228155e-05, + "loss": 0.0911, + "step": 73510 + }, + { + "epoch": 2.838719641685007, + "grad_norm": 0.9503278732299805, + "learning_rate": 1.0754597989626369e-05, + "loss": 0.0367, + "step": 73520 + }, + { + "epoch": 2.839105756979034, + "grad_norm": 0.08986163884401321, + "learning_rate": 1.0728856970024582e-05, + "loss": 0.113, + "step": 73530 + }, + { + "epoch": 2.8394918722730607, + "grad_norm": 0.4085181951522827, + "learning_rate": 1.0703115950422797e-05, + "loss": 0.0858, + "step": 73540 + }, + { + "epoch": 2.8398779875670876, + "grad_norm": 2.045522689819336, + "learning_rate": 1.067737493082101e-05, + "loss": 0.0585, + "step": 73550 + }, + { + "epoch": 2.840264102861114, + "grad_norm": 1.701407551765442, + "learning_rate": 1.0651633911219224e-05, + "loss": 0.2093, + "step": 73560 + }, + { + "epoch": 2.840650218155141, + "grad_norm": 0.5299584865570068, + "learning_rate": 1.0625892891617439e-05, + "loss": 0.1673, + "step": 73570 + }, + { + "epoch": 2.8410363334491677, + "grad_norm": 1.289556622505188, + "learning_rate": 1.0600151872015652e-05, + "loss": 0.0876, + "step": 73580 + }, + { + "epoch": 2.8414224487431947, + "grad_norm": 1.0992724895477295, + "learning_rate": 1.0574410852413864e-05, + "loss": 0.1526, + "step": 73590 + }, + { + "epoch": 2.8418085640372217, + "grad_norm": 1.4800548553466797, + "learning_rate": 1.0548669832812077e-05, + "loss": 0.2792, + "step": 73600 + }, + { + "epoch": 
2.8421946793312483, + "grad_norm": 0.7440020442008972, + "learning_rate": 1.0522928813210292e-05, + "loss": 0.0838, + "step": 73610 + }, + { + "epoch": 2.8425807946252752, + "grad_norm": 0.5010614395141602, + "learning_rate": 1.0497187793608505e-05, + "loss": 0.2615, + "step": 73620 + }, + { + "epoch": 2.842966909919302, + "grad_norm": 0.6573401093482971, + "learning_rate": 1.0471446774006719e-05, + "loss": 0.1924, + "step": 73630 + }, + { + "epoch": 2.8433530252133288, + "grad_norm": 1.2293144464492798, + "learning_rate": 1.0445705754404934e-05, + "loss": 0.1723, + "step": 73640 + }, + { + "epoch": 2.8437391405073553, + "grad_norm": 1.2916191816329956, + "learning_rate": 1.0419964734803147e-05, + "loss": 0.2163, + "step": 73650 + }, + { + "epoch": 2.8441252558013823, + "grad_norm": 0.48834675550460815, + "learning_rate": 1.039422371520136e-05, + "loss": 0.3001, + "step": 73660 + }, + { + "epoch": 2.8445113710954093, + "grad_norm": 0.4397851228713989, + "learning_rate": 1.0368482695599572e-05, + "loss": 0.1354, + "step": 73670 + }, + { + "epoch": 2.844897486389436, + "grad_norm": 1.6258771419525146, + "learning_rate": 1.0342741675997787e-05, + "loss": 0.1677, + "step": 73680 + }, + { + "epoch": 2.845283601683463, + "grad_norm": 0.5988297462463379, + "learning_rate": 1.0317000656396e-05, + "loss": 0.0911, + "step": 73690 + }, + { + "epoch": 2.8456697169774894, + "grad_norm": 2.7825276851654053, + "learning_rate": 1.0291259636794213e-05, + "loss": 0.2635, + "step": 73700 + }, + { + "epoch": 2.8460558322715164, + "grad_norm": 0.0774473026394844, + "learning_rate": 1.0265518617192428e-05, + "loss": 0.1239, + "step": 73710 + }, + { + "epoch": 2.846441947565543, + "grad_norm": 0.22317875921726227, + "learning_rate": 1.0239777597590642e-05, + "loss": 0.2164, + "step": 73720 + }, + { + "epoch": 2.84682806285957, + "grad_norm": 0.15649321675300598, + "learning_rate": 1.0214036577988855e-05, + "loss": 0.0442, + "step": 73730 + }, + { + "epoch": 2.847214178153597, + "grad_norm": 1.7008354663848877, + "learning_rate": 1.0188295558387068e-05, + "loss": 0.0829, + "step": 73740 + }, + { + "epoch": 2.8476002934476234, + "grad_norm": 0.9527981281280518, + "learning_rate": 1.0162554538785282e-05, + "loss": 0.1655, + "step": 73750 + }, + { + "epoch": 2.84798640874165, + "grad_norm": 0.27073028683662415, + "learning_rate": 1.0136813519183495e-05, + "loss": 0.0909, + "step": 73760 + }, + { + "epoch": 2.848372524035677, + "grad_norm": 0.6584774851799011, + "learning_rate": 1.0111072499581708e-05, + "loss": 0.1365, + "step": 73770 + }, + { + "epoch": 2.848758639329704, + "grad_norm": 0.06433244794607162, + "learning_rate": 1.0085331479979923e-05, + "loss": 0.0936, + "step": 73780 + }, + { + "epoch": 2.8491447546237305, + "grad_norm": 0.31640946865081787, + "learning_rate": 1.0059590460378137e-05, + "loss": 0.053, + "step": 73790 + }, + { + "epoch": 2.8495308699177575, + "grad_norm": 1.378275752067566, + "learning_rate": 1.003384944077635e-05, + "loss": 0.2072, + "step": 73800 + }, + { + "epoch": 2.8499169852117845, + "grad_norm": 0.40181395411491394, + "learning_rate": 1.0008108421174563e-05, + "loss": 0.0458, + "step": 73810 + }, + { + "epoch": 2.850303100505811, + "grad_norm": 0.6035460233688354, + "learning_rate": 9.982367401572777e-06, + "loss": 0.2126, + "step": 73820 + }, + { + "epoch": 2.8506892157998376, + "grad_norm": 1.581748366355896, + "learning_rate": 9.95662638197099e-06, + "loss": 0.2245, + "step": 73830 + }, + { + "epoch": 2.8510753310938646, + "grad_norm": 0.15285348892211914, + "learning_rate": 
9.930885362369203e-06, + "loss": 0.2124, + "step": 73840 + }, + { + "epoch": 2.8514614463878916, + "grad_norm": 0.5655555725097656, + "learning_rate": 9.905144342767418e-06, + "loss": 0.215, + "step": 73850 + }, + { + "epoch": 2.851847561681918, + "grad_norm": 0.91652512550354, + "learning_rate": 9.879403323165631e-06, + "loss": 0.0512, + "step": 73860 + }, + { + "epoch": 2.852233676975945, + "grad_norm": 0.4889742136001587, + "learning_rate": 9.853662303563845e-06, + "loss": 0.0565, + "step": 73870 + }, + { + "epoch": 2.852619792269972, + "grad_norm": 0.5656816959381104, + "learning_rate": 9.827921283962058e-06, + "loss": 0.1392, + "step": 73880 + }, + { + "epoch": 2.8530059075639986, + "grad_norm": 0.5749841332435608, + "learning_rate": 9.802180264360273e-06, + "loss": 0.1548, + "step": 73890 + }, + { + "epoch": 2.853392022858025, + "grad_norm": 0.40446341037750244, + "learning_rate": 9.776439244758485e-06, + "loss": 0.1078, + "step": 73900 + }, + { + "epoch": 2.853778138152052, + "grad_norm": 0.7668073773384094, + "learning_rate": 9.750698225156698e-06, + "loss": 0.1282, + "step": 73910 + }, + { + "epoch": 2.854164253446079, + "grad_norm": 1.973522424697876, + "learning_rate": 9.724957205554913e-06, + "loss": 0.1002, + "step": 73920 + }, + { + "epoch": 2.8545503687401057, + "grad_norm": 3.882335662841797, + "learning_rate": 9.699216185953126e-06, + "loss": 0.3315, + "step": 73930 + }, + { + "epoch": 2.8549364840341327, + "grad_norm": 3.6236727237701416, + "learning_rate": 9.67347516635134e-06, + "loss": 0.0749, + "step": 73940 + }, + { + "epoch": 2.8553225993281592, + "grad_norm": 3.9697139263153076, + "learning_rate": 9.647734146749553e-06, + "loss": 0.165, + "step": 73950 + }, + { + "epoch": 2.8557087146221862, + "grad_norm": 0.2589983344078064, + "learning_rate": 9.621993127147768e-06, + "loss": 0.0949, + "step": 73960 + }, + { + "epoch": 2.8560948299162128, + "grad_norm": 1.9424326419830322, + "learning_rate": 9.596252107545981e-06, + "loss": 0.194, + "step": 73970 + }, + { + "epoch": 2.8564809452102398, + "grad_norm": 1.8792887926101685, + "learning_rate": 9.570511087944193e-06, + "loss": 0.1259, + "step": 73980 + }, + { + "epoch": 2.8568670605042668, + "grad_norm": 0.29986900091171265, + "learning_rate": 9.544770068342406e-06, + "loss": 0.0672, + "step": 73990 + }, + { + "epoch": 2.8572531757982933, + "grad_norm": 0.5949634909629822, + "learning_rate": 9.519029048740621e-06, + "loss": 0.1991, + "step": 74000 + }, + { + "epoch": 2.8576392910923203, + "grad_norm": 0.7105257511138916, + "learning_rate": 9.493288029138835e-06, + "loss": 0.1142, + "step": 74010 + }, + { + "epoch": 2.858025406386347, + "grad_norm": 0.1612206995487213, + "learning_rate": 9.467547009537048e-06, + "loss": 0.0743, + "step": 74020 + }, + { + "epoch": 2.858411521680374, + "grad_norm": 0.96357262134552, + "learning_rate": 9.441805989935263e-06, + "loss": 0.1514, + "step": 74030 + }, + { + "epoch": 2.8587976369744004, + "grad_norm": 0.10844029486179352, + "learning_rate": 9.416064970333476e-06, + "loss": 0.0921, + "step": 74040 + }, + { + "epoch": 2.8591837522684274, + "grad_norm": 1.9250043630599976, + "learning_rate": 9.39032395073169e-06, + "loss": 0.2404, + "step": 74050 + }, + { + "epoch": 2.8595698675624543, + "grad_norm": 0.9182831048965454, + "learning_rate": 9.364582931129901e-06, + "loss": 0.1312, + "step": 74060 + }, + { + "epoch": 2.859955982856481, + "grad_norm": 0.45169830322265625, + "learning_rate": 9.338841911528116e-06, + "loss": 0.1484, + "step": 74070 + }, + { + "epoch": 2.860342098150508, + 
"grad_norm": 1.3599480390548706, + "learning_rate": 9.31310089192633e-06, + "loss": 0.1648, + "step": 74080 + }, + { + "epoch": 2.8607282134445344, + "grad_norm": 0.7859013676643372, + "learning_rate": 9.287359872324543e-06, + "loss": 0.1339, + "step": 74090 + }, + { + "epoch": 2.8611143287385614, + "grad_norm": 0.2625623047351837, + "learning_rate": 9.261618852722758e-06, + "loss": 0.1635, + "step": 74100 + }, + { + "epoch": 2.861500444032588, + "grad_norm": 3.6348588466644287, + "learning_rate": 9.235877833120971e-06, + "loss": 0.1958, + "step": 74110 + }, + { + "epoch": 2.861886559326615, + "grad_norm": 0.2913823127746582, + "learning_rate": 9.210136813519184e-06, + "loss": 0.1228, + "step": 74120 + }, + { + "epoch": 2.862272674620642, + "grad_norm": 0.8785780072212219, + "learning_rate": 9.184395793917398e-06, + "loss": 0.1284, + "step": 74130 + }, + { + "epoch": 2.8626587899146685, + "grad_norm": 0.20822127163410187, + "learning_rate": 9.158654774315611e-06, + "loss": 0.2853, + "step": 74140 + }, + { + "epoch": 2.8630449052086955, + "grad_norm": 0.12501594424247742, + "learning_rate": 9.132913754713824e-06, + "loss": 0.2773, + "step": 74150 + }, + { + "epoch": 2.863431020502722, + "grad_norm": 0.7225301861763, + "learning_rate": 9.107172735112038e-06, + "loss": 0.158, + "step": 74160 + }, + { + "epoch": 2.863817135796749, + "grad_norm": 0.15984690189361572, + "learning_rate": 9.081431715510253e-06, + "loss": 0.2105, + "step": 74170 + }, + { + "epoch": 2.8642032510907756, + "grad_norm": 0.9793509840965271, + "learning_rate": 9.055690695908466e-06, + "loss": 0.231, + "step": 74180 + }, + { + "epoch": 2.8645893663848025, + "grad_norm": 1.3881072998046875, + "learning_rate": 9.02994967630668e-06, + "loss": 0.1013, + "step": 74190 + }, + { + "epoch": 2.8649754816788295, + "grad_norm": 2.9956090450286865, + "learning_rate": 9.004208656704893e-06, + "loss": 0.1328, + "step": 74200 + }, + { + "epoch": 2.865361596972856, + "grad_norm": 1.1672799587249756, + "learning_rate": 8.978467637103106e-06, + "loss": 0.1454, + "step": 74210 + }, + { + "epoch": 2.8657477122668826, + "grad_norm": 0.5341854095458984, + "learning_rate": 8.952726617501319e-06, + "loss": 0.0753, + "step": 74220 + }, + { + "epoch": 2.8661338275609096, + "grad_norm": 0.19304673373699188, + "learning_rate": 8.926985597899532e-06, + "loss": 0.1962, + "step": 74230 + }, + { + "epoch": 2.8665199428549366, + "grad_norm": 0.33846932649612427, + "learning_rate": 8.901244578297747e-06, + "loss": 0.1089, + "step": 74240 + }, + { + "epoch": 2.866906058148963, + "grad_norm": 1.007706642150879, + "learning_rate": 8.87550355869596e-06, + "loss": 0.186, + "step": 74250 + }, + { + "epoch": 2.86729217344299, + "grad_norm": 1.7284183502197266, + "learning_rate": 8.849762539094174e-06, + "loss": 0.2445, + "step": 74260 + }, + { + "epoch": 2.867678288737017, + "grad_norm": 3.7386040687561035, + "learning_rate": 8.824021519492387e-06, + "loss": 0.1044, + "step": 74270 + }, + { + "epoch": 2.8680644040310437, + "grad_norm": 0.5097699761390686, + "learning_rate": 8.7982804998906e-06, + "loss": 0.1357, + "step": 74280 + }, + { + "epoch": 2.8684505193250702, + "grad_norm": 1.620544195175171, + "learning_rate": 8.772539480288814e-06, + "loss": 0.1567, + "step": 74290 + }, + { + "epoch": 2.868836634619097, + "grad_norm": 0.6678496599197388, + "learning_rate": 8.746798460687027e-06, + "loss": 0.1495, + "step": 74300 + }, + { + "epoch": 2.869222749913124, + "grad_norm": 0.5393665432929993, + "learning_rate": 8.721057441085242e-06, + "loss": 0.1611, + 
"step": 74310 + }, + { + "epoch": 2.8696088652071507, + "grad_norm": 2.98551344871521, + "learning_rate": 8.695316421483456e-06, + "loss": 0.2125, + "step": 74320 + }, + { + "epoch": 2.8699949805011777, + "grad_norm": 0.36715471744537354, + "learning_rate": 8.669575401881669e-06, + "loss": 0.0974, + "step": 74330 + }, + { + "epoch": 2.8703810957952043, + "grad_norm": 2.0747640132904053, + "learning_rate": 8.643834382279882e-06, + "loss": 0.1571, + "step": 74340 + }, + { + "epoch": 2.8707672110892313, + "grad_norm": 3.940426826477051, + "learning_rate": 8.618093362678097e-06, + "loss": 0.1706, + "step": 74350 + }, + { + "epoch": 2.871153326383258, + "grad_norm": 0.07064225524663925, + "learning_rate": 8.59235234307631e-06, + "loss": 0.1104, + "step": 74360 + }, + { + "epoch": 2.871539441677285, + "grad_norm": 2.7614259719848633, + "learning_rate": 8.566611323474522e-06, + "loss": 0.2461, + "step": 74370 + }, + { + "epoch": 2.871925556971312, + "grad_norm": 0.1670389324426651, + "learning_rate": 8.540870303872736e-06, + "loss": 0.0632, + "step": 74380 + }, + { + "epoch": 2.8723116722653383, + "grad_norm": 0.16744600236415863, + "learning_rate": 8.51512928427095e-06, + "loss": 0.0861, + "step": 74390 + }, + { + "epoch": 2.8726977875593653, + "grad_norm": 1.4196829795837402, + "learning_rate": 8.489388264669164e-06, + "loss": 0.1127, + "step": 74400 + }, + { + "epoch": 2.873083902853392, + "grad_norm": 0.9786797165870667, + "learning_rate": 8.463647245067377e-06, + "loss": 0.1925, + "step": 74410 + }, + { + "epoch": 2.873470018147419, + "grad_norm": 1.3412476778030396, + "learning_rate": 8.437906225465592e-06, + "loss": 0.2199, + "step": 74420 + }, + { + "epoch": 2.8738561334414454, + "grad_norm": 0.7068845629692078, + "learning_rate": 8.412165205863805e-06, + "loss": 0.1286, + "step": 74430 + }, + { + "epoch": 2.8742422487354724, + "grad_norm": 0.9745468497276306, + "learning_rate": 8.386424186262019e-06, + "loss": 0.2113, + "step": 74440 + }, + { + "epoch": 2.8746283640294994, + "grad_norm": 1.4095795154571533, + "learning_rate": 8.36068316666023e-06, + "loss": 0.1474, + "step": 74450 + }, + { + "epoch": 2.875014479323526, + "grad_norm": 0.5530760884284973, + "learning_rate": 8.334942147058445e-06, + "loss": 0.1351, + "step": 74460 + }, + { + "epoch": 2.875400594617553, + "grad_norm": 1.8106993436813354, + "learning_rate": 8.309201127456659e-06, + "loss": 0.1643, + "step": 74470 + }, + { + "epoch": 2.8757867099115795, + "grad_norm": 0.3154134154319763, + "learning_rate": 8.283460107854872e-06, + "loss": 0.052, + "step": 74480 + }, + { + "epoch": 2.8761728252056065, + "grad_norm": 0.2410293072462082, + "learning_rate": 8.257719088253087e-06, + "loss": 0.0817, + "step": 74490 + }, + { + "epoch": 2.876558940499633, + "grad_norm": 0.7377256155014038, + "learning_rate": 8.2319780686513e-06, + "loss": 0.1561, + "step": 74500 + }, + { + "epoch": 2.87694505579366, + "grad_norm": 3.0138823986053467, + "learning_rate": 8.206237049049514e-06, + "loss": 0.1963, + "step": 74510 + }, + { + "epoch": 2.877331171087687, + "grad_norm": 2.0430660247802734, + "learning_rate": 8.180496029447727e-06, + "loss": 0.0937, + "step": 74520 + }, + { + "epoch": 2.8777172863817135, + "grad_norm": 0.4840744435787201, + "learning_rate": 8.15475500984594e-06, + "loss": 0.1026, + "step": 74530 + }, + { + "epoch": 2.8781034016757405, + "grad_norm": 1.995553731918335, + "learning_rate": 8.129013990244154e-06, + "loss": 0.1112, + "step": 74540 + }, + { + "epoch": 2.878489516969767, + "grad_norm": 2.139451742172241, + 
"learning_rate": 8.103272970642367e-06, + "loss": 0.2066, + "step": 74550 + }, + { + "epoch": 2.878875632263794, + "grad_norm": 1.6176917552947998, + "learning_rate": 8.077531951040582e-06, + "loss": 0.1588, + "step": 74560 + }, + { + "epoch": 2.8792617475578206, + "grad_norm": 0.965374231338501, + "learning_rate": 8.051790931438795e-06, + "loss": 0.1274, + "step": 74570 + }, + { + "epoch": 2.8796478628518476, + "grad_norm": 2.842817544937134, + "learning_rate": 8.026049911837009e-06, + "loss": 0.1503, + "step": 74580 + }, + { + "epoch": 2.8800339781458746, + "grad_norm": 0.3894018232822418, + "learning_rate": 8.000308892235222e-06, + "loss": 0.1052, + "step": 74590 + }, + { + "epoch": 2.880420093439901, + "grad_norm": 1.3701835870742798, + "learning_rate": 7.974567872633435e-06, + "loss": 0.2016, + "step": 74600 + }, + { + "epoch": 2.8808062087339277, + "grad_norm": 1.6000498533248901, + "learning_rate": 7.948826853031648e-06, + "loss": 0.1766, + "step": 74610 + }, + { + "epoch": 2.8811923240279547, + "grad_norm": 0.3492911159992218, + "learning_rate": 7.923085833429862e-06, + "loss": 0.2132, + "step": 74620 + }, + { + "epoch": 2.8815784393219817, + "grad_norm": 2.516202926635742, + "learning_rate": 7.897344813828077e-06, + "loss": 0.1888, + "step": 74630 + }, + { + "epoch": 2.881964554616008, + "grad_norm": 1.8804141283035278, + "learning_rate": 7.87160379422629e-06, + "loss": 0.2125, + "step": 74640 + }, + { + "epoch": 2.882350669910035, + "grad_norm": 1.4559141397476196, + "learning_rate": 7.845862774624503e-06, + "loss": 0.2118, + "step": 74650 + }, + { + "epoch": 2.882736785204062, + "grad_norm": 1.0181102752685547, + "learning_rate": 7.820121755022717e-06, + "loss": 0.2107, + "step": 74660 + }, + { + "epoch": 2.8831229004980887, + "grad_norm": 0.8501084446907043, + "learning_rate": 7.79438073542093e-06, + "loss": 0.1405, + "step": 74670 + }, + { + "epoch": 2.8835090157921153, + "grad_norm": 0.6537768244743347, + "learning_rate": 7.768639715819143e-06, + "loss": 0.0951, + "step": 74680 + }, + { + "epoch": 2.8838951310861423, + "grad_norm": 1.6745322942733765, + "learning_rate": 7.742898696217357e-06, + "loss": 0.1451, + "step": 74690 + }, + { + "epoch": 2.8842812463801693, + "grad_norm": 0.046329010277986526, + "learning_rate": 7.717157676615572e-06, + "loss": 0.0637, + "step": 74700 + }, + { + "epoch": 2.884667361674196, + "grad_norm": 1.1959903240203857, + "learning_rate": 7.691416657013785e-06, + "loss": 0.1841, + "step": 74710 + }, + { + "epoch": 2.885053476968223, + "grad_norm": 2.5898563861846924, + "learning_rate": 7.665675637411998e-06, + "loss": 0.1309, + "step": 74720 + }, + { + "epoch": 2.8854395922622498, + "grad_norm": 0.05841336399316788, + "learning_rate": 7.639934617810212e-06, + "loss": 0.0825, + "step": 74730 + }, + { + "epoch": 2.8858257075562763, + "grad_norm": 1.5657339096069336, + "learning_rate": 7.614193598208426e-06, + "loss": 0.0816, + "step": 74740 + }, + { + "epoch": 2.886211822850303, + "grad_norm": 1.2975622415542603, + "learning_rate": 7.588452578606639e-06, + "loss": 0.2084, + "step": 74750 + }, + { + "epoch": 2.88659793814433, + "grad_norm": 0.8901941180229187, + "learning_rate": 7.562711559004852e-06, + "loss": 0.0946, + "step": 74760 + }, + { + "epoch": 2.886984053438357, + "grad_norm": 0.13277596235275269, + "learning_rate": 7.5369705394030665e-06, + "loss": 0.1313, + "step": 74770 + }, + { + "epoch": 2.8873701687323834, + "grad_norm": 0.6199597120285034, + "learning_rate": 7.51122951980128e-06, + "loss": 0.1139, + "step": 74780 + }, + { + "epoch": 
2.8877562840264104, + "grad_norm": 0.041490960866212845, + "learning_rate": 7.485488500199493e-06, + "loss": 0.0945, + "step": 74790 + }, + { + "epoch": 2.888142399320437, + "grad_norm": 0.06974820047616959, + "learning_rate": 7.4597474805977065e-06, + "loss": 0.1619, + "step": 74800 + }, + { + "epoch": 2.888528514614464, + "grad_norm": 0.5965213775634766, + "learning_rate": 7.434006460995921e-06, + "loss": 0.1294, + "step": 74810 + }, + { + "epoch": 2.8889146299084905, + "grad_norm": 1.50202214717865, + "learning_rate": 7.408265441394134e-06, + "loss": 0.2038, + "step": 74820 + }, + { + "epoch": 2.8893007452025175, + "grad_norm": 0.2680859863758087, + "learning_rate": 7.382524421792347e-06, + "loss": 0.1566, + "step": 74830 + }, + { + "epoch": 2.8896868604965444, + "grad_norm": 0.8714569211006165, + "learning_rate": 7.3567834021905606e-06, + "loss": 0.1103, + "step": 74840 + }, + { + "epoch": 2.890072975790571, + "grad_norm": 1.0597981214523315, + "learning_rate": 7.331042382588775e-06, + "loss": 0.1141, + "step": 74850 + }, + { + "epoch": 2.890459091084598, + "grad_norm": 1.1880320310592651, + "learning_rate": 7.305301362986988e-06, + "loss": 0.1203, + "step": 74860 + }, + { + "epoch": 2.8908452063786245, + "grad_norm": 0.8900028467178345, + "learning_rate": 7.279560343385201e-06, + "loss": 0.2258, + "step": 74870 + }, + { + "epoch": 2.8912313216726515, + "grad_norm": 0.6879392266273499, + "learning_rate": 7.253819323783416e-06, + "loss": 0.1561, + "step": 74880 + }, + { + "epoch": 2.891617436966678, + "grad_norm": 0.3370697498321533, + "learning_rate": 7.228078304181629e-06, + "loss": 0.2218, + "step": 74890 + }, + { + "epoch": 2.892003552260705, + "grad_norm": 2.0778462886810303, + "learning_rate": 7.202337284579842e-06, + "loss": 0.2195, + "step": 74900 + }, + { + "epoch": 2.892389667554732, + "grad_norm": 0.09895554929971695, + "learning_rate": 7.1765962649780554e-06, + "loss": 0.148, + "step": 74910 + }, + { + "epoch": 2.8927757828487586, + "grad_norm": 3.897048234939575, + "learning_rate": 7.1508552453762704e-06, + "loss": 0.1535, + "step": 74920 + }, + { + "epoch": 2.8931618981427856, + "grad_norm": 0.4604206383228302, + "learning_rate": 7.125114225774483e-06, + "loss": 0.1037, + "step": 74930 + }, + { + "epoch": 2.893548013436812, + "grad_norm": 1.1230722665786743, + "learning_rate": 7.099373206172696e-06, + "loss": 0.2625, + "step": 74940 + }, + { + "epoch": 2.893934128730839, + "grad_norm": 0.0566195473074913, + "learning_rate": 7.073632186570911e-06, + "loss": 0.1807, + "step": 74950 + }, + { + "epoch": 2.8943202440248657, + "grad_norm": 0.08504597842693329, + "learning_rate": 7.0478911669691245e-06, + "loss": 0.1639, + "step": 74960 + }, + { + "epoch": 2.8947063593188926, + "grad_norm": 1.4992380142211914, + "learning_rate": 7.022150147367337e-06, + "loss": 0.219, + "step": 74970 + }, + { + "epoch": 2.8950924746129196, + "grad_norm": 1.0938425064086914, + "learning_rate": 6.99640912776555e-06, + "loss": 0.1467, + "step": 74980 + }, + { + "epoch": 2.895478589906946, + "grad_norm": 2.4818408489227295, + "learning_rate": 6.970668108163765e-06, + "loss": 0.107, + "step": 74990 + }, + { + "epoch": 2.895864705200973, + "grad_norm": 0.18132860958576202, + "learning_rate": 6.944927088561979e-06, + "loss": 0.0966, + "step": 75000 + }, + { + "epoch": 2.8962508204949997, + "grad_norm": 1.1011055707931519, + "learning_rate": 6.919186068960191e-06, + "loss": 0.177, + "step": 75010 + }, + { + "epoch": 2.8966369357890267, + "grad_norm": 0.2061525285243988, + "learning_rate": 
6.893445049358406e-06, + "loss": 0.1528, + "step": 75020 + }, + { + "epoch": 2.8970230510830532, + "grad_norm": 0.651801347732544, + "learning_rate": 6.867704029756619e-06, + "loss": 0.2042, + "step": 75030 + }, + { + "epoch": 2.8974091663770802, + "grad_norm": 0.5397346019744873, + "learning_rate": 6.841963010154833e-06, + "loss": 0.2099, + "step": 75040 + }, + { + "epoch": 2.8977952816711072, + "grad_norm": 2.301893949508667, + "learning_rate": 6.816221990553045e-06, + "loss": 0.107, + "step": 75050 + }, + { + "epoch": 2.8981813969651338, + "grad_norm": 0.7261497378349304, + "learning_rate": 6.79048097095126e-06, + "loss": 0.222, + "step": 75060 + }, + { + "epoch": 2.8985675122591603, + "grad_norm": 2.5633339881896973, + "learning_rate": 6.7647399513494735e-06, + "loss": 0.2573, + "step": 75070 + }, + { + "epoch": 2.8989536275531873, + "grad_norm": 1.5711263418197632, + "learning_rate": 6.738998931747687e-06, + "loss": 0.22, + "step": 75080 + }, + { + "epoch": 2.8993397428472143, + "grad_norm": 0.3720754086971283, + "learning_rate": 6.713257912145901e-06, + "loss": 0.2248, + "step": 75090 + }, + { + "epoch": 2.899725858141241, + "grad_norm": 6.319779396057129, + "learning_rate": 6.687516892544114e-06, + "loss": 0.1005, + "step": 75100 + }, + { + "epoch": 2.900111973435268, + "grad_norm": 0.8868811130523682, + "learning_rate": 6.661775872942328e-06, + "loss": 0.2097, + "step": 75110 + }, + { + "epoch": 2.900498088729295, + "grad_norm": 0.3579706847667694, + "learning_rate": 6.636034853340541e-06, + "loss": 0.1209, + "step": 75120 + }, + { + "epoch": 2.9008842040233214, + "grad_norm": 2.9907472133636475, + "learning_rate": 6.610293833738755e-06, + "loss": 0.1789, + "step": 75130 + }, + { + "epoch": 2.901270319317348, + "grad_norm": 0.06285133957862854, + "learning_rate": 6.584552814136968e-06, + "loss": 0.1437, + "step": 75140 + }, + { + "epoch": 2.901656434611375, + "grad_norm": 1.304377555847168, + "learning_rate": 6.558811794535182e-06, + "loss": 0.1315, + "step": 75150 + }, + { + "epoch": 2.902042549905402, + "grad_norm": 0.7678247690200806, + "learning_rate": 6.533070774933396e-06, + "loss": 0.1163, + "step": 75160 + }, + { + "epoch": 2.9024286651994284, + "grad_norm": 0.30619505047798157, + "learning_rate": 6.507329755331609e-06, + "loss": 0.0777, + "step": 75170 + }, + { + "epoch": 2.9028147804934554, + "grad_norm": 2.499606132507324, + "learning_rate": 6.4815887357298225e-06, + "loss": 0.2395, + "step": 75180 + }, + { + "epoch": 2.9032008957874824, + "grad_norm": 0.9318193793296814, + "learning_rate": 6.455847716128036e-06, + "loss": 0.152, + "step": 75190 + }, + { + "epoch": 2.903587011081509, + "grad_norm": 1.8113256692886353, + "learning_rate": 6.43010669652625e-06, + "loss": 0.1505, + "step": 75200 + }, + { + "epoch": 2.9039731263755355, + "grad_norm": 1.9092323780059814, + "learning_rate": 6.404365676924463e-06, + "loss": 0.2304, + "step": 75210 + }, + { + "epoch": 2.9043592416695625, + "grad_norm": 0.5695223212242126, + "learning_rate": 6.3786246573226765e-06, + "loss": 0.1125, + "step": 75220 + }, + { + "epoch": 2.9047453569635895, + "grad_norm": 2.3485186100006104, + "learning_rate": 6.35288363772089e-06, + "loss": 0.2701, + "step": 75230 + }, + { + "epoch": 2.905131472257616, + "grad_norm": 1.5487502813339233, + "learning_rate": 6.327142618119104e-06, + "loss": 0.1471, + "step": 75240 + }, + { + "epoch": 2.905517587551643, + "grad_norm": 2.2842421531677246, + "learning_rate": 6.301401598517317e-06, + "loss": 0.1211, + "step": 75250 + }, + { + "epoch": 2.9059037028456696, + 
"grad_norm": 0.5499130487442017, + "learning_rate": 6.275660578915531e-06, + "loss": 0.1515, + "step": 75260 + }, + { + "epoch": 2.9062898181396966, + "grad_norm": 0.7656468749046326, + "learning_rate": 6.249919559313745e-06, + "loss": 0.0743, + "step": 75270 + }, + { + "epoch": 2.906675933433723, + "grad_norm": 1.4212474822998047, + "learning_rate": 6.224178539711958e-06, + "loss": 0.1706, + "step": 75280 + }, + { + "epoch": 2.90706204872775, + "grad_norm": 0.5614858269691467, + "learning_rate": 6.1984375201101714e-06, + "loss": 0.1551, + "step": 75290 + }, + { + "epoch": 2.907448164021777, + "grad_norm": 0.2481578141450882, + "learning_rate": 6.172696500508386e-06, + "loss": 0.1594, + "step": 75300 + }, + { + "epoch": 2.9078342793158036, + "grad_norm": 0.774730920791626, + "learning_rate": 6.1469554809066e-06, + "loss": 0.2387, + "step": 75310 + }, + { + "epoch": 2.9082203946098306, + "grad_norm": 0.12540464103221893, + "learning_rate": 6.121214461304812e-06, + "loss": 0.1436, + "step": 75320 + }, + { + "epoch": 2.908606509903857, + "grad_norm": 0.36908024549484253, + "learning_rate": 6.095473441703026e-06, + "loss": 0.1046, + "step": 75330 + }, + { + "epoch": 2.908992625197884, + "grad_norm": 0.5687906742095947, + "learning_rate": 6.06973242210124e-06, + "loss": 0.1454, + "step": 75340 + }, + { + "epoch": 2.9093787404919107, + "grad_norm": 1.373570442199707, + "learning_rate": 6.043991402499454e-06, + "loss": 0.203, + "step": 75350 + }, + { + "epoch": 2.9097648557859377, + "grad_norm": 0.40615278482437134, + "learning_rate": 6.018250382897666e-06, + "loss": 0.0836, + "step": 75360 + }, + { + "epoch": 2.9101509710799647, + "grad_norm": 1.5747426748275757, + "learning_rate": 5.9925093632958805e-06, + "loss": 0.1206, + "step": 75370 + }, + { + "epoch": 2.9105370863739912, + "grad_norm": 0.9298601746559143, + "learning_rate": 5.966768343694094e-06, + "loss": 0.1219, + "step": 75380 + }, + { + "epoch": 2.910923201668018, + "grad_norm": 0.7346408367156982, + "learning_rate": 5.941027324092308e-06, + "loss": 0.1776, + "step": 75390 + }, + { + "epoch": 2.9113093169620448, + "grad_norm": 0.6600155234336853, + "learning_rate": 5.915286304490521e-06, + "loss": 0.1511, + "step": 75400 + }, + { + "epoch": 2.9116954322560717, + "grad_norm": 0.3379638195037842, + "learning_rate": 5.8895452848887345e-06, + "loss": 0.1602, + "step": 75410 + }, + { + "epoch": 2.9120815475500983, + "grad_norm": 0.7085258960723877, + "learning_rate": 5.863804265286949e-06, + "loss": 0.0688, + "step": 75420 + }, + { + "epoch": 2.9124676628441253, + "grad_norm": 0.379912793636322, + "learning_rate": 5.838063245685162e-06, + "loss": 0.2218, + "step": 75430 + }, + { + "epoch": 2.9128537781381523, + "grad_norm": 1.7112829685211182, + "learning_rate": 5.812322226083375e-06, + "loss": 0.0864, + "step": 75440 + }, + { + "epoch": 2.913239893432179, + "grad_norm": 0.9781870245933533, + "learning_rate": 5.786581206481589e-06, + "loss": 0.1837, + "step": 75450 + }, + { + "epoch": 2.913626008726206, + "grad_norm": 0.6391132473945618, + "learning_rate": 5.760840186879803e-06, + "loss": 0.0902, + "step": 75460 + }, + { + "epoch": 2.9140121240202324, + "grad_norm": 1.400060772895813, + "learning_rate": 5.735099167278016e-06, + "loss": 0.1623, + "step": 75470 + }, + { + "epoch": 2.9143982393142593, + "grad_norm": 1.2771530151367188, + "learning_rate": 5.709358147676229e-06, + "loss": 0.1589, + "step": 75480 + }, + { + "epoch": 2.914784354608286, + "grad_norm": 0.37462353706359863, + "learning_rate": 5.683617128074444e-06, + "loss": 0.1241, 
+ "step": 75490 + }, + { + "epoch": 2.915170469902313, + "grad_norm": 1.5162663459777832, + "learning_rate": 5.657876108472657e-06, + "loss": 0.0891, + "step": 75500 + }, + { + "epoch": 2.91555658519634, + "grad_norm": 2.389887809753418, + "learning_rate": 5.63213508887087e-06, + "loss": 0.1397, + "step": 75510 + }, + { + "epoch": 2.9159427004903664, + "grad_norm": 0.4746516942977905, + "learning_rate": 5.6063940692690835e-06, + "loss": 0.1362, + "step": 75520 + }, + { + "epoch": 2.916328815784393, + "grad_norm": 1.1355524063110352, + "learning_rate": 5.580653049667298e-06, + "loss": 0.1772, + "step": 75530 + }, + { + "epoch": 2.91671493107842, + "grad_norm": 0.18736037611961365, + "learning_rate": 5.554912030065512e-06, + "loss": 0.1449, + "step": 75540 + }, + { + "epoch": 2.917101046372447, + "grad_norm": 2.575685501098633, + "learning_rate": 5.529171010463724e-06, + "loss": 0.1871, + "step": 75550 + }, + { + "epoch": 2.9174871616664735, + "grad_norm": 1.01112699508667, + "learning_rate": 5.5034299908619385e-06, + "loss": 0.1588, + "step": 75560 + }, + { + "epoch": 2.9178732769605005, + "grad_norm": 1.8929286003112793, + "learning_rate": 5.477688971260152e-06, + "loss": 0.1564, + "step": 75570 + }, + { + "epoch": 2.9182593922545275, + "grad_norm": 0.21724069118499756, + "learning_rate": 5.451947951658366e-06, + "loss": 0.2152, + "step": 75580 + }, + { + "epoch": 2.918645507548554, + "grad_norm": 1.2024441957473755, + "learning_rate": 5.426206932056578e-06, + "loss": 0.3032, + "step": 75590 + }, + { + "epoch": 2.9190316228425806, + "grad_norm": 0.14324747025966644, + "learning_rate": 5.4004659124547925e-06, + "loss": 0.0674, + "step": 75600 + }, + { + "epoch": 2.9194177381366075, + "grad_norm": 1.427650809288025, + "learning_rate": 5.374724892853006e-06, + "loss": 0.1086, + "step": 75610 + }, + { + "epoch": 2.9198038534306345, + "grad_norm": 1.2401851415634155, + "learning_rate": 5.34898387325122e-06, + "loss": 0.0944, + "step": 75620 + }, + { + "epoch": 2.920189968724661, + "grad_norm": 1.4218640327453613, + "learning_rate": 5.323242853649433e-06, + "loss": 0.1386, + "step": 75630 + }, + { + "epoch": 2.920576084018688, + "grad_norm": 1.0168864727020264, + "learning_rate": 5.297501834047647e-06, + "loss": 0.1291, + "step": 75640 + }, + { + "epoch": 2.9209621993127146, + "grad_norm": 0.2599659860134125, + "learning_rate": 5.271760814445861e-06, + "loss": 0.1055, + "step": 75650 + }, + { + "epoch": 2.9213483146067416, + "grad_norm": 2.1232173442840576, + "learning_rate": 5.246019794844074e-06, + "loss": 0.1521, + "step": 75660 + }, + { + "epoch": 2.921734429900768, + "grad_norm": 0.6988056302070618, + "learning_rate": 5.220278775242287e-06, + "loss": 0.1584, + "step": 75670 + }, + { + "epoch": 2.922120545194795, + "grad_norm": 1.766686201095581, + "learning_rate": 5.194537755640501e-06, + "loss": 0.1888, + "step": 75680 + }, + { + "epoch": 2.922506660488822, + "grad_norm": 1.17173433303833, + "learning_rate": 5.168796736038715e-06, + "loss": 0.1362, + "step": 75690 + }, + { + "epoch": 2.9228927757828487, + "grad_norm": 0.10190659016370773, + "learning_rate": 5.143055716436929e-06, + "loss": 0.1048, + "step": 75700 + }, + { + "epoch": 2.9232788910768757, + "grad_norm": 0.24241623282432556, + "learning_rate": 5.1173146968351415e-06, + "loss": 0.0925, + "step": 75710 + }, + { + "epoch": 2.923665006370902, + "grad_norm": 0.027136487886309624, + "learning_rate": 5.091573677233356e-06, + "loss": 0.1881, + "step": 75720 + }, + { + "epoch": 2.924051121664929, + "grad_norm": 0.7799992561340332, + 
"learning_rate": 5.065832657631569e-06, + "loss": 0.1733, + "step": 75730 + }, + { + "epoch": 2.9244372369589557, + "grad_norm": 0.11681391298770905, + "learning_rate": 5.040091638029783e-06, + "loss": 0.0768, + "step": 75740 + }, + { + "epoch": 2.9248233522529827, + "grad_norm": 3.007784366607666, + "learning_rate": 5.014350618427996e-06, + "loss": 0.2014, + "step": 75750 + }, + { + "epoch": 2.9252094675470097, + "grad_norm": 0.8907320499420166, + "learning_rate": 4.98860959882621e-06, + "loss": 0.3129, + "step": 75760 + }, + { + "epoch": 2.9255955828410363, + "grad_norm": 0.960918128490448, + "learning_rate": 4.962868579224423e-06, + "loss": 0.0904, + "step": 75770 + }, + { + "epoch": 2.9259816981350633, + "grad_norm": 1.1451547145843506, + "learning_rate": 4.937127559622637e-06, + "loss": 0.1966, + "step": 75780 + }, + { + "epoch": 2.92636781342909, + "grad_norm": 0.0598277747631073, + "learning_rate": 4.9113865400208505e-06, + "loss": 0.178, + "step": 75790 + }, + { + "epoch": 2.926753928723117, + "grad_norm": 2.012179374694824, + "learning_rate": 4.885645520419064e-06, + "loss": 0.1208, + "step": 75800 + }, + { + "epoch": 2.9271400440171433, + "grad_norm": 0.29047757387161255, + "learning_rate": 4.859904500817278e-06, + "loss": 0.1803, + "step": 75810 + }, + { + "epoch": 2.9275261593111703, + "grad_norm": 0.23592634499073029, + "learning_rate": 4.834163481215491e-06, + "loss": 0.1307, + "step": 75820 + }, + { + "epoch": 2.9279122746051973, + "grad_norm": 0.6524437069892883, + "learning_rate": 4.808422461613705e-06, + "loss": 0.1102, + "step": 75830 + }, + { + "epoch": 2.928298389899224, + "grad_norm": 4.513589382171631, + "learning_rate": 4.782681442011918e-06, + "loss": 0.3142, + "step": 75840 + }, + { + "epoch": 2.928684505193251, + "grad_norm": 0.2516826093196869, + "learning_rate": 4.756940422410132e-06, + "loss": 0.1691, + "step": 75850 + }, + { + "epoch": 2.9290706204872774, + "grad_norm": 0.05350786820054054, + "learning_rate": 4.731199402808345e-06, + "loss": 0.1919, + "step": 75860 + }, + { + "epoch": 2.9294567357813044, + "grad_norm": 0.23492521047592163, + "learning_rate": 4.705458383206559e-06, + "loss": 0.1481, + "step": 75870 + }, + { + "epoch": 2.929842851075331, + "grad_norm": 1.1959890127182007, + "learning_rate": 4.679717363604773e-06, + "loss": 0.1008, + "step": 75880 + }, + { + "epoch": 2.930228966369358, + "grad_norm": 1.4260644912719727, + "learning_rate": 4.653976344002986e-06, + "loss": 0.0918, + "step": 75890 + }, + { + "epoch": 2.930615081663385, + "grad_norm": 1.236479640007019, + "learning_rate": 4.6282353244011995e-06, + "loss": 0.0863, + "step": 75900 + }, + { + "epoch": 2.9310011969574115, + "grad_norm": 1.4055296182632446, + "learning_rate": 4.602494304799413e-06, + "loss": 0.176, + "step": 75910 + }, + { + "epoch": 2.931387312251438, + "grad_norm": 1.5062698125839233, + "learning_rate": 4.576753285197627e-06, + "loss": 0.1485, + "step": 75920 + }, + { + "epoch": 2.931773427545465, + "grad_norm": 0.586919367313385, + "learning_rate": 4.551012265595841e-06, + "loss": 0.1995, + "step": 75930 + }, + { + "epoch": 2.932159542839492, + "grad_norm": 0.755504310131073, + "learning_rate": 4.525271245994054e-06, + "loss": 0.1654, + "step": 75940 + }, + { + "epoch": 2.9325456581335185, + "grad_norm": 0.12576620280742645, + "learning_rate": 4.499530226392268e-06, + "loss": 0.0948, + "step": 75950 + }, + { + "epoch": 2.9329317734275455, + "grad_norm": 0.9442972540855408, + "learning_rate": 4.473789206790481e-06, + "loss": 0.2244, + "step": 75960 + }, + { + "epoch": 
2.9333178887215725, + "grad_norm": 0.026888804510235786, + "learning_rate": 4.448048187188695e-06, + "loss": 0.0444, + "step": 75970 + }, + { + "epoch": 2.933704004015599, + "grad_norm": 0.789533257484436, + "learning_rate": 4.422307167586908e-06, + "loss": 0.1891, + "step": 75980 + }, + { + "epoch": 2.9340901193096256, + "grad_norm": 1.3214176893234253, + "learning_rate": 4.396566147985122e-06, + "loss": 0.1072, + "step": 75990 + }, + { + "epoch": 2.9344762346036526, + "grad_norm": 1.0256754159927368, + "learning_rate": 4.370825128383335e-06, + "loss": 0.198, + "step": 76000 + }, + { + "epoch": 2.9348623498976796, + "grad_norm": 0.25208428502082825, + "learning_rate": 4.345084108781549e-06, + "loss": 0.1881, + "step": 76010 + }, + { + "epoch": 2.935248465191706, + "grad_norm": 1.5642906427383423, + "learning_rate": 4.319343089179763e-06, + "loss": 0.1642, + "step": 76020 + }, + { + "epoch": 2.935634580485733, + "grad_norm": 0.7479145526885986, + "learning_rate": 4.293602069577976e-06, + "loss": 0.1752, + "step": 76030 + }, + { + "epoch": 2.93602069577976, + "grad_norm": 0.16477616131305695, + "learning_rate": 4.26786104997619e-06, + "loss": 0.1692, + "step": 76040 + }, + { + "epoch": 2.9364068110737866, + "grad_norm": 1.2980380058288574, + "learning_rate": 4.242120030374403e-06, + "loss": 0.0983, + "step": 76050 + }, + { + "epoch": 2.936792926367813, + "grad_norm": 1.4535443782806396, + "learning_rate": 4.216379010772617e-06, + "loss": 0.1804, + "step": 76060 + }, + { + "epoch": 2.93717904166184, + "grad_norm": 0.6782435774803162, + "learning_rate": 4.19063799117083e-06, + "loss": 0.0837, + "step": 76070 + }, + { + "epoch": 2.937565156955867, + "grad_norm": 0.9832270741462708, + "learning_rate": 4.164896971569044e-06, + "loss": 0.2864, + "step": 76080 + }, + { + "epoch": 2.9379512722498937, + "grad_norm": 1.398342251777649, + "learning_rate": 4.1391559519672575e-06, + "loss": 0.1506, + "step": 76090 + }, + { + "epoch": 2.9383373875439207, + "grad_norm": 1.2533401250839233, + "learning_rate": 4.113414932365471e-06, + "loss": 0.1198, + "step": 76100 + }, + { + "epoch": 2.9387235028379473, + "grad_norm": 3.0472609996795654, + "learning_rate": 4.087673912763685e-06, + "loss": 0.1057, + "step": 76110 + }, + { + "epoch": 2.9391096181319742, + "grad_norm": 0.6309196352958679, + "learning_rate": 4.061932893161898e-06, + "loss": 0.043, + "step": 76120 + }, + { + "epoch": 2.939495733426001, + "grad_norm": 0.6146018505096436, + "learning_rate": 4.036191873560112e-06, + "loss": 0.2575, + "step": 76130 + }, + { + "epoch": 2.939881848720028, + "grad_norm": 0.9969425797462463, + "learning_rate": 4.010450853958325e-06, + "loss": 0.0714, + "step": 76140 + }, + { + "epoch": 2.9402679640140548, + "grad_norm": 1.1078910827636719, + "learning_rate": 3.984709834356539e-06, + "loss": 0.058, + "step": 76150 + }, + { + "epoch": 2.9406540793080813, + "grad_norm": 0.45345064997673035, + "learning_rate": 3.958968814754753e-06, + "loss": 0.2731, + "step": 76160 + }, + { + "epoch": 2.9410401946021083, + "grad_norm": 0.6446991562843323, + "learning_rate": 3.9332277951529665e-06, + "loss": 0.0851, + "step": 76170 + }, + { + "epoch": 2.941426309896135, + "grad_norm": 0.5553757548332214, + "learning_rate": 3.90748677555118e-06, + "loss": 0.1686, + "step": 76180 + }, + { + "epoch": 2.941812425190162, + "grad_norm": 0.46511319279670715, + "learning_rate": 3.881745755949393e-06, + "loss": 0.3051, + "step": 76190 + }, + { + "epoch": 2.9421985404841884, + "grad_norm": 0.7858741283416748, + "learning_rate": 
3.856004736347607e-06, + "loss": 0.0879, + "step": 76200 + }, + { + "epoch": 2.9425846557782154, + "grad_norm": 2.4360992908477783, + "learning_rate": 3.830263716745821e-06, + "loss": 0.351, + "step": 76210 + }, + { + "epoch": 2.9429707710722424, + "grad_norm": 0.8845987915992737, + "learning_rate": 3.804522697144034e-06, + "loss": 0.1261, + "step": 76220 + }, + { + "epoch": 2.943356886366269, + "grad_norm": 0.07230502367019653, + "learning_rate": 3.7787816775422473e-06, + "loss": 0.1706, + "step": 76230 + }, + { + "epoch": 2.943743001660296, + "grad_norm": 1.0335034132003784, + "learning_rate": 3.753040657940461e-06, + "loss": 0.1003, + "step": 76240 + }, + { + "epoch": 2.9441291169543224, + "grad_norm": 0.17918971180915833, + "learning_rate": 3.727299638338675e-06, + "loss": 0.1177, + "step": 76250 + }, + { + "epoch": 2.9445152322483494, + "grad_norm": 0.28648892045021057, + "learning_rate": 3.701558618736888e-06, + "loss": 0.1744, + "step": 76260 + }, + { + "epoch": 2.944901347542376, + "grad_norm": 2.5599138736724854, + "learning_rate": 3.675817599135102e-06, + "loss": 0.15, + "step": 76270 + }, + { + "epoch": 2.945287462836403, + "grad_norm": 0.5117394924163818, + "learning_rate": 3.650076579533315e-06, + "loss": 0.1638, + "step": 76280 + }, + { + "epoch": 2.94567357813043, + "grad_norm": 1.0782241821289062, + "learning_rate": 3.6243355599315292e-06, + "loss": 0.326, + "step": 76290 + }, + { + "epoch": 2.9460596934244565, + "grad_norm": 0.049037184566259384, + "learning_rate": 3.598594540329742e-06, + "loss": 0.098, + "step": 76300 + }, + { + "epoch": 2.9464458087184835, + "grad_norm": 0.6411057710647583, + "learning_rate": 3.5728535207279563e-06, + "loss": 0.1082, + "step": 76310 + }, + { + "epoch": 2.94683192401251, + "grad_norm": 0.4438591003417969, + "learning_rate": 3.54711250112617e-06, + "loss": 0.1103, + "step": 76320 + }, + { + "epoch": 2.947218039306537, + "grad_norm": 0.2660674452781677, + "learning_rate": 3.5213714815243833e-06, + "loss": 0.1961, + "step": 76330 + }, + { + "epoch": 2.9476041546005636, + "grad_norm": 2.3970565795898438, + "learning_rate": 3.495630461922597e-06, + "loss": 0.2066, + "step": 76340 + }, + { + "epoch": 2.9479902698945906, + "grad_norm": 0.5123302936553955, + "learning_rate": 3.4698894423208104e-06, + "loss": 0.2231, + "step": 76350 + }, + { + "epoch": 2.9483763851886176, + "grad_norm": 0.026459665969014168, + "learning_rate": 3.444148422719024e-06, + "loss": 0.1865, + "step": 76360 + }, + { + "epoch": 2.948762500482644, + "grad_norm": 1.8527942895889282, + "learning_rate": 3.4184074031172374e-06, + "loss": 0.3425, + "step": 76370 + }, + { + "epoch": 2.9491486157766706, + "grad_norm": 0.3126128315925598, + "learning_rate": 3.392666383515451e-06, + "loss": 0.1047, + "step": 76380 + }, + { + "epoch": 2.9495347310706976, + "grad_norm": 0.14081552624702454, + "learning_rate": 3.3669253639136653e-06, + "loss": 0.1987, + "step": 76390 + }, + { + "epoch": 2.9499208463647246, + "grad_norm": 0.7128289341926575, + "learning_rate": 3.3411843443118782e-06, + "loss": 0.1398, + "step": 76400 + }, + { + "epoch": 2.950306961658751, + "grad_norm": 1.8297266960144043, + "learning_rate": 3.3154433247100924e-06, + "loss": 0.1874, + "step": 76410 + }, + { + "epoch": 2.950693076952778, + "grad_norm": 2.2352466583251953, + "learning_rate": 3.2897023051083053e-06, + "loss": 0.1561, + "step": 76420 + }, + { + "epoch": 2.951079192246805, + "grad_norm": 0.9707785844802856, + "learning_rate": 3.2639612855065194e-06, + "loss": 0.2287, + "step": 76430 + }, + { + "epoch": 
2.9514653075408317, + "grad_norm": 1.408307433128357, + "learning_rate": 3.2382202659047323e-06, + "loss": 0.2413, + "step": 76440 + }, + { + "epoch": 2.9518514228348582, + "grad_norm": 1.706040382385254, + "learning_rate": 3.2124792463029465e-06, + "loss": 0.1153, + "step": 76450 + }, + { + "epoch": 2.9522375381288852, + "grad_norm": 1.6742459535598755, + "learning_rate": 3.1867382267011594e-06, + "loss": 0.1259, + "step": 76460 + }, + { + "epoch": 2.952623653422912, + "grad_norm": 2.4881958961486816, + "learning_rate": 3.1609972070993735e-06, + "loss": 0.2058, + "step": 76470 + }, + { + "epoch": 2.9530097687169388, + "grad_norm": 0.09501784294843674, + "learning_rate": 3.1352561874975872e-06, + "loss": 0.1849, + "step": 76480 + }, + { + "epoch": 2.9533958840109658, + "grad_norm": 0.6468241214752197, + "learning_rate": 3.1095151678958006e-06, + "loss": 0.3005, + "step": 76490 + }, + { + "epoch": 2.9537819993049927, + "grad_norm": 0.3400740921497345, + "learning_rate": 3.083774148294014e-06, + "loss": 0.1335, + "step": 76500 + }, + { + "epoch": 2.9541681145990193, + "grad_norm": 0.841686487197876, + "learning_rate": 3.0580331286922276e-06, + "loss": 0.1571, + "step": 76510 + }, + { + "epoch": 2.954554229893046, + "grad_norm": 0.3212125301361084, + "learning_rate": 3.0322921090904413e-06, + "loss": 0.1652, + "step": 76520 + }, + { + "epoch": 2.954940345187073, + "grad_norm": 1.0158278942108154, + "learning_rate": 3.0065510894886546e-06, + "loss": 0.255, + "step": 76530 + }, + { + "epoch": 2.9553264604811, + "grad_norm": 2.152977466583252, + "learning_rate": 2.9808100698868684e-06, + "loss": 0.2047, + "step": 76540 + }, + { + "epoch": 2.9557125757751264, + "grad_norm": 1.1242973804473877, + "learning_rate": 2.9550690502850817e-06, + "loss": 0.2116, + "step": 76550 + }, + { + "epoch": 2.9560986910691534, + "grad_norm": 2.271522045135498, + "learning_rate": 2.9293280306832954e-06, + "loss": 0.2411, + "step": 76560 + }, + { + "epoch": 2.95648480636318, + "grad_norm": 2.5018372535705566, + "learning_rate": 2.9035870110815087e-06, + "loss": 0.2311, + "step": 76570 + }, + { + "epoch": 2.956870921657207, + "grad_norm": 2.2592756748199463, + "learning_rate": 2.877845991479723e-06, + "loss": 0.1271, + "step": 76580 + }, + { + "epoch": 2.9572570369512334, + "grad_norm": 1.9872472286224365, + "learning_rate": 2.8521049718779362e-06, + "loss": 0.2476, + "step": 76590 + }, + { + "epoch": 2.9576431522452604, + "grad_norm": 0.44302454590797424, + "learning_rate": 2.82636395227615e-06, + "loss": 0.1617, + "step": 76600 + }, + { + "epoch": 2.9580292675392874, + "grad_norm": 0.10741741210222244, + "learning_rate": 2.8006229326743633e-06, + "loss": 0.1844, + "step": 76610 + }, + { + "epoch": 2.958415382833314, + "grad_norm": 0.7484311461448669, + "learning_rate": 2.774881913072577e-06, + "loss": 0.1211, + "step": 76620 + }, + { + "epoch": 2.958801498127341, + "grad_norm": 1.170832633972168, + "learning_rate": 2.7491408934707903e-06, + "loss": 0.2335, + "step": 76630 + }, + { + "epoch": 2.9591876134213675, + "grad_norm": 1.429801344871521, + "learning_rate": 2.723399873869004e-06, + "loss": 0.1425, + "step": 76640 + }, + { + "epoch": 2.9595737287153945, + "grad_norm": 1.5668152570724487, + "learning_rate": 2.6976588542672174e-06, + "loss": 0.0826, + "step": 76650 + }, + { + "epoch": 2.959959844009421, + "grad_norm": 0.4563259482383728, + "learning_rate": 2.6719178346654315e-06, + "loss": 0.1126, + "step": 76660 + }, + { + "epoch": 2.960345959303448, + "grad_norm": 0.0956023558974266, + "learning_rate": 
2.646176815063645e-06, + "loss": 0.0703, + "step": 76670 + }, + { + "epoch": 2.960732074597475, + "grad_norm": 0.34736737608909607, + "learning_rate": 2.6204357954618586e-06, + "loss": 0.2169, + "step": 76680 + }, + { + "epoch": 2.9611181898915016, + "grad_norm": 0.6178987622261047, + "learning_rate": 2.594694775860072e-06, + "loss": 0.1367, + "step": 76690 + }, + { + "epoch": 2.9615043051855285, + "grad_norm": 0.2700929045677185, + "learning_rate": 2.5689537562582856e-06, + "loss": 0.1022, + "step": 76700 + }, + { + "epoch": 2.961890420479555, + "grad_norm": 0.9162507653236389, + "learning_rate": 2.543212736656499e-06, + "loss": 0.1318, + "step": 76710 + }, + { + "epoch": 2.962276535773582, + "grad_norm": 1.0987880229949951, + "learning_rate": 2.5174717170547126e-06, + "loss": 0.1194, + "step": 76720 + }, + { + "epoch": 2.9626626510676086, + "grad_norm": 1.9178944826126099, + "learning_rate": 2.491730697452926e-06, + "loss": 0.1422, + "step": 76730 + }, + { + "epoch": 2.9630487663616356, + "grad_norm": 0.029073640704154968, + "learning_rate": 2.46598967785114e-06, + "loss": 0.1077, + "step": 76740 + }, + { + "epoch": 2.9634348816556626, + "grad_norm": 0.3460249900817871, + "learning_rate": 2.4402486582493534e-06, + "loss": 0.193, + "step": 76750 + }, + { + "epoch": 2.963820996949689, + "grad_norm": 0.08659185469150543, + "learning_rate": 2.414507638647567e-06, + "loss": 0.1638, + "step": 76760 + }, + { + "epoch": 2.964207112243716, + "grad_norm": 0.3209403455257416, + "learning_rate": 2.3887666190457805e-06, + "loss": 0.1265, + "step": 76770 + }, + { + "epoch": 2.9645932275377427, + "grad_norm": 2.6390767097473145, + "learning_rate": 2.363025599443994e-06, + "loss": 0.2551, + "step": 76780 + }, + { + "epoch": 2.9649793428317697, + "grad_norm": 1.400255799293518, + "learning_rate": 2.3372845798422075e-06, + "loss": 0.1893, + "step": 76790 + }, + { + "epoch": 2.965365458125796, + "grad_norm": 1.8555853366851807, + "learning_rate": 2.3115435602404213e-06, + "loss": 0.155, + "step": 76800 + }, + { + "epoch": 2.965751573419823, + "grad_norm": 1.8044438362121582, + "learning_rate": 2.2858025406386346e-06, + "loss": 0.3129, + "step": 76810 + }, + { + "epoch": 2.96613768871385, + "grad_norm": 2.4024667739868164, + "learning_rate": 2.2600615210368483e-06, + "loss": 0.1965, + "step": 76820 + }, + { + "epoch": 2.9665238040078767, + "grad_norm": 2.0369980335235596, + "learning_rate": 2.234320501435062e-06, + "loss": 0.1335, + "step": 76830 + }, + { + "epoch": 2.9669099193019033, + "grad_norm": 1.8668415546417236, + "learning_rate": 2.2085794818332758e-06, + "loss": 0.0668, + "step": 76840 + }, + { + "epoch": 2.9672960345959303, + "grad_norm": 0.18365631997585297, + "learning_rate": 2.182838462231489e-06, + "loss": 0.0758, + "step": 76850 + }, + { + "epoch": 2.9676821498899573, + "grad_norm": 2.2368826866149902, + "learning_rate": 2.157097442629703e-06, + "loss": 0.1002, + "step": 76860 + }, + { + "epoch": 2.968068265183984, + "grad_norm": 1.877583622932434, + "learning_rate": 2.131356423027916e-06, + "loss": 0.1702, + "step": 76870 + }, + { + "epoch": 2.968454380478011, + "grad_norm": 0.635735273361206, + "learning_rate": 2.10561540342613e-06, + "loss": 0.1851, + "step": 76880 + }, + { + "epoch": 2.968840495772038, + "grad_norm": 2.5163917541503906, + "learning_rate": 2.0798743838243436e-06, + "loss": 0.0917, + "step": 76890 + }, + { + "epoch": 2.9692266110660643, + "grad_norm": 0.0662187710404396, + "learning_rate": 2.054133364222557e-06, + "loss": 0.0544, + "step": 76900 + }, + { + "epoch": 
2.969612726360091, + "grad_norm": 0.8235715627670288, + "learning_rate": 2.0283923446207706e-06, + "loss": 0.1892, + "step": 76910 + }, + { + "epoch": 2.969998841654118, + "grad_norm": 0.11744660139083862, + "learning_rate": 2.002651325018984e-06, + "loss": 0.0543, + "step": 76920 + }, + { + "epoch": 2.970384956948145, + "grad_norm": 2.3874759674072266, + "learning_rate": 1.9769103054171977e-06, + "loss": 0.1903, + "step": 76930 + }, + { + "epoch": 2.9707710722421714, + "grad_norm": 1.0060522556304932, + "learning_rate": 1.951169285815411e-06, + "loss": 0.1119, + "step": 76940 + }, + { + "epoch": 2.9711571875361984, + "grad_norm": 1.8163440227508545, + "learning_rate": 1.9254282662136247e-06, + "loss": 0.1515, + "step": 76950 + }, + { + "epoch": 2.971543302830225, + "grad_norm": 0.9836590886116028, + "learning_rate": 1.8996872466118383e-06, + "loss": 0.1354, + "step": 76960 + }, + { + "epoch": 2.971929418124252, + "grad_norm": 0.4806680679321289, + "learning_rate": 1.8739462270100522e-06, + "loss": 0.1058, + "step": 76970 + }, + { + "epoch": 2.9723155334182785, + "grad_norm": 0.043541885912418365, + "learning_rate": 1.8482052074082657e-06, + "loss": 0.1007, + "step": 76980 + }, + { + "epoch": 2.9727016487123055, + "grad_norm": 0.2944481074810028, + "learning_rate": 1.8224641878064793e-06, + "loss": 0.1376, + "step": 76990 + }, + { + "epoch": 2.9730877640063325, + "grad_norm": 1.73750901222229, + "learning_rate": 1.7967231682046928e-06, + "loss": 0.1875, + "step": 77000 + }, + { + "epoch": 2.973473879300359, + "grad_norm": 0.7777722477912903, + "learning_rate": 1.7709821486029063e-06, + "loss": 0.2761, + "step": 77010 + }, + { + "epoch": 2.973859994594386, + "grad_norm": 1.6995668411254883, + "learning_rate": 1.7452411290011198e-06, + "loss": 0.0507, + "step": 77020 + }, + { + "epoch": 2.9742461098884125, + "grad_norm": 2.498788833618164, + "learning_rate": 1.7195001093993333e-06, + "loss": 0.1609, + "step": 77030 + }, + { + "epoch": 2.9746322251824395, + "grad_norm": 1.8842010498046875, + "learning_rate": 1.6937590897975469e-06, + "loss": 0.1728, + "step": 77040 + }, + { + "epoch": 2.975018340476466, + "grad_norm": 1.1742241382598877, + "learning_rate": 1.6680180701957606e-06, + "loss": 0.1388, + "step": 77050 + }, + { + "epoch": 2.975404455770493, + "grad_norm": 0.616058886051178, + "learning_rate": 1.6422770505939741e-06, + "loss": 0.092, + "step": 77060 + }, + { + "epoch": 2.97579057106452, + "grad_norm": 0.2943461239337921, + "learning_rate": 1.6165360309921877e-06, + "loss": 0.0929, + "step": 77070 + }, + { + "epoch": 2.9761766863585466, + "grad_norm": 0.9972903728485107, + "learning_rate": 1.5907950113904012e-06, + "loss": 0.091, + "step": 77080 + }, + { + "epoch": 2.9765628016525736, + "grad_norm": 0.3846336901187897, + "learning_rate": 1.5650539917886147e-06, + "loss": 0.1561, + "step": 77090 + }, + { + "epoch": 2.9769489169466, + "grad_norm": 2.607909917831421, + "learning_rate": 1.5393129721868282e-06, + "loss": 0.2393, + "step": 77100 + }, + { + "epoch": 2.977335032240627, + "grad_norm": 0.7278031706809998, + "learning_rate": 1.513571952585042e-06, + "loss": 0.1189, + "step": 77110 + }, + { + "epoch": 2.9777211475346537, + "grad_norm": 1.9696396589279175, + "learning_rate": 1.4878309329832555e-06, + "loss": 0.1954, + "step": 77120 + }, + { + "epoch": 2.9781072628286807, + "grad_norm": 1.2762713432312012, + "learning_rate": 1.462089913381469e-06, + "loss": 0.2617, + "step": 77130 + }, + { + "epoch": 2.9784933781227076, + "grad_norm": 1.557726502418518, + "learning_rate": 
1.4363488937796827e-06, + "loss": 0.2751, + "step": 77140 + }, + { + "epoch": 2.978879493416734, + "grad_norm": 0.5568321347236633, + "learning_rate": 1.4106078741778963e-06, + "loss": 0.2168, + "step": 77150 + }, + { + "epoch": 2.979265608710761, + "grad_norm": 2.3710832595825195, + "learning_rate": 1.3848668545761098e-06, + "loss": 0.244, + "step": 77160 + }, + { + "epoch": 2.9796517240047877, + "grad_norm": 1.4126112461090088, + "learning_rate": 1.3591258349743233e-06, + "loss": 0.1546, + "step": 77170 + }, + { + "epoch": 2.9800378392988147, + "grad_norm": 0.4143407940864563, + "learning_rate": 1.333384815372537e-06, + "loss": 0.1715, + "step": 77180 + }, + { + "epoch": 2.9804239545928413, + "grad_norm": 0.4667656719684601, + "learning_rate": 1.3076437957707506e-06, + "loss": 0.144, + "step": 77190 + }, + { + "epoch": 2.9808100698868683, + "grad_norm": 2.8623580932617188, + "learning_rate": 1.281902776168964e-06, + "loss": 0.1881, + "step": 77200 + }, + { + "epoch": 2.9811961851808952, + "grad_norm": 1.9620566368103027, + "learning_rate": 1.2561617565671776e-06, + "loss": 0.196, + "step": 77210 + }, + { + "epoch": 2.981582300474922, + "grad_norm": 0.6524580717086792, + "learning_rate": 1.2304207369653913e-06, + "loss": 0.0423, + "step": 77220 + }, + { + "epoch": 2.9819684157689483, + "grad_norm": 0.09811830520629883, + "learning_rate": 1.2046797173636049e-06, + "loss": 0.0493, + "step": 77230 + }, + { + "epoch": 2.9823545310629753, + "grad_norm": 0.45934444665908813, + "learning_rate": 1.1789386977618184e-06, + "loss": 0.044, + "step": 77240 + }, + { + "epoch": 2.9827406463570023, + "grad_norm": 1.32642662525177, + "learning_rate": 1.153197678160032e-06, + "loss": 0.1132, + "step": 77250 + }, + { + "epoch": 2.983126761651029, + "grad_norm": 0.5210559964179993, + "learning_rate": 1.1274566585582457e-06, + "loss": 0.1504, + "step": 77260 + }, + { + "epoch": 2.983512876945056, + "grad_norm": 0.6723094582557678, + "learning_rate": 1.1017156389564592e-06, + "loss": 0.0915, + "step": 77270 + }, + { + "epoch": 2.983898992239083, + "grad_norm": 1.8885656595230103, + "learning_rate": 1.0759746193546727e-06, + "loss": 0.1754, + "step": 77280 + }, + { + "epoch": 2.9842851075331094, + "grad_norm": 0.7771125435829163, + "learning_rate": 1.0502335997528862e-06, + "loss": 0.1099, + "step": 77290 + }, + { + "epoch": 2.984671222827136, + "grad_norm": 0.3368057310581207, + "learning_rate": 1.0244925801511e-06, + "loss": 0.2531, + "step": 77300 + }, + { + "epoch": 2.985057338121163, + "grad_norm": 1.5150898694992065, + "learning_rate": 9.987515605493135e-07, + "loss": 0.1183, + "step": 77310 + }, + { + "epoch": 2.98544345341519, + "grad_norm": 1.127661943435669, + "learning_rate": 9.73010540947527e-07, + "loss": 0.0798, + "step": 77320 + }, + { + "epoch": 2.9858295687092165, + "grad_norm": 0.38720184564590454, + "learning_rate": 9.472695213457404e-07, + "loss": 0.224, + "step": 77330 + }, + { + "epoch": 2.9862156840032434, + "grad_norm": 2.868779182434082, + "learning_rate": 9.215285017439542e-07, + "loss": 0.1443, + "step": 77340 + }, + { + "epoch": 2.9866017992972704, + "grad_norm": 1.1396832466125488, + "learning_rate": 8.957874821421677e-07, + "loss": 0.1854, + "step": 77350 + }, + { + "epoch": 2.986987914591297, + "grad_norm": 1.5037932395935059, + "learning_rate": 8.700464625403812e-07, + "loss": 0.1981, + "step": 77360 + }, + { + "epoch": 2.9873740298853235, + "grad_norm": 2.0281424522399902, + "learning_rate": 8.443054429385947e-07, + "loss": 0.1497, + "step": 77370 + }, + { + "epoch": 
2.9877601451793505, + "grad_norm": 0.9007585048675537, + "learning_rate": 8.185644233368085e-07, + "loss": 0.1785, + "step": 77380 + }, + { + "epoch": 2.9881462604733775, + "grad_norm": 1.5973069667816162, + "learning_rate": 7.92823403735022e-07, + "loss": 0.2503, + "step": 77390 + }, + { + "epoch": 2.988532375767404, + "grad_norm": 1.3147287368774414, + "learning_rate": 7.670823841332356e-07, + "loss": 0.1826, + "step": 77400 + }, + { + "epoch": 2.988918491061431, + "grad_norm": 1.1539599895477295, + "learning_rate": 7.413413645314491e-07, + "loss": 0.1909, + "step": 77410 + }, + { + "epoch": 2.9893046063554576, + "grad_norm": 1.8404300212860107, + "learning_rate": 7.156003449296628e-07, + "loss": 0.1325, + "step": 77420 + }, + { + "epoch": 2.9896907216494846, + "grad_norm": 0.768785834312439, + "learning_rate": 6.898593253278763e-07, + "loss": 0.2586, + "step": 77430 + }, + { + "epoch": 2.990076836943511, + "grad_norm": 1.2869620323181152, + "learning_rate": 6.641183057260899e-07, + "loss": 0.4009, + "step": 77440 + }, + { + "epoch": 2.990462952237538, + "grad_norm": 0.19744427502155304, + "learning_rate": 6.383772861243034e-07, + "loss": 0.135, + "step": 77450 + }, + { + "epoch": 2.990849067531565, + "grad_norm": 0.26801246404647827, + "learning_rate": 6.126362665225171e-07, + "loss": 0.1728, + "step": 77460 + }, + { + "epoch": 2.9912351828255916, + "grad_norm": 0.010998820886015892, + "learning_rate": 5.868952469207306e-07, + "loss": 0.0807, + "step": 77470 + }, + { + "epoch": 2.9916212981196186, + "grad_norm": 0.11696690320968628, + "learning_rate": 5.611542273189441e-07, + "loss": 0.1431, + "step": 77480 + }, + { + "epoch": 2.992007413413645, + "grad_norm": 0.16014862060546875, + "learning_rate": 5.354132077171576e-07, + "loss": 0.0245, + "step": 77490 + }, + { + "epoch": 2.992393528707672, + "grad_norm": 0.01859739050269127, + "learning_rate": 5.096721881153713e-07, + "loss": 0.0767, + "step": 77500 + }, + { + "epoch": 2.9927796440016987, + "grad_norm": 2.0091257095336914, + "learning_rate": 4.839311685135848e-07, + "loss": 0.1692, + "step": 77510 + }, + { + "epoch": 2.9931657592957257, + "grad_norm": 0.18971773982048035, + "learning_rate": 4.581901489117984e-07, + "loss": 0.1466, + "step": 77520 + }, + { + "epoch": 2.9935518745897527, + "grad_norm": 1.6025941371917725, + "learning_rate": 4.3244912931001194e-07, + "loss": 0.1006, + "step": 77530 + }, + { + "epoch": 2.9939379898837792, + "grad_norm": 3.3034281730651855, + "learning_rate": 4.0670810970822557e-07, + "loss": 0.2355, + "step": 77540 + }, + { + "epoch": 2.9943241051778062, + "grad_norm": 1.5808985233306885, + "learning_rate": 3.8096709010643915e-07, + "loss": 0.1865, + "step": 77550 + }, + { + "epoch": 2.9947102204718328, + "grad_norm": 0.44882145524024963, + "learning_rate": 3.552260705046527e-07, + "loss": 0.0814, + "step": 77560 + }, + { + "epoch": 2.9950963357658598, + "grad_norm": 0.21992957592010498, + "learning_rate": 3.294850509028663e-07, + "loss": 0.2062, + "step": 77570 + }, + { + "epoch": 2.9954824510598863, + "grad_norm": 0.35506996512413025, + "learning_rate": 3.037440313010799e-07, + "loss": 0.1015, + "step": 77580 + }, + { + "epoch": 2.9958685663539133, + "grad_norm": 0.06529080867767334, + "learning_rate": 2.7800301169929345e-07, + "loss": 0.138, + "step": 77590 + }, + { + "epoch": 2.9962546816479403, + "grad_norm": 1.855838656425476, + "learning_rate": 2.52261992097507e-07, + "loss": 0.1171, + "step": 77600 + }, + { + "epoch": 2.996640796941967, + "grad_norm": 0.20416900515556335, + "learning_rate": 
2.2652097249572058e-07, + "loss": 0.0647, + "step": 77610 + }, + { + "epoch": 2.997026912235994, + "grad_norm": 0.7795670032501221, + "learning_rate": 2.0077995289393415e-07, + "loss": 0.2346, + "step": 77620 + }, + { + "epoch": 2.9974130275300204, + "grad_norm": 0.12493914365768433, + "learning_rate": 1.750389332921477e-07, + "loss": 0.1291, + "step": 77630 + }, + { + "epoch": 2.9977991428240474, + "grad_norm": 1.76205313205719, + "learning_rate": 1.4929791369036128e-07, + "loss": 0.1026, + "step": 77640 + }, + { + "epoch": 2.998185258118074, + "grad_norm": 0.5991857647895813, + "learning_rate": 1.2355689408857486e-07, + "loss": 0.1125, + "step": 77650 + }, + { + "epoch": 2.998571373412101, + "grad_norm": 0.6876686215400696, + "learning_rate": 9.781587448678842e-08, + "loss": 0.122, + "step": 77660 + }, + { + "epoch": 2.998957488706128, + "grad_norm": 1.4319281578063965, + "learning_rate": 7.2074854885002e-08, + "loss": 0.1069, + "step": 77670 + }, + { + "epoch": 2.9993436040001544, + "grad_norm": 0.8816627860069275, + "learning_rate": 4.633383528321557e-08, + "loss": 0.1775, + "step": 77680 + }, + { + "epoch": 2.999729719294181, + "grad_norm": 1.0448139905929565, + "learning_rate": 2.0592815681429144e-08, + "loss": 0.3063, + "step": 77690 + } + ], + "logging_steps": 10, + "max_steps": 77697, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 4.731420122284032e+16, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/training_args.bin b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..51ec8c074aec62c737bdc86f3c7f1d33bd5739dc Binary files /dev/null and b/HVU_QA/t5-viet-qg-finetuned/checkpoint-77697/training_args.bin differ