aczhu committed (verified)
Commit 039d14e · Parent(s): b1a5216

Add files using upload-large-folder tool
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,73 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: Qwen/Qwen2.5-14B-Instruct
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: sft
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # sft
+
+ This model is a fine-tuned version of [Qwen/Qwen2.5-14B-Instruct](https://huggingface.co/Qwen/Qwen2.5-14B-Instruct) on the glaive_toolcall_100k and bespoke_reasoning_17k datasets.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.3492
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-05
+ - train_batch_size: 1
+ - eval_batch_size: 1
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 7
+ - gradient_accumulation_steps: 2
+ - total_train_batch_size: 14
+ - total_eval_batch_size: 7
+ - optimizer: adamw_torch with betas=(0.9, 0.999), epsilon=1e-08, and no additional optimizer arguments
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 2.0
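For reproduction outside LLaMA-Factory, here is a minimal sketch of the equivalent 🤗 Transformers `TrainingArguments` (the `output_dir` is a placeholder; the 7-device count comes from the launcher, e.g. `torchrun --nproc_per_node=7`, not from these arguments):

```python
from transformers import TrainingArguments

# Sketch mirroring the hyperparameters listed above.
# Effective train batch size = 1 (per device) x 7 (GPUs) x 2 (grad. accum.) = 14.
args = TrainingArguments(
    output_dir="sft",            # placeholder
    per_device_train_batch_size=1,
    per_device_eval_batch_size=1,
    gradient_accumulation_steps=2,
    learning_rate=1e-5,
    lr_scheduler_type="cosine",
    warmup_ratio=0.1,
    num_train_epochs=2.0,
    seed=42,
    optim="adamw_torch",
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    bf16=True,                   # assumption: matches torch_dtype=bfloat16 in config.json
)
```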
+
+ ### Training results
+
+ | Training Loss | Epoch  | Step | Validation Loss |
+ |:-------------:|:------:|:----:|:---------------:|
+ | 0.4468        | 0.2307 | 500  | 0.3987          |
+ | 0.4457        | 0.4614 | 1000 | 0.3861          |
+ | 0.4197        | 0.6920 | 1500 | 0.3745          |
+ | 0.4264        | 0.9227 | 2000 | 0.3640          |
+ | 0.3188        | 1.1532 | 2500 | 0.3638          |
+ | 0.2938        | 1.3839 | 3000 | 0.3572          |
+ | 0.2891        | 1.6145 | 3500 | 0.3523          |
+ | 0.3013        | 1.8452 | 4000 | 0.3492          |
+
+
+ ### Framework versions
+
+ - Transformers 4.49.0
+ - Pytorch 2.6.0+cu124
+ - Datasets 3.2.0
+ - Tokenizers 0.21.0
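A minimal usage sketch (the repo path is a placeholder; substitute the actual model id or local directory):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "path/to/sft"  # placeholder for this repository
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, torch_dtype="auto", device_map="auto")

messages = [{"role": "user", "content": "What tools can you call?"}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
outputs = model.generate(inputs, max_new_tokens=256)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```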
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "</tool_call>": 151658,
+   "<tool_call>": 151657,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
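These 22 special-token IDs sit at the top of the 152,064-entry vocabulary. A quick sketch to confirm they agree with the tokenizer ("path/to/sft" is a placeholder for a local clone of this repo):

```python
import json
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/sft")  # placeholder path
added = json.load(open("added_tokens.json"))
for token, expected_id in added.items():
    assert tokenizer.convert_tokens_to_ids(token) == expected_id, token
print(f"All {len(added)} added tokens map to their expected IDs.")
```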
all_results.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "epoch": 1.9993079584775086,
+   "eval_loss": 0.34922918677330017,
+   "eval_runtime": 406.3236,
+   "eval_samples_per_second": 8.296,
+   "eval_steps_per_second": 1.186,
+   "total_flos": 545751381377024.0,
+   "train_loss": 0.3850643413100971,
+   "train_runtime": 89295.2192,
+   "train_samples_per_second": 0.68,
+   "train_steps_per_second": 0.049
+ }
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "_name_or_path": "Qwen/Qwen2.5-14B-Instruct",
+   "architectures": [
+     "Qwen2ForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "initializer_range": 0.02,
+   "intermediate_size": 13824,
+   "max_position_embeddings": 32768,
+   "max_window_layers": 70,
+   "model_type": "qwen2",
+   "num_attention_heads": 40,
+   "num_hidden_layers": 48,
+   "num_key_value_heads": 8,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 1000000.0,
+   "sliding_window": 131072,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.49.0",
+   "use_cache": false,
+   "use_sliding_window": false,
+   "vocab_size": 152064
+ }
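The config describes a 48-layer model with grouped-query attention. A quick sanity check of the attention geometry these fields imply:

```python
# Attention geometry implied by config.json (pure arithmetic from the fields above).
hidden_size = 5120
num_attention_heads = 40
num_key_value_heads = 8

head_dim = hidden_size // num_attention_heads           # 5120 / 40 = 128
gqa_group = num_attention_heads // num_key_value_heads  # 40 / 8 = 5 query heads per KV head
kv_proj_width = num_key_value_heads * head_dim          # 8 * 128 = 1024 (k/v projection output)
print(head_dim, gqa_group, kv_proj_width)               # 128 5 1024
```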
eval_results.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "epoch": 1.9993079584775086,
+   "eval_loss": 0.34922918677330017,
+   "eval_runtime": 406.3236,
+   "eval_samples_per_second": 8.296,
+   "eval_steps_per_second": 1.186
+ }
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "bos_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "pad_token_id": 151643,
+   "repetition_penalty": 1.05,
+   "temperature": 0.7,
+   "top_k": 20,
+   "top_p": 0.8,
+   "transformers_version": "4.49.0"
+ }
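These are the sampling defaults that `model.generate()` picks up automatically. A sketch of how the same settings look when constructed or overridden explicitly:

```python
from transformers import GenerationConfig

# Mirrors generation_config.json; any field can be overridden per call.
gen_config = GenerationConfig(
    do_sample=True,
    temperature=0.7,
    top_k=20,
    top_p=0.8,
    repetition_penalty=1.05,
    bos_token_id=151643,
    eos_token_id=[151645, 151643],
    pad_token_id=151643,
)
# usage: model.generate(**inputs, generation_config=gen_config)
```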
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f3b8768bc10b761a7a4575b1ac3ffa13161905c550ac2db14c74b8d0013d4e3
+ size 4986211280
model-00002-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:308c8609d61a72ca4edc885f187c1d1bce9b316a4b905c4b259e031fe2e99bd4
+ size 4954847344
model-00003-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30322a456c3788404054447192b0ab7b41db914977f2eaf8556d9b7185aed566
+ size 4954847392
model-00004-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:822b21fcc05e49bb75fe35d3a492fefaddbaaa57180f0fe7915f2610a6fce3f4
+ size 4954847392
model-00005-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af82f754565ca632c05f62309b4387d134a2eeb09e4c059eff39764ee40e0ee0
+ size 4954847392
model-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e93d19b22e526c724ab8813dce36dcb797c733e74766344de222629ad136a86f
+ size 4734533160
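Each shard entry above is a Git LFS pointer file (spec v1), not the weights themselves. A minimal sketch, assuming locally downloaded blobs, for verifying a shard against its pointer:

```python
import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against its Git LFS pointer (sha256 oid + size)."""
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if " " in line
    )
    expected_oid = fields["oid"].strip().removeprefix("sha256:")
    expected_size = int(fields["size"])
    digest, size = hashlib.sha256(), 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream 1 MiB at a time
            digest.update(chunk)
            size += len(chunk)
    return size == expected_size and digest.hexdigest() == expected_oid
```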
model.safetensors.index.json ADDED
@@ -0,0 +1,586 @@
+ {
+   "metadata": {
+     "total_size": 29540067328
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00006-of-00006.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.14.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.14.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.20.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.20.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.22.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.22.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.22.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.23.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.23.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.23.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+     "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.29.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.29.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.29.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.29.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.29.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.29.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.30.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.30.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.30.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.30.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.30.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.30.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.30.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.31.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.31.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.31.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.31.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.31.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.31.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.31.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.31.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.31.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.31.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.31.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.32.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.32.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.32.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.32.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.32.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.32.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.33.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.33.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+     "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.34.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.36.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.36.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.36.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.36.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.36.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.36.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.37.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.37.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.37.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.37.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.37.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.37.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.38.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.38.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.38.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.38.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.38.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.38.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.38.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.39.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.39.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.39.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.39.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.39.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.39.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.39.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.39.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.39.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.39.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.39.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.39.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.40.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.40.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.40.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.40.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.40.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.40.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.40.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.40.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.40.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.40.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.40.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.40.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.41.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.41.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.41.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.41.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.41.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.41.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.41.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.41.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.41.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.41.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.41.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.41.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.42.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.42.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.42.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.42.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.42.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.42.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.42.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.42.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.42.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.42.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.42.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+     "model.layers.42.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.43.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.43.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.43.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.43.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.43.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.43.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.43.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.43.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.43.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.43.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.43.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.43.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.44.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.44.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.44.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.44.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.44.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.44.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.44.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.44.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.44.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.44.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.44.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.44.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.45.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.45.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.45.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.45.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.45.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.45.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.45.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.45.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.45.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.45.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.45.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.45.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.46.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.46.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.46.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.46.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.46.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.46.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.46.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.46.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.46.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.46.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.46.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.46.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.47.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.47.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.47.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.47.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.47.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.47.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.47.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.47.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.47.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.47.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.47.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+     "model.layers.47.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.5.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
+     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+     "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+     "model.norm.weight": "model-00006-of-00006.safetensors"
+   }
+ }
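A short sketch for summarizing how the index distributes its 579 tensors across the six shards (run from a local clone of this repo):

```python
import json
from collections import Counter

index = json.load(open("model.safetensors.index.json"))
per_shard = Counter(index["weight_map"].values())
for shard, n_tensors in sorted(per_shard.items()):
    print(f"{shard}: {n_tensors} tensors")
print(f'total logical size: {index["metadata"]["total_size"] / 1e9:.2f} GB')
```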
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|im_start|>",
4
+ "<|im_end|>",
5
+ "<|object_ref_start|>",
6
+ "<|object_ref_end|>",
7
+ "<|box_start|>",
8
+ "<|box_end|>",
9
+ "<|quad_start|>",
10
+ "<|quad_end|>",
11
+ "<|vision_start|>",
12
+ "<|vision_end|>",
13
+ "<|vision_pad|>",
14
+ "<|image_pad|>",
15
+ "<|video_pad|>"
16
+ ],
17
+ "eos_token": {
18
+ "content": "<|im_end|>",
19
+ "lstrip": false,
20
+ "normalized": false,
21
+ "rstrip": false,
22
+ "single_word": false
23
+ },
24
+ "pad_token": {
25
+ "content": "<|endoftext|>",
26
+ "lstrip": false,
27
+ "normalized": false,
28
+ "rstrip": false,
29
+ "single_word": false
30
+ }
31
+ }
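
This mapping is what `AutoTokenizer` reads to wire up the control tokens. A minimal check, assuming the repo has been downloaded (`"<user>/sft"` is a placeholder for the actual Hub id):

```python
# Minimal sketch: confirm the special-token wiring after loading.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("<user>/sft")  # placeholder repo id

print(tok.eos_token)                  # <|im_end|>
print(tok.pad_token)                  # <|endoftext|>
print(tok.additional_special_tokens)  # the 13 control tokens listed above
```

Note that `pad_token` (`<|endoftext|>`) differs from `eos_token` (`<|im_end|>`), so padding positions in a right-padded batch are never confused with genuine end-of-turn tokens.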
tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
3
+ size 11421896
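
These three lines are a Git LFS pointer, not the tokenizer itself: the real `tokenizer.json` (~11.4 MB, content-addressed by the sha256 oid above) sits in LFS storage. A plain `git clone` followed by `git lfs pull` materializes it, and `huggingface_hub` resolves it transparently, as sketched below with a placeholder repo id:

```python
# Minimal sketch: fetch the LFS-backed tokenizer.json, not the 3-line pointer.
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="<user>/sft", filename="tokenizer.json")
print(path)  # local cache path to the ~11.4 MB file
```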
tokenizer_config.json ADDED
@@ -0,0 +1,209 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ }
181
+ },
182
+ "additional_special_tokens": [
183
+ "<|im_start|>",
184
+ "<|im_end|>",
185
+ "<|object_ref_start|>",
186
+ "<|object_ref_end|>",
187
+ "<|box_start|>",
188
+ "<|box_end|>",
189
+ "<|quad_start|>",
190
+ "<|quad_end|>",
191
+ "<|vision_start|>",
192
+ "<|vision_end|>",
193
+ "<|vision_pad|>",
194
+ "<|image_pad|>",
195
+ "<|video_pad|>"
196
+ ],
197
+ "bos_token": null,
198
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
199
+ "clean_up_tokenization_spaces": false,
200
+ "eos_token": "<|im_end|>",
201
+ "errors": "replace",
202
+ "extra_special_tokens": {},
203
+ "model_max_length": 4096,
204
+ "pad_token": "<|endoftext|>",
205
+ "padding_side": "right",
206
+ "split_special_tokens": false,
207
+ "tokenizer_class": "Qwen2Tokenizer",
208
+ "unk_token": null
209
+ }
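
The `chat_template` above is the part worth reading: it injects a default Qwen system prompt, serializes any supplied tool schemas into a `<tools>` block, and wraps assistant tool calls in `<tool_call>` XML tags. A minimal sketch of rendering it (the tool schema and repo id are illustrative placeholders, not part of this repo):

```python
# Minimal sketch: render the embedded chat template, tool-calling branch included.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("<user>/sft")  # placeholder repo id

tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Look up current weather for a city.",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}]
messages = [{"role": "user", "content": "Weather in Paris?"}]

prompt = tok.apply_chat_template(
    messages, tools=tools, tokenize=False, add_generation_prompt=True
)
print(prompt)  # system block with <tools>...</tools>, then "<|im_start|>assistant"
```

With `add_generation_prompt=True` the rendered string ends in `<|im_start|>assistant\n`, leaving the model free to emit either plain text or a `<tool_call>` block before `<|im_end|>`.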
train_results.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "epoch": 1.9993079584775086,
3
+ "total_flos": 545751381377024.0,
4
+ "train_loss": 0.3850643413100971,
5
+ "train_runtime": 89295.2192,
6
+ "train_samples_per_second": 0.68,
7
+ "train_steps_per_second": 0.049
8
+ }
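
The summary fields are mutually consistent and line up with the step counts logged in `trainer_log.jsonl` below; a quick check, assuming it is run from the repo root:

```python
# Minimal sketch: sanity-check the training summary against itself.
import json

with open("train_results.json") as f:
    r = json.load(f)

hours = r["train_runtime"] / 3600
steps = r["train_runtime"] * r["train_steps_per_second"]
samples = r["train_runtime"] * r["train_samples_per_second"]

print(f"{hours:.1f} h")          # ~24.8 h
print(f"{steps:.0f} steps")      # ~4375, matching the 4334-step schedule below
print(f"{samples:.0f} samples")  # ~60,700 samples seen over ~2 epochs
```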
trainer_log.jsonl ADDED
@@ -0,0 +1,442 @@
1
+ {"current_steps": 10, "total_steps": 4334, "loss": 0.806, "lr": 2.3041474654377884e-07, "epoch": 0.00461361014994233, "percentage": 0.23, "elapsed_time": "0:03:36", "remaining_time": "1 day, 2:02:31"}
2
+ {"current_steps": 20, "total_steps": 4334, "loss": 0.8251, "lr": 4.608294930875577e-07, "epoch": 0.00922722029988466, "percentage": 0.46, "elapsed_time": "0:06:52", "remaining_time": "1 day, 0:42:21"}
3
+ {"current_steps": 30, "total_steps": 4334, "loss": 0.7555, "lr": 6.912442396313365e-07, "epoch": 0.01384083044982699, "percentage": 0.69, "elapsed_time": "0:10:09", "remaining_time": "1 day, 0:17:09"}
4
+ {"current_steps": 40, "total_steps": 4334, "loss": 0.7218, "lr": 9.216589861751154e-07, "epoch": 0.01845444059976932, "percentage": 0.92, "elapsed_time": "0:13:21", "remaining_time": "23:54:37"}
5
+ {"current_steps": 50, "total_steps": 4334, "loss": 0.6751, "lr": 1.1520737327188942e-06, "epoch": 0.02306805074971165, "percentage": 1.15, "elapsed_time": "0:17:02", "remaining_time": "1 day, 0:20:17"}
6
+ {"current_steps": 60, "total_steps": 4334, "loss": 0.6197, "lr": 1.382488479262673e-06, "epoch": 0.02768166089965398, "percentage": 1.38, "elapsed_time": "0:20:16", "remaining_time": "1 day, 0:04:06"}
7
+ {"current_steps": 70, "total_steps": 4334, "loss": 0.5083, "lr": 1.6129032258064516e-06, "epoch": 0.03229527104959631, "percentage": 1.62, "elapsed_time": "0:23:33", "remaining_time": "23:54:33"}
8
+ {"current_steps": 80, "total_steps": 4334, "loss": 0.5326, "lr": 1.8433179723502307e-06, "epoch": 0.03690888119953864, "percentage": 1.85, "elapsed_time": "0:26:49", "remaining_time": "23:45:58"}
9
+ {"current_steps": 90, "total_steps": 4334, "loss": 0.5463, "lr": 2.0737327188940094e-06, "epoch": 0.04152249134948097, "percentage": 2.08, "elapsed_time": "0:30:04", "remaining_time": "23:38:30"}
10
+ {"current_steps": 100, "total_steps": 4334, "loss": 0.5445, "lr": 2.3041474654377884e-06, "epoch": 0.0461361014994233, "percentage": 2.31, "elapsed_time": "0:33:23", "remaining_time": "23:33:31"}
11
+ {"current_steps": 110, "total_steps": 4334, "loss": 0.4972, "lr": 2.5345622119815673e-06, "epoch": 0.05074971164936563, "percentage": 2.54, "elapsed_time": "0:36:59", "remaining_time": "23:40:17"}
12
+ {"current_steps": 120, "total_steps": 4334, "loss": 0.519, "lr": 2.764976958525346e-06, "epoch": 0.05536332179930796, "percentage": 2.77, "elapsed_time": "0:40:23", "remaining_time": "23:38:08"}
13
+ {"current_steps": 130, "total_steps": 4334, "loss": 0.453, "lr": 2.9953917050691243e-06, "epoch": 0.05997693194925029, "percentage": 3.0, "elapsed_time": "0:43:24", "remaining_time": "23:23:45"}
14
+ {"current_steps": 140, "total_steps": 4334, "loss": 0.4799, "lr": 3.225806451612903e-06, "epoch": 0.06459054209919261, "percentage": 3.23, "elapsed_time": "0:46:35", "remaining_time": "23:15:56"}
15
+ {"current_steps": 150, "total_steps": 4334, "loss": 0.4896, "lr": 3.4562211981566825e-06, "epoch": 0.06920415224913495, "percentage": 3.46, "elapsed_time": "0:49:52", "remaining_time": "23:11:06"}
16
+ {"current_steps": 160, "total_steps": 4334, "loss": 0.5167, "lr": 3.6866359447004615e-06, "epoch": 0.07381776239907728, "percentage": 3.69, "elapsed_time": "0:53:28", "remaining_time": "23:15:09"}
17
+ {"current_steps": 170, "total_steps": 4334, "loss": 0.5127, "lr": 3.91705069124424e-06, "epoch": 0.0784313725490196, "percentage": 3.92, "elapsed_time": "0:56:46", "remaining_time": "23:10:32"}
18
+ {"current_steps": 180, "total_steps": 4334, "loss": 0.4465, "lr": 4.147465437788019e-06, "epoch": 0.08304498269896193, "percentage": 4.15, "elapsed_time": "1:00:28", "remaining_time": "23:15:49"}
19
+ {"current_steps": 190, "total_steps": 4334, "loss": 0.4761, "lr": 4.377880184331797e-06, "epoch": 0.08765859284890427, "percentage": 4.38, "elapsed_time": "1:03:45", "remaining_time": "23:10:31"}
20
+ {"current_steps": 200, "total_steps": 4334, "loss": 0.5055, "lr": 4.608294930875577e-06, "epoch": 0.0922722029988466, "percentage": 4.61, "elapsed_time": "1:06:56", "remaining_time": "23:03:37"}
21
+ {"current_steps": 210, "total_steps": 4334, "loss": 0.4557, "lr": 4.838709677419355e-06, "epoch": 0.09688581314878893, "percentage": 4.85, "elapsed_time": "1:10:10", "remaining_time": "22:58:15"}
22
+ {"current_steps": 220, "total_steps": 4334, "loss": 0.4686, "lr": 5.0691244239631346e-06, "epoch": 0.10149942329873125, "percentage": 5.08, "elapsed_time": "1:13:34", "remaining_time": "22:55:50"}
23
+ {"current_steps": 230, "total_steps": 4334, "loss": 0.4843, "lr": 5.299539170506913e-06, "epoch": 0.1061130334486736, "percentage": 5.31, "elapsed_time": "1:16:46", "remaining_time": "22:50:01"}
24
+ {"current_steps": 240, "total_steps": 4334, "loss": 0.5015, "lr": 5.529953917050692e-06, "epoch": 0.11072664359861592, "percentage": 5.54, "elapsed_time": "1:19:58", "remaining_time": "22:44:06"}
25
+ {"current_steps": 250, "total_steps": 4334, "loss": 0.4622, "lr": 5.76036866359447e-06, "epoch": 0.11534025374855825, "percentage": 5.77, "elapsed_time": "1:23:19", "remaining_time": "22:41:15"}
26
+ {"current_steps": 260, "total_steps": 4334, "loss": 0.4642, "lr": 5.9907834101382485e-06, "epoch": 0.11995386389850057, "percentage": 6.0, "elapsed_time": "1:26:41", "remaining_time": "22:38:26"}
27
+ {"current_steps": 270, "total_steps": 4334, "loss": 0.4357, "lr": 6.221198156682028e-06, "epoch": 0.1245674740484429, "percentage": 6.23, "elapsed_time": "1:29:49", "remaining_time": "22:32:08"}
28
+ {"current_steps": 280, "total_steps": 4334, "loss": 0.4547, "lr": 6.451612903225806e-06, "epoch": 0.12918108419838523, "percentage": 6.46, "elapsed_time": "1:33:16", "remaining_time": "22:30:25"}
29
+ {"current_steps": 290, "total_steps": 4334, "loss": 0.4464, "lr": 6.682027649769586e-06, "epoch": 0.13379469434832755, "percentage": 6.69, "elapsed_time": "1:36:50", "remaining_time": "22:30:31"}
30
+ {"current_steps": 300, "total_steps": 4334, "loss": 0.4957, "lr": 6.912442396313365e-06, "epoch": 0.1384083044982699, "percentage": 6.92, "elapsed_time": "1:40:13", "remaining_time": "22:27:44"}
31
+ {"current_steps": 310, "total_steps": 4334, "loss": 0.4473, "lr": 7.1428571428571436e-06, "epoch": 0.14302191464821223, "percentage": 7.15, "elapsed_time": "1:43:28", "remaining_time": "22:23:06"}
32
+ {"current_steps": 320, "total_steps": 4334, "loss": 0.4831, "lr": 7.373271889400923e-06, "epoch": 0.14763552479815456, "percentage": 7.38, "elapsed_time": "1:46:48", "remaining_time": "22:19:43"}
33
+ {"current_steps": 330, "total_steps": 4334, "loss": 0.4996, "lr": 7.603686635944701e-06, "epoch": 0.1522491349480969, "percentage": 7.61, "elapsed_time": "1:50:32", "remaining_time": "22:21:12"}
34
+ {"current_steps": 340, "total_steps": 4334, "loss": 0.4861, "lr": 7.83410138248848e-06, "epoch": 0.1568627450980392, "percentage": 7.84, "elapsed_time": "1:53:43", "remaining_time": "22:15:56"}
35
+ {"current_steps": 350, "total_steps": 4334, "loss": 0.4774, "lr": 8.064516129032258e-06, "epoch": 0.16147635524798154, "percentage": 8.08, "elapsed_time": "1:56:59", "remaining_time": "22:11:36"}
36
+ {"current_steps": 360, "total_steps": 4334, "loss": 0.505, "lr": 8.294930875576038e-06, "epoch": 0.16608996539792387, "percentage": 8.31, "elapsed_time": "2:00:25", "remaining_time": "22:09:22"}
37
+ {"current_steps": 370, "total_steps": 4334, "loss": 0.5116, "lr": 8.525345622119815e-06, "epoch": 0.1707035755478662, "percentage": 8.54, "elapsed_time": "2:03:39", "remaining_time": "22:04:45"}
38
+ {"current_steps": 380, "total_steps": 4334, "loss": 0.4307, "lr": 8.755760368663595e-06, "epoch": 0.17531718569780855, "percentage": 8.77, "elapsed_time": "2:07:02", "remaining_time": "22:01:52"}
39
+ {"current_steps": 390, "total_steps": 4334, "loss": 0.4525, "lr": 8.986175115207374e-06, "epoch": 0.17993079584775087, "percentage": 9.0, "elapsed_time": "2:10:23", "remaining_time": "21:58:41"}
40
+ {"current_steps": 400, "total_steps": 4334, "loss": 0.4557, "lr": 9.216589861751153e-06, "epoch": 0.1845444059976932, "percentage": 9.23, "elapsed_time": "2:13:57", "remaining_time": "21:57:29"}
41
+ {"current_steps": 410, "total_steps": 4334, "loss": 0.4927, "lr": 9.447004608294931e-06, "epoch": 0.18915801614763553, "percentage": 9.46, "elapsed_time": "2:17:12", "remaining_time": "21:53:15"}
42
+ {"current_steps": 420, "total_steps": 4334, "loss": 0.4926, "lr": 9.67741935483871e-06, "epoch": 0.19377162629757785, "percentage": 9.69, "elapsed_time": "2:20:26", "remaining_time": "21:48:46"}
43
+ {"current_steps": 430, "total_steps": 4334, "loss": 0.4373, "lr": 9.90783410138249e-06, "epoch": 0.19838523644752018, "percentage": 9.92, "elapsed_time": "2:23:35", "remaining_time": "21:43:40"}
44
+ {"current_steps": 440, "total_steps": 4334, "loss": 0.4877, "lr": 9.999941600087643e-06, "epoch": 0.2029988465974625, "percentage": 10.15, "elapsed_time": "2:26:42", "remaining_time": "21:38:23"}
45
+ {"current_steps": 450, "total_steps": 4334, "loss": 0.4628, "lr": 9.999584716674727e-06, "epoch": 0.20761245674740483, "percentage": 10.38, "elapsed_time": "2:30:08", "remaining_time": "21:35:52"}
46
+ {"current_steps": 460, "total_steps": 4334, "loss": 0.4757, "lr": 9.998903417374228e-06, "epoch": 0.2122260668973472, "percentage": 10.61, "elapsed_time": "2:33:39", "remaining_time": "21:34:07"}
47
+ {"current_steps": 470, "total_steps": 4334, "loss": 0.486, "lr": 9.997897746394684e-06, "epoch": 0.21683967704728951, "percentage": 10.84, "elapsed_time": "2:36:41", "remaining_time": "21:28:13"}
48
+ {"current_steps": 480, "total_steps": 4334, "loss": 0.4862, "lr": 9.996567768992642e-06, "epoch": 0.22145328719723184, "percentage": 11.08, "elapsed_time": "2:39:42", "remaining_time": "21:22:20"}
49
+ {"current_steps": 490, "total_steps": 4334, "loss": 0.486, "lr": 9.994913571468432e-06, "epoch": 0.22606689734717417, "percentage": 11.31, "elapsed_time": "2:43:11", "remaining_time": "21:20:09"}
50
+ {"current_steps": 500, "total_steps": 4334, "loss": 0.4468, "lr": 9.992935261160559e-06, "epoch": 0.2306805074971165, "percentage": 11.54, "elapsed_time": "2:46:31", "remaining_time": "21:16:58"}
51
+ {"current_steps": 500, "total_steps": 4334, "eval_loss": 0.39874735474586487, "epoch": 0.2306805074971165, "percentage": 11.54, "elapsed_time": "2:53:12", "remaining_time": "22:08:12"}
52
+ {"current_steps": 510, "total_steps": 4334, "loss": 0.4171, "lr": 9.990632966438743e-06, "epoch": 0.23529411764705882, "percentage": 11.77, "elapsed_time": "2:58:43", "remaining_time": "22:20:08"}
53
+ {"current_steps": 520, "total_steps": 4334, "loss": 0.4764, "lr": 9.988006836695593e-06, "epoch": 0.23990772779700115, "percentage": 12.0, "elapsed_time": "3:01:43", "remaining_time": "22:12:51"}
54
+ {"current_steps": 530, "total_steps": 4334, "loss": 0.4678, "lr": 9.985057042336898e-06, "epoch": 0.24452133794694347, "percentage": 12.23, "elapsed_time": "3:04:55", "remaining_time": "22:07:18"}
55
+ {"current_steps": 540, "total_steps": 4334, "loss": 0.4524, "lr": 9.981783774770595e-06, "epoch": 0.2491349480968858, "percentage": 12.46, "elapsed_time": "3:08:05", "remaining_time": "22:01:27"}
56
+ {"current_steps": 550, "total_steps": 4334, "loss": 0.4399, "lr": 9.97818724639432e-06, "epoch": 0.2537485582468281, "percentage": 12.69, "elapsed_time": "3:11:06", "remaining_time": "21:54:49"}
57
+ {"current_steps": 560, "total_steps": 4334, "loss": 0.4897, "lr": 9.974267690581646e-06, "epoch": 0.25836216839677045, "percentage": 12.92, "elapsed_time": "3:14:24", "remaining_time": "21:50:10"}
58
+ {"current_steps": 570, "total_steps": 4334, "loss": 0.4497, "lr": 9.970025361666934e-06, "epoch": 0.2629757785467128, "percentage": 13.15, "elapsed_time": "3:17:40", "remaining_time": "21:45:23"}
59
+ {"current_steps": 580, "total_steps": 4334, "loss": 0.4832, "lr": 9.965460534928827e-06, "epoch": 0.2675893886966551, "percentage": 13.38, "elapsed_time": "3:20:45", "remaining_time": "21:39:20"}
60
+ {"current_steps": 590, "total_steps": 4334, "loss": 0.4788, "lr": 9.960573506572391e-06, "epoch": 0.2722029988465975, "percentage": 13.61, "elapsed_time": "3:24:04", "remaining_time": "21:35:01"}
61
+ {"current_steps": 600, "total_steps": 4334, "loss": 0.4484, "lr": 9.95536459370989e-06, "epoch": 0.2768166089965398, "percentage": 13.84, "elapsed_time": "3:27:08", "remaining_time": "21:29:08"}
62
+ {"current_steps": 610, "total_steps": 4334, "loss": 0.4799, "lr": 9.949834134340219e-06, "epoch": 0.28143021914648214, "percentage": 14.07, "elapsed_time": "3:30:16", "remaining_time": "21:23:45"}
63
+ {"current_steps": 620, "total_steps": 4334, "loss": 0.4506, "lr": 9.94398248732696e-06, "epoch": 0.28604382929642447, "percentage": 14.31, "elapsed_time": "3:33:17", "remaining_time": "21:17:40"}
64
+ {"current_steps": 630, "total_steps": 4334, "loss": 0.5108, "lr": 9.9378100323751e-06, "epoch": 0.2906574394463668, "percentage": 14.54, "elapsed_time": "3:36:25", "remaining_time": "21:12:28"}
65
+ {"current_steps": 640, "total_steps": 4334, "loss": 0.4607, "lr": 9.931317170006398e-06, "epoch": 0.2952710495963091, "percentage": 14.77, "elapsed_time": "3:39:35", "remaining_time": "21:07:26"}
66
+ {"current_steps": 650, "total_steps": 4334, "loss": 0.4564, "lr": 9.924504321533387e-06, "epoch": 0.29988465974625145, "percentage": 15.0, "elapsed_time": "3:42:53", "remaining_time": "21:03:19"}
67
+ {"current_steps": 660, "total_steps": 4334, "loss": 0.447, "lr": 9.91737192903204e-06, "epoch": 0.3044982698961938, "percentage": 15.23, "elapsed_time": "3:46:03", "remaining_time": "20:58:22"}
68
+ {"current_steps": 670, "total_steps": 4334, "loss": 0.4712, "lr": 9.909920455313087e-06, "epoch": 0.3091118800461361, "percentage": 15.46, "elapsed_time": "3:49:32", "remaining_time": "20:55:19"}
69
+ {"current_steps": 680, "total_steps": 4334, "loss": 0.4831, "lr": 9.902150383891979e-06, "epoch": 0.3137254901960784, "percentage": 15.69, "elapsed_time": "3:52:35", "remaining_time": "20:49:52"}
70
+ {"current_steps": 690, "total_steps": 4334, "loss": 0.499, "lr": 9.894062218957517e-06, "epoch": 0.31833910034602075, "percentage": 15.92, "elapsed_time": "3:55:42", "remaining_time": "20:44:49"}
71
+ {"current_steps": 700, "total_steps": 4334, "loss": 0.4482, "lr": 9.885656485339129e-06, "epoch": 0.3229527104959631, "percentage": 16.15, "elapsed_time": "3:58:55", "remaining_time": "20:40:19"}
72
+ {"current_steps": 710, "total_steps": 4334, "loss": 0.4763, "lr": 9.876933728472826e-06, "epoch": 0.3275663206459054, "percentage": 16.38, "elapsed_time": "4:02:10", "remaining_time": "20:36:04"}
73
+ {"current_steps": 720, "total_steps": 4334, "loss": 0.4707, "lr": 9.867894514365802e-06, "epoch": 0.33217993079584773, "percentage": 16.61, "elapsed_time": "4:05:09", "remaining_time": "20:30:33"}
74
+ {"current_steps": 730, "total_steps": 4334, "loss": 0.472, "lr": 9.858539429559705e-06, "epoch": 0.33679354094579006, "percentage": 16.84, "elapsed_time": "4:08:24", "remaining_time": "20:26:25"}
75
+ {"current_steps": 740, "total_steps": 4334, "loss": 0.497, "lr": 9.848869081092581e-06, "epoch": 0.3414071510957324, "percentage": 17.07, "elapsed_time": "4:11:43", "remaining_time": "20:22:33"}
76
+ {"current_steps": 750, "total_steps": 4334, "loss": 0.4271, "lr": 9.838884096459486e-06, "epoch": 0.3460207612456747, "percentage": 17.31, "elapsed_time": "4:15:08", "remaining_time": "20:19:15"}
77
+ {"current_steps": 760, "total_steps": 4334, "loss": 0.4977, "lr": 9.828585123571763e-06, "epoch": 0.3506343713956171, "percentage": 17.54, "elapsed_time": "4:18:29", "remaining_time": "20:15:34"}
78
+ {"current_steps": 770, "total_steps": 4334, "loss": 0.4605, "lr": 9.817972830715003e-06, "epoch": 0.3552479815455594, "percentage": 17.77, "elapsed_time": "4:21:27", "remaining_time": "20:10:12"}
79
+ {"current_steps": 780, "total_steps": 4334, "loss": 0.4414, "lr": 9.807047906505683e-06, "epoch": 0.35986159169550175, "percentage": 18.0, "elapsed_time": "4:24:42", "remaining_time": "20:06:05"}
80
+ {"current_steps": 790, "total_steps": 4334, "loss": 0.4372, "lr": 9.795811059846476e-06, "epoch": 0.3644752018454441, "percentage": 18.23, "elapsed_time": "4:27:45", "remaining_time": "20:01:09"}
81
+ {"current_steps": 800, "total_steps": 4334, "loss": 0.486, "lr": 9.78426301988026e-06, "epoch": 0.3690888119953864, "percentage": 18.46, "elapsed_time": "4:30:50", "remaining_time": "19:56:28"}
82
+ {"current_steps": 810, "total_steps": 4334, "loss": 0.4645, "lr": 9.772404535942802e-06, "epoch": 0.3737024221453287, "percentage": 18.69, "elapsed_time": "4:34:12", "remaining_time": "19:52:57"}
83
+ {"current_steps": 820, "total_steps": 4334, "loss": 0.4503, "lr": 9.760236377514128e-06, "epoch": 0.37831603229527105, "percentage": 18.92, "elapsed_time": "4:37:17", "remaining_time": "19:48:16"}
84
+ {"current_steps": 830, "total_steps": 4334, "loss": 0.4602, "lr": 9.747759334168602e-06, "epoch": 0.3829296424452134, "percentage": 19.15, "elapsed_time": "4:40:22", "remaining_time": "19:43:40"}
85
+ {"current_steps": 840, "total_steps": 4334, "loss": 0.4535, "lr": 9.734974215523684e-06, "epoch": 0.3875432525951557, "percentage": 19.38, "elapsed_time": "4:43:41", "remaining_time": "19:40:02"}
86
+ {"current_steps": 850, "total_steps": 4334, "loss": 0.4569, "lr": 9.721881851187406e-06, "epoch": 0.39215686274509803, "percentage": 19.61, "elapsed_time": "4:46:50", "remaining_time": "19:35:43"}
87
+ {"current_steps": 860, "total_steps": 4334, "loss": 0.4359, "lr": 9.708483090704524e-06, "epoch": 0.39677047289504036, "percentage": 19.84, "elapsed_time": "4:50:03", "remaining_time": "19:31:42"}
88
+ {"current_steps": 870, "total_steps": 4334, "loss": 0.4573, "lr": 9.694778803501404e-06, "epoch": 0.4013840830449827, "percentage": 20.07, "elapsed_time": "4:53:09", "remaining_time": "19:27:16"}
89
+ {"current_steps": 880, "total_steps": 4334, "loss": 0.4737, "lr": 9.680769878829606e-06, "epoch": 0.405997693194925, "percentage": 20.3, "elapsed_time": "4:56:13", "remaining_time": "19:22:41"}
90
+ {"current_steps": 890, "total_steps": 4334, "loss": 0.4532, "lr": 9.666457225708175e-06, "epoch": 0.41061130334486734, "percentage": 20.54, "elapsed_time": "4:59:32", "remaining_time": "19:19:07"}
91
+ {"current_steps": 900, "total_steps": 4334, "loss": 0.4475, "lr": 9.65184177286466e-06, "epoch": 0.41522491349480967, "percentage": 20.77, "elapsed_time": "5:02:46", "remaining_time": "19:15:15"}
92
+ {"current_steps": 910, "total_steps": 4334, "loss": 0.4866, "lr": 9.636924468674856e-06, "epoch": 0.419838523644752, "percentage": 21.0, "elapsed_time": "5:05:52", "remaining_time": "19:10:54"}
93
+ {"current_steps": 920, "total_steps": 4334, "loss": 0.4437, "lr": 9.62170628110125e-06, "epoch": 0.4244521337946944, "percentage": 21.23, "elapsed_time": "5:09:14", "remaining_time": "19:07:31"}
94
+ {"current_steps": 930, "total_steps": 4334, "loss": 0.4349, "lr": 9.606188197630224e-06, "epoch": 0.4290657439446367, "percentage": 21.46, "elapsed_time": "5:12:31", "remaining_time": "19:03:56"}
95
+ {"current_steps": 940, "total_steps": 4334, "loss": 0.4917, "lr": 9.590371225207981e-06, "epoch": 0.43367935409457903, "percentage": 21.69, "elapsed_time": "5:15:49", "remaining_time": "19:00:19"}
96
+ {"current_steps": 950, "total_steps": 4334, "loss": 0.4682, "lr": 9.574256390175192e-06, "epoch": 0.43829296424452135, "percentage": 21.92, "elapsed_time": "5:19:05", "remaining_time": "18:56:37"}
97
+ {"current_steps": 960, "total_steps": 4334, "loss": 0.4912, "lr": 9.557844738200408e-06, "epoch": 0.4429065743944637, "percentage": 22.15, "elapsed_time": "5:22:33", "remaining_time": "18:53:38"}
98
+ {"current_steps": 970, "total_steps": 4334, "loss": 0.461, "lr": 9.541137334212212e-06, "epoch": 0.447520184544406, "percentage": 22.38, "elapsed_time": "5:25:46", "remaining_time": "18:49:49"}
99
+ {"current_steps": 980, "total_steps": 4334, "loss": 0.4414, "lr": 9.524135262330098e-06, "epoch": 0.45213379469434833, "percentage": 22.61, "elapsed_time": "5:29:14", "remaining_time": "18:46:49"}
100
+ {"current_steps": 990, "total_steps": 4334, "loss": 0.4457, "lr": 9.506839625794152e-06, "epoch": 0.45674740484429066, "percentage": 22.84, "elapsed_time": "5:32:45", "remaining_time": "18:44:00"}
101
+ {"current_steps": 1000, "total_steps": 4334, "loss": 0.4457, "lr": 9.489251546893441e-06, "epoch": 0.461361014994233, "percentage": 23.07, "elapsed_time": "5:35:59", "remaining_time": "18:40:10"}
102
+ {"current_steps": 1000, "total_steps": 4334, "eval_loss": 0.386065274477005, "epoch": 0.461361014994233, "percentage": 23.07, "elapsed_time": "5:42:39", "remaining_time": "19:02:26"}
103
+ {"current_steps": 1010, "total_steps": 4334, "loss": 0.4457, "lr": 9.4713721668932e-06, "epoch": 0.4659746251441753, "percentage": 23.3, "elapsed_time": "5:48:22", "remaining_time": "19:06:31"}
104
+ {"current_steps": 1020, "total_steps": 4334, "loss": 0.4343, "lr": 9.453202645960775e-06, "epoch": 0.47058823529411764, "percentage": 23.53, "elapsed_time": "5:51:32", "remaining_time": "19:02:11"}
105
+ {"current_steps": 1030, "total_steps": 4334, "loss": 0.402, "lr": 9.434744163090341e-06, "epoch": 0.47520184544405997, "percentage": 23.77, "elapsed_time": "5:54:42", "remaining_time": "18:57:49"}
106
+ {"current_steps": 1040, "total_steps": 4334, "loss": 0.4742, "lr": 9.415997916026401e-06, "epoch": 0.4798154555940023, "percentage": 24.0, "elapsed_time": "5:58:08", "remaining_time": "18:54:19"}
107
+ {"current_steps": 1050, "total_steps": 4334, "loss": 0.4487, "lr": 9.396965121186058e-06, "epoch": 0.4844290657439446, "percentage": 24.23, "elapsed_time": "6:01:12", "remaining_time": "18:49:41"}
108
+ {"current_steps": 1060, "total_steps": 4334, "loss": 0.449, "lr": 9.377647013580102e-06, "epoch": 0.48904267589388695, "percentage": 24.46, "elapsed_time": "6:04:22", "remaining_time": "18:45:26"}
109
+ {"current_steps": 1070, "total_steps": 4334, "loss": 0.4591, "lr": 9.358044846732848e-06, "epoch": 0.4936562860438293, "percentage": 24.69, "elapsed_time": "6:07:37", "remaining_time": "18:41:24"}
110
+ {"current_steps": 1080, "total_steps": 4334, "loss": 0.457, "lr": 9.338159892600809e-06, "epoch": 0.4982698961937716, "percentage": 24.92, "elapsed_time": "6:11:04", "remaining_time": "18:38:01"}
111
+ {"current_steps": 1090, "total_steps": 4334, "loss": 0.4863, "lr": 9.317993441490163e-06, "epoch": 0.5028835063437139, "percentage": 25.15, "elapsed_time": "6:14:13", "remaining_time": "18:33:44"}
112
+ {"current_steps": 1100, "total_steps": 4334, "loss": 0.4863, "lr": 9.297546801973027e-06, "epoch": 0.5074971164936563, "percentage": 25.38, "elapsed_time": "6:17:25", "remaining_time": "18:29:38"}
113
+ {"current_steps": 1110, "total_steps": 4334, "loss": 0.4445, "lr": 9.276821300802535e-06, "epoch": 0.5121107266435986, "percentage": 25.61, "elapsed_time": "6:20:47", "remaining_time": "18:26:01"}
114
+ {"current_steps": 1120, "total_steps": 4334, "loss": 0.4654, "lr": 9.255818282826755e-06, "epoch": 0.5167243367935409, "percentage": 25.84, "elapsed_time": "6:23:59", "remaining_time": "18:21:56"}
115
+ {"current_steps": 1130, "total_steps": 4334, "loss": 0.4164, "lr": 9.23453911090143e-06, "epoch": 0.5213379469434832, "percentage": 26.07, "elapsed_time": "6:27:09", "remaining_time": "18:17:44"}
116
+ {"current_steps": 1140, "total_steps": 4334, "loss": 0.4471, "lr": 9.21298516580153e-06, "epoch": 0.5259515570934256, "percentage": 26.3, "elapsed_time": "6:30:26", "remaining_time": "18:13:54"}
117
+ {"current_steps": 1150, "total_steps": 4334, "loss": 0.438, "lr": 9.191157846131662e-06, "epoch": 0.5305651672433679, "percentage": 26.53, "elapsed_time": "6:33:37", "remaining_time": "18:09:48"}
118
+ {"current_steps": 1160, "total_steps": 4334, "loss": 0.4754, "lr": 9.169058568235324e-06, "epoch": 0.5351787773933102, "percentage": 26.77, "elapsed_time": "6:36:45", "remaining_time": "18:05:35"}
119
+ {"current_steps": 1170, "total_steps": 4334, "loss": 0.4622, "lr": 9.146688766102985e-06, "epoch": 0.5397923875432526, "percentage": 27.0, "elapsed_time": "6:39:43", "remaining_time": "18:00:58"}
120
+ {"current_steps": 1180, "total_steps": 4334, "loss": 0.4778, "lr": 9.124049891279052e-06, "epoch": 0.544405997693195, "percentage": 27.23, "elapsed_time": "6:42:43", "remaining_time": "17:56:26"}
121
+ {"current_steps": 1190, "total_steps": 4334, "loss": 0.43, "lr": 9.101143412767665e-06, "epoch": 0.5490196078431373, "percentage": 27.46, "elapsed_time": "6:45:57", "remaining_time": "17:52:33"}
122
+ {"current_steps": 1200, "total_steps": 4334, "loss": 0.4398, "lr": 9.077970816937394e-06, "epoch": 0.5536332179930796, "percentage": 27.69, "elapsed_time": "6:49:14", "remaining_time": "17:48:48"}
123
+ {"current_steps": 1210, "total_steps": 4334, "loss": 0.4509, "lr": 9.05453360742477e-06, "epoch": 0.558246828143022, "percentage": 27.92, "elapsed_time": "6:52:38", "remaining_time": "17:45:21"}
124
+ {"current_steps": 1220, "total_steps": 4334, "loss": 0.4322, "lr": 9.030833305036732e-06, "epoch": 0.5628604382929643, "percentage": 28.15, "elapsed_time": "6:55:53", "remaining_time": "17:41:33"}
125
+ {"current_steps": 1230, "total_steps": 4334, "loss": 0.4463, "lr": 9.006871447651941e-06, "epoch": 0.5674740484429066, "percentage": 28.38, "elapsed_time": "6:59:08", "remaining_time": "17:37:42"}
126
+ {"current_steps": 1240, "total_steps": 4334, "loss": 0.4744, "lr": 8.982649590120982e-06, "epoch": 0.5720876585928489, "percentage": 28.61, "elapsed_time": "7:02:31", "remaining_time": "17:34:16"}
127
+ {"current_steps": 1250, "total_steps": 4334, "loss": 0.4506, "lr": 8.95816930416548e-06, "epoch": 0.5767012687427913, "percentage": 28.84, "elapsed_time": "7:05:42", "remaining_time": "17:30:18"}
128
+ {"current_steps": 1260, "total_steps": 4334, "loss": 0.5047, "lr": 8.933432178276108e-06, "epoch": 0.5813148788927336, "percentage": 29.07, "elapsed_time": "7:09:05", "remaining_time": "17:26:49"}
129
+ {"current_steps": 1270, "total_steps": 4334, "loss": 0.4331, "lr": 8.908439817609514e-06, "epoch": 0.5859284890426759, "percentage": 29.3, "elapsed_time": "7:12:23", "remaining_time": "17:23:10"}
130
+ {"current_steps": 1280, "total_steps": 4334, "loss": 0.4869, "lr": 8.883193843884169e-06, "epoch": 0.5905420991926182, "percentage": 29.53, "elapsed_time": "7:15:44", "remaining_time": "17:19:39"}
131
+ {"current_steps": 1290, "total_steps": 4334, "loss": 0.4568, "lr": 8.857695895275127e-06, "epoch": 0.5951557093425606, "percentage": 29.76, "elapsed_time": "7:18:47", "remaining_time": "17:15:25"}
132
+ {"current_steps": 1300, "total_steps": 4334, "loss": 0.4291, "lr": 8.831947626307735e-06, "epoch": 0.5997693194925029, "percentage": 30.0, "elapsed_time": "7:21:54", "remaining_time": "17:11:19"}
133
+ {"current_steps": 1310, "total_steps": 4334, "loss": 0.4864, "lr": 8.805950707750268e-06, "epoch": 0.6043829296424452, "percentage": 30.23, "elapsed_time": "7:25:20", "remaining_time": "17:08:00"}
134
+ {"current_steps": 1320, "total_steps": 4334, "loss": 0.4755, "lr": 8.779706826505513e-06, "epoch": 0.6089965397923875, "percentage": 30.46, "elapsed_time": "7:28:46", "remaining_time": "17:04:42"}
135
+ {"current_steps": 1330, "total_steps": 4334, "loss": 0.4429, "lr": 8.753217685501317e-06, "epoch": 0.6136101499423299, "percentage": 30.69, "elapsed_time": "7:32:05", "remaining_time": "17:01:06"}
136
+ {"current_steps": 1340, "total_steps": 4334, "loss": 0.4799, "lr": 8.72648500358008e-06, "epoch": 0.6182237600922722, "percentage": 30.92, "elapsed_time": "7:35:13", "remaining_time": "16:57:08"}
137
+ {"current_steps": 1350, "total_steps": 4334, "loss": 0.4238, "lr": 8.699510515387222e-06, "epoch": 0.6228373702422145, "percentage": 31.15, "elapsed_time": "7:38:34", "remaining_time": "16:53:36"}
138
+ {"current_steps": 1360, "total_steps": 4334, "loss": 0.4621, "lr": 8.672295971258624e-06, "epoch": 0.6274509803921569, "percentage": 31.38, "elapsed_time": "7:41:43", "remaining_time": "16:49:40"}
139
+ {"current_steps": 1370, "total_steps": 4334, "loss": 0.482, "lr": 8.644843137107058e-06, "epoch": 0.6320645905420992, "percentage": 31.61, "elapsed_time": "7:45:08", "remaining_time": "16:46:19"}
140
+ {"current_steps": 1380, "total_steps": 4334, "loss": 0.4138, "lr": 8.617153794307588e-06, "epoch": 0.6366782006920415, "percentage": 31.84, "elapsed_time": "7:48:25", "remaining_time": "16:42:42"}
141
+ {"current_steps": 1390, "total_steps": 4334, "loss": 0.4808, "lr": 8.58922973958199e-06, "epoch": 0.6412918108419838, "percentage": 32.07, "elapsed_time": "7:51:51", "remaining_time": "16:39:24"}
142
+ {"current_steps": 1400, "total_steps": 4334, "loss": 0.4196, "lr": 8.561072784882156e-06, "epoch": 0.6459054209919262, "percentage": 32.3, "elapsed_time": "7:54:59", "remaining_time": "16:35:26"}
143
+ {"current_steps": 1410, "total_steps": 4334, "loss": 0.4675, "lr": 8.532684757272527e-06, "epoch": 0.6505190311418685, "percentage": 32.53, "elapsed_time": "7:58:17", "remaining_time": "16:31:51"}
144
+ {"current_steps": 1420, "total_steps": 4334, "loss": 0.4585, "lr": 8.504067498811533e-06, "epoch": 0.6551326412918108, "percentage": 32.76, "elapsed_time": "8:01:34", "remaining_time": "16:28:14"}
145
+ {"current_steps": 1430, "total_steps": 4334, "loss": 0.4557, "lr": 8.475222866432065e-06, "epoch": 0.6597462514417531, "percentage": 32.99, "elapsed_time": "8:04:56", "remaining_time": "16:24:49"}
146
+ {"current_steps": 1440, "total_steps": 4334, "loss": 0.4378, "lr": 8.446152731820984e-06, "epoch": 0.6643598615916955, "percentage": 33.23, "elapsed_time": "8:08:20", "remaining_time": "16:21:26"}
147
+ {"current_steps": 1450, "total_steps": 4334, "loss": 0.482, "lr": 8.416858981297663e-06, "epoch": 0.6689734717416378, "percentage": 33.46, "elapsed_time": "8:11:53", "remaining_time": "16:18:20"}
148
+ {"current_steps": 1460, "total_steps": 4334, "loss": 0.4153, "lr": 8.387343515691594e-06, "epoch": 0.6735870818915801, "percentage": 33.69, "elapsed_time": "8:15:16", "remaining_time": "16:14:57"}
149
+ {"current_steps": 1470, "total_steps": 4334, "loss": 0.4619, "lr": 8.357608250219046e-06, "epoch": 0.6782006920415224, "percentage": 33.92, "elapsed_time": "8:18:40", "remaining_time": "16:11:34"}
150
+ {"current_steps": 1480, "total_steps": 4334, "loss": 0.4327, "lr": 8.327655114358782e-06, "epoch": 0.6828143021914648, "percentage": 34.15, "elapsed_time": "8:21:55", "remaining_time": "16:07:54"}
151
+ {"current_steps": 1490, "total_steps": 4334, "loss": 0.4713, "lr": 8.297486051726864e-06, "epoch": 0.6874279123414071, "percentage": 34.38, "elapsed_time": "8:25:06", "remaining_time": "16:04:05"}
152
+ {"current_steps": 1500, "total_steps": 4334, "loss": 0.4197, "lr": 8.267103019950529e-06, "epoch": 0.6920415224913494, "percentage": 34.61, "elapsed_time": "8:28:22", "remaining_time": "16:00:28"}
153
+ {"current_steps": 1500, "total_steps": 4334, "eval_loss": 0.3744993507862091, "epoch": 0.6920415224913494, "percentage": 34.61, "elapsed_time": "8:35:11", "remaining_time": "16:13:22"}
154
+ {"current_steps": 1510, "total_steps": 4334, "loss": 0.4525, "lr": 8.23650799054117e-06, "epoch": 0.6966551326412919, "percentage": 34.84, "elapsed_time": "8:40:54", "remaining_time": "16:14:11"}
155
+ {"current_steps": 1520, "total_steps": 4334, "loss": 0.4344, "lr": 8.2057029487664e-06, "epoch": 0.7012687427912342, "percentage": 35.07, "elapsed_time": "8:44:14", "remaining_time": "16:10:32"}
156
+ {"current_steps": 1530, "total_steps": 4334, "loss": 0.4456, "lr": 8.174689893521239e-06, "epoch": 0.7058823529411765, "percentage": 35.3, "elapsed_time": "8:47:41", "remaining_time": "16:07:06"}
157
+ {"current_steps": 1540, "total_steps": 4334, "loss": 0.4342, "lr": 8.143470837198394e-06, "epoch": 0.7104959630911188, "percentage": 35.53, "elapsed_time": "8:51:04", "remaining_time": "16:03:30"}
158
+ {"current_steps": 1550, "total_steps": 4334, "loss": 0.4407, "lr": 8.112047805557693e-06, "epoch": 0.7151095732410612, "percentage": 35.76, "elapsed_time": "8:54:27", "remaining_time": "15:59:58"}
159
+ {"current_steps": 1560, "total_steps": 4334, "loss": 0.4188, "lr": 8.080422837594627e-06, "epoch": 0.7197231833910035, "percentage": 35.99, "elapsed_time": "8:57:39", "remaining_time": "15:56:04"}
160
+ {"current_steps": 1570, "total_steps": 4334, "loss": 0.4594, "lr": 8.048597985408047e-06, "epoch": 0.7243367935409458, "percentage": 36.23, "elapsed_time": "9:00:59", "remaining_time": "15:52:25"}
161
+ {"current_steps": 1580, "total_steps": 4334, "loss": 0.4549, "lr": 8.016575314067005e-06, "epoch": 0.7289504036908881, "percentage": 36.46, "elapsed_time": "9:04:10", "remaining_time": "15:48:31"}
162
+ {"current_steps": 1590, "total_steps": 4334, "loss": 0.4548, "lr": 7.984356901476755e-06, "epoch": 0.7335640138408305, "percentage": 36.69, "elapsed_time": "9:07:27", "remaining_time": "15:44:48"}
163
+ {"current_steps": 1600, "total_steps": 4334, "loss": 0.4452, "lr": 7.951944838243916e-06, "epoch": 0.7381776239907728, "percentage": 36.92, "elapsed_time": "9:10:40", "remaining_time": "15:40:58"}
164
+ {"current_steps": 1610, "total_steps": 4334, "loss": 0.4491, "lr": 7.919341227540828e-06, "epoch": 0.7427912341407151, "percentage": 37.15, "elapsed_time": "9:13:54", "remaining_time": "15:37:10"}
165
+ {"current_steps": 1620, "total_steps": 4334, "loss": 0.4731, "lr": 7.886548184969063e-06, "epoch": 0.7474048442906575, "percentage": 37.38, "elapsed_time": "9:17:25", "remaining_time": "15:33:51"}
166
+ {"current_steps": 1630, "total_steps": 4334, "loss": 0.432, "lr": 7.85356783842216e-06, "epoch": 0.7520184544405998, "percentage": 37.61, "elapsed_time": "9:20:44", "remaining_time": "15:30:12"}
167
+ {"current_steps": 1640, "total_steps": 4334, "loss": 0.461, "lr": 7.820402327947543e-06, "epoch": 0.7566320645905421, "percentage": 37.84, "elapsed_time": "9:24:02", "remaining_time": "15:26:33"}
168
+ {"current_steps": 1650, "total_steps": 4334, "loss": 0.4118, "lr": 7.78705380560766e-06, "epoch": 0.7612456747404844, "percentage": 38.07, "elapsed_time": "9:27:34", "remaining_time": "15:23:16"}
169
+ {"current_steps": 1660, "total_steps": 4334, "loss": 0.445, "lr": 7.753524435340334e-06, "epoch": 0.7658592848904268, "percentage": 38.3, "elapsed_time": "9:31:08", "remaining_time": "15:20:00"}
170
+ {"current_steps": 1670, "total_steps": 4334, "loss": 0.453, "lr": 7.719816392818354e-06, "epoch": 0.7704728950403691, "percentage": 38.53, "elapsed_time": "9:34:37", "remaining_time": "15:16:38"}
171
+ {"current_steps": 1680, "total_steps": 4334, "loss": 0.4424, "lr": 7.685931865308293e-06, "epoch": 0.7750865051903114, "percentage": 38.76, "elapsed_time": "9:38:04", "remaining_time": "15:13:12"}
172
+ {"current_steps": 1690, "total_steps": 4334, "loss": 0.4164, "lr": 7.651873051528582e-06, "epoch": 0.7797001153402537, "percentage": 38.99, "elapsed_time": "9:41:19", "remaining_time": "15:09:28"}
173
+ {"current_steps": 1700, "total_steps": 4334, "loss": 0.4345, "lr": 7.617642161506837e-06, "epoch": 0.7843137254901961, "percentage": 39.22, "elapsed_time": "9:44:23", "remaining_time": "15:05:27"}
174
+ {"current_steps": 1710, "total_steps": 4334, "loss": 0.4373, "lr": 7.583241416436462e-06, "epoch": 0.7889273356401384, "percentage": 39.46, "elapsed_time": "9:47:35", "remaining_time": "15:01:39"}
175
+ {"current_steps": 1720, "total_steps": 4334, "loss": 0.4146, "lr": 7.548673048532504e-06, "epoch": 0.7935409457900807, "percentage": 39.69, "elapsed_time": "9:50:49", "remaining_time": "14:57:55"}
176
+ {"current_steps": 1730, "total_steps": 4334, "loss": 0.4008, "lr": 7.513939300886816e-06, "epoch": 0.798154555940023, "percentage": 39.92, "elapsed_time": "9:53:50", "remaining_time": "14:53:50"}
177
+ {"current_steps": 1740, "total_steps": 4334, "loss": 0.4401, "lr": 7.479042427322509e-06, "epoch": 0.8027681660899654, "percentage": 40.15, "elapsed_time": "9:56:59", "remaining_time": "14:49:59"}
178
+ {"current_steps": 1750, "total_steps": 4334, "loss": 0.4565, "lr": 7.443984692247701e-06, "epoch": 0.8073817762399077, "percentage": 40.38, "elapsed_time": "10:00:04", "remaining_time": "14:46:02"}
179
+ {"current_steps": 1760, "total_steps": 4334, "loss": 0.432, "lr": 7.408768370508577e-06, "epoch": 0.81199538638985, "percentage": 40.61, "elapsed_time": "10:03:28", "remaining_time": "14:42:35"}
180
+ {"current_steps": 1770, "total_steps": 4334, "loss": 0.3847, "lr": 7.373395747241792e-06, "epoch": 0.8166089965397924, "percentage": 40.84, "elapsed_time": "10:06:52", "remaining_time": "14:39:06"}
181
+ {"current_steps": 1780, "total_steps": 4334, "loss": 0.412, "lr": 7.337869117726176e-06, "epoch": 0.8212226066897347, "percentage": 41.07, "elapsed_time": "10:10:13", "remaining_time": "14:35:34"}
182
+ {"current_steps": 1790, "total_steps": 4334, "loss": 0.4462, "lr": 7.302190787233808e-06, "epoch": 0.825836216839677, "percentage": 41.3, "elapsed_time": "10:13:28", "remaining_time": "14:31:53"}
183
+ {"current_steps": 1800, "total_steps": 4334, "loss": 0.4321, "lr": 7.266363070880424e-06, "epoch": 0.8304498269896193, "percentage": 41.53, "elapsed_time": "10:16:40", "remaining_time": "14:28:08"}
184
+ {"current_steps": 1810, "total_steps": 4334, "loss": 0.4477, "lr": 7.2303882934751965e-06, "epoch": 0.8350634371395617, "percentage": 41.76, "elapsed_time": "10:19:58", "remaining_time": "14:24:32"}
185
+ {"current_steps": 1820, "total_steps": 4334, "loss": 0.4028, "lr": 7.194268789369875e-06, "epoch": 0.839677047289504, "percentage": 41.99, "elapsed_time": "10:23:15", "remaining_time": "14:20:55"}
186
+ {"current_steps": 1830, "total_steps": 4334, "loss": 0.457, "lr": 7.158006902307322e-06, "epoch": 0.8442906574394463, "percentage": 42.22, "elapsed_time": "10:26:25", "remaining_time": "14:17:07"}
187
+ {"current_steps": 1840, "total_steps": 4334, "loss": 0.4248, "lr": 7.121604985269423e-06, "epoch": 0.8489042675893888, "percentage": 42.46, "elapsed_time": "10:29:30", "remaining_time": "14:13:15"}
188
+ {"current_steps": 1850, "total_steps": 4334, "loss": 0.4731, "lr": 7.085065400324407e-06, "epoch": 0.8535178777393311, "percentage": 42.69, "elapsed_time": "10:32:40", "remaining_time": "14:09:29"}
189
+ {"current_steps": 1860, "total_steps": 4334, "loss": 0.3925, "lr": 7.048390518473579e-06, "epoch": 0.8581314878892734, "percentage": 42.92, "elapsed_time": "10:35:53", "remaining_time": "14:05:47"}
190
+ {"current_steps": 1870, "total_steps": 4334, "loss": 0.4481, "lr": 7.011582719497466e-06, "epoch": 0.8627450980392157, "percentage": 43.15, "elapsed_time": "10:39:11", "remaining_time": "14:02:14"}
191
+ {"current_steps": 1880, "total_steps": 4334, "loss": 0.4487, "lr": 6.974644391801395e-06, "epoch": 0.8673587081891581, "percentage": 43.38, "elapsed_time": "10:42:18", "remaining_time": "13:58:24"}
192
+ {"current_steps": 1890, "total_steps": 4334, "loss": 0.4424, "lr": 6.9375779322605154e-06, "epoch": 0.8719723183391004, "percentage": 43.61, "elapsed_time": "10:45:26", "remaining_time": "13:54:37"}
193
+ {"current_steps": 1900, "total_steps": 4334, "loss": 0.4628, "lr": 6.900385746064268e-06, "epoch": 0.8765859284890427, "percentage": 43.84, "elapsed_time": "10:48:39", "remaining_time": "13:50:57"}
194
+ {"current_steps": 1910, "total_steps": 4334, "loss": 0.4194, "lr": 6.863070246560319e-06, "epoch": 0.881199538638985, "percentage": 44.07, "elapsed_time": "10:51:57", "remaining_time": "13:47:24"}
195
+ {"current_steps": 1920, "total_steps": 4334, "loss": 0.4404, "lr": 6.825633855097954e-06, "epoch": 0.8858131487889274, "percentage": 44.3, "elapsed_time": "10:55:19", "remaining_time": "13:43:56"}
196
+ {"current_steps": 1930, "total_steps": 4334, "loss": 0.4654, "lr": 6.788079000870966e-06, "epoch": 0.8904267589388697, "percentage": 44.53, "elapsed_time": "10:58:42", "remaining_time": "13:40:29"}
197
+ {"current_steps": 1940, "total_steps": 4334, "loss": 0.4849, "lr": 6.7504081207600295e-06, "epoch": 0.895040369088812, "percentage": 44.76, "elapsed_time": "11:02:10", "remaining_time": "13:37:08"}
198
+ {"current_steps": 1950, "total_steps": 4334, "loss": 0.4286, "lr": 6.712623659174569e-06, "epoch": 0.8996539792387543, "percentage": 44.99, "elapsed_time": "11:05:27", "remaining_time": "13:33:33"}
199
+ {"current_steps": 1960, "total_steps": 4334, "loss": 0.4271, "lr": 6.674728067894149e-06, "epoch": 0.9042675893886967, "percentage": 45.22, "elapsed_time": "11:08:58", "remaining_time": "13:30:16"}
200
+ {"current_steps": 1970, "total_steps": 4334, "loss": 0.4384, "lr": 6.636723805909384e-06, "epoch": 0.908881199538639, "percentage": 45.45, "elapsed_time": "11:12:18", "remaining_time": "13:26:46"}
201
+ {"current_steps": 1980, "total_steps": 4334, "loss": 0.4058, "lr": 6.598613339262369e-06, "epoch": 0.9134948096885813, "percentage": 45.69, "elapsed_time": "11:15:27", "remaining_time": "13:23:02"}
202
+ {"current_steps": 1990, "total_steps": 4334, "loss": 0.4047, "lr": 6.560399140886673e-06, "epoch": 0.9181084198385236, "percentage": 45.92, "elapsed_time": "11:18:52", "remaining_time": "13:19:38"}
203
+ {"current_steps": 2000, "total_steps": 4334, "loss": 0.4264, "lr": 6.522083690446863e-06, "epoch": 0.922722029988466, "percentage": 46.15, "elapsed_time": "11:22:19", "remaining_time": "13:16:16"}
204
+ {"current_steps": 2000, "total_steps": 4334, "eval_loss": 0.3640458583831787, "epoch": 0.922722029988466, "percentage": 46.15, "elapsed_time": "11:29:04", "remaining_time": "13:24:09"}
205
+ {"current_steps": 2010, "total_steps": 4334, "loss": 0.4309, "lr": 6.483669474177609e-06, "epoch": 0.9273356401384083, "percentage": 46.38, "elapsed_time": "11:35:08", "remaining_time": "13:23:44"}
206
+ {"current_steps": 2020, "total_steps": 4334, "loss": 0.4321, "lr": 6.445158984722358e-06, "epoch": 0.9319492502883506, "percentage": 46.61, "elapsed_time": "11:38:39", "remaining_time": "13:20:20"}
207
+ {"current_steps": 2030, "total_steps": 4334, "loss": 0.4118, "lr": 6.406554720971583e-06, "epoch": 0.936562860438293, "percentage": 46.84, "elapsed_time": "11:41:54", "remaining_time": "13:16:38"}
208
+ {"current_steps": 2040, "total_steps": 4334, "loss": 0.4508, "lr": 6.367859187900635e-06, "epoch": 0.9411764705882353, "percentage": 47.07, "elapsed_time": "11:45:06", "remaining_time": "13:12:53"}
209
+ {"current_steps": 2050, "total_steps": 4334, "loss": 0.4088, "lr": 6.329074896407202e-06, "epoch": 0.9457900807381776, "percentage": 47.3, "elapsed_time": "11:48:15", "remaining_time": "13:09:06"}
210
+ {"current_steps": 2060, "total_steps": 4334, "loss": 0.4, "lr": 6.29020436314838e-06, "epoch": 0.9504036908881199, "percentage": 47.53, "elapsed_time": "11:51:42", "remaining_time": "13:05:38"}
211
+ {"current_steps": 2070, "total_steps": 4334, "loss": 0.4122, "lr": 6.251250110377368e-06, "epoch": 0.9550173010380623, "percentage": 47.76, "elapsed_time": "11:55:12", "remaining_time": "13:02:14"}
212
+ {"current_steps": 2080, "total_steps": 4334, "loss": 0.4449, "lr": 6.212214665779805e-06, "epoch": 0.9596309111880046, "percentage": 47.99, "elapsed_time": "11:58:37", "remaining_time": "12:58:44"}
213
+ {"current_steps": 2090, "total_steps": 4334, "loss": 0.4229, "lr": 6.173100562309751e-06, "epoch": 0.9642445213379469, "percentage": 48.22, "elapsed_time": "12:01:49", "remaining_time": "12:55:00"}
214
+ {"current_steps": 2100, "total_steps": 4334, "loss": 0.4389, "lr": 6.133910338025329e-06, "epoch": 0.9688581314878892, "percentage": 48.45, "elapsed_time": "12:05:22", "remaining_time": "12:51:39"}
215
+ {"current_steps": 2110, "total_steps": 4334, "loss": 0.4459, "lr": 6.094646535924026e-06, "epoch": 0.9734717416378316, "percentage": 48.68, "elapsed_time": "12:08:47", "remaining_time": "12:48:10"}
216
+ {"current_steps": 2120, "total_steps": 4334, "loss": 0.4556, "lr": 6.055311703777699e-06, "epoch": 0.9780853517877739, "percentage": 48.92, "elapsed_time": "12:11:52", "remaining_time": "12:44:19"}
217
+ {"current_steps": 2130, "total_steps": 4334, "loss": 0.4837, "lr": 6.0159083939672326e-06, "epoch": 0.9826989619377162, "percentage": 49.15, "elapsed_time": "12:14:59", "remaining_time": "12:40:31"}
218
+ {"current_steps": 2140, "total_steps": 4334, "loss": 0.4119, "lr": 5.976439163316936e-06, "epoch": 0.9873125720876585, "percentage": 49.38, "elapsed_time": "12:18:20", "remaining_time": "12:36:58"}
219
+ {"current_steps": 2150, "total_steps": 4334, "loss": 0.4391, "lr": 5.936906572928625e-06, "epoch": 0.9919261822376009, "percentage": 49.61, "elapsed_time": "12:21:46", "remaining_time": "12:33:30"}
220
+ {"current_steps": 2160, "total_steps": 4334, "loss": 0.4175, "lr": 5.897313188015433e-06, "epoch": 0.9965397923875432, "percentage": 49.84, "elapsed_time": "12:25:00", "remaining_time": "12:29:50"}
221
+ {"current_steps": 2170, "total_steps": 4334, "loss": 0.4176, "lr": 5.8576615777353725e-06, "epoch": 1.0009227220299886, "percentage": 50.07, "elapsed_time": "12:28:05", "remaining_time": "12:26:01"}
222
+ {"current_steps": 2180, "total_steps": 4334, "loss": 0.3182, "lr": 5.81795431502461e-06, "epoch": 1.0055363321799309, "percentage": 50.3, "elapsed_time": "12:31:27", "remaining_time": "12:22:30"}
223
+ {"current_steps": 2190, "total_steps": 4334, "loss": 0.3412, "lr": 5.778193976430518e-06, "epoch": 1.0101499423298732, "percentage": 50.53, "elapsed_time": "12:34:43", "remaining_time": "12:18:51"}
224
+ {"current_steps": 2200, "total_steps": 4334, "loss": 0.3254, "lr": 5.738383141944493e-06, "epoch": 1.0147635524798155, "percentage": 50.76, "elapsed_time": "12:37:52", "remaining_time": "12:15:08"}
225
+ {"current_steps": 2210, "total_steps": 4334, "loss": 0.3121, "lr": 5.698524394834531e-06, "epoch": 1.0193771626297579, "percentage": 50.99, "elapsed_time": "12:41:08", "remaining_time": "12:11:31"}
226
+ {"current_steps": 2220, "total_steps": 4334, "loss": 0.309, "lr": 5.658620321477613e-06, "epoch": 1.0239907727797002, "percentage": 51.22, "elapsed_time": "12:44:23", "remaining_time": "12:07:53"}
227
+ {"current_steps": 2230, "total_steps": 4334, "loss": 0.2945, "lr": 5.6186735111918735e-06, "epoch": 1.0286043829296425, "percentage": 51.45, "elapsed_time": "12:47:40", "remaining_time": "12:04:17"}
228
+ {"current_steps": 2240, "total_steps": 4334, "loss": 0.3277, "lr": 5.5786865560685855e-06, "epoch": 1.0332179930795848, "percentage": 51.68, "elapsed_time": "12:50:51", "remaining_time": "12:00:36"}
229
+ {"current_steps": 2250, "total_steps": 4334, "loss": 0.3337, "lr": 5.538662050803965e-06, "epoch": 1.0378316032295272, "percentage": 51.92, "elapsed_time": "12:54:19", "remaining_time": "11:57:11"}
230
+ {"current_steps": 2260, "total_steps": 4334, "loss": 0.3145, "lr": 5.498602592530799e-06, "epoch": 1.0424452133794695, "percentage": 52.15, "elapsed_time": "12:57:33", "remaining_time": "11:53:33"}
231
+ {"current_steps": 2270, "total_steps": 4334, "loss": 0.3016, "lr": 5.458510780649932e-06, "epoch": 1.0470588235294118, "percentage": 52.38, "elapsed_time": "13:00:53", "remaining_time": "11:50:01"}
232
+ {"current_steps": 2280, "total_steps": 4334, "loss": 0.3107, "lr": 5.41838921666158e-06, "epoch": 1.0516724336793541, "percentage": 52.61, "elapsed_time": "13:04:04", "remaining_time": "11:46:21"}
233
+ {"current_steps": 2290, "total_steps": 4334, "loss": 0.313, "lr": 5.378240503996531e-06, "epoch": 1.0562860438292965, "percentage": 52.84, "elapsed_time": "13:07:15", "remaining_time": "11:42:40"}
234
+ {"current_steps": 2300, "total_steps": 4334, "loss": 0.3186, "lr": 5.338067247847219e-06, "epoch": 1.0608996539792388, "percentage": 53.07, "elapsed_time": "13:10:42", "remaining_time": "11:39:15"}
235
+ {"current_steps": 2310, "total_steps": 4334, "loss": 0.3198, "lr": 5.297872054998663e-06, "epoch": 1.0655132641291811, "percentage": 53.3, "elapsed_time": "13:14:08", "remaining_time": "11:35:48"}
236
+ {"current_steps": 2320, "total_steps": 4334, "loss": 0.3181, "lr": 5.257657533659326e-06, "epoch": 1.0701268742791235, "percentage": 53.53, "elapsed_time": "13:17:24", "remaining_time": "11:32:13"}
237
+ {"current_steps": 2330, "total_steps": 4334, "loss": 0.3369, "lr": 5.217426293291869e-06, "epoch": 1.0747404844290658, "percentage": 53.76, "elapsed_time": "13:20:38", "remaining_time": "11:28:37"}
238
+ {"current_steps": 2340, "total_steps": 4334, "loss": 0.311, "lr": 5.177180944443821e-06, "epoch": 1.079354094579008, "percentage": 53.99, "elapsed_time": "13:23:57", "remaining_time": "11:25:04"}
239
+ {"current_steps": 2350, "total_steps": 4334, "loss": 0.3109, "lr": 5.136924098578201e-06, "epoch": 1.0839677047289504, "percentage": 54.22, "elapsed_time": "13:27:13", "remaining_time": "11:21:29"}
240
+ {"current_steps": 2360, "total_steps": 4334, "loss": 0.2808, "lr": 5.096658367904043e-06, "epoch": 1.0885813148788928, "percentage": 54.45, "elapsed_time": "13:30:35", "remaining_time": "11:18:01"}
241
+ {"current_steps": 2370, "total_steps": 4334, "loss": 0.3435, "lr": 5.056386365206908e-06, "epoch": 1.093194925028835, "percentage": 54.68, "elapsed_time": "13:33:46", "remaining_time": "11:14:22"}
242
+ {"current_steps": 2380, "total_steps": 4334, "loss": 0.3141, "lr": 5.016110703679341e-06, "epoch": 1.0978085351787774, "percentage": 54.91, "elapsed_time": "13:37:14", "remaining_time": "11:10:57"}
243
+ {"current_steps": 2390, "total_steps": 4334, "loss": 0.3074, "lr": 4.9758339967512995e-06, "epoch": 1.1024221453287197, "percentage": 55.15, "elapsed_time": "13:40:43", "remaining_time": "11:07:34"}
244
+ {"current_steps": 2400, "total_steps": 4334, "loss": 0.3255, "lr": 4.935558857920576e-06, "epoch": 1.107035755478662, "percentage": 55.38, "elapsed_time": "13:44:10", "remaining_time": "11:04:09"}
245
+ {"current_steps": 2410, "total_steps": 4334, "loss": 0.3007, "lr": 4.895287900583216e-06, "epoch": 1.1116493656286044, "percentage": 55.61, "elapsed_time": "13:47:20", "remaining_time": "11:00:29"}
246
+ {"current_steps": 2420, "total_steps": 4334, "loss": 0.3383, "lr": 4.855023737863927e-06, "epoch": 1.1162629757785467, "percentage": 55.84, "elapsed_time": "13:50:37", "remaining_time": "10:56:57"}
247
+ {"current_steps": 2430, "total_steps": 4334, "loss": 0.3207, "lr": 4.814768982446532e-06, "epoch": 1.120876585928489, "percentage": 56.07, "elapsed_time": "13:53:56", "remaining_time": "10:53:25"}
248
+ {"current_steps": 2440, "total_steps": 4334, "loss": 0.3069, "lr": 4.774526246404417e-06, "epoch": 1.1254901960784314, "percentage": 56.3, "elapsed_time": "13:57:01", "remaining_time": "10:49:43"}
249
+ {"current_steps": 2450, "total_steps": 4334, "loss": 0.2949, "lr": 4.734298141031057e-06, "epoch": 1.1301038062283737, "percentage": 56.53, "elapsed_time": "14:00:12", "remaining_time": "10:46:06"}
250
+ {"current_steps": 2460, "total_steps": 4334, "loss": 0.3602, "lr": 4.69408727667056e-06, "epoch": 1.134717416378316, "percentage": 56.76, "elapsed_time": "14:03:22", "remaining_time": "10:42:28"}
251
+ {"current_steps": 2470, "total_steps": 4334, "loss": 0.2999, "lr": 4.653896262548291e-06, "epoch": 1.1393310265282584, "percentage": 56.99, "elapsed_time": "14:06:36", "remaining_time": "10:38:53"}
252
+ {"current_steps": 2480, "total_steps": 4334, "loss": 0.3186, "lr": 4.613727706601558e-06, "epoch": 1.1439446366782007, "percentage": 57.22, "elapsed_time": "14:10:04", "remaining_time": "10:35:29"}
253
+ {"current_steps": 2490, "total_steps": 4334, "loss": 0.2857, "lr": 4.573584215310394e-06, "epoch": 1.148558246828143, "percentage": 57.45, "elapsed_time": "14:13:34", "remaining_time": "10:32:07"}
254
+ {"current_steps": 2500, "total_steps": 4334, "loss": 0.3188, "lr": 4.533468393528421e-06, "epoch": 1.1531718569780853, "percentage": 57.68, "elapsed_time": "14:17:00", "remaining_time": "10:28:41"}
255
+ {"current_steps": 2500, "total_steps": 4334, "eval_loss": 0.36377301812171936, "epoch": 1.1531718569780853, "percentage": 57.68, "elapsed_time": "14:24:12", "remaining_time": "10:33:58"}
256
+ {"current_steps": 2510, "total_steps": 4334, "loss": 0.3255, "lr": 4.493382844313826e-06, "epoch": 1.1577854671280277, "percentage": 57.91, "elapsed_time": "14:30:19", "remaining_time": "10:32:27"}
257
+ {"current_steps": 2520, "total_steps": 4334, "loss": 0.3408, "lr": 4.453330168760451e-06, "epoch": 1.16239907727797, "percentage": 58.14, "elapsed_time": "14:33:37", "remaining_time": "10:28:52"}
258
+ {"current_steps": 2530, "total_steps": 4334, "loss": 0.3562, "lr": 4.41331296582902e-06, "epoch": 1.1670126874279123, "percentage": 58.38, "elapsed_time": "14:37:00", "remaining_time": "10:25:20"}
259
+ {"current_steps": 2540, "total_steps": 4334, "loss": 0.3049, "lr": 4.373333832178478e-06, "epoch": 1.1716262975778546, "percentage": 58.61, "elapsed_time": "14:40:23", "remaining_time": "10:21:49"}
260
+ {"current_steps": 2550, "total_steps": 4334, "loss": 0.3223, "lr": 4.333395361997521e-06, "epoch": 1.176239907727797, "percentage": 58.84, "elapsed_time": "14:43:30", "remaining_time": "10:18:06"}
261
+ {"current_steps": 2560, "total_steps": 4334, "loss": 0.2913, "lr": 4.293500146836241e-06, "epoch": 1.1808535178777393, "percentage": 59.07, "elapsed_time": "14:46:56", "remaining_time": "10:14:37"}
262
+ {"current_steps": 2570, "total_steps": 4334, "loss": 0.2823, "lr": 4.25365077543798e-06, "epoch": 1.1854671280276816, "percentage": 59.3, "elapsed_time": "14:50:16", "remaining_time": "10:11:03"}
263
+ {"current_steps": 2580, "total_steps": 4334, "loss": 0.3583, "lr": 4.213849833571341e-06, "epoch": 1.190080738177624, "percentage": 59.53, "elapsed_time": "14:53:43", "remaining_time": "10:07:35"}
264
+ {"current_steps": 2590, "total_steps": 4334, "loss": 0.3101, "lr": 4.174099903862403e-06, "epoch": 1.1946943483275663, "percentage": 59.76, "elapsed_time": "14:57:02", "remaining_time": "10:04:02"}
265
+ {"current_steps": 2600, "total_steps": 4334, "loss": 0.3311, "lr": 4.134403565627144e-06, "epoch": 1.1993079584775086, "percentage": 59.99, "elapsed_time": "15:00:22", "remaining_time": "10:00:28"}
266
+ {"current_steps": 2610, "total_steps": 4334, "loss": 0.3437, "lr": 4.0947633947040616e-06, "epoch": 1.203921568627451, "percentage": 60.22, "elapsed_time": "15:03:31", "remaining_time": "9:56:48"}
267
+ {"current_steps": 2620, "total_steps": 4334, "loss": 0.2788, "lr": 4.055181963287044e-06, "epoch": 1.2085351787773932, "percentage": 60.45, "elapsed_time": "15:07:06", "remaining_time": "9:53:25"}
268
+ {"current_steps": 2630, "total_steps": 4334, "loss": 0.3188, "lr": 4.01566183975845e-06, "epoch": 1.2131487889273356, "percentage": 60.68, "elapsed_time": "15:10:23", "remaining_time": "9:49:51"}
269
+ {"current_steps": 2640, "total_steps": 4334, "loss": 0.2936, "lr": 3.9762055885224614e-06, "epoch": 1.217762399077278, "percentage": 60.91, "elapsed_time": "15:13:35", "remaining_time": "9:46:13"}
270
+ {"current_steps": 2650, "total_steps": 4334, "loss": 0.266, "lr": 3.936815769838682e-06, "epoch": 1.2223760092272202, "percentage": 61.14, "elapsed_time": "15:16:39", "remaining_time": "9:42:30"}
271
+ {"current_steps": 2660, "total_steps": 4334, "loss": 0.315, "lr": 3.897494939655996e-06, "epoch": 1.2269896193771626, "percentage": 61.38, "elapsed_time": "15:19:39", "remaining_time": "9:38:45"}
272
+ {"current_steps": 2670, "total_steps": 4334, "loss": 0.3161, "lr": 3.8582456494467214e-06, "epoch": 1.2316032295271049, "percentage": 61.61, "elapsed_time": "15:23:04", "remaining_time": "9:35:16"}
273
+ {"current_steps": 2680, "total_steps": 4334, "loss": 0.3216, "lr": 3.819070446041059e-06, "epoch": 1.2362168396770472, "percentage": 61.84, "elapsed_time": "15:26:08", "remaining_time": "9:31:34"}
274
+ {"current_steps": 2690, "total_steps": 4334, "loss": 0.3184, "lr": 3.779971871461813e-06, "epoch": 1.2408304498269895, "percentage": 62.07, "elapsed_time": "15:29:15", "remaining_time": "9:27:55"}
275
+ {"current_steps": 2700, "total_steps": 4334, "loss": 0.3097, "lr": 3.7409524627594607e-06, "epoch": 1.2454440599769319, "percentage": 62.3, "elapsed_time": "15:32:38", "remaining_time": "9:24:25"}
276
+ {"current_steps": 2710, "total_steps": 4334, "loss": 0.2805, "lr": 3.702014751847514e-06, "epoch": 1.2500576701268744, "percentage": 62.53, "elapsed_time": "15:35:42", "remaining_time": "9:20:44"}
277
+ {"current_steps": 2720, "total_steps": 4334, "loss": 0.3199, "lr": 3.6631612653382354e-06, "epoch": 1.2546712802768165, "percentage": 62.76, "elapsed_time": "15:39:02", "remaining_time": "9:17:12"}
278
+ {"current_steps": 2730, "total_steps": 4334, "loss": 0.3204, "lr": 3.624394524378684e-06, "epoch": 1.259284890426759, "percentage": 62.99, "elapsed_time": "15:42:21", "remaining_time": "9:13:40"}
279
+ {"current_steps": 2740, "total_steps": 4334, "loss": 0.3378, "lr": 3.585717044487126e-06, "epoch": 1.2638985005767012, "percentage": 63.22, "elapsed_time": "15:45:30", "remaining_time": "9:10:02"}
280
+ {"current_steps": 2750, "total_steps": 4334, "loss": 0.3073, "lr": 3.5471313353898056e-06, "epoch": 1.2685121107266437, "percentage": 63.45, "elapsed_time": "15:48:32", "remaining_time": "9:06:21"}
281
+ {"current_steps": 2760, "total_steps": 4334, "loss": 0.3255, "lr": 3.5086399008580885e-06, "epoch": 1.2731257208765858, "percentage": 63.68, "elapsed_time": "15:51:39", "remaining_time": "9:02:43"}
282
+ {"current_steps": 2770, "total_steps": 4334, "loss": 0.3108, "lr": 3.470245238546002e-06, "epoch": 1.2777393310265284, "percentage": 63.91, "elapsed_time": "15:54:37", "remaining_time": "8:59:00"}
283
+ {"current_steps": 2780, "total_steps": 4334, "loss": 0.2944, "lr": 3.4319498398281638e-06, "epoch": 1.2823529411764705, "percentage": 64.14, "elapsed_time": "15:57:43", "remaining_time": "8:55:21"}
284
+ {"current_steps": 2790, "total_steps": 4334, "loss": 0.3167, "lr": 3.393756189638115e-06, "epoch": 1.286966551326413, "percentage": 64.37, "elapsed_time": "16:00:49", "remaining_time": "8:51:43"}
285
+ {"current_steps": 2800, "total_steps": 4334, "loss": 0.3009, "lr": 3.355666766307084e-06, "epoch": 1.2915801614763551, "percentage": 64.61, "elapsed_time": "16:03:54", "remaining_time": "8:48:04"}
286
+ {"current_steps": 2810, "total_steps": 4334, "loss": 0.2878, "lr": 3.3176840414031653e-06, "epoch": 1.2961937716262977, "percentage": 64.84, "elapsed_time": "16:06:57", "remaining_time": "8:44:25"}
287
+ {"current_steps": 2820, "total_steps": 4334, "loss": 0.2743, "lr": 3.2798104795709484e-06, "epoch": 1.3008073817762398, "percentage": 65.07, "elapsed_time": "16:10:16", "remaining_time": "8:40:55"}
288
+ {"current_steps": 2830, "total_steps": 4334, "loss": 0.3117, "lr": 3.242048538371585e-06, "epoch": 1.3054209919261823, "percentage": 65.3, "elapsed_time": "16:13:17", "remaining_time": "8:37:15"}
289
+ {"current_steps": 2840, "total_steps": 4334, "loss": 0.3065, "lr": 3.2044006681233226e-06, "epoch": 1.3100346020761244, "percentage": 65.53, "elapsed_time": "16:16:27", "remaining_time": "8:33:40"}
290
+ {"current_steps": 2850, "total_steps": 4334, "loss": 0.3182, "lr": 3.1668693117425128e-06, "epoch": 1.314648212226067, "percentage": 65.76, "elapsed_time": "16:19:44", "remaining_time": "8:30:09"}
291
+ {"current_steps": 2860, "total_steps": 4334, "loss": 0.3362, "lr": 3.1294569045850844e-06, "epoch": 1.3192618223760093, "percentage": 65.99, "elapsed_time": "16:22:53", "remaining_time": "8:26:33"}
292
+ {"current_steps": 2870, "total_steps": 4334, "loss": 0.3202, "lr": 3.092165874288525e-06, "epoch": 1.3238754325259516, "percentage": 66.22, "elapsed_time": "16:26:05", "remaining_time": "8:23:00"}
293
+ {"current_steps": 2880, "total_steps": 4334, "loss": 0.3061, "lr": 3.05499864061435e-06, "epoch": 1.328489042675894, "percentage": 66.45, "elapsed_time": "16:29:20", "remaining_time": "8:19:28"}
294
+ {"current_steps": 2890, "total_steps": 4334, "loss": 0.2937, "lr": 3.017957615291088e-06, "epoch": 1.3331026528258363, "percentage": 66.68, "elapsed_time": "16:32:48", "remaining_time": "8:16:03"}
295
+ {"current_steps": 2900, "total_steps": 4334, "loss": 0.3056, "lr": 2.981045201857796e-06, "epoch": 1.3377162629757786, "percentage": 66.91, "elapsed_time": "16:36:02", "remaining_time": "8:12:31"}
296
+ {"current_steps": 2910, "total_steps": 4334, "loss": 0.2964, "lr": 2.9442637955080787e-06, "epoch": 1.342329873125721, "percentage": 67.14, "elapsed_time": "16:39:20", "remaining_time": "8:09:01"}
297
+ {"current_steps": 2920, "total_steps": 4334, "loss": 0.322, "lr": 2.9076157829346883e-06, "epoch": 1.3469434832756633, "percentage": 67.37, "elapsed_time": "16:42:26", "remaining_time": "8:05:25"}
298
+ {"current_steps": 2930, "total_steps": 4334, "loss": 0.3186, "lr": 2.871103542174637e-06, "epoch": 1.3515570934256056, "percentage": 67.6, "elapsed_time": "16:45:28", "remaining_time": "8:01:48"}
299
+ {"current_steps": 2940, "total_steps": 4334, "loss": 0.2989, "lr": 2.8347294424549075e-06, "epoch": 1.356170703575548, "percentage": 67.84, "elapsed_time": "16:48:39", "remaining_time": "7:58:15"}
300
+ {"current_steps": 2950, "total_steps": 4334, "loss": 0.3095, "lr": 2.7984958440387045e-06, "epoch": 1.3607843137254902, "percentage": 68.07, "elapsed_time": "16:51:48", "remaining_time": "7:54:41"}
301
+ {"current_steps": 2960, "total_steps": 4334, "loss": 0.3209, "lr": 2.7624050980723032e-06, "epoch": 1.3653979238754326, "percentage": 68.3, "elapsed_time": "16:54:56", "remaining_time": "7:51:07"}
302
+ {"current_steps": 2970, "total_steps": 4334, "loss": 0.3238, "lr": 2.726459546432488e-06, "epoch": 1.370011534025375, "percentage": 68.53, "elapsed_time": "16:57:57", "remaining_time": "7:47:30"}
303
+ {"current_steps": 2980, "total_steps": 4334, "loss": 0.2856, "lr": 2.690661521574596e-06, "epoch": 1.3746251441753172, "percentage": 68.76, "elapsed_time": "17:00:55", "remaining_time": "7:43:52"}
304
+ {"current_steps": 2990, "total_steps": 4334, "loss": 0.3145, "lr": 2.655013346381158e-06, "epoch": 1.3792387543252596, "percentage": 68.99, "elapsed_time": "17:04:17", "remaining_time": "7:40:25"}
305
+ {"current_steps": 3000, "total_steps": 4334, "loss": 0.2938, "lr": 2.6195173340111767e-06, "epoch": 1.3838523644752019, "percentage": 69.22, "elapsed_time": "17:07:25", "remaining_time": "7:36:51"}
306
+ {"current_steps": 3000, "total_steps": 4334, "eval_loss": 0.3571609854698181, "epoch": 1.3838523644752019, "percentage": 69.22, "elapsed_time": "17:14:12", "remaining_time": "7:39:52"}
307
+ {"current_steps": 3010, "total_steps": 4334, "loss": 0.2978, "lr": 2.5841757877500245e-06, "epoch": 1.3884659746251442, "percentage": 69.45, "elapsed_time": "17:19:48", "remaining_time": "7:37:22"}
308
+ {"current_steps": 3020, "total_steps": 4334, "loss": 0.2824, "lr": 2.548991000859997e-06, "epoch": 1.3930795847750865, "percentage": 69.68, "elapsed_time": "17:22:59", "remaining_time": "7:33:48"}
309
+ {"current_steps": 3030, "total_steps": 4334, "loss": 0.3256, "lr": 2.513965256431488e-06, "epoch": 1.3976931949250289, "percentage": 69.91, "elapsed_time": "17:26:07", "remaining_time": "7:30:12"}
310
+ {"current_steps": 3040, "total_steps": 4334, "loss": 0.3297, "lr": 2.4791008272348656e-06, "epoch": 1.4023068050749712, "percentage": 70.14, "elapsed_time": "17:29:18", "remaining_time": "7:26:38"}
311
+ {"current_steps": 3050, "total_steps": 4334, "loss": 0.3279, "lr": 2.444399975572974e-06, "epoch": 1.4069204152249135, "percentage": 70.37, "elapsed_time": "17:32:17", "remaining_time": "7:22:59"}
312
+ {"current_steps": 3060, "total_steps": 4334, "loss": 0.3103, "lr": 2.40986495313435e-06, "epoch": 1.4115340253748558, "percentage": 70.6, "elapsed_time": "17:35:18", "remaining_time": "7:19:21"}
313
+ {"current_steps": 3070, "total_steps": 4334, "loss": 0.3231, "lr": 2.3754980008471074e-06, "epoch": 1.4161476355247982, "percentage": 70.84, "elapsed_time": "17:38:29", "remaining_time": "7:15:48"}
314
+ {"current_steps": 3080, "total_steps": 4334, "loss": 0.3138, "lr": 2.3413013487335332e-06, "epoch": 1.4207612456747405, "percentage": 71.07, "elapsed_time": "17:41:49", "remaining_time": "7:12:18"}
315
+ {"current_steps": 3090, "total_steps": 4334, "loss": 0.2695, "lr": 2.307277215765377e-06, "epoch": 1.4253748558246828, "percentage": 71.3, "elapsed_time": "17:44:58", "remaining_time": "7:08:44"}
316
+ {"current_steps": 3100, "total_steps": 4334, "loss": 0.2983, "lr": 2.273427809719867e-06, "epoch": 1.4299884659746251, "percentage": 71.53, "elapsed_time": "17:48:03", "remaining_time": "7:05:09"}
317
+ {"current_steps": 3110, "total_steps": 4334, "loss": 0.3141, "lr": 2.2397553270364546e-06, "epoch": 1.4346020761245675, "percentage": 71.76, "elapsed_time": "17:51:12", "remaining_time": "7:01:35"}
318
+ {"current_steps": 3120, "total_steps": 4334, "loss": 0.2959, "lr": 2.206261952674284e-06, "epoch": 1.4392156862745098, "percentage": 71.99, "elapsed_time": "17:54:23", "remaining_time": "6:58:02"}
319
+ {"current_steps": 3130, "total_steps": 4334, "loss": 0.3348, "lr": 2.172949859970422e-06, "epoch": 1.4438292964244521, "percentage": 72.22, "elapsed_time": "17:57:21", "remaining_time": "6:54:25"}
320
+ {"current_steps": 3140, "total_steps": 4334, "loss": 0.3098, "lr": 2.1398212104988273e-06, "epoch": 1.4484429065743945, "percentage": 72.45, "elapsed_time": "18:00:29", "remaining_time": "6:50:51"}
321
+ {"current_steps": 3150, "total_steps": 4334, "loss": 0.2701, "lr": 2.1068781539300874e-06, "epoch": 1.4530565167243368, "percentage": 72.68, "elapsed_time": "18:03:42", "remaining_time": "6:47:20"}
322
+ {"current_steps": 3160, "total_steps": 4334, "loss": 0.3135, "lr": 2.0741228278919347e-06, "epoch": 1.457670126874279, "percentage": 72.91, "elapsed_time": "18:06:57", "remaining_time": "6:43:49"}
323
+ {"current_steps": 3170, "total_steps": 4334, "loss": 0.3234, "lr": 2.0415573578305343e-06, "epoch": 1.4622837370242214, "percentage": 73.14, "elapsed_time": "18:10:03", "remaining_time": "6:40:15"}
324
+ {"current_steps": 3180, "total_steps": 4334, "loss": 0.3034, "lr": 2.0091838568725685e-06, "epoch": 1.4668973471741638, "percentage": 73.37, "elapsed_time": "18:13:17", "remaining_time": "6:36:44"}
325
+ {"current_steps": 3190, "total_steps": 4334, "loss": 0.3403, "lr": 1.977004425688126e-06, "epoch": 1.471510957324106, "percentage": 73.6, "elapsed_time": "18:16:23", "remaining_time": "6:33:11"}
326
+ {"current_steps": 3200, "total_steps": 4334, "loss": 0.3235, "lr": 1.945021152354379e-06, "epoch": 1.4761245674740484, "percentage": 73.83, "elapsed_time": "18:19:42", "remaining_time": "6:29:42"}
327
+ {"current_steps": 3210, "total_steps": 4334, "loss": 0.2852, "lr": 1.913236112220101e-06, "epoch": 1.4807381776239907, "percentage": 74.07, "elapsed_time": "18:22:51", "remaining_time": "6:26:10"}
328
+ {"current_steps": 3220, "total_steps": 4334, "loss": 0.3362, "lr": 1.8816513677709935e-06, "epoch": 1.485351787773933, "percentage": 74.3, "elapsed_time": "18:26:10", "remaining_time": "6:22:41"}
329
+ {"current_steps": 3230, "total_steps": 4334, "loss": 0.2814, "lr": 1.8502689684958664e-06, "epoch": 1.4899653979238754, "percentage": 74.53, "elapsed_time": "18:29:11", "remaining_time": "6:19:07"}
330
+ {"current_steps": 3240, "total_steps": 4334, "loss": 0.3092, "lr": 1.8190909507536326e-06, "epoch": 1.4945790080738177, "percentage": 74.76, "elapsed_time": "18:32:29", "remaining_time": "6:15:38"}
331
+ {"current_steps": 3250, "total_steps": 4334, "loss": 0.2931, "lr": 1.7881193376411822e-06, "epoch": 1.49919261822376, "percentage": 74.99, "elapsed_time": "18:35:35", "remaining_time": "6:12:05"}
332
+ {"current_steps": 3260, "total_steps": 4334, "loss": 0.2852, "lr": 1.7573561388621102e-06, "epoch": 1.5038062283737024, "percentage": 75.22, "elapsed_time": "18:38:53", "remaining_time": "6:08:36"}
333
+ {"current_steps": 3270, "total_steps": 4334, "loss": 0.3252, "lr": 1.7268033505962972e-06, "epoch": 1.5084198385236447, "percentage": 75.45, "elapsed_time": "18:42:10", "remaining_time": "6:05:08"}
334
+ {"current_steps": 3280, "total_steps": 4334, "loss": 0.2965, "lr": 1.6964629553703893e-06, "epoch": 1.5130334486735872, "percentage": 75.68, "elapsed_time": "18:45:40", "remaining_time": "6:01:43"}
335
+ {"current_steps": 3290, "total_steps": 4334, "loss": 0.3256, "lr": 1.6663369219291558e-06, "epoch": 1.5176470588235293, "percentage": 75.91, "elapsed_time": "18:49:01", "remaining_time": "5:58:16"}
336
+ {"current_steps": 3300, "total_steps": 4334, "loss": 0.3087, "lr": 1.6364272051077335e-06, "epoch": 1.522260668973472, "percentage": 76.14, "elapsed_time": "18:52:09", "remaining_time": "5:54:44"}
337
+ {"current_steps": 3310, "total_steps": 4334, "loss": 0.3082, "lr": 1.606735745704784e-06, "epoch": 1.526874279123414, "percentage": 76.37, "elapsed_time": "18:55:26", "remaining_time": "5:51:16"}
338
+ {"current_steps": 3320, "total_steps": 4334, "loss": 0.3089, "lr": 1.5772644703565564e-06, "epoch": 1.5314878892733566, "percentage": 76.6, "elapsed_time": "18:58:47", "remaining_time": "5:47:48"}
339
+ {"current_steps": 3330, "total_steps": 4334, "loss": 0.312, "lr": 1.5480152914118784e-06, "epoch": 1.5361014994232987, "percentage": 76.83, "elapsed_time": "19:02:02", "remaining_time": "5:44:19"}
340
+ {"current_steps": 3340, "total_steps": 4334, "loss": 0.2756, "lr": 1.5189901068080536e-06, "epoch": 1.5407151095732412, "percentage": 77.07, "elapsed_time": "19:05:10", "remaining_time": "5:40:48"}
341
+ {"current_steps": 3350, "total_steps": 4334, "loss": 0.2931, "lr": 1.4901907999477167e-06, "epoch": 1.5453287197231833, "percentage": 77.3, "elapsed_time": "19:08:23", "remaining_time": "5:37:19"}
342
+ {"current_steps": 3360, "total_steps": 4334, "loss": 0.3312, "lr": 1.4616192395766189e-06, "epoch": 1.5499423298731259, "percentage": 77.53, "elapsed_time": "19:11:30", "remaining_time": "5:33:48"}
343
+ {"current_steps": 3370, "total_steps": 4334, "loss": 0.2877, "lr": 1.4332772796623655e-06, "epoch": 1.554555940023068, "percentage": 77.76, "elapsed_time": "19:14:29", "remaining_time": "5:30:14"}
344
+ {"current_steps": 3380, "total_steps": 4334, "loss": 0.2865, "lr": 1.405166759274123e-06, "epoch": 1.5591695501730105, "percentage": 77.99, "elapsed_time": "19:17:53", "remaining_time": "5:26:48"}
345
+ {"current_steps": 3390, "total_steps": 4334, "loss": 0.2772, "lr": 1.3772895024632753e-06, "epoch": 1.5637831603229526, "percentage": 78.22, "elapsed_time": "19:21:05", "remaining_time": "5:23:19"}
346
+ {"current_steps": 3400, "total_steps": 4334, "loss": 0.2826, "lr": 1.349647318145067e-06, "epoch": 1.5683967704728952, "percentage": 78.45, "elapsed_time": "19:24:18", "remaining_time": "5:19:50"}
347
+ {"current_steps": 3410, "total_steps": 4334, "loss": 0.2582, "lr": 1.3222419999812248e-06, "epoch": 1.5730103806228373, "percentage": 78.68, "elapsed_time": "19:27:37", "remaining_time": "5:16:23"}
348
+ {"current_steps": 3420, "total_steps": 4334, "loss": 0.3361, "lr": 1.2950753262635712e-06, "epoch": 1.5776239907727798, "percentage": 78.91, "elapsed_time": "19:30:50", "remaining_time": "5:12:54"}
349
+ {"current_steps": 3430, "total_steps": 4334, "loss": 0.305, "lr": 1.2681490597986313e-06, "epoch": 1.582237600922722, "percentage": 79.14, "elapsed_time": "19:34:01", "remaining_time": "5:09:25"}
350
+ {"current_steps": 3440, "total_steps": 4334, "loss": 0.2935, "lr": 1.2414649477932511e-06, "epoch": 1.5868512110726645, "percentage": 79.37, "elapsed_time": "19:37:29", "remaining_time": "5:06:00"}
351
+ {"current_steps": 3450, "total_steps": 4334, "loss": 0.3227, "lr": 1.2150247217412186e-06, "epoch": 1.5914648212226066, "percentage": 79.6, "elapsed_time": "19:40:53", "remaining_time": "5:02:34"}
352
+ {"current_steps": 3460, "total_steps": 4334, "loss": 0.3037, "lr": 1.1888300973109112e-06, "epoch": 1.5960784313725491, "percentage": 79.83, "elapsed_time": "19:44:21", "remaining_time": "4:59:10"}
353
+ {"current_steps": 3470, "total_steps": 4334, "loss": 0.3172, "lr": 1.1628827742339688e-06, "epoch": 1.6006920415224912, "percentage": 80.06, "elapsed_time": "19:47:39", "remaining_time": "4:55:42"}
354
+ {"current_steps": 3480, "total_steps": 4334, "loss": 0.3046, "lr": 1.1371844361950045e-06, "epoch": 1.6053056516724338, "percentage": 80.3, "elapsed_time": "19:50:51", "remaining_time": "4:52:14"}
355
+ {"current_steps": 3490, "total_steps": 4334, "loss": 0.3107, "lr": 1.1117367507223452e-06, "epoch": 1.6099192618223759, "percentage": 80.53, "elapsed_time": "19:54:06", "remaining_time": "4:48:46"}
356
+ {"current_steps": 3500, "total_steps": 4334, "loss": 0.2891, "lr": 1.0865413690798321e-06, "epoch": 1.6145328719723184, "percentage": 80.76, "elapsed_time": "19:57:18", "remaining_time": "4:45:17"}
357
+ {"current_steps": 3500, "total_steps": 4334, "eval_loss": 0.3522779047489166, "epoch": 1.6145328719723184, "percentage": 80.76, "elapsed_time": "20:04:03", "remaining_time": "4:46:54"}
358
+ {"current_steps": 3510, "total_steps": 4334, "loss": 0.3177, "lr": 1.061599926159676e-06, "epoch": 1.6191464821222605, "percentage": 80.99, "elapsed_time": "20:09:43", "remaining_time": "4:43:59"}
359
+ {"current_steps": 3520, "total_steps": 4334, "loss": 0.2951, "lr": 1.036914040376364e-06, "epoch": 1.623760092272203, "percentage": 81.22, "elapsed_time": "20:12:53", "remaining_time": "4:40:28"}
360
+ {"current_steps": 3530, "total_steps": 4334, "loss": 0.273, "lr": 1.0124853135616475e-06, "epoch": 1.6283737024221452, "percentage": 81.45, "elapsed_time": "20:16:17", "remaining_time": "4:37:01"}
361
+ {"current_steps": 3540, "total_steps": 4334, "loss": 0.307, "lr": 9.883153308606035e-07, "epoch": 1.6329873125720877, "percentage": 81.68, "elapsed_time": "20:19:31", "remaining_time": "4:33:31"}
362
+ {"current_steps": 3550, "total_steps": 4334, "loss": 0.3031, "lr": 9.644056606287727e-07, "epoch": 1.6376009227220298, "percentage": 81.91, "elapsed_time": "20:22:56", "remaining_time": "4:30:04"}
363
+ {"current_steps": 3560, "total_steps": 4334, "loss": 0.3121, "lr": 9.407578543303913e-07, "epoch": 1.6422145328719724, "percentage": 82.14, "elapsed_time": "20:26:04", "remaining_time": "4:26:34"}
364
+ {"current_steps": 3570, "total_steps": 4334, "loss": 0.2709, "lr": 9.173734464377204e-07, "epoch": 1.6468281430219145, "percentage": 82.37, "elapsed_time": "20:29:16", "remaining_time": "4:23:04"}
365
+ {"current_steps": 3580, "total_steps": 4334, "loss": 0.2877, "lr": 8.942539543314799e-07, "epoch": 1.651441753171857, "percentage": 82.6, "elapsed_time": "20:32:21", "remaining_time": "4:19:33"}
366
+ {"current_steps": 3590, "total_steps": 4334, "loss": 0.306, "lr": 8.714008782023797e-07, "epoch": 1.6560553633217991, "percentage": 82.83, "elapsed_time": "20:35:31", "remaining_time": "4:16:03"}
367
+ {"current_steps": 3600, "total_steps": 4334, "loss": 0.3156, "lr": 8.488157009537796e-07, "epoch": 1.6606689734717417, "percentage": 83.06, "elapsed_time": "20:38:51", "remaining_time": "4:12:35"}
368
+ {"current_steps": 3610, "total_steps": 4334, "loss": 0.3164, "lr": 8.264998881054659e-07, "epoch": 1.665282583621684, "percentage": 83.29, "elapsed_time": "20:42:10", "remaining_time": "4:09:07"}
369
+ {"current_steps": 3620, "total_steps": 4334, "loss": 0.2823, "lr": 8.044548876985531e-07, "epoch": 1.6698961937716263, "percentage": 83.53, "elapsed_time": "20:45:26", "remaining_time": "4:05:38"}
370
+ {"current_steps": 3630, "total_steps": 4334, "loss": 0.3184, "lr": 7.826821302015275e-07, "epoch": 1.6745098039215687, "percentage": 83.76, "elapsed_time": "20:48:52", "remaining_time": "4:02:12"}
371
+ {"current_steps": 3640, "total_steps": 4334, "loss": 0.2956, "lr": 7.61183028417422e-07, "epoch": 1.679123414071511, "percentage": 83.99, "elapsed_time": "20:52:02", "remaining_time": "3:58:42"}
372
+ {"current_steps": 3650, "total_steps": 4334, "loss": 0.3187, "lr": 7.399589773921412e-07, "epoch": 1.6837370242214533, "percentage": 84.22, "elapsed_time": "20:55:15", "remaining_time": "3:55:13"}
373
+ {"current_steps": 3660, "total_steps": 4334, "loss": 0.3194, "lr": 7.190113543239408e-07, "epoch": 1.6883506343713957, "percentage": 84.45, "elapsed_time": "20:58:19", "remaining_time": "3:51:43"}
374
+ {"current_steps": 3670, "total_steps": 4334, "loss": 0.2958, "lr": 6.983415184740616e-07, "epoch": 1.692964244521338, "percentage": 84.68, "elapsed_time": "21:01:28", "remaining_time": "3:48:14"}
375
+ {"current_steps": 3680, "total_steps": 4334, "loss": 0.2761, "lr": 6.779508110785332e-07, "epoch": 1.6975778546712803, "percentage": 84.91, "elapsed_time": "21:04:40", "remaining_time": "3:44:45"}
376
+ {"current_steps": 3690, "total_steps": 4334, "loss": 0.2758, "lr": 6.578405552611361e-07, "epoch": 1.7021914648212226, "percentage": 85.14, "elapsed_time": "21:07:55", "remaining_time": "3:41:17"}
377
+ {"current_steps": 3700, "total_steps": 4334, "loss": 0.3272, "lr": 6.380120559475505e-07, "epoch": 1.706805074971165, "percentage": 85.37, "elapsed_time": "21:11:00", "remaining_time": "3:37:47"}
378
+ {"current_steps": 3710, "total_steps": 4334, "loss": 0.2947, "lr": 6.184665997806832e-07, "epoch": 1.7114186851211073, "percentage": 85.6, "elapsed_time": "21:14:15", "remaining_time": "3:34:19"}
379
+ {"current_steps": 3720, "total_steps": 4334, "loss": 0.3304, "lr": 5.992054550371723e-07, "epoch": 1.7160322952710496, "percentage": 85.83, "elapsed_time": "21:17:40", "remaining_time": "3:30:53"}
380
+ {"current_steps": 3730, "total_steps": 4334, "loss": 0.3214, "lr": 5.802298715451016e-07, "epoch": 1.720645905420992, "percentage": 86.06, "elapsed_time": "21:21:02", "remaining_time": "3:27:26"}
381
+ {"current_steps": 3740, "total_steps": 4334, "loss": 0.2974, "lr": 5.615410806028875e-07, "epoch": 1.7252595155709343, "percentage": 86.29, "elapsed_time": "21:24:16", "remaining_time": "3:23:58"}
382
+ {"current_steps": 3750, "total_steps": 4334, "loss": 0.2869, "lr": 5.431402948993947e-07, "epoch": 1.7298731257208766, "percentage": 86.53, "elapsed_time": "21:27:28", "remaining_time": "3:20:30"}
383
+ {"current_steps": 3760, "total_steps": 4334, "loss": 0.329, "lr": 5.250287084352373e-07, "epoch": 1.734486735870819, "percentage": 86.76, "elapsed_time": "21:30:50", "remaining_time": "3:17:03"}
384
+ {"current_steps": 3770, "total_steps": 4334, "loss": 0.3262, "lr": 5.072074964453055e-07, "epoch": 1.7391003460207612, "percentage": 86.99, "elapsed_time": "21:34:08", "remaining_time": "3:13:36"}
385
+ {"current_steps": 3780, "total_steps": 4334, "loss": 0.2963, "lr": 4.896778153225062e-07, "epoch": 1.7437139561707036, "percentage": 87.22, "elapsed_time": "21:37:23", "remaining_time": "3:10:08"}
386
+ {"current_steps": 3790, "total_steps": 4334, "loss": 0.3124, "lr": 4.7244080254272795e-07, "epoch": 1.748327566320646, "percentage": 87.45, "elapsed_time": "21:40:37", "remaining_time": "3:06:41"}
387
+ {"current_steps": 3800, "total_steps": 4334, "loss": 0.293, "lr": 4.55497576591028e-07, "epoch": 1.7529411764705882, "percentage": 87.68, "elapsed_time": "21:43:47", "remaining_time": "3:03:13"}
388
+ {"current_steps": 3810, "total_steps": 4334, "loss": 0.3092, "lr": 4.3884923688905676e-07, "epoch": 1.7575547866205306, "percentage": 87.91, "elapsed_time": "21:47:02", "remaining_time": "2:59:45"}
389
+ {"current_steps": 3820, "total_steps": 4334, "loss": 0.2644, "lr": 4.224968637237198e-07, "epoch": 1.7621683967704729, "percentage": 88.14, "elapsed_time": "21:50:09", "remaining_time": "2:56:17"}
390
+ {"current_steps": 3830, "total_steps": 4334, "loss": 0.2823, "lr": 4.064415181770787e-07, "epoch": 1.7667820069204152, "percentage": 88.37, "elapsed_time": "21:53:32", "remaining_time": "2:52:51"}
391
+ {"current_steps": 3840, "total_steps": 4334, "loss": 0.3121, "lr": 3.90684242057498e-07, "epoch": 1.7713956170703575, "percentage": 88.6, "elapsed_time": "21:57:00", "remaining_time": "2:49:25"}
392
+ {"current_steps": 3850, "total_steps": 4334, "loss": 0.3145, "lr": 3.752260578320427e-07, "epoch": 1.7760092272202999, "percentage": 88.83, "elapsed_time": "22:00:34", "remaining_time": "2:46:00"}
393
+ {"current_steps": 3860, "total_steps": 4334, "loss": 0.3086, "lr": 3.600679685601349e-07, "epoch": 1.7806228373702422, "percentage": 89.06, "elapsed_time": "22:03:41", "remaining_time": "2:42:32"}
394
+ {"current_steps": 3870, "total_steps": 4334, "loss": 0.2978, "lr": 3.4521095782846623e-07, "epoch": 1.7852364475201845, "percentage": 89.29, "elapsed_time": "22:06:57", "remaining_time": "2:39:05"}
395
+ {"current_steps": 3880, "total_steps": 4334, "loss": 0.3016, "lr": 3.306559896871714e-07, "epoch": 1.7898500576701268, "percentage": 89.52, "elapsed_time": "22:10:15", "remaining_time": "2:35:39"}
396
+ {"current_steps": 3890, "total_steps": 4334, "loss": 0.3066, "lr": 3.164040085872755e-07, "epoch": 1.7944636678200692, "percentage": 89.76, "elapsed_time": "22:13:30", "remaining_time": "2:32:12"}
397
+ {"current_steps": 3900, "total_steps": 4334, "loss": 0.2851, "lr": 3.0245593931940766e-07, "epoch": 1.7990772779700115, "percentage": 89.99, "elapsed_time": "22:16:39", "remaining_time": "2:28:44"}
398
+ {"current_steps": 3910, "total_steps": 4334, "loss": 0.2901, "lr": 2.8881268695379436e-07, "epoch": 1.8036908881199538, "percentage": 90.22, "elapsed_time": "22:20:02", "remaining_time": "2:25:18"}
399
+ {"current_steps": 3920, "total_steps": 4334, "loss": 0.2997, "lr": 2.7547513678153005e-07, "epoch": 1.8083044982698961, "percentage": 90.45, "elapsed_time": "22:23:12", "remaining_time": "2:21:51"}
400
+ {"current_steps": 3930, "total_steps": 4334, "loss": 0.3282, "lr": 2.624441542571327e-07, "epoch": 1.8129181084198385, "percentage": 90.68, "elapsed_time": "22:26:12", "remaining_time": "2:18:23"}
401
+ {"current_steps": 3940, "total_steps": 4334, "loss": 0.2596, "lr": 2.497205849423834e-07, "epoch": 1.8175317185697808, "percentage": 90.91, "elapsed_time": "22:29:12", "remaining_time": "2:14:55"}
402
+ {"current_steps": 3950, "total_steps": 4334, "loss": 0.3077, "lr": 2.3730525445146146e-07, "epoch": 1.8221453287197233, "percentage": 91.14, "elapsed_time": "22:32:19", "remaining_time": "2:11:27"}
403
+ {"current_steps": 3960, "total_steps": 4334, "loss": 0.3411, "lr": 2.25198968397371e-07, "epoch": 1.8267589388696654, "percentage": 91.37, "elapsed_time": "22:35:35", "remaining_time": "2:08:01"}
404
+ {"current_steps": 3970, "total_steps": 4334, "loss": 0.2816, "lr": 2.134025123396638e-07, "epoch": 1.831372549019608, "percentage": 91.6, "elapsed_time": "22:38:49", "remaining_time": "2:04:35"}
405
+ {"current_steps": 3980, "total_steps": 4334, "loss": 0.3093, "lr": 2.019166517334703e-07, "epoch": 1.83598615916955, "percentage": 91.83, "elapsed_time": "22:41:53", "remaining_time": "2:01:08"}
406
+ {"current_steps": 3990, "total_steps": 4334, "loss": 0.2848, "lr": 1.9074213187982416e-07, "epoch": 1.8405997693194927, "percentage": 92.06, "elapsed_time": "22:45:02", "remaining_time": "1:57:41"}
407
+ {"current_steps": 4000, "total_steps": 4334, "loss": 0.3013, "lr": 1.7987967787730541e-07, "epoch": 1.8452133794694348, "percentage": 92.29, "elapsed_time": "22:47:59", "remaining_time": "1:54:13"}
408
+ {"current_steps": 4000, "total_steps": 4334, "eval_loss": 0.3492071032524109, "epoch": 1.8452133794694348, "percentage": 92.29, "elapsed_time": "22:54:52", "remaining_time": "1:54:48"}
409
+ {"current_steps": 4010, "total_steps": 4334, "loss": 0.2951, "lr": 1.6932999457498823e-07, "epoch": 1.8498269896193773, "percentage": 92.52, "elapsed_time": "23:00:20", "remaining_time": "1:51:31"}
410
+ {"current_steps": 4020, "total_steps": 4334, "loss": 0.2518, "lr": 1.5909376652670283e-07, "epoch": 1.8544405997693194, "percentage": 92.75, "elapsed_time": "23:03:22", "remaining_time": "1:48:03"}
411
+ {"current_steps": 4030, "total_steps": 4334, "loss": 0.3032, "lr": 1.4917165794661849e-07, "epoch": 1.859054209919262, "percentage": 92.99, "elapsed_time": "23:06:37", "remaining_time": "1:44:35"}
412
+ {"current_steps": 4040, "total_steps": 4334, "loss": 0.2954, "lr": 1.395643126661428e-07, "epoch": 1.863667820069204, "percentage": 93.22, "elapsed_time": "23:09:48", "remaining_time": "1:41:08"}
413
+ {"current_steps": 4050, "total_steps": 4334, "loss": 0.2756, "lr": 1.302723540921419e-07, "epoch": 1.8682814302191466, "percentage": 93.45, "elapsed_time": "23:13:14", "remaining_time": "1:37:41"}
414
+ {"current_steps": 4060, "total_steps": 4334, "loss": 0.2792, "lr": 1.212963851664928e-07, "epoch": 1.8728950403690887, "percentage": 93.68, "elapsed_time": "23:16:34", "remaining_time": "1:34:15"}
415
+ {"current_steps": 4070, "total_steps": 4334, "loss": 0.3286, "lr": 1.1263698832695513e-07, "epoch": 1.8775086505190313, "percentage": 93.91, "elapsed_time": "23:19:58", "remaining_time": "1:30:48"}
416
+ {"current_steps": 4080, "total_steps": 4334, "loss": 0.2919, "lr": 1.0429472546938158e-07, "epoch": 1.8821222606689734, "percentage": 94.14, "elapsed_time": "23:23:08", "remaining_time": "1:27:21"}
417
+ {"current_steps": 4090, "total_steps": 4334, "loss": 0.3285, "lr": 9.627013791125294e-08, "epoch": 1.886735870818916, "percentage": 94.37, "elapsed_time": "23:26:08", "remaining_time": "1:23:53"}
418
+ {"current_steps": 4100, "total_steps": 4334, "loss": 0.3172, "lr": 8.856374635655696e-08, "epoch": 1.891349480968858, "percentage": 94.6, "elapsed_time": "23:29:22", "remaining_time": "1:20:26"}
419
+ {"current_steps": 4110, "total_steps": 4334, "loss": 0.2838, "lr": 8.117605086199686e-08, "epoch": 1.8959630911188006, "percentage": 94.83, "elapsed_time": "23:32:32", "remaining_time": "1:16:59"}
420
+ {"current_steps": 4120, "total_steps": 4334, "loss": 0.3001, "lr": 7.410753080454746e-08, "epoch": 1.9005767012687427, "percentage": 95.06, "elapsed_time": "23:35:46", "remaining_time": "1:13:32"}
421
+ {"current_steps": 4130, "total_steps": 4334, "loss": 0.2853, "lr": 6.735864485034493e-08, "epoch": 1.9051903114186852, "percentage": 95.29, "elapsed_time": "23:38:54", "remaining_time": "1:10:05"}
422
+ {"current_steps": 4140, "total_steps": 4334, "loss": 0.3221, "lr": 6.092983092492844e-08, "epoch": 1.9098039215686273, "percentage": 95.52, "elapsed_time": "23:42:24", "remaining_time": "1:06:39"}
423
+ {"current_steps": 4150, "total_steps": 4334, "loss": 0.2815, "lr": 5.482150618481952e-08, "epoch": 1.9144175317185699, "percentage": 95.75, "elapsed_time": "23:45:39", "remaining_time": "1:03:12"}
424
+ {"current_steps": 4160, "total_steps": 4334, "loss": 0.3051, "lr": 4.9034066990457094e-08, "epoch": 1.919031141868512, "percentage": 95.99, "elapsed_time": "23:48:54", "remaining_time": "0:59:45"}
425
+ {"current_steps": 4170, "total_steps": 4334, "loss": 0.301, "lr": 4.356788888047747e-08, "epoch": 1.9236447520184545, "percentage": 96.22, "elapsed_time": "23:52:06", "remaining_time": "0:56:19"}
426
+ {"current_steps": 4180, "total_steps": 4334, "loss": 0.297, "lr": 3.8423326547344376e-08, "epoch": 1.9282583621683966, "percentage": 96.45, "elapsed_time": "23:55:31", "remaining_time": "0:52:53"}
427
+ {"current_steps": 4190, "total_steps": 4334, "loss": 0.309, "lr": 3.360071381433516e-08, "epoch": 1.9328719723183392, "percentage": 96.68, "elapsed_time": "23:58:57", "remaining_time": "0:49:27"}
428
+ {"current_steps": 4200, "total_steps": 4334, "loss": 0.2899, "lr": 2.9100363613879246e-08, "epoch": 1.9374855824682813, "percentage": 96.91, "elapsed_time": "1 day, 0:02:17", "remaining_time": "0:46:00"}
429
+ {"current_steps": 4210, "total_steps": 4334, "loss": 0.2905, "lr": 2.492256796725212e-08, "epoch": 1.9420991926182238, "percentage": 97.14, "elapsed_time": "1 day, 0:05:27", "remaining_time": "0:42:34"}
430
+ {"current_steps": 4220, "total_steps": 4334, "loss": 0.3042, "lr": 2.1067597965624963e-08, "epoch": 1.946712802768166, "percentage": 97.37, "elapsed_time": "1 day, 0:08:43", "remaining_time": "0:39:08"}
431
+ {"current_steps": 4230, "total_steps": 4334, "loss": 0.2793, "lr": 1.753570375247815e-08, "epoch": 1.9513264129181085, "percentage": 97.6, "elapsed_time": "1 day, 0:12:08", "remaining_time": "0:35:42"}
432
+ {"current_steps": 4240, "total_steps": 4334, "loss": 0.3012, "lr": 1.4327114507365347e-08, "epoch": 1.9559400230680506, "percentage": 97.83, "elapsed_time": "1 day, 0:15:39", "remaining_time": "0:32:16"}
433
+ {"current_steps": 4250, "total_steps": 4334, "loss": 0.2987, "lr": 1.1442038431044856e-08, "epoch": 1.9605536332179931, "percentage": 98.06, "elapsed_time": "1 day, 0:18:56", "remaining_time": "0:28:50"}
434
+ {"current_steps": 4260, "total_steps": 4334, "loss": 0.3028, "lr": 8.880662731968748e-09, "epoch": 1.9651672433679352, "percentage": 98.29, "elapsed_time": "1 day, 0:22:11", "remaining_time": "0:25:23"}
435
+ {"current_steps": 4270, "total_steps": 4334, "loss": 0.327, "lr": 6.6431536141348115e-09, "epoch": 1.9697808535178778, "percentage": 98.52, "elapsed_time": "1 day, 0:25:25", "remaining_time": "0:21:57"}
436
+ {"current_steps": 4280, "total_steps": 4334, "loss": 0.2861, "lr": 4.729656266304061e-09, "epoch": 1.9743944636678201, "percentage": 98.75, "elapsed_time": "1 day, 0:28:35", "remaining_time": "0:18:31"}
437
+ {"current_steps": 4290, "total_steps": 4334, "loss": 0.3224, "lr": 3.1402948525766085e-09, "epoch": 1.9790080738177624, "percentage": 98.98, "elapsed_time": "1 day, 0:31:38", "remaining_time": "0:15:05"}
438
+ {"current_steps": 4300, "total_steps": 4334, "loss": 0.2968, "lr": 1.8751725043375526e-09, "epoch": 1.9836216839677048, "percentage": 99.22, "elapsed_time": "1 day, 0:34:48", "remaining_time": "0:11:39"}
439
+ {"current_steps": 4310, "total_steps": 4334, "loss": 0.3063, "lr": 9.343713135623323e-10, "epoch": 1.988235294117647, "percentage": 99.45, "elapsed_time": "1 day, 0:38:07", "remaining_time": "0:08:13"}
440
+ {"current_steps": 4320, "total_steps": 4334, "loss": 0.2943, "lr": 3.1795232749320947e-10, "epoch": 1.9928489042675894, "percentage": 99.68, "elapsed_time": "1 day, 0:41:19", "remaining_time": "0:04:48"}
441
+ {"current_steps": 4330, "total_steps": 4334, "loss": 0.2894, "lr": 2.5955544673550438e-11, "epoch": 1.9974625144175318, "percentage": 99.91, "elapsed_time": "1 day, 0:44:46", "remaining_time": "0:01:22"}
442
+ {"current_steps": 4334, "total_steps": 4334, "epoch": 1.9993079584775086, "percentage": 100.0, "elapsed_time": "1 day, 0:48:13", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,3137 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 1.9993079584775086,
+   "eval_steps": 500,
+   "global_step": 4334,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.00461361014994233,
+       "grad_norm": 5.300843715667725,
+       "learning_rate": 2.3041474654377884e-07,
+       "loss": 0.806,
+       "step": 10
+     },
+     {
+       "epoch": 0.00922722029988466,
+       "grad_norm": 5.792849540710449,
+       "learning_rate": 4.608294930875577e-07,
+       "loss": 0.8251,
+       "step": 20
+     },
+     {
+       "epoch": 0.01384083044982699,
+       "grad_norm": 4.814388751983643,
+       "learning_rate": 6.912442396313365e-07,
+       "loss": 0.7555,
+       "step": 30
+     },
+     {
+       "epoch": 0.01845444059976932,
+       "grad_norm": 2.745297431945801,
+       "learning_rate": 9.216589861751154e-07,
+       "loss": 0.7218,
+       "step": 40
+     },
+     {
+       "epoch": 0.02306805074971165,
+       "grad_norm": 1.7126681804656982,
+       "learning_rate": 1.1520737327188942e-06,
+       "loss": 0.6751,
+       "step": 50
+     },
+     {
+       "epoch": 0.02768166089965398,
+       "grad_norm": 1.703934669494629,
+       "learning_rate": 1.382488479262673e-06,
+       "loss": 0.6197,
+       "step": 60
+     },
+     {
+       "epoch": 0.03229527104959631,
+       "grad_norm": 1.7122575044631958,
+       "learning_rate": 1.6129032258064516e-06,
+       "loss": 0.5083,
+       "step": 70
+     },
+     {
+       "epoch": 0.03690888119953864,
+       "grad_norm": 2.8078525066375732,
+       "learning_rate": 1.8433179723502307e-06,
+       "loss": 0.5326,
+       "step": 80
+     },
+     {
+       "epoch": 0.04152249134948097,
+       "grad_norm": 1.8321082592010498,
+       "learning_rate": 2.0737327188940094e-06,
+       "loss": 0.5463,
+       "step": 90
+     },
+     {
+       "epoch": 0.0461361014994233,
+       "grad_norm": 1.547890305519104,
+       "learning_rate": 2.3041474654377884e-06,
+       "loss": 0.5445,
+       "step": 100
+     },
+     {
+       "epoch": 0.05074971164936563,
+       "grad_norm": 1.5186538696289062,
+       "learning_rate": 2.5345622119815673e-06,
+       "loss": 0.4972,
+       "step": 110
+     },
+     {
+       "epoch": 0.05536332179930796,
+       "grad_norm": 1.6117104291915894,
+       "learning_rate": 2.764976958525346e-06,
+       "loss": 0.519,
+       "step": 120
+     },
+     {
+       "epoch": 0.05997693194925029,
+       "grad_norm": 1.8595815896987915,
+       "learning_rate": 2.9953917050691243e-06,
+       "loss": 0.453,
+       "step": 130
+     },
+     {
+       "epoch": 0.06459054209919261,
+       "grad_norm": 1.2419018745422363,
+       "learning_rate": 3.225806451612903e-06,
+       "loss": 0.4799,
+       "step": 140
+     },
+     {
+       "epoch": 0.06920415224913495,
+       "grad_norm": 1.5790776014328003,
+       "learning_rate": 3.4562211981566825e-06,
+       "loss": 0.4896,
+       "step": 150
+     },
+     {
+       "epoch": 0.07381776239907728,
+       "grad_norm": 1.4088906049728394,
+       "learning_rate": 3.6866359447004615e-06,
+       "loss": 0.5167,
+       "step": 160
+     },
+     {
+       "epoch": 0.0784313725490196,
+       "grad_norm": 1.2521541118621826,
+       "learning_rate": 3.91705069124424e-06,
+       "loss": 0.5127,
+       "step": 170
+     },
+     {
+       "epoch": 0.08304498269896193,
+       "grad_norm": 2.0803122520446777,
+       "learning_rate": 4.147465437788019e-06,
+       "loss": 0.4465,
+       "step": 180
+     },
+     {
+       "epoch": 0.08765859284890427,
+       "grad_norm": 1.523786187171936,
+       "learning_rate": 4.377880184331797e-06,
+       "loss": 0.4761,
+       "step": 190
+     },
+     {
+       "epoch": 0.0922722029988466,
+       "grad_norm": 1.418717622756958,
+       "learning_rate": 4.608294930875577e-06,
+       "loss": 0.5055,
+       "step": 200
+     },
+     {
+       "epoch": 0.09688581314878893,
+       "grad_norm": 1.483023762702942,
+       "learning_rate": 4.838709677419355e-06,
+       "loss": 0.4557,
+       "step": 210
+     },
+     {
+       "epoch": 0.10149942329873125,
+       "grad_norm": 1.5981467962265015,
+       "learning_rate": 5.0691244239631346e-06,
+       "loss": 0.4686,
+       "step": 220
+     },
+     {
+       "epoch": 0.1061130334486736,
+       "grad_norm": 1.130113959312439,
+       "learning_rate": 5.299539170506913e-06,
+       "loss": 0.4843,
+       "step": 230
+     },
+     {
+       "epoch": 0.11072664359861592,
+       "grad_norm": 1.8060191869735718,
+       "learning_rate": 5.529953917050692e-06,
+       "loss": 0.5015,
+       "step": 240
+     },
+     {
+       "epoch": 0.11534025374855825,
+       "grad_norm": 1.069687008857727,
+       "learning_rate": 5.76036866359447e-06,
+       "loss": 0.4622,
+       "step": 250
+     },
+     {
+       "epoch": 0.11995386389850057,
+       "grad_norm": 1.1288576126098633,
+       "learning_rate": 5.9907834101382485e-06,
+       "loss": 0.4642,
+       "step": 260
+     },
+     {
+       "epoch": 0.1245674740484429,
+       "grad_norm": 1.7959188222885132,
+       "learning_rate": 6.221198156682028e-06,
+       "loss": 0.4357,
+       "step": 270
+     },
+     {
+       "epoch": 0.12918108419838523,
+       "grad_norm": 1.1214991807937622,
+       "learning_rate": 6.451612903225806e-06,
+       "loss": 0.4547,
+       "step": 280
+     },
+     {
+       "epoch": 0.13379469434832755,
+       "grad_norm": 1.6405386924743652,
+       "learning_rate": 6.682027649769586e-06,
+       "loss": 0.4464,
+       "step": 290
+     },
+     {
+       "epoch": 0.1384083044982699,
+       "grad_norm": 1.422194004058838,
+       "learning_rate": 6.912442396313365e-06,
+       "loss": 0.4957,
+       "step": 300
+     },
+     {
+       "epoch": 0.14302191464821223,
+       "grad_norm": 1.5487645864486694,
+       "learning_rate": 7.1428571428571436e-06,
+       "loss": 0.4473,
+       "step": 310
+     },
+     {
+       "epoch": 0.14763552479815456,
+       "grad_norm": 1.3289945125579834,
+       "learning_rate": 7.373271889400923e-06,
+       "loss": 0.4831,
+       "step": 320
+     },
+     {
+       "epoch": 0.1522491349480969,
+       "grad_norm": 1.6698657274246216,
+       "learning_rate": 7.603686635944701e-06,
+       "loss": 0.4996,
+       "step": 330
+     },
+     {
+       "epoch": 0.1568627450980392,
+       "grad_norm": 1.4635429382324219,
+       "learning_rate": 7.83410138248848e-06,
+       "loss": 0.4861,
+       "step": 340
+     },
+     {
+       "epoch": 0.16147635524798154,
+       "grad_norm": 1.1806988716125488,
+       "learning_rate": 8.064516129032258e-06,
+       "loss": 0.4774,
+       "step": 350
+     },
+     {
+       "epoch": 0.16608996539792387,
+       "grad_norm": 1.4619848728179932,
+       "learning_rate": 8.294930875576038e-06,
+       "loss": 0.505,
+       "step": 360
+     },
+     {
+       "epoch": 0.1707035755478662,
+       "grad_norm": 1.3232295513153076,
+       "learning_rate": 8.525345622119815e-06,
+       "loss": 0.5116,
+       "step": 370
+     },
+     {
+       "epoch": 0.17531718569780855,
+       "grad_norm": 1.5264451503753662,
+       "learning_rate": 8.755760368663595e-06,
+       "loss": 0.4307,
+       "step": 380
+     },
+     {
+       "epoch": 0.17993079584775087,
+       "grad_norm": 1.2139824628829956,
+       "learning_rate": 8.986175115207374e-06,
+       "loss": 0.4525,
+       "step": 390
+     },
+     {
+       "epoch": 0.1845444059976932,
+       "grad_norm": 1.5944982767105103,
+       "learning_rate": 9.216589861751153e-06,
+       "loss": 0.4557,
+       "step": 400
+     },
+     {
+       "epoch": 0.18915801614763553,
+       "grad_norm": 1.3352501392364502,
+       "learning_rate": 9.447004608294931e-06,
+       "loss": 0.4927,
+       "step": 410
+     },
+     {
+       "epoch": 0.19377162629757785,
+       "grad_norm": 1.0890029668807983,
+       "learning_rate": 9.67741935483871e-06,
+       "loss": 0.4926,
+       "step": 420
+     },
+     {
+       "epoch": 0.19838523644752018,
+       "grad_norm": 1.675134539604187,
+       "learning_rate": 9.90783410138249e-06,
+       "loss": 0.4373,
+       "step": 430
+     },
+     {
+       "epoch": 0.2029988465974625,
+       "grad_norm": 1.240625023841858,
+       "learning_rate": 9.999941600087643e-06,
+       "loss": 0.4877,
+       "step": 440
+     },
+     {
+       "epoch": 0.20761245674740483,
+       "grad_norm": 1.6467055082321167,
+       "learning_rate": 9.999584716674727e-06,
+       "loss": 0.4628,
+       "step": 450
+     },
+     {
+       "epoch": 0.2122260668973472,
+       "grad_norm": 1.0588369369506836,
+       "learning_rate": 9.998903417374228e-06,
+       "loss": 0.4757,
+       "step": 460
+     },
+     {
+       "epoch": 0.21683967704728951,
+       "grad_norm": 1.150065302848816,
+       "learning_rate": 9.997897746394684e-06,
+       "loss": 0.486,
+       "step": 470
+     },
+     {
+       "epoch": 0.22145328719723184,
+       "grad_norm": 1.382214903831482,
+       "learning_rate": 9.996567768992642e-06,
+       "loss": 0.4862,
+       "step": 480
+     },
+     {
+       "epoch": 0.22606689734717417,
+       "grad_norm": 1.066063642501831,
+       "learning_rate": 9.994913571468432e-06,
+       "loss": 0.486,
+       "step": 490
+     },
+     {
+       "epoch": 0.2306805074971165,
+       "grad_norm": 1.6976910829544067,
+       "learning_rate": 9.992935261160559e-06,
+       "loss": 0.4468,
+       "step": 500
+     },
+     {
+       "epoch": 0.2306805074971165,
+       "eval_loss": 0.39874735474586487,
+       "eval_runtime": 400.8977,
+       "eval_samples_per_second": 8.409,
+       "eval_steps_per_second": 1.202,
+       "step": 500
+     },
+     {
+       "epoch": 0.23529411764705882,
+       "grad_norm": 1.5809332132339478,
+       "learning_rate": 9.990632966438743e-06,
+       "loss": 0.4171,
+       "step": 510
+     },
+     {
+       "epoch": 0.23990772779700115,
+       "grad_norm": 1.19710111618042,
+       "learning_rate": 9.988006836695593e-06,
+       "loss": 0.4764,
+       "step": 520
+     },
+     {
+       "epoch": 0.24452133794694347,
+       "grad_norm": 1.0074009895324707,
+       "learning_rate": 9.985057042336898e-06,
+       "loss": 0.4678,
+       "step": 530
+     },
+     {
+       "epoch": 0.2491349480968858,
+       "grad_norm": 1.1784993410110474,
+       "learning_rate": 9.981783774770595e-06,
+       "loss": 0.4524,
+       "step": 540
+     },
+     {
+       "epoch": 0.2537485582468281,
+       "grad_norm": 1.5833553075790405,
+       "learning_rate": 9.97818724639432e-06,
+       "loss": 0.4399,
+       "step": 550
+     },
+     {
+       "epoch": 0.25836216839677045,
+       "grad_norm": 1.1442863941192627,
+       "learning_rate": 9.974267690581646e-06,
+       "loss": 0.4897,
+       "step": 560
+     },
+     {
+       "epoch": 0.2629757785467128,
+       "grad_norm": 1.2376320362091064,
+       "learning_rate": 9.970025361666934e-06,
+       "loss": 0.4497,
+       "step": 570
+     },
+     {
+       "epoch": 0.2675893886966551,
+       "grad_norm": 1.0321787595748901,
+       "learning_rate": 9.965460534928827e-06,
+       "loss": 0.4832,
+       "step": 580
+     },
+     {
+       "epoch": 0.2722029988465975,
+       "grad_norm": 1.4388171434402466,
+       "learning_rate": 9.960573506572391e-06,
+       "loss": 0.4788,
+       "step": 590
+     },
+     {
+       "epoch": 0.2768166089965398,
+       "grad_norm": 1.7396047115325928,
+       "learning_rate": 9.95536459370989e-06,
+       "loss": 0.4484,
+       "step": 600
+     },
+     {
+       "epoch": 0.28143021914648214,
+       "grad_norm": 1.2319186925888062,
+       "learning_rate": 9.949834134340219e-06,
+       "loss": 0.4799,
+       "step": 610
+     },
+     {
+       "epoch": 0.28604382929642447,
+       "grad_norm": 1.3797781467437744,
+       "learning_rate": 9.94398248732696e-06,
+       "loss": 0.4506,
+       "step": 620
+     },
+     {
+       "epoch": 0.2906574394463668,
+       "grad_norm": 1.5929468870162964,
+       "learning_rate": 9.9378100323751e-06,
+       "loss": 0.5108,
+       "step": 630
+     },
+     {
+       "epoch": 0.2952710495963091,
+       "grad_norm": 1.4704443216323853,
+       "learning_rate": 9.931317170006398e-06,
+       "loss": 0.4607,
+       "step": 640
+     },
+     {
+       "epoch": 0.29988465974625145,
+       "grad_norm": 1.980751872062683,
+       "learning_rate": 9.924504321533387e-06,
+       "loss": 0.4564,
+       "step": 650
+     },
+     {
+       "epoch": 0.3044982698961938,
+       "grad_norm": 1.143687129020691,
+       "learning_rate": 9.91737192903204e-06,
+       "loss": 0.447,
+       "step": 660
480
+ },
481
+ {
482
+ "epoch": 0.3091118800461361,
483
+ "grad_norm": 1.0242226123809814,
484
+ "learning_rate": 9.909920455313087e-06,
485
+ "loss": 0.4712,
486
+ "step": 670
487
+ },
488
+ {
489
+ "epoch": 0.3137254901960784,
490
+ "grad_norm": 1.3482781648635864,
491
+ "learning_rate": 9.902150383891979e-06,
492
+ "loss": 0.4831,
493
+ "step": 680
494
+ },
495
+ {
496
+ "epoch": 0.31833910034602075,
497
+ "grad_norm": 1.1587884426116943,
498
+ "learning_rate": 9.894062218957517e-06,
499
+ "loss": 0.499,
500
+ "step": 690
501
+ },
502
+ {
503
+ "epoch": 0.3229527104959631,
504
+ "grad_norm": 1.1495003700256348,
505
+ "learning_rate": 9.885656485339129e-06,
506
+ "loss": 0.4482,
507
+ "step": 700
508
+ },
509
+ {
510
+ "epoch": 0.3275663206459054,
511
+ "grad_norm": 1.0492511987686157,
512
+ "learning_rate": 9.876933728472826e-06,
513
+ "loss": 0.4763,
514
+ "step": 710
515
+ },
516
+ {
517
+ "epoch": 0.33217993079584773,
518
+ "grad_norm": 1.2462799549102783,
519
+ "learning_rate": 9.867894514365802e-06,
520
+ "loss": 0.4707,
521
+ "step": 720
522
+ },
523
+ {
524
+ "epoch": 0.33679354094579006,
525
+ "grad_norm": 0.9900134205818176,
526
+ "learning_rate": 9.858539429559705e-06,
527
+ "loss": 0.472,
528
+ "step": 730
529
+ },
530
+ {
531
+ "epoch": 0.3414071510957324,
532
+ "grad_norm": 1.1227929592132568,
533
+ "learning_rate": 9.848869081092581e-06,
534
+ "loss": 0.497,
535
+ "step": 740
536
+ },
537
+ {
538
+ "epoch": 0.3460207612456747,
539
+ "grad_norm": 1.3729174137115479,
540
+ "learning_rate": 9.838884096459486e-06,
541
+ "loss": 0.4271,
542
+ "step": 750
543
+ },
544
+ {
545
+ "epoch": 0.3506343713956171,
546
+ "grad_norm": 1.1883389949798584,
547
+ "learning_rate": 9.828585123571763e-06,
548
+ "loss": 0.4977,
549
+ "step": 760
550
+ },
551
+ {
552
+ "epoch": 0.3552479815455594,
553
+ "grad_norm": 1.181907296180725,
554
+ "learning_rate": 9.817972830715003e-06,
555
+ "loss": 0.4605,
556
+ "step": 770
557
+ },
558
+ {
559
+ "epoch": 0.35986159169550175,
560
+ "grad_norm": 0.9973541498184204,
561
+ "learning_rate": 9.807047906505683e-06,
562
+ "loss": 0.4414,
563
+ "step": 780
564
+ },
565
+ {
566
+ "epoch": 0.3644752018454441,
567
+ "grad_norm": 1.148622989654541,
568
+ "learning_rate": 9.795811059846476e-06,
569
+ "loss": 0.4372,
570
+ "step": 790
571
+ },
572
+ {
573
+ "epoch": 0.3690888119953864,
574
+ "grad_norm": 0.8280909061431885,
575
+ "learning_rate": 9.78426301988026e-06,
576
+ "loss": 0.486,
577
+ "step": 800
578
+ },
579
+ {
580
+ "epoch": 0.3737024221453287,
581
+ "grad_norm": 1.027239203453064,
582
+ "learning_rate": 9.772404535942802e-06,
583
+ "loss": 0.4645,
584
+ "step": 810
585
+ },
586
+ {
587
+ "epoch": 0.37831603229527105,
588
+ "grad_norm": 1.01218581199646,
589
+ "learning_rate": 9.760236377514128e-06,
590
+ "loss": 0.4503,
591
+ "step": 820
592
+ },
593
+ {
594
+ "epoch": 0.3829296424452134,
595
+ "grad_norm": 1.0366742610931396,
596
+ "learning_rate": 9.747759334168602e-06,
597
+ "loss": 0.4602,
598
+ "step": 830
599
+ },
600
+ {
601
+ "epoch": 0.3875432525951557,
602
+ "grad_norm": 1.3200701475143433,
603
+ "learning_rate": 9.734974215523684e-06,
604
+ "loss": 0.4535,
605
+ "step": 840
606
+ },
607
+ {
608
+ "epoch": 0.39215686274509803,
609
+ "grad_norm": 1.200269103050232,
610
+ "learning_rate": 9.721881851187406e-06,
611
+ "loss": 0.4569,
612
+ "step": 850
613
+ },
614
+ {
615
+ "epoch": 0.39677047289504036,
616
+ "grad_norm": 1.3125215768814087,
617
+ "learning_rate": 9.708483090704524e-06,
618
+ "loss": 0.4359,
619
+ "step": 860
620
+ },
621
+ {
622
+ "epoch": 0.4013840830449827,
623
+ "grad_norm": 0.9996836185455322,
624
+ "learning_rate": 9.694778803501404e-06,
625
+ "loss": 0.4573,
626
+ "step": 870
627
+ },
628
+ {
629
+ "epoch": 0.405997693194925,
630
+ "grad_norm": 1.1388590335845947,
631
+ "learning_rate": 9.680769878829606e-06,
632
+ "loss": 0.4737,
633
+ "step": 880
634
+ },
635
+ {
636
+ "epoch": 0.41061130334486734,
637
+ "grad_norm": 1.0428895950317383,
638
+ "learning_rate": 9.666457225708175e-06,
639
+ "loss": 0.4532,
640
+ "step": 890
641
+ },
642
+ {
643
+ "epoch": 0.41522491349480967,
644
+ "grad_norm": 1.0916321277618408,
645
+ "learning_rate": 9.65184177286466e-06,
646
+ "loss": 0.4475,
647
+ "step": 900
648
+ },
649
+ {
650
+ "epoch": 0.419838523644752,
651
+ "grad_norm": 1.0428664684295654,
652
+ "learning_rate": 9.636924468674856e-06,
653
+ "loss": 0.4866,
654
+ "step": 910
655
+ },
656
+ {
657
+ "epoch": 0.4244521337946944,
658
+ "grad_norm": 1.1310166120529175,
659
+ "learning_rate": 9.62170628110125e-06,
660
+ "loss": 0.4437,
661
+ "step": 920
662
+ },
663
+ {
664
+ "epoch": 0.4290657439446367,
665
+ "grad_norm": 1.1518645286560059,
666
+ "learning_rate": 9.606188197630224e-06,
667
+ "loss": 0.4349,
668
+ "step": 930
669
+ },
670
+ {
671
+ "epoch": 0.43367935409457903,
672
+ "grad_norm": 1.1595722436904907,
673
+ "learning_rate": 9.590371225207981e-06,
674
+ "loss": 0.4917,
675
+ "step": 940
676
+ },
677
+ {
678
+ "epoch": 0.43829296424452135,
679
+ "grad_norm": 1.127502202987671,
680
+ "learning_rate": 9.574256390175192e-06,
681
+ "loss": 0.4682,
682
+ "step": 950
683
+ },
684
+ {
685
+ "epoch": 0.4429065743944637,
686
+ "grad_norm": 0.9480658769607544,
687
+ "learning_rate": 9.557844738200408e-06,
688
+ "loss": 0.4912,
689
+ "step": 960
690
+ },
691
+ {
692
+ "epoch": 0.447520184544406,
693
+ "grad_norm": 1.121092677116394,
694
+ "learning_rate": 9.541137334212212e-06,
695
+ "loss": 0.461,
696
+ "step": 970
697
+ },
698
+ {
699
+ "epoch": 0.45213379469434833,
700
+ "grad_norm": 1.1865901947021484,
701
+ "learning_rate": 9.524135262330098e-06,
702
+ "loss": 0.4414,
703
+ "step": 980
704
+ },
705
+ {
706
+ "epoch": 0.45674740484429066,
707
+ "grad_norm": 1.2148582935333252,
708
+ "learning_rate": 9.506839625794152e-06,
709
+ "loss": 0.4457,
710
+ "step": 990
711
+ },
712
+ {
713
+ "epoch": 0.461361014994233,
714
+ "grad_norm": 1.0445005893707275,
715
+ "learning_rate": 9.489251546893441e-06,
716
+ "loss": 0.4457,
717
+ "step": 1000
718
+ },
719
+ {
720
+ "epoch": 0.461361014994233,
721
+ "eval_loss": 0.386065274477005,
722
+ "eval_runtime": 400.8996,
723
+ "eval_samples_per_second": 8.409,
724
+ "eval_steps_per_second": 1.202,
725
+ "step": 1000
726
+ },
727
+ {
728
+ "epoch": 0.4659746251441753,
729
+ "grad_norm": 1.1309161186218262,
730
+ "learning_rate": 9.4713721668932e-06,
731
+ "loss": 0.4457,
732
+ "step": 1010
733
+ },
734
+ {
735
+ "epoch": 0.47058823529411764,
736
+ "grad_norm": 1.0543358325958252,
737
+ "learning_rate": 9.453202645960775e-06,
738
+ "loss": 0.4343,
739
+ "step": 1020
740
+ },
741
+ {
742
+ "epoch": 0.47520184544405997,
743
+ "grad_norm": 0.9181652665138245,
744
+ "learning_rate": 9.434744163090341e-06,
745
+ "loss": 0.402,
746
+ "step": 1030
747
+ },
748
+ {
749
+ "epoch": 0.4798154555940023,
750
+ "grad_norm": 0.8402210474014282,
751
+ "learning_rate": 9.415997916026401e-06,
752
+ "loss": 0.4742,
753
+ "step": 1040
754
+ },
755
+ {
756
+ "epoch": 0.4844290657439446,
757
+ "grad_norm": 0.9334933757781982,
758
+ "learning_rate": 9.396965121186058e-06,
759
+ "loss": 0.4487,
760
+ "step": 1050
761
+ },
762
+ {
763
+ "epoch": 0.48904267589388695,
764
+ "grad_norm": 1.047976016998291,
765
+ "learning_rate": 9.377647013580102e-06,
766
+ "loss": 0.449,
767
+ "step": 1060
768
+ },
769
+ {
770
+ "epoch": 0.4936562860438293,
771
+ "grad_norm": 1.0943602323532104,
772
+ "learning_rate": 9.358044846732848e-06,
773
+ "loss": 0.4591,
774
+ "step": 1070
775
+ },
776
+ {
777
+ "epoch": 0.4982698961937716,
778
+ "grad_norm": 1.235120177268982,
779
+ "learning_rate": 9.338159892600809e-06,
780
+ "loss": 0.457,
781
+ "step": 1080
782
+ },
783
+ {
784
+ "epoch": 0.5028835063437139,
785
+ "grad_norm": 1.1438275575637817,
786
+ "learning_rate": 9.317993441490163e-06,
787
+ "loss": 0.4863,
788
+ "step": 1090
789
+ },
790
+ {
791
+ "epoch": 0.5074971164936563,
792
+ "grad_norm": 1.0768276453018188,
793
+ "learning_rate": 9.297546801973027e-06,
794
+ "loss": 0.4863,
795
+ "step": 1100
796
+ },
797
+ {
798
+ "epoch": 0.5121107266435986,
799
+ "grad_norm": 0.9337517023086548,
800
+ "learning_rate": 9.276821300802535e-06,
801
+ "loss": 0.4445,
802
+ "step": 1110
803
+ },
804
+ {
805
+ "epoch": 0.5167243367935409,
806
+ "grad_norm": 1.2967017889022827,
807
+ "learning_rate": 9.255818282826755e-06,
808
+ "loss": 0.4654,
809
+ "step": 1120
810
+ },
811
+ {
812
+ "epoch": 0.5213379469434832,
813
+ "grad_norm": 1.256011962890625,
814
+ "learning_rate": 9.23453911090143e-06,
815
+ "loss": 0.4164,
816
+ "step": 1130
817
+ },
818
+ {
819
+ "epoch": 0.5259515570934256,
820
+ "grad_norm": 0.8023069500923157,
821
+ "learning_rate": 9.21298516580153e-06,
822
+ "loss": 0.4471,
823
+ "step": 1140
824
+ },
825
+ {
826
+ "epoch": 0.5305651672433679,
827
+ "grad_norm": 1.1097207069396973,
828
+ "learning_rate": 9.191157846131662e-06,
829
+ "loss": 0.438,
830
+ "step": 1150
831
+ },
832
+ {
833
+ "epoch": 0.5351787773933102,
834
+ "grad_norm": 1.0085464715957642,
835
+ "learning_rate": 9.169058568235324e-06,
836
+ "loss": 0.4754,
837
+ "step": 1160
838
+ },
839
+ {
840
+ "epoch": 0.5397923875432526,
841
+ "grad_norm": 1.2570720911026,
842
+ "learning_rate": 9.146688766102985e-06,
843
+ "loss": 0.4622,
844
+ "step": 1170
845
+ },
846
+ {
847
+ "epoch": 0.544405997693195,
848
+ "grad_norm": 0.8718953728675842,
849
+ "learning_rate": 9.124049891279052e-06,
850
+ "loss": 0.4778,
851
+ "step": 1180
852
+ },
853
+ {
854
+ "epoch": 0.5490196078431373,
855
+ "grad_norm": 1.1282092332839966,
856
+ "learning_rate": 9.101143412767665e-06,
857
+ "loss": 0.43,
858
+ "step": 1190
859
+ },
860
+ {
861
+ "epoch": 0.5536332179930796,
862
+ "grad_norm": 1.1799993515014648,
863
+ "learning_rate": 9.077970816937394e-06,
864
+ "loss": 0.4398,
865
+ "step": 1200
866
+ },
867
+ {
868
+ "epoch": 0.558246828143022,
869
+ "grad_norm": 1.0852458477020264,
870
+ "learning_rate": 9.05453360742477e-06,
871
+ "loss": 0.4509,
872
+ "step": 1210
873
+ },
874
+ {
875
+ "epoch": 0.5628604382929643,
876
+ "grad_norm": 0.9532956480979919,
877
+ "learning_rate": 9.030833305036732e-06,
878
+ "loss": 0.4322,
879
+ "step": 1220
880
+ },
881
+ {
882
+ "epoch": 0.5674740484429066,
883
+ "grad_norm": 1.1594491004943848,
884
+ "learning_rate": 9.006871447651941e-06,
885
+ "loss": 0.4463,
886
+ "step": 1230
887
+ },
888
+ {
889
+ "epoch": 0.5720876585928489,
890
+ "grad_norm": 1.0407638549804688,
891
+ "learning_rate": 8.982649590120982e-06,
892
+ "loss": 0.4744,
893
+ "step": 1240
894
+ },
895
+ {
896
+ "epoch": 0.5767012687427913,
897
+ "grad_norm": 0.9278146028518677,
898
+ "learning_rate": 8.95816930416548e-06,
899
+ "loss": 0.4506,
900
+ "step": 1250
901
+ },
902
+ {
903
+ "epoch": 0.5813148788927336,
904
+ "grad_norm": 1.0362186431884766,
905
+ "learning_rate": 8.933432178276108e-06,
906
+ "loss": 0.5047,
907
+ "step": 1260
908
+ },
909
+ {
910
+ "epoch": 0.5859284890426759,
911
+ "grad_norm": 1.1875795125961304,
912
+ "learning_rate": 8.908439817609514e-06,
913
+ "loss": 0.4331,
914
+ "step": 1270
915
+ },
916
+ {
917
+ "epoch": 0.5905420991926182,
918
+ "grad_norm": 1.0592981576919556,
919
+ "learning_rate": 8.883193843884169e-06,
920
+ "loss": 0.4869,
921
+ "step": 1280
922
+ },
923
+ {
924
+ "epoch": 0.5951557093425606,
925
+ "grad_norm": 1.037862777709961,
926
+ "learning_rate": 8.857695895275127e-06,
927
+ "loss": 0.4568,
928
+ "step": 1290
929
+ },
930
+ {
931
+ "epoch": 0.5997693194925029,
932
+ "grad_norm": 1.190478801727295,
933
+ "learning_rate": 8.831947626307735e-06,
934
+ "loss": 0.4291,
935
+ "step": 1300
936
+ },
937
+ {
938
+ "epoch": 0.6043829296424452,
939
+ "grad_norm": 1.168628215789795,
940
+ "learning_rate": 8.805950707750268e-06,
941
+ "loss": 0.4864,
942
+ "step": 1310
943
+ },
944
+ {
945
+ "epoch": 0.6089965397923875,
946
+ "grad_norm": 1.1069689989089966,
947
+ "learning_rate": 8.779706826505513e-06,
948
+ "loss": 0.4755,
949
+ "step": 1320
950
+ },
951
+ {
952
+ "epoch": 0.6136101499423299,
953
+ "grad_norm": 1.0567044019699097,
954
+ "learning_rate": 8.753217685501317e-06,
955
+ "loss": 0.4429,
956
+ "step": 1330
957
+ },
958
+ {
959
+ "epoch": 0.6182237600922722,
960
+ "grad_norm": 1.3095778226852417,
961
+ "learning_rate": 8.72648500358008e-06,
962
+ "loss": 0.4799,
963
+ "step": 1340
964
+ },
965
+ {
966
+ "epoch": 0.6228373702422145,
967
+ "grad_norm": 1.2443181276321411,
968
+ "learning_rate": 8.699510515387222e-06,
969
+ "loss": 0.4238,
970
+ "step": 1350
971
+ },
972
+ {
973
+ "epoch": 0.6274509803921569,
974
+ "grad_norm": 0.9751482605934143,
975
+ "learning_rate": 8.672295971258624e-06,
976
+ "loss": 0.4621,
977
+ "step": 1360
978
+ },
979
+ {
980
+ "epoch": 0.6320645905420992,
981
+ "grad_norm": 1.0337327718734741,
982
+ "learning_rate": 8.644843137107058e-06,
983
+ "loss": 0.482,
984
+ "step": 1370
985
+ },
986
+ {
987
+ "epoch": 0.6366782006920415,
988
+ "grad_norm": 1.4924689531326294,
989
+ "learning_rate": 8.617153794307588e-06,
990
+ "loss": 0.4138,
991
+ "step": 1380
992
+ },
993
+ {
994
+ "epoch": 0.6412918108419838,
995
+ "grad_norm": 1.1185983419418335,
996
+ "learning_rate": 8.58922973958199e-06,
997
+ "loss": 0.4808,
998
+ "step": 1390
999
+ },
1000
+ {
1001
+ "epoch": 0.6459054209919262,
1002
+ "grad_norm": 0.8987427949905396,
1003
+ "learning_rate": 8.561072784882156e-06,
1004
+ "loss": 0.4196,
1005
+ "step": 1400
1006
+ },
1007
+ {
1008
+ "epoch": 0.6505190311418685,
1009
+ "grad_norm": 0.9043972492218018,
1010
+ "learning_rate": 8.532684757272527e-06,
1011
+ "loss": 0.4675,
1012
+ "step": 1410
1013
+ },
1014
+ {
1015
+ "epoch": 0.6551326412918108,
1016
+ "grad_norm": 1.1553007364273071,
1017
+ "learning_rate": 8.504067498811533e-06,
1018
+ "loss": 0.4585,
1019
+ "step": 1420
1020
+ },
1021
+ {
1022
+ "epoch": 0.6597462514417531,
1023
+ "grad_norm": 1.2655616998672485,
1024
+ "learning_rate": 8.475222866432065e-06,
1025
+ "loss": 0.4557,
1026
+ "step": 1430
1027
+ },
1028
+ {
1029
+ "epoch": 0.6643598615916955,
1030
+ "grad_norm": 0.9298520684242249,
1031
+ "learning_rate": 8.446152731820984e-06,
1032
+ "loss": 0.4378,
1033
+ "step": 1440
1034
+ },
1035
+ {
1036
+ "epoch": 0.6689734717416378,
1037
+ "grad_norm": 1.0106518268585205,
1038
+ "learning_rate": 8.416858981297663e-06,
1039
+ "loss": 0.482,
1040
+ "step": 1450
1041
+ },
1042
+ {
1043
+ "epoch": 0.6735870818915801,
1044
+ "grad_norm": 0.7871996760368347,
1045
+ "learning_rate": 8.387343515691594e-06,
1046
+ "loss": 0.4153,
1047
+ "step": 1460
1048
+ },
1049
+ {
1050
+ "epoch": 0.6782006920415224,
1051
+ "grad_norm": 0.8685919046401978,
1052
+ "learning_rate": 8.357608250219046e-06,
1053
+ "loss": 0.4619,
1054
+ "step": 1470
1055
+ },
1056
+ {
1057
+ "epoch": 0.6828143021914648,
1058
+ "grad_norm": 1.3125975131988525,
1059
+ "learning_rate": 8.327655114358782e-06,
1060
+ "loss": 0.4327,
1061
+ "step": 1480
1062
+ },
1063
+ {
1064
+ "epoch": 0.6874279123414071,
1065
+ "grad_norm": 1.0396238565444946,
1066
+ "learning_rate": 8.297486051726864e-06,
1067
+ "loss": 0.4713,
1068
+ "step": 1490
1069
+ },
1070
+ {
1071
+ "epoch": 0.6920415224913494,
1072
+ "grad_norm": 0.7324469685554504,
1073
+ "learning_rate": 8.267103019950529e-06,
1074
+ "loss": 0.4197,
1075
+ "step": 1500
1076
+ },
1077
+ {
1078
+ "epoch": 0.6920415224913494,
1079
+ "eval_loss": 0.3744993507862091,
1080
+ "eval_runtime": 409.5922,
1081
+ "eval_samples_per_second": 8.23,
1082
+ "eval_steps_per_second": 1.177,
1083
+ "step": 1500
1084
+ },
1085
+ {
1086
+ "epoch": 0.6966551326412919,
1087
+ "grad_norm": 1.0459123849868774,
1088
+ "learning_rate": 8.23650799054117e-06,
1089
+ "loss": 0.4525,
1090
+ "step": 1510
1091
+ },
1092
+ {
1093
+ "epoch": 0.7012687427912342,
1094
+ "grad_norm": 0.972507655620575,
1095
+ "learning_rate": 8.2057029487664e-06,
1096
+ "loss": 0.4344,
1097
+ "step": 1520
1098
+ },
1099
+ {
1100
+ "epoch": 0.7058823529411765,
1101
+ "grad_norm": 0.9018703103065491,
1102
+ "learning_rate": 8.174689893521239e-06,
1103
+ "loss": 0.4456,
1104
+ "step": 1530
1105
+ },
1106
+ {
1107
+ "epoch": 0.7104959630911188,
1108
+ "grad_norm": 1.1698877811431885,
1109
+ "learning_rate": 8.143470837198394e-06,
1110
+ "loss": 0.4342,
1111
+ "step": 1540
1112
+ },
1113
+ {
1114
+ "epoch": 0.7151095732410612,
1115
+ "grad_norm": 0.8043988943099976,
1116
+ "learning_rate": 8.112047805557693e-06,
1117
+ "loss": 0.4407,
1118
+ "step": 1550
1119
+ },
1120
+ {
1121
+ "epoch": 0.7197231833910035,
1122
+ "grad_norm": 1.0644773244857788,
1123
+ "learning_rate": 8.080422837594627e-06,
1124
+ "loss": 0.4188,
1125
+ "step": 1560
1126
+ },
1127
+ {
1128
+ "epoch": 0.7243367935409458,
1129
+ "grad_norm": 1.3345856666564941,
1130
+ "learning_rate": 8.048597985408047e-06,
1131
+ "loss": 0.4594,
1132
+ "step": 1570
1133
+ },
1134
+ {
1135
+ "epoch": 0.7289504036908881,
1136
+ "grad_norm": 0.9245930910110474,
1137
+ "learning_rate": 8.016575314067005e-06,
1138
+ "loss": 0.4549,
1139
+ "step": 1580
1140
+ },
1141
+ {
1142
+ "epoch": 0.7335640138408305,
1143
+ "grad_norm": 0.8729799389839172,
1144
+ "learning_rate": 7.984356901476755e-06,
1145
+ "loss": 0.4548,
1146
+ "step": 1590
1147
+ },
1148
+ {
1149
+ "epoch": 0.7381776239907728,
1150
+ "grad_norm": 1.0106137990951538,
1151
+ "learning_rate": 7.951944838243916e-06,
1152
+ "loss": 0.4452,
1153
+ "step": 1600
1154
+ },
1155
+ {
1156
+ "epoch": 0.7427912341407151,
1157
+ "grad_norm": 1.196505069732666,
1158
+ "learning_rate": 7.919341227540828e-06,
1159
+ "loss": 0.4491,
1160
+ "step": 1610
1161
+ },
1162
+ {
1163
+ "epoch": 0.7474048442906575,
1164
+ "grad_norm": 1.1595311164855957,
1165
+ "learning_rate": 7.886548184969063e-06,
1166
+ "loss": 0.4731,
1167
+ "step": 1620
1168
+ },
1169
+ {
1170
+ "epoch": 0.7520184544405998,
1171
+ "grad_norm": 1.1693317890167236,
1172
+ "learning_rate": 7.85356783842216e-06,
1173
+ "loss": 0.432,
1174
+ "step": 1630
1175
+ },
1176
+ {
1177
+ "epoch": 0.7566320645905421,
1178
+ "grad_norm": 0.9775774478912354,
1179
+ "learning_rate": 7.820402327947543e-06,
1180
+ "loss": 0.461,
1181
+ "step": 1640
1182
+ },
1183
+ {
1184
+ "epoch": 0.7612456747404844,
1185
+ "grad_norm": 1.2050389051437378,
1186
+ "learning_rate": 7.78705380560766e-06,
1187
+ "loss": 0.4118,
1188
+ "step": 1650
1189
+ },
1190
+ {
1191
+ "epoch": 0.7658592848904268,
1192
+ "grad_norm": 0.976572573184967,
1193
+ "learning_rate": 7.753524435340334e-06,
1194
+ "loss": 0.445,
1195
+ "step": 1660
1196
+ },
1197
+ {
1198
+ "epoch": 0.7704728950403691,
1199
+ "grad_norm": 0.9844825863838196,
1200
+ "learning_rate": 7.719816392818354e-06,
1201
+ "loss": 0.453,
1202
+ "step": 1670
1203
+ },
1204
+ {
1205
+ "epoch": 0.7750865051903114,
1206
+ "grad_norm": 0.966995894908905,
1207
+ "learning_rate": 7.685931865308293e-06,
1208
+ "loss": 0.4424,
1209
+ "step": 1680
1210
+ },
1211
+ {
1212
+ "epoch": 0.7797001153402537,
1213
+ "grad_norm": 0.9509267807006836,
1214
+ "learning_rate": 7.651873051528582e-06,
1215
+ "loss": 0.4164,
1216
+ "step": 1690
1217
+ },
1218
+ {
1219
+ "epoch": 0.7843137254901961,
1220
+ "grad_norm": 0.89404296875,
1221
+ "learning_rate": 7.617642161506837e-06,
1222
+ "loss": 0.4345,
1223
+ "step": 1700
1224
+ },
1225
+ {
1226
+ "epoch": 0.7889273356401384,
1227
+ "grad_norm": 1.4994254112243652,
1228
+ "learning_rate": 7.583241416436462e-06,
1229
+ "loss": 0.4373,
1230
+ "step": 1710
1231
+ },
1232
+ {
1233
+ "epoch": 0.7935409457900807,
1234
+ "grad_norm": 1.2022879123687744,
1235
+ "learning_rate": 7.548673048532504e-06,
1236
+ "loss": 0.4146,
1237
+ "step": 1720
1238
+ },
1239
+ {
1240
+ "epoch": 0.798154555940023,
1241
+ "grad_norm": 1.1147469282150269,
1242
+ "learning_rate": 7.513939300886816e-06,
1243
+ "loss": 0.4008,
1244
+ "step": 1730
1245
+ },
1246
+ {
1247
+ "epoch": 0.8027681660899654,
1248
+ "grad_norm": 0.9766092896461487,
1249
+ "learning_rate": 7.479042427322509e-06,
1250
+ "loss": 0.4401,
1251
+ "step": 1740
1252
+ },
1253
+ {
1254
+ "epoch": 0.8073817762399077,
1255
+ "grad_norm": 1.0522454977035522,
1256
+ "learning_rate": 7.443984692247701e-06,
1257
+ "loss": 0.4565,
1258
+ "step": 1750
1259
+ },
1260
+ {
1261
+ "epoch": 0.81199538638985,
1262
+ "grad_norm": 0.9872923493385315,
1263
+ "learning_rate": 7.408768370508577e-06,
1264
+ "loss": 0.432,
1265
+ "step": 1760
1266
+ },
1267
+ {
1268
+ "epoch": 0.8166089965397924,
1269
+ "grad_norm": 0.729234516620636,
1270
+ "learning_rate": 7.373395747241792e-06,
1271
+ "loss": 0.3847,
1272
+ "step": 1770
1273
+ },
1274
+ {
1275
+ "epoch": 0.8212226066897347,
1276
+ "grad_norm": 0.9378695487976074,
1277
+ "learning_rate": 7.337869117726176e-06,
1278
+ "loss": 0.412,
1279
+ "step": 1780
1280
+ },
1281
+ {
1282
+ "epoch": 0.825836216839677,
1283
+ "grad_norm": 1.1060293912887573,
1284
+ "learning_rate": 7.302190787233808e-06,
1285
+ "loss": 0.4462,
1286
+ "step": 1790
1287
+ },
1288
+ {
1289
+ "epoch": 0.8304498269896193,
1290
+ "grad_norm": 1.1734408140182495,
1291
+ "learning_rate": 7.266363070880424e-06,
1292
+ "loss": 0.4321,
1293
+ "step": 1800
1294
+ },
1295
+ {
1296
+ "epoch": 0.8350634371395617,
1297
+ "grad_norm": 0.9876635670661926,
1298
+ "learning_rate": 7.2303882934751965e-06,
1299
+ "loss": 0.4477,
1300
+ "step": 1810
1301
+ },
1302
+ {
1303
+ "epoch": 0.839677047289504,
1304
+ "grad_norm": 1.1338772773742676,
1305
+ "learning_rate": 7.194268789369875e-06,
1306
+ "loss": 0.4028,
1307
+ "step": 1820
1308
+ },
1309
+ {
1310
+ "epoch": 0.8442906574394463,
1311
+ "grad_norm": 0.9537489414215088,
1312
+ "learning_rate": 7.158006902307322e-06,
1313
+ "loss": 0.457,
1314
+ "step": 1830
1315
+ },
1316
+ {
1317
+ "epoch": 0.8489042675893888,
1318
+ "grad_norm": 1.215729832649231,
1319
+ "learning_rate": 7.121604985269423e-06,
1320
+ "loss": 0.4248,
1321
+ "step": 1840
1322
+ },
1323
+ {
1324
+ "epoch": 0.8535178777393311,
1325
+ "grad_norm": 1.3123574256896973,
1326
+ "learning_rate": 7.085065400324407e-06,
1327
+ "loss": 0.4731,
1328
+ "step": 1850
1329
+ },
1330
+ {
1331
+ "epoch": 0.8581314878892734,
1332
+ "grad_norm": 0.9171858429908752,
1333
+ "learning_rate": 7.048390518473579e-06,
1334
+ "loss": 0.3925,
1335
+ "step": 1860
1336
+ },
1337
+ {
1338
+ "epoch": 0.8627450980392157,
1339
+ "grad_norm": 1.195125937461853,
1340
+ "learning_rate": 7.011582719497466e-06,
1341
+ "loss": 0.4481,
1342
+ "step": 1870
1343
+ },
1344
+ {
1345
+ "epoch": 0.8673587081891581,
1346
+ "grad_norm": 1.029279112815857,
1347
+ "learning_rate": 6.974644391801395e-06,
1348
+ "loss": 0.4487,
1349
+ "step": 1880
1350
+ },
1351
+ {
1352
+ "epoch": 0.8719723183391004,
1353
+ "grad_norm": 1.345962643623352,
1354
+ "learning_rate": 6.9375779322605154e-06,
1355
+ "loss": 0.4424,
1356
+ "step": 1890
1357
+ },
1358
+ {
1359
+ "epoch": 0.8765859284890427,
1360
+ "grad_norm": 0.9620792269706726,
1361
+ "learning_rate": 6.900385746064268e-06,
1362
+ "loss": 0.4628,
1363
+ "step": 1900
1364
+ },
1365
+ {
1366
+ "epoch": 0.881199538638985,
1367
+ "grad_norm": 1.1548868417739868,
1368
+ "learning_rate": 6.863070246560319e-06,
1369
+ "loss": 0.4194,
1370
+ "step": 1910
1371
+ },
1372
+ {
1373
+ "epoch": 0.8858131487889274,
1374
+ "grad_norm": 0.8851338624954224,
1375
+ "learning_rate": 6.825633855097954e-06,
1376
+ "loss": 0.4404,
1377
+ "step": 1920
1378
+ },
1379
+ {
1380
+ "epoch": 0.8904267589388697,
1381
+ "grad_norm": 1.1914703845977783,
1382
+ "learning_rate": 6.788079000870966e-06,
1383
+ "loss": 0.4654,
1384
+ "step": 1930
1385
+ },
1386
+ {
1387
+ "epoch": 0.895040369088812,
1388
+ "grad_norm": 0.8694286346435547,
1389
+ "learning_rate": 6.7504081207600295e-06,
1390
+ "loss": 0.4849,
1391
+ "step": 1940
1392
+ },
1393
+ {
1394
+ "epoch": 0.8996539792387543,
1395
+ "grad_norm": 0.9843218326568604,
1396
+ "learning_rate": 6.712623659174569e-06,
1397
+ "loss": 0.4286,
1398
+ "step": 1950
1399
+ },
1400
+ {
1401
+ "epoch": 0.9042675893886967,
1402
+ "grad_norm": 1.0082261562347412,
1403
+ "learning_rate": 6.674728067894149e-06,
1404
+ "loss": 0.4271,
1405
+ "step": 1960
1406
+ },
1407
+ {
1408
+ "epoch": 0.908881199538639,
1409
+ "grad_norm": 1.0179473161697388,
1410
+ "learning_rate": 6.636723805909384e-06,
1411
+ "loss": 0.4384,
1412
+ "step": 1970
1413
+ },
1414
+ {
1415
+ "epoch": 0.9134948096885813,
1416
+ "grad_norm": 1.102802038192749,
1417
+ "learning_rate": 6.598613339262369e-06,
1418
+ "loss": 0.4058,
1419
+ "step": 1980
1420
+ },
1421
+ {
1422
+ "epoch": 0.9181084198385236,
1423
+ "grad_norm": 1.0184437036514282,
1424
+ "learning_rate": 6.560399140886673e-06,
1425
+ "loss": 0.4047,
1426
+ "step": 1990
1427
+ },
1428
+ {
1429
+ "epoch": 0.922722029988466,
1430
+ "grad_norm": 0.9515882134437561,
1431
+ "learning_rate": 6.522083690446863e-06,
1432
+ "loss": 0.4264,
1433
+ "step": 2000
1434
+ },
1435
+ {
1436
+ "epoch": 0.922722029988466,
1437
+ "eval_loss": 0.3640458583831787,
1438
+ "eval_runtime": 405.1587,
1439
+ "eval_samples_per_second": 8.32,
1440
+ "eval_steps_per_second": 1.19,
1441
+ "step": 2000
1442
+ },
1443
+ {
1444
+ "epoch": 0.9273356401384083,
1445
+ "grad_norm": 0.9829747080802917,
1446
+ "learning_rate": 6.483669474177609e-06,
1447
+ "loss": 0.4309,
1448
+ "step": 2010
1449
+ },
1450
+ {
1451
+ "epoch": 0.9319492502883506,
1452
+ "grad_norm": 1.134294033050537,
1453
+ "learning_rate": 6.445158984722358e-06,
1454
+ "loss": 0.4321,
1455
+ "step": 2020
1456
+ },
1457
+ {
1458
+ "epoch": 0.936562860438293,
1459
+ "grad_norm": 0.8324179649353027,
1460
+ "learning_rate": 6.406554720971583e-06,
1461
+ "loss": 0.4118,
1462
+ "step": 2030
1463
+ },
1464
+ {
1465
+ "epoch": 0.9411764705882353,
1466
+ "grad_norm": 0.9672048091888428,
1467
+ "learning_rate": 6.367859187900635e-06,
1468
+ "loss": 0.4508,
1469
+ "step": 2040
1470
+ },
1471
+ {
1472
+ "epoch": 0.9457900807381776,
1473
+ "grad_norm": 0.7900782823562622,
1474
+ "learning_rate": 6.329074896407202e-06,
1475
+ "loss": 0.4088,
1476
+ "step": 2050
1477
+ },
1478
+ {
1479
+ "epoch": 0.9504036908881199,
1480
+ "grad_norm": 1.2132816314697266,
1481
+ "learning_rate": 6.29020436314838e-06,
1482
+ "loss": 0.4,
1483
+ "step": 2060
1484
+ },
1485
+ {
1486
+ "epoch": 0.9550173010380623,
1487
+ "grad_norm": 1.028160810470581,
1488
+ "learning_rate": 6.251250110377368e-06,
1489
+ "loss": 0.4122,
1490
+ "step": 2070
1491
+ },
1492
+ {
1493
+ "epoch": 0.9596309111880046,
1494
+ "grad_norm": 0.979695200920105,
1495
+ "learning_rate": 6.212214665779805e-06,
1496
+ "loss": 0.4449,
1497
+ "step": 2080
1498
+ },
1499
+ {
1500
+ "epoch": 0.9642445213379469,
1501
+ "grad_norm": 0.845983624458313,
1502
+ "learning_rate": 6.173100562309751e-06,
1503
+ "loss": 0.4229,
1504
+ "step": 2090
1505
+ },
1506
+ {
1507
+ "epoch": 0.9688581314878892,
1508
+ "grad_norm": 1.1386796236038208,
1509
+ "learning_rate": 6.133910338025329e-06,
1510
+ "loss": 0.4389,
1511
+ "step": 2100
1512
+ },
1513
+ {
1514
+ "epoch": 0.9734717416378316,
1515
+ "grad_norm": 1.0641363859176636,
1516
+ "learning_rate": 6.094646535924026e-06,
1517
+ "loss": 0.4459,
1518
+ "step": 2110
1519
+ },
1520
+ {
1521
+ "epoch": 0.9780853517877739,
1522
+ "grad_norm": 1.2342710494995117,
1523
+ "learning_rate": 6.055311703777699e-06,
1524
+ "loss": 0.4556,
1525
+ "step": 2120
1526
+ },
1527
+ {
1528
+ "epoch": 0.9826989619377162,
1529
+ "grad_norm": 0.9290037751197815,
1530
+ "learning_rate": 6.0159083939672326e-06,
1531
+ "loss": 0.4837,
1532
+ "step": 2130
1533
+ },
1534
+ {
1535
+ "epoch": 0.9873125720876585,
1536
+ "grad_norm": 1.0555449724197388,
1537
+ "learning_rate": 5.976439163316936e-06,
1538
+ "loss": 0.4119,
1539
+ "step": 2140
1540
+ },
1541
+ {
1542
+ "epoch": 0.9919261822376009,
1543
+ "grad_norm": 1.001559853553772,
1544
+ "learning_rate": 5.936906572928625e-06,
1545
+ "loss": 0.4391,
1546
+ "step": 2150
1547
+ },
1548
+ {
1549
+ "epoch": 0.9965397923875432,
1550
+ "grad_norm": 1.120397686958313,
1551
+ "learning_rate": 5.897313188015433e-06,
1552
+ "loss": 0.4175,
1553
+ "step": 2160
1554
+ },
1555
+ {
1556
+ "epoch": 1.0009227220299886,
1557
+ "grad_norm": 1.0661535263061523,
1558
+ "learning_rate": 5.8576615777353725e-06,
1559
+ "loss": 0.4176,
1560
+ "step": 2170
1561
+ },
1562
+ {
1563
+ "epoch": 1.0055363321799309,
1564
+ "grad_norm": 0.8464300036430359,
1565
+ "learning_rate": 5.81795431502461e-06,
1566
+ "loss": 0.3182,
1567
+ "step": 2180
1568
+ },
1569
+ {
1570
+ "epoch": 1.0101499423298732,
1571
+ "grad_norm": 1.150085687637329,
1572
+ "learning_rate": 5.778193976430518e-06,
1573
+ "loss": 0.3412,
1574
+ "step": 2190
1575
+ },
1576
+ {
1577
+ "epoch": 1.0147635524798155,
1578
+ "grad_norm": 1.1552358865737915,
1579
+ "learning_rate": 5.738383141944493e-06,
1580
+ "loss": 0.3254,
1581
+ "step": 2200
1582
+ },
1583
+ {
1584
+ "epoch": 1.0193771626297579,
1585
+ "grad_norm": 0.8325443863868713,
1586
+ "learning_rate": 5.698524394834531e-06,
1587
+ "loss": 0.3121,
1588
+ "step": 2210
1589
+ },
1590
+ {
1591
+ "epoch": 1.0239907727797002,
1592
+ "grad_norm": 0.9441822171211243,
1593
+ "learning_rate": 5.658620321477613e-06,
1594
+ "loss": 0.309,
1595
+ "step": 2220
1596
+ },
1597
+ {
1598
+ "epoch": 1.0286043829296425,
1599
+ "grad_norm": 0.683917224407196,
1600
+ "learning_rate": 5.6186735111918735e-06,
1601
+ "loss": 0.2945,
1602
+ "step": 2230
1603
+ },
1604
+ {
1605
+ "epoch": 1.0332179930795848,
1606
+ "grad_norm": 1.1293641328811646,
1607
+ "learning_rate": 5.5786865560685855e-06,
1608
+ "loss": 0.3277,
1609
+ "step": 2240
1610
+ },
1611
+ {
1612
+ "epoch": 1.0378316032295272,
1613
+ "grad_norm": 1.0378141403198242,
1614
+ "learning_rate": 5.538662050803965e-06,
1615
+ "loss": 0.3337,
1616
+ "step": 2250
1617
+ },
1618
+ {
1619
+ "epoch": 1.0424452133794695,
1620
+ "grad_norm": 0.8613712787628174,
1621
+ "learning_rate": 5.498602592530799e-06,
1622
+ "loss": 0.3145,
1623
+ "step": 2260
1624
+ },
1625
+ {
1626
+ "epoch": 1.0470588235294118,
1627
+ "grad_norm": 0.8895742297172546,
1628
+ "learning_rate": 5.458510780649932e-06,
1629
+ "loss": 0.3016,
1630
+ "step": 2270
1631
+ },
1632
+ {
1633
+ "epoch": 1.0516724336793541,
1634
+ "grad_norm": 0.8962990045547485,
1635
+ "learning_rate": 5.41838921666158e-06,
1636
+ "loss": 0.3107,
1637
+ "step": 2280
1638
+ },
1639
+ {
1640
+ "epoch": 1.0562860438292965,
1641
+ "grad_norm": 1.1359519958496094,
1642
+ "learning_rate": 5.378240503996531e-06,
1643
+ "loss": 0.313,
1644
+ "step": 2290
1645
+ },
1646
+ {
1647
+ "epoch": 1.0608996539792388,
1648
+ "grad_norm": 0.9063310027122498,
1649
+ "learning_rate": 5.338067247847219e-06,
1650
+ "loss": 0.3186,
1651
+ "step": 2300
1652
+ },
1653
+ {
1654
+ "epoch": 1.0655132641291811,
1655
+ "grad_norm": 0.927183985710144,
1656
+ "learning_rate": 5.297872054998663e-06,
1657
+ "loss": 0.3198,
1658
+ "step": 2310
1659
+ },
1660
+ {
1661
+ "epoch": 1.0701268742791235,
1662
+ "grad_norm": 1.573792815208435,
1663
+ "learning_rate": 5.257657533659326e-06,
1664
+ "loss": 0.3181,
1665
+ "step": 2320
1666
+ },
1667
+ {
1668
+ "epoch": 1.0747404844290658,
1669
+ "grad_norm": 1.2177760601043701,
1670
+ "learning_rate": 5.217426293291869e-06,
1671
+ "loss": 0.3369,
1672
+ "step": 2330
1673
+ },
1674
+ {
1675
+ "epoch": 1.079354094579008,
1676
+ "grad_norm": 1.1653475761413574,
1677
+ "learning_rate": 5.177180944443821e-06,
1678
+ "loss": 0.311,
1679
+ "step": 2340
1680
+ },
1681
+ {
1682
+ "epoch": 1.0839677047289504,
1683
+ "grad_norm": 0.874153196811676,
1684
+ "learning_rate": 5.136924098578201e-06,
1685
+ "loss": 0.3109,
1686
+ "step": 2350
1687
+ },
1688
+ {
1689
+ "epoch": 1.0885813148788928,
1690
+ "grad_norm": 1.03621244430542,
1691
+ "learning_rate": 5.096658367904043e-06,
1692
+ "loss": 0.2808,
1693
+ "step": 2360
1694
+ },
1695
+ {
1696
+ "epoch": 1.093194925028835,
1697
+ "grad_norm": 1.4676544666290283,
1698
+ "learning_rate": 5.056386365206908e-06,
1699
+ "loss": 0.3435,
1700
+ "step": 2370
1701
+ },
1702
+ {
1703
+ "epoch": 1.0978085351787774,
1704
+ "grad_norm": 1.0248422622680664,
1705
+ "learning_rate": 5.016110703679341e-06,
1706
+ "loss": 0.3141,
1707
+ "step": 2380
1708
+ },
1709
+ {
1710
+ "epoch": 1.1024221453287197,
1711
+ "grad_norm": 1.0083783864974976,
1712
+ "learning_rate": 4.9758339967512995e-06,
1713
+ "loss": 0.3074,
1714
+ "step": 2390
1715
+ },
1716
+ {
1717
+ "epoch": 1.107035755478662,
1718
+ "grad_norm": 0.9300906658172607,
1719
+ "learning_rate": 4.935558857920576e-06,
1720
+ "loss": 0.3255,
1721
+ "step": 2400
1722
+ },
1723
+ {
1724
+ "epoch": 1.1116493656286044,
1725
+ "grad_norm": 1.1581122875213623,
1726
+ "learning_rate": 4.895287900583216e-06,
1727
+ "loss": 0.3007,
1728
+ "step": 2410
1729
+ },
1730
+ {
1731
+ "epoch": 1.1162629757785467,
1732
+ "grad_norm": 1.3533753156661987,
1733
+ "learning_rate": 4.855023737863927e-06,
1734
+ "loss": 0.3383,
1735
+ "step": 2420
1736
+ },
1737
+ {
1738
+ "epoch": 1.120876585928489,
1739
+ "grad_norm": 1.157009482383728,
1740
+ "learning_rate": 4.814768982446532e-06,
1741
+ "loss": 0.3207,
1742
+ "step": 2430
1743
+ },
1744
+ {
1745
+ "epoch": 1.1254901960784314,
1746
+ "grad_norm": 1.2679253816604614,
1747
+ "learning_rate": 4.774526246404417e-06,
1748
+ "loss": 0.3069,
1749
+ "step": 2440
1750
+ },
1751
+ {
1752
+ "epoch": 1.1301038062283737,
1753
+ "grad_norm": 1.1905463933944702,
1754
+ "learning_rate": 4.734298141031057e-06,
1755
+ "loss": 0.2949,
1756
+ "step": 2450
1757
+ },
1758
+ {
1759
+ "epoch": 1.134717416378316,
1760
+ "grad_norm": 0.9034658670425415,
1761
+ "learning_rate": 4.69408727667056e-06,
1762
+ "loss": 0.3602,
1763
+ "step": 2460
1764
+ },
1765
+ {
1766
+ "epoch": 1.1393310265282584,
1767
+ "grad_norm": 0.964447021484375,
1768
+ "learning_rate": 4.653896262548291e-06,
1769
+ "loss": 0.2999,
1770
+ "step": 2470
1771
+ },
1772
+ {
1773
+ "epoch": 1.1439446366782007,
1774
+ "grad_norm": 0.8305296897888184,
1775
+ "learning_rate": 4.613727706601558e-06,
1776
+ "loss": 0.3186,
1777
+ "step": 2480
1778
+ },
1779
+ {
1780
+ "epoch": 1.148558246828143,
1781
+ "grad_norm": 1.3243507146835327,
1782
+ "learning_rate": 4.573584215310394e-06,
1783
+ "loss": 0.2857,
1784
+ "step": 2490
1785
+ },
1786
+ {
1787
+ "epoch": 1.1531718569780853,
1788
+ "grad_norm": 1.1306155920028687,
1789
+ "learning_rate": 4.533468393528421e-06,
1790
+ "loss": 0.3188,
1791
+ "step": 2500
1792
+ },
1793
+ {
1794
+ "epoch": 1.1531718569780853,
1795
+ "eval_loss": 0.36377301812171936,
1796
+ "eval_runtime": 432.1909,
1797
+ "eval_samples_per_second": 7.8,
1798
+ "eval_steps_per_second": 1.115,
1799
+ "step": 2500
1800
+ },
1801
+ {
1802
+ "epoch": 1.1577854671280277,
1803
+ "grad_norm": 1.311614751815796,
1804
+ "learning_rate": 4.493382844313826e-06,
1805
+ "loss": 0.3255,
1806
+ "step": 2510
1807
+ },
1808
+ {
1809
+ "epoch": 1.16239907727797,
1810
+ "grad_norm": 1.1632609367370605,
1811
+ "learning_rate": 4.453330168760451e-06,
1812
+ "loss": 0.3408,
1813
+ "step": 2520
1814
+ },
1815
+ {
1816
+ "epoch": 1.1670126874279123,
1817
+ "grad_norm": 0.9437416195869446,
1818
+ "learning_rate": 4.41331296582902e-06,
1819
+ "loss": 0.3562,
1820
+ "step": 2530
1821
+ },
1822
+ {
1823
+ "epoch": 1.1716262975778546,
1824
+ "grad_norm": 1.4374769926071167,
1825
+ "learning_rate": 4.373333832178478e-06,
1826
+ "loss": 0.3049,
1827
+ "step": 2540
1828
+ },
1829
+ {
1830
+ "epoch": 1.176239907727797,
1831
+ "grad_norm": 0.9964131712913513,
1832
+ "learning_rate": 4.333395361997521e-06,
1833
+ "loss": 0.3223,
1834
+ "step": 2550
1835
+ },
1836
+ {
1837
+ "epoch": 1.1808535178777393,
1838
+ "grad_norm": 0.7799270749092102,
1839
+ "learning_rate": 4.293500146836241e-06,
1840
+ "loss": 0.2913,
1841
+ "step": 2560
1842
+ },
1843
+ {
1844
+ "epoch": 1.1854671280276816,
1845
+ "grad_norm": 1.0871920585632324,
1846
+ "learning_rate": 4.25365077543798e-06,
1847
+ "loss": 0.2823,
1848
+ "step": 2570
1849
+ },
1850
+ {
1851
+ "epoch": 1.190080738177624,
1852
+ "grad_norm": 1.0069403648376465,
1853
+ "learning_rate": 4.213849833571341e-06,
1854
+ "loss": 0.3583,
1855
+ "step": 2580
1856
+ },
1857
+ {
1858
+ "epoch": 1.1946943483275663,
1859
+ "grad_norm": 0.9551932215690613,
1860
+ "learning_rate": 4.174099903862403e-06,
1861
+ "loss": 0.3101,
1862
+ "step": 2590
1863
+ },
1864
+ {
1865
+ "epoch": 1.1993079584775086,
1866
+ "grad_norm": 0.7922395467758179,
1867
+ "learning_rate": 4.134403565627144e-06,
1868
+ "loss": 0.3311,
1869
+ "step": 2600
1870
+ },
1871
+ {
1872
+ "epoch": 1.203921568627451,
1873
+ "grad_norm": 1.0218504667282104,
1874
+ "learning_rate": 4.0947633947040616e-06,
1875
+ "loss": 0.3437,
1876
+ "step": 2610
1877
+ },
1878
+ {
1879
+ "epoch": 1.2085351787773932,
1880
+ "grad_norm": 1.01131272315979,
1881
+ "learning_rate": 4.055181963287044e-06,
1882
+ "loss": 0.2788,
1883
+ "step": 2620
1884
+ },
1885
+ {
1886
+ "epoch": 1.2131487889273356,
1887
+ "grad_norm": 0.9900946021080017,
1888
+ "learning_rate": 4.01566183975845e-06,
1889
+ "loss": 0.3188,
1890
+ "step": 2630
1891
+ },
1892
+ {
1893
+ "epoch": 1.217762399077278,
1894
+ "grad_norm": 1.219028353691101,
1895
+ "learning_rate": 3.9762055885224614e-06,
1896
+ "loss": 0.2936,
1897
+ "step": 2640
1898
+ },
1899
+ {
1900
+ "epoch": 1.2223760092272202,
1901
+ "grad_norm": 1.4025200605392456,
1902
+ "learning_rate": 3.936815769838682e-06,
1903
+ "loss": 0.266,
1904
+ "step": 2650
1905
+ },
1906
+ {
1907
+ "epoch": 1.2269896193771626,
1908
+ "grad_norm": 1.1445423364639282,
1909
+ "learning_rate": 3.897494939655996e-06,
1910
+ "loss": 0.315,
1911
+ "step": 2660
1912
+ },
1913
+ {
1914
+ "epoch": 1.2316032295271049,
1915
+ "grad_norm": 1.1953898668289185,
1916
+ "learning_rate": 3.8582456494467214e-06,
1917
+ "loss": 0.3161,
1918
+ "step": 2670
1919
+ },
1920
+ {
1921
+ "epoch": 1.2362168396770472,
1922
+ "grad_norm": 1.501749873161316,
1923
+ "learning_rate": 3.819070446041059e-06,
1924
+ "loss": 0.3216,
1925
+ "step": 2680
1926
+ },
1927
+ {
1928
+ "epoch": 1.2408304498269895,
1929
+ "grad_norm": 0.9947803616523743,
1930
+ "learning_rate": 3.779971871461813e-06,
1931
+ "loss": 0.3184,
1932
+ "step": 2690
1933
+ },
1934
+ {
1935
+ "epoch": 1.2454440599769319,
1936
+ "grad_norm": 0.9146224856376648,
1937
+ "learning_rate": 3.7409524627594607e-06,
1938
+ "loss": 0.3097,
1939
+ "step": 2700
1940
+ },
1941
+ {
1942
+ "epoch": 1.2500576701268744,
1943
+ "grad_norm": 1.1721278429031372,
1944
+ "learning_rate": 3.702014751847514e-06,
1945
+ "loss": 0.2805,
1946
+ "step": 2710
1947
+ },
1948
+ {
1949
+ "epoch": 1.2546712802768165,
1950
+ "grad_norm": 0.8447152972221375,
1951
+ "learning_rate": 3.6631612653382354e-06,
1952
+ "loss": 0.3199,
1953
+ "step": 2720
1954
+ },
1955
+ {
1956
+ "epoch": 1.259284890426759,
1957
+ "grad_norm": 1.5338748693466187,
1958
+ "learning_rate": 3.624394524378684e-06,
1959
+ "loss": 0.3204,
1960
+ "step": 2730
1961
+ },
1962
+ {
1963
+ "epoch": 1.2638985005767012,
1964
+ "grad_norm": 0.9287798404693604,
1965
+ "learning_rate": 3.585717044487126e-06,
1966
+ "loss": 0.3378,
1967
+ "step": 2740
1968
+ },
1969
+ {
1970
+ "epoch": 1.2685121107266437,
1971
+ "grad_norm": 1.4134514331817627,
1972
+ "learning_rate": 3.5471313353898056e-06,
1973
+ "loss": 0.3073,
1974
+ "step": 2750
1975
+ },
1976
+ {
1977
+ "epoch": 1.2731257208765858,
1978
+ "grad_norm": 1.08121919631958,
1979
+ "learning_rate": 3.5086399008580885e-06,
1980
+ "loss": 0.3255,
1981
+ "step": 2760
1982
+ },
1983
+ {
1984
+ "epoch": 1.2777393310265284,
1985
+ "grad_norm": 1.100626826286316,
1986
+ "learning_rate": 3.470245238546002e-06,
1987
+ "loss": 0.3108,
1988
+ "step": 2770
1989
+ },
1990
+ {
1991
+ "epoch": 1.2823529411764705,
1992
+ "grad_norm": 0.8085044622421265,
1993
+ "learning_rate": 3.4319498398281638e-06,
1994
+ "loss": 0.2944,
1995
+ "step": 2780
1996
+ },
1997
+ {
1998
+ "epoch": 1.286966551326413,
1999
+ "grad_norm": 1.2880297899246216,
2000
+ "learning_rate": 3.393756189638115e-06,
2001
+ "loss": 0.3167,
2002
+ "step": 2790
2003
+ },
2004
+ {
2005
+ "epoch": 1.2915801614763551,
2006
+ "grad_norm": 1.2243609428405762,
2007
+ "learning_rate": 3.355666766307084e-06,
2008
+ "loss": 0.3009,
2009
+ "step": 2800
2010
+ },
2011
+ {
2012
+ "epoch": 1.2961937716262977,
2013
+ "grad_norm": 0.699437141418457,
2014
+ "learning_rate": 3.3176840414031653e-06,
2015
+ "loss": 0.2878,
2016
+ "step": 2810
2017
+ },
2018
+ {
2019
+ "epoch": 1.3008073817762398,
2020
+ "grad_norm": 1.0296318531036377,
2021
+ "learning_rate": 3.2798104795709484e-06,
2022
+ "loss": 0.2743,
2023
+ "step": 2820
2024
+ },
2025
+ {
2026
+ "epoch": 1.3054209919261823,
2027
+ "grad_norm": 1.024989366531372,
2028
+ "learning_rate": 3.242048538371585e-06,
2029
+ "loss": 0.3117,
2030
+ "step": 2830
2031
+ },
2032
+ {
2033
+ "epoch": 1.3100346020761244,
2034
+ "grad_norm": 1.111118197441101,
2035
+ "learning_rate": 3.2044006681233226e-06,
2036
+ "loss": 0.3065,
2037
+ "step": 2840
2038
+ },
2039
+ {
2040
+ "epoch": 1.314648212226067,
2041
+ "grad_norm": 1.0204840898513794,
2042
+ "learning_rate": 3.1668693117425128e-06,
2043
+ "loss": 0.3182,
2044
+ "step": 2850
2045
+ },
2046
+ {
2047
+ "epoch": 1.3192618223760093,
2048
+ "grad_norm": 0.909860372543335,
2049
+ "learning_rate": 3.1294569045850844e-06,
2050
+ "loss": 0.3362,
2051
+ "step": 2860
2052
+ },
2053
+ {
2054
+ "epoch": 1.3238754325259516,
2055
+ "grad_norm": 1.1789814233779907,
2056
+ "learning_rate": 3.092165874288525e-06,
2057
+ "loss": 0.3202,
2058
+ "step": 2870
2059
+ },
2060
+ {
2061
+ "epoch": 1.328489042675894,
2062
+ "grad_norm": 0.8858640193939209,
2063
+ "learning_rate": 3.05499864061435e-06,
2064
+ "loss": 0.3061,
2065
+ "step": 2880
2066
+ },
2067
+ {
2068
+ "epoch": 1.3331026528258363,
2069
+ "grad_norm": 0.8976421356201172,
2070
+ "learning_rate": 3.017957615291088e-06,
2071
+ "loss": 0.2937,
2072
+ "step": 2890
2073
+ },
2074
+ {
2075
+ "epoch": 1.3377162629757786,
2076
+ "grad_norm": 1.0524935722351074,
2077
+ "learning_rate": 2.981045201857796e-06,
2078
+ "loss": 0.3056,
2079
+ "step": 2900
2080
+ },
2081
+ {
2082
+ "epoch": 1.342329873125721,
2083
+ "grad_norm": 1.0822246074676514,
2084
+ "learning_rate": 2.9442637955080787e-06,
2085
+ "loss": 0.2964,
2086
+ "step": 2910
2087
+ },
2088
+ {
2089
+ "epoch": 1.3469434832756633,
2090
+ "grad_norm": 1.043286681175232,
2091
+ "learning_rate": 2.9076157829346883e-06,
2092
+ "loss": 0.322,
2093
+ "step": 2920
2094
+ },
2095
+ {
2096
+ "epoch": 1.3515570934256056,
2097
+ "grad_norm": 1.2808856964111328,
2098
+ "learning_rate": 2.871103542174637e-06,
2099
+ "loss": 0.3186,
2100
+ "step": 2930
2101
+ },
2102
+ {
2103
+ "epoch": 1.356170703575548,
2104
+ "grad_norm": 0.9058982729911804,
2105
+ "learning_rate": 2.8347294424549075e-06,
2106
+ "loss": 0.2989,
2107
+ "step": 2940
2108
+ },
2109
+ {
2110
+ "epoch": 1.3607843137254902,
2111
+ "grad_norm": 0.8408973217010498,
2112
+ "learning_rate": 2.7984958440387045e-06,
2113
+ "loss": 0.3095,
2114
+ "step": 2950
2115
+ },
2116
+ {
2117
+ "epoch": 1.3653979238754326,
2118
+ "grad_norm": 0.7515527606010437,
2119
+ "learning_rate": 2.7624050980723032e-06,
2120
+ "loss": 0.3209,
2121
+ "step": 2960
2122
+ },
2123
+ {
2124
+ "epoch": 1.370011534025375,
2125
+ "grad_norm": 0.9075823426246643,
2126
+ "learning_rate": 2.726459546432488e-06,
2127
+ "loss": 0.3238,
2128
+ "step": 2970
2129
+ },
2130
+ {
2131
+ "epoch": 1.3746251441753172,
2132
+ "grad_norm": 0.7237765192985535,
2133
+ "learning_rate": 2.690661521574596e-06,
2134
+ "loss": 0.2856,
2135
+ "step": 2980
2136
+ },
2137
+ {
2138
+ "epoch": 1.3792387543252596,
2139
+ "grad_norm": 1.110317349433899,
2140
+ "learning_rate": 2.655013346381158e-06,
2141
+ "loss": 0.3145,
2142
+ "step": 2990
2143
+ },
2144
+ {
2145
+ "epoch": 1.3838523644752019,
2146
+ "grad_norm": 0.925359845161438,
2147
+ "learning_rate": 2.6195173340111767e-06,
2148
+ "loss": 0.2938,
2149
+ "step": 3000
2150
+ },
2151
+ {
2152
+ "epoch": 1.3838523644752019,
2153
+ "eval_loss": 0.3571609854698181,
2154
+ "eval_runtime": 407.4112,
2155
+ "eval_samples_per_second": 8.274,
2156
+ "eval_steps_per_second": 1.183,
2157
+ "step": 3000
2158
+ },
2159
+ {
2160
+ "epoch": 1.3884659746251442,
2161
+ "grad_norm": 0.768183171749115,
2162
+ "learning_rate": 2.5841757877500245e-06,
2163
+ "loss": 0.2978,
2164
+ "step": 3010
2165
+ },
2166
+ {
2167
+ "epoch": 1.3930795847750865,
2168
+ "grad_norm": 0.9414054751396179,
2169
+ "learning_rate": 2.548991000859997e-06,
2170
+ "loss": 0.2824,
2171
+ "step": 3020
2172
+ },
2173
+ {
2174
+ "epoch": 1.3976931949250289,
2175
+ "grad_norm": 1.148766040802002,
2176
+ "learning_rate": 2.513965256431488e-06,
2177
+ "loss": 0.3256,
2178
+ "step": 3030
2179
+ },
2180
+ {
2181
+ "epoch": 1.4023068050749712,
2182
+ "grad_norm": 1.0877196788787842,
2183
+ "learning_rate": 2.4791008272348656e-06,
2184
+ "loss": 0.3297,
2185
+ "step": 3040
2186
+ },
2187
+ {
2188
+ "epoch": 1.4069204152249135,
2189
+ "grad_norm": 1.0412601232528687,
2190
+ "learning_rate": 2.444399975572974e-06,
2191
+ "loss": 0.3279,
2192
+ "step": 3050
2193
+ },
2194
+ {
2195
+ "epoch": 1.4115340253748558,
2196
+ "grad_norm": 1.1179814338684082,
2197
+ "learning_rate": 2.40986495313435e-06,
2198
+ "loss": 0.3103,
2199
+ "step": 3060
2200
+ },
2201
+ {
2202
+ "epoch": 1.4161476355247982,
2203
+ "grad_norm": 0.7637813091278076,
2204
+ "learning_rate": 2.3754980008471074e-06,
2205
+ "loss": 0.3231,
2206
+ "step": 3070
2207
+ },
2208
+ {
2209
+ "epoch": 1.4207612456747405,
2210
+ "grad_norm": 1.0383415222167969,
2211
+ "learning_rate": 2.3413013487335332e-06,
2212
+ "loss": 0.3138,
2213
+ "step": 3080
2214
+ },
2215
+ {
2216
+ "epoch": 1.4253748558246828,
2217
+ "grad_norm": 0.8388441801071167,
2218
+ "learning_rate": 2.307277215765377e-06,
2219
+ "loss": 0.2695,
2220
+ "step": 3090
2221
+ },
2222
+ {
2223
+ "epoch": 1.4299884659746251,
2224
+ "grad_norm": 0.9990552663803101,
2225
+ "learning_rate": 2.273427809719867e-06,
2226
+ "loss": 0.2983,
2227
+ "step": 3100
2228
+ },
2229
+ {
2230
+ "epoch": 1.4346020761245675,
2231
+ "grad_norm": 0.8428414463996887,
2232
+ "learning_rate": 2.2397553270364546e-06,
2233
+ "loss": 0.3141,
2234
+ "step": 3110
2235
+ },
2236
+ {
2237
+ "epoch": 1.4392156862745098,
2238
+ "grad_norm": 0.9415843486785889,
2239
+ "learning_rate": 2.206261952674284e-06,
2240
+ "loss": 0.2959,
2241
+ "step": 3120
2242
+ },
2243
+ {
2244
+ "epoch": 1.4438292964244521,
2245
+ "grad_norm": 1.3723379373550415,
2246
+ "learning_rate": 2.172949859970422e-06,
2247
+ "loss": 0.3348,
2248
+ "step": 3130
2249
+ },
2250
+ {
2251
+ "epoch": 1.4484429065743945,
2252
+ "grad_norm": 0.8914421796798706,
2253
+ "learning_rate": 2.1398212104988273e-06,
2254
+ "loss": 0.3098,
2255
+ "step": 3140
2256
+ },
2257
+ {
2258
+ "epoch": 1.4530565167243368,
2259
+ "grad_norm": 0.8774773478507996,
2260
+ "learning_rate": 2.1068781539300874e-06,
2261
+ "loss": 0.2701,
2262
+ "step": 3150
2263
+ },
2264
+ {
2265
+ "epoch": 1.457670126874279,
2266
+ "grad_norm": 1.0844688415527344,
2267
+ "learning_rate": 2.0741228278919347e-06,
2268
+ "loss": 0.3135,
2269
+ "step": 3160
2270
+ },
2271
+ {
2272
+ "epoch": 1.4622837370242214,
2273
+ "grad_norm": 1.0603561401367188,
2274
+ "learning_rate": 2.0415573578305343e-06,
2275
+ "loss": 0.3234,
2276
+ "step": 3170
2277
+ },
2278
+ {
2279
+ "epoch": 1.4668973471741638,
2280
+ "grad_norm": 0.9056031703948975,
2281
+ "learning_rate": 2.0091838568725685e-06,
2282
+ "loss": 0.3034,
2283
+ "step": 3180
2284
+ },
2285
+ {
2286
+ "epoch": 1.471510957324106,
2287
+ "grad_norm": 0.8055946230888367,
2288
+ "learning_rate": 1.977004425688126e-06,
2289
+ "loss": 0.3403,
2290
+ "step": 3190
2291
+ },
2292
+ {
2293
+ "epoch": 1.4761245674740484,
2294
+ "grad_norm": 1.1034411191940308,
2295
+ "learning_rate": 1.945021152354379e-06,
2296
+ "loss": 0.3235,
2297
+ "step": 3200
2298
+ },
2299
+ {
2300
+ "epoch": 1.4807381776239907,
2301
+ "grad_norm": 1.3559411764144897,
2302
+ "learning_rate": 1.913236112220101e-06,
2303
+ "loss": 0.2852,
2304
+ "step": 3210
2305
+ },
2306
+ {
2307
+ "epoch": 1.485351787773933,
2308
+ "grad_norm": 1.1443620920181274,
2309
+ "learning_rate": 1.8816513677709935e-06,
2310
+ "loss": 0.3362,
2311
+ "step": 3220
2312
+ },
2313
+ {
2314
+ "epoch": 1.4899653979238754,
2315
+ "grad_norm": 0.9599668979644775,
2316
+ "learning_rate": 1.8502689684958664e-06,
2317
+ "loss": 0.2814,
2318
+ "step": 3230
2319
+ },
2320
+ {
2321
+ "epoch": 1.4945790080738177,
2322
+ "grad_norm": 1.053106665611267,
2323
+ "learning_rate": 1.8190909507536326e-06,
2324
+ "loss": 0.3092,
2325
+ "step": 3240
2326
+ },
2327
+ {
2328
+ "epoch": 1.49919261822376,
2329
+ "grad_norm": 0.979612410068512,
2330
+ "learning_rate": 1.7881193376411822e-06,
2331
+ "loss": 0.2931,
2332
+ "step": 3250
2333
+ },
2334
+ {
2335
+ "epoch": 1.5038062283737024,
2336
+ "grad_norm": 1.0935841798782349,
2337
+ "learning_rate": 1.7573561388621102e-06,
2338
+ "loss": 0.2852,
2339
+ "step": 3260
2340
+ },
2341
+ {
2342
+ "epoch": 1.5084198385236447,
2343
+ "grad_norm": 1.003023386001587,
2344
+ "learning_rate": 1.7268033505962972e-06,
2345
+ "loss": 0.3252,
2346
+ "step": 3270
2347
+ },
2348
+ {
2349
+ "epoch": 1.5130334486735872,
2350
+ "grad_norm": 0.8895878195762634,
2351
+ "learning_rate": 1.6964629553703893e-06,
2352
+ "loss": 0.2965,
2353
+ "step": 3280
2354
+ },
2355
+ {
2356
+ "epoch": 1.5176470588235293,
2357
+ "grad_norm": 1.2238770723342896,
2358
+ "learning_rate": 1.6663369219291558e-06,
2359
+ "loss": 0.3256,
2360
+ "step": 3290
2361
+ },
2362
+ {
2363
+ "epoch": 1.522260668973472,
2364
+ "grad_norm": 0.9977489709854126,
2365
+ "learning_rate": 1.6364272051077335e-06,
2366
+ "loss": 0.3087,
2367
+ "step": 3300
2368
+ },
2369
+ {
2370
+ "epoch": 1.526874279123414,
2371
+ "grad_norm": 0.8793919682502747,
2372
+ "learning_rate": 1.606735745704784e-06,
2373
+ "loss": 0.3082,
2374
+ "step": 3310
2375
+ },
2376
+ {
2377
+ "epoch": 1.5314878892733566,
2378
+ "grad_norm": 1.015448808670044,
2379
+ "learning_rate": 1.5772644703565564e-06,
2380
+ "loss": 0.3089,
2381
+ "step": 3320
2382
+ },
2383
+ {
2384
+ "epoch": 1.5361014994232987,
2385
+ "grad_norm": 0.9907401204109192,
2386
+ "learning_rate": 1.5480152914118784e-06,
2387
+ "loss": 0.312,
2388
+ "step": 3330
2389
+ },
2390
+ {
2391
+ "epoch": 1.5407151095732412,
2392
+ "grad_norm": 1.1181472539901733,
2393
+ "learning_rate": 1.5189901068080536e-06,
2394
+ "loss": 0.2756,
2395
+ "step": 3340
2396
+ },
2397
+ {
2398
+ "epoch": 1.5453287197231833,
2399
+ "grad_norm": 0.7450747489929199,
2400
+ "learning_rate": 1.4901907999477167e-06,
2401
+ "loss": 0.2931,
2402
+ "step": 3350
2403
+ },
2404
+ {
2405
+ "epoch": 1.5499423298731259,
2406
+ "grad_norm": 0.7395336031913757,
2407
+ "learning_rate": 1.4616192395766189e-06,
2408
+ "loss": 0.3312,
2409
+ "step": 3360
2410
+ },
2411
+ {
2412
+ "epoch": 1.554555940023068,
2413
+ "grad_norm": 1.0844025611877441,
2414
+ "learning_rate": 1.4332772796623655e-06,
2415
+ "loss": 0.2877,
2416
+ "step": 3370
2417
+ },
2418
+ {
2419
+ "epoch": 1.5591695501730105,
2420
+ "grad_norm": 1.0162688493728638,
2421
+ "learning_rate": 1.405166759274123e-06,
2422
+ "loss": 0.2865,
2423
+ "step": 3380
2424
+ },
2425
+ {
2426
+ "epoch": 1.5637831603229526,
2427
+ "grad_norm": 1.459636926651001,
2428
+ "learning_rate": 1.3772895024632753e-06,
2429
+ "loss": 0.2772,
2430
+ "step": 3390
2431
+ },
2432
+ {
2433
+ "epoch": 1.5683967704728952,
2434
+ "grad_norm": 1.1167926788330078,
2435
+ "learning_rate": 1.349647318145067e-06,
2436
+ "loss": 0.2826,
2437
+ "step": 3400
2438
+ },
2439
+ {
2440
+ "epoch": 1.5730103806228373,
2441
+ "grad_norm": 1.4571030139923096,
2442
+ "learning_rate": 1.3222419999812248e-06,
2443
+ "loss": 0.2582,
2444
+ "step": 3410
2445
+ },
2446
+ {
2447
+ "epoch": 1.5776239907727798,
2448
+ "grad_norm": 1.154638648033142,
2449
+ "learning_rate": 1.2950753262635712e-06,
2450
+ "loss": 0.3361,
2451
+ "step": 3420
2452
+ },
2453
+ {
2454
+ "epoch": 1.582237600922722,
2455
+ "grad_norm": 0.5414898991584778,
2456
+ "learning_rate": 1.2681490597986313e-06,
2457
+ "loss": 0.305,
2458
+ "step": 3430
2459
+ },
2460
+ {
2461
+ "epoch": 1.5868512110726645,
2462
+ "grad_norm": 0.8521725535392761,
2463
+ "learning_rate": 1.2414649477932511e-06,
2464
+ "loss": 0.2935,
2465
+ "step": 3440
2466
+ },
2467
+ {
2468
+ "epoch": 1.5914648212226066,
2469
+ "grad_norm": 1.0056465864181519,
2470
+ "learning_rate": 1.2150247217412186e-06,
2471
+ "loss": 0.3227,
2472
+ "step": 3450
2473
+ },
2474
+ {
2475
+ "epoch": 1.5960784313725491,
2476
+ "grad_norm": 1.3629816770553589,
2477
+ "learning_rate": 1.1888300973109112e-06,
2478
+ "loss": 0.3037,
2479
+ "step": 3460
2480
+ },
2481
+ {
2482
+ "epoch": 1.6006920415224912,
2483
+ "grad_norm": 1.0090680122375488,
2484
+ "learning_rate": 1.1628827742339688e-06,
2485
+ "loss": 0.3172,
2486
+ "step": 3470
2487
+ },
2488
+ {
2489
+ "epoch": 1.6053056516724338,
2490
+ "grad_norm": 1.347844123840332,
2491
+ "learning_rate": 1.1371844361950045e-06,
2492
+ "loss": 0.3046,
2493
+ "step": 3480
2494
+ },
2495
+ {
2496
+ "epoch": 1.6099192618223759,
2497
+ "grad_norm": 0.7335895299911499,
2498
+ "learning_rate": 1.1117367507223452e-06,
2499
+ "loss": 0.3107,
2500
+ "step": 3490
2501
+ },
2502
+ {
2503
+ "epoch": 1.6145328719723184,
2504
+ "grad_norm": 0.8737802505493164,
2505
+ "learning_rate": 1.0865413690798321e-06,
2506
+ "loss": 0.2891,
2507
+ "step": 3500
2508
+ },
2509
+ {
2510
+ "epoch": 1.6145328719723184,
2511
+ "eval_loss": 0.3522779047489166,
2512
+ "eval_runtime": 405.2938,
2513
+ "eval_samples_per_second": 8.317,
2514
+ "eval_steps_per_second": 1.189,
2515
+ "step": 3500
2516
+ },
2517
+ {
2518
+ "epoch": 1.6191464821222605,
2519
+ "grad_norm": 0.9549174904823303,
2520
+ "learning_rate": 1.061599926159676e-06,
2521
+ "loss": 0.3177,
2522
+ "step": 3510
2523
+ },
2524
+ {
2525
+ "epoch": 1.623760092272203,
2526
+ "grad_norm": 1.2092400789260864,
2527
+ "learning_rate": 1.036914040376364e-06,
2528
+ "loss": 0.2951,
2529
+ "step": 3520
2530
+ },
2531
+ {
2532
+ "epoch": 1.6283737024221452,
2533
+ "grad_norm": 0.9136941432952881,
2534
+ "learning_rate": 1.0124853135616475e-06,
2535
+ "loss": 0.273,
2536
+ "step": 3530
2537
+ },
2538
+ {
2539
+ "epoch": 1.6329873125720877,
2540
+ "grad_norm": 0.8041252493858337,
2541
+ "learning_rate": 9.883153308606035e-07,
2542
+ "loss": 0.307,
2543
+ "step": 3540
2544
+ },
2545
+ {
2546
+ "epoch": 1.6376009227220298,
2547
+ "grad_norm": 2.2038888931274414,
2548
+ "learning_rate": 9.644056606287727e-07,
2549
+ "loss": 0.3031,
2550
+ "step": 3550
2551
+ },
2552
+ {
2553
+ "epoch": 1.6422145328719724,
2554
+ "grad_norm": 0.995631754398346,
2555
+ "learning_rate": 9.407578543303913e-07,
2556
+ "loss": 0.3121,
2557
+ "step": 3560
2558
+ },
2559
+ {
2560
+ "epoch": 1.6468281430219145,
2561
+ "grad_norm": 1.1409215927124023,
2562
+ "learning_rate": 9.173734464377204e-07,
2563
+ "loss": 0.2709,
2564
+ "step": 3570
2565
+ },
2566
+ {
2567
+ "epoch": 1.651441753171857,
2568
+ "grad_norm": 1.1905242204666138,
2569
+ "learning_rate": 8.942539543314799e-07,
2570
+ "loss": 0.2877,
2571
+ "step": 3580
2572
+ },
2573
+ {
2574
+ "epoch": 1.6560553633217991,
2575
+ "grad_norm": 1.2991387844085693,
2576
+ "learning_rate": 8.714008782023797e-07,
2577
+ "loss": 0.306,
2578
+ "step": 3590
2579
+ },
2580
+ {
2581
+ "epoch": 1.6606689734717417,
2582
+ "grad_norm": 1.122862696647644,
2583
+ "learning_rate": 8.488157009537796e-07,
2584
+ "loss": 0.3156,
2585
+ "step": 3600
2586
+ },
2587
+ {
2588
+ "epoch": 1.665282583621684,
2589
+ "grad_norm": 1.0552375316619873,
2590
+ "learning_rate": 8.264998881054659e-07,
2591
+ "loss": 0.3164,
2592
+ "step": 3610
2593
+ },
2594
+ {
2595
+ "epoch": 1.6698961937716263,
2596
+ "grad_norm": 1.0529013872146606,
2597
+ "learning_rate": 8.044548876985531e-07,
2598
+ "loss": 0.2823,
2599
+ "step": 3620
2600
+ },
2601
+ {
2602
+ "epoch": 1.6745098039215687,
2603
+ "grad_norm": 1.0816291570663452,
2604
+ "learning_rate": 7.826821302015275e-07,
2605
+ "loss": 0.3184,
2606
+ "step": 3630
2607
+ },
2608
+ {
2609
+ "epoch": 1.679123414071511,
2610
+ "grad_norm": 0.7152329683303833,
2611
+ "learning_rate": 7.61183028417422e-07,
2612
+ "loss": 0.2956,
2613
+ "step": 3640
2614
+ },
2615
+ {
2616
+ "epoch": 1.6837370242214533,
2617
+ "grad_norm": 0.9819076061248779,
2618
+ "learning_rate": 7.399589773921412e-07,
2619
+ "loss": 0.3187,
2620
+ "step": 3650
2621
+ },
2622
+ {
2623
+ "epoch": 1.6883506343713957,
2624
+ "grad_norm": 0.6662834286689758,
2625
+ "learning_rate": 7.190113543239408e-07,
2626
+ "loss": 0.3194,
2627
+ "step": 3660
2628
+ },
2629
+ {
2630
+ "epoch": 1.692964244521338,
2631
+ "grad_norm": 1.200137734413147,
2632
+ "learning_rate": 6.983415184740616e-07,
2633
+ "loss": 0.2958,
2634
+ "step": 3670
2635
+ },
2636
+ {
2637
+ "epoch": 1.6975778546712803,
2638
+ "grad_norm": 0.8711331486701965,
2639
+ "learning_rate": 6.779508110785332e-07,
2640
+ "loss": 0.2761,
2641
+ "step": 3680
2642
+ },
2643
+ {
2644
+ "epoch": 1.7021914648212226,
2645
+ "grad_norm": 1.2060991525650024,
2646
+ "learning_rate": 6.578405552611361e-07,
2647
+ "loss": 0.2758,
2648
+ "step": 3690
2649
+ },
2650
+ {
2651
+ "epoch": 1.706805074971165,
2652
+ "grad_norm": 0.7914460897445679,
2653
+ "learning_rate": 6.380120559475505e-07,
2654
+ "loss": 0.3272,
2655
+ "step": 3700
2656
+ },
2657
+ {
2658
+ "epoch": 1.7114186851211073,
2659
+ "grad_norm": 1.0925244092941284,
2660
+ "learning_rate": 6.184665997806832e-07,
2661
+ "loss": 0.2947,
2662
+ "step": 3710
2663
+ },
2664
+ {
2665
+ "epoch": 1.7160322952710496,
2666
+ "grad_norm": 0.9509474635124207,
2667
+ "learning_rate": 5.992054550371723e-07,
2668
+ "loss": 0.3304,
2669
+ "step": 3720
2670
+ },
2671
+ {
2672
+ "epoch": 1.720645905420992,
2673
+ "grad_norm": 1.3933912515640259,
2674
+ "learning_rate": 5.802298715451016e-07,
2675
+ "loss": 0.3214,
2676
+ "step": 3730
2677
+ },
2678
+ {
2679
+ "epoch": 1.7252595155709343,
2680
+ "grad_norm": 1.1641534566879272,
2681
+ "learning_rate": 5.615410806028875e-07,
2682
+ "loss": 0.2974,
2683
+ "step": 3740
2684
+ },
2685
+ {
2686
+ "epoch": 1.7298731257208766,
2687
+ "grad_norm": 0.8583273887634277,
2688
+ "learning_rate": 5.431402948993947e-07,
2689
+ "loss": 0.2869,
2690
+ "step": 3750
2691
+ },
2692
+ {
2693
+ "epoch": 1.734486735870819,
2694
+ "grad_norm": 1.1167171001434326,
2695
+ "learning_rate": 5.250287084352373e-07,
2696
+ "loss": 0.329,
2697
+ "step": 3760
2698
+ },
2699
+ {
2700
+ "epoch": 1.7391003460207612,
2701
+ "grad_norm": 1.1780617237091064,
2702
+ "learning_rate": 5.072074964453055e-07,
2703
+ "loss": 0.3262,
2704
+ "step": 3770
2705
+ },
2706
+ {
2707
+ "epoch": 1.7437139561707036,
2708
+ "grad_norm": 1.1685618162155151,
2709
+ "learning_rate": 4.896778153225062e-07,
2710
+ "loss": 0.2963,
2711
+ "step": 3780
2712
+ },
2713
+ {
2714
+ "epoch": 1.748327566320646,
2715
+ "grad_norm": 1.0674740076065063,
2716
+ "learning_rate": 4.7244080254272795e-07,
2717
+ "loss": 0.3124,
2718
+ "step": 3790
2719
+ },
2720
+ {
2721
+ "epoch": 1.7529411764705882,
2722
+ "grad_norm": 1.2227847576141357,
2723
+ "learning_rate": 4.55497576591028e-07,
2724
+ "loss": 0.293,
2725
+ "step": 3800
2726
+ },
2727
+ {
2728
+ "epoch": 1.7575547866205306,
2729
+ "grad_norm": 1.0834511518478394,
2730
+ "learning_rate": 4.3884923688905676e-07,
2731
+ "loss": 0.3092,
2732
+ "step": 3810
2733
+ },
2734
+ {
2735
+ "epoch": 1.7621683967704729,
2736
+ "grad_norm": 0.7183946371078491,
2737
+ "learning_rate": 4.224968637237198e-07,
2738
+ "loss": 0.2644,
2739
+ "step": 3820
2740
+ },
2741
+ {
2742
+ "epoch": 1.7667820069204152,
2743
+ "grad_norm": 1.1382250785827637,
2744
+ "learning_rate": 4.064415181770787e-07,
2745
+ "loss": 0.2823,
2746
+ "step": 3830
2747
+ },
2748
+ {
2749
+ "epoch": 1.7713956170703575,
2750
+ "grad_norm": 1.1042758226394653,
2751
+ "learning_rate": 3.90684242057498e-07,
2752
+ "loss": 0.3121,
2753
+ "step": 3840
2754
+ },
2755
+ {
2756
+ "epoch": 1.7760092272202999,
2757
+ "grad_norm": 0.8227053284645081,
2758
+ "learning_rate": 3.752260578320427e-07,
2759
+ "loss": 0.3145,
2760
+ "step": 3850
2761
+ },
2762
+ {
2763
+ "epoch": 1.7806228373702422,
2764
+ "grad_norm": 1.0205223560333252,
2765
+ "learning_rate": 3.600679685601349e-07,
2766
+ "loss": 0.3086,
2767
+ "step": 3860
2768
+ },
2769
+ {
2770
+ "epoch": 1.7852364475201845,
2771
+ "grad_norm": 0.8816052675247192,
2772
+ "learning_rate": 3.4521095782846623e-07,
2773
+ "loss": 0.2978,
2774
+ "step": 3870
2775
+ },
2776
+ {
2777
+ "epoch": 1.7898500576701268,
2778
+ "grad_norm": 1.44774329662323,
2779
+ "learning_rate": 3.306559896871714e-07,
2780
+ "loss": 0.3016,
2781
+ "step": 3880
2782
+ },
2783
+ {
2784
+ "epoch": 1.7944636678200692,
2785
+ "grad_norm": 0.9304317235946655,
2786
+ "learning_rate": 3.164040085872755e-07,
2787
+ "loss": 0.3066,
2788
+ "step": 3890
2789
+ },
2790
+ {
2791
+ "epoch": 1.7990772779700115,
2792
+ "grad_norm": 1.0888575315475464,
2793
+ "learning_rate": 3.0245593931940766e-07,
2794
+ "loss": 0.2851,
2795
+ "step": 3900
2796
+ },
2797
+ {
2798
+ "epoch": 1.8036908881199538,
2799
+ "grad_norm": 0.8086104989051819,
2800
+ "learning_rate": 2.8881268695379436e-07,
2801
+ "loss": 0.2901,
2802
+ "step": 3910
2803
+ },
2804
+ {
2805
+ "epoch": 1.8083044982698961,
2806
+ "grad_norm": 0.7356364727020264,
2807
+ "learning_rate": 2.7547513678153005e-07,
2808
+ "loss": 0.2997,
2809
+ "step": 3920
2810
+ },
2811
+ {
2812
+ "epoch": 1.8129181084198385,
2813
+ "grad_norm": 1.017858624458313,
2814
+ "learning_rate": 2.624441542571327e-07,
2815
+ "loss": 0.3282,
2816
+ "step": 3930
2817
+ },
2818
+ {
2819
+ "epoch": 1.8175317185697808,
2820
+ "grad_norm": 0.9155429601669312,
2821
+ "learning_rate": 2.497205849423834e-07,
2822
+ "loss": 0.2596,
2823
+ "step": 3940
2824
+ },
2825
+ {
2826
+ "epoch": 1.8221453287197233,
2827
+ "grad_norm": 0.9723671078681946,
2828
+ "learning_rate": 2.3730525445146146e-07,
2829
+ "loss": 0.3077,
2830
+ "step": 3950
2831
+ },
2832
+ {
2833
+ "epoch": 1.8267589388696654,
2834
+ "grad_norm": 1.2884184122085571,
2835
+ "learning_rate": 2.25198968397371e-07,
2836
+ "loss": 0.3411,
2837
+ "step": 3960
2838
+ },
2839
+ {
2840
+ "epoch": 1.831372549019608,
2841
+ "grad_norm": 0.9986656308174133,
2842
+ "learning_rate": 2.134025123396638e-07,
2843
+ "loss": 0.2816,
2844
+ "step": 3970
2845
+ },
2846
+ {
2847
+ "epoch": 1.83598615916955,
2848
+ "grad_norm": 0.7283441424369812,
2849
+ "learning_rate": 2.019166517334703e-07,
2850
+ "loss": 0.3093,
2851
+ "step": 3980
2852
+ },
2853
+ {
2854
+ "epoch": 1.8405997693194927,
2855
+ "grad_norm": 1.1408605575561523,
2856
+ "learning_rate": 1.9074213187982416e-07,
2857
+ "loss": 0.2848,
2858
+ "step": 3990
2859
+ },
2860
+ {
2861
+ "epoch": 1.8452133794694348,
2862
+ "grad_norm": 1.2907861471176147,
2863
+ "learning_rate": 1.7987967787730541e-07,
2864
+ "loss": 0.3013,
2865
+ "step": 4000
2866
+ },
2867
+ {
2868
+ "epoch": 1.8452133794694348,
2869
+ "eval_loss": 0.3492071032524109,
2870
+ "eval_runtime": 413.2138,
2871
+ "eval_samples_per_second": 8.158,
2872
+ "eval_steps_per_second": 1.166,
2873
+ "step": 4000
2874
+ },
2875
+ {
2876
+ "epoch": 1.8498269896193773,
2877
+ "grad_norm": 0.9750341176986694,
2878
+ "learning_rate": 1.6932999457498823e-07,
2879
+ "loss": 0.2951,
2880
+ "step": 4010
2881
+ },
2882
+ {
2883
+ "epoch": 1.8544405997693194,
2884
+ "grad_norm": 1.0746599435806274,
2885
+ "learning_rate": 1.5909376652670283e-07,
2886
+ "loss": 0.2518,
2887
+ "step": 4020
2888
+ },
2889
+ {
2890
+ "epoch": 1.859054209919262,
2891
+ "grad_norm": 1.2660961151123047,
2892
+ "learning_rate": 1.4917165794661849e-07,
2893
+ "loss": 0.3032,
2894
+ "step": 4030
2895
+ },
2896
+ {
2897
+ "epoch": 1.863667820069204,
2898
+ "grad_norm": 0.8381269574165344,
2899
+ "learning_rate": 1.395643126661428e-07,
2900
+ "loss": 0.2954,
2901
+ "step": 4040
2902
+ },
2903
+ {
2904
+ "epoch": 1.8682814302191466,
2905
+ "grad_norm": 0.9705010056495667,
2906
+ "learning_rate": 1.302723540921419e-07,
2907
+ "loss": 0.2756,
2908
+ "step": 4050
2909
+ },
2910
+ {
2911
+ "epoch": 1.8728950403690887,
2912
+ "grad_norm": 1.080946445465088,
2913
+ "learning_rate": 1.212963851664928e-07,
2914
+ "loss": 0.2792,
2915
+ "step": 4060
2916
+ },
2917
+ {
2918
+ "epoch": 1.8775086505190313,
2919
+ "grad_norm": 1.0065879821777344,
2920
+ "learning_rate": 1.1263698832695513e-07,
2921
+ "loss": 0.3286,
2922
+ "step": 4070
2923
+ },
2924
+ {
2925
+ "epoch": 1.8821222606689734,
2926
+ "grad_norm": 0.9605699181556702,
2927
+ "learning_rate": 1.0429472546938158e-07,
2928
+ "loss": 0.2919,
2929
+ "step": 4080
2930
+ },
2931
+ {
2932
+ "epoch": 1.886735870818916,
2933
+ "grad_norm": 1.2412244081497192,
2934
+ "learning_rate": 9.627013791125294e-08,
2935
+ "loss": 0.3285,
2936
+ "step": 4090
2937
+ },
2938
+ {
2939
+ "epoch": 1.891349480968858,
2940
+ "grad_norm": 1.0108641386032104,
2941
+ "learning_rate": 8.856374635655696e-08,
2942
+ "loss": 0.3172,
2943
+ "step": 4100
2944
+ },
2945
+ {
2946
+ "epoch": 1.8959630911188006,
2947
+ "grad_norm": 1.0112003087997437,
2948
+ "learning_rate": 8.117605086199686e-08,
2949
+ "loss": 0.2838,
2950
+ "step": 4110
2951
+ },
2952
+ {
2953
+ "epoch": 1.9005767012687427,
2954
+ "grad_norm": 0.8483492136001587,
2955
+ "learning_rate": 7.410753080454746e-08,
2956
+ "loss": 0.3001,
2957
+ "step": 4120
2958
+ },
2959
+ {
2960
+ "epoch": 1.9051903114186852,
2961
+ "grad_norm": 0.8492743372917175,
2962
+ "learning_rate": 6.735864485034493e-08,
2963
+ "loss": 0.2853,
2964
+ "step": 4130
2965
+ },
2966
+ {
2967
+ "epoch": 1.9098039215686273,
2968
+ "grad_norm": 1.6273894309997559,
2969
+ "learning_rate": 6.092983092492844e-08,
2970
+ "loss": 0.3221,
2971
+ "step": 4140
2972
+ },
2973
+ {
2974
+ "epoch": 1.9144175317185699,
2975
+ "grad_norm": 1.2614809274673462,
2976
+ "learning_rate": 5.482150618481952e-08,
2977
+ "loss": 0.2815,
2978
+ "step": 4150
2979
+ },
2980
+ {
2981
+ "epoch": 1.919031141868512,
2982
+ "grad_norm": 1.0911434888839722,
2983
+ "learning_rate": 4.9034066990457094e-08,
2984
+ "loss": 0.3051,
2985
+ "step": 4160
2986
+ },
2987
+ {
2988
+ "epoch": 1.9236447520184545,
2989
+ "grad_norm": 1.4605953693389893,
2990
+ "learning_rate": 4.356788888047747e-08,
2991
+ "loss": 0.301,
2992
+ "step": 4170
2993
+ },
2994
+ {
2995
+ "epoch": 1.9282583621683966,
2996
+ "grad_norm": 0.8996632099151611,
2997
+ "learning_rate": 3.8423326547344376e-08,
2998
+ "loss": 0.297,
2999
+ "step": 4180
3000
+ },
3001
+ {
3002
+ "epoch": 1.9328719723183392,
3003
+ "grad_norm": 0.7587381601333618,
3004
+ "learning_rate": 3.360071381433516e-08,
3005
+ "loss": 0.309,
3006
+ "step": 4190
3007
+ },
3008
+ {
3009
+ "epoch": 1.9374855824682813,
3010
+ "grad_norm": 0.9873301386833191,
3011
+ "learning_rate": 2.9100363613879246e-08,
3012
+ "loss": 0.2899,
3013
+ "step": 4200
3014
+ },
3015
+ {
3016
+ "epoch": 1.9420991926182238,
3017
+ "grad_norm": 1.2133296728134155,
3018
+ "learning_rate": 2.492256796725212e-08,
3019
+ "loss": 0.2905,
3020
+ "step": 4210
3021
+ },
3022
+ {
3023
+ "epoch": 1.946712802768166,
3024
+ "grad_norm": 1.017208456993103,
3025
+ "learning_rate": 2.1067597965624963e-08,
3026
+ "loss": 0.3042,
3027
+ "step": 4220
3028
+ },
3029
+ {
3030
+ "epoch": 1.9513264129181085,
3031
+ "grad_norm": 1.4077831506729126,
3032
+ "learning_rate": 1.753570375247815e-08,
3033
+ "loss": 0.2793,
3034
+ "step": 4230
3035
+ },
3036
+ {
3037
+ "epoch": 1.9559400230680506,
3038
+ "grad_norm": 1.1960562467575073,
3039
+ "learning_rate": 1.4327114507365347e-08,
3040
+ "loss": 0.3012,
3041
+ "step": 4240
3042
+ },
3043
+ {
3044
+ "epoch": 1.9605536332179931,
3045
+ "grad_norm": 1.1810780763626099,
3046
+ "learning_rate": 1.1442038431044856e-08,
3047
+ "loss": 0.2987,
3048
+ "step": 4250
3049
+ },
3050
+ {
3051
+ "epoch": 1.9651672433679352,
3052
+ "grad_norm": 1.0378133058547974,
3053
+ "learning_rate": 8.880662731968748e-09,
3054
+ "loss": 0.3028,
3055
+ "step": 4260
3056
+ },
3057
+ {
3058
+ "epoch": 1.9697808535178778,
3059
+ "grad_norm": 0.9532252550125122,
3060
+ "learning_rate": 6.6431536141348115e-09,
3061
+ "loss": 0.327,
3062
+ "step": 4270
3063
+ },
3064
+ {
3065
+ "epoch": 1.9743944636678201,
3066
+ "grad_norm": 0.8077103495597839,
3067
+ "learning_rate": 4.729656266304061e-09,
3068
+ "loss": 0.2861,
3069
+ "step": 4280
3070
+ },
3071
+ {
3072
+ "epoch": 1.9790080738177624,
3073
+ "grad_norm": 1.0243134498596191,
3074
+ "learning_rate": 3.1402948525766085e-09,
3075
+ "loss": 0.3224,
3076
+ "step": 4290
3077
+ },
3078
+ {
3079
+ "epoch": 1.9836216839677048,
3080
+ "grad_norm": 1.4665522575378418,
3081
+ "learning_rate": 1.8751725043375526e-09,
3082
+ "loss": 0.2968,
3083
+ "step": 4300
3084
+ },
3085
+ {
3086
+ "epoch": 1.988235294117647,
3087
+ "grad_norm": 1.1807401180267334,
3088
+ "learning_rate": 9.343713135623323e-10,
3089
+ "loss": 0.3063,
3090
+ "step": 4310
3091
+ },
3092
+ {
3093
+ "epoch": 1.9928489042675894,
3094
+ "grad_norm": 1.1453770399093628,
3095
+ "learning_rate": 3.1795232749320947e-10,
3096
+ "loss": 0.2943,
3097
+ "step": 4320
3098
+ },
3099
+ {
3100
+ "epoch": 1.9974625144175318,
3101
+ "grad_norm": 1.3085640668869019,
3102
+ "learning_rate": 2.5955544673550438e-11,
3103
+ "loss": 0.2894,
3104
+ "step": 4330
3105
+ },
3106
+ {
3107
+ "epoch": 1.9993079584775086,
3108
+ "step": 4334,
3109
+ "total_flos": 545751381377024.0,
3110
+ "train_loss": 0.3850643413100971,
3111
+ "train_runtime": 89295.2192,
3112
+ "train_samples_per_second": 0.68,
3113
+ "train_steps_per_second": 0.049
3114
+ }
3115
+ ],
3116
+ "logging_steps": 10,
3117
+ "max_steps": 4334,
3118
+ "num_input_tokens_seen": 0,
3119
+ "num_train_epochs": 2,
3120
+ "save_steps": 500,
3121
+ "stateful_callbacks": {
3122
+ "TrainerControl": {
3123
+ "args": {
3124
+ "should_epoch_stop": false,
3125
+ "should_evaluate": false,
3126
+ "should_log": false,
3127
+ "should_save": true,
3128
+ "should_training_stop": true
3129
+ },
3130
+ "attributes": {}
3131
+ }
3132
+ },
3133
+ "total_flos": 545751381377024.0,
3134
+ "train_batch_size": 1,
3135
+ "trial_name": null,
3136
+ "trial_params": null
3137
+ }
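The records above fully determine the two loss curves added as PNGs further down. Here is a minimal sketch of replaying them offline; it assumes only that the file is the standard `trainer_state.json` that `transformers.Trainer` writes, with the `log_history` list shown in this diff:

```python
import json

import matplotlib.pyplot as plt

# log_history holds one dict per logging step (every 10 steps here),
# plus eval entries ("eval_loss") and a final training summary.
with open("trainer_state.json") as f:
    state = json.load(f)

train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

plt.figure()
plt.plot([e["step"] for e in train_logs], [e["loss"] for e in train_logs])
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss.png")

plt.figure()
plt.plot([e["step"] for e in eval_logs], [e["eval_loss"] for e in eval_logs], marker="o")
plt.xlabel("step")
plt.ylabel("eval loss")
plt.savefig("training_eval_loss.png")
```

Note that the learning-rate column decays smoothly to about 2.6e-11 by step 4330, consistent with a cosine schedule annealing to zero at `max_steps` = 4334.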
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a35127ad4b0c9c29592d5922a4090bcd7436ded6fb9a3c22a375247290b1621
+ size 7672
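The diff shows only a Git LFS stub; the pointer records a 7,672-byte binary. Assuming the usual Transformers convention that this file is a `torch.save`-pickled `TrainingArguments` object, it can be inspected after an LFS-enabled clone:

```python
import torch

# Recent PyTorch (>= 2.6) defaults torch.load to weights_only=True, which
# refuses to unpickle arbitrary classes such as TrainingArguments; disable
# it explicitly, and only for repositories you trust.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.lr_scheduler_type, args.num_train_epochs)
```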
training_eval_loss.png ADDED
training_loss.png ADDED
vocab.json ADDED
The diff for this file is too large to render. See raw diff
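Since `vocab.json` (the tokenizer's raw token-to-id map) is too large for the diff view, the tokenizer is easier to inspect through the `transformers` API. A small sketch, where the repo id is a hypothetical placeholder for this repository's actual id on the Hub:

```python
from transformers import AutoTokenizer

# "user/sft" stands in for this repository's Hub id.
tok = AutoTokenizer.from_pretrained("user/sft")

print(len(tok))                      # vocabulary size, including added tokens
print(tok.tokenize("Hello, world"))  # quick sanity check of the vocab/merges
```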